idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
47,800
def indent(lines, amount, ch=' '):
    """Indent each line of the string *lines* by *amount* copies of *ch*."""
    pad = amount * ch
    return '\n'.join(pad + line for line in lines.split('\n'))
Indent the lines in a string by padding each one with proper number of pad characters
47,801
def prof_main(main):
    """Decorator for profiling main programs.

    If the first CLI argument is ``prof``, runs *main* under cProfile and
    prints the sorted stats; otherwise calls *main* normally and exits with
    its return code.
    """
    @wraps(main)
    def wrapper(*args, **kwargs):
        import sys
        try:
            do_prof = sys.argv[1] == "prof"
            if do_prof:
                # Consume the "prof" token so main() sees a clean argv.
                sys.argv.pop(1)
        except Exception:
            do_prof = False
        if not do_prof:
            sys.exit(main())
        else:
            print("Entering profiling mode...")
            import pstats, cProfile, tempfile
            prof_file = kwargs.get("prof_file", None)
            if prof_file is None:
                _, prof_file = tempfile.mkstemp()
                print("Profiling data stored in %s" % prof_file)
            sortby = kwargs.get("sortby", "time")
            cProfile.runctx("main()", globals(), locals(), prof_file)
            s = pstats.Stats(prof_file)
            s.strip_dirs().sort_stats(sortby).print_stats()
            if "retval" not in kwargs:
                sys.exit(0)
            else:
                return kwargs["retval"]
    return wrapper
Decorator for profiling main programs .
47,802
def invalidate(cls, inst, name):
    """Invalidate a lazy attribute.

    Removes the cached value of lazy attribute *name* from *inst* so it is
    recomputed on next access.  Raises AttributeError if the instance has no
    ``__dict__`` or *name* is not a *cls* (lazy) attribute on its class.
    """
    inst_cls = inst.__class__
    if not hasattr(inst, '__dict__'):
        raise AttributeError("'%s' object has no attribute '__dict__'" % (inst_cls.__name__,))
    # Apply private-name mangling for dunder-prefixed (non-dunder) names.
    if name.startswith('__') and not name.endswith('__'):
        name = '_%s%s' % (inst_cls.__name__, name)
    if not isinstance(getattr(inst_cls, name), cls):
        raise AttributeError("'%s.%s' is not a %s attribute" % (inst_cls.__name__, name, cls.__name__))
    # Only the instance cache is deleted; the class descriptor stays.
    if name in inst.__dict__:
        del inst.__dict__[name]
Invalidate a lazy attribute .
47,803
def as_set(obj):
    """Convert *obj* into a set; returns None if *obj* is None.

    A non-iterable scalar becomes a singleton set.  Uses
    ``collections.abc`` because the ``collections.Set`` /
    ``collections.Iterable`` aliases were removed in Python 3.10.
    """
    import collections.abc
    if obj is None or isinstance(obj, collections.abc.Set):
        return obj
    if not isinstance(obj, collections.abc.Iterable):
        return set((obj,))
    return set(obj)
Convert obj into a set; returns None if obj is None.
47,804
def logged(level=logging.DEBUG):
    """Logging decorator: logs entry and exit of the wrapped callable at *level*."""
    def decorator(f):
        call_logger = logging.getLogger("{}.{}".format(f.__module__, f.__name__))
        def inner(*args, **kwargs):
            call_logger.log(level, "Called at {} with args = {} and kwargs = {}".format(datetime.datetime.now(), args, kwargs))
            result = f(*args, **kwargs)
            call_logger.log(level, "Done at {} with args = {} and kwargs = {}".format(datetime.datetime.now(), args, kwargs))
            return result
        return inner
    return decorator
Useful logging decorator . If a method is logged the beginning and end of the method call will be logged at a pre - specified level .
47,805
def enable_logging(main):
    """Decorator for main functions that adds logger initialization.

    Installs an argparse ``--loglevel`` option (parsed from sys.argv) and
    configures the root logger before delegating to *main*.  Useful for
    simple mains that call libraries using the logging module.
    """
    @functools.wraps(main)
    def wrapper(*args, **kwargs):
        import argparse
        parser = argparse.ArgumentParser()
        parser.add_argument('--loglevel', default="ERROR", type=str,
                            help="Set the loglevel. Possible values: CRITICAL, ERROR (default),"
                                 "WARNING, INFO, DEBUG")
        options = parser.parse_args()
        # Translate the symbolic level name into its numeric value.
        numeric_level = getattr(logging, options.loglevel.upper(), None)
        if not isinstance(numeric_level, int):
            raise ValueError('Invalid log level: %s' % options.loglevel)
        logging.basicConfig(level=numeric_level)
        retcode = main(*args, **kwargs)
        return retcode
    return wrapper
This decorator is used to decorate main functions . It adds the initialization of the logger and an argument parser that allows one to select the loglevel . Useful if we are writing simple main functions that call libraries where the logging module is used
47,806
def which(cmd):
    """Return the full path to an executable, or None if not found on PATH."""
    def _is_executable(candidate):
        return os.path.isfile(candidate) and os.access(candidate, os.X_OK)

    head, _tail = os.path.split(cmd)
    if head:
        # cmd already carries a directory component: check it directly.
        if _is_executable(cmd):
            return cmd
        return None
    for directory in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(directory, cmd)
        if _is_executable(candidate):
            return candidate
    return None
Returns the full path to an executable.
47,807
def all_subclasses(cls):
    """Return all subclasses of *cls* (direct first, then transitively)."""
    direct = cls.__subclasses__()
    nested = []
    for sub in direct:
        nested.extend(all_subclasses(sub))
    return direct + nested
Given a class cls, this recursive function returns a list with all subclasses, subclasses of subclasses, and so on.
47,808
def find_top_pyfile():
    """Inspect the CPython call stack and return the absolute path of the
    top-level script (the file of the outermost frame)."""
    import os
    frame = currentframe()
    while True:
        if frame.f_back is None:
            # Outermost frame reached: this is the top-level script.
            finfo = getframeinfo(frame)
            return os.path.abspath(finfo.filename)
        frame = frame.f_back
This function inspects the Cpython frame to find the path of the script .
47,809
def pprint_table(table, out=sys.stdout, rstrip=False):
    """Print a table of strings padded for alignment.

    Each row must have the same number of columns; the first column is
    left-justified, the others right-justified.  If *rstrip*, trailing
    whitespace is stripped from every cell first (mutating *table*).
    """
    def _col_width(idx):
        return max(len(row[idx]) for row in table)

    if rstrip:
        for row_idx, row in enumerate(table):
            table[row_idx] = [cell.rstrip() for cell in row]

    paddings = [_col_width(i) for i in range(len(table[0]))]

    for row in table:
        out.write(row[0].ljust(paddings[0] + 1))
        for i in range(1, len(row)):
            out.write(row[i].rjust(paddings[i] + 2))
        out.write("\n")
Prints out a table of data padded for alignment Each row must have the same number of columns .
47,810
def gcd(*numbers):
    """Return the greatest common divisor of a sequence of numbers."""
    result = numbers[0]
    for value in numbers:
        result = pygcd(result, value)
    return result
Returns the greatest common divisor for a sequence of numbers .
47,811
def lcm(*numbers):
    """Return the lowest common multiple of a sequence of numbers."""
    result = 1
    for value in numbers:
        result = (value * result) // gcd(value, result)
    return result
Return lowest common multiple of a sequence of numbers .
47,812
def gcd_float(numbers, tol=1e-8):
    """Greatest common divisor for a sequence of floats.

    Uses a numerical tolerance *tol* to terminate Euclid's algorithm, so it
    works on floating-point values.
    """
    def _pair(a, b):
        # Euclid's algorithm, stopping once the remainder drops below tol.
        while b > tol:
            a, b = b, a % b
        return a

    result = numbers[0]
    for value in numbers:
        result = _pair(result, value)
    return result
Returns the greatest common divisor for a sequence of numbers . Uses a numerical tolerance so can be used on floats
47,813
def chunks(items, n):
    """Yield successive n-sized chunks (as tuples) from a list-like object."""
    source = iter(items)
    while True:
        piece = tuple(itertools.islice(source, n))
        if not piece:
            return
        yield piece
Yield successive n - sized chunks from a list - like object .
47,814
def iterator_from_slice(s):
    """Construct an iterator from a slice object *s*.

    A slice without a stop yields an infinite itertools.count.
    """
    import numpy as np
    start = 0 if s.start is None else s.start
    step = 1 if s.step is None else s.step
    if s.stop is None:
        return itertools.count(start=start, step=step)
    return iter(np.arange(start, s.stop, step))
Constructs an iterator given a slice object s .
47,815
def colored_map(text, cmap):
    """Return colorized text; *cmap* maps tokens to color options.

    Values may be a plain color name or a dict of keyword options for
    ``colored``.  Returns *text* unchanged when colors are disabled.
    """
    if not __ISON:
        return text
    for token, options in cmap.items():
        if isinstance(options, dict):
            replacement = colored(token, **options)
        else:
            replacement = colored(token, color=options)
        text = text.replace(token, replacement)
    return text
Return colorized text . cmap is a dict mapping tokens to color options .
47,816
def cprint_map(text, cmap, **kwargs):
    """Print colorized text; *cmap* maps tokens to color options.

    Extra kwargs go to print(); ``flush`` is dropped and the print retried
    for interpreters whose print() does not accept it.
    """
    message = colored_map(text, cmap)
    try:
        print(message, **kwargs)
    except TypeError:
        kwargs.pop("flush", None)
        print(message, **kwargs)
Print colorize text . cmap is a dict mapping keys to color options . kwargs are passed to print function
47,817
def as_dict(self):
    """A JSON serializable dict representation of an object.

    Records @module/@class/@version plus every ``__init__`` argument found
    as ``self.argname`` or ``self._argname``; nested objects exposing
    ``as_dict`` are serialized recursively, as are lists/tuples/dicts.
    Raises NotImplementedError when an init argument cannot be located.
    """
    d = {"@module": self.__class__.__module__,
         "@class": self.__class__.__name__}
    try:
        # Version of the top-level package the class lives in, if any.
        parent_module = self.__class__.__module__.split('.')[0]
        module_version = import_module(parent_module).__version__
        d["@version"] = u"{}".format(module_version)
    except AttributeError:
        d["@version"] = None
    args = getargspec(self.__class__.__init__).args

    def recursive_as_dict(obj):
        if isinstance(obj, (list, tuple)):
            return [recursive_as_dict(it) for it in obj]
        elif isinstance(obj, dict):
            return {kk: recursive_as_dict(vv) for kk, vv in obj.items()}
        elif hasattr(obj, "as_dict"):
            return obj.as_dict()
        return obj

    for c in args:
        if c != "self":
            try:
                a = self.__getattribute__(c)
            except AttributeError:
                try:
                    # Fall back to the conventional private attribute.
                    a = self.__getattribute__("_" + c)
                except AttributeError:
                    raise NotImplementedError(
                        "Unable to automatically determine as_dict "
                        "format from class. MSONAble requires all "
                        "args to be present as either self.argname or "
                        "self._argname, and kwargs to be present under"
                        "a self.kwargs variable to automatically "
                        "determine the dict format. Alternatively, "
                        "you can implement both as_dict and from_dict.")
            d[c] = recursive_as_dict(a)
    if hasattr(self, "kwargs"):
        d.update(**self.kwargs)
    if hasattr(self, "_kwargs"):
        d.update(**self._kwargs)
    return d
A JSON serializable dict representation of an object .
47,818
def process_decoded(self, d):
    """Recursive method to support decoding dicts and lists containing
    serialized objects.

    Dicts carrying @module/@class markers are reconstructed via their
    ``from_dict``; datetimes, numpy arrays and bson ObjectIds get special
    handling; everything else is decoded recursively or passed through.
    """
    if isinstance(d, dict):
        if "@module" in d and "@class" in d:
            modname = d["@module"]
            classname = d["@class"]
        else:
            modname = None
            classname = None
        if modname and modname not in ["bson.objectid", "numpy"]:
            if modname == "datetime" and classname == "datetime":
                try:
                    # Stored as a string, with or without microseconds.
                    dt = datetime.datetime.strptime(d["string"], "%Y-%m-%d %H:%M:%S.%f")
                except ValueError:
                    dt = datetime.datetime.strptime(d["string"], "%Y-%m-%d %H:%M:%S")
                return dt
            mod = __import__(modname, globals(), locals(), [classname], 0)
            if hasattr(mod, classname):
                cls_ = getattr(mod, classname)
                # Strip the @-metadata keys before reconstruction.
                data = {k: v for k, v in d.items() if not k.startswith("@")}
                if hasattr(cls_, "from_dict"):
                    return cls_.from_dict(data)
        elif np is not None and modname == "numpy" and classname == "array":
            return np.array(d["data"], dtype=d["dtype"])
        elif (bson is not None) and modname == "bson.objectid" and classname == "ObjectId":
            return bson.objectid.ObjectId(d["oid"])
        # Fallback: decode keys and values recursively.
        return {self.process_decoded(k): self.process_decoded(v) for k, v in d.items()}
    elif isinstance(d, list):
        return [self.process_decoded(x) for x in d]
    return d
Recursive method to support decoding dicts and lists containing pymatgen objects .
47,819
def nCr(n, r):
    """Calculate nCr (the binomial coefficient n choose r).

    Uses integer floor division: the original float true-division loses
    precision (and can overflow) once the factorials exceed float range,
    e.g. for n around 60 and above.  Each division here is exact because
    n!/r! and (n!/r!)/(n-r)! are integers.
    """
    f = math.factorial
    return f(n) // f(r) // f(n - r)
Calculates nCr .
47,820
def nPr(n, r):
    """Calculate nPr (the number of r-permutations of n items).

    Uses integer floor division: the original float true-division loses
    precision (and can overflow) for large factorials.  The division is
    exact because n!/(n-r)! is an integer.
    """
    f = math.factorial
    return f(n) // f(n - r)
Calculates nPr .
47,821
def copy_r(src, dst):
    """Recursive copy similar to Unix ``cp -r``.

    Unlike shutil.copytree, works even when the destination directory
    already exists.  Warns instead of recursing when the destination lies
    inside the source (copying a directory into itself).
    """
    abs_src = os.path.abspath(src)
    abs_dst = os.path.abspath(dst)
    try:
        os.makedirs(abs_dst)
    except OSError:
        # Destination already exists; that is fine.
        pass
    for name in os.listdir(abs_src):
        src_path = os.path.join(abs_src, name)
        if os.path.isfile(src_path):
            shutil.copy(src_path, abs_dst)
        elif not abs_dst.startswith(src_path):
            copy_r(src_path, os.path.join(abs_dst, name))
        else:
            warnings.warn("Cannot copy %s to itself" % src_path)
Implements a recursive copy function similar to Unix s cp - r command . Surprisingly python does not have a real equivalent . shutil . copytree only works if the destination directory is not present .
47,822
def gzip_dir(path, compresslevel=6):
    """Gzip every file in directory *path* in place.

    Unlike shutil.make_archive this produces individually gzipped files
    readable with zless/zcat.  Files already ending in "gz" are skipped;
    originals are removed after compression (stats are preserved).
    """
    for name in os.listdir(path):
        if name.lower().endswith("gz"):
            continue
        full = os.path.join(path, name)
        gz_name = '{}.gz'.format(full)
        with open(full, 'rb') as src, \
                GzipFile(gz_name, 'wb', compresslevel=compresslevel) as dst:
            shutil.copyfileobj(src, dst)
        shutil.copystat(full, gz_name)
        os.remove(full)
Gzips all files in a directory . Note that this is different from shutil . make_archive which creates a tar archive . The aim of this method is to create gzipped files that can still be read using common Unix - style commands like zless or zcat .
47,823
def compress_file(filepath, compression="gz"):
    """Compress *filepath* with gz or bz2, removing the original.

    Behaves like the Unix gzip/bzip2 commands in that the uncompressed
    file is not retained.  Already-compressed files are left untouched.
    """
    if compression not in ["gz", "bz2"]:
        raise ValueError("Supported compression formats are 'gz' and 'bz2'.")
    from monty.io import zopen
    if filepath.lower().endswith(".%s" % compression):
        return  # already compressed
    with open(filepath, 'rb') as f_in, \
            zopen('%s.%s' % (filepath, compression), 'wb') as f_out:
        f_out.writelines(f_in)
    os.remove(filepath)
Compresses a file with the correct extension . Functions like standard Unix command line gzip and bzip2 in the sense that the original uncompressed files are not retained .
47,824
def compress_dir(path, compression="gz"):
    """Recursively compress every file under *path* (each file singly;
    no tar archive is created)."""
    for parent, _subdirs, files in os.walk(path):
        for name in files:
            compress_file(os.path.join(parent, name), compression=compression)
Recursively compresses all files in a directory . Note that this compresses all files singly i . e . it does not create a tar archive . For that just use Python tarfile class .
47,825
def decompress_file(filepath):
    """Decompress a file in place, removing the compressed original.

    The format is detected from the extension (bz2, gz or z); other files
    are left untouched.
    """
    from monty.io import zopen
    parts = filepath.split(".")
    if parts[-1].upper() not in ("BZ2", "GZ", "Z"):
        return
    target = ".".join(parts[:-1])
    with open(target, 'wb') as f_out, zopen(filepath, 'rb') as f_in:
        f_out.writelines(f_in)
    os.remove(filepath)
Decompresses a file with the correct extension . Automatically detects gz bz2 or z extension .
47,826
def decompress_dir(path):
    """Recursively decompress every file under *path*."""
    for parent, _subdirs, files in os.walk(path):
        for name in files:
            decompress_file(os.path.join(parent, name))
Recursively decompresses all files in a directory .
47,827
def remove(path, follow_symlink=False):
    """Delete a file, a symlink (optionally its target too) or a directory tree."""
    if os.path.isfile(path):
        os.remove(path)
        return
    if os.path.islink(path):
        if follow_symlink:
            # Recurse into the link target before removing the link itself.
            remove(os.readlink(path))
        os.unlink(path)
        return
    shutil.rmtree(path)
Implements a remove function that will delete files, folder trees and symlink trees.
47,828
def compute_hashes(obj, hashes=frozenset(['md5'])):
    """Digest a file-like object or a bytes-like object.

    Computes every requested algorithm in a single pass and returns a dict
    mapping each algorithm to its hex digest and "<alg>_base64" to the
    base64-encoded digest.  Unknown algorithms are logged and skipped.
    """
    if not (hasattr(obj, 'read') or isinstance(obj, bytes)):
        raise ValueError("Cannot compute hash for given input: a file-like object or bytes-like object is required")

    hashers = dict()
    for alg in hashes:
        try:
            hashers[alg] = hashlib.new(alg.lower())
        except ValueError:
            logging.warning("Unable to validate file contents using unknown hash algorithm: %s", alg)

    while True:
        if hasattr(obj, 'read'):
            block = obj.read(1024 ** 2)
        else:
            # bytes input: consume it whole, then stop on the next pass.
            block, obj = obj, None
        if not block:
            break
        for h in hashers.values():
            h.update(block)

    results = dict()
    for alg, h in hashers.items():
        digest = h.hexdigest()
        b64 = base64.b64encode(h.digest())
        if not isinstance(b64, str) and isinstance(b64, bytes):
            b64 = b64.decode('ascii')
        results[alg] = digest
        results[alg + "_base64"] = b64
    return results
Digests input data read from file - like object fd or passed directly as bytes - like object . Compute hashes for multiple algorithms . Default is MD5 . Returns a tuple of a hex - encoded digest string and a base64 - encoded value suitable for an HTTP header .
47,829
def compute_file_hashes(file_path, hashes=frozenset(['md5'])):
    """Digest the data of the file at *file_path*.

    Returns None (with a warning) when the file does not exist; re-raises
    I/O errors after logging them.
    """
    if not os.path.exists(file_path):
        logging.warning("%s does not exist" % file_path)
        return
    logging.debug("Computing [%s] hashes for file [%s]" % (','.join(hashes), file_path))
    try:
        with open(file_path, 'rb') as fd:
            return compute_hashes(fd, hashes)
    except (IOError, OSError) as e:
        logging.warning("Error while calculating digest(s) for file %s: %s" % (file_path, str(e)))
        raise
Digests data read from file denoted by file_path .
47,830
def validate(self, processes=1, fast=False, completeness_only=False, callback=None):
    """Check that the bag structure and contents are valid.

    Runs the structure, bagit.txt, fetch.txt and payload checks in order;
    each helper raises on failure, so reaching the end means the bag is
    valid and True is returned.
    """
    self._validate_structure()
    self._validate_bagittxt()
    self._validate_fetch()
    self._validate_contents(processes=processes, fast=fast, completeness_only=completeness_only, callback=callback)
    return True
Checks the structure and contents are valid .
47,831
def _validate_fetch(self):
    """Validate the fetch.txt file.

    Rejects entries whose URL has no scheme.  The original test,
    ``if not all(parsed_url.scheme)``, iterated the characters of the
    scheme string and was always falsy-free (``all('')`` is True too),
    so malformed URLs were never rejected.
    """
    for url, file_size, filename in self.fetch_entries():
        parsed_url = urlparse(url)
        if not parsed_url.scheme:
            raise BagError(_('Malformed URL in fetch.txt: %s') % url)
Validate the fetch . txt file
47,832
def _validate_completeness(self):
    """Verify that the file manifests match the files in the data directory.

    Collects an error for every path listed only in the manifests or
    present only on disk.  NOTE(review): paths only in fetch.txt are
    logged but not appended to *errors* — presumably deliberate, since
    fetched files may legitimately be absent; confirm against the BagIt
    specification.
    """
    errors = list()
    only_in_manifests, only_on_fs, only_in_fetch = self.compare_manifests_with_fs_and_fetch()
    for path in only_in_manifests:
        e = FileMissing(path)
        LOGGER.warning(force_unicode(e))
        errors.append(e)
    for path in only_on_fs:
        e = UnexpectedFile(path)
        LOGGER.warning(force_unicode(e))
        errors.append(e)
    for path in only_in_fetch:
        e = UnexpectedRemoteFile(path)
        LOGGER.warning(force_unicode(e))
    if errors:
        raise BagValidationError(_("Bag validation failed"), errors)
Verify that the actual file manifests match the files in the data directory
47,833
def get_detection_results(url, timeout, metadata=False, save_har=False):
    """Run all plugins against *url* and return the detection results.

    Optionally saves the HAR capture to a temporary ``.har`` file.
    Raises NoPluginsError when no plugins could be loaded.
    """
    plugins = load_plugins()
    if not plugins:
        raise NoPluginsError('No plugins found')

    logger.debug('[+] Starting detection with %(n)d plugins', {'n': len(plugins)})
    response = get_response(url, plugins, timeout)
    if save_har:
        fd, path = tempfile.mkstemp(suffix='.har')
        logger.info(f'Saving HAR file to {path}')
        # open() accepts the raw file descriptor returned by mkstemp.
        with open(fd, 'w') as f:
            json.dump(response['har'], f)

    det = Detector(response, plugins, url)
    softwares = det.get_results(metadata=metadata)

    output = {
        'url': url,
        'softwares': softwares,
    }
    return output
Return results from detector .
47,834
def get_plugins(metadata):
    """Return the registered plugins, sorted by name.

    With *metadata* set, each entry is a dict with name/homepage (and
    hints when present); otherwise just the plugin name.
    """
    plugins = load_plugins()
    if not plugins:
        raise NoPluginsError('No plugins found')

    results = []
    for plugin in sorted(plugins.get_all(), key=attrgetter('name')):
        if not metadata:
            results.append(plugin.name)
            continue
        entry = {'name': plugin.name, 'homepage': plugin.homepage}
        hints = getattr(plugin, 'hints', [])
        if hints:
            entry['hints'] = hints
        results.append(entry)
    return results
Return the registered plugins .
47,835
def get_most_complete_pm(pms):
    """Return the plugin match with the longest version string.

    Falls back to a presence-only match when no match carries a version;
    returns None for an empty input.
    """
    if not pms:
        return None

    best_version = None
    best_presence = None
    for pm in pms:
        if pm.version:
            if best_version is None or len(pm.version) > len(best_version.version):
                best_version = pm
        elif pm.presence:
            best_presence = pm

    return best_version or best_presence
Return plugin match with longer version if not available will return plugin match with presence = True
47,836
def docker_container():
    """Context manager that starts the Splash server in a Docker container.

    When SETUP_SPLASH is set, (re)starts the container; then asks Splash to
    garbage-collect.  Connection errors during GC are ignored as a
    best-effort cleanup.
    """
    if SETUP_SPLASH:
        dm = DockerManager()
        dm.start_container()
    try:
        requests.post(f'{SPLASH_URL}/_gc')
    except requests.exceptions.RequestException:
        pass
    yield
Start the Splash server on a Docker container . If the container doesn t exist it is created and named splash - detectem .
47,837
def is_url_allowed(url):
    """Return True if *url* matches no blacklisted pattern (fonts/images)."""
    blacklist = [
        r'\.ttf', r'\.woff', r'fonts\.googleapis\.com', r'\.png',
        r'\.jpe?g', r'\.gif', r'\.svg',
    ]
    return not any(re.search(pattern, url) for pattern in blacklist)
Return True if url is not in blacklist .
47,838
def is_valid_mimetype(response):
    """Return True unless the response mimeType is blacklisted (images).

    A missing mimeType counts as valid.
    """
    blacklist = [
        'image/',
    ]
    mimetype = response.get('mimeType')
    if not mimetype:
        return True
    return not any(banned in mimetype for banned in blacklist)
Return True if the mimetype is not blacklisted .
47,839
def get_charset(response):
    """Return the charset parsed from the response mimeType, else the default."""
    found = re.findall(r';charset=(.*)', response.get('mimeType', ''))
    if found:
        return found[0]
    return DEFAULT_CHARSET
Return charset from response or default charset .
47,840
def create_lua_script(plugins):
    """Return the bundled Lua script template filled with plugin
    JavaScript matcher data (JSON-encoded)."""
    lua_template = pkg_resources.resource_string('detectem', 'script.lua')
    template = Template(lua_template.decode('utf-8'))
    javascript_data = to_javascript_data(plugins)
    return template.substitute(js_data=json.dumps(javascript_data))
Return script template filled up with plugin javascript data .
47,841
def to_javascript_data(plugins):
    """Return a list of dicts with every plugin's DOM matchers.

    Double quotes are escaped so the statements can be embedded in
    JavaScript source.
    """
    def escape(value):
        return re.sub(r'"', r'\\"', value)

    def dom_matchers(plugin):
        escaped = []
        for check_statement, version_statement in plugin.get_matchers('dom'):
            escaped.append({
                'check_statement': escape(check_statement),
                'version_statement': escape(version_statement or ''),
            })
        return escaped

    return [
        {'name': plugin.name, 'matchers': dom_matchers(plugin)}
        for plugin in plugins.with_dom_matchers()
    ]
Return a dictionary with all JavaScript matchers . Quotes are escaped .
47,842
def get_response(url, plugins, timeout=SPLASH_TIMEOUT):
    """Fetch *url* through Splash and return HAR, inline scripts and the
    software already detected by the JavaScript matchers.

    Raises SplashError on connection failure or error status codes.
    """
    lua_script = create_lua_script(plugins)
    lua = urllib.parse.quote_plus(lua_script)
    page_url = f'{SPLASH_URL}/execute?url={url}&timeout={timeout}&lua_source={lua}'
    try:
        with docker_container():
            logger.debug('[+] Sending request to Splash instance')
            res = requests.get(page_url)
    except requests.exceptions.ConnectionError:
        raise SplashError("Could not connect to Splash server {}".format(SPLASH_URL))
    logger.debug('[+] Response received')
    json_data = res.json()
    if res.status_code in ERROR_STATUS_CODES:
        raise SplashError(get_splash_error(json_data))
    softwares = json_data['softwares']
    scripts = json_data['scripts'].values()
    # Filter the HAR down to entries worth analysing.
    har = get_valid_har(json_data['har'])
    js_error = get_evaljs_error(json_data)
    if js_error:
        logger.debug('[+] WARNING: failed to eval JS matchers: %(n)s', {'n': js_error})
    else:
        logger.debug('[+] Detected %(n)d softwares from the DOM', {'n': len(softwares)})
    logger.debug('[+] Detected %(n)d scripts from the DOM', {'n': len(scripts)})
    logger.debug('[+] Final HAR has %(n)d valid entries', {'n': len(har)})
    return {'har': har, 'scripts': scripts, 'softwares': softwares}
Return response with HAR, inline scripts and software detected by JS matchers.
47,843
def get_valid_har(har_data):
    """Return the HAR entries with allowed URLs and mimetypes.

    Response text is base64-decoded in place using the declared charset;
    entries without text get an empty string.
    """
    valid_entries = []
    entries = har_data.get('log', {}).get('entries', [])
    logger.debug('[+] Detected %(n)d entries in HAR', {'n': len(entries)})

    for entry in entries:
        url = entry['request']['url']
        if not is_url_allowed(url):
            continue

        content = entry['response']['content']
        if not is_valid_mimetype(content):
            continue

        if content.get('text'):
            content['text'] = base64.b64decode(content['text']).decode(get_charset(content))
        else:
            content['text'] = ''

        valid_entries.append(entry)
        logger.debug('[+] Added URL: %(url)s ...', {'url': url[:100]})

    return valid_entries
Return list of valid HAR entries .
47,844
def _script_to_har_entry(cls, script, url):
    """Return a synthetic HAR entry wrapping an inline (embedded) script."""
    entry = {
        'request': {'url': url},
        'response': {'url': url, 'content': {'text': script}},
    }
    cls._set_entry_type(entry, INLINE_SCRIPT_ENTRY)
    return entry
Return entry for embed script
47,845
def mark_entries(self, entries):
    """Mark one entry as the main entry and the rest as resource entries.

    The first entry is the main one unless it carries a redirect Location
    header; then the redirect target entry is promoted, falling back to
    the first entry when the target is not among the entries.
    """
    for entry in entries:
        self._set_entry_type(entry, RESOURCE_ENTRY)

    main_entry = entries[0]
    main_location = self._get_location(main_entry)
    if not main_location:
        self._set_entry_type(main_entry, MAIN_ENTRY)
        return

    # Resolve the redirect target relative to the first request's URL.
    main_url = urllib.parse.urljoin(get_url(main_entry), main_location)
    for entry in entries[1:]:
        url = get_url(entry)
        if url == main_url:
            self._set_entry_type(entry, MAIN_ENTRY)
            break
    else:
        # Redirect target not present; keep the first entry as main.
        self._set_entry_type(main_entry, MAIN_ENTRY)
Mark one entry as main entry and the rest as resource entry .
47,846
def get_hints(self, plugin):
    """Return Result objects for every valid hint declared by *plugin*.

    Hints naming a plugin that is not registered are logged as errors and
    skipped.
    """
    hints = []
    for hint_name in getattr(plugin, 'hints', []):
        hint_plugin = self._plugins.get(hint_name)
        if hint_plugin:
            hint_result = Result(
                name=hint_plugin.name,
                homepage=hint_plugin.homepage,
                from_url=self.requested_url,
                type=HINT_TYPE,
                plugin=plugin.name,
            )
            hints.append(hint_result)
            logger.debug(f'{plugin.name} & hint {hint_result.name} detected')
        else:
            logger.error(f'{plugin.name} hints an invalid plugin: {hint_name}')
    return hints
Return plugin hints from plugin .
47,847
def process_from_splash(self):
    """Add softwares found in the DOM by the Splash JavaScript matchers."""
    for software in self._softwares_from_splash:
        plugin = self._plugins.get(software['name'])
        # Without a reported version we only record an indicator of presence.
        try:
            additional_data = {'version': software['version']}
        except KeyError:
            additional_data = {'type': INDICATOR_TYPE}
        self._results.add_result(Result(
            name=plugin.name,
            homepage=plugin.homepage,
            from_url=self.requested_url,
            plugin=plugin.name,
            **additional_data,
        ))
        for hint in self.get_hints(plugin):
            self._results.add_result(hint)
Add softwares found in the DOM
47,848
def process_har(self):
    """Detect plugins present in the page from the HAR entries.

    Version-matcher plugins yield versioned results, falling back to a
    version derived from known file hashes and finally to a bare presence
    indicator.  Generic plugins contribute results from their own entry
    information.  Hints from every matched plugin are appended last.
    """
    hints = []

    version_plugins = self._plugins.with_version_matchers()
    generic_plugins = self._plugins.with_generic_matchers()

    for entry in self.har:
        for plugin in version_plugins:
            pm = self.apply_plugin_matchers(plugin, entry)
            if not pm:
                continue
            # A matcher-provided name yields a namespaced result name.
            if pm.name:
                name = '{}-{}'.format(plugin.name, pm.name)
            else:
                name = plugin.name
            if pm.version:
                self._results.add_result(Result(
                    name=name,
                    version=pm.version,
                    homepage=plugin.homepage,
                    from_url=get_url(entry),
                    plugin=plugin.name,
                ))
            elif pm.presence:
                # Try to pin down the version from known file hashes.
                version = get_version_via_file_hashes(plugin, entry)
                if version:
                    self._results.add_result(Result(
                        name=name,
                        version=version,
                        homepage=plugin.homepage,
                        from_url=get_url(entry),
                        plugin=plugin.name,
                    ))
                else:
                    self._results.add_result(Result(
                        name=name,
                        homepage=plugin.homepage,
                        from_url=get_url(entry),
                        type=INDICATOR_TYPE,
                        plugin=plugin.name,
                    ))
            hints += self.get_hints(plugin)

        for plugin in generic_plugins:
            pm = self.apply_plugin_matchers(plugin, entry)
            if not pm:
                continue
            plugin_data = plugin.get_information(entry)
            # Only record a result when the plugin reports a name.
            if 'name' in plugin_data:
                self._results.add_result(Result(
                    name=plugin_data['name'],
                    homepage=plugin_data['homepage'],
                    from_url=get_url(entry),
                    type=GENERIC_TYPE,
                    plugin=plugin.name,
                ))
            hints += self.get_hints(plugin)

    for hint in hints:
        self._results.add_result(hint)
Detect plugins present in the page .
47,849
def get_results(self, metadata=False):
    """Run HAR and Splash processing and return sorted result dicts.

    Each dict carries at least the name (and version when known); with
    *metadata* set, homepage/type/from_url/plugin are included too.
    """
    self.process_har()
    self.process_from_splash()

    output = []
    for result in sorted(self._results.get_results()):
        data = {'name': result.name}
        if result.version:
            data['version'] = result.version
        if metadata:
            data['homepage'] = result.homepage
            data['type'] = result.type
            data['from_url'] = result.from_url
            data['plugin'] = result.plugin
        output.append(data)
    return output
Return results of the analysis .
47,850
def load_plugins():
    """Instantiate and return the plugins from every package in PLUGIN_PACKAGES."""
    loader = _PluginLoader()
    for package in PLUGIN_PACKAGES:
        loader.load_plugins(package)
    return loader.plugins
Return the list of plugin instances .
47,851
def _get_plugin_module_paths(self, plugin_dir):
    """Return dotted module paths for every plugin module under *plugin_dir*.

    ``__init__.py`` files are skipped.  Uses os.path.relpath/splitext
    rather than the original ``re.sub(plugin_dir, ...)`` (which treated
    the directory path as a regex and broke on metacharacters) and
    ``.replace('.py', '')`` (which corrupted any path containing ".py"
    mid-name).
    """
    module_paths = []
    pattern = os.path.join(plugin_dir, '**', '*.py')
    for filepath in glob.glob(pattern, recursive=True):
        if filepath.endswith('__init__.py'):
            continue
        rel_path = os.path.relpath(filepath, plugin_dir)
        module_paths.append(os.path.splitext(rel_path)[0].replace(os.sep, '.'))
    return module_paths
Return a list of every module in plugin_dir .
47,852
def load_plugins(self, plugins_package):
    """Load plugin classes from the *plugins_package* module.

    Imports every module in the package, instantiates classes whose names
    end in "Plugin", and registers those that pass the sanity check.
    """
    try:
        # Filesystem directory backing the package.
        plugin_dir = find_spec(plugins_package).submodule_search_locations[0]
    except ImportError:
        logger.error("Could not load plugins package '%(pkg)s'", {'pkg': plugins_package})
        return
    for module_path in self._get_plugin_module_paths(plugin_dir):
        spec = find_spec('{}.{}'.format(plugins_package, module_path))
        m = module_from_spec(spec)
        spec.loader.exec_module(m)
        classes = inspect.getmembers(m, predicate=inspect.isclass)
        for _, klass in classes:
            # Skip classes that were merely imported into the module.
            if klass.__module__ != spec.name:
                continue
            if not klass.__name__.endswith('Plugin'):
                continue
            instance = klass()
            if self._is_plugin_ok(instance):
                self.plugins.add(instance)
Load plugins from plugins_package module .
47,853
def extract_named_group(text, named_group, matchers, return_presence=False):
    """Return the *named_group* captured from *text* by the first matcher.

    Matchers may be regex strings (searched with DOTALL) or callables.
    With *return_presence*, 'presence' is returned when some regex matched
    without capturing any named group at all.
    """
    presence = False
    for matcher in matchers:
        if callable(matcher):
            value = matcher(text)
            if value:
                return value
            continue
        match = re.search(matcher, text, flags=re.DOTALL)
        if not match:
            continue
        groups = match.groupdict()
        if named_group in groups:
            return groups[named_group]
        if not groups:
            # Matched but captured nothing: record presence only.
            presence = True
    if return_presence and presence:
        return 'presence'
    return None
Return named_group match from text reached by using a matcher from matchers .
47,854
def parsed(self):
    """Lazily parse the response content with the browser's HTML parser."""
    return BeautifulSoup(
        self.response.content,
        features=self.browser.parser,
    )
Lazily parse response content using HTML parser specified by the browser .
47,855
def _build_send_args(self, **kwargs):
    """Merge per-call request arguments over the browser defaults."""
    merged = dict(self._default_send_args)
    merged.update(kwargs)
    return merged
Merge optional arguments with defaults .
47,856
def open(self, url, method='get', **kwargs):
    """Open a URL and record the resulting browser state."""
    send_args = self._build_send_args(**kwargs)
    response = self.session.request(method, url, **send_args)
    self._update_state(response)
Open a URL .
47,857
def _update_state(self, response):
    """Append a new state for *response* to the browser history.

    Any "forward" states beyond the cursor are discarded first; when a
    maximum history length is configured, the oldest states are dropped
    and the cursor shifted to match.
    """
    # Truncate forward history: a new navigation invalidates it.
    self._states = self._states[:self._cursor + 1]
    state = RoboState(self, response)
    self._states.append(state)
    self._cursor += 1
    if self._maxlen:
        decrement = len(self._states) - self._maxlen
        if decrement > 0:
            self._states = self._states[decrement:]
            self._cursor -= decrement
Update the state of the browser . Create a new state object and append to or overwrite the browser s state history .
47,858
def _traverse(self, n=1):
    """Move the history cursor by *n* steps (negative = back).

    Used by the back and forward methods; raises RoboError when history
    tracking is off or the target index is out of range.
    """
    if not self.history:
        raise exceptions.RoboError('Not tracking history')
    target = self._cursor + n
    if not (0 <= target < len(self._states)):
        raise exceptions.RoboError('Index out of range')
    self._cursor = target
Traverse state history . Used by back and forward methods .
47,859
def get_link(self, text=None, *args, **kwargs):
    """Find an anchor or button by contained text, plus standard
    BeautifulSoup arguments."""
    return helpers.find(self.parsed, _link_ptn, text=text, *args, **kwargs)
Find an anchor or button by containing text as well as standard BeautifulSoup arguments .
47,860
def get_links(self, text=None, *args, **kwargs):
    """Find all anchors or buttons by contained text, plus standard
    BeautifulSoup arguments."""
    return helpers.find_all(self.parsed, _link_ptn, text=text, *args, **kwargs)
Find anchors or buttons by containing text as well as standard BeautifulSoup arguments .
47,861
def get_form(self, id=None, *args, **kwargs):
    """Find a form, optionally by ID; returns a Form wrapper or None."""
    if id:
        kwargs['id'] = id
    node = self.find(_form_ptn, *args, **kwargs)
    if node is not None:
        return Form(node)
Find form by ID as well as standard BeautifulSoup arguments .
47,862
def follow_link(self, link, **kwargs):
    """Click a link element; it must carry an "href" attribute."""
    try:
        href = link['href']
    except KeyError:
        raise exceptions.RoboError('Link element must have "href" '
                                   'attribute')
    target = self._build_url(href)
    self.open(target, **kwargs)
Click a link .
47,863
def submit_form(self, form, submit=None, **kwargs):
    """Submit *form*, optionally via a specific submit element.

    Serializes the form for its HTTP method, merges the browser's default
    send arguments with *kwargs*, performs the request and records the
    new state.
    """
    method = form.method.upper()
    # Send the request to the form action; fall back to the current URL.
    url = self._build_url(form.action) or self.url
    payload = form.serialize(submit=submit)
    serialized = payload.to_requests(method)
    send_args = self._build_send_args(**kwargs)
    send_args.update(serialized)
    response = self.session.request(method, url, **send_args)
    self._update_state(response)
Submit a form .
47,864
def find(soup, name=None, attrs=None, recursive=True, text=None, **kwargs):
    """Modified find method: return the first tag matched by find_all, or None."""
    matches = find_all(soup, name, attrs or {}, recursive, text, 1, **kwargs)
    if matches:
        return matches[0]
Modified find method ; see find_all above .
47,865
def _set_initial(self, initial):
    """Set the initial value; when nothing is selected, default to the
    first option (mirroring HTML select behavior)."""
    super(Select, self)._set_initial(initial)
    if not self._value and self.options:
        self.value = self.options[0]
If no option is selected initially select the first option .
47,866
def encode_if_py2(func):
    """On Python 2, decorate *func* so its unicode return value is encoded
    to UTF-8; on Python 3 return *func* unchanged (noop)."""
    if not PY2:
        return func
    def wrapped(*args, **kwargs):
        ret = func(*args, **kwargs)
        # Enforce the contract: the wrapped function must yield unicode.
        if not isinstance(ret, unicode):
            raise TypeError('Wrapped function must return `unicode`')
        return ret.encode('utf-8', 'ignore')
    return wrapped
If Python 2 . x return decorated function encoding unicode return value to UTF - 8 ; else noop .
47,867
def _reduce_age(self, now):
    """Drop cached entries older than `max_age` relative to *now*."""
    if not self.max_age:
        return
    expired = [k for k, v in iteritems(self.data)
               if now - v['date'] > self.max_age]
    for k in expired:
        del self.data[k]
Reduce size of cache by date .
47,868
def _reduce_count(self):
    """Evict the oldest entries until the cache is within `max_count`."""
    if not self.max_count:
        return
    while len(self.data) > self.max_count:
        # popitem(last=False) removes in FIFO (insertion) order
        self.data.popitem(last=False)
Reduce size of cache by count .
47,869
def store(self, response):
    """Cache *response* keyed by URL, skipping disallowed status codes,
    then trim the cache by age and by size."""
    if response.status_code not in CACHE_CODES:
        return
    timestamp = datetime.datetime.now()
    self.data[response.url] = {
        'date': timestamp,
        'response': response,
    }
    logger.info('Stored response in cache')
    self._reduce_age(timestamp)
    self._reduce_count()
Store response in cache skipping if code is forbidden .
47,870
def retrieve(self, request):
    """Return the cached response for *request*, or None on a miss or a
    disallowed HTTP verb."""
    if request.method not in CACHE_VERBS:
        return None
    entry = self.data.get(request.url)
    if entry is None:
        return None
    logger.info('Retrieved response from cache')
    return entry['response']
Look up request in cache skipping if verb is forbidden .
47,871
def _group_flat_tags(tag, tags):
    """Collect *tag* plus the leading run of *tags* sharing its name
    (case-insensitive). Pops matched tags off the front of *tags*.

    Used to gather the options of radio and checkbox inputs.
    """
    name = tag.get('name', '').lower()
    group = [tag]
    while tags and tags[0].get('name', '').lower() == name:
        group.append(tags.pop(0))
    return group
Extract tags sharing the same name as the provided tag . Used to collect options for radio and checkbox inputs .
47,872
def _parse_fields(parsed):
    """Parse form field objects out of parsed HTML.

    Tags with invalid names are silently skipped.
    """
    parsed_fields = []
    tags = parsed.find_all(_tag_ptn)
    for tag in tags:
        helpers.lowercase_attr_names(tag)
    while tags:
        tag = tags.pop(0)
        try:
            field = _parse_field(tag, tags)
        except exceptions.InvalidNameError:
            continue
        if field is not None:
            parsed_fields.append(field)
    return parsed_fields
Parse form fields from HTML .
47,873
def add(self, data, key=None):
    """Merge *data*'s key/value pairs into this container.

    :param key: when given, values go into the options bucket named *key*;
        otherwise into the main data container.
    """
    sink = self.data if key is None else self.options[key]
    # Renamed the loop variable so it no longer shadows the `key` parameter
    for name, value in iteritems(data):
        sink.add(name, value)
Add field values to container .
47,874
def to_requests(self, method='get'):
    """Serialize the payload into keyword arguments for a requests call.

    GET payloads go under 'params'; everything else under 'data'.
    """
    data_key = 'params' if method.lower() == 'get' else 'data'
    combined = {data_key: self.data}
    combined.update(self.options)
    return dict(
        (key, list(value.items(multi=True)))
        for key, value in iteritems(combined)
    )
Export to Requests format .
47,875
def add_field(self, field):
    """Register *field* under its name.

    :raises ValueError: if *field* is not a BaseField instance
    """
    if not isinstance(field, fields.BaseField):
        raise ValueError('Argument "field" must be an instance of BaseField')
    self.fields.add(field.name, field)
Add a field .
47,876
def serialize(self, submit=None):
    """Serialize the form's fields into a Payload container.

    :param submit: optional submit control used to filter submit fields
    """
    chosen = prepare_fields(self.fields, self.submit_fields, submit)
    return Payload.from_fields(chosen)
Serialize each form field to a Payload container .
47,877
async def save(self, db):
    """Save the object to Redis as a hash, refreshing auto-generated columns
    and secondary indexes.

    :param db: async Redis connection -- presumably aioredis-style, since it
        must expose ``hmset_dict`` (TODO confirm against callers)
    :return: the result of ``hmset_dict`` (truthiness indicates success)
    """
    kwargs = {}
    # Fill in any auto-generated columns that do not hold real data yet
    for col in self._auto_columns:
        if not self.has_real_data(col.name):
            kwargs[col.name] = await col.auto_generate(db, self)
    self.__dict__.update(kwargs)
    # Load the currently stored version so stale index entries can be
    # replaced by save_index below
    stale_object = await self.__class__.load(db,
                                             identifier=self.identifier())
    # Redis hashes store flat values; serialize datetimes explicitly
    d = {k: (v.strftime(DATETIME_FORMAT) if isinstance(v, datetime) else v)
         for k, v in self.__dict__.items()}
    success = await db.hmset_dict(self.redis_key(), d)
    await self.save_index(db, stale_object=stale_object)
    return success
Save the object to Redis .
47,878
def check_address(address):
    """Validate *address* as a (host, port) tuple or a UNIX socket path.

    :raises ValueError: when the address is malformed, or UNIX domain
        sockets are requested on a non-POSIX platform
    """
    if isinstance(address, tuple):
        check_host(address[0])
        check_port(address[1])
        return
    if isinstance(address, string_types):
        if os.name != 'posix':
            raise ValueError('Platform does not support UNIX domain sockets')
        writable_dir = os.access(os.path.dirname(address), os.W_OK)
        if not (os.path.exists(address) or writable_dir):
            raise ValueError(
                'ADDRESS not a valid socket domain socket ({0})'
                .format(address))
        return
    raise ValueError('ADDRESS is not a tuple, string, or character buffer '
                     '({0})'.format(type(address).__name__))
Check if the format of the address is correct
47,879
def check_addresses(address_list, is_remote=False):
    """Check that every entry in *address_list* is a well-formed address.

    :param address_list: iterable of (host, port) tuples or socket-path strings
    :param is_remote: when True, UNIX domain socket paths are rejected
    :raises AssertionError: on non-address entries, or remote UNIX sockets
    :raises ValueError: when an individual address is malformed
    """
    assert all(isinstance(x, (tuple, string_types)) for x in address_list)
    if (is_remote and
            any(isinstance(x, string_types) for x in address_list)):
        # BUG FIX: the original adjacent literals concatenated to
        # "remoteaddresses" -- a space was missing
        raise AssertionError('UNIX domain sockets not allowed for remote '
                             'addresses')
    for address in address_list:
        check_address(address)
Check if the format of the addresses is correct
47,880
def create_logger(logger=None,
                  loglevel=None,
                  capture_warnings=True,
                  add_paramiko_handler=True):
    """Attach or create a new logger, adding a console handler if not present.

    :param logger: existing logger to configure; when None, a module-scoped
        ``SSHTunnelForwarder`` logger is created
    :param loglevel: level applied to the logger and all of its handlers
    :param capture_warnings: route the ``warnings`` module through logging
        (only effective on Python >= 2.7)
    :param add_paramiko_handler: also configure paramiko.transport's logger
    :return: the configured logger
    """
    logger = logger or logging.getLogger(
        '{0}.SSHTunnelForwarder'.format(__name__))
    # Only add a console handler when no real handler is attached yet
    if not any(isinstance(x, logging.Handler) for x in logger.handlers):
        logger.setLevel(loglevel or DEFAULT_LOGLEVEL)
        console_handler = logging.StreamHandler()
        _add_handler(logger,
                     handler=console_handler,
                     loglevel=loglevel or DEFAULT_LOGLEVEL)
    # An explicit level overrides the logger and every existing handler
    if loglevel:
        logger.setLevel(loglevel)
        for handler in logger.handlers:
            handler.setLevel(loglevel)
    if add_paramiko_handler:
        _check_paramiko_handlers(logger=logger)
    # logging.captureWarnings only exists on Python >= 2.7
    if capture_warnings and sys.version_info >= (2, 7):
        logging.captureWarnings(True)
        pywarnings = logging.getLogger('py.warnings')
        pywarnings.handlers.extend(logger.handlers)
    return logger
Attach or create a new logger and add a console handler if not present
47,881
def _add_handler(logger, handler=None, loglevel=None):
    """Attach *handler* to *logger* with a level-appropriate formatter.

    DEBUG-and-below handlers get a verbose format including thread, line
    number and module; others get a compact one.
    """
    handler.setLevel(loglevel or DEFAULT_LOGLEVEL)
    if handler.level <= logging.DEBUG:
        verbose_fmt = ('%(asctime)s| %(levelname)-4.3s|%(threadName)10.9s/'
                       '%(lineno)04d@%(module)-10.9s| %(message)s')
        handler.setFormatter(logging.Formatter(verbose_fmt))
    else:
        handler.setFormatter(
            logging.Formatter('%(asctime)s| %(levelname)-8s| %(message)s'))
    logger.addHandler(handler)
Add a handler to an existing logging . Logger object
47,882
def _check_paramiko_handlers(logger=None):
    """Ensure paramiko.transport's logger has at least one handler.

    Reuses *logger*'s handlers when given; otherwise attaches a fresh
    console handler with a PARAMIKO-tagged format.
    """
    paramiko_logger = logging.getLogger('paramiko.transport')
    if paramiko_logger.handlers:
        return
    if logger:
        paramiko_logger.handlers = logger.handlers
    else:
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(logging.Formatter(
            '%(asctime)s | %(levelname)-8s| PARAMIKO: '
            '%(lineno)03d@%(module)-10s| %(message)s'))
        paramiko_logger.addHandler(console_handler)
Add a console handler for paramiko . transport s logger if not present
47,883
def _remove_none_values(dictionary):
    """Remove keys whose value is None; return the popped values."""
    none_keys = [key for key in dictionary if dictionary[key] is None]
    return list(map(dictionary.pop, none_keys))
Remove dictionary keys whose value is None
47,884
def _cli_main(args=None):
    """Pass input arguments to open_tunnel and keep the tunnel open.

    Maps the -v count (capped at 4) onto progressively chattier log levels
    and blocks on user input while the tunnel is alive.
    """
    arguments = _parse_arguments(args)
    # Remove all "None" input values so open_tunnel's own defaults apply
    _remove_none_values(arguments)
    verbosity = min(arguments.pop('verbose'), 4)
    levels = [logging.ERROR,
              logging.WARNING,
              logging.INFO,
              logging.DEBUG,
              TRACE_LEVEL]
    arguments.setdefault('debug_level', levels[verbosity])
    with open_tunnel(**arguments) as tunnel:
        if tunnel.is_alive:
            input_()  # block until the user presses <Enter>
Pass input arguments to open_tunnel
47,885
def _make_ssh_forward_handler_class(self, remote_address_):
    """Build a request-handler class bound to this forwarder's transport,
    logger, and the given remote address."""
    parent_transport = self._transport
    parent_logger = self.logger

    class Handler(_ForwardHandler):
        remote_address = remote_address_
        ssh_transport = parent_transport
        logger = parent_logger

    return Handler
Make SSH Handler class
47,886
def _make_ssh_forward_server(self, remote_address, local_bind_address):
    """Make SSH forward proxy Server class for one tunnel.

    On success the new server is appended to ``self._server_list`` and its
    tunnel is initially marked as down in ``self.tunnel_is_up``. Failures
    are routed through ``self._raise`` (which may be muted).
    """
    _Handler = self._make_ssh_forward_handler_class(remote_address)
    try:
        # A string bind address denotes a UNIX domain socket
        if isinstance(local_bind_address, string_types):
            forward_maker_class = self._make_unix_ssh_forward_server_class
        else:
            forward_maker_class = self._make_ssh_forward_server_class
        _Server = forward_maker_class(remote_address)
        ssh_forward_server = _Server(
            local_bind_address,
            _Handler,
            logger=self.logger,
        )
        if ssh_forward_server:
            ssh_forward_server.daemon_threads = self.daemon_forward_servers
            self._server_list.append(ssh_forward_server)
            self.tunnel_is_up[ssh_forward_server.server_address] = False
        else:
            self._raise(
                BaseSSHTunnelForwarderError,
                'Problem setting up ssh {0} <> {1} forwarder. You can '
                'suppress this exception by using the `mute_exceptions`'
                'argument'.format(address_to_str(local_bind_address),
                                  address_to_str(remote_address))
            )
    except IOError:
        self._raise(
            BaseSSHTunnelForwarderError,
            "Couldn't open tunnel {0} <> {1} might be in use or "
            "destination not reachable".format(
                address_to_str(local_bind_address),
                address_to_str(remote_address))
        )
Make SSH forward proxy Server class
47,887
def get_agent_keys(logger=None):
    """Load and return all public keys available from a running SSH agent."""
    agent_keys = paramiko.Agent().get_keys()
    if logger:
        logger.info('{0} keys loaded from agent'.format(len(agent_keys)))
    return list(agent_keys)
Load public keys from any available SSH agent
47,888
def get_keys(logger=None, host_pkey_directories=None, allow_agent=False):
    """Load public keys from any available SSH agent or local .ssh directory.

    :param logger: optional logger for progress messages
    :param host_pkey_directories: directories scanned for ``id_<type>`` files
    :param allow_agent: when True, also collect keys from a running SSH agent
    :return: list of loaded paramiko key objects
    """
    keys = SSHTunnelForwarder.get_agent_keys(logger=logger) \
        if allow_agent else []

    if host_pkey_directories is not None:
        paramiko_key_types = {'rsa': paramiko.RSAKey,
                              'dsa': paramiko.DSSKey,
                              'ecdsa': paramiko.ECDSAKey,
                              'ed25519': paramiko.Ed25519Key}
        # NOTE(review): the `or [DEFAULT_SSH_DIRECTORY]` fallback only fires
        # for an *empty* list -- None is filtered out by the check above.
        # Confirm whether None was also meant to scan the default directory.
        for directory in host_pkey_directories or [DEFAULT_SSH_DIRECTORY]:
            for keytype in paramiko_key_types.keys():
                ssh_pkey_expanded = os.path.expanduser(
                    os.path.join(directory, 'id_{}'.format(keytype))
                )
                if os.path.isfile(ssh_pkey_expanded):
                    ssh_pkey = SSHTunnelForwarder.read_private_key_file(
                        pkey_file=ssh_pkey_expanded,
                        logger=logger,
                        key_type=paramiko_key_types[keytype]
                    )
                    if ssh_pkey:
                        keys.append(ssh_pkey)
        if logger:
            # NOTE(review): len(keys) also counts any agent keys collected
            # above, so "loaded from host directory" can over-report
            logger.info('{0} keys loaded from host directory'.format(
                len(keys)))
    return keys
Load public keys from any available SSH agent or local . ssh directory .
47,889
def _get_transport(self):
    """Return the SSH transport to the remote gateway.

    Connects either through a configured proxy (ProxyCommand or socket-like
    object) or via a fresh TCP socket to ``ssh_host:ssh_port``.
    """
    if self.ssh_proxy:
        if isinstance(self.ssh_proxy, paramiko.proxy.ProxyCommand):
            proxy_repr = repr(self.ssh_proxy.cmd[1])
        else:
            proxy_repr = repr(self.ssh_proxy)
        self.logger.debug('Connecting via proxy: {0}'.format(proxy_repr))
        _socket = self.ssh_proxy
    else:
        _socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Only a plain socket needs connecting here; proxy objects are assumed
    # to manage their own connection
    if isinstance(_socket, socket.socket):
        _socket.settimeout(SSH_TIMEOUT)
        _socket.connect((self.ssh_host, self.ssh_port))
    transport = paramiko.Transport(_socket)
    transport.set_keepalive(self.set_keepalive)
    transport.use_compression(compress=self.compression)
    transport.daemon = self.daemon_transport
    return transport
Return the SSH transport to the remote gateway
47,890
def _create_tunnels(self):
    """Create SSH tunnels on top of a transport to the remote gateway.

    Connects to the gateway first (when not already active), then sets up
    one forward server per (remote, local) bind pair. Failures are logged
    and swallowed; callers inspect ``is_active``/``tunnel_is_up``.
    """
    if not self.is_active:
        try:
            self._connect_to_gateway()
        except socket.gaierror:  # DNS resolution of the gateway failed
            msg = 'Could not resolve IP address for {0}, aborting!'.format(
                self.ssh_host)
            self.logger.error(msg)
            return
        except (paramiko.SSHException, socket.error) as e:
            template = 'Could not connect to gateway {0}:{1} : {2}'
            msg = template.format(self.ssh_host, self.ssh_port, e.args[0])
            self.logger.error(msg)
            return
    for (rem, loc) in zip(self._remote_binds, self._local_binds):
        try:
            self._make_ssh_forward_server(rem, loc)
        except BaseSSHTunnelForwarderError as e:
            msg = 'Problem setting SSH Forwarder up: {0}'.format(e.value)
            self.logger.error(msg)
Create SSH tunnels on top of a transport to the remote gateway
47,891
def _process_deprecated(attrib, deprecated_attrib, kwargs):
    """Resolve a deprecated keyword argument against its replacement.

    Emits a DeprecationWarning when the old name is used, and raises
    ValueError when both old and new names were supplied.

    :return: the value to use (popped deprecated value, or *attrib*)
    """
    if deprecated_attrib not in DEPRECATIONS:
        raise ValueError('{0} not included in deprecations list'
                         .format(deprecated_attrib))
    if deprecated_attrib not in kwargs:
        return attrib
    warnings.warn("'{0}' is DEPRECATED use '{1}' instead".format(
        deprecated_attrib,
        DEPRECATIONS[deprecated_attrib]), DeprecationWarning)
    if attrib:
        raise ValueError("You can't use both '{0}' and '{1}'. "
                         "Please only use one of them".format(
                             deprecated_attrib,
                             DEPRECATIONS[deprecated_attrib]))
    return kwargs.pop(deprecated_attrib)
Processes optional deprecate arguments
47,892
def read_private_key_file(pkey_file,
                          pkey_password=None,
                          key_type=None,
                          logger=None):
    """Get an SSH private key from a file, trying several key types.

    :param pkey_file: path of the private key file
    :param pkey_password: optional passphrase protecting the key
    :param key_type: specific paramiko key class to try; when None, RSA,
        DSS, ECDSA and Ed25519 are attempted in turn
    :param logger: optional logger for diagnostics
    :return: the loaded key object, or None when no type could load the file
    """
    ssh_pkey = None
    for pkey_class in (key_type,) if key_type else (
            paramiko.RSAKey,
            paramiko.DSSKey,
            paramiko.ECDSAKey,
            paramiko.Ed25519Key):
        try:
            ssh_pkey = pkey_class.from_private_key_file(
                pkey_file,
                password=pkey_password)
            if logger:
                logger.debug('Private key file ({0}, {1}) successfully '
                             'loaded'.format(pkey_file, pkey_class))
            break
        except paramiko.PasswordRequiredException:
            # A passphrase is needed but none (or a wrong one) was given;
            # no point trying other key classes
            if logger:
                logger.error('Password is required for key {0}'
                             .format(pkey_file))
            break
        except paramiko.SSHException:
            # Wrong key type or bad password -- try the next class
            if logger:
                logger.debug('Private key file ({0}) could not be loaded '
                             'as type {1} or bad password'
                             .format(pkey_file, pkey_class))
    return ssh_pkey
Get SSH Public key from a private key file given an optional password
47,893
def _check_tunnel(self, _srv):
    """Check if the tunnel served by *_srv* is established, recording the
    result in ``self.tunnel_is_up``.

    Skips the check entirely (assuming the tunnel is up) when
    ``skip_tunnel_checkup`` is set.
    """
    if self.skip_tunnel_checkup:
        self.tunnel_is_up[_srv.local_address] = True
        return
    self.logger.info('Checking tunnel to: {0}'.format(_srv.remote_address))
    if isinstance(_srv.local_address, string_types):  # UNIX domain socket
        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    else:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(TUNNEL_TIMEOUT)
    try:
        # 0.0.0.0 is not connectable; use loopback with the bound port
        connect_to = (('127.0.0.1', _srv.local_port)
                      if _srv.local_host == '0.0.0.0'
                      else _srv.local_address)
        s.connect(connect_to)
        up = _srv.tunnel_ok.get(timeout=TUNNEL_TIMEOUT * 1.1)
        self.tunnel_is_up[_srv.local_address] = up
        # BUG FIX: the original logged "is DOWN" unconditionally here, even
        # when the handler reported the tunnel as up
        self.logger.debug('Tunnel to {0} is {1}'.format(
            _srv.remote_address, 'UP' if up else 'DOWN'))
    except socket.error:
        self.logger.debug('Tunnel to {0} is DOWN'.format(
            _srv.remote_address))
        self.tunnel_is_up[_srv.local_address] = False
    except queue.Empty:
        # No error was queued by the handler within the timeout: tunnel OK
        self.logger.debug('Tunnel to {0} is UP'.format(_srv.remote_address))
        self.tunnel_is_up[_srv.local_address] = True
    finally:
        s.close()
Check if tunnel is already established
47,894
def start(self):
    """Start the SSH tunnels: create them, spawn one serving thread per
    forward server, and verify each tunnel.

    :raises BaseSSHTunnelForwarderError: if the gateway session failed
    :raises HandlerSSHTunnelForwarderError: if no tunnel came up
    """
    if self.is_alive:
        self.logger.warning('Already started!')
        return
    self._create_tunnels()
    if not self.is_active:
        self._raise(BaseSSHTunnelForwarderError,
                    reason='Could not establish session to SSH gateway')
    for _srv in self._server_list:
        thread = threading.Thread(
            target=self._serve_forever_wrapper,
            args=(_srv,),
            name='Srv-{0}'.format(address_to_str(_srv.local_port)))
        thread.daemon = self.daemon_forward_servers
        thread.start()
        self._check_tunnel(_srv)
    # Alive as long as at least one tunnel checked out
    self.is_alive = any(self.tunnel_is_up.values())
    if not self.is_alive:
        self._raise(HandlerSSHTunnelForwarderError,
                    'An error occurred while opening tunnels.')
Start the SSH tunnels
47,895
def stop(self):
    """Shut the tunnel down: close every forward server and the transport,
    then reset the bookkeeping state."""
    self.logger.info('Closing all open connections...')
    listening = [address_to_str(k.local_address) for k in self._server_list]
    self.logger.debug('Listening tunnels: ' + (', '.join(listening) or 'None'))
    self._stop_transport()
    self._server_list = []
    self.tunnel_is_up = {}
Shut the tunnel down .
47,896
def _serve_forever_wrapper(self, _srv, poll_interval=0.1):
    """Run a forward server's serve_forever loop, logging open/release."""
    local_repr = address_to_str(_srv.local_address)
    remote_repr = address_to_str(_srv.remote_address)
    self.logger.info('Opening tunnel: {0} <> {1}'.format(local_repr,
                                                         remote_repr))
    _srv.serve_forever(poll_interval)
    self.logger.info('Tunnel: {0} <> {1} released'.format(local_repr,
                                                          remote_repr))
Wrapper for the server created for a SSH forward
47,897
def _stop_transport(self):
    """Close every forward server and the underlying SSH transport when
    nothing more is needed."""
    try:
        self._check_is_started()
    except (BaseSSHTunnelForwarderError,
            HandlerSSHTunnelForwarderError) as e:
        self.logger.warning(e)
    for _srv in self._server_list:
        tunnel = _srv.local_address
        if self.tunnel_is_up[tunnel]:
            self.logger.info('Shutting down tunnel {0}'.format(tunnel))
        _srv.shutdown()
        _srv.server_close()
        # Clean up the socket file left behind by UNIX domain forwards
        if isinstance(_srv, _UnixStreamForwardServer):
            try:
                os.unlink(_srv.local_address)
            except Exception as e:
                # BUG FIX: was `self.local_address`, which does not exist on
                # the forwarder and would raise AttributeError while logging
                self.logger.error('Unable to unlink socket {0}: {1}'
                                  .format(_srv.local_address, repr(e)))
    self.is_alive = False
    if self.is_active:
        self._transport.close()
        self._transport.stop_thread()
    self.logger.debug('Transport is closed')
Close the underlying transport when nothing more is needed
47,898
def local_bind_ports(self):
    """Return the local ports of the TCP tunnels (skips UNIX sockets,
    whose servers have no port). Requires the tunnels to be started."""
    self._check_is_started()
    return [srv.local_port
            for srv in self._server_list
            if srv.local_port is not None]
Return a list containing the ports of local side of the TCP tunnels
47,899
def local_bind_hosts(self):
    """Return the IP addresses the tunnels are listening on (skips servers
    without a local host). Requires the tunnels to be started."""
    self._check_is_started()
    return [srv.local_host
            for srv in self._server_list
            if srv.local_host is not None]
Return a list containing the IP addresses listening for the tunnels