idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
237,600
def get_tx_amount(cls, txid, txindex):
    """Gets the amount of a given transaction output.

    Tries each backend in ``cls.GET_TX_AMOUNT_MAIN`` in order and
    returns the first successful result.  Errors listed in
    ``cls.IGNORED_ERRORS`` cause a fall-through to the next backend.

    :raises ConnectionError: when every backend fails.
    """
    for backend in cls.GET_TX_AMOUNT_MAIN:
        try:
            result = backend(txid, txindex)
        except cls.IGNORED_ERRORS:
            continue
        return result
    raise ConnectionError('All APIs are unreachable.')
Gets the amount of a given transaction output .
71
10
237,601
def get_fee(speed=FEE_SPEED_MEDIUM):
    """Gets the recommended satoshi-per-byte fee.

    :param speed: one of FEE_SPEED_FAST / FEE_SPEED_MEDIUM / FEE_SPEED_SLOW.
    :raises ValueError: for any other value.
    """
    if speed == FEE_SPEED_FAST:
        return DEFAULT_FEE_FAST
    if speed == FEE_SPEED_MEDIUM:
        return DEFAULT_FEE_MEDIUM
    if speed == FEE_SPEED_SLOW:
        return DEFAULT_FEE_SLOW
    raise ValueError('Invalid speed argument.')
Gets the recommended satoshi per byte fee .
94
10
237,602
def find_binutils_libs(self, libdir, lib_ext):
    """Find Binutils libraries (libbfd / libopcodes) under *libdir*.

    Walks the directory tree, groups matching files by the version
    string embedded in the file name, and prefers multiarch builds.

    NOTE(review): Python 2 only -- uses a print statement and indexes
    dict.values() directly.
    """
    # Matches "libbfd<version><ext>" or "opcodes<version><ext>";
    # only the first char of lib_ext ends up escaped by the %-format.
    bfd_expr = re.compile("(lib(?:bfd)|(?:opcodes))(.*?)\%s" % lib_ext)
    libs = {}
    for root, dirs, files in os.walk(libdir):
        for f in files:
            m = bfd_expr.search(f)
            if m:
                lib, version = m.groups()
                fp = os.path.join(root, f)
                if version in libs:
                    libs[version].append(fp)
                else:
                    libs[version] = [fp, ]
    # first, search for multiarch files.
    # check if we found more than one version of the multiarch libs.
    multiarch_libs = dict([(v, _l) for v, _l in libs.items()
                           if v.find("multiarch") != -1])
    if len(multiarch_libs) > 1:
        print "[W] Multiple binutils versions detected. Trying to build with default..."
        return multiarch_libs.values()[0]
    if len(multiarch_libs) == 1:
        return multiarch_libs.values()[0]
    # or use the default libs, or .. none
    return libs.get("", [])
Find Binutils libraries .
299
5
237,603
def _darwin_current_arch ( self ) : if sys . platform == "darwin" : if sys . maxsize > 2 ** 32 : # 64bits. return platform . mac_ver ( ) [ 2 ] # Both Darwin and Python are 64bits. else : # Python 32 bits return platform . processor ( )
Add Mac OS X support .
68
6
237,604
def dump(self, src, length=16, start=0, preffix=""):
    """Dump the specified buffer in hex + ASCII format.

    NOTE(review): Python 2 only -- relies on xrange, ord() over a byte
    string, and str.translate with a 256-char table.
    """
    # Translation table: keep bytes whose repr() is a plain 3-char
    # quoted form (i.e. printable); everything else becomes '.'.
    FILTER = "".join([(len(repr(chr(x))) == 3) and chr(x) or '.'
                      for x in xrange(256)])
    result = list()
    for i in xrange(0, len(src), length):
        s = src[i: i + length]
        # Two uppercase hex digits per byte, space separated.
        hexa = " ".join(["%02X" % ord(x) for x in s])
        printable = s.translate(FILTER)
        # prefix, offset, hex column padded to length*3, ASCII column.
        result.append("%s%08X %-*s %s\n" % (
            preffix, start + i, length * 3, hexa, printable))
    return ''.join(result)
Dump the specified buffer in hex + ASCII format .
175
11
237,605
def content(self):
    """Return the entire section content."""
    # Read from offset 0 through the whole section size.
    offset, size = 0, self.size
    return _bfd.section_get_content(self.bfd, self._ptr, offset, size)
Return the entire section content .
33
6
237,606
def get_content(self, offset, size):
    """Return *size* bytes of the current section starting at *offset*."""
    args = (self.bfd, self._ptr, offset, size)
    return _bfd.section_get_content(*args)
Return the specified number of bytes from the current section .
37
11
237,607
def main():
    """Test case for simple opcode disassembly.

    NOTE(review): Python 2 only (print statements).
    """
    # Each entry: [architecture, machine, endianness, code bytes, load address].
    test_targets = (
        [ARCH_I386, MACH_I386_I386_INTEL_SYNTAX, ENDIAN_MONO,
         "\x55\x89\xe5\xE8\xB8\xFF\xFF\xFF", 0x1000],
        [ARCH_I386, MACH_X86_64_INTEL_SYNTAX, ENDIAN_MONO,
         "\x55\x48\x89\xe5\xE8\xA3\xFF\xFF\xFF", 0x1000],
        [ARCH_ARM, MACH_ARM_2, ENDIAN_LITTLE,
         "\x04\xe0\x2d\xe5\xED\xFF\xFF\xEB", 0x1000],
        [ARCH_MIPS, MACH_MIPSISA32, ENDIAN_BIG,
         "\x0C\x10\x00\x97\x00\x00\x00\x00", 0x1000],
        [ARCH_POWERPC, MACH_PPC, ENDIAN_BIG,
         "\x94\x21\xFF\xE8\x7C\x08\x02\xA6", 0x1000],
        #[ARCH_XTENSA, MACH_XTENSA, ENDIAN_BIG, "\x6C\x10\x06\xD7\x10", 0x1000],
    )

    for target_arch, target_mach, target_endian, binary, address in test_targets:
        #
        # Initialize libopcodes with the current architecture.
        #
        opcodes = Opcodes(target_arch, target_mach, target_endian)

        # Print some architecture-specific information.
        print "\n[+] Architecture %s - Machine %d" % (
            opcodes.architecture_name, opcodes.machine)
        print "[+] Disassembly:"

        # Print all the disassembled instructions.
        for vma, size, disasm in opcodes.disassemble(binary, address):
            print "0x%X (size=%d)\t %s" % (vma, size, disasm)
Test case for simple opcode disassembly .
499
9
237,608
def initialize_bfd(self, abfd):
    """Initialize the underlying libopcodes library using a BFD.

    architecture/machine/endian are filled in by the C extension
    itself; x86 targets are additionally switched to Intel syntax.
    """
    self._ptr = _opcodes.initialize_bfd(abfd._ptr)

    # Force Intel syntax on x86, picking the machine constant that
    # matches the BFD's word size.
    if self.architecture == ARCH_I386:
        if abfd.arch_size == 32:
            self.machine = MACH_I386_I386_INTEL_SYNTAX
        elif abfd.arch_size == 64:
            self.machine = MACH_X86_64_INTEL_SYNTAX
Initialize underlying libOpcodes library using BFD .
170
12
237,609
def initialize_non_bfd(self, architecture=None, machine=None, endian=ENDIAN_UNKNOWN):
    """Initialize the underlying libopcodes library without a BFD.

    All three values are required; when any is missing the call is a
    silent no-op.
    """
    if architecture is None or machine is None or endian is None:
        return
    self.architecture = architecture
    self.machine = machine
    self.endian = endian
Initialize underlying libOpcodes library not using BFD .
58
13
237,610
def initialize_smart_disassemble(self, data, start_address=0):
    """Prepare an instruction-by-instruction disassembly session.

    Hands the binary buffer and its load address to the C extension.
    """
    _opcodes.initialize_smart_disassemble(
        self._ptr, data, start_address)
Set the binary buffer to disassemble with other related information ready for an instruction by instruction disassembly session .
43
22
237,611
def print_single_instruction_callback(self, address, size, branch_delay_insn, insn_type, target, target2, disassembly):
    """Callback invoked for each disassembled instruction; prints it.

    Returns PYBFD_DISASM_CONTINUE so the disassembly loop keeps going.
    NOTE(review): Python 2 print statement; target/target2 are unused.
    """
    print "0x%X SZ=%d BD=%d IT=%d\t%s" % (
        address, size, branch_delay_insn, insn_type, disassembly)
    return PYBFD_DISASM_CONTINUE
Callack on each disassembled instruction to print its information .
95
12
237,612
def disassemble(self, data, start_address=0):
    """Return a list with the virtual memory address, instruction
    length and disassembly text for the given binary buffer."""
    return _opcodes.disassemble(
        self._ptr, data, start_address)
Return a list containing the virtual memory address instruction length and disassembly code for the given binary buffer .
36
20
237,613
def open(self, _file, target=DEFAULT_TARGET):
    """Open the existing file for reading.

    Accepts a file object, a filename string, or an already-open BFD
    pointer (int).  Determines the file format (archive/object/core)
    and populates sections and symbols accordingly.

    NOTE(review): Python 2 only ("except X, err", FileType/StringType).
    """
    # Close any existing BFD structure instance.
    self.close()

    #
    # STEP 1. Open the BFD pointer.
    #
    # Determine if the user passed a file-descriptor or a _file and
    # proceed accordingly.
    if type(_file) is FileType:
        # The user specified a file descriptor.
        filename = _file.name
        if islink(filename):
            raise BfdException("Symlinks file-descriptors are not valid")
        try:
            self._ptr = _bfd.fdopenr(filename, target, dup(_file.fileno()))
        except Exception, err:
            raise BfdException(
                "Unable to open file-descriptor %s : %s" % (filename, err))
    elif type(_file) is StringType:
        # The user specified a filename so first check if file exists.
        filename = _file
        try:
            with open(_file):
                pass
        except IOError:
            raise BfdException("File %s does not exist." % filename)
        #
        # Proceed to open the specified file and create a new BFD.
        #
        try:
            self._ptr = _bfd.openr(filename, target)
        except (TypeError, IOError), err:
            raise BfdException(
                "Unable to open file %s : %s" % (filename, err))
    elif type(_file) is IntType:
        # The user specified an already-open BFD pointer so we avoid any
        # further open operation and move on to file format recognition.
        self._ptr = _file
    else:
        raise BfdException(
            "Invalid file type specified for open operation (%r)" % _file)

    #
    # STEP 2. Determine file format of the BFD.
    #
    # Now that the BFD is open we'll proceed to determine its file format.
    # We'll use the objdump logic to determine it and raise an error in
    # case we were unable to get it right.
    #
    try:
        # Try opening it as an archive and if it succeeds then check
        # subfiles.
        if _bfd.check_format(self._ptr, BfdFormat.ARCHIVE):
            # Set current format and store the inner file list.
            self.file_format = BfdFormat.ARCHIVE
            self.__populate_archive_files()
        else:
            # DO NOT USE bfd_check_format_matches() because it's not tested.
            # An implementation example is on objdump.c at function
            # display_bfd().
            if _bfd.check_format(self._ptr, BfdFormat.OBJECT):
                self.file_format = BfdFormat.OBJECT
            elif _bfd.check_format(self._ptr, BfdFormat.CORE):
                self.file_format = BfdFormat.CORE
            else:
                pass
                # NOTE(review): indentation reconstructed from a
                # flattened source -- the raise is assumed to belong to
                # this else branch (unrecognized format). Confirm
                # against the original file.
                raise BfdException(_bfd.get_last_error_message())
    except TypeError, err:
        raise BfdException("Unable to initialize file format : %s" % err)

    #
    # STEP 3. Extract inner sections and symbolic information.
    #
    if self._ptr is not None:
        # If the file is a valid BFD file format but not an archive then
        # get its sections and symbolic information (if any).
        if self.file_format in [BfdFormat.OBJECT, BfdFormat.CORE]:
            self.__populate_sections()
            self.__populate_symbols()
Open the existing file for reading .
762
7
237,614
def __populate_archive_files(self):
    """Store the list of files inside an archive file.

    Wraps each inner BFD pointer exposed by the C extension in a Bfd
    instance; members that fail to load are skipped (best-effort).
    """
    self.archive_files = []

    for _ptr in _bfd.archive_list_files(self._ptr):
        try:
            self.archive_files.append(Bfd(_ptr))
        except BfdException:
            # Fixed Python-3-incompatible "except BfdException, err"
            # syntax (err was unused). Best-effort: skip bad members.
            pass
Store the list of files inside an archive file .
85
10
237,615
def archive_filenames(self):
    """Return the list of files inside an archive file.

    :raises BfdException: if the underlying BFD call rejects the pointer.
    """
    try:
        return _bfd.archive_list_filenames(self._ptr)
    except TypeError as err:
        # Fixed Python-3-incompatible "except TypeError, err" syntax.
        raise BfdException(err)
Return the list of files inside an archive file .
43
10
237,616
def file_format_name(self):
    """Return the long descriptive name of the open BFD's format.

    :raises BfdException: when file_format is not a valid index.
    """
    try:
        return BfdFormatNamesLong[self.file_format]
    except IndexError:
        # Fixed Python-3-incompatible "except IndexError, err" syntax
        # (err was unused).
        # NOTE(review): if BfdFormatNamesLong is a dict this should
        # also catch KeyError -- confirm against its definition.
        raise BfdException(
            "Invalid format specified (%d)" % self.file_format)
Return the current format name of the open BFD .
50
11
237,617
def __populate_sections(self):
    """Populate the internal section dictionary from the open BFD.

    :raises BfdException: if no BFD is currently open.
    """
    if not self._ptr:
        raise BfdException("BFD not initialized")

    for section in _bfd.get_sections_list(self._ptr):
        try:
            bfd_section = BfdSection(self._ptr, section)
            self._sections[bfd_section.name] = bfd_section
        except BfdSectionException:
            # Fixed Python-3-incompatible "except X, err" syntax.
            # Best-effort: skip sections that fail to parse.
            pass
Get a list of the sections present in the BFD to populate our internal list .
106
17
237,618
def __populate_symbols(self):
    """Populate the internal symbol dictionary from the open BFD.

    Symbols are keyed by their effective address (section VMA + value);
    symbols whose section index is unknown are discarded.

    :raises BfdException: if no BFD is open or parsing fails.
    """
    if not self._ptr:
        raise BfdException("BFD not initialized")

    try:
        symbols = _bfd.get_symbols(self._ptr)

        # Temporary dictionary keyed by section index. This is necessary
        # because the symbolic information returns the section index it
        # belongs to.
        sections = {}
        for section in self.sections:
            sections[self.sections[section].index] = self.sections[section]

        for symbol in symbols:
            # Extract each field for further processing.
            symbol_section_index = symbol[0]
            symbol_name = symbol[1]
            symbol_value = symbol[2]
            symbol_flags = symbol[3]

            # Keep only the flags fully present in the symbol's mask.
            symbol_flags = tuple(
                [f for f in SYMBOL_FLAGS_LIST if symbol_flags & f == f])

            # Create a new symbol instance to hold symbolic information.
            new_symbol = Symbol(
                sections.get(symbol_section_index, None),
                symbol_name, symbol_value, symbol_flags)

            if new_symbol.section is None:
                continue

            # Effective address of the current symbol.
            symbol_address = new_symbol.section.vma + new_symbol.value
            self._symbols[symbol_address] = new_symbol

        del sections
    except BfdSectionException:
        # Fixed Python-3-incompatible except syntax and the
        # "ifnormation" typo in the error message.
        raise BfdException("Exception on symbolic information parsing.")
Get a list of the symbols present in the bfd to populate our internal list .
380
17
237,619
def close(self):
    """Close any existing BFD structure before opening a new one.

    Always resets the internal pointer, even when the underlying
    close call fails.

    :raises BfdException: if the C-level close reports an error.
    """
    if self._ptr:
        try:
            _bfd.close(self._ptr)
        except TypeError as err:
            # Fixed Python-3-incompatible "except TypeError, err" syntax.
            raise BfdException("Unable to close bfd (%s)" % err)
        finally:
            self._ptr = None
Close any existing BFD structure before open a new one .
114
12
237,620
def filename(self):
    """Return the filename of the BFD file being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FILENAME)
    raise BfdException("BFD not initialized")
Return the filename of the BFD file being processed .
50
11
237,621
def cacheable(self):
    """Return the cacheable attribute of the BFD file being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.CACHEABLE)
    raise BfdException("BFD not initialized")
Return the cacheable attribute of the BFD file being processed .
52
13
237,622
def format(self):
    """Return the format attribute of the BFD file being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FORMAT)
    raise BfdException("BFD not initialized")
Return the format attribute of the BFD file being processed .
49
12
237,623
def target(self):
    """Return the target of the BFD file being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.TARGET)
    raise BfdException("BFD not initialized")
Return the target of the BFD file being processed .
49
11
237,624
def machine(self):
    """Return the flavour attribute of the BFD file being processed.

    NOTE(review): named 'machine' but it queries the FLAVOUR
    attribute -- confirm intent against BfdAttributes.
    """
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FLAVOUR)
    raise BfdException("BFD not initialized")
Return the flavour attribute of the BFD file being processed .
50
12
237,625
def family_coff(self):
    """Return the family_coff attribute of the BFD file being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FAMILY_COFF)
    raise BfdException("BFD not initialized")
Return the family_coff attribute of the BFD file being processed .
55
15
237,626
def big_endian(self):
    """Return the big-endian attribute of the BFD file being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.IS_BIG_ENDIAN)
    raise BfdException("BFD not initialized")
Return the big endian attribute of the BFD file being processed .
57
14
237,627
def little_endian(self):
    """Return the little-endian attribute of the BFD file being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.IS_LITTLE_ENDIAN)
    raise BfdException("BFD not initialized")
Return the little_endian attribute of the BFD file being processed .
58
15
237,628
def header_big_endian(self):
    """Return the header_big_endian attribute of the BFD being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.HEADER_BIG_ENDIAN)
    raise BfdException("BFD not initialized")
Return the header_big_endian attribute of the BFD file being processed .
60
17
237,629
def header_little_endian(self):
    """Return the header_little_endian attribute of the BFD being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.HEADER_LITTLE_ENDIAN)
    raise BfdException("BFD not initialized")
Return the header_little_endian attribute of the BFD file being processed .
61
17
237,630
def file_flags(self):
    """Return the file flags attribute of the BFD file being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FILE_FLAGS)
    raise BfdException("BFD not initialized")
Return the file flags attribute of the BFD file being processed .
53
13
237,631
def file_flags(self, _file_flags):
    """Set the new file flags attribute on the BFD file being processed."""
    if self._ptr:
        return _bfd.set_file_flags(self._ptr, _file_flags)
    raise BfdException("BFD not initialized")
Set the new file flags attribute of the BFD file being processed .
53
14
237,632
def applicable_file_flags(self):
    """Return the applicable file flags attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.APPLICABLE_FILE_FLAGS)
    raise BfdException("BFD not initialized")
Return the applicable file flags attribute of the BFD file being processed .
58
14
237,633
def my_archieve(self):
    """Return the my_archieve attribute of the BFD file being processed.

    (Spelling follows the BfdAttributes constant.)
    """
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.MY_ARCHIEVE)
    raise BfdException("BFD not initialized")
Return the my archieve attribute of the BFD file being processed .
55
14
237,634
def has_map(self):
    """Return the has_map attribute of the BFD file being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.HAS_MAP)
    raise BfdException("BFD not initialized")
Return the has map attribute of the BFD file being processed .
52
13
237,635
def is_thin_archieve(self):
    """Return the is_thin_archieve attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.IS_THIN_ARCHIEVE)
    raise BfdException("BFD not initialized")
Return the is thin archieve attribute of the BFD file being processed .
60
15
237,636
def has_gap_in_elf_shndx(self):
    """Return the has_gap_in_elf_shndx attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.HAS_GAP_IN_ELF_SHNDX)
    raise BfdException("BFD not initialized")
Return the has gap in elf shndx attribute of the BFD file being processed .
69
18
237,637
def valid_reloction_types(self):
    """Return the valid_reloc_types attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.VALID_RELOC_TYPES)
    raise BfdException("BFD not initialized")
Return the valid_reloc_types attribute of the BFD file being processed .
62
17
237,638
def user_data(self):
    """Return the usrdata attribute of the BFD file being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.USRDATA)
    raise BfdException("BFD not initialized")
Return the usrdata attribute of the BFD file being processed .
52
14
237,639
def start_address(self):
    """Return the start address attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.START_ADDRESS)
    raise BfdException("BFD not initialized")
Return the start address attribute of the BFD file being processed .
54
13
237,640
def symbols_count(self):
    """Return the symcount attribute of the BFD file being processed."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.SYMCOUNT)
    raise BfdException("BFD not initialized")
Return the symcount attribute of the BFD file being processed .
52
13
237,641
def out_symbols(self):
    """Return the out symbols attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.OUTSYMBOLS)
    raise BfdException("BFD not initialized")
Return the out symbols attribute of the BFD file being processed .
56
13
237,642
def sections_count(self):
    """Return the number of sections in the open BFD.

    This should match the 'sections' attribute length, so
    len(bfd.sections) is preferable.
    """
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.COUNT_SECTIONS)
    raise BfdException("BFD not initialized")
Return the sections_count attribute of the BFD file being processed .
81
14
237,643
def dynamic_symbols_count(self):
    """Return the dynamic symbols count attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.DYNAMIC_SYMCOUNT)
    raise BfdException("BFD not initialized")
Return the dynamic symbols count attribute of the BFD file being processed .
61
14
237,644
def symbol_leading_char(self):
    """Return the symbol leading char attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.SYMBOL_LEADING_CHAR)
    raise BfdException("BFD not initialized")
Return the symbol leading char attribute of the BFD file being processed .
60
14
237,645
def arch_size(self):
    """Return the architecture size in bits.

    :raises BfdException: if no BFD is open or the size query fails.
    """
    if not self._ptr:
        raise BfdException("BFD not initialized")
    try:
        return _bfd.get_arch_size(self._ptr)
    except Exception:
        # Fixed Python-3-incompatible "except Exception, err" syntax
        # (err was unused) and the "architeure" typo in the message.
        raise BfdException("Unable to determine architecture size.")
Return the architecture size in bits .
66
10
237,646
def display_matrix(self, matrix, interval=2.0, brightness=1.0, fading=False, ignore_duplicates=False):
    """Display an LED matrix on Nuimo's LED matrix display.

    All arguments are forwarded unchanged to the matrix writer.
    """
    options = dict(
        interval=interval,
        brightness=brightness,
        fading=fading,
        ignore_duplicates=ignore_duplicates,
    )
    self._matrix_writer.write(matrix=matrix, **options)
Displays an LED matrix on Nuimo's LED matrix display .
73
13
237,647
def get_asn_origin_whois(self, asn_registry='radb', asn=None, retry_count=3, server=None, port=43):
    """Retrieve CIDR info for an ASN via a whois origin query.

    :param asn_registry: key into ASN_ORIGIN_WHOIS for the default server.
    :param asn: the AS number string to query.
    :param retry_count: remaining retries on rate-limit/socket errors.
    :param server: whois server override; defaults per registry.
    :param port: whois TCP port.
    :returns: the raw whois response text.
    :raises WhoisLookupError: on unrecoverable failure.
    :raises WhoisRateLimitError: when retries are exhausted on a limit.
    """
    try:
        if server is None:
            server = ASN_ORIGIN_WHOIS[asn_registry]['server']

        # Create the connection for the whois query.
        conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        conn.settimeout(self.timeout)
        log.debug('ASN origin WHOIS query for {0} at {1}:{2}'.format(
            asn, server, port))
        conn.connect((server, port))

        # Prep the query.
        query = ' -i origin {0}{1}'.format(asn, '\r\n')

        # Query the whois server, and store the results.
        conn.send(query.encode())

        # Drain the socket until the peer closes (empty recv).
        response = ''
        while True:
            d = conn.recv(4096).decode()
            response += d
            if not d:
                break
        conn.close()

        # TODO: this was taken from get_whois(). Need to test rate limiting
        if 'Query rate limit exceeded' in response:  # pragma: no cover
            if retry_count > 0:
                log.debug('ASN origin WHOIS query rate limit exceeded. '
                          'Waiting...')
                sleep(1)
                return self.get_asn_origin_whois(
                    asn_registry=asn_registry, asn=asn,
                    retry_count=retry_count - 1,
                    server=server, port=port
                )
            else:
                raise WhoisRateLimitError(
                    'ASN origin Whois lookup failed for {0}. Rate limit '
                    'exceeded, wait and try again (possibly a '
                    'temporary block).'.format(asn))
        elif ('error 501' in response or
              'error 230' in response):  # pragma: no cover
            log.debug('ASN origin WHOIS query error: {0}'.format(response))
            # Caught by the bare except below and rewrapped.
            raise ValueError

        return str(response)

    except (socket.timeout, socket.error) as e:
        log.debug('ASN origin WHOIS query socket error: {0}'.format(e))
        if retry_count > 0:
            log.debug('ASN origin WHOIS query retrying (count: {0})'
                      ''.format(str(retry_count)))
            return self.get_asn_origin_whois(
                asn_registry=asn_registry, asn=asn,
                retry_count=retry_count - 1,
                server=server, port=port
            )
        else:
            raise WhoisLookupError(
                'ASN origin WHOIS lookup failed for {0}.'.format(asn))
    except WhoisRateLimitError:  # pragma: no cover
        raise
    except:  # pragma: no cover
        raise WhoisLookupError(
            'ASN origin WHOIS lookup failed for {0}.'.format(asn))
The function for retrieving CIDR info for an ASN via whois .
674
16
237,648
def get_http_json(self, url=None, retry_count=3, rate_limit_timeout=120, headers=None):
    """Retrieve a JSON result via HTTP (RDAP).

    :param url: the full URL to query.
    :param retry_count: remaining retries on rate limiting/errors.
    :param rate_limit_timeout: seconds to wait between limited tries.
    :param headers: optional HTTP headers; defaults to the RDAP Accept.
    :returns: the decoded JSON document (dict).
    :raises HTTPLookupError: on unrecoverable failure.
    :raises HTTPRateLimitError: when retries are exhausted on a limit.
    """
    if headers is None:
        headers = {'Accept': 'application/rdap+json'}

    try:
        # Create the connection for the whois query.
        log.debug('HTTP query for {0} at {1}'.format(
            self.address_str, url))
        conn = Request(url, headers=headers)
        data = self.opener.open(conn, timeout=self.timeout)
        # readall() exists on some Py3 response objects; fall back to
        # read() elsewhere.
        try:
            d = json.loads(data.readall().decode('utf-8', 'ignore'))
        except AttributeError:  # pragma: no cover
            d = json.loads(data.read().decode('utf-8', 'ignore'))

        try:
            # Tests written but commented out. I do not want to send a
            # flood of requests on every test.
            for tmp in d['notices']:  # pragma: no cover
                if tmp['title'] == 'Rate Limit Notice':
                    log.debug('RDAP query rate limit exceeded.')
                    if retry_count > 0:
                        log.debug('Waiting {0} seconds...'.format(
                            str(rate_limit_timeout)))
                        sleep(rate_limit_timeout)
                        return self.get_http_json(
                            url=url, retry_count=retry_count - 1,
                            rate_limit_timeout=rate_limit_timeout,
                            headers=headers
                        )
                    else:
                        raise HTTPRateLimitError(
                            'HTTP lookup failed for {0}. Rate limit '
                            'exceeded, wait and try again (possibly a '
                            'temporary block).'.format(url))
        except (KeyError, IndexError):  # pragma: no cover
            pass

        return d

    except HTTPError as e:  # pragma: no cover
        # RIPE is producing this HTTP error rather than a JSON error.
        if e.code == 429:
            log.debug('HTTP query rate limit exceeded.')
            if retry_count > 0:
                log.debug('Waiting {0} seconds...'.format(
                    str(rate_limit_timeout)))
                sleep(rate_limit_timeout)
                return self.get_http_json(
                    url=url, retry_count=retry_count - 1,
                    rate_limit_timeout=rate_limit_timeout,
                    headers=headers
                )
            else:
                raise HTTPRateLimitError(
                    'HTTP lookup failed for {0}. Rate limit '
                    'exceeded, wait and try again (possibly a '
                    'temporary block).'.format(url))
        else:
            raise HTTPLookupError(
                'HTTP lookup failed for {0} with error '
                'code {1}.'.format(url, str(e.code)))
    except (URLError, socket.timeout, socket.error) as e:
        log.debug('HTTP query socket error: {0}'.format(e))
        if retry_count > 0:
            log.debug('HTTP query retrying (count: {0})'.format(
                str(retry_count)))
            return self.get_http_json(
                url=url, retry_count=retry_count - 1,
                rate_limit_timeout=rate_limit_timeout,
                headers=headers
            )
        else:
            raise HTTPLookupError(
                'HTTP lookup failed for {0}.'.format(url))
    except (HTTPLookupError, HTTPRateLimitError) as e:  # pragma: no cover
        raise e
    except:  # pragma: no cover
        raise HTTPLookupError('HTTP lookup failed for {0}.'.format(url))
The function for retrieving a json result via HTTP .
811
10
237,649
def get_host(self, retry_count=3):
    """Retrieve host information (reverse DNS) for the IP address.

    :param retry_count: remaining retries on socket errors.
    :returns: namedtuple with hostname, aliaslist and ipaddrlist.
    :raises HostLookupError: when the lookup ultimately fails.
    """
    try:
        default_timeout_set = False
        if not socket.getdefaulttimeout():
            socket.setdefaulttimeout(self.timeout)
            default_timeout_set = True

        log.debug('Host query for {0}'.format(self.address_str))
        ret = socket.gethostbyaddr(self.address_str)

        if default_timeout_set:  # pragma: no cover
            socket.setdefaulttimeout(None)

        results = namedtuple('get_host_results', 'hostname, aliaslist, '
                                                 'ipaddrlist')
        # BUG FIX: gethostbyaddr() returns a 3-tuple that must be
        # unpacked into the three namedtuple fields. Passing the tuple
        # as a single argument raised TypeError, which the bare except
        # below silently rewrapped as HostLookupError.
        return results(*ret)

    except (socket.timeout, socket.error) as e:
        log.debug('Host query socket error: {0}'.format(e))
        if retry_count > 0:
            log.debug('Host query retrying (count: {0})'.format(
                str(retry_count)))
            return self.get_host(retry_count - 1)
        else:
            raise HostLookupError(
                'Host lookup failed for {0}.'.format(self.address_str))
    except:  # pragma: no cover
        raise HostLookupError(
            'Host lookup failed for {0}.'.format(self.address_str))
The function for retrieving host information for an IP address .
285
11
237,650
def get_http_raw(self, url=None, retry_count=3, headers=None, request_type='GET', form_data=None):
    """Retrieve a raw HTML/text result via HTTP.

    :param url: the full URL to query.
    :param retry_count: remaining retries on socket errors.
    :param headers: optional HTTP headers; defaults to a text/html Accept.
    :param request_type: HTTP method name ('GET', 'POST', ...).
    :param form_data: optional mapping urlencoded as the request body.
    :returns: the decoded response body as a string.
    :raises HTTPLookupError: when the lookup ultimately fails.
    """
    if headers is None:
        headers = {'Accept': 'text/html'}

    enc_form_data = None
    if form_data:
        enc_form_data = urlencode(form_data)
        try:
            # Py 2 inspection will alert on the encoding arg, no harm done.
            enc_form_data = bytes(enc_form_data, encoding='ascii')
        except TypeError:  # pragma: no cover
            pass

    try:
        # Create the connection for the HTTP query.
        log.debug('HTTP query for {0} at {1}'.format(
            self.address_str, url))
        try:
            # Py 2 inspection alert bypassed by using kwargs dict.
            conn = Request(url=url, data=enc_form_data, headers=headers,
                           **{'method': request_type})
        except TypeError:  # pragma: no cover
            # Older Request() without a method kwarg.
            conn = Request(url=url, data=enc_form_data, headers=headers)

        data = self.opener.open(conn, timeout=self.timeout)

        # readall() exists on some Py3 response objects; read() elsewhere.
        try:
            d = data.readall().decode('ascii', 'ignore')
        except AttributeError:  # pragma: no cover
            d = data.read().decode('ascii', 'ignore')

        return str(d)

    except (URLError, socket.timeout, socket.error) as e:
        log.debug('HTTP query socket error: {0}'.format(e))
        if retry_count > 0:
            log.debug('HTTP query retrying (count: {0})'.format(
                str(retry_count)))
            return self.get_http_raw(
                url=url, retry_count=retry_count - 1, headers=headers,
                request_type=request_type, form_data=form_data
            )
        else:
            raise HTTPLookupError(
                'HTTP lookup failed for {0}.'.format(url))
    except HTTPLookupError as e:  # pragma: no cover
        raise e
    except Exception:  # pragma: no cover
        raise HTTPLookupError('HTTP lookup failed for {0}.'.format(url))
The function for retrieving a raw HTML result via HTTP .
519
11
237,651
def generate_output(line='0', short=None, name=None, value=None, is_parent=False, colorize=True):
    """Format one line of CLI output.

    :param line: depth key ('0'...) into LINES/COLOR_DEPTH for the prefix.
    :param short: short field label; takes precedence over *name*.
    :param name: long field label, shown in parentheses after *short*.
    :param value: the field value; suppressed on parent (header) lines.
    :param is_parent: True when this line only introduces child lines.
    :param colorize: whether to emit ANSI color codes.
    :returns: the assembled, newline-terminated output line.
    """
    # TODO: so ugly
    output = '{0}{1}{2}{3}{4}{5}{6}{7}\n'.format(
        # Tree-drawing prefix ('C' suffix selects the colorized variant).
        LINES['{0}{1}'.format(line, 'C' if colorize else '')] if (
            line in LINES.keys()) else '',
        # Per-depth color escape.
        COLOR_DEPTH[line] if (colorize and line in COLOR_DEPTH) else '',
        ANSI['b'],
        # Label: short wins, then name, then nothing.
        short if short is not None else (
            name if (name is not None) else ''),
        # "(name)" suffix only when both labels are present.
        '' if (name is None or short is None) else ' ({0})'.format(name),
        '' if (name is None and short is None) else ': ',
        ANSI['end'] if colorize else '',
        '' if is_parent else value
    )
    return output
The function for formatting CLI output results .
219
8
237,652
def generate_output_header(self, query_type='RDAP'):
    """Generate the underlined, bold CLI output header line."""
    template = '\n{0}{1}{2} query for {3}:{4}\n\n'
    return template.format(
        ANSI['ul'],
        ANSI['b'],
        query_type,
        self.obj.address_str,
        ANSI['end'],
    )
The function for generating the CLI output header .
84
9
237,653
def generate_output_newline(self, line='0', colorize=True):
    """Generate a blank (parent-only) CLI output line at *line* depth."""
    opts = {'line': line, 'is_parent': True, 'colorize': colorize}
    return generate_output(**opts)
The function for generating a CLI output new line .
43
10
237,654
def generate_output_asn(self, json_data=None, hr=True, show_name=False, colorize=True):
    """Generate CLI output for ASN lookup results.

    :param json_data: the ASN result dict (may be None).
    :param hr: use human-readable field labels from HR_ASN.
    :param show_name: also show the long field name.
    :param colorize: emit ANSI color codes.
    :returns: the formatted output string.
    """
    if json_data is None:
        json_data = {}

    # Render only the ASN keys actually present in the result.
    keys = {'asn', 'asn_cidr', 'asn_country_code', 'asn_date',
            'asn_registry', 'asn_description'}.intersection(json_data)

    output = ''
    for key in keys:
        output += generate_output(
            line='0',
            short=HR_ASN[key]['_short'] if hr else key,
            name=HR_ASN[key]['_name'] if (hr and show_name) else None,
            # Treat None, empty and the literal 'NA' as missing.
            value=(json_data[key] if (
                json_data[key] is not None and
                len(json_data[key]) > 0 and
                json_data[key] != 'NA') else 'None'),
            colorize=colorize
        )

    return output
The function for generating CLI output ASN results .
224
10
237,655
def generate_output_entities(self, json_data=None, hr=True, show_name=False, colorize=True):
    """Generate CLI output for RDAP entity handles."""
    missing = json_data is None or json_data['entities'] is None

    header = generate_output(
        line='0',
        short=HR_RDAP['entities']['_short'] if hr else 'entities',
        name=HR_RDAP['entities']['_name'] if (hr and show_name) else None,
        is_parent=not missing,
        value='None' if missing else None,
        colorize=colorize
    )

    parts = [header]
    if json_data is not None:
        for ent in json_data['entities']:
            parts.append(generate_output(
                line='1', value=ent, colorize=colorize))

    return ''.join(parts)
The function for generating CLI output RDAP entity results .
215
11
237,656
def generate_output_events(self, source, key, val, line='2', hr=True, show_name=False, colorize=True):
    """Generate CLI output for RDAP event lists.

    :param source: top-level result key for HR_RDAP label lookup.
    :param key: the field under *source* being rendered.
    :param val: the list of event dicts (may be None/empty).
    :param line: output depth of the header; children print one deeper.
    :returns: the formatted output string.
    """
    output = generate_output(
        line=line,
        short=HR_RDAP[source][key]['_short'] if hr else key,
        name=HR_RDAP[source][key]['_name'] if (hr and show_name) else None,
        is_parent=False if (val is None or len(val) == 0) else True,
        value='None' if (val is None or len(val) == 0) else None,
        colorize=colorize
    )

    if val is not None:
        count = 0
        for item in val:
            # Each field is optional on an event record.
            try:
                action = item['action']
            except KeyError:
                action = None
            try:
                timestamp = item['timestamp']
            except KeyError:
                timestamp = None
            try:
                actor = item['actor']
            except KeyError:
                actor = None

            # Separate consecutive events with a blank child line.
            if count > 0:
                output += generate_output(
                    line=str(int(line) + 1),
                    is_parent=True,
                    colorize=colorize
                )

            output += generate_output(
                line=str(int(line) + 1),
                short=HR_RDAP_COMMON[key]['action']['_short'] if hr else 'action',
                name=HR_RDAP_COMMON[key]['action']['_name'] if (
                    hr and show_name) else None,
                value=action,
                colorize=colorize
            )
            output += generate_output(
                line=str(int(line) + 1),
                short=HR_RDAP_COMMON[key]['timestamp']['_short'] if hr else 'timestamp',
                name=HR_RDAP_COMMON[key]['timestamp']['_name'] if (
                    hr and show_name) else None,
                value=timestamp,
                colorize=colorize
            )
            output += generate_output(
                line=str(int(line) + 1),
                short=HR_RDAP_COMMON[key]['actor']['_short'] if hr else 'actor',
                name=HR_RDAP_COMMON[key]['actor']['_name'] if (
                    hr and show_name) else None,
                value=actor,
                colorize=colorize
            )
            count += 1

    return output
The function for generating CLI output RDAP events results .
532
11
237,657
def generate_output_list(self, source, key, val, line='2', hr=True, show_name=False, colorize=True):
    """Generate CLI output for a simple RDAP list field."""
    empty = val is None or len(val) == 0

    pieces = [generate_output(
        line=line,
        short=HR_RDAP[source][key]['_short'] if hr else key,
        name=HR_RDAP[source][key]['_name'] if (hr and show_name) else None,
        is_parent=not empty,
        value='None' if empty else None,
        colorize=colorize
    )]

    if val is not None:
        child_line = str(int(line) + 1)
        for item in val:
            pieces.append(generate_output(
                line=child_line, value=item, colorize=colorize))

    return ''.join(pieces)
The function for generating CLI output RDAP list results .
188
11
237,658
def generate_output_notices(self, source, key, val, line='1', hr=True,
                            show_name=False, colorize=True):
    """
    The function for generating CLI output RDAP notices results.

    Renders title, description (re-indented per line), and the links list
    for each notice.
    """
    output = generate_output(
        line=line,
        short=HR_RDAP[source][key]['_short'] if hr else key,
        name=HR_RDAP[source][key]['_name'] if (hr and show_name) else None,
        is_parent=False if (val is None or len(val) == 0) else True,
        value='None' if (val is None or len(val) == 0) else None,
        colorize=colorize
    )
    if val is not None:
        count = 0
        for item in val:
            title = item['title']
            description = item['description']
            links = item['links']
            if count > 0:
                # Separator between successive notices.
                output += generate_output(
                    line=str(int(line) + 1),
                    is_parent=True,
                    colorize=colorize
                )
            output += generate_output(
                line=str(int(line) + 1),
                short=HR_RDAP_COMMON[key]['title']['_short'] if hr else (
                    'title'),
                name=HR_RDAP_COMMON[key]['title']['_name'] if (
                    hr and show_name) else None,
                value=title,
                colorize=colorize
            )
            output += generate_output(
                line=str(int(line) + 1),
                short=HR_RDAP_COMMON[key]['description']['_short'] if hr
                else 'description',
                name=HR_RDAP_COMMON[key]['description']['_name'] if (
                    hr and show_name) else None,
                # Re-indent multi-line descriptions under the child line.
                value=description.replace(
                    '\n', '\n{0}'.format(generate_output(line='3'))),
                colorize=colorize
            )
            output += self.generate_output_list(
                source=source,
                key='links',
                val=links,
                line=str(int(line) + 1),
                hr=hr,
                show_name=show_name,
                colorize=colorize
            )
            count += 1
    return output
The function for generating CLI output RDAP notices results .
496
11
237,659
def generate_output_network(self, json_data=None, hr=True, show_name=False,
                            colorize=True):
    """
    The function for generating CLI output RDAP network results.

    Dispatches each network field to the appropriate list/notices/events
    renderer; the 'raw' field is never displayed.
    """
    if json_data is None:
        json_data = {}
    output = generate_output(
        line='0',
        short=HR_RDAP['network']['_short'] if hr else 'network',
        name=HR_RDAP['network']['_name'] if (hr and show_name) else None,
        is_parent=True,
        colorize=colorize
    )
    for key, val in json_data['network'].items():
        if key in ['links', 'status']:
            output += self.generate_output_list(
                source='network',
                key=key,
                val=val,
                line='1',
                hr=hr,
                show_name=show_name,
                colorize=colorize
            )
        elif key in ['notices', 'remarks']:
            output += self.generate_output_notices(
                source='network',
                key=key,
                val=val,
                line='1',
                hr=hr,
                show_name=show_name,
                colorize=colorize
            )
        elif key == 'events':
            output += self.generate_output_events(
                source='network',
                key=key,
                val=val,
                line='1',
                hr=hr,
                show_name=show_name,
                colorize=colorize
            )
        elif key not in ['raw']:
            # Plain scalar field.
            output += generate_output(
                line='1',
                short=HR_RDAP['network'][key]['_short'] if hr else key,
                name=HR_RDAP['network'][key]['_name'] if (
                    hr and show_name) else None,
                value=val,
                colorize=colorize
            )
    return output
The function for generating CLI output RDAP network results .
411
11
237,660
def generate_output_whois_nets(self, json_data=None, hr=True,
                               show_name=False, colorize=True):
    """
    The function for generating CLI output Legacy Whois networks results.
    """
    if json_data is None:
        json_data = {}
    output = generate_output(
        line='0',
        short=HR_WHOIS['nets']['_short'] if hr else 'nets',
        name=HR_WHOIS['nets']['_name'] if (hr and show_name) else None,
        is_parent=True,
        colorize=colorize
    )
    count = 0
    for net in json_data['nets']:
        if count > 0:
            # Separator between successive networks.
            output += self.generate_output_newline(
                line='1',
                colorize=colorize
            )
        count += 1
        output += generate_output(
            line='1',
            short=net['handle'],
            is_parent=True,
            colorize=colorize
        )
        for key, val in net.items():
            if val and '\n' in val:
                # Multi-line values get one child line per line of text.
                output += generate_output(
                    line='2',
                    short=HR_WHOIS['nets'][key]['_short'] if hr else key,
                    name=HR_WHOIS['nets'][key]['_name'] if (
                        hr and show_name) else None,
                    is_parent=False if (
                        val is None or len(val) == 0) else True,
                    value='None' if (
                        val is None or len(val) == 0) else None,
                    colorize=colorize
                )
                for v in val.split('\n'):
                    output += generate_output(
                        line='3',
                        value=v,
                        colorize=colorize
                    )
            else:
                output += generate_output(
                    line='2',
                    short=HR_WHOIS['nets'][key]['_short'] if hr else key,
                    name=HR_WHOIS['nets'][key]['_name'] if (
                        hr and show_name) else None,
                    value=val,
                    colorize=colorize
                )
    return output
The function for generating CLI output Legacy Whois networks results .
456
12
237,661
def generate_output_nir(self, json_data=None, hr=True, show_name=False,
                        colorize=True):
    """
    The function for generating CLI output NIR network results.
    """
    if json_data is None:
        json_data = {}
    output = generate_output(
        line='0',
        short=HR_WHOIS_NIR['nets']['_short'] if hr else 'nir_nets',
        name=HR_WHOIS_NIR['nets']['_name'] if (hr and show_name) else None,
        is_parent=True,
        colorize=colorize
    )
    count = 0
    if json_data['nir']:
        for net in json_data['nir']['nets']:
            if count > 0:
                # Separator between successive networks.
                output += self.generate_output_newline(
                    line='1',
                    colorize=colorize
                )
            count += 1
            output += generate_output(
                line='1',
                short=net['handle'],
                is_parent=True,
                colorize=colorize
            )
            for key, val in net.items():
                # Structured values (dicts, multi-line strings, and the
                # nameservers list) are rendered as child trees.
                if val and (isinstance(val, dict) or '\n' in val or
                            key == 'nameservers'):
                    output += generate_output(
                        line='2',
                        short=(HR_WHOIS_NIR['nets'][key]['_short'] if (
                            hr) else key),
                        name=HR_WHOIS_NIR['nets'][key]['_name'] if (
                            hr and show_name) else None,
                        is_parent=False if (
                            val is None or len(val) == 0) else True,
                        value='None' if (
                            val is None or len(val) == 0) else None,
                        colorize=colorize
                    )
                    if key == 'contacts':
                        for k, v in val.items():
                            if v:
                                output += generate_output(
                                    line='3',
                                    is_parent=False if (
                                        len(v) == 0) else True,
                                    name=k,
                                    colorize=colorize
                                )
                                for contact_key, contact_val in v.items():
                                    # NOTE(review): this condition checks
                                    # v rather than contact_val — confirm
                                    # whether that is intentional.
                                    if v is not None:
                                        tmp_out = '{0}{1}{2}'.format(
                                            contact_key,
                                            ': ',
                                            contact_val
                                        )
                                        output += generate_output(
                                            line='4',
                                            value=tmp_out,
                                            colorize=colorize
                                        )
                    elif key == 'nameservers':
                        for v in val:
                            output += generate_output(
                                line='3',
                                value=v,
                                colorize=colorize
                            )
                    else:
                        for v in val.split('\n'):
                            output += generate_output(
                                line='3',
                                value=v,
                                colorize=colorize
                            )
                else:
                    output += generate_output(
                        line='2',
                        short=(HR_WHOIS_NIR['nets'][key]['_short'] if (
                            hr) else key),
                        name=HR_WHOIS_NIR['nets'][key]['_name'] if (
                            hr and show_name) else None,
                        value=val,
                        colorize=colorize
                    )
    else:
        output += 'None'
    return output
The function for generating CLI output NIR network results .
694
11
237,662
def parse_fields_whois(self, response):
    """
    The function for parsing ASN fields from a whois response.
    """
    try:
        fields = response.split('|')
        # The registry must be one we know how to query further.
        registry = fields[4].strip(' \n')
        if registry not in self.rir_whois.keys():
            raise ASNRegistryError(
                'ASN registry {0} is not known.'.format(registry)
            )
        ret = {
            'asn_registry': registry,
            'asn': fields[0].strip(' \n'),
            'asn_cidr': fields[2].strip(' \n'),
            'asn_country_code': fields[3].strip(' \n').upper(),
            'asn_date': fields[5].strip(' \n'),
            'asn_description': fields[6].strip(' \n')
        }
    except ASNRegistryError:
        raise
    except Exception as e:
        raise ASNParseError('Parsing failed for "{0}" with exception: {1}.'
                            ''.format(response, e)[:100])
    return ret
The function for parsing ASN fields from a whois response .
283
13
237,663
def parse_fields_http(self, response, extra_org_map=None):
    """
    The function for parsing ASN fields from a http response.

    Maps the orgRef handle of the most specific matching network to an
    RIR via self.org_map (optionally extended by extra_org_map).
    """
    # Set the org_map. Map the orgRef handle to an RIR.
    org_map = self.org_map.copy()
    try:
        org_map.update(extra_org_map)
    except (TypeError, ValueError, IndexError, KeyError):
        pass
    try:
        asn_data = {
            'asn_registry': None,
            'asn': None,
            'asn_cidr': None,
            'asn_country_code': None,
            'asn_date': None,
            'asn_description': None
        }
        try:
            net_list = response['nets']['net']
            # A single network is returned as a dict; normalize to list.
            if not isinstance(net_list, list):
                net_list = [net_list]
        except (KeyError, TypeError):
            log.debug('No networks found')
            net_list = []
        # Iterate from the last (most specific) network backward until a
        # known orgRef handle is found.
        for n in reversed(net_list):
            try:
                asn_data['asn_registry'] = (
                    org_map[n['orgRef']['@handle'].upper()]
                )
            except KeyError as e:
                log.debug('Could not parse ASN registry via HTTP: '
                          '{0}'.format(str(e)))
                continue
            break
        if not asn_data['asn_registry']:
            log.debug('Could not parse ASN registry via HTTP')
            raise ASNRegistryError('ASN registry lookup failed.')
    except ASNRegistryError:
        raise
    except Exception as e:  # pragma: no cover
        raise ASNParseError('Parsing failed for "{0}" with exception: {1}.'
                            ''.format(response, e)[:100])
    return asn_data
The function for parsing ASN fields from a http response .
401
12
237,664
def get_nets_radb(self, response, is_http=False):
    """
    The function for parsing network blocks from ASN origin data.
    """
    # HTTP responses terminate values with <br>; whois uses line ends.
    if is_http:
        pattern = r'route(?:6)?:[^\S\n]+(?P<val>.+?)<br>'
    else:
        pattern = r'^route(?:6)?:[^\S\n]+(?P<val>.+|.+)$'
    results = []
    # Record each route/route6 CIDR with its match offsets.
    for found in re.finditer(pattern, response, re.MULTILINE):
        try:
            entry = copy.deepcopy(BASE_NET)
            entry['cidr'] = found.group(1).strip()
            entry['start'] = found.start()
            entry['end'] = found.end()
            results.append(entry)
        except ValueError:  # pragma: no cover
            pass
    return results
The function for parsing network blocks from ASN origin data .
206
12
237,665
def get_nets_jpnic ( self , response ) : nets = [ ] # Iterate through all of the networks found, storing the CIDR value # and the start and end positions. for match in re . finditer ( r'^.*?(\[Network Number\])[^\S\n]+.+?>(?P<val>.+?)</A>$' , response , re . MULTILINE ) : try : net = copy . deepcopy ( BASE_NET ) tmp = ip_network ( match . group ( 2 ) ) try : # pragma: no cover network_address = tmp . network_address except AttributeError : # pragma: no cover network_address = tmp . ip pass try : # pragma: no cover broadcast_address = tmp . broadcast_address except AttributeError : # pragma: no cover broadcast_address = tmp . broadcast pass net [ 'range' ] = '{0} - {1}' . format ( network_address + 1 , broadcast_address ) cidr = ip_network ( match . group ( 2 ) . strip ( ) ) . __str__ ( ) net [ 'cidr' ] = cidr net [ 'start' ] = match . start ( ) net [ 'end' ] = match . end ( ) nets . append ( net ) except ( ValueError , TypeError ) : pass return nets
The function for parsing network blocks from jpnic whois data .
300
14
237,666
def get_contact(self, response=None, nir=None, handle=None, retry_count=3,
                dt_format=None):
    """
    The function for retrieving and parsing NIR whois data based on
    NIR_WHOIS contact_fields.

    Args:
        response: Optional pre-fetched response to parse directly.
        nir: The NIR key into the NIR_WHOIS settings dict.
        handle: The contact handle substituted into the NIR's URL.
        retry_count: HTTP retry attempts on failure.
        dt_format: Optional datetime format passed to parse_fields.
    """
    if response or nir == 'krnic':
        # A provided response (always the case for krnic) is parsed as-is.
        contact_response = response
    else:
        # Retrieve the whois data.
        contact_response = self._net.get_http_raw(
            url=str(NIR_WHOIS[nir]['url']).format(handle),
            retry_count=retry_count,
            headers=NIR_WHOIS[nir]['request_headers'],
            request_type=NIR_WHOIS[nir]['request_type']
        )
    return self.parse_fields(
        response=contact_response,
        fields_dict=NIR_WHOIS[nir]['contact_fields'],
        dt_format=dt_format,
        hourdelta=int(NIR_WHOIS[nir]['dt_hourdelta']),
        is_contact=True
    )
The function for retrieving and parsing NIR whois data based on NIR_WHOIS contact_fields .
227
22
237,667
def _parse_address ( self , val ) : ret = { 'type' : None , 'value' : None } try : ret [ 'type' ] = val [ 1 ] [ 'type' ] except ( KeyError , ValueError , TypeError ) : pass try : ret [ 'value' ] = val [ 1 ] [ 'label' ] except ( KeyError , ValueError , TypeError ) : ret [ 'value' ] = '\n' . join ( val [ 3 ] ) . strip ( ) try : self . vars [ 'address' ] . append ( ret ) except AttributeError : self . vars [ 'address' ] = [ ] self . vars [ 'address' ] . append ( ret )
The function for parsing the vcard address .
157
9
237,668
def _parse_phone ( self , val ) : ret = { 'type' : None , 'value' : None } try : ret [ 'type' ] = val [ 1 ] [ 'type' ] except ( IndexError , KeyError , ValueError , TypeError ) : pass ret [ 'value' ] = val [ 3 ] . strip ( ) try : self . vars [ 'phone' ] . append ( ret ) except AttributeError : self . vars [ 'phone' ] = [ ] self . vars [ 'phone' ] . append ( ret )
The function for parsing the vcard phone numbers .
122
10
237,669
def _parse_email ( self , val ) : ret = { 'type' : None , 'value' : None } try : ret [ 'type' ] = val [ 1 ] [ 'type' ] except ( KeyError , ValueError , TypeError ) : pass ret [ 'value' ] = val [ 3 ] . strip ( ) try : self . vars [ 'email' ] . append ( ret ) except AttributeError : self . vars [ 'email' ] = [ ] self . vars [ 'email' ] . append ( ret )
The function for parsing the vcard email addresses .
119
10
237,670
def parse(self):
    """
    The function for parsing the vcard to the vars dictionary.
    """
    dispatch = {
        'fn': self._parse_name,
        'kind': self._parse_kind,
        'adr': self._parse_address,
        'tel': self._parse_phone,
        'email': self._parse_email,
        'role': self._parse_role,
        'title': self._parse_title
    }
    for entry in self.vcard:
        try:
            handler = dispatch.get(entry[0])
            handler(entry)
        except (KeyError, ValueError, TypeError):
            # Unknown or malformed entries are skipped.
            pass
The function for parsing the vcard to the vars dictionary .
124
13
237,671
def ipv4_lstrip_zeros(address):
    """
    The function to strip leading zeros in each octet of an IPv4 address.
    """
    octets = address.strip().split('.')
    stripped = []
    for octet in octets:
        # Drop any attached CIDR suffix, then strip leading zeros.
        cleaned = octet.split('/')[0].lstrip('0')
        # An all-zero octet strips to the empty string; restore '0'.
        stripped.append(cleaned if cleaned not in ['', None] else '0')
    return '.'.join(stripped)
The function to strip leading zeros in each octet of an IPv4 address .
112
17
237,672
def get_countries(is_legacy_xml=False):
    """
    The function to generate a dictionary containing ISO_3166-1 country
    codes to names.

    Args:
        is_legacy_xml: Whether to read the legacy XML data file instead
            of the CSV file.

    Returns:
        dict: Country codes mapped to country names.
    """
    # Initialize the countries dictionary.
    countries = {}

    # Set the data directory based on if the script is a frozen executable.
    if sys.platform == 'win32' and getattr(sys, 'frozen', False):
        data_dir = path.dirname(sys.executable)  # pragma: no cover
    else:
        data_dir = path.dirname(__file__)

    if is_legacy_xml:
        log.debug('Opening country code legacy XML: {0}'.format(
            str(data_dir) + '/data/iso_3166-1_list_en.xml'))

        # Fixed: the file handle was never closed; use a context manager.
        with io.open(str(data_dir) + '/data/iso_3166-1_list_en.xml', 'r',
                     encoding='ISO-8859-1') as f:
            data = f.read()

        # Check if there is data.
        if not data:  # pragma: no cover
            return {}

        # Parse the data to get the DOM.
        dom = parseString(data)

        # Retrieve the country entries.
        entries = dom.getElementsByTagName('ISO_3166-1_Entry')

        # Iterate through the entries and add to the countries dictionary.
        for entry in entries:
            # Retrieve the country code and name from the DOM.
            code = entry.getElementsByTagName(
                'ISO_3166-1_Alpha-2_Code_element')[0].firstChild.data
            name = entry.getElementsByTagName(
                'ISO_3166-1_Country_name')[0].firstChild.data
            # Add to the countries dictionary.
            countries[code] = name.title()
    else:
        # Fixed: the debug message previously reported the XML path
        # instead of the CSV path.
        log.debug('Opening country code CSV: {0}'.format(
            str(data_dir) + '/data/iso_3166-1.csv'))

        # Fixed: the file handle was never closed; use a context manager.
        with io.open(str(data_dir) + '/data/iso_3166-1.csv', 'r',
                     encoding='utf-8') as f:
            # Create csv reader object.
            csv_reader = csv.reader(f, delimiter=',', quotechar='"')

            # Iterate through the rows and add to the countries dictionary.
            for row in csv_reader:
                # Retrieve the country code and name columns.
                code = row[0]
                name = row[1]
                # Add to the countries dictionary.
                countries[code] = name

    return countries
The function to generate a dictionary containing ISO_3166 - 1 country codes to names .
575
18
237,673
def unique_everseen(iterable, key=None):
    """
    The generator to list unique elements, preserving the order. Remember
    all elements ever seen. This was taken from the itertools recipes.
    """
    seen = set()
    if key is None:
        for element in iterable:
            if element not in seen:
                seen.add(element)
                yield element
    else:
        for element in iterable:
            marker = key(element)
            if marker not in seen:
                seen.add(marker)
                yield element
The generator to list unique elements preserving the order . Remember all elements ever seen . This was taken from the itertools recipes .
84
26
237,674
def get_nets_arin ( self , response ) : nets = [ ] # Find the first NetRange value. pattern = re . compile ( r'^NetRange:[^\S\n]+(.+)$' , re . MULTILINE ) temp = pattern . search ( response ) net_range = None net_range_start = None if temp is not None : net_range = temp . group ( 1 ) . strip ( ) net_range_start = temp . start ( ) # Iterate through all of the networks found, storing the CIDR value # and the start and end positions. for match in re . finditer ( r'^CIDR:[^\S\n]+(.+?,[^\S\n].+|.+)$' , response , re . MULTILINE ) : try : net = copy . deepcopy ( BASE_NET ) if len ( nets ) > 0 : temp = pattern . search ( response , match . start ( ) ) net_range = None net_range_start = None if temp is not None : net_range = temp . group ( 1 ) . strip ( ) net_range_start = temp . start ( ) if net_range is not None : if net_range_start < match . start ( ) or len ( nets ) > 0 : try : net [ 'range' ] = '{0} - {1}' . format ( ip_network ( net_range ) [ 0 ] . __str__ ( ) , ip_network ( net_range ) [ - 1 ] . __str__ ( ) ) if '/' in net_range else net_range except ValueError : # pragma: no cover net [ 'range' ] = net_range net [ 'cidr' ] = ', ' . join ( [ ip_network ( c . strip ( ) ) . __str__ ( ) for c in match . group ( 1 ) . split ( ', ' ) ] ) net [ 'start' ] = match . start ( ) net [ 'end' ] = match . end ( ) nets . append ( net ) except ValueError : pass return nets
The function for parsing network blocks from ARIN whois data .
459
13
237,675
def get_nets_lacnic(self, response):
    """
    The function for parsing network blocks from LACNIC whois data.

    Args:
        response: The whois response text to parse.

    Returns:
        list: dicts with 'cidr', 'range', 'start' and 'end' keys for each
        network found.
    """
    nets = []

    # Iterate through all of the networks found, storing the CIDR value
    # and the start and end positions.
    for match in re.finditer(
        r'^(inetnum|inet6num|route):[^\S\n]+(.+?,[^\S\n].+|.+)$',
        response,
        re.MULTILINE
    ):
        try:
            net = copy.deepcopy(BASE_NET)
            net_range = match.group(2).strip()

            # Fixed: removed the duplicated chained assignment
            # (net['range'] = net['range'] = ...).
            try:
                net['range'] = '{0} - {1}'.format(
                    ip_network(net_range)[0].__str__(),
                    ip_network(net_range)[-1].__str__()
                ) if '/' in net_range else net_range
            except ValueError:  # pragma: no cover
                net['range'] = net_range

            temp = []
            for addr in net_range.split(', '):
                count = addr.count('.')
                # Fixed: replaced identity comparison with an int literal
                # (count is not 0) with a value comparison.
                if count != 0 and count < 4:
                    # Pad partial IPv4 networks (e.g. 10.0/16) with .0
                    # octets so ip_network can parse them.
                    addr_split = addr.strip().split('/')
                    for i in range(count + 1, 4):
                        addr_split[0] += '.0'
                    addr = '/'.join(addr_split)
                temp.append(ip_network(addr.strip()).__str__())
            net['cidr'] = ', '.join(temp)
            net['start'] = match.start()
            net['end'] = match.end()
            nets.append(net)
        except ValueError:
            pass
    return nets
The function for parsing network blocks from LACNIC whois data .
379
15
237,676
def get_nets_other(self, response):
    """
    The function for parsing network blocks from generic whois data.

    Args:
        response: The whois response text to parse.

    Returns:
        list: dicts with 'cidr', 'range', 'start' and 'end' keys for each
        network found.
    """
    nets = []

    # Iterate through all of the networks found, storing the CIDR value
    # and the start and end positions.
    for match in re.finditer(
        r'^(inetnum|inet6num|route):[^\S\n]+((.+?)[^\S\n]-[^\S\n](.+)|'
        '.+)$',
        response,
        re.MULTILINE
    ):
        try:
            net = copy.deepcopy(BASE_NET)
            net_range = match.group(2).strip()

            # Fixed: removed the duplicated chained assignment
            # (net['range'] = net['range'] = ...).
            try:
                net['range'] = '{0} - {1}'.format(
                    ip_network(net_range)[0].__str__(),
                    ip_network(net_range)[-1].__str__()
                ) if '/' in net_range else net_range
            except ValueError:  # pragma: no cover
                net['range'] = net_range

            if match.group(3) and match.group(4):
                # Explicit start - end range: summarize and collapse to
                # the minimal CIDR list.
                addrs = []
                addrs.extend(summarize_address_range(
                    ip_address(match.group(3).strip()),
                    ip_address(match.group(4).strip())
                ))
                cidr = ', '.join(
                    [i.__str__() for i in collapse_addresses(addrs)]
                )
            else:
                cidr = ip_network(net_range).__str__()

            net['cidr'] = cidr
            net['start'] = match.start()
            net['end'] = match.end()
            nets.append(net)
        except (ValueError, TypeError):
            pass
    return nets
The function for parsing network blocks from generic whois data .
396
12
237,677
def convert_default(self, field, **params):
    """
    Return raw field.
    """
    # Use the first mapped field type whose class matches the input.
    for model_type, mapped_field in self.TYPE_MAPPING:
        if isinstance(field, model_type):
            return mapped_field(**params)
    # No mapping matched; fall back to a raw field.
    return fields.Raw(**params)
Return raw field .
58
4
237,678
def make_instance(self, data):
    """
    Build object from data.
    """
    if not self.opts.model:
        # No model configured; hand back the raw data unchanged.
        return data
    if self.instance is not None:
        # Update the existing instance in place.
        for attr, value in data.items():
            setattr(self.instance, attr, value)
        return self.instance
    # Otherwise construct a fresh model instance.
    return self.opts.model(**data)
Build object from data .
67
5
237,679
def _extract(self, stim):
    """
    Returns all words.
    """
    # Collect (text, onset, duration) for every element, then transpose
    # into parallel lists.
    triples = [(el.text, el.onset, el.duration) for el in stim.elements]
    vals, onsets, durations = map(list, zip(*triples))
    return ExtractorResult(vals, stim, self, ['word'], onsets, durations)
Returns all words .
76
4
237,680
def get_filename(self):
    """
    Return the source filename of the current Stim.

    Yields a path to an on-disk file. If no valid source file exists, the
    stim is saved to a temporary file which is removed afterwards.
    """
    if self.filename is None or not os.path.exists(self.filename):
        # Fixed: tempfile.mktemp() is race-prone/insecure; mkstemp()
        # atomically creates the file before save() overwrites it.
        fd, tf = tempfile.mkstemp(suffix=self._default_file_extension)
        os.close(fd)
        try:
            self.save(tf)
            yield tf
        finally:
            # Fixed: clean up the temp file even if the consumer raises.
            os.remove(tf)
    else:
        yield self.filename
Return the source filename of the current Stim .
66
9
237,681
def get_stim(self, type_, return_all=False):
    """
    Returns component elements of the specified type.
    """
    # The type may be given by name; resolve it to a class first.
    if isinstance(type_, string_types):
        type_ = _get_stim_class(type_)
    found = []
    for element in self.elements:
        if isinstance(element, type_):
            if not return_all:
                # Short-circuit on the first match.
                return element
            found.append(element)
    if not found:
        return [] if return_all else None
    return found
Returns component elements of the specified type .
92
8
237,682
def has_types(self, types, all_=True):
    """
    Check whether the current component list matches all Stim types in
    the types argument.
    """
    # Require every type (all) or at least one (any) to have a match.
    combine = all if all_ else any
    matches = [self.get_stim(t) for t in listify(types)]
    return combine(matches)
Check whether the current component list matches all Stim types in the types argument .
45
15
237,683
def save(self, path):
    """
    Save clip data to file.

    Args:
        path: Destination audio file path.
    """
    self.clip.write_audiofile(path, fps=self.sampling_rate)
Save clip data to file .
27
6
237,684
def get_converter(in_type, out_type, *args, **kwargs):
    """
    Scans the list of available Converters and returns an instantiation
    of the first one whose input and output types match those passed in.

    Args:
        in_type: The class to convert from.
        out_type: The class, or list of classes in order of preference,
            to convert to.
        args, kwargs: Passed to the matching Converter's constructor.

    Returns:
        A Converter instance, or None if no match is found.
    """
    convs = pliers.converters.__all__

    # If config includes default converters for this combination, try them
    # first
    out_type = listify(out_type)[::-1]
    default_convs = config.get_option('default_converters')

    for ot in out_type:
        conv_str = '%s->%s' % (in_type.__name__, ot.__name__)
        if conv_str in default_convs:
            # Prepend the configured defaults so they are tried first.
            convs = list(default_convs[conv_str]) + convs

    for name in convs:
        cls = getattr(pliers.converters, name)
        if not issubclass(cls, Converter):
            continue
        # Environment-keyed converters are only usable when available.
        available = cls.available if issubclass(
            cls, EnvironmentKeyMixin) else True
        if cls._input_type == in_type and cls._output_type in out_type \
                and available:
            conv = cls(*args, **kwargs)
            return conv
    return None
Scans the list of available Converters and returns an instantiation of the first one whose input and output types match those passed in .
250
27
237,685
def create_graph():
    """
    Creates a graph from saved GraphDef file and returns a saver.
    """
    # Creates graph from saved graph_def.pb.
    with tf.gfile.FastGFile(os.path.join(
            FLAGS.model_dir, 'classify_image_graph_def.pb'), 'rb') as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        # Import into the default graph with no name prefix.
        _ = tf.import_graph_def(graph_def, name='')
Creates a graph from saved GraphDef file and returns a saver .
107
15
237,686
def run_inference_on_image(image):
    """
    Runs inference on an image.

    Args:
        image: Path to the image file.

    Prints the top predicted labels and scores to stdout.
    """
    if not tf.gfile.Exists(image):
        tf.logging.fatal('File does not exist %s', image)
    image_data = tf.gfile.FastGFile(image, 'rb').read()

    # Creates graph from saved GraphDef.
    create_graph()

    with tf.Session() as sess:
        # Some useful tensors:
        # 'softmax:0': A tensor containing the normalized prediction across
        #   1000 labels.
        # 'pool_3:0': A tensor containing the next-to-last layer containing 2048
        #   float description of the image.
        # 'DecodeJpeg/contents:0': A tensor containing a string providing JPEG
        #   encoding of the image.
        # Runs the softmax tensor by feeding the image_data as input to the graph.
        softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
        predictions = sess.run(softmax_tensor,
                               {'DecodeJpeg/contents:0': image_data})
        predictions = np.squeeze(predictions)

        # Creates node ID --> English string lookup.
        node_lookup = NodeLookup()

        # Indices of the highest-scoring predictions, best first.
        top_k = predictions.argsort()[-FLAGS.num_top_predictions:][::-1]
        for node_id in top_k:
            human_string = node_lookup.id_to_string(node_id)
            score = predictions[node_id]
            print('%s (score = %.5f)' % (human_string, score))
Runs inference on an image .
362
7
237,687
def load(self, label_lookup_path, uid_lookup_path):
    """
    Loads a human readable English name for each softmax node.

    Args:
        label_lookup_path: File mapping string UID to integer node ID.
        uid_lookup_path: File mapping string UID to human-readable string.

    Returns:
        dict from integer node ID to human-readable string.
    """
    if not tf.gfile.Exists(uid_lookup_path):
        tf.logging.fatal('File does not exist %s', uid_lookup_path)
    if not tf.gfile.Exists(label_lookup_path):
        tf.logging.fatal('File does not exist %s', label_lookup_path)

    # Loads mapping from string UID to human-readable string
    proto_as_ascii_lines = tf.gfile.GFile(uid_lookup_path).readlines()
    uid_to_human = {}
    p = re.compile(r'[n\d]*[ \S,]*')
    for line in proto_as_ascii_lines:
        parsed_items = p.findall(line)
        uid = parsed_items[0]
        human_string = parsed_items[2]
        uid_to_human[uid] = human_string

    # Loads mapping from string UID to integer node ID.
    node_id_to_uid = {}
    proto_as_ascii = tf.gfile.GFile(label_lookup_path).readlines()
    for line in proto_as_ascii:
        if line.startswith(' target_class:'):
            target_class = int(line.split(': ')[1])
        if line.startswith(' target_class_string:'):
            # Strip the surrounding quote and trailing quote+newline.
            target_class_string = line.split(': ')[1]
            node_id_to_uid[target_class] = target_class_string[1:-2]

    # Loads the final mapping of integer node ID to human-readable string
    node_id_to_name = {}
    for key, val in node_id_to_uid.items():
        if val not in uid_to_human:
            tf.logging.fatal('Failed to locate: %s', val)
        name = uid_to_human[val]
        node_id_to_name[key] = name
    return node_id_to_name
Loads a human readable English name for each softmax node .
486
13
237,688
def fetch_dictionary(name, url=None, format=None, index=0, rename=None,
                     save=True, force_retrieve=False):
    """
    Retrieve a dictionary of text norms from the web or local storage.

    Args:
        name: Name of the dictionary (a key in the presets dict and the
            local cache filename).
        url: Optional download URL if the dictionary is not a preset.
        format: Optional file format passed to the downloader.
        index: Column (name or position) to use as the index.
        rename: Optional column-rename mapping passed to the downloader.
        save: Whether to cache the downloaded dictionary locally.
        force_retrieve: If True, ignore any local cache and re-download.

    Returns:
        pandas.DataFrame indexed by the requested column.

    Raises:
        ValueError: If the dictionary is unknown and no URL is provided.
    """
    file_path = os.path.join(_get_dictionary_path(), name + '.csv')

    # Use the locally cached copy when allowed and present.
    if not force_retrieve and os.path.exists(file_path):
        df = pd.read_csv(file_path)
        index = datasets[name].get('index', df.columns[index])
        return df.set_index(index)

    # Fill in download settings from the presets where available.
    if name in datasets:
        url = datasets[name]['url']
        format = datasets[name].get('format', format)
        index = datasets[name].get('index', index)
        # Fixed: 'rename' was previously looked up on the top-level
        # datasets dict instead of this dataset's entry.
        rename = datasets[name].get('rename', rename)
    if url is None:
        raise ValueError("Dataset '%s' not found in local storage or presets, "
                         "and no download URL provided." % name)

    data = _download_dictionary(url, format=format, rename=rename)
    if isinstance(index, int):
        index = data.columns[index]
    data = data.set_index(index)

    if save:
        data.to_csv(file_path, encoding='utf-8')
    return data
Retrieve a dictionary of text norms from the web or local storage .
311
14
237,689
def _to_df(self, result, handle_annotations=None):
    """
    Converts a Google API Face JSON response into a Pandas Dataframe.

    Args:
        result: Object whose _data attribute holds the raw face
            annotation dicts.
        handle_annotations: If 'first', only the first annotation is
            kept.

    Returns:
        pandas.DataFrame with one row per face annotation.
    """
    annotations = result._data
    if handle_annotations == 'first':
        annotations = [annotations[0]]

    face_results = []
    for i, annotation in enumerate(annotations):
        data_dict = {}
        for field, val in annotation.items():
            if 'Confidence' in field:
                data_dict['face_' + field] = val
            elif 'oundingPoly' in field:
                # Matches boundingPoly / fdBoundingPoly field variants.
                for j, vertex in enumerate(val['vertices']):
                    for dim in ['x', 'y']:
                        name = '%s_vertex%d_%s' % (field, j + 1, dim)
                        # Missing vertex coordinates become NaN.
                        val = vertex[dim] if dim in vertex else np.nan
                        data_dict[name] = val
            elif field == 'landmarks':
                for lm in val:
                    name = 'landmark_' + lm['type'] + '_%s'
                    lm_pos = {name % k: v
                              for (k, v) in lm['position'].items()}
                    data_dict.update(lm_pos)
            else:
                data_dict[field] = val
        face_results.append(data_dict)
    return pd.DataFrame(face_results)
Converts a Google API Face JSON response into a Pandas Dataframe .
290
15
237,690
def correlation_matrix(df):
    """
    Returns a pandas DataFrame with the pair-wise correlations of the
    columns.
    """
    names = df.columns.tolist()
    coef = np.corrcoef(df, rowvar=0)
    return pd.DataFrame(coef, columns=names, index=names)
Returns a pandas DataFrame with the pair - wise correlations of the columns .
54
16
237,691
def eigenvalues(df):
    """
    Returns a pandas Series with eigenvalues of the correlation matrix.
    """
    corr_mat = np.corrcoef(df, rowvar=0)
    values = np.linalg.eigvals(corr_mat)
    return pd.Series(values, df.columns, name='Eigenvalue')
Returns a pandas Series with eigenvalues of the correlation matrix .
64
14
237,692
def condition_indices(df):
    """
    Returns a pandas Series with condition indices of the df columns.
    """
    eigvals = eigenvalues(df)
    # Condition index: sqrt of the largest eigenvalue over each one.
    cond = np.sqrt(eigvals.max() / eigvals)
    return pd.Series(cond, df.columns, name='Condition index')
Returns a pandas Series with condition indices of the df columns .
63
13
237,693
def mahalanobis_distances(df, axis=0):
    """
    Returns a pandas Series with Mahalanobis distances for each sample on
    the axis.

    Args:
        df: DataFrame of samples.
        axis: 0 to treat rows as samples, 1 to treat columns as samples.

    Returns:
        pandas.Series of distances; all NaN when the covariance matrix
        is singular.
    """
    df = df.transpose() if axis == 1 else df
    means = df.mean()
    try:
        inv_cov = np.linalg.inv(df.cov())
    except LinAlgError:
        # Singular covariance matrix; distances are undefined.
        return pd.Series([np.NAN] * len(df.index), df.index,
                         name='Mahalanobis')
    dists = []
    for i, sample in df.iterrows():
        dists.append(mahalanobis(sample, means, inv_cov))
    return pd.Series(dists, df.index, name='Mahalanobis')
Returns a pandas Series with Mahalanobis distances for each sample on the axis .
156
18
237,694
def summary(self, stdout=True, plot=False):
    """
    Displays diagnostics to the user

    Args:
        stdout: Whether to print the result tables to standard output.
        plot: Whether to render plots (requires seaborn).
    """
    if stdout:
        print('Collinearity summary:')
        print(pd.concat([self.results['Eigenvalues'],
                         self.results['ConditionIndices'],
                         self.results['VIFs'],
                         self.results['CorrelationMatrix']],
                        axis=1))
        print('Outlier summary:')
        print(self.results['RowMahalanobisDistances'])
        print(self.results['ColumnMahalanobisDistances'])
        print('Validity summary:')
        print(self.results['Variances'])
    if plot:
        verify_dependencies('seaborn')
        for key, result in self.results.items():
            if key == 'CorrelationMatrix':
                # Correlations render best as a heatmap.
                ax = plt.axes()
                sns.heatmap(result, cmap='Blues', ax=ax)
                ax.set_title(key)
                sns.plt.show()
            else:
                result.plot(kind='bar', title=key)
                plt.show()
Displays diagnostics to the user
254
7
237,695
def add_nodes(self, nodes, parent=None, mode='horizontal'):
    """
    Adds one or more nodes to the current graph.
    """
    for spec in nodes:
        node_args = self._parse_node_args(spec)
        if mode == 'horizontal':
            # Siblings: all nodes share the same parent.
            self.add_node(parent=parent, **node_args)
        elif mode == 'vertical':
            # Chain: each node becomes the parent of the next.
            parent = self.add_node(parent=parent, return_node=True,
                                   **node_args)
        else:
            raise ValueError("Invalid mode for adding nodes to a graph:"
                             "%s" % mode)
Adds one or more nodes to the current graph .
121
10
237,696
def add_node(self, transformer, name=None, children=None, parent=None,
             parameters={}, return_node=False):
    """
    Adds a node to the current graph.
    """
    node = Node(transformer, name, **parameters)
    self.nodes[node.id] = node

    if parent is None:
        # No parent: this node is a root of the graph.
        self.roots.append(node)
    else:
        self.nodes[parent.id].add_child(node)

    if children is not None:
        # Recursively attach any declared children under this node.
        self.add_nodes(children, parent=node)

    if return_node:
        return node
Adds a node to the current graph .
114
8
237,697
def run(self, stim, merge=True, **merge_kwargs):
    """
    Executes the graph by calling all Transformers in sequence.

    Args:
        stim: The input fed into every root node.
        merge: Whether to merge the collected results via merge_results.
        merge_kwargs: Passed through to merge_results.

    Returns:
        The merged results, or the raw list of results if merge is False.
    """
    results = list(chain(*[self.run_node(n, stim) for n in self.roots]))
    results = list(flatten(results))
    self._results = results  # For use in plotting
    return merge_results(results, **merge_kwargs) if merge else results
Executes the graph by calling all Transformers in sequence .
85
11
237,698
def run_node(self, node, stim):
    """
    Executes the Transformer at a specific node.

    Args:
        node: The node (or its string id) to execute.
        stim: The input passed to the node's transformer.

    Returns:
        list of results collected from this node and its children.
    """
    # Nodes may be referenced by their id.
    if isinstance(node, string_types):
        node = self.nodes[node]

    result = node.transformer.transform(stim)
    if node.is_leaf():
        return listify(result)

    stim = result
    # If result is a generator, the first child will destroy the
    # iterable, so cache via list conversion
    if len(node.children) > 1 and isgenerator(stim):
        stim = list(stim)
    return list(chain(*[self.run_node(c, stim) for c in node.children]))
Executes the Transformer at a specific node .
130
10
237,699
def draw(self, filename, color=True):
    """
    Render a plot of the graph via pygraphviz.

    Args:
        filename: Path to write the rendered image to.
        color: Whether to color nodes by type.
    """
    verify_dependencies(['pgv'])
    if not hasattr(self, '_results'):
        raise RuntimeError("Graph cannot be drawn before it is executed. "
                           "Try calling run() first.")
    g = pgv.AGraph(directed=True)
    g.node_attr['colorscheme'] = 'set312'

    for elem in self._results:
        if not hasattr(elem, 'history'):
            continue
        log = elem.history
        # Walk the transformation history from newest to oldest entry.
        while log:
            # Configure nodes
            source_from = log.parent[6] if log.parent else ''
            s_node = hash((source_from, log[2]))
            # NOTE(review): stim_list is not defined in this block;
            # presumably a module-level list of Stim names — confirm.
            s_color = stim_list.index(log[2])
            s_color = s_color % 12 + 1

            t_node = hash((log[6], log[7]))
            t_style = 'filled,' if color else ''
            t_style += 'dotted' if log.implicit else ''
            if log[6].endswith('Extractor'):
                t_color = '#0082c8'
            elif log[6].endswith('Filter'):
                t_color = '#e6194b'
            else:
                t_color = '#3cb44b'

            r_node = hash((log[6], log[5]))
            r_color = stim_list.index(log[5])
            r_color = r_color % 12 + 1

            # Add nodes
            if color:
                g.add_node(s_node, label=log[2], shape='ellipse',
                           style='filled', fillcolor=s_color)
                g.add_node(t_node, label=log[6], shape='box',
                           style=t_style, fillcolor=t_color)
                g.add_node(r_node, label=log[5], shape='ellipse',
                           style='filled', fillcolor=r_color)
            else:
                g.add_node(s_node, label=log[2], shape='ellipse')
                g.add_node(t_node, label=log[6], shape='box',
                           style=t_style)
                g.add_node(r_node, label=log[5], shape='ellipse')

            # Add edges
            g.add_edge(s_node, t_node, style=t_style)
            g.add_edge(t_node, r_node, style=t_style)
            log = log.parent
    g.draw(filename, prog='dot')
Render a plot of the graph via pygraphviz .
597
12