idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
19,400
def GetVolumeByIndex(self, volume_index):
  """Retrieves a specific volume based on the index.

  Args:
    volume_index (int): index of the volume.

  Returns:
    Volume: a volume or None if the index is out of bounds.
  """
  # Parse lazily on first access.
  if not self._is_parsed:
    self._Parse()
    self._is_parsed = True

  if not 0 <= volume_index < len(self._volume_identifiers):
    return None

  identifier = self._volume_identifiers[volume_index]
  return self._volumes[identifier]
Retrieves a specific volume based on the index .
19,401
def _GetUncompressedStreamSize(self):
  """Retrieves the uncompressed stream size.

  Decompresses the whole stream once to determine its total size.

  Returns:
    int: uncompressed stream size.
  """
  self._file_object.seek(0, os.SEEK_SET)

  self._decompressor = self._GetDecompressor()
  self._uncompressed_data = b''

  stream_size = 0
  offset = 0
  end_offset = self._file_object.get_size()

  while offset < end_offset:
    read_count = self._ReadCompressedData(self._COMPRESSED_DATA_BUFFER_SIZE)
    if read_count == 0:
      break

    offset += read_count
    stream_size += self._uncompressed_data_size

  return stream_size
Retrieves the uncompressed stream size .
19,402
def GetVShadowStoreByPathSpec(self, path_spec):
  """Retrieves a VSS store for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    pyvshadow.store: a VSS store or None if not available.
  """
  store_index = vshadow.VShadowPathSpecGetStoreIndex(path_spec)
  if store_index is not None:
    return self._vshadow_volume.get_store(store_index)
  return None
Retrieves a VSS store for a path specification .
19,403
def deprecated(function):
  """Decorator to mark functions or methods as deprecated.

  Args:
    function (function): function or method to mark as deprecated.

  Returns:
    function: wrapper that issues a DeprecationWarning before delegating.
  """
  # Local import so the decorator is self-contained.
  import functools

  # functools.wraps copies __name__, __doc__ and __dict__ like the previous
  # manual assignments, and additionally __module__, __qualname__ and
  # __wrapped__, so introspection and help() keep working.
  @functools.wraps(function)
  def IssueDeprecationWarning(*args, **kwargs):
    # DeprecationWarning is hidden by default; re-enable the default filter
    # so the warning is actually shown once per location.
    warnings.simplefilter('default', DeprecationWarning)
    warnings.warn('Call to deprecated function: {0:s}.'.format(
        function.__name__), category=DeprecationWarning, stacklevel=2)

    return function(*args, **kwargs)

  return IssueDeprecationWarning
Decorator to mark functions or methods as deprecated .
19,404
def _CheckFileEntryType ( self , file_entry ) : if not self . _file_entry_types : return None return ( self . _CheckIsDevice ( file_entry ) or self . _CheckIsDirectory ( file_entry ) or self . _CheckIsFile ( file_entry ) or self . _CheckIsLink ( file_entry ) or self . _CheckIsPipe ( file_entry ) or self . _CheckIsSocket ( file_entry ) )
Checks the file entry type find specifications .
19,405
def _CheckIsDevice(self, file_entry):
  """Checks the is_device find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches as a device.
  """
  if definitions.FILE_ENTRY_TYPE_DEVICE in self._file_entry_types:
    return file_entry.IsDevice()
  return False
Checks the is_device find specification .
19,406
def _CheckIsDirectory(self, file_entry):
  """Checks the is_directory find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches as a directory.
  """
  if definitions.FILE_ENTRY_TYPE_DIRECTORY in self._file_entry_types:
    return file_entry.IsDirectory()
  return False
Checks the is_directory find specification .
19,407
def _CheckIsFile(self, file_entry):
  """Checks the is_file find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches as a regular file.
  """
  if definitions.FILE_ENTRY_TYPE_FILE in self._file_entry_types:
    return file_entry.IsFile()
  return False
Checks the is_file find specification .
19,408
def _CheckIsLink(self, file_entry):
  """Checks the is_link find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches as a link.
  """
  if definitions.FILE_ENTRY_TYPE_LINK in self._file_entry_types:
    return file_entry.IsLink()
  return False
Checks the is_link find specification .
19,409
def _CheckIsPipe(self, file_entry):
  """Checks the is_pipe find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches as a pipe.
  """
  if definitions.FILE_ENTRY_TYPE_PIPE in self._file_entry_types:
    return file_entry.IsPipe()
  return False
Checks the is_pipe find specification .
19,410
def _CheckIsSocket(self, file_entry):
  """Checks the is_socket find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches as a socket.
  """
  if definitions.FILE_ENTRY_TYPE_SOCKET in self._file_entry_types:
    return file_entry.IsSocket()
  return False
Checks the is_socket find specification .
19,411
def _CheckLocation(self, file_entry, search_depth):
  """Checks the location find specification.

  Compares the file entry name against the location segment at the given
  search depth. String segments are compiled (regex mode) or lowercased
  (case-insensitive mode) on first use and cached back into
  self._location_segments, so subsequent calls at the same depth reuse
  the prepared segment.

  Args:
    file_entry (FileEntry): file entry.
    search_depth (int): number of location path segments to compare.

  Returns:
    bool: True if the file entry matches the location at this depth.
  """
  if self._location_segments is None:
    return False

  if search_depth < 0 or search_depth > self._number_of_location_segments:
    return False

  if search_depth == 0:
    # Depth 0 represents the root; there is no segment to compare.
    segment_name = ''
  else:
    segment_name = self._location_segments[search_depth - 1]

    if self._is_regex:
      if isinstance(segment_name, py2to3.STRING_TYPES):
        # re.DOTALL lets '.' match newline; re.UNICODE makes '\w' et al.
        # Unicode safe.
        flags = re.DOTALL | re.UNICODE
        if not self._is_case_sensitive:
          flags |= re.IGNORECASE

        try:
          # Anchor the pattern so the whole segment must match.
          segment_name = r'^{0:s}$'.format(segment_name)
          segment_name = re.compile(segment_name, flags=flags)
        except sre_constants.error:
          # An invalid regular expression never matches.
          return False

        # Cache the compiled pattern for subsequent comparisons.
        self._location_segments[search_depth - 1] = segment_name

    elif not self._is_case_sensitive:
      segment_name = segment_name.lower()
      # Cache the lowercased segment for subsequent comparisons.
      self._location_segments[search_depth - 1] = segment_name

  if search_depth > 0:
    if self._is_regex:
      if not segment_name.match(file_entry.name):
        return False

    elif self._is_case_sensitive:
      if segment_name != file_entry.name:
        return False

    elif segment_name != file_entry.name.lower():
      return False

  return True
Checks the location find specification .
19,412
def Matches(self, file_entry, search_depth):
  """Determines if the file entry matches the find specification.

  Args:
    file_entry (FileEntry): file entry.
    search_depth (int): number of location path segments to compare.

  Returns:
    tuple[bool, bool]: whether the file entry matches and whether the
        location matched (None when no location was specified).
  """
  if self._location_segments is None:
    location_match = None
  else:
    location_match = self._CheckLocation(file_entry, search_depth)
    if not location_match:
      return False, location_match

    # Only a full-depth location match can produce an overall match.
    if search_depth != self._number_of_location_segments:
      return False, location_match

  # Each check returns None when it is not applicable.
  for check in (self._CheckFileEntryType, self._CheckIsAllocated):
    check_result = check(file_entry)
    if check_result is not None and not check_result:
      return False, location_match

  return True, location_match
Determines if the file entry matches the find specification .
19,413
def PrepareMatches(self, file_system):
  """Prepares the find specification for matching.

  Splits the location (or location regex) into path segments using the
  file system path separator.

  Args:
    file_system (FileSystem): file system.
  """
  separator = file_system.PATH_SEPARATOR

  if self._location is not None:
    self._location_segments = self._SplitPath(self._location, separator)

  elif self._location_regex is not None:
    if separator == '\\':
      # Escape the backslash so it can be used inside a regular expression.
      separator = '\\\\'
    self._location_segments = self._SplitPath(self._location_regex, separator)

  if self._location_segments is not None:
    self._number_of_location_segments = len(self._location_segments)
Prepares the find specification for matching.
19,414
def _FindInFileEntry ( self , file_entry , find_specs , search_depth ) : sub_find_specs = [ ] for find_spec in find_specs : match , location_match = find_spec . Matches ( file_entry , search_depth ) if match : yield file_entry . path_spec if location_match != False and not find_spec . AtMaximumDepth ( search_depth ) : sub_find_specs . append ( find_spec ) if not sub_find_specs : return search_depth += 1 try : for sub_file_entry in file_entry . sub_file_entries : for matching_path_spec in self . _FindInFileEntry ( sub_file_entry , sub_find_specs , search_depth ) : yield matching_path_spec except errors . AccessError : pass
Searches for matching file entries within the file entry .
19,415
def Find(self, find_specs=None):
  """Searches for matching file entries within the file system.

  Args:
    find_specs (Optional[list[FindSpec]]): find specifications, where
        None or an empty list means all file entries match.

  Yields:
    PathSpec: path specification of a matching file entry.
  """
  # Bind a fresh list instead of appending to the argument: appending
  # raises AttributeError for the default of None and would mutate a
  # caller-provided list.
  if not find_specs:
    find_specs = [FindSpec()]

  for find_spec in find_specs:
    find_spec.PrepareMatches(self._file_system)

  if path_spec_factory.Factory.IsSystemLevelTypeIndicator(
      self._file_system.type_indicator):
    file_entry = self._file_system.GetFileEntryByPathSpec(self._mount_point)
  else:
    file_entry = self._file_system.GetRootFileEntry()

  for matching_path_spec in self._FindInFileEntry(file_entry, find_specs, 0):
    yield matching_path_spec
Searches for matching file entries within the file system .
19,416
def GetRelativePath(self, path_spec):
  """Returns the relative path based on a resolved path specification.

  For system-level path specifications the mount point prefix is stripped
  from the location; otherwise the parent of the path specification must
  be the mount point itself.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    str: relative path prefixed with the file system path separator.

  Raises:
    PathSpecError: if the path specification is missing a location or
        parent, or does not contain the mount point.
  """
  location = getattr(path_spec, 'location', None)
  if location is None:
    raise errors.PathSpecError('Path specification missing location.')

  if path_spec_factory.Factory.IsSystemLevelTypeIndicator(
      self._file_system.type_indicator):
    # System-level: the location must be inside the mount point path.
    if not location.startswith(self._mount_point.location):
      raise errors.PathSpecError(
          'Path specification does not contain mount point.')
  else:
    # Non system-level: the parent path specification must be the mount
    # point itself.
    if not hasattr(path_spec, 'parent'):
      raise errors.PathSpecError('Path specification missing parent.')

    if path_spec.parent != self._mount_point:
      raise errors.PathSpecError(
          'Path specification does not contain mount point.')

  path_segments = self._file_system.SplitPath(location)

  if path_spec_factory.Factory.IsSystemLevelTypeIndicator(
      self._file_system.type_indicator):
    # Strip the mount point prefix segments.
    mount_point_path_segments = self._file_system.SplitPath(
        self._mount_point.location)
    path_segments = path_segments[len(mount_point_path_segments):]

  return '{0:s}{1:s}'.format(
      self._file_system.PATH_SEPARATOR,
      self._file_system.PATH_SEPARATOR.join(path_segments))
Returns the relative path based on a resolved path specification .
19,417
def _PromptUserForEncryptedVolumeCredential(
    self, scan_context, locked_scan_node, output_writer):
  """Prompts the user to provide a credential for an encrypted volume.

  Reads the selection and credential data interactively from stdin and
  loops until the volume is unlocked or the user selects 'skip'.

  Args:
    scan_context (SourceScannerContext): source scanner context.
    locked_scan_node (SourceScanNode): locked scan node.
    output_writer (OutputWriter): output writer.
  """
  credentials = credentials_manager.CredentialsManager.GetCredentials(
      locked_scan_node.path_spec)

  if locked_scan_node.type_indicator == (
      definitions.TYPE_INDICATOR_APFS_CONTAINER):
    line = 'Found an APFS encrypted volume.'
  elif locked_scan_node.type_indicator == definitions.TYPE_INDICATOR_BDE:
    line = 'Found a BitLocker encrypted volume.'
  elif locked_scan_node.type_indicator == definitions.TYPE_INDICATOR_FVDE:
    line = 'Found a CoreStorage (FVDE) encrypted volume.'
  else:
    line = 'Found an encrypted volume.'
  output_writer.WriteLine(line)

  # Offer 'skip' as an additional pseudo credential.
  credentials_list = list(credentials.CREDENTIALS)
  credentials_list.append('skip')

  output_writer.WriteLine('Supported credentials:')
  output_writer.WriteLine('')
  for index, name in enumerate(credentials_list):
    output_writer.WriteLine(' {0:d}. {1:s}'.format(index + 1, name))
  output_writer.WriteLine('')

  result = False
  while not result:
    output_writer.WriteString('Select a credential to unlock the volume: ')
    input_line = sys.stdin.readline()
    input_line = input_line.strip()

    if input_line in credentials_list:
      credential_identifier = input_line
    else:
      # Accept a 1-based numeric selection as well as the credential name.
      try:
        credential_identifier = int(input_line, 10)
        credential_identifier = credentials_list[credential_identifier - 1]
      except (IndexError, ValueError):
        output_writer.WriteLine(
            'Unsupported credential: {0:s}'.format(input_line))
        continue

    if credential_identifier == 'skip':
      break

    getpass_string = 'Enter credential data: '
    # Python 2 on Windows needs the prompt as a byte string.
    if sys.platform.startswith('win') and sys.version_info[0] < 3:
      getpass_string = self._EncodeString(getpass_string)

    credential_data = getpass.getpass(getpass_string)
    output_writer.WriteLine('')

    result = self._source_scanner.Unlock(
        scan_context, locked_scan_node.path_spec, credential_identifier,
        credential_data)

    if not result:
      output_writer.WriteLine('Unable to unlock volume.')
      output_writer.WriteLine('')
Prompts the user to provide a credential for an encrypted volume .
19,418
def Analyze(self, source_path, output_writer):
  """Analyzes the source.

  Repeatedly scans the source until the scanner reports no more updates,
  prompting for credentials for any locked (encrypted) volumes found
  along the way.

  Args:
    source_path (str): path of the source.
    output_writer (OutputWriter): output writer.

  Raises:
    RuntimeError: if the source path does not exist.
  """
  if not os.path.exists(source_path):
    raise RuntimeError('No such source: {0:s}.'.format(source_path))

  scan_context = source_scanner.SourceScannerContext()
  scan_path_spec = None
  scan_step = 0

  scan_context.OpenSourcePath(source_path)

  while True:
    self._source_scanner.Scan(
        scan_context, auto_recurse=self._auto_recurse,
        scan_path_spec=scan_path_spec)

    if not scan_context.updated:
      break

    # Without auto recurse, report every intermediate scan step.
    if not self._auto_recurse:
      output_writer.WriteScanContext(scan_context, scan_step=scan_step)
    scan_step += 1

    # Plain files and directories need no further scanning.
    if scan_context.source_type in [
        definitions.SOURCE_TYPE_DIRECTORY, definitions.SOURCE_TYPE_FILE]:
      break

    for locked_scan_node in scan_context.locked_scan_nodes:
      self._PromptUserForEncryptedVolumeCredential(
          scan_context, locked_scan_node, output_writer)

    if not self._auto_recurse:
      scan_node = scan_context.GetUnscannedScanNode()
      if not scan_node:
        return
      scan_path_spec = scan_node.path_spec

  if self._auto_recurse:
    output_writer.WriteScanContext(scan_context)
Analyzes the source .
19,419
def WriteScanContext(self, scan_context, scan_step=None):
  """Writes the source scanner context to stdout.

  Args:
    scan_context (SourceScannerContext): source scanner context.
    scan_step (Optional[int]): scan step number, if any.
  """
  if scan_step is not None:
    print('Scan step: {0:d}'.format(scan_step))

  print('Source type\t\t: {0:s}'.format(scan_context.source_type))
  print('')

  self.WriteScanNode(scan_context, scan_context.GetRootScanNode())
  print('')
Writes the source scanner context to stdout .
19,420
def WriteScanNode(self, scan_context, scan_node, indentation=''):
  """Writes the source scanner node to stdout.

  Recursively writes the node and its sub nodes with increasing
  indentation.

  Args:
    scan_context (SourceScannerContext): source scanner context.
    scan_node (SourceScanNode): scan node.
    indentation (Optional[str]): leading indentation of the line.
  """
  if not scan_node:
    return

  path_spec = scan_node.path_spec
  details = []

  part_index = getattr(path_spec, 'part_index', None)
  if part_index is not None:
    details.append('{0:d}'.format(part_index))

  store_index = getattr(path_spec, 'store_index', None)
  if store_index is not None:
    details.append('{0:d}'.format(store_index))

  start_offset = getattr(path_spec, 'start_offset', None)
  if start_offset is not None:
    details.append('start offset: {0:d} (0x{0:08x})'.format(start_offset))

  location = getattr(path_spec, 'location', None)
  if location is not None:
    details.append('location: {0:s}'.format(location))

  flags = ' [LOCKED]' if scan_node in scan_context.locked_scan_nodes else ''

  print('{0:s}{1:s}: {2:s}{3:s}'.format(
      indentation, path_spec.type_indicator, ', '.join(details), flags))

  sub_indentation = ' {0:s}'.format(indentation)
  for sub_scan_node in scan_node.sub_nodes:
    self.WriteScanNode(scan_context, sub_scan_node, indentation=sub_indentation)
Writes the source scanner node to stdout .
19,421
def _CalculateHashDataStream ( self , file_entry , data_stream_name ) : hash_context = hashlib . sha256 ( ) try : file_object = file_entry . GetFileObject ( data_stream_name = data_stream_name ) except IOError as exception : logging . warning ( ( 'Unable to open path specification:\n{0:s}' 'with error: {1!s}' ) . format ( file_entry . path_spec . comparable , exception ) ) return None if not file_object : return None try : data = file_object . read ( self . _READ_BUFFER_SIZE ) while data : hash_context . update ( data ) data = file_object . read ( self . _READ_BUFFER_SIZE ) except IOError as exception : logging . warning ( ( 'Unable to read from path specification:\n{0:s}' 'with error: {1!s}' ) . format ( file_entry . path_spec . comparable , exception ) ) return None finally : file_object . close ( ) return hash_context . hexdigest ( )
Calculates a message digest hash of the data of the file entry .
19,422
def _CalculateHashesFileEntry ( self , file_system , file_entry , parent_full_path , output_writer ) : full_path = file_system . JoinPath ( [ parent_full_path , file_entry . name ] ) for data_stream in file_entry . data_streams : hash_value = self . _CalculateHashDataStream ( file_entry , data_stream . name ) display_path = self . _GetDisplayPath ( file_entry . path_spec , full_path , data_stream . name ) output_writer . WriteFileHash ( display_path , hash_value or 'N/A' ) for sub_file_entry in file_entry . sub_file_entries : self . _CalculateHashesFileEntry ( file_system , sub_file_entry , full_path , output_writer )
Recursively calculates hashes starting with the file entry.
19,423
def _GetDisplayPath ( self , path_spec , full_path , data_stream_name ) : display_path = '' if path_spec . HasParent ( ) : parent_path_spec = path_spec . parent if parent_path_spec and parent_path_spec . type_indicator == ( dfvfs_definitions . TYPE_INDICATOR_TSK_PARTITION ) : display_path = '' . join ( [ display_path , parent_path_spec . location ] ) display_path = '' . join ( [ display_path , full_path ] ) if data_stream_name : display_path = ':' . join ( [ display_path , data_stream_name ] ) return display_path
Retrieves a path to display .
19,424
def CalculateHashes(self, base_path_specs, output_writer):
  """Recursively calculates hashes starting with the base path specifications.

  Args:
    base_path_specs (list[dfvfs.PathSpec]): base path specifications.
    output_writer (StdoutWriter): output writer.
  """
  for base_path_spec in base_path_specs:
    file_system = resolver.Resolver.OpenFileSystem(base_path_spec)
    file_entry = resolver.Resolver.OpenFileEntry(base_path_spec)
    if file_entry is not None:
      self._CalculateHashesFileEntry(file_system, file_entry, '', output_writer)
    else:
      logging.warning('Unable to open base path specification:\n{0:s}'.format(
          base_path_spec.comparable))
Recursively calculates hashes starting with the base path specification.
19,425
def _EncodeString ( self , string ) : try : encoded_string = string . encode ( self . _encoding , errors = self . _errors ) except UnicodeEncodeError : if self . _errors == 'strict' : logging . error ( 'Unable to properly write output due to encoding error. ' 'Switching to error tolerant encoding which can result in ' 'non Basic Latin (C0) characters to be replaced with "?" or ' '"\\ufffd".' ) self . _errors = 'replace' encoded_string = string . encode ( self . _encoding , errors = self . _errors ) return encoded_string
Encodes the string .
19,426
def WriteFileHash(self, path, hash_value):
  """Writes the file path and hash to file.

  Args:
    path (str): path of the file.
    hash_value (str): message digest hash calculated over the file data.
  """
  output_text = '{0:s}\t{1:s}\n'.format(hash_value, path)
  self._file_object.write(self._EncodeString(output_text))
Writes the file path and hash to file .
19,427
def WriteFileHash(self, path, hash_value):
  """Writes the file path and hash to stdout.

  Args:
    path (str): path of the file.
    hash_value (str): message digest hash calculated over the file data.
  """
  print(self._EncodeString('{0:s}\t{1:s}'.format(hash_value, path)))
Writes the file path and hash to stdout .
19,428
def GetLinkedFileEntry(self):
  """Retrieves the linked file entry, for example for a symbolic link.

  Returns:
    OSFileEntry: linked file entry or None if the file entry is not
        a link.
  """
  link = self._GetLink()
  if not link:
    return None

  link_path_spec = os_path_spec.OSPathSpec(location=link)
  return OSFileEntry(self._resolver_context, self._file_system, link_path_spec)
Retrieves the linked file entry, for example for a symbolic link.
19,429
def GetAPFSVolumeByPathSpec(self, path_spec):
  """Retrieves an APFS volume for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    pyfsapfs.volume: an APFS volume or None if not available.
  """
  volume_index = apfs_helper.APFSContainerPathSpecGetVolumeIndex(path_spec)
  if volume_index is not None:
    return self._fsapfs_container.get_volume(volume_index)
  return None
Retrieves an APFS volume for a path specification .
19,430
def Glob2Regex(glob_pattern):
  """Converts a glob pattern to a regular expression.

  Supports '*', '?' and '[...]' character groups (including '[!...]'
  negation). An unterminated '[' is treated as a literal bracket.

  Args:
    glob_pattern (str): glob pattern.

  Returns:
    str: regular expression pattern equivalent to the glob pattern.

  Raises:
    ValueError: if the glob pattern is missing.
  """
  if not glob_pattern:
    raise ValueError('Missing glob pattern.')

  regex_pattern = []

  glob_pattern_index = 0
  glob_pattern_length = len(glob_pattern)
  while glob_pattern_index < glob_pattern_length:
    character = glob_pattern[glob_pattern_index]
    glob_pattern_index += 1

    if character == '*':
      regex_pattern.append('.*')
    elif character == '?':
      regex_pattern.append('.')
    elif character != '[':
      # Any other character is matched literally.
      regex_character = re.escape(character)
      regex_pattern.append(regex_character)
    else:
      # Scan for the closing ']' of the character group.
      glob_group_index = glob_pattern_index
      # A leading '!' negates the group and is not a terminator.
      if (glob_group_index < glob_pattern_length and
          glob_pattern[glob_group_index] == '!'):
        glob_group_index += 1

      # A ']' directly after '[' (or '[!') is a literal and not the
      # group terminator.
      if (glob_group_index < glob_pattern_length and
          glob_pattern[glob_group_index] == ']'):
        glob_group_index += 1

      while (glob_group_index < glob_pattern_length and
             glob_pattern[glob_group_index] != ']'):
        glob_group_index += 1

      # No closing ']': treat '[' as a literal bracket.
      if glob_group_index >= glob_pattern_length:
        regex_pattern.append('\\[')
        continue

      glob_group = glob_pattern[glob_pattern_index:glob_group_index]
      # Continue scanning after the closing ']'.
      glob_pattern_index = glob_group_index + 1

      glob_group = glob_group.replace('\\', '\\\\')
      # Python 3.7 warns on an unescaped '|' inside a character group.
      if py2to3.PY_3_7_AND_LATER:
        glob_group = glob_group.replace('|', '\\|')

      regex_pattern.append('[')

      if glob_group[0] == '!':
        # Glob negation '!' becomes regex negation '^'.
        regex_pattern.append('^')
        glob_group = glob_group[1:]
      elif glob_group[0] == '^':
        # A leading '^' must be escaped to remain literal.
        regex_pattern.append('\\')

      regex_pattern.append(glob_group)
      regex_pattern.append(']')

  return ''.join(regex_pattern)
Converts a glob pattern to a regular expression .
19,431
def CacheObject(self, identifier, vfs_object):
  """Caches a VFS object.

  Args:
    identifier (str): VFS object identifier.
    vfs_object (object): VFS object to cache.

  Raises:
    KeyError: if an object is already cached for the identifier.
    CacheFullError: if the maximum number of cached values is reached.
  """
  if identifier in self._values:
    raise KeyError('Object already cached for identifier: {0:s}'.format(
        identifier))

  if len(self._values) == self._maximum_number_of_cached_values:
    raise errors.CacheFullError('Maximum number of cached values reached.')

  self._values[identifier] = ObjectsCacheValue(vfs_object)
Caches a VFS object .
19,432
def GetCacheValueByObject(self, vfs_object):
  """Retrieves the cache value for the cached object.

  Args:
    vfs_object (object): VFS object that was cached.

  Returns:
    tuple[str, ObjectsCacheValue]: identifier and cache value, or
        (None, None) if the object is not cached.

  Raises:
    RuntimeError: if a cache value is missing.
  """
  for identifier, cache_value in self._values.items():
    if not cache_value:
      raise RuntimeError('Missing cache value.')

    if cache_value.vfs_object == vfs_object:
      return identifier, cache_value

  return None, None
Retrieves the cache value for the cached object .
19,433
def GetObject(self, identifier):
  """Retrieves a cached object based on the identifier.

  Args:
    identifier (str): VFS object identifier.

  Returns:
    object: cached VFS object or None if not cached.
  """
  cache_value = self._values.get(identifier, None)
  if cache_value:
    return cache_value.vfs_object
  return None
Retrieves a cached object based on the identifier .
19,434
def GrabObject(self, identifier):
  """Grabs a cached object based on the identifier.

  Increments the reference count of the cache value.

  Args:
    identifier (str): VFS object identifier.

  Raises:
    KeyError: if the object is not cached.
    RuntimeError: if the cache value is missing.
  """
  if identifier not in self._values:
    raise KeyError('Missing cached object for identifier: {0:s}'.format(
        identifier))

  value = self._values[identifier]
  if not value:
    raise RuntimeError('Missing cache value for identifier: {0:s}'.format(
        identifier))

  value.IncrementReferenceCount()
Grabs a cached object based on the identifier .
19,435
def ReleaseObject(self, identifier):
  """Releases a cached object based on the identifier.

  Decrements the reference count of the cache value.

  Args:
    identifier (str): VFS object identifier.

  Raises:
    KeyError: if the object is not cached.
    RuntimeError: if the cache value is missing.
  """
  if identifier not in self._values:
    raise KeyError('Missing cached object for identifier: {0:s}'.format(
        identifier))

  value = self._values[identifier]
  if not value:
    raise RuntimeError('Missing cache value for identifier: {0:s}'.format(
        identifier))

  value.DecrementReferenceCount()
Releases a cached object based on the identifier .
19,436
def RemoveObject(self, identifier):
  """Removes a cached object based on the identifier.

  Args:
    identifier (str): VFS object identifier.

  Raises:
    KeyError: if the object is not cached.
  """
  if identifier not in self._values:
    raise KeyError('Missing cached object for identifier: {0:s}'.format(
        identifier))

  del self._values[identifier]
Removes a cached object based on the identifier .
19,437
def GetSecurityDescriptor(self):
  """Retrieves the security descriptor.

  Returns:
    pyfwnt.security_descriptor: security descriptor parsed from the
        NTFS file entry data.
  """
  security_descriptor = pyfwnt.security_descriptor()
  security_descriptor.copy_from_byte_stream(
      self._fsntfs_file_entry.security_descriptor_data)
  return security_descriptor
Retrieves the security descriptor .
19,438
def _ReadFileEntries ( self , file_object ) : self . _file_entries = { } file_offset = 0 while file_offset < self . _file_size or self . _file_size == 0 : file_entry = self . _ReadFileEntry ( file_object , file_offset ) file_offset += file_entry . size if file_entry . path == 'TRAILER!!!' : break if file_entry . path in self . _file_entries : continue self . _file_entries [ file_entry . path ] = file_entry
Reads the file entries from the cpio archive .
19,439
def GetFileEntries(self, path_prefix=''):
  """Retrieves the file entries.

  Args:
    path_prefix (Optional[str]): path prefix to filter on.

  Yields:
    CPIOArchiveFileEntry: file entry whose path starts with the prefix.
  """
  if self._file_entries:
    for path, file_entry in self._file_entries.items():
      if path.startswith(path_prefix):
        yield file_entry
Retrieves the file entries .
19,440
def Open(self, file_object):
  """Opens the CPIO archive file.

  Detects the archive format from the signature and reads the file
  entries.

  Args:
    file_object (FileIO): file-like object.

  Raises:
    IOError: if the signature does not match a supported CPIO format.
  """
  file_object.seek(0, os.SEEK_SET)
  signature_data = file_object.read(6)

  self.file_format = None
  if len(signature_data) > 2:
    # Binary formats have a 2-byte signature; ASCII formats a 6-byte one.
    two_byte_formats = (
        (self._CPIO_SIGNATURE_BINARY_BIG_ENDIAN, 'bin-big-endian'),
        (self._CPIO_SIGNATURE_BINARY_LITTLE_ENDIAN, 'bin-little-endian'))
    six_byte_formats = (
        (self._CPIO_SIGNATURE_PORTABLE_ASCII, 'odc'),
        (self._CPIO_SIGNATURE_NEW_ASCII, 'newc'),
        (self._CPIO_SIGNATURE_NEW_ASCII_WITH_CHECKSUM, 'crc'))

    for signature, file_format in two_byte_formats:
      if signature_data[:2] == signature:
        self.file_format = file_format
        break
    else:
      for signature, file_format in six_byte_formats:
        if signature_data == signature:
          self.file_format = file_format
          break

  if self.file_format is None:
    raise IOError('Unsupported CPIO format.')

  self._file_object = file_object
  self._file_size = file_object.get_size()

  self._ReadFileEntries(self._file_object)
Opens the CPIO archive file .
19,441
def ReadDataAtOffset(self, file_offset, size):
  """Reads a byte string from the file-like object at a specific offset.

  Args:
    file_offset (int): offset to start reading from.
    size (int): number of bytes to read.

  Returns:
    bytes: data read.
  """
  file_object = self._file_object
  file_object.seek(file_offset, os.SEEK_SET)
  return file_object.read(size)
Reads a byte string from the file-like object at a specific offset.
19,442
def GetNTFSFileEntryByPathSpec(self, path_spec):
  """Retrieves the NTFS file entry for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    pyfsntfs.file_entry: NTFS file entry.

  Raises:
    PathSpecError: if the path specification has neither a location
        nor an MFT entry.
  """
  location = getattr(path_spec, 'location', None)
  mft_attribute = getattr(path_spec, 'mft_attribute', None)
  mft_entry = getattr(path_spec, 'mft_entry', None)

  # Prefer the MFT entry over the location when both are available.
  if mft_attribute is not None and mft_entry is not None:
    return self._fsntfs_volume.get_file_entry(mft_entry)
  if location is not None:
    return self._fsntfs_volume.get_file_entry_by_path(location)

  raise errors.PathSpecError(
      'Path specification missing location and MFT entry.')
Retrieves the NTFS file entry for a path specification .
19,443
def _RawGlobPathSpecWithAlphabeticalSchema(
    file_system, parent_path_spec, segment_format, location, segment_length,
    upper_case=False):
  """Globs for path specifications according to an alphabetical naming schema.

  Probes segment files named 'aa', 'ab', ... (base-26 letters of fixed
  length) until a segment file does not exist.

  Args:
    file_system (FileSystem): file system.
    parent_path_spec (PathSpec): parent path specification.
    segment_format (str): naming schema of the segment file location.
    location (str): the base segment file location string.
    segment_length (int): length (number of letters) of the segment
        indicator.
    upper_case (Optional[bool]): True if the segment letters are upper
        case.

  Returns:
    list[PathSpec]: path specifications of the existing segment files.
  """
  segment_number = 0
  segment_files = []
  while True:
    # Convert the segment number into base-26 letters, least significant
    # letter first.
    segment_index = segment_number
    segment_letters = []
    while len(segment_letters) < segment_length:
      segment_index, remainder = divmod(segment_index, 26)
      if upper_case:
        segment_letters.append(chr(ord('A') + remainder))
      else:
        segment_letters.append(chr(ord('a') + remainder))

    # Reverse to most significant letter first, e.g. 0 => 'aa', 1 => 'ab'.
    segment_letters = ''.join(segment_letters[::-1])
    segment_location = segment_format.format(location, segment_letters)

    # Copy the parent properties so the segment path specification is of
    # the same type as the parent.
    kwargs = path_spec_factory.Factory.GetProperties(parent_path_spec)
    kwargs['location'] = segment_location
    if parent_path_spec.parent is not None:
      kwargs['parent'] = parent_path_spec.parent

    segment_path_spec = path_spec_factory.Factory.NewPathSpec(
        parent_path_spec.type_indicator, **kwargs)

    # Stop at the first segment file that does not exist.
    if not file_system.FileEntryExistsByPathSpec(segment_path_spec):
      break

    segment_files.append(segment_path_spec)

    segment_number += 1

  return segment_files
Globs for path specifications according to an alphabetical naming schema .
19,444
def _RawGlobPathSpecWithNumericSchema(
    file_system, parent_path_spec, segment_format, location, segment_number):
  """Globs for path specifications according to a numeric naming schema.

  Probes numbered segment files starting at segment_number until a
  segment file does not exist.

  Args:
    file_system (FileSystem): file system.
    parent_path_spec (PathSpec): parent path specification.
    segment_format (str): naming schema of the segment file location.
    location (str): the base segment file location string.
    segment_number (int): first segment number to probe.

  Returns:
    list[PathSpec]: path specifications of the existing segment files.
  """
  segment_files = []
  while True:
    segment_location = segment_format.format(location, segment_number)

    # Copy the parent properties so the segment path specification is of
    # the same type as the parent.
    kwargs = path_spec_factory.Factory.GetProperties(parent_path_spec)
    kwargs['location'] = segment_location
    if parent_path_spec.parent is not None:
      kwargs['parent'] = parent_path_spec.parent

    segment_path_spec = path_spec_factory.Factory.NewPathSpec(
        parent_path_spec.type_indicator, **kwargs)

    # Stop at the first segment file that does not exist.
    if not file_system.FileEntryExistsByPathSpec(segment_path_spec):
      return segment_files

    segment_files.append(segment_path_spec)
    segment_number += 1
Globs for path specifications according to a numeric naming schema .
19,445
def GetSubNodeByLocation(self, location):
  """Retrieves a sub scan node based on the location.

  Args:
    location (str): location of the path specification.

  Returns:
    SourceScanNode: sub scan node or None if not available.
  """
  for sub_node in self.sub_nodes:
    if getattr(sub_node.path_spec, 'location', None) == location:
      return sub_node
  return None
Retrieves a sub scan node based on the location .
19,446
def GetUnscannedSubNode(self):
  """Retrieves the first unscanned sub node.

  Returns:
    SourceScanNode: unscanned sub node (depth first) or None if all
        nodes have been scanned.
  """
  # A leaf node that has not been scanned is itself the result.
  if not self.scanned and not self.sub_nodes:
    return self

  for sub_node in self.sub_nodes:
    unscanned_node = sub_node.GetUnscannedSubNode()
    if unscanned_node:
      return unscanned_node

  return None
Retrieves the first unscanned sub node .
19,447
def AddScanNode(self, path_spec, parent_scan_node):
  """Adds a scan node for a certain path specification.

  Args:
    path_spec (PathSpec): path specification.
    parent_scan_node (SourceScanNode): parent scan node or None for the
        root node.

  Returns:
    SourceScanNode: the newly added scan node.

  Raises:
    KeyError: if a scan node already exists for the path specification.
    RuntimeError: if the parent scan node is not present.
  """
  existing_node = self._scan_nodes.get(path_spec, None)
  if existing_node:
    raise KeyError('Scan node already exists.')

  new_node = SourceScanNode(path_spec)
  if parent_scan_node:
    if parent_scan_node.path_spec not in self._scan_nodes:
      raise RuntimeError('Parent scan node not present.')
    new_node.parent_node = parent_scan_node
    parent_scan_node.sub_nodes.append(new_node)

  # The first node added becomes the root.
  if not self._root_path_spec:
    self._root_path_spec = path_spec

  self._scan_nodes[path_spec] = new_node

  if path_spec.IsFileSystem():
    self._file_system_scan_nodes[path_spec] = new_node

  self.updated = True
  return new_node
Adds a scan node for a certain path specification .
19,448
def GetUnscannedScanNode(self):
  """Retrieves the first unscanned scan node.

  Returns:
    SourceScanNode: unscanned scan node or None if all nodes have been
        scanned.
  """
  root_scan_node = self._scan_nodes.get(self._root_path_spec, None)
  if root_scan_node and root_scan_node.scanned:
    return root_scan_node.GetUnscannedSubNode()
  return root_scan_node
Retrieves the first unscanned scan node .
19,449
def LockScanNode(self, path_spec):
  """Marks a scan node as locked.

  Args:
    path_spec (PathSpec): path specification.

  Raises:
    KeyError: if the scan node does not exist.
  """
  node = self._scan_nodes.get(path_spec, None)
  if not node:
    raise KeyError('Scan node does not exist.')

  self._locked_scan_nodes[path_spec] = node
Marks a scan node as locked .
19,450
def OpenSourcePath(self, source_path):
  """Opens the source path.

  Creates an operating system path specification for the source path
  and adds it as the root scan node.

  Args:
    source_path (str): source path.
  """
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      definitions.TYPE_INDICATOR_OS, location=source_path)
  self.AddScanNode(os_path_spec, None)
Opens the source path .
19,451
def RemoveScanNode(self, path_spec):
  """Removes a scan node of a certain path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    SourceScanNode: parent scan node or None if not available.

  Raises:
    RuntimeError: if the scan node has sub nodes.
  """
  node = self._scan_nodes.get(path_spec, None)
  if not node:
    return None

  if node.sub_nodes:
    raise RuntimeError('Scan node has sub nodes.')

  parent_node = node.parent_node
  if parent_node:
    parent_node.sub_nodes.remove(node)

  if path_spec == self._root_path_spec:
    self._root_path_spec = None
  del self._scan_nodes[path_spec]

  if path_spec.IsFileSystem():
    del self._file_system_scan_nodes[path_spec]

  return parent_node
Removes a scan node of a certain path specification .
19,452
def UnlockScanNode(self, path_spec):
  """Marks a scan node as unlocked.

  Args:
    path_spec (PathSpec): path specification.

  Raises:
    KeyError: if the scan node does not exist or is not locked.
  """
  if not self.HasScanNode(path_spec):
    raise KeyError('Scan node does not exist.')

  if path_spec not in self._locked_scan_nodes:
    raise KeyError('Scan node is not locked.')

  del self._locked_scan_nodes[path_spec]

  # Mark the node as not scanned so it is scanned again after unlocking.
  self._scan_nodes[path_spec].scanned = False
Marks a scan node as unlocked .
19,453
def _ScanNode(self, scan_context, scan_node, auto_recurse=True):
    """Scans a node for supported formats.

    Drills down from the given scan node: detects storage media images,
    volume systems and file systems, adding a child scan node for each
    detected layer. With auto_recurse the loop keeps descending until no
    new node is produced.

    Args:
        scan_context: the scan context tracking all scan nodes.
        scan_node: the scan node to scan.
        auto_recurse (Optional[bool]): True if the scan should continue
            into newly detected layers automatically.

    Raises:
        ValueError: if the scan context or scan node is invalid.
        BackEndError: if a system-level file entry cannot be opened.
    """
    if not scan_context:
        raise ValueError('Invalid scan context.')
    if not scan_node:
        raise ValueError('Invalid scan node.')

    scan_path_spec = scan_node.path_spec
    system_level_file_entry = None

    # System-level nodes (e.g. OS paths) are checked first: directories end
    # the scan, files/devices are probed for storage media image formats.
    if scan_node.IsSystemLevel():
        system_level_file_entry = resolver.Resolver.OpenFileEntry(
            scan_node.path_spec, resolver_context=self._resolver_context)
        if system_level_file_entry is None:
            raise errors.BackEndError('Unable to open file entry.')
        if system_level_file_entry.IsDirectory():
            scan_context.SetSourceType(definitions.SOURCE_TYPE_DIRECTORY)
            return
        source_path_spec = self.ScanForStorageMediaImage(scan_node.path_spec)
        if source_path_spec:
            scan_node.scanned = True
            scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
            if system_level_file_entry.IsDevice():
                source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
            else:
                source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
            scan_context.SetSourceType(source_type)
            if not auto_recurse:
                return

    # Descend through volume system layers until a file system is reached,
    # the node is locked, or no further layer is detected.
    source_path_spec = None
    while True:
        if scan_node.IsFileSystem():
            break
        if scan_node.SupportsEncryption():
            self._ScanEncryptedVolumeNode(scan_context, scan_node)
        if scan_context.IsLockedScanNode(scan_node.path_spec):
            # Stop descending; the node needs to be unlocked first.
            break
        source_path_spec = self.ScanForVolumeSystem(scan_node.path_spec)
        if not source_path_spec:
            break
        if not scan_context.HasScanNode(source_path_spec):
            scan_node.scanned = True
            scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
            if system_level_file_entry and system_level_file_entry.IsDevice():
                source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
            else:
                source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
            scan_context.SetSourceType(source_type)
        if scan_node.IsVolumeSystemRoot():
            self._ScanVolumeSystemRootNode(
                scan_context, scan_node, auto_recurse=auto_recurse)
            return
        if not auto_recurse and scan_context.updated:
            return
        if not scan_context.updated:
            break

    # Finally probe for a file system, except for volume system roots,
    # locked nodes and (during auto-recursion) VSS sub nodes.
    if scan_node.IsVolumeSystemRoot():
        pass
    elif scan_context.IsLockedScanNode(scan_node.path_spec):
        pass
    elif (scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW and
          auto_recurse and scan_node.path_spec != scan_path_spec):
        pass
    elif not scan_node.IsFileSystem():
        source_path_spec = self.ScanForFileSystem(scan_node.path_spec)
        if not source_path_spec:
            # A RAW node without a file system is removed again; the source
            # is treated as a plain file.
            if scan_node.path_spec.type_indicator == definitions.TYPE_INDICATOR_RAW:
                scan_node = scan_context.RemoveScanNode(scan_node.path_spec)
                scan_context.source_type = definitions.SOURCE_TYPE_FILE
            else:
                scan_context.SetSourceType(definitions.SOURCE_TYPE_FILE)
        elif not scan_context.HasScanNode(source_path_spec):
            scan_node.scanned = True
            scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
            if system_level_file_entry and system_level_file_entry.IsDevice():
                source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
            else:
                source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
            scan_context.SetSourceType(source_type)

    if not scan_node.scanned:
        scan_node.scanned = True
Scans a node for supported formats .
19,454
def _ScanEncryptedVolumeNode(self, scan_context, scan_node):
    """Scans an encrypted volume node for supported formats.

    Determines whether the encrypted volume is locked and, if so, locks the
    scan node in the scan context.

    Args:
        scan_context: the scan context.
        scan_node: the encrypted volume scan node.

    Raises:
        BackEndError: if the APFS volume cannot be unlocked.
    """
    if scan_node.type_indicator == definitions.TYPE_INDICATOR_APFS_CONTAINER:
        container_file_entry = resolver.Resolver.OpenFileEntry(
            scan_node.path_spec, resolver_context=self._resolver_context)
        fsapfs_volume = container_file_entry.GetAPFSVolume()
        try:
            is_locked = not apfs_helper.APFSUnlockVolume(
                fsapfs_volume, scan_node.path_spec,
                resolver.Resolver.key_chain)
        except IOError as exception:
            raise errors.BackEndError(
                'Unable to unlock APFS volume with error: {0!s}'.format(
                    exception))
    else:
        file_object = resolver.Resolver.OpenFileObject(
            scan_node.path_spec, resolver_context=self._resolver_context)
        is_locked = not file_object or file_object.is_locked
        # Fix: only close when a file object was actually opened. The
        # previous implementation called close() unconditionally, raising
        # AttributeError when OpenFileObject returned None instead of
        # treating the volume as locked.
        if file_object:
            file_object.close()

    if is_locked:
        scan_context.LockScanNode(scan_node.path_spec)
        # NOTE(review): for BDE the parent is rescanned for a file system,
        # presumably to expose the unencrypted view — confirm against the
        # scanner's BDE handling.
        if scan_node.type_indicator == definitions.TYPE_INDICATOR_BDE:
            path_spec = self.ScanForFileSystem(scan_node.path_spec.parent)
            if path_spec:
                scan_context.AddScanNode(path_spec, scan_node.parent_node)
Scans an encrypted volume node for supported formats .
19,455
def _ScanVolumeSystemRootNode(self, scan_context, scan_node, auto_recurse=True):
    """Scans a volume system root node for supported formats.

    Adds a scan node for every sub file entry of the volume system root and,
    unless the root is a VSS node, recurses into each sub node.

    Args:
        scan_context: the scan context.
        scan_node: the volume system root scan node.
        auto_recurse (Optional[bool]): True if newly added sub nodes should
            be scanned automatically.
    """
    # For VSS the parent of the root is also probed for a file system, so
    # the current (non-snapshot) volume remains reachable.
    if scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW:
        path_spec = self.ScanForFileSystem(scan_node.path_spec.parent)
        if path_spec:
            scan_context.AddScanNode(path_spec, scan_node.parent_node)

    file_entry = resolver.Resolver.OpenFileEntry(
        scan_node.path_spec, resolver_context=self._resolver_context)
    for sub_file_entry in file_entry.sub_file_entries:
        sub_scan_node = scan_context.AddScanNode(
            sub_file_entry.path_spec, scan_node)
        # VSS sub nodes (individual stores) are added but not scanned here.
        if scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW:
            continue
        if auto_recurse or not scan_context.updated:
            self._ScanNode(
                scan_context, sub_scan_node, auto_recurse=auto_recurse)
Scans a volume system root node for supported formats .
19,456
def GetVolumeIdentifiers(self, volume_system):
    """Retrieves the volume identifiers.

    Args:
        volume_system: the volume system to query.

    Returns:
        list[str]: sorted identifiers of volumes that have one.
    """
    return sorted(
        volume.identifier for volume in volume_system.volumes
        if getattr(volume, 'identifier', None))
Retrieves the volume identifiers .
19,457
def Scan(self, scan_context, auto_recurse=True, scan_path_spec=None):
    """Scans for supported formats.

    Args:
        scan_context: the scan context.
        auto_recurse (Optional[bool]): True if detected layers should be
            scanned recursively.
        scan_path_spec (Optional): path specification of a specific node to
            scan; by default the first unscanned node is used.

    Raises:
        ValueError: if the scan context is invalid.
    """
    if not scan_context:
        raise ValueError('Invalid scan context.')

    scan_context.updated = False
    if scan_path_spec:
        node = scan_context.GetScanNode(scan_path_spec)
    else:
        node = scan_context.GetUnscannedScanNode()

    if node:
        self._ScanNode(scan_context, node, auto_recurse=auto_recurse)
Scans for supported formats .
19,458
def ScanForFileSystem(self, source_path_spec):
    """Scans the path specification for a supported file system format.

    Args:
        source_path_spec: the path specification to scan.

    Returns:
        A file system path specification or None if no supported file
        system format was detected.

    Raises:
        BackEndError: if the analysis fails or more than one file system
            type (other than the preferred NTFS back-end case) is detected.
    """
    # APFS containers map directly onto an APFS file system; no analysis
    # needed.
    if source_path_spec.type_indicator == (
        definitions.TYPE_INDICATOR_APFS_CONTAINER):
        return path_spec_factory.Factory.NewPathSpec(
            definitions.TYPE_INDICATOR_APFS, location='/',
            parent=source_path_spec)

    try:
        type_indicators = analyzer.Analyzer.GetFileSystemTypeIndicators(
            source_path_spec, resolver_context=self._resolver_context)
    except RuntimeError as exception:
        raise errors.BackEndError((
            'Unable to process source path specification with error: '
            '{0!s}').format(exception))

    if not type_indicators:
        return None

    type_indicator = type_indicators[0]
    # Multiple detections are only tolerated when the preferred NTFS
    # back-end is among them; that back-end then wins.
    if len(type_indicators) > 1:
        if definitions.PREFERRED_NTFS_BACK_END not in type_indicators:
            raise errors.BackEndError(
                'Unsupported source found more than one file system types.')
        type_indicator = definitions.PREFERRED_NTFS_BACK_END

    if type_indicator == definitions.TYPE_INDICATOR_NTFS:
        root_location = '\\'
    else:
        root_location = '/'

    file_system_path_spec = path_spec_factory.Factory.NewPathSpec(
        type_indicator, location=root_location, parent=source_path_spec)

    # TSK detections are verified by actually opening the file system.
    if type_indicator == definitions.TYPE_INDICATOR_TSK:
        try:
            file_system = resolver.Resolver.OpenFileSystem(
                file_system_path_spec,
                resolver_context=self._resolver_context)
            file_system.Close()
        except errors.BackEndError:
            file_system_path_spec = None

    return file_system_path_spec
Scans the path specification for a supported file system format .
19,459
def ScanForStorageMediaImage(self, source_path_spec):
    """Scans the path specification for a supported storage media image format.

    Args:
        source_path_spec: the path specification to scan.

    Returns:
        A storage media image path specification, a RAW path specification
        when a split RAW image is detected, or None.

    Raises:
        BackEndError: if the analysis fails or more than one image type is
            detected.
    """
    try:
        type_indicators = analyzer.Analyzer.GetStorageMediaImageTypeIndicators(
            source_path_spec, resolver_context=self._resolver_context)
    except RuntimeError as exception:
        raise errors.BackEndError((
            'Unable to process source path specification with error: '
            '{0!s}').format(exception))

    if not type_indicators:
        # No known image format: fall back to globbing for (split) RAW
        # image segment files.
        file_system = resolver.Resolver.OpenFileSystem(
            source_path_spec, resolver_context=self._resolver_context)
        raw_path_spec = path_spec_factory.Factory.NewPathSpec(
            definitions.TYPE_INDICATOR_RAW, parent=source_path_spec)
        try:
            glob_results = raw.RawGlobPathSpec(file_system, raw_path_spec)
        except errors.PathSpecError:
            glob_results = None
        file_system.Close()
        if not glob_results:
            return None
        return raw_path_spec

    if len(type_indicators) > 1:
        raise errors.BackEndError(
            'Unsupported source found more than one storage media image types.')

    return path_spec_factory.Factory.NewPathSpec(
        type_indicators[0], parent=source_path_spec)
Scans the path specification for a supported storage media image format .
19,460
def ScanForVolumeSystem(self, source_path_spec):
    """Scans the path specification for a supported volume system format.

    Args:
        source_path_spec: the path specification to scan.

    Returns:
        A volume system path specification or None if no supported volume
        system was detected or scanning does not apply to this node type.

    Raises:
        BackEndError: if the analysis fails or more than one volume system
            type is detected.
    """
    # VSS snapshot volumes and APFS containers are never scanned for a
    # nested volume system; a volume system root is returned as-is.
    if source_path_spec.type_indicator == definitions.TYPE_INDICATOR_VSHADOW:
        return None
    if source_path_spec.IsVolumeSystemRoot():
        return source_path_spec
    if source_path_spec.type_indicator == (
        definitions.TYPE_INDICATOR_APFS_CONTAINER):
        return None

    try:
        type_indicators = analyzer.Analyzer.GetVolumeSystemTypeIndicators(
            source_path_spec, resolver_context=self._resolver_context)
    except (IOError, RuntimeError) as exception:
        raise errors.BackEndError((
            'Unable to process source path specification with error: '
            '{0!s}').format(exception))

    if not type_indicators:
        return None

    if len(type_indicators) > 1:
        raise errors.BackEndError(
            'Unsupported source found more than one volume system types.')

    # Avoid nesting a TSK partition system inside another one.
    if (type_indicators[0] == definitions.TYPE_INDICATOR_TSK_PARTITION and
        source_path_spec.type_indicator in [
            definitions.TYPE_INDICATOR_TSK_PARTITION]):
        return None

    if type_indicators[0] in definitions.VOLUME_SYSTEM_TYPE_INDICATORS:
        return path_spec_factory.Factory.NewPathSpec(
            type_indicators[0], location='/', parent=source_path_spec)
    return path_spec_factory.Factory.NewPathSpec(
        type_indicators[0], parent=source_path_spec)
Scans the path specification for a supported volume system format .
19,461
def _WriteRow ( self , output_writer , values , in_bold = False ) : row_strings = [ ] for value_index , value_string in enumerate ( values ) : padding_size = self . _column_sizes [ value_index ] - len ( value_string ) padding_string = ' ' * padding_size row_strings . extend ( [ value_string , padding_string ] ) row_strings . pop ( ) row_strings = '' . join ( row_strings ) if in_bold and not win32console : row_strings = '\x1b[1m{0:s}\x1b[0m' . format ( row_strings ) output_writer . Write ( '{0:s}\n' . format ( row_strings ) )
Writes a row of values aligned with the width to the output writer .
19,462
def Write(self, output_writer):
    """Writes the table to the output writer.

    Column widths are first rounded up to the next tab stop; the header row
    (if any) is written in bold, followed by the data rows.

    Args:
        output_writer: output writer with a Write method.
    """
    tab_width = self._NUMBER_OF_SPACES_IN_TAB
    for index in range(len(self._column_sizes)):
        tabs, _ = divmod(self._column_sizes[index], tab_width)
        self._column_sizes[index] = (tabs + 1) * tab_width

    if self._columns:
        self._WriteRow(output_writer, self._columns, in_bold=True)

    for row_values in self._rows:
        self._WriteRow(output_writer, row_values)
Writes the table to output writer .
19,463
def GetVSSStoreIdentifiers(self, volume_system, volume_identifiers):
    """Retrieves VSS store identifiers.

    Prompts the user (in a loop, until valid input is given) to select one
    or more VSS store identifiers from the available ones.

    Args:
        volume_system: the VSS volume system.
        volume_identifiers: the available volume identifiers.

    Returns:
        The selected volume identifiers.
    """
    print_header = True
    while True:
        if print_header:
            self._PrintVSSStoreIdentifiersOverview(
                volume_system, volume_identifiers)
            print_header = False

        self._output_writer.Write('\n')
        lines = self._textwrapper.wrap(self._USER_PROMPT_VSS)
        self._output_writer.Write('\n'.join(lines))
        self._output_writer.Write('\n\nVSS identifier(s): ')

        try:
            selected_volumes = self._ReadSelectedVolumes(
                volume_system, prefix='vss')
            # Accept when nothing was selected or every selection is one of
            # the available identifiers.
            if (not selected_volumes or
                not set(selected_volumes).difference(volume_identifiers)):
                break
        except ValueError:
            # Invalid input; fall through to the retry message.
            pass

        self._output_writer.Write('\n')
        lines = self._textwrapper.wrap(
            'Unsupported VSS identifier(s), please try again or abort with '
            'Ctrl^C.')
        self._output_writer.Write('\n'.join(lines))
        self._output_writer.Write('\n\n')

    return selected_volumes
Retrieves VSS store identifiers .
19,464
def UnlockEncryptedVolume(
    self, source_scanner_object, scan_context, locked_scan_node, credentials):
    """Unlocks an encrypted volume.

    Interactively prompts the user to choose a credential type and enter
    the credential data, then attempts to unlock the volume, repeating
    until it succeeds or the user selects 'skip'.

    Args:
        source_scanner_object: the source scanner, used to unlock the node.
        scan_context: the scan context.
        locked_scan_node: the locked scan node.
        credentials: object with the supported CREDENTIALS for the volume.

    Returns:
        bool: True if the volume was unlocked, False if skipped or not
        unlocked.
    """
    if locked_scan_node.type_indicator == (
        definitions.TYPE_INDICATOR_APFS_CONTAINER):
        header = 'Found an APFS encrypted volume.'
    elif locked_scan_node.type_indicator == definitions.TYPE_INDICATOR_BDE:
        header = 'Found a BitLocker encrypted volume.'
    elif locked_scan_node.type_indicator == definitions.TYPE_INDICATOR_FVDE:
        header = 'Found a CoreStorage (FVDE) encrypted volume.'
    else:
        header = 'Found an encrypted volume.'
    self._output_writer.Write(header)

    credentials_list = list(credentials.CREDENTIALS)
    credentials_list.append('skip')

    self._output_writer.Write('Supported credentials:\n\n')
    for index, name in enumerate(credentials_list):
        available_credential = ' {0:d}. {1:s}\n'.format(index + 1, name)
        self._output_writer.Write(available_credential)
    self._output_writer.Write('\nNote that you can abort with Ctrl^C.\n\n')

    result = False
    while not result:
        self._output_writer.Write('Select a credential to unlock the volume: ')
        input_line = self._input_reader.Read()
        input_line = input_line.strip()

        # Accept either the credential name or its 1-based list number.
        if input_line in credentials_list:
            credential_type = input_line
        else:
            try:
                credential_type = int(input_line, 10)
                credential_type = credentials_list[credential_type - 1]
            except (IndexError, ValueError):
                self._output_writer.Write(
                    'Unsupported credential: {0:s}\n'.format(input_line))
                continue

        if credential_type == 'skip':
            break

        getpass_string = 'Enter credential data: '
        if sys.platform.startswith('win') and sys.version_info[0] < 3:
            # getpass on Windows Python 2 requires a byte string prompt.
            getpass_string = self._EncodeString(getpass_string)
        credential_data = getpass.getpass(getpass_string)
        self._output_writer.Write('\n')

        if credential_type == 'key':
            # NOTE(review): str.decode('hex') only exists on Python 2; on
            # Python 3 this raises AttributeError, not TypeError — confirm
            # intended Python version.
            try:
                credential_data = credential_data.decode('hex')
            except TypeError:
                self._output_writer.Write('Unsupported credential data.\n')
                continue

        result = source_scanner_object.Unlock(
            scan_context, locked_scan_node.path_spec, credential_type,
            credential_data)
        if not result:
            self._output_writer.Write('Unable to unlock volume.\n\n')

    return result
Unlocks an encrypted volume .
19,465
def GetTARInfo(self):
    """Retrieves the TAR info.

    Returns:
        tarfile.TarInfo or None if the path specification refers to the
        archive root.

    Raises:
        PathSpecError: if the path specification is missing a location or
            the location is invalid.
    """
    if self._tar_info:
        return self._tar_info

    location = getattr(self.path_spec, 'location', None)
    if location is None:
        raise errors.PathSpecError('Path specification missing location.')
    if not location.startswith(self._file_system.LOCATION_ROOT):
        raise errors.PathSpecError('Invalid location in path specification.')
    if len(location) == 1:
        return None

    tar_file = self._file_system.GetTARFile()
    try:
        # The archive stores paths without the leading path separator.
        self._tar_info = tar_file.getmember(location[1:])
    except KeyError:
        pass
    return self._tar_info
Retrieves the TAR info .
19,466
def GetDecoder(cls, encoding_method):
    """Retrieves the decoder object for a specific encoding method.

    Args:
        encoding_method: the encoding method (case insensitive).

    Returns:
        An instance of the registered decoder or None if not registered.
    """
    decoder_class = cls._decoders.get(encoding_method.lower(), None)
    if not decoder_class:
        return None
    return decoder_class()
Retrieves the decoder object for a specific encoding method .
19,467
def RegisterDecoder(cls, decoder):
    """Registers a decoder for a specific encoding method.

    Args:
        decoder: the decoder class; its ENCODING_METHOD is the key.

    Raises:
        KeyError: if a decoder for the encoding method is already set.
    """
    method_key = decoder.ENCODING_METHOD.lower()
    if method_key in cls._decoders:
        raise KeyError(
            'Decoder for encoding method: {0:s} already set.'.format(
                decoder.ENCODING_METHOD))
    cls._decoders[method_key] = decoder
Registers a decoder for a specific encoding method .
19,468
def _GetDirectory(self):
    """Retrieves the directory.

    Returns:
        LVMDirectory or None if the file entry is not a directory.
    """
    if self.entry_type == definitions.FILE_ENTRY_TYPE_DIRECTORY:
        return LVMDirectory(self._file_system, self.path_spec)
    return None
Retrieves the directory .
19,469
def GetTSKVsPartByPathSpec(tsk_volume, path_spec):
    """Retrieves the TSK volume system part object from the TSK volume object.

    The part is located by, in order of preference: a part_index attribute,
    a "/p<number>" location (1-based partition number of allocated parts),
    or a start_offset in bytes.

    Args:
        tsk_volume: the pytsk3 volume object.
        path_spec: the path specification.

    Returns:
        tuple: (tsk_vs_part, partition_index) where both are None when the
        part cannot be found; partition_index is None for unallocated parts.
    """
    location = getattr(path_spec, 'location', None)
    part_index = getattr(path_spec, 'part_index', None)
    start_offset = getattr(path_spec, 'start_offset', None)
    partition_index = None
    if part_index is None:
        if location is not None:
            if location.startswith('/p'):
                try:
                    partition_index = int(location[2:], 10) - 1
                except ValueError:
                    pass
            # A location that does not parse to a valid partition number is
            # ignored.
            if partition_index is None or partition_index < 0:
                location = None
        if location is None and start_offset is None:
            return None, None
    bytes_per_sector = TSKVolumeGetBytesPerSector(tsk_volume)
    current_part_index = 0
    current_partition_index = 0
    tsk_vs_part = None
    tsk_vs_part_list = list(tsk_volume)
    number_of_tsk_vs_parts = len(tsk_vs_part_list)
    if number_of_tsk_vs_parts > 0:
        if (part_index is not None and
            (part_index < 0 or part_index >= number_of_tsk_vs_parts)):
            return None, None
        for tsk_vs_part in tsk_vs_part_list:
            # Only allocated parts advance the partition counter.
            if TSKVsPartIsAllocated(tsk_vs_part):
                if partition_index is not None:
                    if partition_index == current_partition_index:
                        break
                current_partition_index += 1
            if part_index is not None and part_index == current_part_index:
                break
            if start_offset is not None:
                start_sector = TSKVsPartGetStartSector(tsk_vs_part)
                if start_sector is not None:
                    start_sector *= bytes_per_sector
                    if start_sector == start_offset:
                        break
            current_part_index += 1
    # Falling through the loop without a break means nothing matched.
    if tsk_vs_part is None or current_part_index >= number_of_tsk_vs_parts:
        return None, None
    if not TSKVsPartIsAllocated(tsk_vs_part):
        current_partition_index = None
    return tsk_vs_part, current_partition_index
Retrieves the TSK volume system part object from the TSK volume object .
19,470
def TSKVolumeGetBytesPerSector(tsk_volume):
    """Retrieves the number of bytes per sector from a TSK volume object.

    Args:
        tsk_volume: the pytsk3 volume object.

    Returns:
        int: block size in bytes, defaulting to 512 when unavailable.
    """
    volume_info = getattr(tsk_volume, 'info', None)
    if volume_info is None:
        return 512
    return getattr(volume_info, 'block_size', 512)
Retrieves the number of bytes per sector from a TSK volume object .
19,471
def _OpenParentFile(self, file_system, path_spec, vhdi_file):
    """Opens the parent file of a differential VHD image.

    Resolves the parent image next to the current one (the stored parent
    filename's Windows path is reduced to its basename), opens it and
    recursively opens its parents.

    Args:
        file_system: the file system used to resolve the parent path.
        path_spec: path specification of the current VHD image.
        vhdi_file: the pyvhdi file whose parent should be attached.

    Raises:
        PathSpecError: if the path specification has no location.
    """
    location = getattr(path_spec, 'location', None)
    if not location:
        raise errors.PathSpecError(
            'Unsupported path specification without location.')
    location_path_segments = file_system.SplitPath(location)
    parent_filename = vhdi_file.parent_filename
    # The parent filename is stored as a Windows path; keep the basename.
    _, _, parent_filename = parent_filename.rpartition('\\')
    # Replace the current image's filename with the parent's.
    location_path_segments.pop()
    location_path_segments.append(parent_filename)
    parent_file_location = file_system.JoinPath(location_path_segments)
    kwargs = path_spec_factory.Factory.GetProperties(path_spec)
    kwargs['location'] = parent_file_location
    if path_spec.parent is not None:
        kwargs['parent'] = path_spec.parent
    parent_file_path_spec = path_spec_factory.Factory.NewPathSpec(
        path_spec.type_indicator, **kwargs)
    if not file_system.FileEntryExistsByPathSpec(parent_file_path_spec):
        return
    file_object = resolver.Resolver.OpenFileObject(
        parent_file_path_spec, resolver_context=self._resolver_context)
    vhdi_parent_file = pyvhdi.file()
    vhdi_parent_file.open_file_object(file_object)
    # Recurse for grandparents before wiring up this parent.
    if vhdi_parent_file.parent_identifier:
        self._OpenParentFile(
            file_system, parent_file_path_spec, vhdi_parent_file)
    vhdi_file.set_parent(vhdi_parent_file)
    # Keep references so the parent files and file objects stay open.
    self._parent_vhdi_files.append(vhdi_parent_file)
    self._sub_file_objects.append(file_object)
Opens the parent file .
19,472
def DeregisterDecrypter(cls, decrypter):
    """Deregisters a decrypter for a specific encryption method.

    Args:
        decrypter: the decrypter class; its ENCRYPTION_METHOD is the key.

    Raises:
        KeyError: if no decrypter is set for the encryption method.
    """
    method_key = decrypter.ENCRYPTION_METHOD.lower()
    try:
        del cls._decrypters[method_key]
    except KeyError:
        raise KeyError(
            'Decrypter for encryption method: {0:s} not set.'.format(
                decrypter.ENCRYPTION_METHOD))
Deregisters a decrypter for a specific encryption method .
19,473
def GetDecrypter(cls, encryption_method, **kwargs):
    """Retrieves the decrypter object for a specific encryption method.

    Args:
        encryption_method: the encryption method (case insensitive).
        kwargs: keyword arguments passed to the decrypter constructor.

    Returns:
        An instance of the registered decrypter or None if not registered.
    """
    decrypter_class = cls._decrypters.get(encryption_method.lower(), None)
    if not decrypter_class:
        return None
    return decrypter_class(**kwargs)
Retrieves the decrypter object for a specific encryption method .
19,474
def GetTARInfoByPathSpec(self, path_spec):
    """Retrieves the TAR info for a path specification.

    Returns:
        tarfile.TarInfo or None for the archive root or an unknown member.

    Raises:
        PathSpecError: if the path specification is missing a location or
            the location is invalid.
    """
    location = getattr(path_spec, 'location', None)
    if location is None:
        raise errors.PathSpecError('Path specification missing location.')
    if not location.startswith(self.LOCATION_ROOT):
        raise errors.PathSpecError('Invalid location in path specification.')
    if len(location) == 1:
        return None
    try:
        # Archive member paths do not carry the leading separator.
        return self._tar_file.getmember(location[1:])
    except KeyError:
        return None
Retrieves the TAR info for a path specification .
19,475
def GetCPIOArchiveFileEntryByPathSpec(self, path_spec):
    """Retrieves the CPIO archive file entry for a path specification.

    Returns:
        The CPIO archive file entry or None for the archive root.

    Raises:
        PathSpecError: if the path specification is missing a location or
            the location is invalid.
    """
    location = getattr(path_spec, 'location', None)
    if location is None:
        raise errors.PathSpecError('Path specification missing location.')
    if not location.startswith(self.LOCATION_ROOT):
        raise errors.PathSpecError('Invalid location in path specification.')
    relative_path = location[1:]
    if not relative_path:
        return None
    return self._cpio_archive_file.GetFileEntryByPath(relative_path)
Retrieves the CPIO archive file entry for a path specification .
19,476
def _PathStripPrefix ( self , path ) : if path . startswith ( '\\\\.\\' ) or path . startswith ( '\\\\?\\' ) : if len ( path ) < 7 or path [ 5 ] != ':' or path [ 6 ] != self . _PATH_SEPARATOR : return None path = path [ 7 : ] elif path . startswith ( '\\\\' ) : return None elif len ( path ) >= 3 and path [ 1 ] == ':' : if path [ 2 ] != self . _PATH_SEPARATOR : return None path = path [ 3 : ] elif path . startswith ( '\\' ) : path = path [ 1 : ] else : return None return path
Strips the prefix from a path .
19,477
def SetEnvironmentVariable(self, name, value):
    """Sets an environment variable in the Windows path helper.

    String values have their Windows path prefix stripped first; a value
    that reduces to None is not stored.

    Args:
        name: name of the environment variable (stored upper cased).
        value: value of the environment variable.
    """
    if isinstance(value, py2to3.STRING_TYPES):
        value = self._PathStripPrefix(value)
    if value is None:
        return
    self._environment_variables[name.upper()] = value
Sets an environment variable in the Windows path helper .
19,478
def APFSUnlockVolume(fsapfs_volume, path_spec, key_chain):
    """Unlocks an APFS volume using the path specification.

    Args:
        fsapfs_volume: the pyfsapfs volume.
        path_spec: path specification used to look up credentials.
        key_chain: key chain providing 'password' and 'recovery_password'.

    Returns:
        bool: True if the volume is (or was already) unlocked.
    """
    if not fsapfs_volume.is_locked():
        return True

    password = key_chain.GetCredential(path_spec, 'password')
    if password:
        fsapfs_volume.set_password(password)

    recovery_password = key_chain.GetCredential(path_spec, 'recovery_password')
    if recovery_password:
        fsapfs_volume.set_recovery_password(recovery_password)

    return bool(fsapfs_volume.unlock())
Unlocks an APFS volume using the path specification .
19,479
def GetNumberOfRows(self):
    """Retrieves the number of rows of the table.

    The value is computed once and cached.

    Returns:
        int: number of rows.

    Raises:
        IOError: if the database is not opened.
    """
    database = self._database_object
    if not database:
        raise IOError('Not opened.')
    if self._number_of_rows is None:
        self._number_of_rows = database.GetNumberOfRows(self._table_name)
    return self._number_of_rows
Retrieves the number of rows of the table .
19,480
def GetZipInfoByPathSpec(self, path_spec):
    """Retrieves the ZIP info for a path specification.

    Returns:
        zipfile.ZipInfo or None for the archive root.

    Raises:
        PathSpecError: if the path specification is missing a location or
            the location is invalid.
    """
    location = getattr(path_spec, 'location', None)
    if location is None:
        raise errors.PathSpecError('Path specification missing location.')
    if not location.startswith(self.LOCATION_ROOT):
        raise errors.PathSpecError('Invalid location in path specification.')
    relative_path = location[1:]
    if relative_path:
        return self._zip_file.getinfo(relative_path)
    return None
Retrieves the ZIP info for a path specification .
19,481
def GetAPFSFileEntryByPathSpec(self, path_spec):
    """Retrieves the APFS file entry for a path specification.

    Lookup by identifier takes precedence over lookup by location.

    Returns:
        The pyfsapfs file entry.

    Raises:
        PathSpecError: if the path specification has neither a location
            nor an identifier.
    """
    identifier = getattr(path_spec, 'identifier', None)
    if identifier is not None:
        return self._fsapfs_volume.get_file_entry_by_identifier(identifier)
    location = getattr(path_spec, 'location', None)
    if location is not None:
        return self._fsapfs_volume.get_file_entry_by_path(location)
    raise errors.PathSpecError(
        'Path specification missing location and identifier.')
Retrieves the APFS file entry for a path specification .
19,482
def DeregisterPathSpec(cls, path_spec_type):
    """Deregisters a path specification.

    Args:
        path_spec_type: the path specification type; its TYPE_INDICATOR is
            the key.

    Raises:
        KeyError: if the path specification type is not set.
    """
    type_indicator = path_spec_type.TYPE_INDICATOR
    if type_indicator not in cls._path_spec_types:
        raise KeyError(
            'Path specification type: {0:s} not set.'.format(type_indicator))
    del cls._path_spec_types[type_indicator]
    # Also drop it from the system-level lookup, if present.
    cls._system_level_type_indicators.pop(type_indicator, None)
Deregisters a path specification .
19,483
def GetProperties(cls, path_spec):
    """Retrieves a dictionary containing the path specification properties.

    Args:
        path_spec: the path specification.

    Returns:
        dict: property name to value, for the PROPERTY_NAMES the path
        specification actually defines.
    """
    return {
        property_name: getattr(path_spec, property_name)
        for property_name in cls.PROPERTY_NAMES
        if hasattr(path_spec, property_name)}
Retrieves a dictionary containing the path specification properties .
19,484
def NewPathSpec(cls, type_indicator, **kwargs):
    """Creates a new path specification for the specific type indicator.

    Args:
        type_indicator: the path specification type indicator.
        kwargs: keyword arguments passed to the type's constructor.

    Returns:
        The new path specification.

    Raises:
        KeyError: if the path specification type is not registered.
    """
    path_spec_type = cls._path_spec_types.get(type_indicator, None)
    if path_spec_type is None:
        raise KeyError(
            'Path specification type: {0:s} not set.'.format(type_indicator))
    # An explicit parent=None is treated the same as no parent argument.
    if kwargs.get('parent', True) is None:
        del kwargs['parent']
    return path_spec_type(**kwargs)
Creates a new path specification for the specific type indicator .
19,485
def RegisterPathSpec(cls, path_spec_type):
    """Registers a path specification type.

    Args:
        path_spec_type: the path specification type; its TYPE_INDICATOR is
            the key.

    Raises:
        KeyError: if the path specification type is already set.
    """
    type_indicator = path_spec_type.TYPE_INDICATOR
    if type_indicator in cls._path_spec_types:
        raise KeyError(
            'Path specification type: {0:s} already set.'.format(
                type_indicator))
    cls._path_spec_types[type_indicator] = path_spec_type
    # System-level types are tracked in a separate lookup table.
    if getattr(path_spec_type, '_IS_SYSTEM_LEVEL', False):
        cls._system_level_type_indicators[type_indicator] = path_spec_type
Registers a path specification type .
19,486
def _ReadString(self, file_object, file_offset, data_type_map, description):
    """Reads a string.

    Reads fixed-size elements from the file object until the terminator
    (or end of data) is seen, then maps the collected byte stream onto the
    string structure.

    Args:
        file_object: file-like object to read from.
        file_offset: offset of the string relative to the start of the
            file-like object, passed through for error reporting.
        data_type_map: dtfabric data type map of the string.
        description: description of the string, for error reporting.

    Returns:
        The mapped string structure.
    """
    # NOTE(review): reaches into protected dtfabric members to obtain the
    # element size and terminator — confirm no public accessor exists.
    element_data_size = (
        data_type_map._element_data_type_definition.GetByteSize())
    elements_terminator = (
        data_type_map._data_type_definition.elements_terminator)
    byte_stream = []
    element_data = file_object.read(element_data_size)
    byte_stream.append(element_data)
    # Stop at end-of-data (empty read) or when the terminator element was
    # read; the terminator itself is included in the byte stream.
    while element_data and element_data != elements_terminator:
        element_data = file_object.read(element_data_size)
        byte_stream.append(element_data)
    byte_stream = b''.join(byte_stream)
    return self._ReadStructureFromByteStream(
        byte_stream, file_offset, data_type_map, description)
Reads a string .
19,487
def _ReadStructure ( self , file_object , file_offset , data_size , data_type_map , description ) : data = self . _ReadData ( file_object , file_offset , data_size , description ) return self . _ReadStructureFromByteStream ( data , file_offset , data_type_map , description )
Reads a structure .
19,488
def read(self, offset, size):
    """Reads a byte string from the image object at the specified offset.

    Args:
        offset: offset to read from, relative to the start of the file.
        size: number of bytes to read.

    Returns:
        bytes: data read.
    """
    file_object = self._file_object
    file_object.seek(offset, os.SEEK_SET)
    return file_object.read(size)
Reads a byte string from the image object at the specified offset .
19,489
def CopyFromDateTimeString(self, time_string):
    """Copies a SleuthKit timestamp from a date and time string.

    Args:
        time_string: date and time string parseable by
            _CopyDateTimeFromString; missing values default to 0.
    """
    date_time_values = self._CopyDateTimeFromString(time_string)
    year = date_time_values.get('year', 0)
    month = date_time_values.get('month', 0)
    day_of_month = date_time_values.get('day_of_month', 0)
    hours = date_time_values.get('hours', 0)
    minutes = date_time_values.get('minutes', 0)
    seconds = date_time_values.get('seconds', 0)
    microseconds = date_time_values.get('microseconds', 0)
    self._timestamp = self._GetNumberOfSecondsFromElements(
        year, month, day_of_month, hours, minutes, seconds)
    self.fraction_of_second = microseconds
    # SleuthKit >= 4.2 stores the fraction with nanosecond precision,
    # earlier versions with 100ns precision; scale microseconds up
    # accordingly.
    if pytsk3.TSK_VERSION_NUM >= 0x040200ff:
        self.fraction_of_second *= 1000
    else:
        self.fraction_of_second *= 10
    self._normalized_timestamp = None
    self.is_local_time = False
Copies a SleuthKit timestamp from a date and time string .
19,490
def CopyToDateTimeString(self):
    """Copies the date time value to a date and time string.

    Returns:
        str: date and time formatted as "YYYY-MM-DD hh:mm:ss" with a 9- or
        7-digit fraction depending on the SleuthKit version, or None if no
        timestamp is set.
    """
    if self._timestamp is None:
        return None
    number_of_days, hours, minutes, seconds = self._GetTimeValues(
        self._timestamp)
    year, month, day_of_month = self._GetDateValues(
        number_of_days, 1970, 1, 1)
    if self.fraction_of_second is None:
        return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}'.format(
            year, month, day_of_month, hours, minutes, seconds)
    # SleuthKit >= 4.2 uses nanosecond (9 digit) precision, earlier
    # versions 100ns (7 digit) precision.
    if pytsk3.TSK_VERSION_NUM >= 0x040200ff:
        return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:09d}'.format(
            year, month, day_of_month, hours, minutes, seconds,
            self.fraction_of_second)
    return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:07d}'.format(
        year, month, day_of_month, hours, minutes, seconds,
        self.fraction_of_second)
Copies the date time value to a date and time string .
19,491
def CopyToStatTimeTuple(self):
    """Copies the SleuthKit timestamp to a stat timestamp tuple.

    Returns:
        tuple[int, int]: timestamp and fraction of second; the fraction is
        None when not available, otherwise the base class conversion is
        used.
    """
    if self.fraction_of_second is None:
        return self._timestamp, None
    return super(TSKTime, self).CopyToStatTimeTuple()
Copies the SleuthKit timestamp to a stat timestamp tuple .
19,492
def IsDefault(self):
    """Determines if the data stream is the default data stream.

    Returns:
        bool: True for HFS default/data attribute types and for unnamed
        NTFS streams; True as well when no TSK attribute or file system is
        available.
    """
    if not self._tsk_attribute or not self._file_system:
        return True
    if self._file_system.IsHFS():
        attribute_type = getattr(self._tsk_attribute.info, 'type', None)
        return attribute_type in (
            pytsk3.TSK_FS_ATTR_TYPE_HFS_DEFAULT,
            pytsk3.TSK_FS_ATTR_TYPE_HFS_DATA)
    if self._file_system.IsNTFS():
        # On NTFS only the unnamed $DATA stream is the default one.
        return not bool(self.name)
    return True
Determines if the data stream is the default data stream .
19,493
def _GetTimeValue(self, name):
    """Retrieves a date and time value.

    Args:
        name: name of the time attribute on the TSK file meta data (e.g.
            a change/access/modification time attribute).

    Returns:
        TSKTime: timestamp, with the "<name>_nano" fraction included only
        on file system types that provide nanosecond values.
    """
    timestamp = getattr(self._tsk_file.info.meta, name, None)
    if self._file_system_type in self._TSK_HAS_NANO_FS_TYPES:
        name_fragment = '{0:s}_nano'.format(name)
        fraction_of_second = getattr(
            self._tsk_file.info.meta, name_fragment, None)
    else:
        fraction_of_second = None
    return TSKTime(
        timestamp=timestamp, fraction_of_second=fraction_of_second)
Retrieves a date and time value .
19,494
def _TSKFileTimeCopyToStatTimeTuple(self, tsk_file, time_value):
    """Copies a SleuthKit file object time value to a stat timestamp tuple.

    Args:
        tsk_file: the pytsk3 file object.
        time_value: name of the time attribute on the file meta data.

    Returns:
        tuple: (timestamp, fraction) where either element can be None.

    Raises:
        BackEndError: if the TSK file is missing its info structures.
    """
    if (not tsk_file or not tsk_file.info or not tsk_file.info.meta or
        not tsk_file.info.fs_info):
        raise errors.BackEndError(
            'Missing TSK File .info, .info.meta. or .info.fs_info')
    stat_time = getattr(tsk_file.info.meta, time_value, None)
    stat_time_nano = None
    if self._file_system_type in self._TSK_HAS_NANO_FS_TYPES:
        time_value_nano = '{0:s}_nano'.format(time_value)
        stat_time_nano = getattr(tsk_file.info.meta, time_value_nano, None)
    # SleuthKit >= 4.2 reports nanoseconds; scale down to the 100ns units
    # used by the stat tuple.
    # NOTE(review): on Python 3 this is true division and yields a float —
    # confirm whether integer (//=) division is intended.
    if stat_time_nano is not None and pytsk3.TSK_VERSION_NUM >= 0x040200ff:
        stat_time_nano /= 100
    return stat_time, stat_time_nano
Copies a SleuthKit file object time value to a stat timestamp tuple .
19,495
def FVDEVolumeOpen(fvde_volume, path_spec, file_object, key_chain):
    """Opens the FVDE volume using the path specification.

    Applies any credentials found in the key chain (encrypted root plist,
    password, recovery password) before opening the volume on the file
    object.

    Args:
        fvde_volume: the pyfvde volume.
        path_spec: path specification used to look up credentials.
        file_object: file-like object containing the volume data.
    """
    encrypted_root_plist = key_chain.GetCredential(
        path_spec, 'encrypted_root_plist')
    if encrypted_root_plist:
        fvde_volume.read_encrypted_root_plist(encrypted_root_plist)

    for credential_name, setter in (
            ('password', fvde_volume.set_password),
            ('recovery_password', fvde_volume.set_recovery_password)):
        credential_value = key_chain.GetCredential(path_spec, credential_name)
        if credential_value:
            setter(credential_value)

    fvde_volume.open_file_object(file_object)
Opens the FVDE volume using the path specification .
19,496
def VShadowPathSpecGetStoreIndex(path_spec):
    """Retrieves the store index from the path specification.

    The index comes from an explicit store_index attribute or is parsed
    from a "/vss<number>" location (1-based store number).

    Args:
        path_spec: the path specification.

    Returns:
        int: zero-based store index or None when unavailable or invalid.
    """
    store_index = getattr(path_spec, 'store_index', None)
    if store_index is None:
        location = getattr(path_spec, 'location', None)
        if location is None or not location.startswith('/vss'):
            return None
        try:
            store_index = int(location[4:], 10) - 1
        except (TypeError, ValueError):
            return None
    if store_index < 0:
        return None
    return store_index
Retrieves the store index from the path specification .
19,497
def _GetMemberForOffset ( self , offset ) : if offset < 0 or offset >= self . uncompressed_data_size : raise ValueError ( 'Offset {0:d} is larger than file size {1:d}.' . format ( offset , self . uncompressed_data_size ) ) for end_offset , member in iter ( self . _members_by_end_offset . items ( ) ) : if offset < end_offset : return member return None
Finds the member whose data includes the provided offset .
19,498
def read(self, size=None):
    """Reads a byte string from the gzip file at the current offset.

    Args:
        size (Optional[int]): number of bytes to read; None reads up to
            the end of the uncompressed data.

    Returns:
        bytes: data read.
    """
    if size is None:
        # Fix: with the previous `while (size and ...)` condition a None
        # size short-circuited the loop and read() always returned b''.
        size = self.uncompressed_data_size - self._current_offset

    data = b''
    while (len(data) < size and
           self._current_offset < self.uncompressed_data_size):
        member = self._GetMemberForOffset(self._current_offset)
        member_offset = self._current_offset - member.uncompressed_data_offset
        data_read = member.ReadAtOffset(member_offset, size)
        if not data_read:
            # Guard against a stalled member read looping forever.
            break
        self._current_offset += len(data_read)
        data = b''.join([data, data_read])
    return data
Reads a byte string from the gzip file at the current offset .
19,499
def _GetStat(self):
    """Retrieves a stat object.

    Returns:
        VFSStat: stat object describing a file of the range size.
    """
    stat_object = vfs_stat.VFSStat()
    stat_object.type = stat_object.TYPE_FILE
    stat_object.size = self.path_spec.range_size
    return stat_object
Retrieves a stat object .