docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Retrieves a file entry for a path specification. Args: path_spec (PathSpec): path specification. Returns: FVDEFileEntry: file entry or None.
def GetFileEntryByPathSpec(self, path_spec): return fvde_file_entry.FVDEFileEntry( self._resolver_context, self, path_spec, is_root=True, is_virtual=True)
391,646
Decompresses the compressed data. Args: compressed_data (bytes): compressed data. Returns: tuple(bytes, bytes): uncompressed data and remaining compressed data. Raises: BackEndError: if the XZ compressed stream cannot be decompressed.
def Decompress(self, compressed_data): try: if hasattr(lzma, 'LZMA_VERSION'): # Note that we cannot use max_length=0 here due to different # versions of the lzma code. uncompressed_data = self._lzma_decompressor.decompress( compressed_data, 0) else: uncompressed_data = self._lzma_decompressor.decompress(compressed_data) remaining_compressed_data = getattr( self._lzma_decompressor, 'unused_data', b'') except (EOFError, IOError, LZMAError) as exception: raise errors.BackEndError(( 'Unable to decompress XZ compressed stream with error: ' '{0!s}.').format(exception)) return uncompressed_data, remaining_compressed_data
391,649
Initializes a path specification. Note that the encrypted stream path specification must have a parent. Args: cipher_mode (Optional[str]): cipher mode. encryption_method (Optional[str]): method used to the encrypt the data. initialization_vector (Optional[bytes]): initialization vector. key (Optional[bytes]): key. parent (Optional[PathSpec]): parent path specification. Raises: ValueError: when encryption method or parent are not set.
def __init__( self, cipher_mode=None, encryption_method=None, initialization_vector=None, key=None, parent=None, **kwargs): if not encryption_method or not parent: raise ValueError('Missing encryption method or parent value.') super(EncryptedStreamPathSpec, self).__init__(parent=parent, **kwargs) self.cipher_mode = cipher_mode self.encryption_method = encryption_method self.initialization_vector = initialization_vector self.key = key
391,650
Initializes a path specification. Note that an APFS path specification must have a parent. Args: identifier (Optional[int]): identifier. location (Optional[str]): location. parent (Optional[PathSpec]): parent path specification. Raises: ValueError: when parent or both identifier and location are not set.
def __init__( self, identifier=None, location=None, parent=None, **kwargs): if (not identifier and not location) or not parent: raise ValueError('Missing identifier and location, or parent value.') super(APFSPathSpec, self).__init__(parent=parent, **kwargs) self.identifier = identifier self.location = location
391,652
Initializes an image object. Args: file_object (FileIO): file-like object. Raises: ValueError: if the file-like object is invalid.
def __init__(self, file_object): if not file_object: raise ValueError('Missing file-like object.') # pytsk3.Img_Info does not let you set attributes after initialization. self._file_object = file_object # Using the old parent class invocation style otherwise some versions # of pylint complain also setting type to RAW or EXTERNAL to make sure # Img_Info does not do detection. tsk_img_type = getattr( pytsk3, 'TSK_IMG_TYPE_EXTERNAL', pytsk3.TSK_IMG_TYPE_RAW) # Note that we want url to be a binary string in Python 2 and a Unicode # string in Python 3. Hence the string is not prefixed. pytsk3.Img_Info.__init__(self, url='', type=tsk_img_type)
391,653
Reads a byte string from the image object at the specified offset. Args: offset (int): offset where to start reading. size (int): number of bytes to read. Returns: bytes: data read.
def read(self, offset, size): self._file_object.seek(offset, os.SEEK_SET) return self._file_object.read(size)
391,654
Initializes a SleuthKit timestamp. Args: fraction_of_second (Optional[int]): fraction of second, which is an integer that contains the number 100 nano seconds before Sleuthkit 4.2.0 or number of nano seconds in Sleuthkit 4.2.0 and later. timestamp (Optional[int]): POSIX timestamp.
def __init__(self, fraction_of_second=None, timestamp=None): # Sleuthkit 4.2.0 switched from 100 nano seconds precision to # 1 nano second precision. if pytsk3.TSK_VERSION_NUM >= 0x040200ff: precision = dfdatetime_definitions.PRECISION_1_NANOSECOND else: precision = dfdatetime_definitions.PRECISION_100_NANOSECONDS super(TSKTime, self).__init__() self._precision = precision self._timestamp = timestamp self.fraction_of_second = fraction_of_second
391,655
Copies a SleuthKit timestamp from a date and time string. Args: time_string (str): date and time value formatted as: YYYY-MM-DD hh:mm:ss.######[+-]##:## Where # are numeric digits ranging from 0 to 9 and the seconds fraction can be either 3 or 6 digits. The time of day, seconds fraction and time zone offset are optional. The default time zone is UTC.
def CopyFromDateTimeString(self, time_string): date_time_values = self._CopyDateTimeFromString(time_string) year = date_time_values.get('year', 0) month = date_time_values.get('month', 0) day_of_month = date_time_values.get('day_of_month', 0) hours = date_time_values.get('hours', 0) minutes = date_time_values.get('minutes', 0) seconds = date_time_values.get('seconds', 0) microseconds = date_time_values.get('microseconds', 0) self._timestamp = self._GetNumberOfSecondsFromElements( year, month, day_of_month, hours, minutes, seconds) self.fraction_of_second = microseconds if pytsk3.TSK_VERSION_NUM >= 0x040200ff: self.fraction_of_second *= 1000 else: self.fraction_of_second *= 10 self._normalized_timestamp = None self.is_local_time = False
391,657
Initializes an attribute. Args: tsk_attribute (pytsk3.Attribute): TSK attribute.
def __init__(self, tsk_attribute): super(TSKAttribute, self).__init__() self._tsk_attribute = tsk_attribute
391,661
Initializes a data stream. Args: file_system (TSKFileSystem): file system. tsk_attribute (pytsk3.Attribute): TSK attribute.
def __init__(self, file_system, tsk_attribute): super(TSKDataStream, self).__init__() self._file_system = file_system self._tsk_attribute = tsk_attribute
391,662
Retrieves a date and time value. Args: name (str): name of the date and time value, for example "atime" or "mtime". Returns: dfdatetime.DateTimeValues: date and time value or None if not available.
def _GetTimeValue(self, name): timestamp = getattr(self._tsk_file.info.meta, name, None) if self._file_system_type in self._TSK_HAS_NANO_FS_TYPES: name_fragment = '{0:s}_nano'.format(name) fraction_of_second = getattr( self._tsk_file.info.meta, name_fragment, None) else: fraction_of_second = None return TSKTime(timestamp=timestamp, fraction_of_second=fraction_of_second)
391,673
Retrieves the file-like object. Args: data_stream_name (Optional[str]): data stream name, where an empty string represents the default data stream. Returns: TSKFileIO: file-like object or None.
def GetFileObject(self, data_stream_name=''): data_stream_names = [ data_stream.name for data_stream in self._GetDataStreams()] if data_stream_name and data_stream_name not in data_stream_names: return None path_spec = copy.deepcopy(self.path_spec) if data_stream_name: # For HFS DECOMP fork name is exposed however libtsk 4.6.0 seems to handle # these differently when opened and the correct behavior seems to be # treating this as the default (nameless) fork instead. For context libtsk # 4.5.0 is unable to read the data steam and yields an error. if self._file_system.IsHFS() and data_stream_name == 'DECOMP': data_stream_name = '' setattr(path_spec, 'data_stream', data_stream_name) return resolver.Resolver.OpenFileObject( path_spec, resolver_context=self._resolver_context)
391,676
Opens the FVDE volume using the path specification. Args: fvde_volume (pyfvde.volume): FVDE volume. path_spec (PathSpec): path specification. file_object (FileIO): file-like object. key_chain (KeyChain): key chain.
def FVDEVolumeOpen(fvde_volume, path_spec, file_object, key_chain): encrypted_root_plist = key_chain.GetCredential( path_spec, 'encrypted_root_plist') if encrypted_root_plist: fvde_volume.read_encrypted_root_plist(encrypted_root_plist) password = key_chain.GetCredential(path_spec, 'password') if password: fvde_volume.set_password(password) recovery_password = key_chain.GetCredential(path_spec, 'recovery_password') if recovery_password: fvde_volume.set_recovery_password(recovery_password) fvde_volume.open_file_object(file_object)
391,679
Retrieves the store index from the path specification. Args: path_spec (PathSpec): path specification. Returns: int: store index or None if not available.
def VShadowPathSpecGetStoreIndex(path_spec): store_index = getattr(path_spec, 'store_index', None) if store_index is None: location = getattr(path_spec, 'location', None) if location is None or not location.startswith('/vss'): return None store_index = None try: store_index = int(location[4:], 10) - 1 except (TypeError, ValueError): pass if store_index is None or store_index < 0: return None return store_index
391,680
Initializes a file-like object. Args: resolver_context (Context): resolver context. Raises: ValueError: when file_object is set.
def __init__(self, resolver_context): super(GzipFile, self).__init__(resolver_context) self._compressed_data_size = -1 self._current_offset = 0 self._gzip_file_object = None self._members_by_end_offset = collections.OrderedDict() self.uncompressed_data_size = 0
391,681
Finds the member whose data includes the provided offset. Args: offset (int): offset in the uncompressed data to find the containing member for. Returns: gzipfile.GzipMember: gzip file member or None if not available. Raises: ValueError: if the provided offset is outside of the bounds of the uncompressed data.
def _GetMemberForOffset(self, offset): if offset < 0 or offset >= self.uncompressed_data_size: raise ValueError('Offset {0:d} is larger than file size {1:d}.'.format( offset, self.uncompressed_data_size)) for end_offset, member in iter(self._members_by_end_offset.items()): if offset < end_offset: return member return None
391,682
Seeks to an offset within the file-like object. Args: offset (int): offset to seek to. whence (Optional(int)): value that indicates whether offset is an absolute or relative position within the file. Raises: IOError: if the seek failed or the file has not been opened. OSError: if the seek failed or the file has not been opened.
def seek(self, offset, whence=os.SEEK_SET): if not self._gzip_file_object: raise IOError('Not opened.') if whence == os.SEEK_CUR: offset += self._current_offset elif whence == os.SEEK_END: offset += self.uncompressed_data_size elif whence != os.SEEK_SET: raise IOError('Unsupported whence.') if offset < 0: raise IOError('Invalid offset value less than zero.') self._current_offset = offset
391,683
Reads a byte string from the gzip file at the current offset. The function will read a byte string up to the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]): number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if the read failed. OSError: if the read failed.
def read(self, size=None): data = b'' while ((size and len(data) < size) and self._current_offset < self.uncompressed_data_size): member = self._GetMemberForOffset(self._current_offset) member_offset = self._current_offset - member.uncompressed_data_offset data_read = member.ReadAtOffset(member_offset, size) if data_read: self._current_offset += len(data_read) data = b''.join([data, data_read]) return data
391,684
Opens the file-like object defined by path specification. Args: path_spec (Optional[PathSpec]): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the file-like object could not be opened. OSError: if the file-like object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
def _Open(self, path_spec=None, mode='rb'): if not path_spec: raise ValueError('Missing path specification.') if not path_spec.HasParent(): raise errors.PathSpecError( 'Unsupported path specification without parent.') self._gzip_file_object = resolver.Resolver.OpenFileObject( path_spec.parent, resolver_context=self._resolver_context) file_size = self._gzip_file_object.get_size() self._gzip_file_object.seek(0, os.SEEK_SET) uncompressed_data_offset = 0 next_member_offset = 0 while next_member_offset < file_size: member = gzipfile.GzipMember( self._gzip_file_object, next_member_offset, uncompressed_data_offset) uncompressed_data_offset = ( uncompressed_data_offset + member.uncompressed_data_size) self._members_by_end_offset[uncompressed_data_offset] = member self.uncompressed_data_size += member.uncompressed_data_size next_member_offset = member.member_end_offset
391,685
Initializes a path specification. Args: parent (Optional[PathSpec]): parent path specification. kwargs (dict[str, object]): keyword arguments depending on the path specification. Raises: ValueError: if a derived path specification class does not define a type indicator or when there are unused keyword arguments.
def __init__(self, parent=None, **kwargs): if kwargs: raise ValueError('Unused keyword arguments: {0:s}.'.format( ', '.join(kwargs))) super(PathSpec, self).__init__() self.parent = parent if not getattr(self, 'TYPE_INDICATOR', None): raise ValueError('Missing type indicator.')
391,687
Retrieves the comparable representation. This is a convenience function for constructing comparables. Args: sub_comparable_string (str): sub comparable string. Returns: str: comparable representation of the path specification.
def _GetComparable(self, sub_comparable_string=''): string_parts = [] string_parts.append(getattr(self.parent, 'comparable', '')) string_parts.append('type: {0:s}'.format(self.type_indicator)) if sub_comparable_string: string_parts.append(', {0:s}'.format(sub_comparable_string)) string_parts.append('\n') return ''.join(string_parts)
391,689
Initializes a path specification. Note that the FVDE path specification must have a parent. Args: encrypted_root_plist (Optional[str]): path to the EncryptedRoot.plist.wipekey file. password (Optional[str]): password. parent (Optional[PathSpec]): parent path specification. recovery_password (Optional[str]): recovery password. Raises: ValueError: when parent is not set.
def __init__( self, encrypted_root_plist=None, password=None, parent=None, recovery_password=None, **kwargs): if not parent: raise ValueError('Missing parent value.') super(FVDEPathSpec, self).__init__(parent=parent, **kwargs) self.encrypted_root_plist = encrypted_root_plist self.password = password self.recovery_password = recovery_password
391,691
Deregisters a path specification mount point. Args: mount_point (str): mount point identifier. Raises: KeyError: if the corresponding mount point is not set.
def DeregisterMountPoint(cls, mount_point): if mount_point not in cls._mount_points: raise KeyError('Mount point: {0:s} not set.'.format(mount_point)) del cls._mount_points[mount_point]
391,693
Registers a path specification mount point. Args: mount_point (str): mount point identifier. path_spec (PathSpec): path specification of the mount point. Raises: KeyError: if the corresponding mount point is already set.
def RegisterMountPoint(cls, mount_point, path_spec): if mount_point in cls._mount_points: raise KeyError('Mount point: {0:s} already set.'.format(mount_point)) cls._mount_points[mount_point] = path_spec
391,694
Deregisters a path specification resolver helper. Args: resolver_helper (ResolverHelper): resolver helper. Raises: KeyError: if resolver helper object is not set for the corresponding type indicator.
def DeregisterHelper(cls, resolver_helper): if resolver_helper.type_indicator not in cls._resolver_helpers: raise KeyError( 'Resolver helper object not set for type indicator: {0:s}.'.format( resolver_helper.type_indicator)) del cls._resolver_helpers[resolver_helper.type_indicator]
391,695
Retrieves the path specification resolver helper for the specified type. Args: type_indicator (str): type indicator. Returns: ResolverHelper: a resolver helper. Raises: KeyError: if resolver helper is not set for the corresponding type indicator.
def GetHelper(cls, type_indicator): if type_indicator not in cls._resolver_helpers: raise KeyError( 'Resolver helper not set for type indicator: {0:s}.'.format( type_indicator)) return cls._resolver_helpers[type_indicator]
391,696
Registers a path specification resolver helper. Args: resolver_helper (ResolverHelper): resolver helper. Raises: KeyError: if resolver helper object is already set for the corresponding type indicator.
def RegisterHelper(cls, resolver_helper): if resolver_helper.type_indicator in cls._resolver_helpers: raise KeyError(( 'Resolver helper object already set for type indicator: ' '{0!s}.').format(resolver_helper.type_indicator)) cls._resolver_helpers[resolver_helper.type_indicator] = resolver_helper
391,697
Initializes a path specification. Note that the compressed stream path specification must have a parent. Args: compression_method (Optional[str]): method used to the compress the data. parent (Optional[PathSpec]): parent path specification. Raises: ValueError: when compression method or parent are not set.
def __init__(self, compression_method=None, parent=None, **kwargs): if not compression_method or not parent: raise ValueError('Missing compression method or parent value.') super(CompressedStreamPathSpec, self).__init__(parent=parent, **kwargs) self.compression_method = compression_method
391,698
Initializes a file-like object. If the file-like object is chained do not separately use the parent file-like object. Args: resolver_context (Context): resolver context. file_object (Optional[file]): parent file-like object.
def __init__(self, resolver_context, file_object=None): super(DataRange, self).__init__(resolver_context) self._current_offset = 0 self._file_object = file_object if file_object: self._file_object_set_in_init = True self._range_offset = 0 self._range_size = file_object.get_size() else: self._file_object_set_in_init = False self._range_offset = -1 self._range_size = -1
391,700
Sets the data range (offset and size). The data range is used to map a range of data within one file (e.g. a single partition within a full disk image) as a file-like object. Args: range_offset (int): start offset of the data range. range_size (int): size of the data range. Raises: IOError: if the file-like object is already open. OSError: if the file-like object is already open. ValueError: if the range offset or range size is invalid.
def SetRange(self, range_offset, range_size): if self._is_open: raise IOError('Already open.') if range_offset < 0: raise ValueError( 'Invalid range offset: {0:d} value out of bounds.'.format( range_offset)) if range_size < 0: raise ValueError( 'Invalid range size: {0:d} value out of bounds.'.format( range_size)) self._range_offset = range_offset self._range_size = range_size self._current_offset = 0
391,702
Reads a byte string from the file-like object at the current offset. The function will read a byte string of the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]): number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if the read failed. OSError: if the read failed.
def read(self, size=None): if not self._is_open: raise IOError('Not opened.') if self._range_offset < 0 or self._range_size < 0: raise IOError('Invalid data range.') if self._current_offset < 0: raise IOError( 'Invalid current offset: {0:d} value less than zero.'.format( self._current_offset)) if self._current_offset >= self._range_size: return b'' if size is None: size = self._range_size if self._current_offset + size > self._range_size: size = self._range_size - self._current_offset self._file_object.seek( self._range_offset + self._current_offset, os.SEEK_SET) data = self._file_object.read(size) self._current_offset += len(data) return data
391,703
Seeks to an offset within the file-like object. Args: offset (int): offset to seek to. whence (Optional(int)): value that indicates whether offset is an absolute or relative position within the file. Raises: IOError: if the seek failed. OSError: if the seek failed.
def seek(self, offset, whence=os.SEEK_SET): if not self._is_open: raise IOError('Not opened.') if self._current_offset < 0: raise IOError( 'Invalid current offset: {0:d} value less than zero.'.format( self._current_offset)) if whence == os.SEEK_CUR: offset += self._current_offset elif whence == os.SEEK_END: offset += self._range_size elif whence != os.SEEK_SET: raise IOError('Unsupported whence.') if offset < 0: raise IOError('Invalid offset value less than zero.') self._current_offset = offset
391,704
Initializes a path specification. Note that the EWF file path specification must have a parent. Args: parent (Optional[PathSpec]): parent path specification. Raises: ValueError: when parent is not set.
def __init__(self, parent=None, **kwargs): if not parent: raise ValueError('Missing parent value.') super(EWFPathSpec, self).__init__(parent=parent, **kwargs)
391,705
Initializes a compressed stream file system. Args: resolver_context (Context): a resolver context.
def __init__(self, resolver_context): super(CompressedStreamFileSystem, self).__init__(resolver_context) self._compression_method = None
391,706
Opens the file system defined by path specification. Args: path_spec (PathSpec): a path specification. mode (Optional[str]): file access mode. The default is 'rb' which represents read-only binary. Raises: AccessError: if the access to open the file was denied. IOError: if the file system could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
def _Open(self, path_spec, mode='rb'): if not path_spec.HasParent(): raise errors.PathSpecError( 'Unsupported path specification without parent.') compression_method = getattr(path_spec, 'compression_method', None) if not compression_method: raise errors.PathSpecError( 'Unsupported path specification without compression method.') self._compression_method = compression_method
391,707
Retrieves a file entry for a path specification. Args: path_spec (PathSpec): a path specification. Returns: CompressedStreamFileEntry: a file entry or None if not available.
def GetFileEntryByPathSpec(self, path_spec): return compressed_stream_file_entry.CompressedStreamFileEntry( self._resolver_context, self, path_spec, is_root=True, is_virtual=True)
391,708
Initializes a directory. Args: file_system (FileSystem): file system. path_spec (PathSpec): path specification.
def __init__(self, file_system, path_spec): super(Directory, self).__init__() self._entries = None self._file_system = file_system self.path_spec = path_spec
391,710
Retrieves a data stream by name. Args: name (str): name of the data stream. case_sensitive (Optional[bool]): True if the name is case sensitive. Returns: DataStream: a data stream or None if not available. Raises: ValueError: if the name is not string.
def GetDataStream(self, name, case_sensitive=True): if not isinstance(name, py2to3.STRING_TYPES): raise ValueError('Name is not a string.') name_lower = name.lower() matching_data_stream = None for data_stream in self._GetDataStreams(): if data_stream.name == name: return data_stream if not case_sensitive and data_stream.name.lower() == name_lower: if not matching_data_stream: matching_data_stream = data_stream return matching_data_stream
391,716
Retrieves the file-like object. Args: data_stream_name (Optional[str]): name of the data stream, where an empty string represents the default data stream. Returns: FileIO: a file-like object or None if not available.
def GetFileObject(self, data_stream_name=''): if data_stream_name: return None return resolver.Resolver.OpenFileObject( self.path_spec, resolver_context=self._resolver_context)
391,717
Retrieves a sub file entry by name. Args: name (str): name of the file entry. case_sensitive (Optional[bool]): True if the name is case sensitive. Returns: FileEntry: a file entry or None if not available.
def GetSubFileEntryByName(self, name, case_sensitive=True): name_lower = name.lower() matching_sub_file_entry = None for sub_file_entry in self.sub_file_entries: if sub_file_entry.name == name: return sub_file_entry if not case_sensitive and sub_file_entry.name.lower() == name_lower: if not matching_sub_file_entry: matching_sub_file_entry = sub_file_entry return matching_sub_file_entry
391,718
Determines if the file entry has specific data stream. Args: name (str): name of the data stream. case_sensitive (Optional[bool]): True if the name is case sensitive. Returns: bool: True if the file entry has the data stream. Raises: ValueError: if the name is not string.
def HasDataStream(self, name, case_sensitive=True): if not isinstance(name, py2to3.STRING_TYPES): raise ValueError('Name is not a string.') name_lower = name.lower() for data_stream in self._GetDataStreams(): if data_stream.name == name: return True if not case_sensitive and data_stream.name.lower() == name_lower: return True return False
391,720
Initializes a path specification. Note that the data range path specification must have a parent. Args: parent (Optional[PathSpec]): parent path specification. range_offset (Optional[int]): start offset of the data range. range_size (Optional[int]): size of the data range. Raises: ValueError: when range offset, range offset or parent are not set.
def __init__(self, parent=None, range_offset=None, range_size=None, **kwargs): if not range_offset or not range_size or not parent: raise ValueError('Missing range offset, range size or parent value.') super(DataRangePathSpec, self).__init__(parent=parent, **kwargs) self.range_offset = range_offset self.range_size = range_size
391,728
Initializes a volume. Args: file_entry (TSKPartitionFileEntry): a TSK partition file entry. bytes_per_sector (int): number of bytes per sector.
def __init__(self, file_entry, bytes_per_sector): super(TSKVolume, self).__init__(file_entry.name) self._file_entry = file_entry self._bytes_per_sector = bytes_per_sector
391,730
Opens a volume defined by path specification. Args: path_spec (PathSpec): a path specification. Raises: VolumeSystemError: if the TSK partition virtual file system could not be resolved.
def Open(self, path_spec): self._file_system = resolver.Resolver.OpenFileSystem(path_spec) if self._file_system is None: raise errors.VolumeSystemError('Unable to resolve path specification.') type_indicator = self._file_system.type_indicator if type_indicator != definitions.TYPE_INDICATOR_TSK_PARTITION: raise errors.VolumeSystemError('Unsupported type indicator.')
391,734
Initializes a path specification. Note that the encoded stream path specification must have a parent. Args: encoding_method (Optional[str]): method used to the encode the data. parent (Optional[PathSpec]): parent path specification. Raises: ValueError: when encoding method or parent are not set.
def __init__(self, encoding_method=None, parent=None, **kwargs): if not encoding_method or not parent: raise ValueError('Missing encoding method or parent value.') super(EncodedStreamPathSpec, self).__init__(parent=parent, **kwargs) self.encoding_method = encoding_method
391,735
Initializes a file system. Args: resolver_context (Context): resolver context.
def __init__(self, resolver_context): super(LVMFileSystem, self).__init__(resolver_context) self._file_object = None self._vslvm_volume_group = None self._vslvm_handle = None
391,737
Opens the file system object defined by path specification. Args: path_spec (PathSpec): path specification. mode (Optional[str]): file access mode. The default is 'rb' which represents read-only binary. Raises: AccessError: if the access to open the file was denied. IOError: if the file system object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
def _Open(self, path_spec, mode='rb'): if not path_spec.HasParent(): raise errors.PathSpecError( 'Unsupported path specification without parent.') file_object = resolver.Resolver.OpenFileObject( path_spec.parent, resolver_context=self._resolver_context) try: vslvm_handle = pyvslvm.handle() vslvm_handle.open_file_object(file_object) # TODO: implement multi physical volume support. vslvm_handle.open_physical_volume_files_as_file_objects([ file_object]) vslvm_volume_group = vslvm_handle.get_volume_group() except: file_object.close() raise self._file_object = file_object self._vslvm_handle = vslvm_handle self._vslvm_volume_group = vslvm_volume_group
391,739
Determines if a file entry for a path specification exists. Args: path_spec (PathSpec): path specification. Returns: bool: True if the file entry exists.
def FileEntryExistsByPathSpec(self, path_spec): volume_index = lvm.LVMPathSpecGetVolumeIndex(path_spec) # The virtual root file has not corresponding volume index but # should have a location. if volume_index is None: location = getattr(path_spec, 'location', None) return location is not None and location == self.LOCATION_ROOT return ( 0 <= volume_index < self._vslvm_volume_group.number_of_logical_volumes)
391,740
Retrieves a file entry for a path specification. Args: path_spec (PathSpec): path specification. Returns: LVMFileEntry: a file entry or None if not available.
def GetFileEntryByPathSpec(self, path_spec): volume_index = lvm.LVMPathSpecGetVolumeIndex(path_spec) # The virtual root file has not corresponding volume index but # should have a location. if volume_index is None: location = getattr(path_spec, 'location', None) if location is None or location != self.LOCATION_ROOT: return None return lvm_file_entry.LVMFileEntry( self._resolver_context, self, path_spec, is_root=True, is_virtual=True) if (volume_index < 0 or volume_index >= self._vslvm_volume_group.number_of_logical_volumes): return None return lvm_file_entry.LVMFileEntry(self._resolver_context, self, path_spec)
391,741
Retrieves a LVM logical volume for a path specification. Args: path_spec (PathSpec): path specification. Returns: pyvslvm.logical_volume: a LVM logical volume or None if not available.
def GetLVMLogicalVolumeByPathSpec(self, path_spec): volume_index = lvm.LVMPathSpecGetVolumeIndex(path_spec) if volume_index is None: return None return self._vslvm_volume_group.get_logical_volume(volume_index)
391,742
Initializes a file-like object. Args: resolver_context (Context): resolver context.
def __init__(self, resolver_context): super(TARFile, self).__init__(resolver_context) self._current_offset = 0 self._file_system = None self._size = 0 self._tar_ext_file = None
391,744
Opens the file-like object defined by path specification. Args: path_spec (Optional[PathSpec]): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the file-like object could not be opened. OSError: if the file-like object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
def _Open(self, path_spec=None, mode='rb'): if not path_spec: raise ValueError('Missing path specification.') file_system = resolver.Resolver.OpenFileSystem( path_spec, resolver_context=self._resolver_context) file_entry = file_system.GetFileEntryByPathSpec(path_spec) if not file_entry: file_system.Close() raise IOError('Unable to retrieve file entry.') if not file_entry.IsFile(): file_system.Close() raise IOError('Not a regular file.') self._file_system = file_system tar_file = self._file_system.GetTARFile() tar_info = file_entry.GetTARInfo() self._tar_ext_file = tar_file.extractfile(tar_info) self._current_offset = 0 self._size = tar_info.size
391,746
Reads a byte string from the file-like object at the current offset. The function will read a byte string of the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]): number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if the read failed. OSError: if the read failed.
def read(self, size=None):
  """Reads a byte string from the file-like object at the current offset.

  The function will read a byte string of the specified size or all of the
  remaining data if no size was specified.

  Args:
    size (Optional[int]): number of bytes to read, where None is all
        remaining data.

  Returns:
    bytes: data read.

  Raises:
    IOError: if the read failed.
    OSError: if the read failed.
  """
  if not self._is_open:
    raise IOError('Not opened.')

  if self._current_offset < 0:
    raise IOError('Invalid current offset value less than zero.')

  if self._current_offset > self._size:
    return b''

  # Clamp the read size to the remaining data in the member.
  remaining_size = self._size - self._current_offset
  if size is None or size > remaining_size:
    size = remaining_size

  self._tar_ext_file.seek(self._current_offset, os.SEEK_SET)
  data = self._tar_ext_file.read(size)

  # It is possible that the returned data size is not the same as the
  # requested data size. At this layer we don't care and this discrepancy
  # should be dealt with on a higher layer if necessary.
  self._current_offset += len(data)

  return data
391,747
Extracts credentials from a path specification. Args: path_spec (PathSpec): path specification to extract credentials from.
def ExtractCredentialsFromPathSpec(self, path_spec):
  """Extracts credentials from a path specification.

  Args:
    path_spec (PathSpec): path specification to extract credentials from.
  """
  credentials = manager.CredentialsManager.GetCredentials(path_spec)
  for identifier in credentials.CREDENTIALS:
    value = getattr(path_spec, identifier, None)
    # Only credentials actually present on the path specification are stored.
    if value is not None:
      self.SetCredential(path_spec, identifier, value)
391,748
Retrieves a specific credential from the key chain. Args: path_spec (PathSpec): path specification. identifier (str): credential identifier. Returns: object: credential or None if the credential for the path specification is not set.
def GetCredential(self, path_spec, identifier):
  """Retrieves a specific credential from the key chain.

  Args:
    path_spec (PathSpec): path specification.
    identifier (str): credential identifier.

  Returns:
    object: credential or None if the credential for the path specification
        is not set.
  """
  per_path_spec = self._credentials_per_path_spec.get(
      path_spec.comparable, {})
  return per_path_spec.get(identifier, None)
391,749
Sets a specific credential for the path specification. Args: path_spec (PathSpec): path specification. identifier (str): credential identifier. data (object): credential data. Raises: KeyError: if the credential is not supported by the path specification type.
def SetCredential(self, path_spec, identifier, data):
  """Sets a specific credential for the path specification.

  Args:
    path_spec (PathSpec): path specification.
    identifier (str): credential identifier.
    data (object): credential data.

  Raises:
    KeyError: if the credential is not supported by the path specification
        type.
  """
  supported_credentials = manager.CredentialsManager.GetCredentials(path_spec)

  if identifier not in supported_credentials.CREDENTIALS:
    # Fix: corrected the typo "Unsuppored" in the error message.
    raise KeyError((
        'Unsupported credential: {0:s} for path specification type: '
        '{1:s}').format(identifier, path_spec.type_indicator))

  credentials = self._credentials_per_path_spec.get(path_spec.comparable, {})
  credentials[identifier] = data
  self._credentials_per_path_spec[path_spec.comparable] = credentials
391,750
Initializes the file entry object. Args: resolver_context (Context): resolver context. file_system (FileSystem): file system. path_spec (PathSpec): path specification. is_root (Optional[bool]): True if the file entry is the root file entry of the corresponding file system. is_virtual (Optional[bool]): True if the file entry is a virtual file Raises: BackEndError: when the BDE volume is missing.
def __init__(
    self, resolver_context, file_system, path_spec, is_root=False,
    is_virtual=False):
  """Initializes the file entry object.

  Args:
    resolver_context (Context): resolver context.
    file_system (FileSystem): file system.
    path_spec (PathSpec): path specification.
    is_root (Optional[bool]): True if the file entry is the root file entry
        of the corresponding file system.
    is_virtual (Optional[bool]): True if the file entry is a virtual file.

  Raises:
    BackEndError: when the BDE volume is missing.
  """
  # Resolve the volume before calling the super class, so a missing volume
  # fails fast.
  bde_volume = file_system.GetBDEVolume()
  if bde_volume is None:
    raise errors.BackEndError('Missing BDE volume.')

  super(BDEFileEntry, self).__init__(
      resolver_context, file_system, path_spec, is_root=is_root,
      is_virtual=is_virtual)
  self._bde_volume = bde_volume
  self.entry_type = definitions.FILE_ENTRY_TYPE_FILE
391,751
Globs for path specifications according to the EWF naming schema. Args: file_system (FileSystem): file system. path_spec (PathSpec): path specification. Returns: list[PathSpec]: path specifications that match the glob. Raises: PathSpecError: if the path specification is invalid. RuntimeError: if the maximum number of supported segment files is reached.
def EWFGlobPathSpec(file_system, path_spec):
  """Globs for path specifications according to the EWF naming schema.

  Args:
    file_system (FileSystem): file system.
    path_spec (PathSpec): path specification.

  Returns:
    list[PathSpec]: path specifications that match the glob.

  Raises:
    PathSpecError: if the path specification is invalid.
    RuntimeError: if the maximum number of supported segment files is
        reached.
  """
  if not path_spec.HasParent():
    raise errors.PathSpecError(
        'Unsupported path specification without parent.')

  parent_path_spec = path_spec.parent

  parent_location = getattr(parent_path_spec, 'location', None)
  if not parent_location:
    raise errors.PathSpecError(
        'Unsupported parent path specification without location.')

  parent_location, _, segment_extension = parent_location.rpartition('.')

  segment_extension_start = segment_extension[0]
  segment_extension_length = len(segment_extension)

  # The first segment file must have an extension of the form E01, e01, s01
  # or Ex01.
  if (segment_extension_length not in [3, 4] or
      not segment_extension.endswith('01') or (
          segment_extension_length == 3 and
          segment_extension_start not in ['E', 'e', 's']) or (
              segment_extension_length == 4 and
              not segment_extension.startswith('Ex'))):
    raise errors.PathSpecError((
        'Unsupported parent path specification invalid segment file '
        'extension: {0:s}').format(segment_extension))

  segment_number = 1
  segment_files = []

  while True:
    segment_location = '{0:s}.{1:s}'.format(parent_location, segment_extension)

    # Note that we don't want to set the keyword arguments when not used
    # because the path specification base class will check for unused
    # keyword arguments and raise.
    kwargs = path_spec_factory.Factory.GetProperties(parent_path_spec)

    kwargs['location'] = segment_location
    if parent_path_spec.parent is not None:
      kwargs['parent'] = parent_path_spec.parent

    segment_path_spec = path_spec_factory.Factory.NewPathSpec(
        parent_path_spec.type_indicator, **kwargs)

    if not file_system.FileEntryExistsByPathSpec(segment_path_spec):
      break

    segment_files.append(segment_path_spec)

    segment_number += 1
    if segment_number <= 99:
      # Segments 2 through 99 use a two-digit numeric suffix.
      if segment_extension_length == 3:
        segment_extension = '{0:s}{1:02d}'.format(
            segment_extension_start, segment_number)
      elif segment_extension_length == 4:
        segment_extension = '{0:s}x{1:02d}'.format(
            segment_extension_start, segment_number)
    else:
      # Segments 100 and up use a base-26 letter suffix, e.g. EAA.
      segment_index = segment_number - 100

      if segment_extension_start in ['e', 's']:
        letter_offset = ord('a')
      else:
        letter_offset = ord('A')

      segment_index, remainder = divmod(segment_index, 26)
      third_letter = chr(letter_offset + remainder)

      segment_index, remainder = divmod(segment_index, 26)
      second_letter = chr(letter_offset + remainder)

      first_letter = chr(ord(segment_extension_start) + segment_index)
      # '[' follows 'Z' and '{' follows 'z'; reaching either means the
      # naming schema has been exhausted.
      if first_letter in ['[', '{']:
        raise RuntimeError('Unsupported number of segment files.')

      if segment_extension_length == 3:
        segment_extension = '{0:s}{1:s}{2:s}'.format(
            first_letter, second_letter, third_letter)
      elif segment_extension_length == 4:
        segment_extension = '{0:s}x{1:s}{2:s}'.format(
            first_letter, second_letter, third_letter)

  return segment_files
391,754
Initializes a path specification. Note that the TAR file path specification must have a parent. Args: location (str): TAR file internal location string prefixed with a path separator character. parent (Optional[PathSpec]): parent path specification. Raises: ValueError: when parent is not set.
def __init__(self, location=None, parent=None, **kwargs):
  """Initializes a path specification.

  Note that the TAR file path specification must have a parent.

  Args:
    location (str): TAR file internal location string prefixed with a path
        separator character.
    parent (Optional[PathSpec]): parent path specification.

  Raises:
    ValueError: when parent is not set.
  """
  if not parent:
    raise ValueError('Missing parent value.')

  super(TARPathSpec, self).__init__(
      location=location, parent=parent, **kwargs)
391,755
Adds the parent directories of a path to the fake file system. Args: path (str): path of the file within the fake file system. Raises: ValueError: if a parent directory is already set and is not a directory.
def _AddParentDirectories(self, path):
  """Adds the parent directories of a path to the fake file system.

  Args:
    path (str): path of the file within the fake file system.

  Raises:
    ValueError: if a parent directory is already set and is not a directory.
  """
  path_segments = self.file_system.SplitPath(path)

  # First pass: validate every existing parent is a directory, so nothing
  # is created when the path is invalid.
  for index in range(len(path_segments)):
    parent_path = self.file_system.JoinPath(path_segments[:index])
    file_entry = self.file_system.GetFileEntryByPath(parent_path)
    if file_entry and not file_entry.IsDirectory():
      raise ValueError(
          'Non-directory parent file entry: {0:s} already exists.'.format(
              parent_path))

  # Second pass: create any missing parent directories.
  for index in range(len(path_segments)):
    parent_path = self.file_system.JoinPath(path_segments[:index])
    if not self.file_system.FileEntryExistsByPath(parent_path):
      self.file_system.AddFileEntry(
          parent_path, file_entry_type=definitions.FILE_ENTRY_TYPE_DIRECTORY)
391,757
Adds a directory to the fake file system. Note that this function will create parent directories if needed. Args: path (str): path of the directory within the fake file system. Raises: ValueError: if the path is already set.
def AddDirectory(self, path):
  """Adds a directory to the fake file system.

  Note that this function will create parent directories if needed.

  Args:
    path (str): path of the directory within the fake file system.

  Raises:
    ValueError: if the path is already set.
  """
  if self.file_system.FileEntryExistsByPath(path):
    raise ValueError('Path: {0:s} already set.'.format(path))

  self._AddParentDirectories(path)
  self.file_system.AddFileEntry(
      path, file_entry_type=definitions.FILE_ENTRY_TYPE_DIRECTORY)
391,758
Adds a "regular" file to the fake file system. Note that this function will create parent directories if needed. Args: path (str): path of the file within the fake file system. file_data (bytes): data of the file. Raises: ValueError: if the path is already set.
def AddFile(self, path, file_data):
  """Adds a "regular" file to the fake file system.

  Note that this function will create parent directories if needed.

  Args:
    path (str): path of the file within the fake file system.
    file_data (bytes): data of the file.

  Raises:
    ValueError: if the path is already set.
  """
  if self.file_system.FileEntryExistsByPath(path):
    raise ValueError('Path: {0:s} already set.'.format(path))

  self._AddParentDirectories(path)
  self.file_system.AddFileEntry(path, file_data=file_data)
391,759
Adds a "regular" file to the fake file system. Args: path (str): path of the file within the fake file system. file_data_path (str): path of the file to read the file data from. Raises: ValueError: if the path is already set.
def AddFileReadData(self, path, file_data_path):
  """Adds a "regular" file to the fake file system.

  The file data is read from the file system of the operating system.

  Args:
    path (str): path of the file within the fake file system.
    file_data_path (str): path of the file to read the file data from.

  Raises:
    ValueError: if the path is already set.
  """
  if self.file_system.FileEntryExistsByPath(path):
    raise ValueError('Path: {0:s} already set.'.format(path))

  with open(file_data_path, 'rb') as file_object:
    file_data = file_object.read()

  self._AddParentDirectories(path)
  self.file_system.AddFileEntry(path, file_data=file_data)
391,760
Adds a symbolic link to the fake file system. Args: path (str): path of the symbolic link within the fake file system. linked_path (str): path that is linked. Raises: ValueError: if the path is already set.
def AddSymbolicLink(self, path, linked_path):
  """Adds a symbolic link to the fake file system.

  Args:
    path (str): path of the symbolic link within the fake file system.
    linked_path (str): path that is linked.

  Raises:
    ValueError: if the path is already set.
  """
  if self.file_system.FileEntryExistsByPath(path):
    raise ValueError('Path: {0:s} already set.'.format(path))

  self._AddParentDirectories(path)
  self.file_system.AddFileEntry(
      path, file_entry_type=definitions.FILE_ENTRY_TYPE_LINK,
      link_data=linked_path)
391,761
Deregisters a path specification credentials. Args: credentials (Credentials): credentials. Raises: KeyError: if credential object is not set for the corresponding type indicator.
def DeregisterCredentials(cls, credentials):
  """Deregisters a path specification credentials.

  Args:
    credentials (Credentials): credentials.

  Raises:
    KeyError: if credential object is not set for the corresponding type
        indicator.
  """
  type_indicator = credentials.type_indicator
  if type_indicator not in cls._credentials:
    raise KeyError(
        'Credential object not set for type indicator: {0:s}.'.format(
            type_indicator))

  del cls._credentials[type_indicator]
391,762
Registers a path specification credentials. Args: credentials (Credentials): credentials. Raises: KeyError: if credentials object is already set for the corresponding type indicator.
def RegisterCredentials(cls, credentials):
  """Registers a path specification credentials.

  Args:
    credentials (Credentials): credentials.

  Raises:
    KeyError: if credentials object is already set for the corresponding
        type indicator.
  """
  type_indicator = credentials.type_indicator
  if type_indicator in cls._credentials:
    raise KeyError(
        'Credentials object already set for type indicator: {0:s}.'.format(
            type_indicator))

  cls._credentials[type_indicator] = credentials
391,763
Initializes a gzip member decompressor wrapper. Args: stream_start (int): offset to the compressed stream within the containing file object.
def __init__(self, stream_start):
  """Initializes a gzip member decompressor wrapper.

  Args:
    stream_start (int): offset to the compressed stream within the
        containing file object.
  """
  # Leftover compressed bytes carried over between Read() calls.
  self._compressed_data = b''
  self._decompressor = zlib_decompressor.DeflateDecompressor()
  self.last_read = stream_start
  self.uncompressed_offset = 0
391,764
Reads the next uncompressed data from the gzip stream. Args: file_object (FileIO): file object that contains the compressed stream. Returns: bytes: next uncompressed data from the compressed stream.
def Read(self, file_object):
  """Reads the next uncompressed data from the gzip stream.

  Args:
    file_object (FileIO): file object that contains the compressed stream.

  Returns:
    bytes: next uncompressed data from the compressed stream.
  """
  file_object.seek(self.last_read, os.SEEK_SET)
  read_data = file_object.read(self._MAXIMUM_READ_SIZE)
  self.last_read = file_object.get_offset()

  # Prepend any compressed bytes left over from the previous read.
  compressed_data = b''.join([self._compressed_data, read_data])
  decompressed, remaining = self._decompressor.Decompress(compressed_data)
  self._compressed_data = remaining

  self.uncompressed_offset += len(decompressed)
  return decompressed
391,765
Initializes a gzip member. Args: file_object (FileIO): file-like object, containing the gzip member. member_start_offset (int): offset to the beginning of the gzip member in the containing file. uncompressed_data_offset (int): current offset into the uncompressed data in the containing file.
def __init__(
    self, file_object, member_start_offset, uncompressed_data_offset):
  """Initializes a gzip member.

  Args:
    file_object (FileIO): file-like object, containing the gzip member.
    member_start_offset (int): offset to the beginning of the gzip member
        in the containing file.
    uncompressed_data_offset (int): current offset into the uncompressed
        data in the containing file.
  """
  self.comment = None
  self.modification_time = None
  self.operating_system = None
  self.original_filename = None

  # Cache bounds, as offsets into this member's uncompressed data: first
  # and one-past-last cached byte.
  self._cache_start_offset = None
  self._cache_end_offset = None
  self._cache = b''

  # Total size of the data in this gzip member after decompression.
  self.uncompressed_data_size = None
  # Offset of the start of the uncompressed data in this member relative to
  # the whole gzip file's uncompressed data.
  self.uncompressed_data_offset = uncompressed_data_offset

  # Offset to the start of the member in the parent file object.
  self.member_start_offset = member_start_offset

  # Parse the member header in place in the parent file object.
  self._file_object = file_object
  self._file_object.seek(self.member_start_offset, os.SEEK_SET)
  self._ReadMemberHeader(file_object)

  # Offset to the beginning of the compressed data in the file object.
  self._compressed_data_start = file_object.get_offset()
  self._decompressor_state = _GzipDecompressorState(
      self._compressed_data_start)

  self._LoadDataIntoCache(file_object, 0, read_all_data=True)

  # TODO: gracefully handle missing footer.
  self._ReadMemberFooter(file_object)

  # Offset to the end of the member in the parent file object.
  self.member_end_offset = file_object.get_offset()
391,766
Reads a member header. Args: file_object (FileIO): file-like object to read from. Raises: FileFormatError: if the member header cannot be read.
def _ReadMemberHeader(self, file_object):
  """Reads a member header.

  Args:
    file_object (FileIO): file-like object to read from.

  Raises:
    FileFormatError: if the member header cannot be read.
  """
  file_offset = file_object.get_offset()
  member_header = self._ReadStructure(
      file_object, file_offset, self._MEMBER_HEADER_SIZE,
      self._MEMBER_HEADER, 'member header')

  if member_header.signature != self._GZIP_SIGNATURE:
    raise errors.FileFormatError(
        'Unsupported signature: 0x{0:04x}.'.format(member_header.signature))

  if member_header.compression_method != self._COMPRESSION_METHOD_DEFLATE:
    raise errors.FileFormatError(
        'Unsupported compression method: {0:d}.'.format(
            member_header.compression_method))

  self.modification_time = member_header.modification_time
  self.operating_system = member_header.operating_system

  # Skip over the optional extra field when present.
  if member_header.flags & self._FLAG_FEXTRA:
    file_offset = file_object.get_offset()
    extra_field_data_size = self._ReadStructure(
        file_object, file_offset, self._UINT16LE_SIZE, self._UINT16LE,
        'extra field data size')
    file_object.seek(extra_field_data_size, os.SEEK_CUR)

  if member_header.flags & self._FLAG_FNAME:
    file_offset = file_object.get_offset()
    string_value = self._ReadString(
        file_object, file_offset, self._CSTRING, 'original filename')
    self.original_filename = string_value.rstrip('\x00')

  if member_header.flags & self._FLAG_FCOMMENT:
    file_offset = file_object.get_offset()
    string_value = self._ReadString(
        file_object, file_offset, self._CSTRING, 'comment')
    self.comment = string_value.rstrip('\x00')

  # Skip over the header CRC16 when present; it is not validated here.
  if member_header.flags & self._FLAG_FHCRC:
    file_object.read(2)
391,767
Reads a member footer. Args: file_object (FileIO): file-like object to read from. Raises: FileFormatError: if the member footer cannot be read.
def _ReadMemberFooter(self, file_object): file_offset = file_object.get_offset() member_footer = self._ReadStructure( file_object, file_offset, self._MEMBER_FOOTER_SIZE, self._MEMBER_FOOTER, 'member footer') self.uncompressed_data_size = member_footer.uncompressed_data_size
391,768
Reads and decompresses the data in the member. This function already loads as much data as possible in the cache, up to UNCOMPRESSED_DATA_CACHE_SIZE bytes. Args: file_object (FileIO): file-like object. minimum_offset (int): offset into this member's uncompressed data at which the cache should start. read_all_data (bool): True if all the compressed data should be read from the member.
def _LoadDataIntoCache( self, file_object, minimum_offset, read_all_data=False): # Decompression can only be performed from beginning to end of the stream. # So, if data before the current position of the decompressor in the stream # is required, it's necessary to throw away the current decompression # state and start again. if minimum_offset < self._decompressor_state.uncompressed_offset: self._ResetDecompressorState() while not self.IsCacheFull() or read_all_data: decompressed_data = self._decompressor_state.Read(file_object) # Note that decompressed_data will be empty if there is no data left # to read and decompress. if not decompressed_data: break decompressed_data_length = len(decompressed_data) decompressed_end_offset = self._decompressor_state.uncompressed_offset decompressed_start_offset = ( decompressed_end_offset - decompressed_data_length) data_to_add = decompressed_data added_data_start_offset = decompressed_start_offset if decompressed_start_offset < minimum_offset: data_to_add = None if decompressed_start_offset < minimum_offset < decompressed_end_offset: data_add_offset = decompressed_end_offset - minimum_offset data_to_add = decompressed_data[-data_add_offset] added_data_start_offset = decompressed_end_offset - data_add_offset if not self.IsCacheFull() and data_to_add: self._cache = b''.join([self._cache, data_to_add]) if self._cache_start_offset is None: self._cache_start_offset = added_data_start_offset if self._cache_end_offset is None: self._cache_end_offset = self._cache_start_offset + len(data_to_add) else: self._cache_end_offset += len(data_to_add) # If there's no more data in the member, the unused_data value is # populated in the decompressor. When this situation arises, we rewind # to the end of the compressed_data section. unused_data = self._decompressor_state.GetUnusedData() if unused_data: seek_offset = -len(unused_data) file_object.seek(seek_offset, os.SEEK_CUR) self._ResetDecompressorState() break
391,772
Initializes a file entry lister. Args: mediator (VolumeScannerMediator): a volume scanner mediator.
def __init__(self, mediator=None):
  """Initializes a file entry lister.

  Args:
    mediator (VolumeScannerMediator): a volume scanner mediator.
  """
  super(FileEntryLister, self).__init__(mediator=mediator)
  # When True only file entries that are regular files are listed.
  self._list_only_files = False
391,773
Lists a file entry. Args: file_system (dfvfs.FileSystem): file system that contains the file entry. file_entry (dfvfs.FileEntry): file entry to list. parent_full_path (str): full path of the parent file entry. output_writer (StdoutWriter): output writer.
def _ListFileEntry( self, file_system, file_entry, parent_full_path, output_writer): # Since every file system implementation can have their own path # segment separator we are using JoinPath to be platform and file system # type independent. full_path = file_system.JoinPath([parent_full_path, file_entry.name]) if not self._list_only_files or file_entry.IsFile(): output_writer.WriteFileEntry(full_path) for sub_file_entry in file_entry.sub_file_entries: self._ListFileEntry(file_system, sub_file_entry, full_path, output_writer)
391,774
Lists file entries in the base path specification. Args: base_path_specs (list[dfvfs.PathSpec]): source path specification. output_writer (StdoutWriter): output writer.
def ListFileEntries(self, base_path_specs, output_writer):
  """Lists file entries in the base path specifications.

  Args:
    base_path_specs (list[dfvfs.PathSpec]): source path specifications.
    output_writer (StdoutWriter): output writer.
  """
  for base_path_spec in base_path_specs:
    file_system = resolver.Resolver.OpenFileSystem(base_path_spec)
    file_entry = resolver.Resolver.OpenFileEntry(base_path_spec)
    if file_entry is None:
      logging.warning(
          'Unable to open base path specification:\n{0:s}'.format(
              base_path_spec.comparable))
      # NOTE(review): this aborts processing of any remaining base path
      # specifications; confirm whether a continue was intended.
      return

    self._ListFileEntry(file_system, file_entry, '', output_writer)
391,775
Writes the file path to file. Args: path (str): path of the file.
def WriteFileEntry(self, path):
  """Writes the file path to file.

  Args:
    path (str): path of the file.
  """
  line = '{0:s}\n'.format(path)
  self._file_object.write(self._EncodeString(line))
391,776
Initializes a file system. Args: resolver_context (Context): a resolver context.
def __init__(self, resolver_context):
  """Initializes a fake file system.

  Args:
    resolver_context (Context): a resolver context.
  """
  super(FakeFileSystem, self).__init__(resolver_context)
  self._paths = {}

  # A fake file system always contains a root directory entry.
  self.AddFileEntry(
      self.LOCATION_ROOT,
      file_entry_type=definitions.FILE_ENTRY_TYPE_DIRECTORY)
391,777
Retrieves the data associated to a path. Args: path (str): path of the file entry. Returns: bytes: data or None if not available.
def GetDataByPath(self, path):
  """Retrieves the data associated to a path.

  Args:
    path (str): path of the file entry.

  Returns:
    bytes: data or None if not available.
  """
  # Each entry in _paths is a (file_entry_type, data) tuple.
  _, path_data = self._paths.get(path, (None, None))
  return path_data
391,779
Retrieves a file entry for a path. Args: path (str): path of the file entry. Returns: FakeFileEntry: a file entry or None if not available.
def GetFileEntryByPath(self, path):
  """Retrieves a file entry for a path.

  Args:
    path (str): path of the file entry.

  Returns:
    FakeFileEntry: a file entry or None if not available.
  """
  if path is None:
    return None

  file_entry_type, _ = self._paths.get(path, (None, None))
  if not file_entry_type:
    return None

  path_spec = fake_path_spec.FakePathSpec(location=path)
  return fake_file_entry.FakeFileEntry(
      self._resolver_context, self, path_spec,
      file_entry_type=file_entry_type)
391,780
Initializes a file system. Args: resolver_context (Context): resolver context. Raises: ValueError: if a derived file system class does not define a type indicator.
def __init__(self, resolver_context):
  """Initializes a file system.

  Args:
    resolver_context (Context): resolver context.

  Raises:
    ValueError: if a derived file system class does not define a type
        indicator.
  """
  super(FileSystem, self).__init__()
  self._is_cached = False
  self._is_open = False
  self._path_spec = None
  self._resolver_context = resolver_context

  # Every concrete subclass must define TYPE_INDICATOR.
  if not getattr(self, 'TYPE_INDICATOR', None):
    raise ValueError('Missing type indicator.')
391,782
Determines the basename of the path. Args: path (str): path. Returns: str: basename of the path.
def BasenamePath(self, path):
  """Determines the basename of the path.

  Args:
    path (str): path.

  Returns:
    str: basename of the path.
  """
  separator = self.PATH_SEPARATOR
  # Ignore a single trailing separator.
  if path.endswith(separator):
    path = path[:-1]
  return path.rpartition(separator)[2]
391,783
Determines the directory name of the path. The file system root is represented by an empty string. Args: path (str): path. Returns: str: directory name of the path or None.
def DirnamePath(self, path):
  """Determines the directory name of the path.

  The file system root is represented by an empty string.

  Args:
    path (str): path.

  Returns:
    str: directory name of the path or None.
  """
  separator = self.PATH_SEPARATOR
  if path.endswith(separator):
    path = path[:-1]
  if not path:
    return None
  return path.rpartition(separator)[0]
391,785
Retrieves a data stream for a path specification. Args: path_spec (PathSpec): a path specification. Returns: DataStream: a data stream or None if not available.
def GetDataStreamByPathSpec(self, path_spec):
  """Retrieves a data stream for a path specification.

  Args:
    path_spec (PathSpec): a path specification.

  Returns:
    DataStream: a data stream or None if not available.
  """
  file_entry = self.GetFileEntryByPathSpec(path_spec)
  if file_entry:
    data_stream_name = getattr(path_spec, 'data_stream', None)
    return file_entry.GetDataStream(data_stream_name)
  return None
391,786
Retrieves a file-like object for a path specification. Args: path_spec (PathSpec): a path specification. Returns: FileIO: a file-like object or None if not available.
def GetFileObjectByPathSpec(self, path_spec):
  """Retrieves a file-like object for a path specification.

  Args:
    path_spec (PathSpec): a path specification.

  Returns:
    FileIO: a file-like object or None if not available.
  """
  file_entry = self.GetFileEntryByPathSpec(path_spec)
  if file_entry:
    return file_entry.GetFileObject()
  return None
391,787
Determines the path segment and suffix of the path. None is returned if the path does not start with the base path and an empty string if the path exactly matches the base path. Args: base_path (str): base path. path (str): path. Returns: tuple[str, str]: path segment and suffix string.
def GetPathSegmentAndSuffix(self, base_path, path):
  """Determines the path segment and suffix of the path.

  None is returned if the path does not start with the base path and
  an empty string if the path exactly matches the base path.

  Args:
    base_path (str): base path.
    path (str): path.

  Returns:
    tuple[str, str]: path segment and suffix string.
  """
  if path is None or base_path is None or not path.startswith(base_path):
    return None, None

  path_index = len(base_path)
  # Skip the separator between base path and the remainder, unless the
  # base path already ends in one (or is the empty root).
  if base_path and not base_path.endswith(self.PATH_SEPARATOR):
    path_index += 1

  if path_index == len(path):
    return '', ''

  remainder = path[path_index:]
  path_segment, _, suffix = remainder.partition(self.PATH_SEPARATOR)
  return path_segment, suffix
391,788
Joins the path segments into a path. Args: path_segments (list[str]): path segments. Returns: str: joined path segments prefixed with the path separator.
def JoinPath(self, path_segments):
  """Joins the path segments into a path.

  Args:
    path_segments (list[str]): path segments.

  Returns:
    str: joined path segments prefixed with the path separator.
  """
  separator = self.PATH_SEPARATOR

  # Split every segment on the separator, flatten the result and drop
  # empty parts, so multiple successive separators collapse into one.
  flattened = []
  for segment in path_segments:
    flattened.extend(part for part in segment.split(separator) if part)

  return separator + separator.join(flattened)
391,789
Retrieves the volume index from the path specification. Args: path_spec (PathSpec): path specification. Returns: int: volume index or None if not available.
def LVMPathSpecGetVolumeIndex(path_spec):
  """Retrieves the volume index from the path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    int: volume index or None if not available.
  """
  volume_index = getattr(path_spec, 'volume_index', None)
  if volume_index is None:
    # Fall back to a location of the form "/lvm<number>", where the number
    # is 1-based.
    location = getattr(path_spec, 'location', None)
    if location is None or not location.startswith('/lvm'):
      return None

    try:
      volume_index = int(location[4:], 10) - 1
    except ValueError:
      return None

  if volume_index is None or volume_index < 0:
    return None

  return volume_index
391,799
Retrieves the path specification resolver helper for the specified type. Args: type_indicator (str): type indicator. Returns: ResolverHelper: a resolver helper.
def _GetResolverHelper(cls, type_indicator):
  """Retrieves the path specification resolver helper for the specified type.

  Args:
    type_indicator (str): type indicator.

  Returns:
    ResolverHelper: a resolver helper.
  """
  if not cls._resolver_helpers_manager:
    # Delay the import of the resolver helpers manager to prevent circular
    # imports.
    from dfvfs.resolver_helpers import manager

    cls._resolver_helpers_manager = manager.ResolverHelperManager

  return cls._resolver_helpers_manager.GetHelper(type_indicator)
391,800
Opens a file entry object defined by path specification. Args: path_spec_object (PathSpec): path specification. resolver_context (Optional[Context]): resolver context, where None represents the built in context which is not multi process safe. Returns: FileEntry: file entry or None if the path specification could not be resolved.
def OpenFileEntry(cls, path_spec_object, resolver_context=None):
  """Opens a file entry object defined by path specification.

  Args:
    path_spec_object (PathSpec): path specification.
    resolver_context (Optional[Context]): resolver context, where None
        represents the built in context which is not multi process safe.

  Returns:
    FileEntry: file entry or None if the path specification could not be
        resolved.
  """
  file_system = cls.OpenFileSystem(
      path_spec_object, resolver_context=resolver_context)

  if resolver_context is None:
    resolver_context = cls._resolver_context

  file_entry = file_system.GetFileEntryByPathSpec(path_spec_object)

  # Release the file system so it will be removed from the cache
  # when the file entry is destroyed.
  resolver_context.ReleaseFileSystem(file_system)

  return file_entry
391,801
Decode the encoded data. Args: encoded_data (byte): encoded data. Returns: tuple(bytes, bytes): decoded data and remaining encoded data. Raises: BackEndError: if the base32 stream cannot be decoded.
def Decode(self, encoded_data):
  """Decodes the encoded data.

  Args:
    encoded_data (bytes): encoded data.

  Returns:
    tuple(bytes, bytes): decoded data and remaining encoded data.

  Raises:
    BackEndError: if the base32 stream cannot be decoded.
  """
  try:
    decoded_data = base64.b32decode(encoded_data, casefold=False)
  except (TypeError, binascii.Error) as exception:
    raise errors.BackEndError(
        'Unable to decode base32 stream with error: {0!s}.'.format(
            exception))

  # Base32 decoding consumes the whole input, so there is no remainder.
  return decoded_data, b''
391,804
Opens the BDE volume using the path specification. Args: bde_volume (pybde.volume): BDE volume. path_spec (PathSpec): path specification. file_object (FileIO): file-like object. key_chain (KeyChain): key chain.
def BDEVolumeOpen(bde_volume, path_spec, file_object, key_chain):
  """Opens the BDE volume using the path specification.

  Args:
    bde_volume (pybde.volume): BDE volume.
    path_spec (PathSpec): path specification.
    file_object (FileIO): file-like object.
    key_chain (KeyChain): key chain.
  """
  # Apply each credential that is present in the key chain before opening.
  password = key_chain.GetCredential(path_spec, 'password')
  if password:
    bde_volume.set_password(password)

  recovery_password = key_chain.GetCredential(path_spec, 'recovery_password')
  if recovery_password:
    bde_volume.set_recovery_password(recovery_password)

  startup_key = key_chain.GetCredential(path_spec, 'startup_key')
  if startup_key:
    bde_volume.read_startup_key(startup_key)

  bde_volume.open_file_object(file_object)
391,805
Retrieves the decompressor object for a specific compression method. Args: compression_method (str): compression method identifier. Returns: Decompressor: decompressor or None if the compression method does not exists.
def GetDecompressor(cls, compression_method):
  """Retrieves the decompressor object for a specific compression method.

  Args:
    compression_method (str): compression method identifier.

  Returns:
    Decompressor: decompressor or None if the compression method does not
        exist.
  """
  # Compression method lookup is case insensitive.
  decompressor = cls._decompressors.get(compression_method.lower(), None)
  if decompressor:
    return decompressor()
  return None
391,806
Registers a decompressor for a specific compression method. Args: decompressor (type): decompressor class. Raises: KeyError: if the corresponding decompressor is already set.
def RegisterDecompressor(cls, decompressor):
  """Registers a decompressor for a specific compression method.

  Args:
    decompressor (type): decompressor class.

  Raises:
    KeyError: if the corresponding decompressor is already set.
  """
  # Keys are the lower-cased compression method identifiers.
  compression_method = decompressor.COMPRESSION_METHOD.lower()
  if compression_method in cls._decompressors:
    raise KeyError(
        'Decompressor for compression method: {0:s} already set.'.format(
            decompressor.COMPRESSION_METHOD))

  cls._decompressors[compression_method] = decompressor
391,807
Initializes a file-like object. Args: resolver_context (Context): resolver context.
def __init__(self, resolver_context):
  """Initializes a file-like object.

  Args:
    resolver_context (Context): resolver context.
  """
  super(FileIO, self).__init__()
  self._is_cached = False
  self._is_open = False
  self._resolver_context = resolver_context
391,808
Initializes a file-like object. If the file-like object is chained do not separately use the parent file-like object. Args: resolver_context (Context): resolver context. encryption_method (Optional[str]): method used to the encrypt the data. file_object (Optional[FileIO]): parent file-like object. Raises: ValueError: if file_object provided but encryption_method is not.
def __init__(
    self, resolver_context, encryption_method=None, file_object=None):
  """Initializes a file-like object.

  If the file-like object is chained do not separately use the parent
  file-like object.

  Args:
    resolver_context (Context): resolver context.
    encryption_method (Optional[str]): method used to the encrypt the data.
    file_object (Optional[FileIO]): parent file-like object.

  Raises:
    ValueError: if file_object provided but encryption_method is not.
  """
  if file_object is not None and encryption_method is None:
    raise ValueError(
        'File-like object provided without corresponding encryption method.')

  super(EncryptedStream, self).__init__(resolver_context)
  self._current_offset = 0
  self._decrypted_data = b''
  self._decrypted_data_offset = 0
  self._decrypted_data_size = 0
  self._decrypted_stream_size = None
  self._decrypter = None
  self._encrypted_data = b''
  self._encryption_method = encryption_method
  self._file_object = file_object
  # Remember whether the parent file object was injected, so _Open() knows
  # it does not need to resolve one from a path specification.
  self._file_object_set_in_init = bool(file_object)
  self._path_spec = None
  self._realign_offset = True
391,811
Opens the file-like object. Args: path_spec (Optional[PathSpec]): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the file-like object could not be opened. OSError: if the file-like object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
def _Open(self, path_spec=None, mode='rb'):
  """Opens the file-like object.

  Args:
    path_spec (Optional[PathSpec]): path specification.
    mode (Optional[str]): file access mode.

  Raises:
    AccessError: if the access to open the file was denied.
    IOError: if the file-like object could not be opened.
    OSError: if the file-like object could not be opened.
    PathSpecError: if the path specification is incorrect.
    ValueError: if the path specification is invalid.
  """
  if not self._file_object_set_in_init and not path_spec:
    raise ValueError('Missing path specification.')

  if not self._file_object_set_in_init:
    if not path_spec.HasParent():
      raise errors.PathSpecError(
          'Unsupported path specification without parent.')

    self._encryption_method = getattr(path_spec, 'encryption_method', None)
    if self._encryption_method is None:
      raise errors.PathSpecError(
          'Path specification missing encryption method.')

    self._file_object = resolver.Resolver.OpenFileObject(
        path_spec.parent, resolver_context=self._resolver_context)

  self._path_spec = path_spec
391,815
Aligns the encrypted file with the decrypted data offset. Args: decrypted_data_offset (int): decrypted data offset.
def _AlignDecryptedDataOffset(self, decrypted_data_offset):
  """Aligns the encrypted file with the decrypted data offset.

  Decryption can only run from the start of the stream, so the stream is
  re-decrypted from the beginning until the requested offset falls inside
  the current decrypted buffer.

  Args:
    decrypted_data_offset (int): decrypted data offset.
  """
  self._file_object.seek(0, os.SEEK_SET)

  self._decrypter = self._GetDecrypter()
  self._decrypted_data = b''

  encrypted_data_offset = 0
  encrypted_data_size = self._file_object.get_size()

  while encrypted_data_offset < encrypted_data_size:
    read_count = self._ReadEncryptedData(self._ENCRYPTED_DATA_BUFFER_SIZE)
    if read_count == 0:
      break

    encrypted_data_offset += read_count

    if decrypted_data_offset < self._decrypted_data_size:
      # The target offset lies within the current decrypted buffer.
      self._decrypted_data_offset = decrypted_data_offset
      break

    decrypted_data_offset -= self._decrypted_data_size
391,816
Reads encrypted data from the file-like object. Args: read_size (int): number of bytes of encrypted data to read. Returns: int: number of bytes of encrypted data read.
def _ReadEncryptedData(self, read_size): encrypted_data = self._file_object.read(read_size) read_count = len(encrypted_data) self._encrypted_data = b''.join([self._encrypted_data, encrypted_data]) self._decrypted_data, self._encrypted_data = ( self._decrypter.Decrypt(self._encrypted_data)) self._decrypted_data_size = len(self._decrypted_data) return read_count
391,817
Sets the decrypted stream size. This function is used to set the decrypted stream size if it can be determined separately. Args: decrypted_stream_size (int): size of the decrypted stream in bytes. Raises: IOError: if the file-like object is already open. OSError: if the file-like object is already open. ValueError: if the decrypted stream size is invalid.
def SetDecryptedStreamSize(self, decrypted_stream_size):
  """Sets the decrypted stream size.

  This function is used to set the decrypted stream size if it can be
  determined separately.

  Args:
    decrypted_stream_size (int): size of the decrypted stream in bytes.

  Raises:
    IOError: if the file-like object is already open.
    OSError: if the file-like object is already open.
    ValueError: if the decrypted stream size is invalid.
  """
  if self._is_open:
    raise IOError('Already open.')

  if decrypted_stream_size < 0:
    raise ValueError((
        'Invalid decrypted stream size: {0:d} value out of '
        'bounds.').format(decrypted_stream_size))

  self._decrypted_stream_size = decrypted_stream_size
391,818
Reads a byte string from the file-like object at the current offset. The function will read a byte string of the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]): number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if the read failed. OSError: if the read failed.
def read(self, size=None):
  """Reads a byte string from the file-like object at the current offset.

  The function will read a byte string of the specified size or all of the
  remaining data if no size was specified.

  Args:
    size (Optional[int]): number of bytes to read, where None is all
        remaining data.

  Returns:
    bytes: data read.

  Raises:
    IOError: if the read failed.
    OSError: if the read failed.
  """
  if not self._is_open:
    raise IOError('Not opened.')

  if self._current_offset < 0:
    raise IOError(
        'Invalid current offset: {0:d} value less than zero.'.format(
            self._current_offset))

  if self._decrypted_stream_size is None:
    self._decrypted_stream_size = self._GetDecryptedStreamSize()

  if self._decrypted_stream_size < 0:
    raise IOError('Invalid decrypted stream size.')

  if self._current_offset >= self._decrypted_stream_size:
    return b''

  # After a seek, re-decrypt from the start until the current offset falls
  # inside the decrypted buffer.
  if self._realign_offset:
    self._AlignDecryptedDataOffset(self._current_offset)
    self._realign_offset = False

  if size is None:
    size = self._decrypted_stream_size
  if self._current_offset + size > self._decrypted_stream_size:
    size = self._decrypted_stream_size - self._current_offset

  decrypted_data = b''

  if size == 0:
    return decrypted_data

  # Consume whole decrypted buffers while more data is needed than the
  # current buffer holds.
  while size > self._decrypted_data_size:
    decrypted_data = b''.join([
        decrypted_data,
        self._decrypted_data[self._decrypted_data_offset:]])

    remaining_decrypted_data_size = (
        self._decrypted_data_size - self._decrypted_data_offset)

    self._current_offset += remaining_decrypted_data_size
    size -= remaining_decrypted_data_size

    if self._current_offset >= self._decrypted_stream_size:
      break

    read_count = self._ReadEncryptedData(self._ENCRYPTED_DATA_BUFFER_SIZE)
    self._decrypted_data_offset = 0
    if read_count == 0:
      break

  # Take the remainder from the current decrypted buffer.
  if size > 0:
    slice_start_offset = self._decrypted_data_offset
    slice_end_offset = slice_start_offset + size

    decrypted_data = b''.join([
        decrypted_data,
        self._decrypted_data[slice_start_offset:slice_end_offset]])

    self._decrypted_data_offset += size
    self._current_offset += size

  return decrypted_data
391,819