text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetTypeIndicators(
    cls, signature_scanner, specification_store, remainder_list, path_spec,
    resolver_context=None):
  """Determines if a file contains supported format types.

  Args:
    signature_scanner (pysigscan.scanner): signature scanner.
    specification_store (FormatSpecificationStore): specification store.
    remainder_list (list[AnalyzerHelper]): remaining analyzer helpers that
        do not have a format specification.
    path_spec (PathSpec): path specification.
    resolver_context (Optional[Context]): resolver context, where None
        represents the built-in context which is not multi process safe.

  Returns:
    list[str]: supported format type indicators.
  """
  type_indicators = []

  file_object = resolver.Resolver.OpenFileObject(
      path_spec, resolver_context=resolver_context)
  scan_state = pysigscan.scan_state()

  try:
    signature_scanner.scan_file_object(scan_state, file_object)

    # Map every signature hit back to its format specification and collect
    # each type indicator only once.
    for scan_result in iter(scan_state.scan_results):
      format_specification = (
          specification_store.GetSpecificationBySignature(
              scan_result.identifier))
      if format_specification.identifier not in type_indicators:
        type_indicators.append(format_specification.identifier)

    # Helpers without a signature must inspect the file object themselves.
    for analyzer_helper in remainder_list:
      result = analyzer_helper.AnalyzeFileObject(file_object)
      if result is not None:
        type_indicators.append(result)

  finally:
    file_object.close()

  return type_indicators
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def DeregisterHelper(cls, analyzer_helper):
  """Deregisters a format analyzer helper.

  Args:
    analyzer_helper (AnalyzerHelper): analyzer helper.

  Raises:
    KeyError: if analyzer helper object is not set for the corresponding
        type indicator.
  """
  type_indicator = analyzer_helper.type_indicator
  if type_indicator not in cls._analyzer_helpers:
    raise KeyError(
        'Analyzer helper object not set for type indicator: {0:s}.'.format(
            type_indicator))

  # Flush cached information derived from the registered helper before
  # removing it from the registry.
  registered_helper = cls._analyzer_helpers[type_indicator]
  cls._FlushCache(registered_helper.format_categories)
  del cls._analyzer_helpers[type_indicator]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetArchiveTypeIndicators(cls, path_spec, resolver_context=None):
  """Determines if a file contains supported archive types.

  Args:
    path_spec (PathSpec): path specification.
    resolver_context (Optional[Context]): resolver context, where None
        represents the built-in context which is not multi process safe.

  Returns:
    list[str]: supported format type indicators.
  """
  # Lazily build the specification store, remainder list and scanner the
  # first time they are needed.
  if cls._archive_remainder_list is None or cls._archive_store is None:
    store, remainder = cls._GetSpecificationStore(
        definitions.FORMAT_CATEGORY_ARCHIVE)
    cls._archive_store = store
    cls._archive_remainder_list = remainder

  if cls._archive_scanner is None:
    cls._archive_scanner = cls._GetSignatureScanner(cls._archive_store)

  return cls._GetTypeIndicators(
      cls._archive_scanner, cls._archive_store, cls._archive_remainder_list,
      path_spec, resolver_context=resolver_context)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetCompressedStreamTypeIndicators(cls, path_spec, resolver_context=None):
  """Determines if a file contains supported compressed stream types.

  Args:
    path_spec (PathSpec): path specification.
    resolver_context (Optional[Context]): resolver context, where None
        represents the built-in context which is not multi process safe.

  Returns:
    list[str]: supported format type indicators.
  """
  # Lazily build the specification store, remainder list and scanner the
  # first time they are needed.
  if (cls._compressed_stream_remainder_list is None or
      cls._compressed_stream_store is None):
    store, remainder = cls._GetSpecificationStore(
        definitions.FORMAT_CATEGORY_COMPRESSED_STREAM)
    cls._compressed_stream_store = store
    cls._compressed_stream_remainder_list = remainder

  if cls._compressed_stream_scanner is None:
    cls._compressed_stream_scanner = cls._GetSignatureScanner(
        cls._compressed_stream_store)

  return cls._GetTypeIndicators(
      cls._compressed_stream_scanner, cls._compressed_stream_store,
      cls._compressed_stream_remainder_list, path_spec,
      resolver_context=resolver_context)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetFileSystemTypeIndicators(cls, path_spec, resolver_context=None):
  """Determines if a file contains supported file system types.

  Args:
    path_spec (PathSpec): path specification.
    resolver_context (Optional[Context]): resolver context, where None
        represents the built-in context which is not multi process safe.

  Returns:
    list[str]: supported format type indicators.
  """
  # Lazily build the specification store, remainder list and scanner the
  # first time they are needed.
  if (cls._file_system_remainder_list is None or
      cls._file_system_store is None):
    store, remainder = cls._GetSpecificationStore(
        definitions.FORMAT_CATEGORY_FILE_SYSTEM)
    cls._file_system_store = store
    cls._file_system_remainder_list = remainder

  if cls._file_system_scanner is None:
    cls._file_system_scanner = cls._GetSignatureScanner(
        cls._file_system_store)

  return cls._GetTypeIndicators(
      cls._file_system_scanner, cls._file_system_store,
      cls._file_system_remainder_list, path_spec,
      resolver_context=resolver_context)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetStorageMediaImageTypeIndicators(cls, path_spec, resolver_context=None):
  """Determines if a file contains supported storage media image types.

  Args:
    path_spec (PathSpec): path specification.
    resolver_context (Optional[Context]): resolver context, where None
        represents the built-in context which is not multi process safe.

  Returns:
    list[str]: supported format type indicators.
  """
  # Lazily build the specification store, remainder list and scanner the
  # first time they are needed.
  if (cls._storage_media_image_remainder_list is None or
      cls._storage_media_image_store is None):
    store, remainder = cls._GetSpecificationStore(
        definitions.FORMAT_CATEGORY_STORAGE_MEDIA_IMAGE)
    cls._storage_media_image_store = store
    cls._storage_media_image_remainder_list = remainder

  if cls._storage_media_image_scanner is None:
    cls._storage_media_image_scanner = cls._GetSignatureScanner(
        cls._storage_media_image_store)

  return cls._GetTypeIndicators(
      cls._storage_media_image_scanner, cls._storage_media_image_store,
      cls._storage_media_image_remainder_list, path_spec,
      resolver_context=resolver_context)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetVolumeSystemTypeIndicators(cls, path_spec, resolver_context=None):
  """Determines if a file contains supported volume system types.

  Args:
    path_spec (PathSpec): path specification.
    resolver_context (Optional[Context]): resolver context, where None
        represents the built-in context which is not multi process safe.

  Returns:
    list[str]: supported format type indicators.
  """
  # Lazily build the specification store, remainder list and scanner the
  # first time they are needed.
  if (cls._volume_system_remainder_list is None or
      cls._volume_system_store is None):
    store, remainder = cls._GetSpecificationStore(
        definitions.FORMAT_CATEGORY_VOLUME_SYSTEM)
    cls._volume_system_store = store
    cls._volume_system_remainder_list = remainder

  if cls._volume_system_scanner is None:
    cls._volume_system_scanner = cls._GetSignatureScanner(
        cls._volume_system_store)

  return cls._GetTypeIndicators(
      cls._volume_system_scanner, cls._volume_system_store,
      cls._volume_system_remainder_list, path_spec,
      resolver_context=resolver_context)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetFsType(self):
  """Retrieves the file system type.

  Returns:
    pytsk3.TSK_FS_TYPE_ENUM: file system type.
  """
  # Determine the type once and cache it on the instance.
  if self._tsk_fs_type is None:
    self._tsk_fs_type = pytsk3.TSK_FS_TYPE_UNSUPP
    if self._tsk_file_system and hasattr(self._tsk_file_system, 'info'):
      self._tsk_fs_type = getattr(
          self._tsk_file_system.info, 'ftype', pytsk3.TSK_FS_TYPE_UNSUPP)

  return self._tsk_fs_type
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetTSKFileByPathSpec(self, path_spec):
  """Retrieves the SleuthKit file object for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    pytsk3.File: TSK file.

  Raises:
    PathSpecError: if the path specification is missing inode and location.
  """
  inode = getattr(path_spec, 'inode', None)
  location = getattr(path_spec, 'location', None)

  # Opening a file by inode number is faster than opening a file by location.
  if inode is not None:
    return self._tsk_file_system.open_meta(inode=inode)

  if location is not None:
    return self._tsk_file_system.open(location)

  raise errors.PathSpecError(
      'Path specification missing inode and location.')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def IsHFS(self):
  """Determines if the file system is HFS, HFS+ or HFSX.

  Returns:
    bool: True if the file system is HFS.
  """
  return self.GetFsType() in (
      pytsk3.TSK_FS_TYPE_HFS, pytsk3.TSK_FS_TYPE_HFS_DETECT)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def IsNTFS(self):
  """Determines if the file system is NTFS.

  Returns:
    bool: True if the file system is NTFS.
  """
  return self.GetFsType() in (
      pytsk3.TSK_FS_TYPE_NTFS, pytsk3.TSK_FS_TYPE_NTFS_DETECT)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Close(self):
  """Closes the database file object.

  Raises:
    IOError: if the close failed.
    OSError: if the close failed.
  """
  if self._connection:
    self._cursor = None
    self._connection.close()
    self._connection = None

  # TODO: move this to a central temp file manager and have it track errors.
  # https://github.com/log2timeline/dfvfs/issues/92
  temp_file_path, self._temp_file_path = self._temp_file_path, ''
  try:
    os.remove(temp_file_path)
  except (IOError, OSError):
    # Best effort: the temporary copy may already be gone.
    pass
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def HasColumn(self, table_name, column_name):
  """Determines if a specific column exists.

  Args:
    table_name (str): name of the table.
    column_name (str): name of the column.

  Returns:
    bool: True if the column exists.

  Raises:
    IOError: if the database file is not opened.
    OSError: if the database file is not opened.
  """
  if not self._connection:
    raise IOError('Not opened.')

  if not column_name:
    return False

  table_name = table_name.lower()
  column_names = self._column_names_per_table.get(table_name, None)
  if column_names is None:
    # First lookup for this table: query the schema once and cache the
    # lower-cased column names.
    column_names = []
    self._cursor.execute(self._HAS_COLUMN_QUERY.format(table_name))
    for row in self._cursor.fetchall():
      name = row[1]
      if not name:
        continue
      if isinstance(name, bytes):
        name = name.decode('utf-8')
      column_names.append(name.lower())

    self._column_names_per_table[table_name] = column_names

  return column_name.lower() in column_names
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Open(self, file_object):
  """Opens the database file object.

  Since pysqlite does not provide an exclusive read-only mode and cannot
  interact with a file-like object directly, the contents are copied into
  a temporary file.

  Args:
    file_object (FileIO): file-like object.

  Raises:
    IOError: if the SQLite database signature does not match.
    OSError: if the SQLite database signature does not match.
    ValueError: if the file-like object is invalid.
  """
  if not file_object:
    raise ValueError('Missing file-like object.')

  # Check the header signature before making the temporary copy.
  file_object.seek(0, os.SEEK_SET)
  chunk = file_object.read(len(self._HEADER_SIGNATURE))
  if chunk != self._HEADER_SIGNATURE:
    file_object.close()
    raise IOError('Unsupported SQLite database signature.')

  with tempfile.NamedTemporaryFile(delete=False) as temp_file:
    self._temp_file_path = temp_file.name
    while chunk:
      temp_file.write(chunk)
      chunk = file_object.read(self._COPY_BUFFER_SIZE)

  self._connection = sqlite3.connect(self._temp_file_path)
  self._connection.text_factory = bytes
  self._cursor = self._connection.cursor()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Query(self, query, parameters=None):
  """Queries the database file.

  Args:
    query (str): SQL query.
    parameters (Optional[dict|tuple]): query parameters.

  Returns:
    list[sqlite3.Row]: rows resulting from the query.
  """
  # TODO: catch Warning and return None.

  # Note that pysqlite does not support a parameters value of None, hence
  # parameters cannot be passed as a keyword argument here.
  arguments = [query]
  if parameters:
    arguments.append(parameters)

  self._cursor.execute(*arguments)
  return self._cursor.fetchall()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetFileSystemCacheIdentifier(self, path_spec):
"""Determines the file system cache identifier for the path specification. Args: path_spec (PathSpec):
path specification. Returns: str: identifier of the VFS object. """ |
string_parts = []
string_parts.append(getattr(path_spec.parent, 'comparable', ''))
string_parts.append('type: {0:s}'.format(path_spec.type_indicator))
return ''.join(string_parts) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def CacheFileObject(self, path_spec, file_object):
  """Caches a file-like object based on a path specification.

  Args:
    path_spec (PathSpec): path specification.
    file_object (FileIO): file-like object.
  """
  # The comparable form of the path specification serves as the cache key.
  identifier = path_spec.comparable
  self._file_object_cache.CacheObject(identifier, file_object)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def CacheFileSystem(self, path_spec, file_system):
  """Caches a file system object based on a path specification.

  Args:
    path_spec (PathSpec): path specification.
    file_system (FileSystem): file system object.
  """
  cache_identifier = self._GetFileSystemCacheIdentifier(path_spec)
  self._file_system_cache.CacheObject(cache_identifier, file_system)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ForceRemoveFileObject(self, path_spec):
  """Forces the removal of a file-like object based on a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    bool: True if the file-like object was cached.
  """
  cache_value = self._file_object_cache.GetCacheValue(path_spec.comparable)
  if not cache_value:
    return False

  # Closing the cached object drops references; keep closing until the
  # cache value no longer holds any.
  while not cache_value.IsDereferenced():
    cache_value.vfs_object.close()

  return True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetFileObjectReferenceCount(self, path_spec):
  """Retrieves the reference count of a cached file-like object.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    int: reference count or None if there is no file-like object for
        the corresponding path specification cached.
  """
  cache_value = self._file_object_cache.GetCacheValue(path_spec.comparable)
  if cache_value:
    return cache_value.reference_count
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetFileSystem(self, path_spec):
  """Retrieves a file system object defined by path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    FileSystem: a file system object or None if not cached.
  """
  cache_identifier = self._GetFileSystemCacheIdentifier(path_spec)
  return self._file_system_cache.GetObject(cache_identifier)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetFileSystemReferenceCount(self, path_spec):
  """Retrieves the reference count of a cached file system object.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    int: reference count or None if there is no file system object for
        the corresponding path specification cached.
  """
  cache_identifier = self._GetFileSystemCacheIdentifier(path_spec)
  cache_value = self._file_system_cache.GetCacheValue(cache_identifier)
  if cache_value:
    return cache_value.reference_count
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GrabFileSystem(self, path_spec):
  """Grabs a cached file system object defined by path specification.

  Args:
    path_spec (PathSpec): path specification.
  """
  cache_identifier = self._GetFileSystemCacheIdentifier(path_spec)
  self._file_system_cache.GrabObject(cache_identifier)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ReleaseFileObject(self, file_object):
  """Releases a cached file-like object.

  Args:
    file_object (FileIO): file-like object.

  Returns:
    bool: True if the file-like object can be closed.

  Raises:
    PathSpecError: if the path specification is incorrect.
    RuntimeError: if the file-like object is not cached or an inconsistency
        is detected in the cache.
  """
  identifier, cache_value = self._file_object_cache.GetCacheValueByObject(
      file_object)

  if not identifier:
    raise RuntimeError('Object not cached.')

  if not cache_value:
    raise RuntimeError('Invalid cache value.')

  self._file_object_cache.ReleaseObject(identifier)

  # Only evict the cache entry once nothing references it anymore.
  can_close = cache_value.IsDereferenced()
  if can_close:
    self._file_object_cache.RemoveObject(identifier)

  return can_close
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ReleaseFileSystem(self, file_system):
  """Releases a cached file system object.

  Args:
    file_system (FileSystem): file system object.

  Returns:
    bool: True if the file system object can be closed.

  Raises:
    PathSpecError: if the path specification is incorrect.
    RuntimeError: if the file system object is not cached or an
        inconsistency is detected in the cache.
  """
  identifier, cache_value = self._file_system_cache.GetCacheValueByObject(
      file_system)

  if not identifier:
    raise RuntimeError('Object not cached.')

  if not cache_value:
    raise RuntimeError('Invalid cache value.')

  self._file_system_cache.ReleaseObject(identifier)

  # Only evict the cache entry once nothing references it anymore.
  can_close = cache_value.IsDereferenced()
  if can_close:
    self._file_system_cache.RemoveObject(identifier)

  return can_close
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetDecodedStreamSize(self):
"""Retrieves the decoded stream size. Returns: int: decoded stream size. """ |
self._file_object.seek(0, os.SEEK_SET)
self._decoder = self._GetDecoder()
self._decoded_data = b''
encoded_data_offset = 0
encoded_data_size = self._file_object.get_size()
decoded_stream_size = 0
while encoded_data_offset < encoded_data_size:
read_count = self._ReadEncodedData(self._ENCODED_DATA_BUFFER_SIZE)
if read_count == 0:
break
encoded_data_offset += read_count
decoded_stream_size += self._decoded_data_size
return decoded_stream_size |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _AlignDecodedDataOffset(self, decoded_data_offset):
"""Aligns the encoded file with the decoded data offset. Args: decoded_data_offset (int):
decoded data offset. """ |
self._file_object.seek(0, os.SEEK_SET)
self._decoder = self._GetDecoder()
self._decoded_data = b''
encoded_data_offset = 0
encoded_data_size = self._file_object.get_size()
while encoded_data_offset < encoded_data_size:
read_count = self._ReadEncodedData(self._ENCODED_DATA_BUFFER_SIZE)
if read_count == 0:
break
encoded_data_offset += read_count
if decoded_data_offset < self._decoded_data_size:
self._decoded_data_offset = decoded_data_offset
break
decoded_data_offset -= self._decoded_data_size |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ReadEncodedData(self, read_size):
"""Reads encoded data from the file-like object. Args: read_size (int):
number of bytes of encoded data to read. Returns: int: number of bytes of encoded data read. """ |
encoded_data = self._file_object.read(read_size)
read_count = len(encoded_data)
self._encoded_data = b''.join([self._encoded_data, encoded_data])
self._decoded_data, self._encoded_data = (
self._decoder.Decode(self._encoded_data))
self._decoded_data_size = len(self._decoded_data)
return read_count |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def SetDecodedStreamSize(self, decoded_stream_size):
  """Sets the decoded stream size.

  This function is used to set the decoded stream size if it can be
  determined separately.

  Args:
    decoded_stream_size (int): size of the decoded stream in bytes.

  Raises:
    IOError: if the file-like object is already open.
    OSError: if the file-like object is already open.
    ValueError: if the decoded stream size is invalid.
  """
  if self._is_open:
    raise IOError('Already open.')

  if decoded_stream_size < 0:
    raise ValueError((
        'Invalid decoded stream size: {0:d} value out of '
        'bounds.').format(decoded_stream_size))

  self._decoded_stream_size = decoded_stream_size
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _AddAttribute(self, attribute):
"""Adds an attribute. Args: attribute (VolumeAttribute):
a volume attribute. Raises: KeyError: if volume attribute is already set for the corresponding volume attribute identifier. """ |
if attribute.identifier in self._attributes:
raise KeyError((
'Volume attribute object already set for volume attribute '
'identifier: {0:s}.').format(attribute.identifier))
self._attributes[attribute.identifier] = attribute |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetAttribute(self, identifier):
  """Retrieves a specific attribute.

  Args:
    identifier (str): identifier of the attribute within the volume.

  Returns:
    VolumeAttribute: volume attribute or None if not available.
  """
  # Parse on first access only.
  if not self._is_parsed:
    self._Parse()
    self._is_parsed = True

  return self._attributes.get(identifier, None)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _AddVolume(self, volume):
"""Adds a volume. Args: volume (Volume):
a volume. Raises: KeyError: if volume is already set for the corresponding volume identifier. """ |
if volume.identifier in self._volumes:
raise KeyError(
'Volume object already set for volume identifier: {0:s}'.format(
volume.identifier))
self._volumes[volume.identifier] = volume
self._volume_identifiers.append(volume.identifier) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetSectionByIndex(self, section_index):
  """Retrieves a specific section based on the index.

  Args:
    section_index (int): index of the section.

  Returns:
    VolumeExtent: a volume extent or None if not available.
  """
  # Parse on first access only.
  if not self._is_parsed:
    self._Parse()
    self._is_parsed = True

  if 0 <= section_index < len(self._sections):
    return self._sections[section_index]
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetVolumeByIdentifier(self, volume_identifier):
  """Retrieves a specific volume based on the identifier.

  Args:
    volume_identifier (str): identifier of the volume within the volume
        system.

  Returns:
    Volume: a volume.
  """
  # Parse on first access only.
  if not self._is_parsed:
    self._Parse()
    self._is_parsed = True

  return self._volumes[volume_identifier]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetVolumeByIndex(self, volume_index):
  """Retrieves a specific volume based on the index.

  Args:
    volume_index (int): index of the volume.

  Returns:
    Volume: a volume or None if not available.
  """
  # Parse on first access only.
  if not self._is_parsed:
    self._Parse()
    self._is_parsed = True

  if 0 <= volume_index < len(self._volume_identifiers):
    volume_identifier = self._volume_identifiers[volume_index]
    return self._volumes[volume_identifier]
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetUncompressedStreamSize(self):
"""Retrieves the uncompressed stream size. Returns: int: uncompressed stream size. """ |
self._file_object.seek(0, os.SEEK_SET)
self._decompressor = self._GetDecompressor()
self._uncompressed_data = b''
compressed_data_offset = 0
compressed_data_size = self._file_object.get_size()
uncompressed_stream_size = 0
while compressed_data_offset < compressed_data_size:
read_count = self._ReadCompressedData(self._COMPRESSED_DATA_BUFFER_SIZE)
if read_count == 0:
break
compressed_data_offset += read_count
uncompressed_stream_size += self._uncompressed_data_size
return uncompressed_stream_size |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetVShadowStoreByPathSpec(self, path_spec):
  """Retrieves a VSS store for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    pyvshadow.store: a VSS store or None if not available.
  """
  store_index = vshadow.VShadowPathSpecGetStoreIndex(path_spec)
  if store_index is not None:
    return self._vshadow_volume.get_store(store_index)
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def deprecated(function):
  # pylint: disable=invalid-name
  """Decorator to mark functions or methods as deprecated."""

  def IssueDeprecationWarning(*args, **kwargs):
    """Issue a deprecation warning."""
    warnings.simplefilter('default', DeprecationWarning)
    warnings.warn(
        'Call to deprecated function: {0:s}.'.format(function.__name__),
        category=DeprecationWarning, stacklevel=2)
    return function(*args, **kwargs)

  # Preserve the wrapped function's metadata so introspection still works.
  for attribute_name in ('__name__', '__doc__'):
    setattr(
        IssueDeprecationWarning, attribute_name,
        getattr(function, attribute_name))
  IssueDeprecationWarning.__dict__.update(function.__dict__)

  return IssueDeprecationWarning
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _CheckFileEntryType(self, file_entry):
"""Checks the file entry type find specifications. Args: file_entry (FileEntry):
file entry. Returns: bool: True if the file entry matches the find specification, False if not or None if no file entry type specification is defined. """ |
if not self._file_entry_types:
return None
return (
self._CheckIsDevice(file_entry) or self._CheckIsDirectory(file_entry) or
self._CheckIsFile(file_entry) or self._CheckIsLink(file_entry) or
self._CheckIsPipe(file_entry) or self._CheckIsSocket(file_entry)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _CheckIsDevice(self, file_entry):
  """Checks the is_device find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches the find specification, False if
        not.
  """
  if definitions.FILE_ENTRY_TYPE_DEVICE in self._file_entry_types:
    return file_entry.IsDevice()
  return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _CheckIsDirectory(self, file_entry):
  """Checks the is_directory find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches the find specification, False if
        not.
  """
  if definitions.FILE_ENTRY_TYPE_DIRECTORY in self._file_entry_types:
    return file_entry.IsDirectory()
  return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _CheckIsFile(self, file_entry):
  """Checks the is_file find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches the find specification, False if
        not.
  """
  if definitions.FILE_ENTRY_TYPE_FILE in self._file_entry_types:
    return file_entry.IsFile()
  return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _CheckIsLink(self, file_entry):
  """Checks the is_link find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches the find specification, False if
        not.
  """
  if definitions.FILE_ENTRY_TYPE_LINK in self._file_entry_types:
    return file_entry.IsLink()
  return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _CheckIsPipe(self, file_entry):
  """Checks the is_pipe find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches the find specification, False if
        not.
  """
  if definitions.FILE_ENTRY_TYPE_PIPE in self._file_entry_types:
    return file_entry.IsPipe()
  return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _CheckIsSocket(self, file_entry):
  """Checks the is_socket find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches the find specification, False if
        not.
  """
  if definitions.FILE_ENTRY_TYPE_SOCKET in self._file_entry_types:
    return file_entry.IsSocket()
  return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _CheckLocation(self, file_entry, search_depth):
  """Checks the location find specification.

  Compares the file entry name against the location segment at the given
  search depth. Regular expression segments are compiled on first use and
  the compiled pattern is cached back into the segment list.

  Args:
    file_entry (FileEntry): file entry.
    search_depth (int): number of location path segments to compare.

  Returns:
    bool: True if the file entry matches the find specification, False if
        not.
  """
  # Without location segments there is nothing to match against.
  if self._location_segments is None:
    return False
  if search_depth < 0 or search_depth > self._number_of_location_segments:
    return False
  # Note that the root has no entry in the location segments and
  # no name to match.
  if search_depth == 0:
    segment_name = ''
  else:
    segment_name = self._location_segments[search_depth - 1]
    if self._is_regex:
      if isinstance(segment_name, py2to3.STRING_TYPES):
        # Allow '\n' to be matched by '.' and make '\w', '\W', '\b', '\B',
        # '\d', '\D', '\s' and '\S' Unicode safe.
        flags = re.DOTALL | re.UNICODE
        if not self._is_case_sensitive:
          flags |= re.IGNORECASE
        try:
          # Anchor the pattern so the whole segment name must match.
          segment_name = r'^{0:s}$'.format(segment_name)
          segment_name = re.compile(segment_name, flags=flags)
        except sre_constants.error:
          # TODO: set self._location_segments[search_depth - 1] to None ?
          return False
        # Cache the compiled pattern for subsequent comparisons.
        self._location_segments[search_depth - 1] = segment_name
    elif not self._is_case_sensitive:
      # Cache the lower cased segment name for subsequent comparisons.
      segment_name = segment_name.lower()
      self._location_segments[search_depth - 1] = segment_name
  if search_depth > 0:
    if self._is_regex:
      if not segment_name.match(file_entry.name):  # pylint: disable=no-member
        return False
    elif self._is_case_sensitive:
      if segment_name != file_entry.name:
        return False
    elif segment_name != file_entry.name.lower():
      return False
  return True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Matches(self, file_entry, search_depth):
  """Determines if the file entry matches the find specification.

  Args:
    file_entry (FileEntry): file entry.
    search_depth (int): number of location path segments to compare.

  Returns:
    tuple: contains:

      bool: True if the file entry matches the find specification,
          False otherwise.
      bool: True if the location matches, False if not or None if no
          location specified.
  """
  if self._location_segments is None:
    location_match = None
  else:
    location_match = self._CheckLocation(file_entry, search_depth)
    if not location_match or (
        search_depth != self._number_of_location_segments):
      return False, location_match

  for check_function in (self._CheckFileEntryType, self._CheckIsAllocated):
    check_result = check_function(file_entry)
    if check_result is not None and not check_result:
      return False, location_match

  return True, location_match
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def PrepareMatches(self, file_system):
  """Prepares the find specification for matching.

  Splits the location (or location regular expression) into path segments
  using the path segment separator of the given file system.

  Args:
    file_system (FileSystem): file system.
  """
  separator = file_system.PATH_SEPARATOR
  if self._location is not None:
    self._location_segments = self._SplitPath(self._location, separator)
  elif self._location_regex is not None:
    if separator == '\\':
      # The backslash '\' is escaped within a regular expression.
      separator = '\\\\'
    self._location_segments = self._SplitPath(self._location_regex, separator)
  if self._location_segments is not None:
    self._number_of_location_segments = len(self._location_segments)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _FindInFileEntry(self, file_entry, find_specs, search_depth):
  """Searches for matching file entries within the file entry.

  Args:
    file_entry (FileEntry): file entry.
    find_specs (list[FindSpec]): find specifications.
    search_depth (int): number of location path segments to compare.

  Yields:
    PathSpec: path specification of a matching file entry.
  """
  sub_find_specs = []
  for find_spec in find_specs:
    match, location_match = find_spec.Matches(file_entry, search_depth)
    if match:
      yield file_entry.path_spec
    # Keep descending for this find specification unless the location
    # explicitly did not match; None means no location was specified,
    # which is why this must not be written as 'is not False' shortcuts
    # with plain boolean tests.
    # pylint: disable=singleton-comparison
    if location_match != False and not find_spec.AtMaximumDepth(search_depth):
      sub_find_specs.append(find_spec)
  if not sub_find_specs:
    return
  search_depth += 1
  try:
    for sub_file_entry in file_entry.sub_file_entries:
      for matching_path_spec in self._FindInFileEntry(
          sub_file_entry, sub_find_specs, search_depth):
        yield matching_path_spec
  except errors.AccessError:
    # Ignore sub file entries that cannot be accessed.
    pass
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Find(self, find_specs=None):
  """Searches for matching file entries within the file system.

  Args:
    find_specs (list[FindSpec]): find specifications, where None will
        return all allocated file entries.

  Yields:
    PathSpec: path specification of a matching file entry.
  """
  if not find_specs:
    # Create a new list: appending to find_specs would raise AttributeError
    # for the documented default of None and would mutate a caller-supplied
    # empty list.
    find_specs = [FindSpec()]
  for find_spec in find_specs:
    find_spec.PrepareMatches(self._file_system)
  if path_spec_factory.Factory.IsSystemLevelTypeIndicator(
      self._file_system.type_indicator):
    file_entry = self._file_system.GetFileEntryByPathSpec(self._mount_point)
  else:
    file_entry = self._file_system.GetRootFileEntry()
  for matching_path_spec in self._FindInFileEntry(file_entry, find_specs, 0):
    yield matching_path_spec
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetRelativePath(self, path_spec):
  """Returns the relative path based on a resolved path specification.

  The relative path is the location of the upper most path specification.
  The location of the mount point is stripped off if relevant.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    str: corresponding relative path or None if the relative path could not
        be determined.

  Raises:
    PathSpecError: if the path specification is incorrect.
  """
  location = getattr(path_spec, 'location', None)
  if location is None:
    raise errors.PathSpecError('Path specification missing location.')

  is_system_level = path_spec_factory.Factory.IsSystemLevelTypeIndicator(
      self._file_system.type_indicator)
  if is_system_level:
    if not location.startswith(self._mount_point.location):
      raise errors.PathSpecError(
          'Path specification does not contain mount point.')
  else:
    if not hasattr(path_spec, 'parent'):
      raise errors.PathSpecError('Path specification missing parent.')
    if path_spec.parent != self._mount_point:
      raise errors.PathSpecError(
          'Path specification does not contain mount point.')

  path_segments = self._file_system.SplitPath(location)
  if is_system_level:
    # Strip the mount point segments off the path.
    mount_point_segments = self._file_system.SplitPath(
        self._mount_point.location)
    path_segments = path_segments[len(mount_point_segments):]

  separator = self._file_system.PATH_SEPARATOR
  return '{0:s}{1:s}'.format(separator, separator.join(path_segments))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _PromptUserForEncryptedVolumeCredential(
    self, scan_context, locked_scan_node, output_writer):
  """Prompts the user to provide a credential for an encrypted volume.

  Loops until the volume is unlocked or the user selects 'skip'.

  Args:
    scan_context (SourceScannerContext): the source scanner context.
    locked_scan_node (SourceScanNode): the locked scan node.
    output_writer (StdoutWriter): the output writer.
  """
  credentials = credentials_manager.CredentialsManager.GetCredentials(
      locked_scan_node.path_spec)
  # TODO: print volume description.
  if locked_scan_node.type_indicator == (
      definitions.TYPE_INDICATOR_APFS_CONTAINER):
    line = 'Found an APFS encrypted volume.'
  elif locked_scan_node.type_indicator == definitions.TYPE_INDICATOR_BDE:
    line = 'Found a BitLocker encrypted volume.'
  elif locked_scan_node.type_indicator == definitions.TYPE_INDICATOR_FVDE:
    line = 'Found a CoreStorage (FVDE) encrypted volume.'
  else:
    line = 'Found an encrypted volume.'
  output_writer.WriteLine(line)
  credentials_list = list(credentials.CREDENTIALS)
  # 'skip' is offered as a pseudo credential to leave the volume locked.
  credentials_list.append('skip')
  # TODO: check which credentials are available.
  output_writer.WriteLine('Supported credentials:')
  output_writer.WriteLine('')
  for index, name in enumerate(credentials_list):
    output_writer.WriteLine(' {0:d}. {1:s}'.format(index + 1, name))
  output_writer.WriteLine('')
  result = False
  while not result:
    output_writer.WriteString(
        'Select a credential to unlock the volume: ')
    # TODO: add an input reader.
    input_line = sys.stdin.readline()
    input_line = input_line.strip()
    # The user can select a credential by name or by 1-based index.
    if input_line in credentials_list:
      credential_identifier = input_line
    else:
      try:
        credential_identifier = int(input_line, 10)
        credential_identifier = credentials_list[credential_identifier - 1]
      except (IndexError, ValueError):
        output_writer.WriteLine(
            'Unsupported credential: {0:s}'.format(input_line))
        continue
    if credential_identifier == 'skip':
      break
    getpass_string = 'Enter credential data: '
    if sys.platform.startswith('win') and sys.version_info[0] < 3:
      # For Python 2 on Windows getpass (win_getpass) requires an encoded
      # byte string. For Python 3 we need it to be a Unicode string.
      getpass_string = self._EncodeString(getpass_string)
    credential_data = getpass.getpass(getpass_string)
    output_writer.WriteLine('')
    result = self._source_scanner.Unlock(
        scan_context, locked_scan_node.path_spec, credential_identifier,
        credential_data)
    if not result:
      output_writer.WriteLine('Unable to unlock volume.')
      output_writer.WriteLine('')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Analyze(self, source_path, output_writer):
  """Analyzes the source.

  Repeatedly scans the source until the scanner reports no further
  updates, prompting for credentials when locked (encrypted) volumes are
  encountered.

  Args:
    source_path (str): the source path.
    output_writer (StdoutWriter): the output writer.

  Raises:
    RuntimeError: if the source path does not exists, or if the source path
        is not a file or directory, or if the format of or within the source
        file is not supported.
  """
  if not os.path.exists(source_path):
    raise RuntimeError('No such source: {0:s}.'.format(source_path))
  scan_context = source_scanner.SourceScannerContext()
  scan_path_spec = None
  scan_step = 0
  scan_context.OpenSourcePath(source_path)
  while True:
    self._source_scanner.Scan(
        scan_context, auto_recurse=self._auto_recurse,
        scan_path_spec=scan_path_spec)
    if not scan_context.updated:
      break
    if not self._auto_recurse:
      # Without auto recursion every intermediate scan step is written.
      output_writer.WriteScanContext(scan_context, scan_step=scan_step)
      scan_step += 1
    # The source is a directory or file.
    if scan_context.source_type in [
        definitions.SOURCE_TYPE_DIRECTORY, definitions.SOURCE_TYPE_FILE]:
      break
    # The source scanner found a locked volume, e.g. an encrypted volume,
    # and we need a credential to unlock the volume.
    for locked_scan_node in scan_context.locked_scan_nodes:
      self._PromptUserForEncryptedVolumeCredential(
          scan_context, locked_scan_node, output_writer)
    if not self._auto_recurse:
      scan_node = scan_context.GetUnscannedScanNode()
      if not scan_node:
        return
      scan_path_spec = scan_node.path_spec
  if self._auto_recurse:
    output_writer.WriteScanContext(scan_context)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def WriteScanContext(self, scan_context, scan_step=None):
  """Writes the source scanner context to stdout.

  Args:
    scan_context (SourceScannerContext): the source scanner context.
    scan_step (Optional[int]): the scan step, where None represents no step.
  """
  if scan_step is not None:
    print('Scan step: {0:d}'.format(scan_step))
  print('Source type\t\t: {0:s}'.format(scan_context.source_type))
  print('')
  root_scan_node = scan_context.GetRootScanNode()
  self.WriteScanNode(scan_context, root_scan_node)
  print('')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def WriteScanNode(self, scan_context, scan_node, indentation=''):
  """Writes the source scanner node to stdout.

  Recursively writes the node and its sub nodes with increasing
  indentation.

  Args:
    scan_context (SourceScannerContext): the source scanner context.
    scan_node (SourceScanNode): the scan node.
    indentation (Optional[str]): indentation.
  """
  if not scan_node:
    return
  # Collect the optional path specification attributes that are set.
  values = []
  part_index = getattr(scan_node.path_spec, 'part_index', None)
  if part_index is not None:
    values.append('{0:d}'.format(part_index))
  store_index = getattr(scan_node.path_spec, 'store_index', None)
  if store_index is not None:
    values.append('{0:d}'.format(store_index))
  start_offset = getattr(scan_node.path_spec, 'start_offset', None)
  if start_offset is not None:
    values.append('start offset: {0:d} (0x{0:08x})'.format(start_offset))
  location = getattr(scan_node.path_spec, 'location', None)
  if location is not None:
    values.append('location: {0:s}'.format(location))
  values = ', '.join(values)
  flags = ''
  if scan_node in scan_context.locked_scan_nodes:
    flags = ' [LOCKED]'
  print('{0:s}{1:s}: {2:s}{3:s}'.format(
      indentation, scan_node.path_spec.type_indicator, values, flags))
  # Sub nodes are written with one additional level of indentation.
  indentation = ' {0:s}'.format(indentation)
  for sub_scan_node in scan_node.sub_nodes:
    self.WriteScanNode(scan_context, sub_scan_node, indentation=indentation)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _CalculateHashDataStream(self, file_entry, data_stream_name):
  """Calculates a message digest hash of the data of the file entry.

  Args:
    file_entry (dfvfs.FileEntry): file entry.
    data_stream_name (str): name of the data stream.

  Returns:
    str: hexadecimal representation of the SHA-256 digest hash or None if
        the data stream could not be opened or read.
  """
  hash_context = hashlib.sha256()
  try:
    file_object = file_entry.GetFileObject(data_stream_name=data_stream_name)
  except IOError as exception:
    logging.warning((
        'Unable to open path specification:\n{0:s}'
        'with error: {1!s}').format(
            file_entry.path_spec.comparable, exception))
    return None
  if not file_object:
    return None
  try:
    # Read and hash in chunks to bound memory usage.
    data = file_object.read(self._READ_BUFFER_SIZE)
    while data:
      hash_context.update(data)
      data = file_object.read(self._READ_BUFFER_SIZE)
  except IOError as exception:
    logging.warning((
        'Unable to read from path specification:\n{0:s}'
        'with error: {1!s}').format(
            file_entry.path_spec.comparable, exception))
    return None
  finally:
    file_object.close()
  return hash_context.hexdigest()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _CalculateHashesFileEntry(
    self, file_system, file_entry, parent_full_path, output_writer):
  """Recursively calculates hashes starting with the file entry.

  Args:
    file_system (dfvfs.FileSystem): file system.
    file_entry (dfvfs.FileEntry): file entry.
    parent_full_path (str): full path of the parent file entry.
    output_writer (StdoutWriter): output writer.
  """
  # JoinPath keeps this independent of the path segment separator of the
  # specific file system type.
  full_path = file_system.JoinPath([parent_full_path, file_entry.name])
  for data_stream in file_entry.data_streams:
    digest = self._CalculateHashDataStream(file_entry, data_stream.name)
    display_path = self._GetDisplayPath(
        file_entry.path_spec, full_path, data_stream.name)
    output_writer.WriteFileHash(display_path, digest or 'N/A')
  for sub_file_entry in file_entry.sub_file_entries:
    self._CalculateHashesFileEntry(
        file_system, sub_file_entry, full_path, output_writer)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetDisplayPath(self, path_spec, full_path, data_stream_name):
  """Retrieves a path to display.

  Args:
    path_spec (dfvfs.PathSpec): path specification of the file entry.
    full_path (str): full path of the file entry.
    data_stream_name (str): name of the data stream.

  Returns:
    str: path to display.
  """
  path_parts = []
  if path_spec.HasParent():
    parent_path_spec = path_spec.parent
    # Prefix the partition location when the entry is within a partition.
    if parent_path_spec and parent_path_spec.type_indicator == (
        dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION):
      path_parts.append(parent_path_spec.location)
  path_parts.append(full_path)
  display_path = ''.join(path_parts)
  if data_stream_name:
    display_path = ':'.join([display_path, data_stream_name])
  return display_path
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def CalculateHashes(self, base_path_specs, output_writer):
  """Recursively calculates hashes starting with the base path specification.

  Args:
    base_path_specs (list[dfvfs.PathSpec]): source path specification.
    output_writer (StdoutWriter): output writer.
  """
  for base_path_spec in base_path_specs:
    file_system = resolver.Resolver.OpenFileSystem(base_path_spec)
    file_entry = resolver.Resolver.OpenFileEntry(base_path_spec)
    if file_entry is None:
      # Warn and move on to the next base path specification.
      logging.warning('Unable to open base path specification:\n{0:s}'.format(
          base_path_spec.comparable))
      continue
    self._CalculateHashesFileEntry(file_system, file_entry, '', output_writer)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _EncodeString(self, string):
  """Encodes the string in the configured output encoding.

  On the first strict-mode encoding failure the writer falls back to the
  'replace' error handler for this and subsequent calls.

  Args:
    string (str): string to encode.

  Returns:
    bytes: encoded string.
  """
  try:
    # encode() first converts string into a Unicode string if necessary.
    return string.encode(self._encoding, errors=self._errors)
  except UnicodeEncodeError:
    if self._errors == 'strict':
      logging.error(
          'Unable to properly write output due to encoding error. '
          'Switching to error tolerant encoding which can result in '
          'non Basic Latin (C0) characters to be replaced with "?" or '
          '"\\ufffd".')
      self._errors = 'replace'
    return string.encode(self._encoding, errors=self._errors)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def WriteFileHash(self, path, hash_value):
  """Writes the file path and hash to file.

  Args:
    path (str): path of the file.
    hash_value (str): message digest hash calculated over the file data.
  """
  output_line = '{0:s}\t{1:s}\n'.format(hash_value, path)
  self._file_object.write(self._EncodeString(output_line))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def WriteFileHash(self, path, hash_value):
  """Writes the file path and hash to stdout.

  Args:
    path (str): path of the file.
    hash_value (str): message digest hash calculated over the file data.
  """
  output_line = '{0:s}\t{1:s}'.format(hash_value, path)
  print(self._EncodeString(output_line))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetLinkedFileEntry(self):
  """Retrieves the linked file entry, for example for a symbolic link.

  Returns:
    OSFileEntry: linked file entry or None if not available.
  """
  link = self._GetLink()
  if link:
    path_spec = os_path_spec.OSPathSpec(location=link)
    return OSFileEntry(self._resolver_context, self._file_system, path_spec)
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetAPFSVolumeByPathSpec(self, path_spec):
  """Retrieves an APFS volume for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    pyfsapfs.volume: an APFS volume or None if not available.
  """
  volume_index = apfs_helper.APFSContainerPathSpecGetVolumeIndex(path_spec)
  if volume_index is not None:
    return self._fsapfs_container.get_volume(volume_index)
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Glob2Regex(glob_pattern):
  """Converts a glob pattern to a regular expression.

  This function supports basic glob patterns that consist of:
  *       matches everything
  ?       matches any single character
  [seq]   matches any character in sequence
  [!seq]  matches any character not in sequence

  Args:
    glob_pattern (str): glob pattern.

  Returns:
    str: regular expression pattern.

  Raises:
    ValueError: if the glob pattern cannot be converted.
  """
  if not glob_pattern:
    raise ValueError('Missing glob pattern.')
  regex_pattern = []
  glob_pattern_index = 0
  glob_pattern_length = len(glob_pattern)
  while glob_pattern_index < glob_pattern_length:
    character = glob_pattern[glob_pattern_index]
    glob_pattern_index += 1
    if character == '*':
      regex_pattern.append('.*')
    elif character == '?':
      regex_pattern.append('.')
    elif character != '[':
      # Any other character is matched literally.
      regex_character = re.escape(character)
      regex_pattern.append(regex_character)
    else:
      # Scan ahead for the closing ']' of the character group. A leading
      # '!' (negation) and an immediately following ']' are part of the
      # group and skipped before searching for the closing bracket.
      glob_group_index = glob_pattern_index
      if (glob_group_index < glob_pattern_length and
          glob_pattern[glob_group_index] == '!'):
        glob_group_index += 1
      if (glob_group_index < glob_pattern_length and
          glob_pattern[glob_group_index] == ']'):
        glob_group_index += 1
      while (glob_group_index < glob_pattern_length and
             glob_pattern[glob_group_index] != ']'):
        glob_group_index += 1
      if glob_group_index >= glob_pattern_length:
        # No closing ']' found: treat '[' as a literal character.
        regex_pattern.append('\\[')
        continue
      glob_group = glob_pattern[glob_pattern_index:glob_group_index]
      glob_pattern_index = glob_group_index + 1
      glob_group = glob_group.replace('\\', '\\\\')
      if py2to3.PY_3_7_AND_LATER:
        # Since Python 3.7 a '|' within a character group must be escaped.
        glob_group = glob_group.replace('|', '\\|')
      regex_pattern.append('[')
      if glob_group[0] == '!':
        # Glob negation '!' maps to regular expression negation '^'.
        regex_pattern.append('^')
        glob_group = glob_group[1:]
      elif glob_group[0] == '^':
        # A leading '^' must be escaped to keep its literal meaning.
        regex_pattern.append('\\')
      regex_pattern.append(glob_group)
      regex_pattern.append(']')
  return ''.join(regex_pattern)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def CacheObject(self, identifier, vfs_object):
  """Caches a VFS object.

  This method ignores the cache value reference count.

  Args:
    identifier (str): VFS object identifier.
    vfs_object (object): VFS object to cache.

  Raises:
    CacheFullError: if the maximum number of cached values is reached.
    KeyError: if the VFS object already is cached.
  """
  if identifier in self._values:
    raise KeyError('Object already cached for identifier: {0:s}'.format(
        identifier))
  # Use >= instead of == so the capacity check still holds if the cache
  # ever ends up larger than the configured maximum.
  if len(self._values) >= self._maximum_number_of_cached_values:
    raise errors.CacheFullError('Maximum number of cached values reached.')
  self._values[identifier] = ObjectsCacheValue(vfs_object)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetCacheValueByObject(self, vfs_object):
  """Retrieves the cache value for the cached object.

  Args:
    vfs_object (object): VFS object that was cached.

  Returns:
    tuple[str, ObjectsCacheValue]: identifier and cache value object or
        (None, None) if not cached.

  Raises:
    RuntimeError: if the cache value is missing.
  """
  for identifier, cache_value in self._values.items():
    if not cache_value:
      raise RuntimeError('Missing cache value.')
    if cache_value.vfs_object == vfs_object:
      return identifier, cache_value
  return None, None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetObject(self, identifier):
  """Retrieves a cached object based on the identifier.

  This method ignores the cache value reference count.

  Args:
    identifier (str): VFS object identifier.

  Returns:
    object: cached VFS object or None if not cached.
  """
  cache_value = self._values.get(identifier, None)
  if cache_value:
    return cache_value.vfs_object
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GrabObject(self, identifier):
  """Grabs a cached object based on the identifier.

  This method increments the cache value reference count.

  Args:
    identifier (str): VFS object identifier.

  Raises:
    KeyError: if the VFS object is not found in the cache.
    RuntimeError: if the cache value is missing.
  """
  try:
    cache_value = self._values[identifier]
  except KeyError:
    raise KeyError('Missing cached object for identifier: {0:s}'.format(
        identifier))
  if not cache_value:
    raise RuntimeError('Missing cache value for identifier: {0:s}'.format(
        identifier))
  cache_value.IncrementReferenceCount()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ReleaseObject(self, identifier):
  """Releases a cached object based on the identifier.

  This method decrements the cache value reference count.

  Args:
    identifier (str): VFS object identifier.

  Raises:
    KeyError: if the VFS object is not found in the cache.
    RuntimeError: if the cache value is missing.
  """
  try:
    cache_value = self._values[identifier]
  except KeyError:
    raise KeyError('Missing cached object for identifier: {0:s}'.format(
        identifier))
  if not cache_value:
    raise RuntimeError('Missing cache value for identifier: {0:s}'.format(
        identifier))
  cache_value.DecrementReferenceCount()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def RemoveObject(self, identifier):
  """Removes a cached object based on the identifier.

  This method ignores the cache value reference count.

  Args:
    identifier (str): VFS object identifier.

  Raises:
    KeyError: if the VFS object is not found in the cache.
  """
  try:
    del self._values[identifier]
  except KeyError:
    raise KeyError('Missing cached object for identifier: {0:s}'.format(
        identifier))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetSecurityDescriptor(self):
  """Retrieves the security descriptor.

  Returns:
    pyfwnt.security_descriptor: security descriptor.
  """
  security_descriptor = pyfwnt.security_descriptor()
  security_descriptor.copy_from_byte_stream(
      self._fsntfs_file_entry.security_descriptor_data)
  return security_descriptor
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ReadFileEntries(self, file_object):
  """Reads the file entries from the cpio archive.

  Args:
    file_object (FileIO): file-like object.
  """
  self._file_entries = {}
  file_offset = 0
  # A file size of 0 means read until the trailer entry is found.
  while self._file_size == 0 or file_offset < self._file_size:
    file_entry = self._ReadFileEntry(file_object, file_offset)
    file_offset += file_entry.size
    if file_entry.path == 'TRAILER!!!':
      break
    # TODO: alert on file entries with duplicate paths?
    # setdefault keeps the first entry seen for a path.
    self._file_entries.setdefault(file_entry.path, file_entry)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Open(self, file_object):
  """Opens the CPIO archive file.

  Args:
    file_object (FileIO): a file-like object.

  Raises:
    IOError: if the file format signature is not supported.
    OSError: if the file format signature is not supported.
  """
  file_object.seek(0, os.SEEK_SET)
  signature_data = file_object.read(6)
  self.file_format = None
  if len(signature_data) > 2:
    # The binary variants are identified by a 2-byte signature, the ASCII
    # variants by a 6-byte signature.
    binary_signature = signature_data[:2]
    if binary_signature == self._CPIO_SIGNATURE_BINARY_BIG_ENDIAN:
      self.file_format = 'bin-big-endian'
    elif binary_signature == self._CPIO_SIGNATURE_BINARY_LITTLE_ENDIAN:
      self.file_format = 'bin-little-endian'
    elif signature_data == self._CPIO_SIGNATURE_PORTABLE_ASCII:
      self.file_format = 'odc'
    elif signature_data == self._CPIO_SIGNATURE_NEW_ASCII:
      self.file_format = 'newc'
    elif signature_data == self._CPIO_SIGNATURE_NEW_ASCII_WITH_CHECKSUM:
      self.file_format = 'crc'
  if self.file_format is None:
    raise IOError('Unsupported CPIO format.')
  self._file_object = file_object
  self._file_size = file_object.get_size()
  self._ReadFileEntries(self._file_object)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ReadDataAtOffset(self, file_offset, size):
  """Reads a byte string from the file-like object at a specific offset.

  Args:
    file_offset (int): file offset.
    size (int): number of bytes to read.

  Returns:
    bytes: data read.

  Raises:
    IOError: if the read failed.
    OSError: if the read failed.
  """
  file_object = self._file_object
  file_object.seek(file_offset, os.SEEK_SET)
  data = file_object.read(size)
  return data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetNTFSFileEntryByPathSpec(self, path_spec):
  """Retrieves the NTFS file entry for a path specification.

  Args:
    path_spec (PathSpec): a path specification.

  Returns:
    pyfsntfs.file_entry: NTFS file entry.

  Raises:
    PathSpecError: if the path specification is missing location and
        MFT entry.
  """
  location = getattr(path_spec, 'location', None)
  mft_attribute = getattr(path_spec, 'mft_attribute', None)
  mft_entry = getattr(path_spec, 'mft_entry', None)
  # Opening a file by MFT entry is faster than opening a file by location,
  # but requires the index of the corresponding $FILE_NAME MFT attribute.
  if mft_attribute is not None and mft_entry is not None:
    return self._fsntfs_volume.get_file_entry(mft_entry)
  if location is not None:
    return self._fsntfs_volume.get_file_entry_by_path(location)
  raise errors.PathSpecError(
      'Path specification missing location and MFT entry.')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _RawGlobPathSpecWithAlphabeticalSchema(
    file_system, parent_path_spec, segment_format, location, segment_length,
    upper_case=False):
  """Globs for path specifications according to an alphabetical naming schema.

  Args:
    file_system (FileSystem): file system.
    parent_path_spec (PathSpec): parent path specification.
    segment_format (str): naming schema of the segment file location.
    location (str): the base segment file location string.
    segment_length (int): length (number of characters) of the segment
        indicator.
    upper_case (Optional[bool]): True if the segment name is in upper case.

  Returns:
    list[PathSpec]: path specifications that match the glob.
  """
  segment_number = 0
  segment_files = []
  while True:
    # Convert the segment number into a base-26 letter sequence, least
    # significant letter first, e.g. 0 => 'aa...a', 1 => 'a...ab'.
    segment_index = segment_number
    segment_letters = []
    while len(segment_letters) < segment_length:
      segment_index, remainder = divmod(segment_index, 26)
      if upper_case:
        segment_letters.append(chr(ord('A') + remainder))
      else:
        segment_letters.append(chr(ord('a') + remainder))
    # Reverse the segment letters list to form the extension.
    segment_letters = ''.join(segment_letters[::-1])
    segment_location = segment_format.format(location, segment_letters)
    # Note that we don't want to set the keyword arguments when not used
    # because the path specification base class will check for unused
    # keyword arguments and raise.
    kwargs = path_spec_factory.Factory.GetProperties(parent_path_spec)
    kwargs['location'] = segment_location
    if parent_path_spec.parent is not None:
      kwargs['parent'] = parent_path_spec.parent
    segment_path_spec = path_spec_factory.Factory.NewPathSpec(
        parent_path_spec.type_indicator, **kwargs)
    # Stop at the first segment file that does not exist.
    if not file_system.FileEntryExistsByPathSpec(segment_path_spec):
      break
    segment_files.append(segment_path_spec)
    segment_number += 1
  return segment_files
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _RawGlobPathSpecWithNumericSchema( file_system, parent_path_spec, segment_format, location, segment_number):
"""Globs for path specifications according to a numeric naming schema. Args: file_system (FileSystem):
file system. parent_path_spec (PathSpec):
parent path specification. segment_format (str):
naming schema of the segment file location. location (str):
the base segment file location string. segment_number (int):
first segment number. Returns: list[PathSpec]: path specifications that match the glob. """ |
# Glob segment files named by a numeric schema: probe increasing segment
# numbers starting at the caller-provided first number until a segment
# file no longer exists.
segment_files = []
while True:
segment_location = segment_format.format(location, segment_number)
# Note that we don't want to set the keyword arguments when not used
# because the path specification base class will check for unused
# keyword arguments and raise.
kwargs = path_spec_factory.Factory.GetProperties(parent_path_spec)
kwargs['location'] = segment_location
if parent_path_spec.parent is not None:
kwargs['parent'] = parent_path_spec.parent
segment_path_spec = path_spec_factory.Factory.NewPathSpec(
parent_path_spec.type_indicator, **kwargs)
# Stop at the first missing segment; the schema is assumed contiguous.
if not file_system.FileEntryExistsByPathSpec(segment_path_spec):
break
segment_files.append(segment_path_spec)
segment_number += 1
return segment_files |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetSubNodeByLocation(self, location):
"""Retrieves a sub scan node based on the location. Args: location (str):
location that should match the location of the path specification of a sub scan node. Returns: SourceScanNode: sub scan node or None if not available. """ |
# Linear search of the direct sub nodes; matches on the optional
# 'location' attribute of each sub node's path specification.
for sub_node in self.sub_nodes:
sub_node_location = getattr(sub_node.path_spec, 'location', None)
if location == sub_node_location:
return sub_node
# No direct sub node with a matching location.
return None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetUnscannedSubNode(self):
"""Retrieves the first unscanned sub node. Returns: SourceScanNode: sub scan node or None if not available. """ |
# A leaf node that has not been scanned yet is itself the result.
if not self.sub_nodes and not self.scanned:
return self
# Otherwise recurse depth-first into the sub nodes and return the first
# unscanned node found.
for sub_node in self.sub_nodes:
result = sub_node.GetUnscannedSubNode()
if result:
return result
return None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def AddScanNode(self, path_spec, parent_scan_node):
"""Adds a scan node for a certain path specification. Args: path_spec (PathSpec):
path specification. parent_scan_node (SourceScanNode):
parent scan node or None. Returns: SourceScanNode: scan node. Raises: KeyError: if the scan node already exists. RuntimeError: if the parent scan node is not present. """ |
# Reject duplicates: one scan node per path specification.
scan_node = self._scan_nodes.get(path_spec, None)
if scan_node:
raise KeyError('Scan node already exists.')
scan_node = SourceScanNode(path_spec)
if parent_scan_node:
# The parent must already be tracked before it can gain children.
if parent_scan_node.path_spec not in self._scan_nodes:
raise RuntimeError('Parent scan node not present.')
scan_node.parent_node = parent_scan_node
parent_scan_node.sub_nodes.append(scan_node)
# The first node ever added becomes the root of the scan tree.
if not self._root_path_spec:
self._root_path_spec = path_spec
self._scan_nodes[path_spec] = scan_node
# File system nodes are also indexed separately for quick lookup.
if path_spec.IsFileSystem():
self._file_system_scan_nodes[path_spec] = scan_node
# Signal observers that the scan tree changed.
self.updated = True
return scan_node |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetUnscannedScanNode(self):
"""Retrieves the first unscanned scan node. Returns: SourceScanNode: scan node or None if not available. """ |
# Start from the root: if there is no root yet, or the root itself has
# not been scanned, return it (possibly None); otherwise search the tree.
root_scan_node = self._scan_nodes.get(self._root_path_spec, None)
if not root_scan_node or not root_scan_node.scanned:
return root_scan_node
return root_scan_node.GetUnscannedSubNode() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def LockScanNode(self, path_spec):
"""Marks a scan node as locked. Args: path_spec (PathSpec):
path specification. Raises: KeyError: if the scan node does not exist. """ |
# Only an existing scan node can be locked.
scan_node = self._scan_nodes.get(path_spec, None)
if not scan_node:
raise KeyError('Scan node does not exist.')
# Track the node in the locked-set; see UnlockScanNode for the inverse.
self._locked_scan_nodes[path_spec] = scan_node |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def OpenSourcePath(self, source_path):
"""Opens the source path. Args: source_path (str):
source path. """ |
# Wrap the OS-level path in an OS path specification and make it the
# (parent-less) root of the scan tree.
source_path_spec = path_spec_factory.Factory.NewPathSpec(
definitions.TYPE_INDICATOR_OS, location=source_path)
self.AddScanNode(source_path_spec, None) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def RemoveScanNode(self, path_spec):
"""Removes a scan node of a certain path specification. Args: path_spec (PathSpec):
path specification. Returns: SourceScanNode: parent scan node or None if not available. Raises: RuntimeError: if the scan node has sub nodes. """ |
# Removing an unknown node is a no-op that returns None.
scan_node = self._scan_nodes.get(path_spec, None)
if not scan_node:
return None
# Only leaf nodes may be removed; removing an interior node would orphan
# its sub nodes.
if scan_node.sub_nodes:
raise RuntimeError('Scan node has sub nodes.')
parent_scan_node = scan_node.parent_node
if parent_scan_node:
parent_scan_node.sub_nodes.remove(scan_node)
# Removing the root leaves the context without a root path spec.
if path_spec == self._root_path_spec:
self._root_path_spec = None
del self._scan_nodes[path_spec]
# Keep the file system index in sync with the main node index.
if path_spec.IsFileSystem():
del self._file_system_scan_nodes[path_spec]
return parent_scan_node |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def UnlockScanNode(self, path_spec):
"""Marks a scan node as unlocked. Args: path_spec (PathSpec):
path specification. Raises: KeyError: if the scan node does not exist or is not locked. """ |
# Validate both preconditions separately so the caller gets a precise
# error: the node must exist and must currently be locked.
if not self.HasScanNode(path_spec):
raise KeyError('Scan node does not exist.')
if path_spec not in self._locked_scan_nodes:
raise KeyError('Scan node is not locked.')
del self._locked_scan_nodes[path_spec]
# Scan a node again after it has been unlocked.
self._scan_nodes[path_spec].scanned = False |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ScanNode(self, scan_context, scan_node, auto_recurse=True):
"""Scans a node for supported formats. Args: scan_context (SourceScannerContext):
source scanner context. scan_node (SourceScanNode):
source scan node. auto_recurse (Optional[bool]):
True if the scan should automatically recurse as far as possible. Raises: BackEndError: if the source cannot be scanned. ValueError: if the scan context or scan node is invalid. """ |
# Core of the source scanner state machine: classifies one scan node as
# directory / file / storage media image, then repeatedly peels volume
# system and file system layers until nothing new is found.
if not scan_context:
raise ValueError('Invalid scan context.')
if not scan_node:
raise ValueError('Invalid scan node.')
scan_path_spec = scan_node.path_spec
system_level_file_entry = None
# System-level (OS) nodes can be a directory, a device or a plain file.
if scan_node.IsSystemLevel():
system_level_file_entry = resolver.Resolver.OpenFileEntry(
scan_node.path_spec, resolver_context=self._resolver_context)
if system_level_file_entry is None:
raise errors.BackEndError('Unable to open file entry.')
# A directory terminates the scan immediately.
if system_level_file_entry.IsDirectory():
scan_context.SetSourceType(definitions.SOURCE_TYPE_DIRECTORY)
return
source_path_spec = self.ScanForStorageMediaImage(scan_node.path_spec)
if source_path_spec:
scan_node.scanned = True
scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
if system_level_file_entry.IsDevice():
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
else:
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
scan_context.SetSourceType(source_type)
if not auto_recurse:
return
# In case we did not find a storage media image type we keep looking
# since not all RAW storage media image naming schemas are known and
# its type can only be detected by its content.
source_path_spec = None
while True:
if scan_node.IsFileSystem():
# No need to scan a file systems scan node for volume systems.
break
# Encrypted volumes may lock the node until credentials are supplied.
if scan_node.SupportsEncryption():
self._ScanEncryptedVolumeNode(scan_context, scan_node)
if scan_context.IsLockedScanNode(scan_node.path_spec):
# Scan node is locked, such as an encrypted volume, and we cannot
# scan it for a volume system.
break
source_path_spec = self.ScanForVolumeSystem(scan_node.path_spec)
if not source_path_spec:
# No volume system found; continue with a file system scan.
break
if not scan_context.HasScanNode(source_path_spec):
scan_node.scanned = True
scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
if system_level_file_entry and system_level_file_entry.IsDevice():
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
else:
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
scan_context.SetSourceType(source_type)
if scan_node.IsVolumeSystemRoot():
self._ScanVolumeSystemRootNode(
scan_context, scan_node, auto_recurse=auto_recurse)
# We have already scanned for the file systems.
return
if not auto_recurse and scan_context.updated:
return
# Nothing new found.
if not scan_context.updated:
break
# In case we did not find a volume system type we keep looking
# since we could be dealing with a storage media image that contains
# a single volume.
# No need to scan the root of a volume system for a file system.
if scan_node.IsVolumeSystemRoot():
pass
elif scan_context.IsLockedScanNode(scan_node.path_spec):
# Scan node is locked, such as an encrypted volume, and we cannot
# scan it for a file system.
pass
elif (scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW and
auto_recurse and scan_node.path_spec != scan_path_spec):
# Since scanning for file systems in VSS snapshot volumes can
# be expensive we only do this when explicitly asked for.
pass
elif not scan_node.IsFileSystem():
source_path_spec = self.ScanForFileSystem(scan_node.path_spec)
if not source_path_spec:
# Since a RAW storage media image can only be determined by naming
# schema we could have a single file that is not a RAW storage media
# image yet matches the naming schema.
if scan_node.path_spec.type_indicator == definitions.TYPE_INDICATOR_RAW:
scan_node = scan_context.RemoveScanNode(scan_node.path_spec)
# Make sure to override the previously assigned source type.
scan_context.source_type = definitions.SOURCE_TYPE_FILE
else:
scan_context.SetSourceType(definitions.SOURCE_TYPE_FILE)
elif not scan_context.HasScanNode(source_path_spec):
scan_node.scanned = True
scan_node = scan_context.AddScanNode(source_path_spec, scan_node)
if system_level_file_entry and system_level_file_entry.IsDevice():
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
else:
source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE
scan_context.SetSourceType(source_type)
# If all scans failed mark the scan node as scanned so we do not scan it
# again.
if not scan_node.scanned:
scan_node.scanned = True |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ScanEncryptedVolumeNode(self, scan_context, scan_node):
"""Scans an encrypted volume node for supported formats. Args: scan_context (SourceScannerContext):
source scanner context. scan_node (SourceScanNode):
source scan node. Raises: BackEndError: if the scan node cannot be unlocked. ValueError: if the scan context or scan node is invalid. """ |
# Determine whether the encrypted volume can be unlocked; if not, lock
# the scan node so the scanner does not descend into it.
if scan_node.type_indicator == definitions.TYPE_INDICATOR_APFS_CONTAINER:
# TODO: consider changing this when upstream changes have been made.
# Currently pyfsapfs does not support reading from a volume as a device.
# Also see: https://github.com/log2timeline/dfvfs/issues/332
container_file_entry = resolver.Resolver.OpenFileEntry(
scan_node.path_spec, resolver_context=self._resolver_context)
fsapfs_volume = container_file_entry.GetAPFSVolume()
# TODO: unlocking the volume multiple times is inefficient cache volume
# object in scan node and use is_locked = fsapfs_volume.is_locked()
try:
is_locked = not apfs_helper.APFSUnlockVolume(
fsapfs_volume, scan_node.path_spec, resolver.Resolver.key_chain)
except IOError as exception:
raise errors.BackEndError(
'Unable to unlock APFS volume with error: {0!s}'.format(exception))
else:
# Non-APFS encrypted volumes expose a file object with an is_locked
# attribute (e.g. BDE, FVDE back-ends).
file_object = resolver.Resolver.OpenFileObject(
scan_node.path_spec, resolver_context=self._resolver_context)
is_locked = not file_object or file_object.is_locked
file_object.close()
if is_locked:
scan_context.LockScanNode(scan_node.path_spec)
# For BitLocker To Go add a scan node for the unencrypted part of
# the volume.
if scan_node.type_indicator == definitions.TYPE_INDICATOR_BDE:
path_spec = self.ScanForFileSystem(scan_node.path_spec.parent)
if path_spec:
scan_context.AddScanNode(path_spec, scan_node.parent_node) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ScanVolumeSystemRootNode( self, scan_context, scan_node, auto_recurse=True):
"""Scans a volume system root node for supported formats. Args: scan_context (SourceScannerContext):
source scanner context. scan_node (SourceScanNode):
source scan node. auto_recurse (Optional[bool]):
True if the scan should automatically recurse as far as possible. Raises: ValueError: if the scan context or scan node is invalid. """ |
if scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW:
# For VSS add a scan node for the current volume.
path_spec = self.ScanForFileSystem(scan_node.path_spec.parent)
if path_spec:
scan_context.AddScanNode(path_spec, scan_node.parent_node)
# Determine the path specifications of the sub file entries.
file_entry = resolver.Resolver.OpenFileEntry(
scan_node.path_spec, resolver_context=self._resolver_context)
# Add one scan node per sub file entry (e.g. per volume/partition).
for sub_file_entry in file_entry.sub_file_entries:
sub_scan_node = scan_context.AddScanNode(
sub_file_entry.path_spec, scan_node)
if scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW:
# Since scanning for file systems in VSS snapshot volumes can
# be expensive we only do this when explicitly asked for.
continue
# Recurse into each sub node; without auto_recurse only scan until the
# context reports an update.
if auto_recurse or not scan_context.updated:
self._ScanNode(scan_context, sub_scan_node, auto_recurse=auto_recurse) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetVolumeIdentifiers(self, volume_system):
"""Retrieves the volume identifiers. Args: volume_system (VolumeSystem):
volume system. Returns: list[str]: sorted volume identifiers. """ |
# Collect the optional 'identifier' attribute of every volume; volumes
# without one are skipped.
volume_identifiers = []
for volume in volume_system.volumes:
volume_identifier = getattr(volume, 'identifier', None)
if volume_identifier:
volume_identifiers.append(volume_identifier)
return sorted(volume_identifiers) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Scan(self, scan_context, auto_recurse=True, scan_path_spec=None):
"""Scans for supported formats. Args: scan_context (SourceScannerContext):
source scanner context. auto_recurse (Optional[bool]):
True if the scan should automatically recurse as far as possible. scan_path_spec (Optional[PathSpec]):
path specification to indicate where the source scanner should continue scanning, where None indicates the scanner will start with the sources. Raises: ValueError: if the scan context is invalid. """ |
if not scan_context:
raise ValueError('Invalid scan context.')
# Reset the update flag so callers can detect changes from this pass.
scan_context.updated = False
# Either resume at the caller-provided node or pick the first node that
# has not been scanned yet.
if scan_path_spec:
scan_node = scan_context.GetScanNode(scan_path_spec)
else:
scan_node = scan_context.GetUnscannedScanNode()
if scan_node:
self._ScanNode(scan_context, scan_node, auto_recurse=auto_recurse) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ScanForFileSystem(self, source_path_spec):
"""Scans the path specification for a supported file system format. Args: source_path_spec (PathSpec):
source path specification. Returns: PathSpec: file system path specification or None if no supported file system type was found. Raises: BackEndError: if the source cannot be scanned or more than one file system type is found. """ |
if source_path_spec.type_indicator == (
definitions.TYPE_INDICATOR_APFS_CONTAINER):
# TODO: consider changing this when upstream changes have been made.
# Currently pyfsapfs does not support reading from a volume as a device.
# Also see: https://github.com/log2timeline/dfvfs/issues/332
return path_spec_factory.Factory.NewPathSpec(
definitions.TYPE_INDICATOR_APFS, location='/',
parent=source_path_spec)
try:
type_indicators = analyzer.Analyzer.GetFileSystemTypeIndicators(
source_path_spec, resolver_context=self._resolver_context)
except RuntimeError as exception:
raise errors.BackEndError((
'Unable to process source path specification with error: '
'{0!s}').format(exception))
if not type_indicators:
return None
type_indicator = type_indicators[0]
# Multiple detections are only tolerated when the preferred NTFS
# back-end is among them; it then wins the tie.
if len(type_indicators) > 1:
if definitions.PREFERRED_NTFS_BACK_END not in type_indicators:
raise errors.BackEndError(
'Unsupported source found more than one file system types.')
type_indicator = definitions.PREFERRED_NTFS_BACK_END
# TODO: determine root location from file system or path specification.
if type_indicator == definitions.TYPE_INDICATOR_NTFS:
root_location = '\\'
else:
root_location = '/'
file_system_path_spec = path_spec_factory.Factory.NewPathSpec(
type_indicator, location=root_location, parent=source_path_spec)
if type_indicator == definitions.TYPE_INDICATOR_TSK:
# Check if the file system can be opened since the file system by
# signature detection results in false positives.
try:
file_system = resolver.Resolver.OpenFileSystem(
file_system_path_spec, resolver_context=self._resolver_context)
file_system.Close()
except errors.BackEndError:
file_system_path_spec = None
return file_system_path_spec |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ScanForStorageMediaImage(self, source_path_spec):
"""Scans the path specification for a supported storage media image format. Args: source_path_spec (PathSpec):
source path specification. Returns: PathSpec: storage media image path specification or None if no supported storage media image type was found. Raises: BackEndError: if the source cannot be scanned or more than one storage media image type is found. """ |
try:
type_indicators = analyzer.Analyzer.GetStorageMediaImageTypeIndicators(
source_path_spec, resolver_context=self._resolver_context)
except RuntimeError as exception:
raise errors.BackEndError((
'Unable to process source path specification with error: '
'{0!s}').format(exception))
if not type_indicators:
# The RAW storage media image type cannot be detected based on
# a signature so we try to detect it based on common file naming schemas.
file_system = resolver.Resolver.OpenFileSystem(
source_path_spec, resolver_context=self._resolver_context)
raw_path_spec = path_spec_factory.Factory.NewPathSpec(
definitions.TYPE_INDICATOR_RAW, parent=source_path_spec)
try:
# The RAW glob function will raise a PathSpecError if the path
# specification is unsuitable for globbing.
glob_results = raw.RawGlobPathSpec(file_system, raw_path_spec)
except errors.PathSpecError:
glob_results = None
file_system.Close()
if not glob_results:
return None
return raw_path_spec
# Exactly one detected image type is supported; ambiguity is an error.
if len(type_indicators) > 1:
raise errors.BackEndError(
'Unsupported source found more than one storage media image types.')
return path_spec_factory.Factory.NewPathSpec(
type_indicators[0], parent=source_path_spec) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ScanForVolumeSystem(self, source_path_spec):
"""Scans the path specification for a supported volume system format. Args: source_path_spec (PathSpec):
source path specification. Returns: PathSpec: volume system path specification or None if no supported volume system type was found. Raises: BackEndError: if the source cannot be scanned or more than one volume system type is found. """ |
if source_path_spec.type_indicator == definitions.TYPE_INDICATOR_VSHADOW:
# It is technically possible to scan for VSS-in-VSS but makes no sense
# to do so.
return None
# A volume system root is already a volume system; return it unchanged.
if source_path_spec.IsVolumeSystemRoot():
return source_path_spec
if source_path_spec.type_indicator == (
definitions.TYPE_INDICATOR_APFS_CONTAINER):
# TODO: consider changing this when upstream changes have been made.
# Currently pyfsapfs does not support reading from a volume as a device.
# Also see: https://github.com/log2timeline/dfvfs/issues/332
return None
try:
type_indicators = analyzer.Analyzer.GetVolumeSystemTypeIndicators(
source_path_spec, resolver_context=self._resolver_context)
except (IOError, RuntimeError) as exception:
raise errors.BackEndError((
'Unable to process source path specification with error: '
'{0!s}').format(exception))
if not type_indicators:
return None
# Exactly one detected volume system type is supported.
if len(type_indicators) > 1:
raise errors.BackEndError(
'Unsupported source found more than one volume system types.')
# Do not nest a TSK partition system inside another TSK partition system.
if (type_indicators[0] == definitions.TYPE_INDICATOR_TSK_PARTITION and
source_path_spec.type_indicator in [
definitions.TYPE_INDICATOR_TSK_PARTITION]):
return None
# Volume system types take a root location; other types do not.
if type_indicators[0] in definitions.VOLUME_SYSTEM_TYPE_INDICATORS:
return path_spec_factory.Factory.NewPathSpec(
type_indicators[0], location='/', parent=source_path_spec)
return path_spec_factory.Factory.NewPathSpec(
type_indicators[0], parent=source_path_spec) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _WriteRow(self, output_writer, values, in_bold=False):
"""Writes a row of values aligned with the width to the output writer. Args: output_writer (CLIOutputWriter):
output writer. values (list[object]):
values. in_bold (Optional[bool]):
True if the row should be written in bold. """ |
# Pad every value to its column width, then join into one output line.
row_strings = []
for value_index, value_string in enumerate(values):
padding_size = self._column_sizes[value_index] - len(value_string)
padding_string = ' ' * padding_size
row_strings.extend([value_string, padding_string])
# Drop the padding after the last column.
row_strings.pop()
row_strings = ''.join(row_strings)
if in_bold and not win32console:
# TODO: for win32console get current color and set intensity,
# write the header separately then reset intensity.
# ANSI escape sequence for bold text.
row_strings = '\x1b[1m{0:s}\x1b[0m'.format(row_strings)
output_writer.Write('{0:s}\n'.format(row_strings)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Write(self, output_writer):
"""Writes the table to output writer. Args: output_writer (CLIOutputWriter):
output writer. """ |
# Round up the column sizes to the nearest tab.
for column_index, column_size in enumerate(self._column_sizes):
column_size, _ = divmod(column_size, self._NUMBER_OF_SPACES_IN_TAB)
column_size = (column_size + 1) * self._NUMBER_OF_SPACES_IN_TAB
self._column_sizes[column_index] = column_size
if self._columns:
self._WriteRow(output_writer, self._columns, in_bold=True)
for values in self._rows:
self._WriteRow(output_writer, values) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetVSSStoreIdentifiers(self, volume_system, volume_identifiers):
"""Retrieves VSS store identifiers. This method can be used to prompt the user to provide VSS store identifiers. Args: volume_system (VShadowVolumeSystem):
volume system. volume_identifiers (list[str]):
volume identifiers including prefix. Returns: list[str]: selected volume identifiers including prefix or None. """ |
# Interactive prompt loop: show the overview once, then keep asking for
# VSS store identifiers until a valid selection is entered (or the user
# aborts with Ctrl^C, which propagates as KeyboardInterrupt).
print_header = True
while True:
if print_header:
self._PrintVSSStoreIdentifiersOverview(
volume_system, volume_identifiers)
print_header = False
self._output_writer.Write('\n')
lines = self._textwrapper.wrap(self._USER_PROMPT_VSS)
self._output_writer.Write('\n'.join(lines))
self._output_writer.Write('\n\nVSS identifier(s): ')
try:
selected_volumes = self._ReadSelectedVolumes(
volume_system, prefix='vss')
# Accept an empty selection or one fully contained in the known
# volume identifiers.
if (not selected_volumes or
not set(selected_volumes).difference(volume_identifiers)):
break
except ValueError:
pass
# Invalid input: explain and re-prompt.
self._output_writer.Write('\n')
lines = self._textwrapper.wrap(
'Unsupported VSS identifier(s), please try again or abort with '
'Ctrl^C.')
self._output_writer.Write('\n'.join(lines))
self._output_writer.Write('\n\n')
return selected_volumes |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def UnlockEncryptedVolume( self, source_scanner_object, scan_context, locked_scan_node, credentials):
"""Unlocks an encrypted volume. This method can be used to prompt the user to provide encrypted volume credentials. Args: source_scanner_object (SourceScanner):
source scanner. scan_context (SourceScannerContext):
source scanner context. locked_scan_node (SourceScanNode):
locked scan node. credentials (Credentials):
credentials supported by the locked scan node. Returns: bool: True if the volume was unlocked. """ |
# TODO: print volume description.
if locked_scan_node.type_indicator == (
definitions.TYPE_INDICATOR_APFS_CONTAINER):
header = 'Found an APFS encrypted volume.'
elif locked_scan_node.type_indicator == definitions.TYPE_INDICATOR_BDE:
header = 'Found a BitLocker encrypted volume.'
elif locked_scan_node.type_indicator == definitions.TYPE_INDICATOR_FVDE:
header = 'Found a CoreStorage (FVDE) encrypted volume.'
else:
header = 'Found an encrypted volume.'
self._output_writer.Write(header)
# 'skip' is offered in addition to the credential types the volume
# supports, allowing the user to leave the volume locked.
credentials_list = list(credentials.CREDENTIALS)
credentials_list.append('skip')
self._output_writer.Write('Supported credentials:\n\n')
for index, name in enumerate(credentials_list):
available_credential = ' {0:d}. {1:s}\n'.format(index + 1, name)
self._output_writer.Write(available_credential)
self._output_writer.Write('\nNote that you can abort with Ctrl^C.\n\n')
result = False
while not result:
self._output_writer.Write('Select a credential to unlock the volume: ')
input_line = self._input_reader.Read()
input_line = input_line.strip()
# The user may type either the credential name or its 1-based index.
if input_line in credentials_list:
credential_type = input_line
else:
try:
credential_type = int(input_line, 10)
credential_type = credentials_list[credential_type - 1]
except (IndexError, ValueError):
self._output_writer.Write(
'Unsupported credential: {0:s}\n'.format(input_line))
continue
if credential_type == 'skip':
break
getpass_string = 'Enter credential data: '
if sys.platform.startswith('win') and sys.version_info[0] < 3:
# For Python 2 on Windows getpass (win_getpass) requires an encoded
# byte string. For Python 3 we need it to be a Unicode string.
getpass_string = self._EncodeString(getpass_string)
credential_data = getpass.getpass(getpass_string)
self._output_writer.Write('\n')
if credential_type == 'key':
# NOTE(review): str.decode('hex') exists only on Python 2; on
# Python 3 this raises AttributeError (not TypeError), so hex keys
# cannot be entered. Consider codecs.decode(credential_data, 'hex')
# which works on both — TODO confirm and fix.
try:
credential_data = credential_data.decode('hex')
except TypeError:
self._output_writer.Write('Unsupported credential data.\n')
continue
result = source_scanner_object.Unlock(
scan_context, locked_scan_node.path_spec, credential_type,
credential_data)
if not result:
self._output_writer.Write('Unable to unlock volume.\n\n')
return result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetTARInfo(self):
"""Retrieves the TAR info. Returns: tarfile.TARInfo: TAR info or None if it does not exist. Raises: PathSpecError: if the path specification is incorrect. """ |
# Lazily resolve and cache the TAR member info for this entry.
if not self._tar_info:
location = getattr(self.path_spec, 'location', None)
if location is None:
raise errors.PathSpecError('Path specification missing location.')
if not location.startswith(self._file_system.LOCATION_ROOT):
raise errors.PathSpecError('Invalid location in path specification.')
# The root location itself has no TAR member.
if len(location) == 1:
return None
tar_file = self._file_system.GetTARFile()
try:
# Strip the leading path separator; TAR member names are relative.
self._tar_info = tar_file.getmember(location[1:])
except KeyError:
# Member not present in the archive; leave the cache unset.
pass
return self._tar_info |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetDecoder(cls, encoding_method):
"""Retrieves the decoder object for a specific encoding method. Args: encoding_method (str):
encoding method identifier. Returns: Decoder: decoder or None if the encoding method does not exists. """ |
# Lookup is case-insensitive; the registry keys are lower case.
encoding_method = encoding_method.lower()
decoder = cls._decoders.get(encoding_method, None)
if not decoder:
return None
# The registry stores classes; return a fresh instance per call.
return decoder() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.