text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def RegisterDecoder(cls, decoder):
"""Registers a decoder for a specific encoding method. Args: decoder (type):
decoder class. Raises: KeyError: if the corresponding decoder is already set. """ |
encoding_method = decoder.ENCODING_METHOD.lower()
if encoding_method in cls._decoders:
raise KeyError(
'Decoder for encoding method: {0:s} already set.'.format(
decoder.ENCODING_METHOD))
cls._decoders[encoding_method] = decoder |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetDirectory(self):
"""Retrieves the directory. Returns: LVMDirectory: a directory or None if not available. """ |
if self.entry_type != definitions.FILE_ENTRY_TYPE_DIRECTORY:
return None
return LVMDirectory(self._file_system, self.path_spec) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetTSKVsPartByPathSpec(tsk_volume, path_spec):
"""Retrieves the TSK volume system part object from the TSK volume object. Args: tsk_volume (pytsk3.Volume_Info):
TSK volume information. path_spec (PathSpec):
path specification. Returns: tuple: contains: pytsk3.TSK_VS_PART_INFO: TSK volume system part information or None on error. int: partition index or None if not available. """ |
location = getattr(path_spec, 'location', None)
part_index = getattr(path_spec, 'part_index', None)
start_offset = getattr(path_spec, 'start_offset', None)
partition_index = None
if part_index is None:
if location is not None:
if location.startswith('/p'):
try:
partition_index = int(location[2:], 10) - 1
except ValueError:
pass
if partition_index is None or partition_index < 0:
location = None
if location is None and start_offset is None:
return None, None
bytes_per_sector = TSKVolumeGetBytesPerSector(tsk_volume)
current_part_index = 0
current_partition_index = 0
tsk_vs_part = None
# pytsk3 does not handle the Volume_Info iterator correctly therefore
# the explicit cast to list is needed to prevent the iterator terminating
# too soon or looping forever.
tsk_vs_part_list = list(tsk_volume)
number_of_tsk_vs_parts = len(tsk_vs_part_list)
if number_of_tsk_vs_parts > 0:
if (part_index is not None and
(part_index < 0 or part_index >= number_of_tsk_vs_parts)):
return None, None
for tsk_vs_part in tsk_vs_part_list:
if TSKVsPartIsAllocated(tsk_vs_part):
if partition_index is not None:
if partition_index == current_partition_index:
break
current_partition_index += 1
if part_index is not None and part_index == current_part_index:
break
if start_offset is not None:
start_sector = TSKVsPartGetStartSector(tsk_vs_part)
if start_sector is not None:
start_sector *= bytes_per_sector
if start_sector == start_offset:
break
current_part_index += 1
# Note that here we cannot solely rely on testing if tsk_vs_part is set
# since the for loop will exit with tsk_vs_part set.
if tsk_vs_part is None or current_part_index >= number_of_tsk_vs_parts:
return None, None
if not TSKVsPartIsAllocated(tsk_vs_part):
current_partition_index = None
return tsk_vs_part, current_partition_index |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def TSKVolumeGetBytesPerSector(tsk_volume):
"""Retrieves the number of bytes per sector from a TSK volume object. Args: tsk_volume (pytsk3.Volume_Info):
TSK volume information. Returns: int: number of bytes per sector or 512 by default. """ |
# Note that because pytsk3.Volume_Info does not explicitly define info
# we need to check if the attribute exists and has a value other
# than None. Default to 512 otherwise.
if hasattr(tsk_volume, 'info') and tsk_volume.info is not None:
block_size = getattr(tsk_volume.info, 'block_size', 512)
else:
block_size = 512
return block_size |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _OpenParentFile(self, file_system, path_spec, vhdi_file):
"""Opens the parent file. Args: file_system (FileSystem):
file system of the VHDI file. path_spec (PathSpec):
path specification of the VHDI file. vhdi_file (pyvhdi.file):
VHDI file. Raises: PathSpecError: if the path specification is incorrect. """ |
location = getattr(path_spec, 'location', None)
if not location:
raise errors.PathSpecError(
'Unsupported path specification without location.')
location_path_segments = file_system.SplitPath(location)
parent_filename = vhdi_file.parent_filename
_, _, parent_filename = parent_filename.rpartition('\\')
location_path_segments.pop()
location_path_segments.append(parent_filename)
parent_file_location = file_system.JoinPath(location_path_segments)
# Note that we don't want to set the keyword arguments when not used
# because the path specification base class will check for unused
# keyword arguments and raise.
kwargs = path_spec_factory.Factory.GetProperties(path_spec)
kwargs['location'] = parent_file_location
if path_spec.parent is not None:
kwargs['parent'] = path_spec.parent
parent_file_path_spec = path_spec_factory.Factory.NewPathSpec(
path_spec.type_indicator, **kwargs)
if not file_system.FileEntryExistsByPathSpec(parent_file_path_spec):
return
file_object = resolver.Resolver.OpenFileObject(
parent_file_path_spec, resolver_context=self._resolver_context)
vhdi_parent_file = pyvhdi.file()
vhdi_parent_file.open_file_object(file_object)
if vhdi_parent_file.parent_identifier:
self._OpenParentFile(
file_system, parent_file_path_spec, vhdi_parent_file)
vhdi_file.set_parent(vhdi_parent_file)
self._parent_vhdi_files.append(vhdi_parent_file)
self._sub_file_objects.append(file_object) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def DeregisterDecrypter(cls, decrypter):
"""Deregisters a decrypter for a specific encryption method. Args: decrypter (type):
decrypter class. Raises: KeyError: if the corresponding decrypter is not set. """ |
encryption_method = decrypter.ENCRYPTION_METHOD.lower()
if encryption_method not in cls._decrypters:
raise KeyError(
'Decrypter for encryption method: {0:s} not set.'.format(
decrypter.ENCRYPTION_METHOD))
del cls._decrypters[encryption_method] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetDecrypter(cls, encryption_method, **kwargs):
"""Retrieves the decrypter object for a specific encryption method. Args: encryption_method (str):
encryption method identifier. kwargs (dict):
keyword arguments depending on the decrypter. Returns: Decrypter: decrypter or None if the encryption method does not exist. Raises: CredentialError: if the necessary credentials are missing. """ |
encryption_method = encryption_method.lower()
decrypter = cls._decrypters.get(encryption_method, None)
if not decrypter:
return None
return decrypter(**kwargs) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetTARInfoByPathSpec(self, path_spec):
"""Retrieves the TAR info for a path specification. Args: path_spec (PathSpec):
a path specification. Returns: tarfile.TARInfo: TAR info or None if it does not exist. Raises: PathSpecError: if the path specification is incorrect. """ |
location = getattr(path_spec, 'location', None)
if location is None:
raise errors.PathSpecError('Path specification missing location.')
if not location.startswith(self.LOCATION_ROOT):
raise errors.PathSpecError('Invalid location in path specification.')
if len(location) == 1:
return None
try:
return self._tar_file.getmember(location[1:])
except KeyError:
pass |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _PathStripPrefix(self, path):
"""Strips the prefix from a path. Args: path (str):
Windows path to strip the prefix from. Returns: str: path without the prefix or None if the path is not supported. """ |
if path.startswith('\\\\.\\') or path.startswith('\\\\?\\'):
if len(path) < 7 or path[5] != ':' or path[6] != self._PATH_SEPARATOR:
# Cannot handle a non-volume path.
return None
path = path[7:]
elif path.startswith('\\\\'):
# Cannot handle an UNC path.
return None
elif len(path) >= 3 and path[1] == ':':
# Check if the path is a Volume 'absolute' path.
if path[2] != self._PATH_SEPARATOR:
# Cannot handle a Volume 'relative' path.
return None
path = path[3:]
elif path.startswith('\\'):
path = path[1:]
else:
# Cannot handle a relative path.
return None
return path |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def SetEnvironmentVariable(self, name, value):
"""Sets an environment variable in the Windows path helper. Args: name (str):
name of the environment variable without enclosing %-characters, e.g. SystemRoot as in %SystemRoot%. value (str):
value of the environment variable. """ |
if isinstance(value, py2to3.STRING_TYPES):
value = self._PathStripPrefix(value)
if value is not None:
self._environment_variables[name.upper()] = value |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def APFSUnlockVolume(fsapfs_volume, path_spec, key_chain):
"""Unlocks an APFS volume using the path specification. Args: fsapfs_volume (pyapfs.volume):
APFS volume. path_spec (PathSpec):
path specification. key_chain (KeyChain):
key chain. Returns: bool: True if the volume is unlocked, False otherwise. """ |
is_locked = fsapfs_volume.is_locked()
if is_locked:
password = key_chain.GetCredential(path_spec, 'password')
if password:
fsapfs_volume.set_password(password)
recovery_password = key_chain.GetCredential(path_spec, 'recovery_password')
if recovery_password:
fsapfs_volume.set_recovery_password(recovery_password)
is_locked = not fsapfs_volume.unlock()
return not is_locked |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetNumberOfRows(self):
"""Retrieves the number of rows of the table. Returns: int: number of rows. Raises: IOError: if the file-like object has not been opened. OSError: if the file-like object has not been opened. """ |
if not self._database_object:
raise IOError('Not opened.')
if self._number_of_rows is None:
self._number_of_rows = self._database_object.GetNumberOfRows(
self._table_name)
return self._number_of_rows |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetZipInfoByPathSpec(self, path_spec):
"""Retrieves the ZIP info for a path specification. Args: path_spec (PathSpec):
a path specification. Returns: zipfile.ZipInfo: a ZIP info object or None if not available. Raises: PathSpecError: if the path specification is incorrect. """ |
location = getattr(path_spec, 'location', None)
if location is None:
raise errors.PathSpecError('Path specification missing location.')
if not location.startswith(self.LOCATION_ROOT):
raise errors.PathSpecError('Invalid location in path specification.')
if len(location) > 1:
return self._zip_file.getinfo(location[1:])
return None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetAPFSFileEntryByPathSpec(self, path_spec):
"""Retrieves the APFS file entry for a path specification. Args: path_spec (PathSpec):
a path specification. Returns: pyfsapfs.file_entry: file entry. Raises: PathSpecError: if the path specification is missing location and identifier. """ |
# Opening a file by identifier is faster than opening a file by location.
location = getattr(path_spec, 'location', None)
identifier = getattr(path_spec, 'identifier', None)
if identifier is not None:
fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_identifier(
identifier)
elif location is not None:
fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_path(location)
else:
raise errors.PathSpecError(
'Path specification missing location and identifier.')
return fsapfs_file_entry |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def DeregisterPathSpec(cls, path_spec_type):
"""Deregisters a path specification. Args: path_spec_type (type):
path specification type. Raises: KeyError: if path specification is not registered. """ |
type_indicator = path_spec_type.TYPE_INDICATOR
if type_indicator not in cls._path_spec_types:
raise KeyError(
'Path specification type: {0:s} not set.'.format(type_indicator))
del cls._path_spec_types[type_indicator]
if type_indicator in cls._system_level_type_indicators:
del cls._system_level_type_indicators[type_indicator] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetProperties(cls, path_spec):
"""Retrieves a dictionary containing the path specification properties. Args: path_spec (PathSpec):
path specification. Returns: dict[str, str]: path specification properties. """ |
properties = {}
for property_name in cls.PROPERTY_NAMES:
# Note that we do not want to set the properties when not used.
if hasattr(path_spec, property_name):
properties[property_name] = getattr(path_spec, property_name)
return properties |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def NewPathSpec(cls, type_indicator, **kwargs):
"""Creates a new path specification for the specific type indicator. Args: type_indicator (str):
type indicator. kwargs (dict):
keyword arguments depending on the path specification. Returns: PathSpec: path specification. Raises: KeyError: if path specification is not registered. """ |
if type_indicator not in cls._path_spec_types:
raise KeyError(
'Path specification type: {0:s} not set.'.format(type_indicator))
# An empty parent will cause parentless path specifications to raise
# so we conveniently remove it here.
if 'parent' in kwargs and kwargs['parent'] is None:
del kwargs['parent']
path_spec_type = cls._path_spec_types[type_indicator]
return path_spec_type(**kwargs) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def RegisterPathSpec(cls, path_spec_type):
"""Registers a path specification type. Args: path_spec_type (type):
path specification type. Raises: KeyError: if path specification is already registered. """ |
type_indicator = path_spec_type.TYPE_INDICATOR
if type_indicator in cls._path_spec_types:
raise KeyError(
'Path specification type: {0:s} already set.'.format(
type_indicator))
cls._path_spec_types[type_indicator] = path_spec_type
if getattr(path_spec_type, '_IS_SYSTEM_LEVEL', False):
cls._system_level_type_indicators[type_indicator] = path_spec_type |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ReadString( self, file_object, file_offset, data_type_map, description):
"""Reads a string. Args: file_object (FileIO):
file-like object. file_offset (int):
offset of the data relative from the start of the file-like object. data_type_map (dtfabric.DataTypeMap):
data type map of the string. description (str):
description of the string. Returns: object: structure values object. Raises: FileFormatError: if the string cannot be read. ValueError: if file-like object or data type map are invalid. """ |
# pylint: disable=protected-access
element_data_size = (
data_type_map._element_data_type_definition.GetByteSize())
elements_terminator = (
data_type_map._data_type_definition.elements_terminator)
byte_stream = []
element_data = file_object.read(element_data_size)
byte_stream.append(element_data)
while element_data and element_data != elements_terminator:
element_data = file_object.read(element_data_size)
byte_stream.append(element_data)
byte_stream = b''.join(byte_stream)
return self._ReadStructureFromByteStream(
byte_stream, file_offset, data_type_map, description) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ReadStructure( self, file_object, file_offset, data_size, data_type_map, description):
"""Reads a structure. Args: file_object (FileIO):
file-like object. file_offset (int):
offset of the data relative from the start of the file-like object. data_size (int):
data size of the structure. data_type_map (dtfabric.DataTypeMap):
data type map of the structure. description (str):
description of the structure. Returns: object: structure values object. Raises: FileFormatError: if the structure cannot be read. ValueError: if file-like object or data type map are invalid. """ |
data = self._ReadData(file_object, file_offset, data_size, description)
return self._ReadStructureFromByteStream(
data, file_offset, data_type_map, description) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read(self, offset, size):
"""Reads a byte string from the image object at the specified offset. Args: offset (int):
offset where to start reading. size (int):
number of bytes to read. Returns: bytes: data read. """ |
self._file_object.seek(offset, os.SEEK_SET)
return self._file_object.read(size) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def CopyFromDateTimeString(self, time_string):
"""Copies a SleuthKit timestamp from a date and time string. Args: time_string (str):
date and time value formatted as: YYYY-MM-DD hh:mm:ss.######[+-]##:## Where # are numeric digits ranging from 0 to 9 and the seconds fraction can be either 3 or 6 digits. The time of day, seconds fraction and time zone offset are optional. The default time zone is UTC. """ |
date_time_values = self._CopyDateTimeFromString(time_string)
year = date_time_values.get('year', 0)
month = date_time_values.get('month', 0)
day_of_month = date_time_values.get('day_of_month', 0)
hours = date_time_values.get('hours', 0)
minutes = date_time_values.get('minutes', 0)
seconds = date_time_values.get('seconds', 0)
microseconds = date_time_values.get('microseconds', 0)
self._timestamp = self._GetNumberOfSecondsFromElements(
year, month, day_of_month, hours, minutes, seconds)
self.fraction_of_second = microseconds
if pytsk3.TSK_VERSION_NUM >= 0x040200ff:
self.fraction_of_second *= 1000
else:
self.fraction_of_second *= 10
self._normalized_timestamp = None
self.is_local_time = False |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def CopyToDateTimeString(self):
"""Copies the date time value to a date and time string. Returns: str: date and time value formatted as: YYYY-MM-DD hh:mm:ss or YYYY-MM-DD hh:mm:ss.####### or YYYY-MM-DD hh:mm:ss.######### """ |
if self._timestamp is None:
return None
number_of_days, hours, minutes, seconds = self._GetTimeValues(
self._timestamp)
year, month, day_of_month = self._GetDateValues(number_of_days, 1970, 1, 1)
if self.fraction_of_second is None:
return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}'.format(
year, month, day_of_month, hours, minutes, seconds)
if pytsk3.TSK_VERSION_NUM >= 0x040200ff:
return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:09d}'.format(
year, month, day_of_month, hours, minutes, seconds,
self.fraction_of_second)
return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:07d}'.format(
year, month, day_of_month, hours, minutes, seconds,
self.fraction_of_second) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def CopyToStatTimeTuple(self):
"""Copies the SleuthKit timestamp to a stat timestamp tuple. Returns: tuple[int, int]: a POSIX timestamp in seconds and the remainder in 100 nano seconds or (None, None) on error. """ |
if self.fraction_of_second is None:
return self._timestamp, None
return super(TSKTime, self).CopyToStatTimeTuple() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def IsDefault(self):
"""Determines if the data stream is the default data stream. Returns: bool: True if the data stream is the default data stream, false if not. """ |
if not self._tsk_attribute or not self._file_system:
return True
if self._file_system.IsHFS():
attribute_type = getattr(self._tsk_attribute.info, 'type', None)
return attribute_type in (
pytsk3.TSK_FS_ATTR_TYPE_HFS_DEFAULT, pytsk3.TSK_FS_ATTR_TYPE_HFS_DATA)
if self._file_system.IsNTFS():
return not bool(self.name)
return True |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetTimeValue(self, name):
"""Retrieves a date and time value. Args: name (str):
name of the date and time value, for example "atime" or "mtime". Returns: dfdatetime.DateTimeValues: date and time value or None if not available. """ |
timestamp = getattr(self._tsk_file.info.meta, name, None)
if self._file_system_type in self._TSK_HAS_NANO_FS_TYPES:
name_fragment = '{0:s}_nano'.format(name)
fraction_of_second = getattr(
self._tsk_file.info.meta, name_fragment, None)
else:
fraction_of_second = None
return TSKTime(timestamp=timestamp, fraction_of_second=fraction_of_second) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _TSKFileTimeCopyToStatTimeTuple(self, tsk_file, time_value):
"""Copies a SleuthKit file object time value to a stat timestamp tuple. Args: tsk_file (pytsk3.File):
TSK file. time_value (str):
name of the time value. Returns: tuple[int, int]: number of seconds since 1970-01-01 00:00:00 and fraction of second in 100 nano seconds intervals. The number of seconds is None on error, or if the file system does not include the requested timestamp. The fraction of second is None on error, or if the file system does not support sub-second precision. Raises: BackEndError: if the TSK File .info, .info.meta or .info.fs_info attribute is missing. """ |
if (not tsk_file or not tsk_file.info or not tsk_file.info.meta or
not tsk_file.info.fs_info):
raise errors.BackEndError(
'Missing TSK File .info, .info.meta. or .info.fs_info')
stat_time = getattr(tsk_file.info.meta, time_value, None)
stat_time_nano = None
if self._file_system_type in self._TSK_HAS_NANO_FS_TYPES:
time_value_nano = '{0:s}_nano'.format(time_value)
stat_time_nano = getattr(tsk_file.info.meta, time_value_nano, None)
# Sleuthkit 4.2.0 switched from 100 nano seconds precision to
# 1 nano seconds precision.
if stat_time_nano is not None and pytsk3.TSK_VERSION_NUM >= 0x040200ff:
stat_time_nano /= 100
return stat_time, stat_time_nano |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def FVDEVolumeOpen(fvde_volume, path_spec, file_object, key_chain):
"""Opens the FVDE volume using the path specification. Args: fvde_volume (pyfvde.volume):
FVDE volume. path_spec (PathSpec):
path specification. file_object (FileIO):
file-like object. key_chain (KeyChain):
key chain. """ |
encrypted_root_plist = key_chain.GetCredential(
path_spec, 'encrypted_root_plist')
if encrypted_root_plist:
fvde_volume.read_encrypted_root_plist(encrypted_root_plist)
password = key_chain.GetCredential(path_spec, 'password')
if password:
fvde_volume.set_password(password)
recovery_password = key_chain.GetCredential(path_spec, 'recovery_password')
if recovery_password:
fvde_volume.set_recovery_password(recovery_password)
fvde_volume.open_file_object(file_object) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def VShadowPathSpecGetStoreIndex(path_spec):
"""Retrieves the store index from the path specification. Args: path_spec (PathSpec):
path specification. Returns: int: store index or None if not available. """ |
store_index = getattr(path_spec, 'store_index', None)
if store_index is None:
location = getattr(path_spec, 'location', None)
if location is None or not location.startswith('/vss'):
return None
store_index = None
try:
store_index = int(location[4:], 10) - 1
except (TypeError, ValueError):
pass
if store_index is None or store_index < 0:
return None
return store_index |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetMemberForOffset(self, offset):
"""Finds the member whose data includes the provided offset. Args: offset (int):
offset in the uncompressed data to find the containing member for. Returns: gzipfile.GzipMember: gzip file member or None if not available. Raises: ValueError: if the provided offset is outside of the bounds of the uncompressed data. """ |
if offset < 0 or offset >= self.uncompressed_data_size:
raise ValueError('Offset {0:d} is larger than file size {1:d}.'.format(
offset, self.uncompressed_data_size))
for end_offset, member in iter(self._members_by_end_offset.items()):
if offset < end_offset:
return member
return None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read(self, size=None):
"""Reads a byte string from the gzip file at the current offset. The function will read a byte string up to the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]):
number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if the read failed. OSError: if the read failed. """ |
data = b''
while ((size and len(data) < size) and
self._current_offset < self.uncompressed_data_size):
member = self._GetMemberForOffset(self._current_offset)
member_offset = self._current_offset - member.uncompressed_data_offset
data_read = member.ReadAtOffset(member_offset, size)
if data_read:
self._current_offset += len(data_read)
data = b''.join([data, data_read])
return data |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetStat(self):
"""Retrieves a stat object. Returns: VFSStat: a stat object. Raises: BackEndError: when the encoded stream is missing. """ |
stat_object = vfs_stat.VFSStat()
# File data stat information.
stat_object.size = self.path_spec.range_size
# File entry type stat information.
stat_object.type = stat_object.TYPE_FILE
return stat_object |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetComparable(self, sub_comparable_string=''):
"""Retrieves the comparable representation. This is a convenience function for constructing comparables. Args: sub_comparable_string (str):
sub comparable string. Returns: str: comparable representation of the path specification. """ |
string_parts = []
string_parts.append(getattr(self.parent, 'comparable', ''))
string_parts.append('type: {0:s}'.format(self.type_indicator))
if sub_comparable_string:
string_parts.append(', {0:s}'.format(sub_comparable_string))
string_parts.append('\n')
return ''.join(string_parts) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def CopyToDict(self):
"""Copies the path specification to a dictionary. Returns: dict[str, object]: path specification attributes. """ |
path_spec_dict = {}
for attribute_name, attribute_value in iter(self.__dict__.items()):
if attribute_value is None:
continue
if attribute_name == 'parent':
attribute_value = attribute_value.CopyToDict()
path_spec_dict[attribute_name] = attribute_value
return path_spec_dict |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def DeregisterMountPoint(cls, mount_point):
"""Deregisters a path specification mount point. Args: mount_point (str):
mount point identifier. Raises: KeyError: if the corresponding mount point is not set. """ |
if mount_point not in cls._mount_points:
raise KeyError('Mount point: {0:s} not set.'.format(mount_point))
del cls._mount_points[mount_point] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def RegisterMountPoint(cls, mount_point, path_spec):
"""Registers a path specification mount point. Args: mount_point (str):
mount point identifier. path_spec (PathSpec):
path specification of the mount point. Raises: KeyError: if the corresponding mount point is already set. """ |
if mount_point in cls._mount_points:
raise KeyError('Mount point: {0:s} already set.'.format(mount_point))
cls._mount_points[mount_point] = path_spec |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def DeregisterHelper(cls, resolver_helper):
"""Deregisters a path specification resolver helper. Args: resolver_helper (ResolverHelper):
resolver helper. Raises: KeyError: if resolver helper object is not set for the corresponding type indicator. """ |
if resolver_helper.type_indicator not in cls._resolver_helpers:
raise KeyError(
'Resolver helper object not set for type indicator: {0:s}.'.format(
resolver_helper.type_indicator))
del cls._resolver_helpers[resolver_helper.type_indicator] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def RegisterHelper(cls, resolver_helper):
"""Registers a path specification resolver helper. Args: resolver_helper (ResolverHelper):
resolver helper. Raises: KeyError: if resolver helper object is already set for the corresponding type indicator. """ |
if resolver_helper.type_indicator in cls._resolver_helpers:
raise KeyError((
'Resolver helper object already set for type indicator: '
'{0!s}.').format(resolver_helper.type_indicator))
cls._resolver_helpers[resolver_helper.type_indicator] = resolver_helper |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetDataStream(self, name, case_sensitive=True):
  """Retrieves a data stream by name.

  Args:
    name (str): name of the data stream.
    case_sensitive (Optional[bool]): True if the name is case sensitive.

  Returns:
    DataStream: a data stream or None if not available.

  Raises:
    ValueError: if the name is not string.
  """
  if not isinstance(name, py2to3.STRING_TYPES):
    raise ValueError('Name is not a string.')
  name_lower = name.lower()
  # An exact match always wins; otherwise remember the first
  # case-insensitive match encountered.
  first_caseless_match = None
  for data_stream in self._GetDataStreams():
    if name == data_stream.name:
      return data_stream
    if (not case_sensitive and data_stream.name.lower() == name_lower and
        not first_caseless_match):
      first_caseless_match = data_stream
  return first_caseless_match
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetSubFileEntryByName(self, name, case_sensitive=True):
  """Retrieves a sub file entry by name.

  Args:
    name (str): name of the file entry.
    case_sensitive (Optional[bool]): True if the name is case sensitive.

  Returns:
    FileEntry: a file entry or None if not available.
  """
  name_lower = name.lower()
  # An exact match always wins; otherwise remember the first
  # case-insensitive match encountered.
  first_caseless_match = None
  for sub_file_entry in self.sub_file_entries:
    if name == sub_file_entry.name:
      return sub_file_entry
    if (not case_sensitive and sub_file_entry.name.lower() == name_lower and
        not first_caseless_match):
      first_caseless_match = sub_file_entry
  return first_caseless_match
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def HasDataStream(self, name, case_sensitive=True):
  """Determines if the file entry has specific data stream.

  Args:
    name (str): name of the data stream.
    case_sensitive (Optional[bool]): True if the name is case sensitive.

  Returns:
    bool: True if the file entry has the data stream.

  Raises:
    ValueError: if the name is not string.
  """
  if not isinstance(name, py2to3.STRING_TYPES):
    raise ValueError('Name is not a string.')
  name_lower = name.lower()
  # Short-circuits on the first exact or (when allowed) caseless match.
  return any(
      data_stream.name == name or (
          not case_sensitive and data_stream.name.lower() == name_lower)
      for data_stream in self._GetDataStreams())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def IsAllocated(self):
  """Determines if the file entry is allocated.

  Returns:
    bool: True if the file entry is allocated.
  """
  stat_object = self._stat_object
  if stat_object is None:
    # Lazily retrieve and memoize the stat object.
    stat_object = self._GetStat()
    self._stat_object = stat_object
  return stat_object and stat_object.is_allocated
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def IsDevice(self):
  """Determines if the file entry is a device.

  Returns:
    bool: True if the file entry is a device.
  """
  stat_object = self._stat_object
  if stat_object is None:
    # Lazily retrieve and memoize the stat object.
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_DEVICE
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def IsDirectory(self):
  """Determines if the file entry is a directory.

  Returns:
    bool: True if the file entry is a directory.
  """
  stat_object = self._stat_object
  if stat_object is None:
    # Lazily retrieve and memoize the stat object.
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_DIRECTORY
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def IsFile(self):
  """Determines if the file entry is a file.

  Returns:
    bool: True if the file entry is a file.
  """
  stat_object = self._stat_object
  if stat_object is None:
    # Lazily retrieve and memoize the stat object.
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_FILE
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def IsLink(self):
  """Determines if the file entry is a link.

  Returns:
    bool: True if the file entry is a link.
  """
  stat_object = self._stat_object
  if stat_object is None:
    # Lazily retrieve and memoize the stat object.
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_LINK
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def IsPipe(self):
  """Determines if the file entry is a pipe.

  Returns:
    bool: True if the file entry is a pipe.
  """
  stat_object = self._stat_object
  if stat_object is None:
    # Lazily retrieve and memoize the stat object.
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_PIPE
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def IsSocket(self):
  """Determines if the file entry is a socket.

  Returns:
    bool: True if the file entry is a socket.
  """
  stat_object = self._stat_object
  if stat_object is None:
    # Lazily retrieve and memoize the stat object.
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_SOCKET
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Open(self, path_spec):
  """Opens a volume defined by path specification.

  Args:
    path_spec (PathSpec): a path specification.

  Raises:
    VolumeSystemError: if the TSK partition virtual file system could not
        be resolved.
  """
  self._file_system = resolver.Resolver.OpenFileSystem(path_spec)
  if self._file_system is None:
    raise errors.VolumeSystemError('Unable to resolve path specification.')
  # Only a TSK partition file system can back this volume system.
  if self._file_system.type_indicator != (
      definitions.TYPE_INDICATOR_TSK_PARTITION):
    raise errors.VolumeSystemError('Unsupported type indicator.')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetLVMLogicalVolumeByPathSpec(self, path_spec):
  """Retrieves a LVM logical volume for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    pyvslvm.logical_volume: a LVM logical volume or None if not available.
  """
  volume_index = lvm.LVMPathSpecGetVolumeIndex(path_spec)
  if volume_index is not None:
    return self._vslvm_volume_group.get_logical_volume(volume_index)
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ExtractCredentialsFromPathSpec(self, path_spec):
  """Extracts credentials from a path specification.

  Args:
    path_spec (PathSpec): path specification to extract credentials from.
  """
  credentials = manager.CredentialsManager.GetCredentials(path_spec)
  for identifier in credentials.CREDENTIALS:
    # Only copy credentials the path specification actually carries.
    value = getattr(path_spec, identifier, None)
    if value is not None:
      self.SetCredential(path_spec, identifier, value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetCredential(self, path_spec, identifier):
  """Retrieves a specific credential from the key chain.

  Args:
    path_spec (PathSpec): path specification.
    identifier (str): credential identifier.

  Returns:
    object: credential or None if the credential for the path
        specification is not set.
  """
  return self._credentials_per_path_spec.get(
      path_spec.comparable, {}).get(identifier, None)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def SetCredential(self, path_spec, identifier, data):
  """Sets a specific credential for the path specification.

  Args:
    path_spec (PathSpec): path specification.
    identifier (str): credential identifier.
    data (object): credential data.

  Raises:
    KeyError: if the credential is not supported by the path specification
        type.
  """
  supported_credentials = manager.CredentialsManager.GetCredentials(path_spec)
  if identifier not in supported_credentials.CREDENTIALS:
    # Fixed typo in the error message: "Unsuppored" -> "Unsupported".
    raise KeyError((
        'Unsupported credential: {0:s} for path specification type: '
        '{1:s}').format(identifier, path_spec.type_indicator))
  credentials = self._credentials_per_path_spec.get(path_spec.comparable, {})
  credentials[identifier] = data
  self._credentials_per_path_spec[path_spec.comparable] = credentials
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def EWFGlobPathSpec(file_system, path_spec):
  """Globs for path specifications according to the EWF naming schema.

  EWF segment files are numbered .E01-.E99 (also .e01, .s01 or .Ex01) and
  continue with letter-based extensions (e.g. .EAA) after segment 99.

  Args:
    file_system (FileSystem): file system.
    path_spec (PathSpec): path specification.

  Returns:
    list[PathSpec]: path specifications that match the glob.

  Raises:
    PathSpecError: if the path specification is invalid.
    RuntimeError: if the maximum number of supported segment files is
        reached.
  """
  if not path_spec.HasParent():
    raise errors.PathSpecError(
        'Unsupported path specification without parent.')
  parent_path_spec = path_spec.parent
  parent_location = getattr(parent_path_spec, 'location', None)
  if not parent_location:
    raise errors.PathSpecError(
        'Unsupported parent path specification without location.')
  # Split off the extension; the glob must start at the first segment file.
  parent_location, _, segment_extension = parent_location.rpartition('.')
  segment_extension_start = segment_extension[0]
  segment_extension_length = len(segment_extension)
  # Valid first-segment extensions: E01, e01, s01 (3 characters) or
  # Ex01 (4 characters).
  if (segment_extension_length not in [3, 4] or
      not segment_extension.endswith('01') or (
          segment_extension_length == 3 and
          segment_extension_start not in ['E', 'e', 's']) or (
              segment_extension_length == 4 and
              not segment_extension.startswith('Ex'))):
    raise errors.PathSpecError((
        'Unsupported parent path specification invalid segment file '
        'extension: {0:s}').format(segment_extension))
  segment_number = 1
  segment_files = []
  while True:
    segment_location = '{0:s}.{1:s}'.format(parent_location, segment_extension)
    # Note that we don't want to set the keyword arguments when not used
    # because the path specification base class will check for unused
    # keyword arguments and raise.
    kwargs = path_spec_factory.Factory.GetProperties(parent_path_spec)
    kwargs['location'] = segment_location
    if parent_path_spec.parent is not None:
      kwargs['parent'] = parent_path_spec.parent
    segment_path_spec = path_spec_factory.Factory.NewPathSpec(
        parent_path_spec.type_indicator, **kwargs)
    # Stop at the first segment file that does not exist.
    if not file_system.FileEntryExistsByPathSpec(segment_path_spec):
      break
    segment_files.append(segment_path_spec)
    segment_number += 1
    if segment_number <= 99:
      # Segments 1-99 use a numeric suffix, e.g. E02 or Ex02.
      if segment_extension_length == 3:
        segment_extension = '{0:s}{1:02d}'.format(
            segment_extension_start, segment_number)
      elif segment_extension_length == 4:
        segment_extension = '{0:s}x{1:02d}'.format(
            segment_extension_start, segment_number)
    else:
      # From segment 100 onwards the extension switches to letters in
      # base 26, starting at EAA (or eaa/saa for lowercase schemas).
      segment_index = segment_number - 100
      if segment_extension_start in ['e', 's']:
        letter_offset = ord('a')
      else:
        letter_offset = ord('A')
      segment_index, remainder = divmod(segment_index, 26)
      third_letter = chr(letter_offset + remainder)
      segment_index, remainder = divmod(segment_index, 26)
      second_letter = chr(letter_offset + remainder)
      first_letter = chr(ord(segment_extension_start) + segment_index)
      # '[' follows 'Z' and '{' follows 'z' in ASCII: the first letter has
      # overflowed the alphabet and no more segment files are supported.
      if first_letter in ['[', '{']:
        raise RuntimeError('Unsupported number of segment files.')
      if segment_extension_length == 3:
        segment_extension = '{0:s}{1:s}{2:s}'.format(
            first_letter, second_letter, third_letter)
      elif segment_extension_length == 4:
        segment_extension = '{0:s}x{1:s}{2:s}'.format(
            first_letter, second_letter, third_letter)
  return segment_files
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _AddParentDirectories(self, path):
  """Adds the parent directories of a path to the fake file system.

  Args:
    path (str): path of the file within the fake file system.

  Raises:
    ValueError: if a parent directory is already set and is not a
        directory.
  """
  path_segments = self.file_system.SplitPath(path)
  parent_paths = [
      self.file_system.JoinPath(path_segments[:segment_index])
      for segment_index in range(len(path_segments))]
  # First validate all parents, then create the missing ones, so that
  # nothing is added when a conflicting non-directory entry exists.
  for parent_path in parent_paths:
    file_entry = self.file_system.GetFileEntryByPath(parent_path)
    if file_entry and not file_entry.IsDirectory():
      raise ValueError(
          'Non-directory parent file entry: {0:s} already exists.'.format(
              parent_path))
  for parent_path in parent_paths:
    if not self.file_system.FileEntryExistsByPath(parent_path):
      self.file_system.AddFileEntry(
          parent_path, file_entry_type=definitions.FILE_ENTRY_TYPE_DIRECTORY)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def AddDirectory(self, path):
  """Adds a directory to the fake file system.

  Note that this function will create parent directories if needed.

  Args:
    path (str): path of the directory within the fake file system.

  Raises:
    ValueError: if the path is already set.
  """
  if not self.file_system.FileEntryExistsByPath(path):
    self._AddParentDirectories(path)
    self.file_system.AddFileEntry(
        path, file_entry_type=definitions.FILE_ENTRY_TYPE_DIRECTORY)
    return
  raise ValueError('Path: {0:s} already set.'.format(path))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def AddSymbolicLink(self, path, linked_path):
  """Adds a symbolic link to the fake file system.

  Args:
    path (str): path of the symbolic link within the fake file system.
    linked_path (str): path that is linked.

  Raises:
    ValueError: if the path is already set.
  """
  if not self.file_system.FileEntryExistsByPath(path):
    self._AddParentDirectories(path)
    self.file_system.AddFileEntry(
        path, file_entry_type=definitions.FILE_ENTRY_TYPE_LINK,
        link_data=linked_path)
    return
  raise ValueError('Path: {0:s} already set.'.format(path))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def DeregisterCredentials(cls, credentials):
  """Deregisters a path specification credentials.

  Args:
    credentials (Credentials): credentials.

  Raises:
    KeyError: if credential object is not set for the corresponding type
        indicator.
  """
  type_indicator = credentials.type_indicator
  if type_indicator not in cls._credentials:
    raise KeyError(
        'Credential object not set for type indicator: {0:s}.'.format(
            type_indicator))
  del cls._credentials[type_indicator]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def RegisterCredentials(cls, credentials):
  """Registers a path specification credentials.

  Args:
    credentials (Credentials): credentials.

  Raises:
    KeyError: if credentials object is already set for the corresponding
        type indicator.
  """
  type_indicator = credentials.type_indicator
  if type_indicator in cls._credentials:
    raise KeyError(
        'Credentials object already set for type indicator: {0:s}.'.format(
            type_indicator))
  cls._credentials[type_indicator] = credentials
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def Read(self, file_object):
  """Reads the next uncompressed data from the gzip stream.

  Args:
    file_object (FileIO): file object that contains the compressed stream.

  Returns:
    bytes: next uncompressed data from the compressed stream.
  """
  # Resume reading where the previous call left off.
  file_object.seek(self.last_read, os.SEEK_SET)
  read_data = file_object.read(self._MAXIMUM_READ_SIZE)
  self.last_read = file_object.get_offset()
  # Prepend compressed bytes left over from the previous decompression.
  decompressed_data, remaining_compressed_data = self._decompressor.Decompress(
      self._compressed_data + read_data)
  self._compressed_data = remaining_compressed_data
  self.uncompressed_offset += len(decompressed_data)
  return decompressed_data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ReadMemberHeader(self, file_object):
  """Reads a member header.

  Args:
    file_object (FileIO): file-like object to read from.

  Raises:
    FileFormatError: if the member header cannot be read.
  """
  file_offset = file_object.get_offset()
  member_header = self._ReadStructure(
      file_object, file_offset, self._MEMBER_HEADER_SIZE,
      self._MEMBER_HEADER, 'member header')
  if member_header.signature != self._GZIP_SIGNATURE:
    raise errors.FileFormatError(
        'Unsupported signature: 0x{0:04x}.'.format(member_header.signature))
  # Only deflate compression is supported.
  if member_header.compression_method != self._COMPRESSION_METHOD_DEFLATE:
    raise errors.FileFormatError(
        'Unsupported compression method: {0:d}.'.format(
            member_header.compression_method))
  self.modification_time = member_header.modification_time
  self.operating_system = member_header.operating_system
  # FEXTRA: an extra field preceded by a 16-bit little-endian size; the
  # field itself is skipped.
  if member_header.flags & self._FLAG_FEXTRA:
    file_offset = file_object.get_offset()
    extra_field_data_size = self._ReadStructure(
        file_object, file_offset, self._UINT16LE_SIZE,
        self._UINT16LE, 'extra field data size')
    file_object.seek(extra_field_data_size, os.SEEK_CUR)
  # FNAME: a NUL-terminated original filename string.
  if member_header.flags & self._FLAG_FNAME:
    file_offset = file_object.get_offset()
    string_value = self._ReadString(
        file_object, file_offset, self._CSTRING, 'original filename')
    self.original_filename = string_value.rstrip('\x00')
  # FCOMMENT: a NUL-terminated comment string.
  if member_header.flags & self._FLAG_FCOMMENT:
    file_offset = file_object.get_offset()
    string_value = self._ReadString(
        file_object, file_offset, self._CSTRING, 'comment')
    self.comment = string_value.rstrip('\x00')
  # FHCRC: a 16-bit header CRC, read and discarded here.
  if member_header.flags & self._FLAG_FHCRC:
    file_object.read(2)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ReadMemberFooter(self, file_object):
"""Reads a member footer. Args: file_object (FileIO):
file-like object to read from. Raises: FileFormatError: if the member footer cannot be read. """ |
file_offset = file_object.get_offset()
member_footer = self._ReadStructure(
file_object, file_offset, self._MEMBER_FOOTER_SIZE,
self._MEMBER_FOOTER, 'member footer')
self.uncompressed_data_size = member_footer.uncompressed_data_size |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def FlushCache(self):
  """Empties the cache that holds cached decompressed data."""
  # Drop the cached bytes and their offsets, then reset the decompressor
  # so decompression restarts from the beginning of the stream.
  self._cache = b''
  self._cache_start_offset = self._cache_end_offset = None
  self._ResetDecompressorState()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetCacheSize(self):
  """Determines the size of the uncompressed cached data.

  Returns:
    int: number of cached bytes.
  """
  # Test explicitly for None: the previous truthiness check reported an
  # empty cache whenever the cache legitimately started at offset 0.
  if self._cache_start_offset is None or self._cache_end_offset is None:
    return 0
  return self._cache_end_offset - self._cache_start_offset
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ReadAtOffset(self, offset, size=None):
  """Reads a byte string from the gzip member at the specified offset.

  The function will read a byte string of the specified size or all of the
  remaining data if no size was specified, to a maximum of the uncompressed
  cache size.

  Args:
    offset (int): offset within the uncompressed data in this member to
        read from.
    size (Optional[int]): maximum number of bytes to read, where None
        represents all remaining data.

  Returns:
    bytes: data read.

  Raises:
    IOError: if the read failed.
    ValueError: if a negative read size or offset is specified.
  """
  if size is not None and size < 0:
    raise ValueError('Invalid size value {0!s}'.format(size))
  if offset < 0:
    raise ValueError('Invalid offset value {0!s}'.format(offset))
  if size == 0 or offset >= self.uncompressed_data_size:
    return b''
  if self._cache_start_offset is None:
    self._LoadDataIntoCache(self._file_object, offset)
  # Reload the cache when the requested offset falls outside of it.
  if offset > self._cache_end_offset or offset < self._cache_start_offset:
    self.FlushCache()
    self._LoadDataIntoCache(self._file_object, offset)
  cache_offset = offset - self._cache_start_offset
  if not size:
    return self._cache[cache_offset:]
  data_end_offset = cache_offset + size
  # NOTE(review): data_end_offset is relative to the start of the cache
  # while _cache_end_offset is an offset into the member's uncompressed
  # data; when _cache_start_offset > 0 this comparison mixes the two
  # frames — TODO confirm whether this should compare against len(_cache).
  if data_end_offset > self._cache_end_offset:
    return self._cache[cache_offset:]
  return self._cache[cache_offset:data_end_offset]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _LoadDataIntoCache( self, file_object, minimum_offset, read_all_data=False):
"""Reads and decompresses the data in the member. This function already loads as much data as possible in the cache, up to UNCOMPRESSED_DATA_CACHE_SIZE bytes. Args: file_object (FileIO):
file-like object. minimum_offset (int):
offset into this member's uncompressed data at which the cache should start. read_all_data (bool):
True if all the compressed data should be read from the member. """ |
# Decompression can only be performed from beginning to end of the stream.
# So, if data before the current position of the decompressor in the stream
# is required, it's necessary to throw away the current decompression
# state and start again.
if minimum_offset < self._decompressor_state.uncompressed_offset:
self._ResetDecompressorState()
while not self.IsCacheFull() or read_all_data:
decompressed_data = self._decompressor_state.Read(file_object)
# Note that decompressed_data will be empty if there is no data left
# to read and decompress.
if not decompressed_data:
break
decompressed_data_length = len(decompressed_data)
decompressed_end_offset = self._decompressor_state.uncompressed_offset
decompressed_start_offset = (
decompressed_end_offset - decompressed_data_length)
data_to_add = decompressed_data
added_data_start_offset = decompressed_start_offset
if decompressed_start_offset < minimum_offset:
data_to_add = None
if decompressed_start_offset < minimum_offset < decompressed_end_offset:
data_add_offset = decompressed_end_offset - minimum_offset
data_to_add = decompressed_data[-data_add_offset]
added_data_start_offset = decompressed_end_offset - data_add_offset
if not self.IsCacheFull() and data_to_add:
self._cache = b''.join([self._cache, data_to_add])
if self._cache_start_offset is None:
self._cache_start_offset = added_data_start_offset
if self._cache_end_offset is None:
self._cache_end_offset = self._cache_start_offset + len(data_to_add)
else:
self._cache_end_offset += len(data_to_add)
# If there's no more data in the member, the unused_data value is
# populated in the decompressor. When this situation arises, we rewind
# to the end of the compressed_data section.
unused_data = self._decompressor_state.GetUnusedData()
if unused_data:
seek_offset = -len(unused_data)
file_object.seek(seek_offset, os.SEEK_CUR)
self._ResetDecompressorState()
break |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ListFileEntry( self, file_system, file_entry, parent_full_path, output_writer):
"""Lists a file entry. Args: file_system (dfvfs.FileSystem):
file system that contains the file entry. file_entry (dfvfs.FileEntry):
file entry to list. parent_full_path (str):
full path of the parent file entry. output_writer (StdoutWriter):
output writer. """ |
# Since every file system implementation can have their own path
# segment separator we are using JoinPath to be platform and file system
# type independent.
full_path = file_system.JoinPath([parent_full_path, file_entry.name])
if not self._list_only_files or file_entry.IsFile():
output_writer.WriteFileEntry(full_path)
for sub_file_entry in file_entry.sub_file_entries:
self._ListFileEntry(file_system, sub_file_entry, full_path, output_writer) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ListFileEntries(self, base_path_specs, output_writer):
  """Lists file entries in the base path specification.

  Args:
    base_path_specs (list[dfvfs.PathSpec]): source path specification.
    output_writer (StdoutWriter): output writer.
  """
  for source_path_spec in base_path_specs:
    file_system = resolver.Resolver.OpenFileSystem(source_path_spec)
    file_entry = resolver.Resolver.OpenFileEntry(source_path_spec)
    # Stop processing entirely when one base path specification cannot be
    # opened.
    if file_entry is None:
      logging.warning(
          'Unable to open base path specification:\n{0:s}'.format(
              source_path_spec.comparable))
      return
    self._ListFileEntry(file_system, file_entry, '', output_writer)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def WriteFileEntry(self, path):
  """Writes the file path to file.

  Args:
    path (str): path of the file.
  """
  encoded_line = self._EncodeString('{0:s}\n'.format(path))
  self._file_object.write(encoded_line)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def AddFileEntry(
    self, path, file_entry_type=definitions.FILE_ENTRY_TYPE_FILE,
    file_data=None, link_data=None):
  """Adds a fake file entry.

  Args:
    path (str): path of the file entry.
    file_entry_type (Optional[str]): type of the file entry object.
    file_data (Optional[bytes]): data of the fake file-like object.
    link_data (Optional[bytes]): link data of the fake file entry object.

  Raises:
    KeyError: if the path already exists.
    ValueError: if the file data is set but the file entry type is not a
        file or if the link data is set but the file entry type is not a
        link.
  """
  if path in self._paths:
    raise KeyError('File entry already set for path: {0:s}.'.format(path))
  # Data payloads are only meaningful for the matching entry type.
  if file_data and file_entry_type != definitions.FILE_ENTRY_TYPE_FILE:
    raise ValueError('File data set for non-file file entry type.')
  if link_data and file_entry_type != definitions.FILE_ENTRY_TYPE_LINK:
    raise ValueError('Link data set for non-link file entry type.')
  path_data = None
  if file_data is not None:
    path_data = file_data
  elif link_data is not None:
    path_data = link_data
  self._paths[path] = (file_entry_type, path_data)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetDataByPath(self, path):
  """Retrieves the data associated to a path.

  Args:
    path (str): path of the file entry.

  Returns:
    bytes: data or None if not available.
  """
  # Entries are stored as (file_entry_type, data) tuples.
  file_entry_type_and_data = self._paths.get(path, (None, None))
  return file_entry_type_and_data[1]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetFileEntryByPath(self, path):
  """Retrieves a file entry for a path.

  Args:
    path (str): path of the file entry.

  Returns:
    FakeFileEntry: a file entry or None if not available.
  """
  if path is None:
    return None
  file_entry_type, _ = self._paths.get(path, (None, None))
  if file_entry_type:
    path_spec = fake_path_spec.FakePathSpec(location=path)
    return fake_file_entry.FakeFileEntry(
        self._resolver_context, self, path_spec,
        file_entry_type=file_entry_type)
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def BasenamePath(self, path):
  """Determines the basename of the path.

  Args:
    path (str): path.

  Returns:
    str: basename of the path.
  """
  # Ignore a single trailing path separator.
  stripped_path = path
  if stripped_path.endswith(self.PATH_SEPARATOR):
    stripped_path = stripped_path[:-1]
  return stripped_path.rsplit(self.PATH_SEPARATOR, 1)[-1]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def DirnamePath(self, path):
  """Determines the directory name of the path.

  The file system root is represented by an empty string.

  Args:
    path (str): path.

  Returns:
    str: directory name of the path or None.
  """
  # Ignore a single trailing path separator.
  stripped_path = path
  if stripped_path.endswith(self.PATH_SEPARATOR):
    stripped_path = stripped_path[:-1]
  if not stripped_path:
    return None
  return stripped_path.rpartition(self.PATH_SEPARATOR)[0]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetDataStreamByPathSpec(self, path_spec):
  """Retrieves a data stream for a path specification.

  Args:
    path_spec (PathSpec): a path specification.

  Returns:
    DataStream: a data stream or None if not available.
  """
  file_entry = self.GetFileEntryByPathSpec(path_spec)
  if file_entry:
    data_stream_name = getattr(path_spec, 'data_stream', None)
    return file_entry.GetDataStream(data_stream_name)
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetFileObjectByPathSpec(self, path_spec):
  """Retrieves a file-like object for a path specification.

  Args:
    path_spec (PathSpec): a path specification.

  Returns:
    FileIO: a file-like object or None if not available.
  """
  file_entry = self.GetFileEntryByPathSpec(path_spec)
  if file_entry:
    return file_entry.GetFileObject()
  return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetPathSegmentAndSuffix(self, base_path, path):
  """Determines the path segment and suffix of the path.

  None is returned if the path does not start with the base path and an
  empty string if the path exactly matches the base path.

  Args:
    base_path (str): base path.
    path (str): path.

  Returns:
    tuple[str, str]: path segment and suffix string.
  """
  if path is None or base_path is None or not path.startswith(base_path):
    return None, None
  prefix_length = len(base_path)
  # Skip the separator between the base path and the remainder, unless the
  # base path is empty or already ends in a separator.
  if base_path and not base_path.endswith(self.PATH_SEPARATOR):
    prefix_length += 1
  if prefix_length == len(path):
    return '', ''
  remainder = path[prefix_length:]
  segment, _, suffix = remainder.partition(self.PATH_SEPARATOR)
  return segment, suffix
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def OpenFileEntry(cls, path_spec_object, resolver_context=None):
  """Opens a file entry object defined by path specification.

  Args:
    path_spec_object (PathSpec): path specification.
    resolver_context (Optional[Context]): resolver context, where None
        represents the built in context which is not multi process safe.

  Returns:
    FileEntry: file entry or None if the path specification could not be
        resolved.
  """
  file_system = cls.OpenFileSystem(
      path_spec_object, resolver_context=resolver_context)
  if resolver_context is None:
    resolver_context = cls._resolver_context
  file_entry = file_system.GetFileEntryByPathSpec(path_spec_object)
  # Release the file system so it will be removed from the cache when the
  # file entry is destroyed.
  resolver_context.ReleaseFileSystem(file_system)
  return file_entry
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def OpenFileObject(cls, path_spec_object, resolver_context=None):
    """Opens a file-like object defined by path specification.

    Args:
        path_spec_object (PathSpec): path specification.
        resolver_context (Optional[Context]): resolver context, where None
            represents the built in context which is not multi process safe.

    Returns:
        FileIO: file-like object or None if the path specification could
            not be resolved.

    Raises:
        MountPointError: if the mount point specified in the path
            specification does not exist.
        PathSpecError: if the path specification is incorrect.
        TypeError: if the path specification type is unsupported.
    """
    if not isinstance(path_spec_object, path_spec.PathSpec):
        raise TypeError('Unsupported path specification type.')

    if resolver_context is None:
        resolver_context = cls._resolver_context

    # A mount path specification is an indirection: substitute the path
    # specification registered for the mount point before resolving.
    if path_spec_object.type_indicator == definitions.TYPE_INDICATOR_MOUNT:
        if path_spec_object.HasParent():
            raise errors.PathSpecError(
                'Unsupported mount path specification with parent.')

        mount_point = getattr(path_spec_object, 'identifier', None)
        if not mount_point:
            raise errors.PathSpecError(
                'Unsupported path specification without mount point '
                'identifier.')

        path_spec_object = mount_manager.MountPointManager.GetMountPoint(
            mount_point)
        if not path_spec_object:
            raise errors.MountPointError(
                'No such mount point: {0:s}'.format(mount_point))

    # Only construct a new file-like object when the resolver context has
    # no cached instance for this path specification.
    file_object = resolver_context.GetFileObject(path_spec_object)
    if not file_object:
        resolver_helper = cls._GetResolverHelper(
            path_spec_object.type_indicator)
        file_object = resolver_helper.NewFileObject(resolver_context)
        file_object.open(path_spec=path_spec_object)

    return file_object
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def OpenFileSystem(cls, path_spec_object, resolver_context=None):
    """Opens a file system object defined by path specification.

    Args:
        path_spec_object (PathSpec): path specification.
        resolver_context (Optional[Context]): resolver context, where None
            represents the built in context which is not multi process safe.

    Returns:
        FileSystem: file system or None if the path specification could not
            be resolved or has no file system object.

    Raises:
        BackEndError: if the file system cannot be opened.
        MountPointError: if the mount point specified in the path
            specification does not exist.
        PathSpecError: if the path specification is incorrect.
        TypeError: if the path specification type is unsupported.
    """
    if not isinstance(path_spec_object, path_spec.PathSpec):
        raise TypeError('Unsupported path specification type.')

    if resolver_context is None:
        resolver_context = cls._resolver_context

    # A mount path specification is an indirection: substitute the path
    # specification registered for the mount point before resolving.
    if path_spec_object.type_indicator == definitions.TYPE_INDICATOR_MOUNT:
        if path_spec_object.HasParent():
            raise errors.PathSpecError(
                'Unsupported mount path specification with parent.')

        mount_point = getattr(path_spec_object, 'identifier', None)
        if not mount_point:
            raise errors.PathSpecError(
                'Unsupported path specification without mount point '
                'identifier.')

        path_spec_object = mount_manager.MountPointManager.GetMountPoint(
            mount_point)
        if not path_spec_object:
            raise errors.MountPointError(
                'No such mount point: {0:s}'.format(mount_point))

    # Only construct a new file system when the resolver context has no
    # cached instance for this path specification.
    file_system = resolver_context.GetFileSystem(path_spec_object)
    if not file_system:
        resolver_helper = cls._GetResolverHelper(
            path_spec_object.type_indicator)
        file_system = resolver_helper.NewFileSystem(resolver_context)

    # Translate back-end specific errors into a generic BackEndError so
    # callers only need to handle dfVFS exceptions.
    try:
        file_system.Open(path_spec_object)
    except (IOError, ValueError) as exception:
        raise errors.BackEndError(
            'Unable to open file system with error: {0!s}'.format(exception))

    return file_system
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def GetDecompressor(cls, compression_method):
    """Retrieves the decompressor object for a specific compression method.

    Args:
        compression_method (str): compression method identifier.

    Returns:
        Decompressor: decompressor or None if the compression method does
            not exists.
    """
    # Lookups are case insensitive; the registry is keyed on lower case.
    decompressor_class = cls._decompressors.get(
        compression_method.lower(), None)
    if not decompressor_class:
        return None
    return decompressor_class()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def RegisterDecompressor(cls, decompressor):
    """Registers a decompressor for a specific compression method.

    Args:
        decompressor (type): decompressor class.

    Raises:
        KeyError: if the corresponding decompressor is already set.
    """
    # The registry is keyed on the lower case compression method so that
    # lookups are case insensitive.
    method_key = decompressor.COMPRESSION_METHOD.lower()
    if method_key in cls._decompressors:
        raise KeyError(
            'Decompressor for compression method: {0:s} already set.'.format(
                decompressor.COMPRESSION_METHOD))

    cls._decompressors[method_key] = decompressor
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetDecrypter(self):
    """Retrieves a decrypter.

    Returns:
        Decrypter: decrypter.

    Raises:
        IOError: if the decrypter cannot be initialized.
        OSError: if the decrypter cannot be initialized.
    """
    key_chain = resolver.Resolver.key_chain
    # Make sure credentials carried on the path specification are available
    # in the key chain before they are requested.
    key_chain.ExtractCredentialsFromPathSpec(self._path_spec)

    try:
        credentials = key_chain.GetCredentials(self._path_spec)
        return encryption_manager.EncryptionManager.GetDecrypter(
            self._encryption_method, **credentials)
    except ValueError as exception:
        raise IOError(exception)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _GetDecryptedStreamSize(self):
    """Retrieves the decrypted stream size.

    Returns:
        int: decrypted stream size.
    """
    # Decrypt the whole stream front to back, totaling the size of every
    # decrypted chunk; the plaintext size cannot be determined otherwise.
    self._file_object.seek(0, os.SEEK_SET)

    self._decrypter = self._GetDecrypter()
    self._decrypted_data = b''

    total_decrypted_size = 0
    total_encrypted_read = 0
    encrypted_stream_size = self._file_object.get_size()

    while total_encrypted_read < encrypted_stream_size:
        read_count = self._ReadEncryptedData(self._ENCRYPTED_DATA_BUFFER_SIZE)
        if read_count == 0:
            break

        total_encrypted_read += read_count
        total_decrypted_size += self._decrypted_data_size

    return total_decrypted_size
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _Open(self, path_spec=None, mode='rb'):
    """Opens the file-like object.

    Args:
        path_spec (Optional[PathSpec]): path specification.
        mode (Optional[str]): file access mode.

    Raises:
        IOError: if the file-like object could not be opened.
        OSError: if the file-like object could not be opened.
        PathSpecError: if the path specification is incorrect.
        ValueError: if the path specification is invalid.
    """
    # When the file object was handed in at initialization time, no path
    # specification needs to be resolved here.
    if not self._file_object_set_in_init:
        if not path_spec:
            raise ValueError('Missing path specification.')

        if not path_spec.HasParent():
            raise errors.PathSpecError(
                'Unsupported path specification without parent.')

        self._encryption_method = getattr(path_spec, 'encryption_method', None)
        if self._encryption_method is None:
            raise errors.PathSpecError(
                'Path specification missing encryption method.')

        self._file_object = resolver.Resolver.OpenFileObject(
            path_spec.parent, resolver_context=self._resolver_context)

    self._path_spec = path_spec
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _AlignDecryptedDataOffset(self, decrypted_data_offset):
    """Aligns the encrypted file with the decrypted data offset.

    Args:
        decrypted_data_offset (int): decrypted data offset.
    """
    # Restart decryption at the start of the encrypted stream and decrypt
    # chunk by chunk until the chunk containing the requested offset has
    # been reached.
    self._file_object.seek(0, os.SEEK_SET)

    self._decrypter = self._GetDecrypter()
    self._decrypted_data = b''

    consumed_encrypted_bytes = 0
    encrypted_stream_size = self._file_object.get_size()

    while consumed_encrypted_bytes < encrypted_stream_size:
        read_count = self._ReadEncryptedData(self._ENCRYPTED_DATA_BUFFER_SIZE)
        if read_count == 0:
            break

        consumed_encrypted_bytes += read_count

        if decrypted_data_offset < self._decrypted_data_size:
            # The requested offset falls inside the current decrypted chunk.
            self._decrypted_data_offset = decrypted_data_offset
            break

        decrypted_data_offset -= self._decrypted_data_size
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ReadEncryptedData(self, read_size):
"""Reads encrypted data from the file-like object. Args: read_size (int):
number of bytes of encrypted data to read. Returns: int: number of bytes of encrypted data read. """ |
encrypted_data = self._file_object.read(read_size)
read_count = len(encrypted_data)
self._encrypted_data = b''.join([self._encrypted_data, encrypted_data])
self._decrypted_data, self._encrypted_data = (
self._decrypter.Decrypt(self._encrypted_data))
self._decrypted_data_size = len(self._decrypted_data)
return read_count |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def SetDecryptedStreamSize(self, decrypted_stream_size):
    """Sets the decrypted stream size.

    This function is used to set the decrypted stream size if it can be
    determined separately.

    Args:
        decrypted_stream_size (int): size of the decrypted stream in bytes.

    Raises:
        IOError: if the file-like object is already open.
        OSError: if the file-like object is already open.
        ValueError: if the decrypted stream size is invalid.
    """
    if self._is_open:
        raise IOError('Already open.')

    if decrypted_stream_size < 0:
        raise ValueError(
            'Invalid decrypted stream size: {0:d} value out of '
            'bounds.'.format(decrypted_stream_size))

    self._decrypted_stream_size = decrypted_stream_size
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getViewletByName(self, name):
    """Viewlets allow through-the-web customizations.

    Through-the-web customization magic is managed by five.customerize.
    We need to think of this when looking up viewlets.

    @return: Viewlet registration object
    """
    for view_registration in registration.getViews(IBrowserRequest):
        # A conflicting BrowserView may carry the same name, so the
        # provided interface must be checked as well.
        if view_registration.provided == IViewlet:
            if view_registration.name == name:
                return view_registration
    return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def setupViewletByName(self, name):
    """Constructs a viewlet instance by its name.

    Viewlet update() and render() method are not called.

    @return: Viewlet instance of None if viewlet with name does not exist
    """
    context = aq_inner(self.context)
    request = self.request

    # Resolve the viewlet registration from the adapters registry
    viewlet_registration = self.getViewletByName(name)
    if viewlet_registration is None:
        return None

    # The registration's factory method creates the viewlet instance
    viewlet_factory = viewlet_registration.factory

    # Instantiate the viewlet and put it into the acquisition chain;
    # viewlets are initialized with (context, request, view, manager)
    try:
        instance = viewlet_factory(context, request, self, None)
    except TypeError:
        # Bad constructor call parameters
        raise RuntimeError(
            "Unable to initialize viewlet {}. "
            "Factory method {} call failed."
            .format(name, str(viewlet_factory)))
    return instance.__of__(context)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def setup_handler(context):
    """Generic setup handler
    """
    # Run only for our own GenericSetup profile (marker file check)
    if context.readDataFile('senaite.lims.txt') is None:
        return

    logger.info("SENAITE setup handler [BEGIN]")

    portal = context.getSite()  # noqa

    # Custom setup handlers
    setup_html_filter(portal)

    logger.info("SENAITE setup handler [DONE]")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def setup_html_filter(portal):
    """Setup HTML filtering for resultsinterpretations
    """
    logger.info("*** Setup HTML Filter ***")

    # bypass the broken API from portal_transforms
    adapter = IFilterSchema(portal)
    whitelist = adapter.style_whitelist

    for allowed_style in ALLOWED_STYLES:
        logger.info("Allow style '{}'".format(allowed_style))
        if allowed_style not in whitelist:
            whitelist.append(allowed_style)

    adapter.style_whitelist = whitelist
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def to_1000(portal_setup):
    """Initial version to 1000

    :param portal_setup: The portal_setup tool
    """
    logger.info("Run all import steps from SENAITE LIMS ...")

    import_context = portal_setup._getImportContext(PROFILE_ID)
    site = import_context.getSite()

    # re-apply the HTML filter settings before re-running the profile
    setup_html_filter(site)
    portal_setup.runAllImportStepsFromProfile(PROFILE_ID)

    logger.info("Run all import steps from SENAITE LIMS [DONE]")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def spotlight_search_route(context, request):
    """The spotlight search route

    Searches all relevant catalogs and returns the merged results sorted
    by title.
    """
    catalogs = [
        CATALOG_ANALYSIS_REQUEST_LISTING,
        "portal_catalog",
        "bika_setup_catalog",
        "bika_catalog",
        "bika_catalog_worksheet_listing"
    ]

    search_results = []
    for catalog in catalogs:
        search_results.extend(search(catalog=catalog))

    # extract the data from all the brains
    # NOTE: use a list comprehension instead of map() so that len() below
    # also works on Python 3, where map() returns a lazy iterator.
    items = [get_brain_info(brain) for brain in search_results]

    return {
        "count": len(items),
        "items": sorted(items, key=itemgetter("title")),
    }
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_brain_info(brain):
    """Extract the brain info
    """
    icon_url = api.get_icon(brain)
    # avoid 404 errors with these guys
    if "document_icon.gif" in icon_url:
        icon_url = ""

    obj_id = api.get_id(brain)
    obj_title = api.get_title(brain)
    parent = api.get_parent(brain)

    return {
        "id": obj_id,
        "title": obj_title,
        "title_or_id": obj_title or obj_id,
        "description": api.get_description(brain),
        "url": api.get_url(brain),
        "parent_title": api.get_title(parent),
        "parent_url": api.get_url(parent),
        "icon": icon_url,
    }
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_search_index_for(catalog):
    """Returns the search index to query
    """
    # The AR listing catalog ships an optimized lexicon; use it when the
    # index is actually present.
    if catalog == CATALOG_ANALYSIS_REQUEST_LISTING:
        catalog_tool = api.get_tool(catalog)
        if "listing_searchable_text" in catalog_tool.indexes():
            return "listing_searchable_text"
    return "SearchableText"
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def make_query(catalog):
    """A function to prepare a query

    Builds a catalog query from the request form parameters ``q``,
    ``portal_type`` and ``limit``. Returns None when no search term was
    given.
    """
    query = {}
    request = api.get_request()
    index = get_search_index_for(catalog)
    limit = request.form.get("limit")

    # NOTE: default to an empty string to avoid a TypeError below when the
    # "q" parameter is missing from the request form.
    q = request.form.get("q", "")

    if len(q) > 0:
        query[index] = q + "*"
    else:
        return None

    portal_type = request.form.get("portal_type")
    if portal_type:
        if not isinstance(portal_type, list):
            portal_type = [portal_type]
        query["portal_type"] = portal_type

    if limit and limit.isdigit():
        query["sort_limit"] = int(limit)

    return query
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def icon_cache_key(method, self, brain_or_object):
    """Generates a cache key for the icon lookup

    Includes the virtual URL to handle multiple HTTP/HTTPS domains

    Example: http://senaite.local/clients?modified=1512033263370
    """
    base_url = api.get_url(brain_or_object)
    last_modified = api.get_modification_date(brain_or_object).millis()
    cache_key = "{}?modified={}".format(base_url, last_modified)
    logger.debug("Generated Cache Key: {}".format(cache_key))
    return cache_key
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_icon_for(self, brain_or_object):
    """Get the navigation portlet icon for the brain or object

    The cache key ensures that the lookup is done only once per domain name
    """
    types_tool = api.get_tool("portal_types")
    type_info = types_tool.getTypeInfo(api.get_portal_type(brain_or_object))

    icon = type_info.getIcon()
    if not icon:
        return ""

    # Always try to get the big icon for high-res displays
    big_icon = icon.replace(".png", "_big.png")
    # fall back to a default icon if the looked up icon does not exist
    if self.context.restrictedTraverse(big_icon, None) is None:
        big_icon = None

    portal_url = api.get_url(api.get_portal())
    title = api.get_title(brain_or_object)

    html_tag = "<img title='{}' src='{}/{}' width='16' />".format(
        title, portal_url, big_icon or icon)
    logger.info("Generated Icon Tag for {}: {}".format(
        api.get_path(brain_or_object), html_tag))
    return html_tag
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getViewportValues(self, view=None):
    """Determine the value of the viewport meta-tag
    """
    # Render the directives as "key=value" pairs joined by commas
    directives = {
        'width': 'device-width',
        'initial-scale': '1.0',
    }
    return ','.join(
        '{0}={1}'.format(key, value) for key, value in directives.items())
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.