idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
19,500
def _GetComparable ( self , sub_comparable_string = '' ) : string_parts = [ ] string_parts . append ( getattr ( self . parent , 'comparable' , '' ) ) string_parts . append ( 'type: {0:s}' . format ( self . type_indicator ) ) if sub_comparable_string : string_parts . append ( ', {0:s}' . format ( sub_comparable_string ) ) string_parts . append ( '\n' ) return '' . join ( string_parts )
Retrieves the comparable representation .
19,501
def CopyToDict(self):
  """Copies the path specification to a dictionary.

  Returns:
    dict[str, object]: path specification attributes; the parent path
        specification, if set, is recursively converted as well and
        None-valued attributes are skipped.
  """
  result = {}
  for attribute_name, attribute_value in self.__dict__.items():
    if attribute_value is None:
      continue
    if attribute_name == 'parent':
      attribute_value = attribute_value.CopyToDict()
    result[attribute_name] = attribute_value
  return result
Copies the path specification to a dictionary .
19,502
def DeregisterMountPoint(cls, mount_point):
  """Deregisters a path specification mount point.

  Args:
    mount_point (str): mount point identifier.

  Raises:
    KeyError: if the mount point is not set.
  """
  if mount_point not in cls._mount_points:
    raise KeyError('Mount point: {0:s} not set.'.format(mount_point))
  del cls._mount_points[mount_point]
Deregisters a path specification mount point .
19,503
def RegisterMountPoint(cls, mount_point, path_spec):
  """Registers a path specification mount point.

  Args:
    mount_point (str): mount point identifier.
    path_spec (PathSpec): path specification of the mount point.

  Raises:
    KeyError: if the mount point is already set.
  """
  if mount_point in cls._mount_points:
    raise KeyError('Mount point: {0:s} already set.'.format(mount_point))
  cls._mount_points[mount_point] = path_spec
Registers a path specification mount point .
19,504
def DeregisterHelper(cls, resolver_helper):
  """Deregisters a path specification resolver helper.

  Args:
    resolver_helper (ResolverHelper): resolver helper.

  Raises:
    KeyError: if no helper is set for the helper's type indicator.
  """
  if resolver_helper.type_indicator not in cls._resolver_helpers:
    raise KeyError(
        'Resolver helper object not set for type indicator: {0:s}.'.format(
            resolver_helper.type_indicator))
  del cls._resolver_helpers[resolver_helper.type_indicator]
Deregisters a path specification resolver helper .
19,505
def RegisterHelper(cls, resolver_helper):
  """Registers a path specification resolver helper.

  Args:
    resolver_helper (ResolverHelper): resolver helper.

  Raises:
    KeyError: if a helper is already set for the helper's type indicator.
  """
  if resolver_helper.type_indicator in cls._resolver_helpers:
    raise KeyError((
        'Resolver helper object already set for type indicator: '
        '{0!s}.').format(resolver_helper.type_indicator))
  cls._resolver_helpers[resolver_helper.type_indicator] = resolver_helper
Registers a path specification resolver helper .
19,506
def GetDataStream(self, name, case_sensitive=True):
  """Retrieves a data stream by name.

  Args:
    name (str): name of the data stream.
    case_sensitive (Optional[bool]): True if the name lookup is case
        sensitive; an exact-case match always wins over a case-insensitive
        one.

  Returns:
    DataStream: matching data stream or None if not available.

  Raises:
    ValueError: if the name is not a string.
  """
  if not isinstance(name, py2to3.STRING_TYPES):
    raise ValueError('Name is not a string.')

  lookup_name = name.lower()
  fallback_match = None
  for data_stream in self._GetDataStreams():
    if data_stream.name == name:
      return data_stream

    if not case_sensitive and data_stream.name.lower() == lookup_name:
      if not fallback_match:
        fallback_match = data_stream

  return fallback_match
Retrieves a data stream by name .
19,507
def GetSubFileEntryByName(self, name, case_sensitive=True):
  """Retrieves a sub file entry by name.

  Args:
    name (str): name of the sub file entry.
    case_sensitive (Optional[bool]): True if the name lookup is case
        sensitive; an exact-case match always wins over a case-insensitive
        one.

  Returns:
    FileEntry: matching sub file entry or None if not available.
  """
  lookup_name = name.lower()
  fallback_match = None
  for sub_file_entry in self.sub_file_entries:
    if sub_file_entry.name == name:
      return sub_file_entry

    if not case_sensitive and sub_file_entry.name.lower() == lookup_name:
      if not fallback_match:
        fallback_match = sub_file_entry

  return fallback_match
Retrieves a sub file entry by name .
19,508
def HasDataStream(self, name, case_sensitive=True):
  """Determines if the file entry has a specific data stream.

  Args:
    name (str): name of the data stream.
    case_sensitive (Optional[bool]): True if the name lookup is case
        sensitive.

  Returns:
    bool: True if the data stream exists.

  Raises:
    ValueError: if the name is not a string.
  """
  if not isinstance(name, py2to3.STRING_TYPES):
    raise ValueError('Name is not a string.')

  lookup_name = name.lower()
  for data_stream in self._GetDataStreams():
    if data_stream.name == name:
      return True
    if not case_sensitive and data_stream.name.lower() == lookup_name:
      return True

  return False
Determines if the file entry has specific data stream .
19,509
def IsAllocated(self):
  """Determines if the file entry is allocated.

  Returns:
    bool: True if the file entry is allocated; falsy otherwise.
  """
  if self._stat_object is None:
    # The stat object is determined lazily and cached.
    self._stat_object = self._GetStat()
  return self._stat_object and self._stat_object.is_allocated
Determines if the file entry is allocated .
19,510
def IsDevice(self):
  """Determines if the file entry is a device.

  Returns:
    bool: True if the file entry is a device.
  """
  stat_object = self._stat_object
  if stat_object is None:
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_DEVICE
Determines if the file entry is a device .
19,511
def IsDirectory(self):
  """Determines if the file entry is a directory.

  Returns:
    bool: True if the file entry is a directory.
  """
  stat_object = self._stat_object
  if stat_object is None:
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_DIRECTORY
Determines if the file entry is a directory .
19,512
def IsFile(self):
  """Determines if the file entry is a file.

  Returns:
    bool: True if the file entry is a file.
  """
  stat_object = self._stat_object
  if stat_object is None:
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_FILE
Determines if the file entry is a file .
19,513
def IsLink(self):
  """Determines if the file entry is a link.

  Returns:
    bool: True if the file entry is a link.
  """
  stat_object = self._stat_object
  if stat_object is None:
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_LINK
Determines if the file entry is a link .
19,514
def IsPipe(self):
  """Determines if the file entry is a pipe.

  Returns:
    bool: True if the file entry is a pipe.
  """
  stat_object = self._stat_object
  if stat_object is None:
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_PIPE
Determines if the file entry is a pipe .
19,515
def IsSocket(self):
  """Determines if the file entry is a socket.

  Returns:
    bool: True if the file entry is a socket.
  """
  stat_object = self._stat_object
  if stat_object is None:
    stat_object = self._GetStat()
    self._stat_object = stat_object
  if stat_object is not None:
    self.entry_type = stat_object.type
  return self.entry_type == definitions.FILE_ENTRY_TYPE_SOCKET
Determines if the file entry is a socket .
19,516
def Open(self, path_spec):
  """Opens a volume defined by path specification.

  Args:
    path_spec (PathSpec): path specification of the volume.

  Raises:
    VolumeSystemError: if the path specification cannot be resolved or is
        not a TSK partition path specification.
  """
  self._file_system = resolver.Resolver.OpenFileSystem(path_spec)
  if self._file_system is None:
    raise errors.VolumeSystemError('Unable to resolve path specification.')

  type_indicator = self._file_system.type_indicator
  if type_indicator != definitions.TYPE_INDICATOR_TSK_PARTITION:
    raise errors.VolumeSystemError('Unsupported type indicator.')
Opens a volume defined by path specification .
19,517
def GetLVMLogicalVolumeByPathSpec(self, path_spec):
  """Retrieves a LVM logical volume for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    pyvslvm.logical_volume: logical volume or None if the path
        specification has no volume index.
  """
  volume_index = lvm.LVMPathSpecGetVolumeIndex(path_spec)
  if volume_index is None:
    return None
  return self._vslvm_volume_group.get_logical_volume(volume_index)
Retrieves a LVM logical volume for a path specification .
19,518
def ExtractCredentialsFromPathSpec(self, path_spec):
  """Extracts credentials from a path specification.

  Every credential attribute supported by the path specification type that
  is present (not None) is copied into the key chain.

  Args:
    path_spec (PathSpec): path specification to extract credentials from.
  """
  credentials = manager.CredentialsManager.GetCredentials(path_spec)
  for identifier in credentials.CREDENTIALS:
    value = getattr(path_spec, identifier, None)
    if value is not None:
      self.SetCredential(path_spec, identifier, value)
Extracts credentials from a path specification .
19,519
def GetCredential(self, path_spec, identifier):
  """Retrieves a specific credential from the key chain.

  Args:
    path_spec (PathSpec): path specification.
    identifier (str): credential identifier.

  Returns:
    object: credential data or None if not available.
  """
  credentials = self._credentials_per_path_spec.get(path_spec.comparable, {})
  return credentials.get(identifier, None)
Retrieves a specific credential from the key chain .
19,520
def SetCredential(self, path_spec, identifier, data):
  """Sets a specific credential for the path specification.

  Args:
    path_spec (PathSpec): path specification.
    identifier (str): credential identifier.
    data (object): credential data.

  Raises:
    KeyError: if the credential is not supported by the path specification
        type.
  """
  supported_credentials = manager.CredentialsManager.GetCredentials(path_spec)
  if identifier not in supported_credentials.CREDENTIALS:
    # Fixed typo in the error message: "Unsuppored" -> "Unsupported".
    raise KeyError((
        'Unsupported credential: {0:s} for path specification type: '
        '{1:s}').format(identifier, path_spec.type_indicator))

  credentials = self._credentials_per_path_spec.get(path_spec.comparable, {})
  credentials[identifier] = data
  self._credentials_per_path_spec[path_spec.comparable] = credentials
Sets a specific credential for the path specification .
19,521
def EWFGlobPathSpec(file_system, path_spec):
  """Globs for path specifications according to the EWF naming schema.

  Args:
    file_system (FileSystem): file system in which the segment files reside.
    path_spec (PathSpec): EWF path specification.

  Returns:
    list[PathSpec]: path specifications of the existing segment files.

  Raises:
    PathSpecError: if the path specification has no parent, the parent has
        no location, or the segment file extension is not supported.
    RuntimeError: if the number of segment files exceeds the supported
        maximum.
  """
  if not path_spec.HasParent():
    raise errors.PathSpecError(
        'Unsupported path specification without parent.')

  parent_path_spec = path_spec.parent
  parent_location = getattr(parent_path_spec, 'location', None)
  if not parent_location:
    raise errors.PathSpecError(
        'Unsupported parent path specification without location.')

  parent_location, _, segment_extension = parent_location.rpartition('.')
  segment_extension_start = segment_extension[0]
  segment_extension_length = len(segment_extension)

  # The first segment file must be named *.E01, *.e01, *.s01 or *.Ex01.
  if (segment_extension_length not in (3, 4) or
      not segment_extension.endswith('01') or (
          segment_extension_length == 3 and
          segment_extension_start not in ('E', 'e', 's')) or (
          segment_extension_length == 4 and
          not segment_extension.startswith('Ex'))):
    raise errors.PathSpecError((
        'Unsupported parent path specification invalid segment file '
        'extension: {0:s}').format(segment_extension))

  segment_number = 1
  segment_files = []
  while True:
    segment_location = '{0:s}.{1:s}'.format(parent_location, segment_extension)

    # Note that we cannot use the path specification factory here, since
    # the type of the parent path specification is not known in advance.
    kwargs = path_spec_factory.Factory.GetProperties(parent_path_spec)
    kwargs['location'] = segment_location
    if parent_path_spec.parent is not None:
      kwargs['parent'] = parent_path_spec.parent

    segment_path_spec = path_spec_factory.Factory.NewPathSpec(
        parent_path_spec.type_indicator, **kwargs)

    if not file_system.FileEntryExistsByPathSpec(segment_path_spec):
      break

    segment_files.append(segment_path_spec)

    segment_number += 1
    if segment_number <= 99:
      # Segments 2..99 use a 2-digit decimal suffix.
      if segment_extension_length == 3:
        segment_extension = '{0:s}{1:02d}'.format(
            segment_extension_start, segment_number)
      elif segment_extension_length == 4:
        segment_extension = '{0:s}x{1:02d}'.format(
            segment_extension_start, segment_number)
    else:
      # Beyond 99 the suffix continues alphabetically, e.g. EAA, EAB, ...
      segment_index = segment_number - 100

      if segment_extension_start in ('e', 's'):
        letter_offset = ord('a')
      else:
        letter_offset = ord('A')

      segment_index, remainder = divmod(segment_index, 26)
      third_letter = chr(letter_offset + remainder)

      segment_index, remainder = divmod(segment_index, 26)
      second_letter = chr(letter_offset + remainder)

      first_letter = chr(ord(segment_extension_start) + segment_index)
      if first_letter in ('[', '{'):
        # The first letter would run past 'Z' or 'z'.
        raise RuntimeError('Unsupported number of segment files.')

      if segment_extension_length == 3:
        segment_extension = '{0:s}{1:s}{2:s}'.format(
            first_letter, second_letter, third_letter)
      elif segment_extension_length == 4:
        segment_extension = '{0:s}x{1:s}{2:s}'.format(
            first_letter, second_letter, third_letter)

  return segment_files
Globs for path specifications according to the EWF naming schema .
19,522
def _AddParentDirectories(self, path):
  """Adds the parent directories of a path to the fake file system.

  Args:
    path (str): path of the file entry.

  Raises:
    ValueError: if a parent already exists as a non-directory file entry.
  """
  path_segments = self.file_system.SplitPath(path)

  # First pass: validate that no parent exists as a non-directory entry.
  for segment_index in range(len(path_segments)):
    parent_path = self.file_system.JoinPath(path_segments[:segment_index])
    file_entry = self.file_system.GetFileEntryByPath(parent_path)
    if file_entry and not file_entry.IsDirectory():
      raise ValueError(
          'Non-directory parent file entry: {0:s} already exists.'.format(
              parent_path))

  # Second pass: create the missing parent directories.
  for segment_index in range(len(path_segments)):
    parent_path = self.file_system.JoinPath(path_segments[:segment_index])
    if not self.file_system.FileEntryExistsByPath(parent_path):
      self.file_system.AddFileEntry(
          parent_path,
          file_entry_type=definitions.FILE_ENTRY_TYPE_DIRECTORY)
Adds the parent directories of a path to the fake file system .
19,523
def AddDirectory(self, path):
  """Adds a directory to the fake file system.

  Args:
    path (str): path of the directory.

  Raises:
    ValueError: if the path is already set.
  """
  if self.file_system.FileEntryExistsByPath(path):
    raise ValueError('Path: {0:s} already set.'.format(path))

  self._AddParentDirectories(path)
  self.file_system.AddFileEntry(
      path, file_entry_type=definitions.FILE_ENTRY_TYPE_DIRECTORY)
Adds a directory to the fake file system .
19,524
def AddSymbolicLink(self, path, linked_path):
  """Adds a symbolic link to the fake file system.

  Args:
    path (str): path of the symbolic link.
    linked_path (str): path the symbolic link points to.

  Raises:
    ValueError: if the path is already set.
  """
  if self.file_system.FileEntryExistsByPath(path):
    raise ValueError('Path: {0:s} already set.'.format(path))

  self._AddParentDirectories(path)
  self.file_system.AddFileEntry(
      path, file_entry_type=definitions.FILE_ENTRY_TYPE_LINK,
      link_data=linked_path)
Adds a symbolic link to the fake file system .
19,525
def DeregisterCredentials(cls, credentials):
  """Deregisters a path specification credentials object.

  Args:
    credentials (Credentials): credentials.

  Raises:
    KeyError: if no credentials are set for the type indicator.
  """
  if credentials.type_indicator not in cls._credentials:
    raise KeyError(
        'Credential object not set for type indicator: {0:s}.'.format(
            credentials.type_indicator))
  del cls._credentials[credentials.type_indicator]
Deregisters a path specification credentials .
19,526
def RegisterCredentials(cls, credentials):
  """Registers a path specification credentials object.

  Args:
    credentials (Credentials): credentials.

  Raises:
    KeyError: if credentials are already set for the type indicator.
  """
  if credentials.type_indicator in cls._credentials:
    raise KeyError(
        'Credentials object already set for type indicator: {0:s}.'.format(
            credentials.type_indicator))
  cls._credentials[credentials.type_indicator] = credentials
Registers a path specification credentials .
19,527
def Read(self, file_object):
  """Reads the next uncompressed data from the gzip stream.

  Args:
    file_object (FileIO): file-like object that contains the compressed
        stream.

  Returns:
    bytes: uncompressed data.
  """
  file_object.seek(self.last_read, os.SEEK_SET)
  read_data = file_object.read(self._MAXIMUM_READ_SIZE)
  self.last_read = file_object.get_offset()

  # Prepend compressed bytes left over from the previous read.
  compressed_data = b''.join([self._compressed_data, read_data])
  decompressed, remaining_compressed = self._decompressor.Decompress(
      compressed_data)
  self._compressed_data = remaining_compressed

  self.uncompressed_offset += len(decompressed)
  return decompressed
Reads the next uncompressed data from the gzip stream .
19,528
def _ReadMemberHeader(self, file_object):
  """Reads a gzip member header.

  Args:
    file_object (FileIO): file-like object to read from.

  Raises:
    FileFormatError: if the signature or compression method is not
        supported.
  """
  file_offset = file_object.get_offset()
  member_header = self._ReadStructure(
      file_object, file_offset, self._MEMBER_HEADER_SIZE,
      self._MEMBER_HEADER, 'member header')

  if member_header.signature != self._GZIP_SIGNATURE:
    raise errors.FileFormatError(
        'Unsupported signature: 0x{0:04x}.'.format(member_header.signature))

  if member_header.compression_method != self._COMPRESSION_METHOD_DEFLATE:
    raise errors.FileFormatError(
        'Unsupported compression method: {0:d}.'.format(
            member_header.compression_method))

  self.modification_time = member_header.modification_time
  self.operating_system = member_header.operating_system

  if member_header.flags & self._FLAG_FEXTRA:
    # Skip over the extra field.
    file_offset = file_object.get_offset()
    extra_field_data_size = self._ReadStructure(
        file_object, file_offset, self._UINT16LE_SIZE, self._UINT16LE,
        'extra field data size')
    file_object.seek(extra_field_data_size, os.SEEK_CUR)

  if member_header.flags & self._FLAG_FNAME:
    file_offset = file_object.get_offset()
    string_value = self._ReadString(
        file_object, file_offset, self._CSTRING, 'original filename')
    self.original_filename = string_value.rstrip('\x00')

  if member_header.flags & self._FLAG_FCOMMENT:
    file_offset = file_object.get_offset()
    string_value = self._ReadString(
        file_object, file_offset, self._CSTRING, 'comment')
    self.comment = string_value.rstrip('\x00')

  if member_header.flags & self._FLAG_FHCRC:
    # Skip over the 16-bit header CRC.
    file_object.read(2)
Reads a member header .
19,529
def _ReadMemberFooter ( self , file_object ) : file_offset = file_object . get_offset ( ) member_footer = self . _ReadStructure ( file_object , file_offset , self . _MEMBER_FOOTER_SIZE , self . _MEMBER_FOOTER , 'member footer' ) self . uncompressed_data_size = member_footer . uncompressed_data_size
Reads a member footer .
19,530
def FlushCache(self):
  """Empties the cache that holds cached decompressed data."""
  self._cache = b''
  self._cache_start_offset = None
  self._cache_end_offset = None
  # The decompressor has to start over from the beginning of the stream.
  self._ResetDecompressorState()
Empties the cache that holds cached decompressed data .
19,531
def GetCacheSize(self):
  """Determines the size of the uncompressed cached data.

  Returns:
    int: number of cached bytes, or 0 if the cache boundaries are unset.
  """
  if not self._cache_start_offset or not self._cache_end_offset:
    return 0
  return self._cache_end_offset - self._cache_start_offset
Determines the size of the uncompressed cached data .
19,532
def ReadAtOffset(self, offset, size=None):
  """Reads a byte string from the gzip member at the specified offset.

  Args:
    offset (int): offset within the uncompressed data to read from.
    size (Optional[int]): maximum number of bytes to read, where None
        represents all remaining cached data.

  Returns:
    bytes: data read.

  Raises:
    ValueError: if the size or offset is negative.
  """
  if size is not None and size < 0:
    raise ValueError('Invalid size value {0!s}'.format(size))
  if offset < 0:
    raise ValueError('Invalid offset value {0!s}'.format(offset))

  if size == 0 or offset >= self.uncompressed_data_size:
    return b''

  if self._cache_start_offset is None:
    self._LoadDataIntoCache(self._file_object, offset)

  # Reload the cache if the requested offset falls outside of it.
  if offset > self._cache_end_offset or offset < self._cache_start_offset:
    self.FlushCache()
    self._LoadDataIntoCache(self._file_object, offset)

  cache_offset = offset - self._cache_start_offset
  if not size:
    return self._cache[cache_offset:]

  data_end_offset = cache_offset + size
  if data_end_offset > self._cache_end_offset:
    return self._cache[cache_offset:]

  return self._cache[cache_offset:data_end_offset]
Reads a byte string from the gzip member at the specified offset .
19,533
def _LoadDataIntoCache ( self , file_object , minimum_offset , read_all_data = False ) : if minimum_offset < self . _decompressor_state . uncompressed_offset : self . _ResetDecompressorState ( ) while not self . IsCacheFull ( ) or read_all_data : decompressed_data = self . _decompressor_state . Read ( file_object ) if not decompressed_data : break decompressed_data_length = len ( decompressed_data ) decompressed_end_offset = self . _decompressor_state . uncompressed_offset decompressed_start_offset = ( decompressed_end_offset - decompressed_data_length ) data_to_add = decompressed_data added_data_start_offset = decompressed_start_offset if decompressed_start_offset < minimum_offset : data_to_add = None if decompressed_start_offset < minimum_offset < decompressed_end_offset : data_add_offset = decompressed_end_offset - minimum_offset data_to_add = decompressed_data [ - data_add_offset ] added_data_start_offset = decompressed_end_offset - data_add_offset if not self . IsCacheFull ( ) and data_to_add : self . _cache = b'' . join ( [ self . _cache , data_to_add ] ) if self . _cache_start_offset is None : self . _cache_start_offset = added_data_start_offset if self . _cache_end_offset is None : self . _cache_end_offset = self . _cache_start_offset + len ( data_to_add ) else : self . _cache_end_offset += len ( data_to_add ) unused_data = self . _decompressor_state . GetUnusedData ( ) if unused_data : seek_offset = - len ( unused_data ) file_object . seek ( seek_offset , os . SEEK_CUR ) self . _ResetDecompressorState ( ) break
Reads and decompresses the data in the member .
19,534
def _ListFileEntry ( self , file_system , file_entry , parent_full_path , output_writer ) : full_path = file_system . JoinPath ( [ parent_full_path , file_entry . name ] ) if not self . _list_only_files or file_entry . IsFile ( ) : output_writer . WriteFileEntry ( full_path ) for sub_file_entry in file_entry . sub_file_entries : self . _ListFileEntry ( file_system , sub_file_entry , full_path , output_writer )
Lists a file entry .
19,535
def ListFileEntries(self, base_path_specs, output_writer):
  """Lists file entries in the base path specifications.

  Args:
    base_path_specs (list[PathSpec]): base path specifications.
    output_writer (OutputWriter): output writer.
  """
  for base_path_spec in base_path_specs:
    file_system = resolver.Resolver.OpenFileSystem(base_path_spec)
    file_entry = resolver.Resolver.OpenFileEntry(base_path_spec)
    if file_entry is None:
      logging.warning(
          'Unable to open base path specification:\n{0:s}'.format(
              base_path_spec.comparable))
      # NOTE(review): this aborts listing of any remaining base path
      # specifications; confirm whether "continue" is intended instead.
      return

    self._ListFileEntry(file_system, file_entry, '', output_writer)
Lists file entries in the base path specification .
19,536
def WriteFileEntry(self, path):
  """Writes the file path to file.

  Args:
    path (str): path of the file entry.
  """
  line = '{0:s}\n'.format(path)
  self._file_object.write(self._EncodeString(line))
Writes the file path to file .
19,537
def AddFileEntry(
    self, path, file_entry_type=definitions.FILE_ENTRY_TYPE_FILE,
    file_data=None, link_data=None):
  """Adds a fake file entry.

  Args:
    path (str): path of the file entry.
    file_entry_type (Optional[str]): type of the file entry.
    file_data (Optional[bytes]): data of a file entry.
    link_data (Optional[str]): data of a link entry.

  Raises:
    KeyError: if the path is already set.
    ValueError: if file or link data is set for a mismatching entry type.
  """
  if path in self._paths:
    raise KeyError('File entry already set for path: {0:s}.'.format(path))

  if file_data and file_entry_type != definitions.FILE_ENTRY_TYPE_FILE:
    raise ValueError('File data set for non-file file entry type.')
  if link_data and file_entry_type != definitions.FILE_ENTRY_TYPE_LINK:
    raise ValueError('Link data set for non-link file entry type.')

  if file_data is not None:
    path_data = file_data
  elif link_data is not None:
    path_data = link_data
  else:
    path_data = None

  self._paths[path] = (file_entry_type, path_data)
Adds a fake file entry .
19,538
def GetDataByPath(self, path):
  """Retrieves the data associated to a path.

  Args:
    path (str): path of the file entry.

  Returns:
    bytes: data or None if not available.
  """
  entry = self._paths.get(path, (None, None))
  return entry[1]
Retrieves the data associated to a path .
19,539
def GetFileEntryByPath(self, path):
  """Retrieves a file entry for a path.

  Args:
    path (str): path of the file entry.

  Returns:
    FakeFileEntry: a file entry or None if not available.
  """
  if path is None:
    return None

  file_entry_type, _ = self._paths.get(path, (None, None))
  if not file_entry_type:
    return None

  path_spec = fake_path_spec.FakePathSpec(location=path)
  return fake_file_entry.FakeFileEntry(
      self._resolver_context, self, path_spec,
      file_entry_type=file_entry_type)
Retrieves a file entry for a path .
19,540
def BasenamePath(self, path):
  """Determines the basename of the path.

  Args:
    path (str): path.

  Returns:
    str: basename of the path.
  """
  if path.endswith(self.PATH_SEPARATOR):
    path = path[:-1]
  return path.rpartition(self.PATH_SEPARATOR)[2]
Determines the basename of the path .
19,541
def DirnamePath(self, path):
  """Determines the directory name of the path.

  Args:
    path (str): path.

  Returns:
    str: directory name of the path or None if the path consists solely of
        a trailing path separator.
  """
  if path.endswith(self.PATH_SEPARATOR):
    path = path[:-1]
  if not path:
    return None
  return path.rpartition(self.PATH_SEPARATOR)[0]
Determines the directory name of the path .
19,542
def GetDataStreamByPathSpec(self, path_spec):
  """Retrieves a data stream for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    DataStream: a data stream or None if not available.
  """
  file_entry = self.GetFileEntryByPathSpec(path_spec)
  if not file_entry:
    return None

  data_stream_name = getattr(path_spec, 'data_stream', None)
  return file_entry.GetDataStream(data_stream_name)
Retrieves a data stream for a path specification .
19,543
def GetFileObjectByPathSpec(self, path_spec):
  """Retrieves a file-like object for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    FileIO: a file-like object or None if not available.
  """
  file_entry = self.GetFileEntryByPathSpec(path_spec)
  if not file_entry:
    return None
  return file_entry.GetFileObject()
Retrieves a file-like object for a path specification.
19,544
def GetPathSegmentAndSuffix(self, base_path, path):
  """Determines the path segment and suffix of the path.

  Args:
    base_path (str): base path of which the path must be an extension.
    path (str): path.

  Returns:
    tuple[str, str]: path segment directly following the base path and the
        remaining suffix, or (None, None) if the path does not start with
        the base path.
  """
  if path is None or base_path is None or not path.startswith(base_path):
    return None, None

  path_index = len(base_path)
  # Skip the separator between the base path and the next segment.
  if base_path and not base_path.endswith(self.PATH_SEPARATOR):
    path_index += 1

  if path_index == len(path):
    return '', ''

  path_segment, _, suffix = path[path_index:].partition(self.PATH_SEPARATOR)
  return path_segment, suffix
Determines the path segment and suffix of the path .
19,545
def OpenFileEntry(cls, path_spec_object, resolver_context=None):
  """Opens a file entry object defined by path specification.

  Args:
    path_spec_object (PathSpec): path specification.
    resolver_context (Optional[Context]): resolver context, where None
        represents the built-in context.

  Returns:
    FileEntry: file entry or None if not available.
  """
  file_system = cls.OpenFileSystem(
      path_spec_object, resolver_context=resolver_context)

  if resolver_context is None:
    resolver_context = cls._resolver_context

  file_entry = file_system.GetFileEntryByPathSpec(path_spec_object)

  # Release our reference to the file system.
  resolver_context.ReleaseFileSystem(file_system)
  return file_entry
Opens a file entry object defined by path specification .
19,546
def OpenFileObject(cls, path_spec_object, resolver_context=None):
  """Opens a file-like object defined by path specification.

  Args:
    path_spec_object (PathSpec): path specification.
    resolver_context (Optional[Context]): resolver context, where None
        represents the built-in context.

  Returns:
    FileIO: file-like object.

  Raises:
    TypeError: if the path specification is of an unsupported type.
    PathSpecError: if a mount path specification is malformed.
    MountPointError: if the mount point is not registered.
  """
  if not isinstance(path_spec_object, path_spec.PathSpec):
    raise TypeError('Unsupported path specification type.')

  if resolver_context is None:
    resolver_context = cls._resolver_context

  if path_spec_object.type_indicator == definitions.TYPE_INDICATOR_MOUNT:
    # Resolve the mount path specification to the registered one.
    if path_spec_object.HasParent():
      raise errors.PathSpecError(
          'Unsupported mount path specification with parent.')

    mount_point = getattr(path_spec_object, 'identifier', None)
    if not mount_point:
      raise errors.PathSpecError(
          'Unsupported path specification without mount point identifier.')

    path_spec_object = mount_manager.MountPointManager.GetMountPoint(
        mount_point)
    if not path_spec_object:
      raise errors.MountPointError(
          'No such mount point: {0:s}'.format(mount_point))

  file_object = resolver_context.GetFileObject(path_spec_object)
  if not file_object:
    resolver_helper = cls._GetResolverHelper(path_spec_object.type_indicator)
    file_object = resolver_helper.NewFileObject(resolver_context)
    file_object.open(path_spec=path_spec_object)

  return file_object
Opens a file-like object defined by path specification.
19,547
def OpenFileSystem(cls, path_spec_object, resolver_context=None):
  """Opens a file system object defined by path specification.

  Args:
    path_spec_object (PathSpec): path specification.
    resolver_context (Optional[Context]): resolver context, where None
        represents the built-in context.

  Returns:
    FileSystem: file system.

  Raises:
    TypeError: if the path specification is of an unsupported type.
    PathSpecError: if a mount path specification is malformed.
    MountPointError: if the mount point is not registered.
    BackEndError: if the file system cannot be opened.
  """
  if not isinstance(path_spec_object, path_spec.PathSpec):
    raise TypeError('Unsupported path specification type.')

  if resolver_context is None:
    resolver_context = cls._resolver_context

  if path_spec_object.type_indicator == definitions.TYPE_INDICATOR_MOUNT:
    # Resolve the mount path specification to the registered one.
    if path_spec_object.HasParent():
      raise errors.PathSpecError(
          'Unsupported mount path specification with parent.')

    mount_point = getattr(path_spec_object, 'identifier', None)
    if not mount_point:
      raise errors.PathSpecError(
          'Unsupported path specification without mount point identifier.')

    path_spec_object = mount_manager.MountPointManager.GetMountPoint(
        mount_point)
    if not path_spec_object:
      raise errors.MountPointError(
          'No such mount point: {0:s}'.format(mount_point))

  file_system = resolver_context.GetFileSystem(path_spec_object)
  if not file_system:
    resolver_helper = cls._GetResolverHelper(path_spec_object.type_indicator)
    file_system = resolver_helper.NewFileSystem(resolver_context)

  try:
    file_system.Open(path_spec_object)
  except (IOError, ValueError) as exception:
    raise errors.BackEndError(
        'Unable to open file system with error: {0!s}'.format(exception))

  return file_system
Opens a file system object defined by path specification .
19,548
def BDEVolumeOpen(bde_volume, path_spec, file_object, key_chain):
  """Opens the BDE volume using the path specification.

  Any password, recovery password or startup key credential present in the
  key chain for the path specification is applied before opening.

  Args:
    bde_volume (pybde.volume): BDE volume.
    path_spec (PathSpec): path specification.
    file_object (FileIO): file-like object of the volume.
    key_chain (KeyChain): key chain that holds the credentials.
  """
  password = key_chain.GetCredential(path_spec, 'password')
  if password:
    bde_volume.set_password(password)

  recovery_password = key_chain.GetCredential(path_spec, 'recovery_password')
  if recovery_password:
    bde_volume.set_recovery_password(recovery_password)

  startup_key = key_chain.GetCredential(path_spec, 'startup_key')
  if startup_key:
    bde_volume.read_startup_key(startup_key)

  bde_volume.open_file_object(file_object)
Opens the BDE volume using the path specification .
19,549
def GetDecompressor(cls, compression_method):
  """Retrieves the decompressor object for a specific compression method.

  Args:
    compression_method (str): compression method identifier, matched case
        insensitively.

  Returns:
    Decompressor: a new decompressor instance or None if the compression
        method is not registered.
  """
  decompressor_class = cls._decompressors.get(
      compression_method.lower(), None)
  if not decompressor_class:
    return None
  return decompressor_class()
Retrieves the decompressor object for a specific compression method .
19,550
def RegisterDecompressor(cls, decompressor):
  """Registers a decompressor for a specific compression method.

  Args:
    decompressor (type): decompressor class; its COMPRESSION_METHOD is
        registered case insensitively.

  Raises:
    KeyError: if a decompressor is already set for the compression method.
  """
  compression_method = decompressor.COMPRESSION_METHOD.lower()
  if compression_method in cls._decompressors:
    raise KeyError(
        'Decompressor for compression method: {0:s} already set.'.format(
            decompressor.COMPRESSION_METHOD))
  cls._decompressors[compression_method] = decompressor
Registers a decompressor for a specific compression method .
19,551
def _GetDecrypter(self):
  """Retrieves a decrypter.

  Returns:
    Decrypter: decrypter initialized with the credentials from the key
        chain.

  Raises:
    IOError: if the decrypter cannot be initialized.
  """
  resolver.Resolver.key_chain.ExtractCredentialsFromPathSpec(self._path_spec)
  try:
    credentials = resolver.Resolver.key_chain.GetCredentials(self._path_spec)
    return encryption_manager.EncryptionManager.GetDecrypter(
        self._encryption_method, **credentials)
  except ValueError as exception:
    raise IOError(exception)
Retrieves a decrypter .
19,552
def _GetDecryptedStreamSize ( self ) : self . _file_object . seek ( 0 , os . SEEK_SET ) self . _decrypter = self . _GetDecrypter ( ) self . _decrypted_data = b'' encrypted_data_offset = 0 encrypted_data_size = self . _file_object . get_size ( ) decrypted_stream_size = 0 while encrypted_data_offset < encrypted_data_size : read_count = self . _ReadEncryptedData ( self . _ENCRYPTED_DATA_BUFFER_SIZE ) if read_count == 0 : break encrypted_data_offset += read_count decrypted_stream_size += self . _decrypted_data_size return decrypted_stream_size
Retrieves the decrypted stream size .
19,553
def _Open(self, path_spec=None, mode='rb'):
  """Opens the file-like object.

  Args:
    path_spec (Optional[PathSpec]): path specification.
    mode (Optional[str]): file access mode.

  Raises:
    ValueError: if no path specification is provided and none was set at
        initialization time.
    PathSpecError: if the path specification has no parent or no
        encryption method.
  """
  if not self._file_object_set_in_init and not path_spec:
    raise ValueError('Missing path specification.')

  if not self._file_object_set_in_init:
    if not path_spec.HasParent():
      raise errors.PathSpecError(
          'Unsupported path specification without parent.')

    self._encryption_method = getattr(path_spec, 'encryption_method', None)
    if self._encryption_method is None:
      raise errors.PathSpecError(
          'Path specification missing encryption method.')

    self._file_object = resolver.Resolver.OpenFileObject(
        path_spec.parent, resolver_context=self._resolver_context)
    self._path_spec = path_spec
Opens the file-like object.
19,554
def _AlignDecryptedDataOffset ( self , decrypted_data_offset ) : self . _file_object . seek ( 0 , os . SEEK_SET ) self . _decrypter = self . _GetDecrypter ( ) self . _decrypted_data = b'' encrypted_data_offset = 0 encrypted_data_size = self . _file_object . get_size ( ) while encrypted_data_offset < encrypted_data_size : read_count = self . _ReadEncryptedData ( self . _ENCRYPTED_DATA_BUFFER_SIZE ) if read_count == 0 : break encrypted_data_offset += read_count if decrypted_data_offset < self . _decrypted_data_size : self . _decrypted_data_offset = decrypted_data_offset break decrypted_data_offset -= self . _decrypted_data_size
Aligns the encrypted file with the decrypted data offset .
19,555
def _ReadEncryptedData ( self , read_size ) : encrypted_data = self . _file_object . read ( read_size ) read_count = len ( encrypted_data ) self . _encrypted_data = b'' . join ( [ self . _encrypted_data , encrypted_data ] ) self . _decrypted_data , self . _encrypted_data = ( self . _decrypter . Decrypt ( self . _encrypted_data ) ) self . _decrypted_data_size = len ( self . _decrypted_data ) return read_count
Reads encrypted data from the file-like object.
19,556
def SetDecryptedStreamSize(self, decrypted_stream_size):
  """Sets the decrypted stream size.

  Args:
    decrypted_stream_size (int): size of the decrypted stream in bytes.

  Raises:
    IOError: if the file-like object is already open.
    ValueError: if the decrypted stream size is negative.
  """
  if self._is_open:
    raise IOError('Already open.')

  if decrypted_stream_size < 0:
    raise ValueError((
        'Invalid decrypted stream size: {0:d} value out of '
        'bounds.').format(decrypted_stream_size))

  self._decrypted_stream_size = decrypted_stream_size
Sets the decrypted stream size .
19,557
def getViewletByName(self, name):
  """Looks up a viewlet registration by name.

  Args:
    name: name of the viewlet registration.

  Returns:
    The matching viewlet registration or None if not found.
  """
  for view in registration.getViews(IBrowserRequest):
    if view.provided == IViewlet and view.name == name:
      return view
  return None
Viewlets allow through-the-web customizations.
19,558
def setupViewletByName(self, name):
  """Constructs a viewlet instance by its name.

  Args:
    name: name of the viewlet registration.

  Returns:
    A viewlet instance wrapped in the acquisition context, or None if no
    registration exists for the name.

  Raises:
    RuntimeError: if the viewlet factory call fails.
  """
  context = aq_inner(self.context)
  request = self.request

  reg = self.getViewletByName(name)
  if reg is None:
    return None

  factory = reg.factory
  try:
    viewlet = factory(context, request, self, None).__of__(context)
  except TypeError:
    raise RuntimeError(
        "Unable to initialize viewlet {}. "
        "Factory method {} call failed.".format(name, str(factory)))
  return viewlet
Constructs a viewlet instance by its name .
19,559
def setup_handler(context):
  """Generic setup handler for the SENAITE LIMS profile.

  Args:
    context: GenericSetup import context.
  """
  # Only run for the senaite.lims profile.
  if context.readDataFile('senaite.lims.txt') is None:
    return
  logger.info("SENAITE setup handler [BEGIN]")
  portal = context.getSite()
  setup_html_filter(portal)
  logger.info("SENAITE setup handler [DONE]")
Generic setup handler
19,560
def setup_html_filter(portal):
  """Sets up HTML filtering by whitelisting the allowed styles.

  Args:
    portal: portal object providing the HTML filter schema.
  """
  logger.info("*** Setup HTML Filter ***")
  adapter = IFilterSchema(portal)
  style_whitelist = adapter.style_whitelist
  for style in ALLOWED_STYLES:
    logger.info("Allow style '{}'".format(style))
    if style not in style_whitelist:
      style_whitelist.append(style)
  adapter.style_whitelist = style_whitelist
Sets up HTML filtering for results interpretations.
19,561
def to_1000(portal_setup):
  """Initial upgrade step to version 1000.

  Re-runs all import steps from the SENAITE LIMS profile.

  Args:
    portal_setup: portal_setup tool.
  """
  logger.info("Run all import steps from SENAITE LIMS ...")
  context = portal_setup._getImportContext(PROFILE_ID)
  portal = context.getSite()
  setup_html_filter(portal)
  portal_setup.runAllImportStepsFromProfile(PROFILE_ID)
  logger.info("Run all import steps from SENAITE LIMS [DONE]")
Initial version to 1000
19,562
def spotlight_search_route(context, request):
  """The spotlight search route.

  Searches all known catalogs and returns the combined results.

  Args:
    context: route context.
    request: current request.

  Returns:
    dict: result count and items sorted by title.
  """
  catalogs = [
      CATALOG_ANALYSIS_REQUEST_LISTING,
      "portal_catalog",
      "bika_setup_catalog",
      "bika_catalog",
      "bika_catalog_worksheet_listing",
  ]

  search_results = []
  for catalog in catalogs:
    search_results.extend(search(catalog=catalog))

  # Use a list comprehension instead of map(): on Python 3 map() returns
  # an iterator, which would break the len() call below.
  items = [get_brain_info(brain) for brain in search_results]

  return {
      "count": len(items),
      "items": sorted(items, key=itemgetter("title")),
  }
The spotlight search route
19,563
def get_brain_info(brain):
    """Extract the spotlight result info from a catalog brain."""
    icon = api.get_icon(brain)
    # the generic document icon adds no information -> drop it
    if "document_icon.gif" in icon:
        icon = ""
    # renamed local from `id` to avoid shadowing the builtin
    uid = api.get_id(brain)
    url = api.get_url(brain)
    title = api.get_title(brain)
    description = api.get_description(brain)
    parent = api.get_parent(brain)
    parent_title = api.get_title(parent)
    parent_url = api.get_url(parent)
    return {
        "id": uid,
        "title": title,
        "title_or_id": title or uid,
        "description": description,
        "url": url,
        "parent_title": parent_title,
        "parent_url": parent_url,
        "icon": icon,
    }
Extract the brain info
19,564
def get_search_index_for(catalog):
    """Return the name of the index to query for the given catalog."""
    default_index = "SearchableText"
    listing_index = "listing_searchable_text"
    if catalog == CATALOG_ANALYSIS_REQUEST_LISTING:
        # prefer the optimized listing index when the catalog provides it
        if listing_index in api.get_tool(catalog).indexes():
            return listing_index
    return default_index
Returns the search index to query
19,565
def make_query(catalog):
    """Prepare a catalog query from the current request parameters.

    Returns ``None`` when no search term was submitted.
    """
    query = {}
    request = api.get_request()
    index = get_search_index_for(catalog)
    limit = request.form.get("limit")

    # bugfix: "q" may be missing entirely (get() -> None); the former
    # len(q) call raised TypeError in that case
    q = request.form.get("q")
    if q:
        # wildcard suffix for prefix matching
        query[index] = q + "*"
    else:
        return None

    portal_type = request.form.get("portal_type")
    if portal_type:
        if not isinstance(portal_type, list):
            portal_type = [portal_type]
        query["portal_type"] = portal_type

    if limit and limit.isdigit():
        query["sort_limit"] = int(limit)

    return query
A function to prepare a query
19,566
def icon_cache_key(method, self, brain_or_object):
    """Generate a cache key for the icon lookup.

    The key combines the object's URL with its modification timestamp so
    the cached icon is invalidated whenever the object changes.
    """
    millis = api.get_modification_date(brain_or_object).millis()
    key = "{}?modified={}".format(api.get_url(brain_or_object), millis)
    logger.debug("Generated Cache Key: {}".format(key))
    return key
Generates a cache key for the icon lookup
19,567
def get_icon_for(self, brain_or_object):
    """Get the navigation portlet icon markup for the brain or object.

    Returns an ``<img>`` HTML tag, or an empty string when the portal
    type declares no icon.
    """
    portal_types = api.get_tool("portal_types")
    fti = portal_types.getTypeInfo(api.get_portal_type(brain_or_object))
    icon = fti.getIcon()
    if not icon:
        return ""
    # prefer the "_big" variant of the icon when it is traversable
    icon_big = icon.replace(".png", "_big.png")
    # fall back to the small icon when the big one does not exist
    if self.context.restrictedTraverse(icon_big, None) is None:
        icon_big = None
    portal_url = api.get_url(api.get_portal())
    title = api.get_title(brain_or_object)
    html_tag = "<img title='{}' src='{}/{}' width='16' />".format(
        title, portal_url, icon_big or icon)
    logger.info("Generated Icon Tag for {}: {}".format(
        api.get_path(brain_or_object), html_tag))
    return html_tag
Get the navigation portlet icon for the brain or object
19,568
def getViewportValues(self, view=None):
    """Determine the value of the viewport meta-tag."""
    # fixed settings; join keeps the output format identical
    return ','.join((
        'width=device-width',
        'initial-scale=1.0',
    ))
Determine the value of the viewport meta - tag
19,569
def getColumnsClasses(self, view=None):
    """Determine the CSS grid classes for the portlet/content columns.

    The left column is called ``plone.leftcolumn``; the right column is
    called ``plone.rightcolumn``.  Returns a dict with keys ``one``
    (left), ``content`` and ``two`` (right).
    """
    plone_view = getMultiAdapter(
        (self.context, self.request), name=u'plone')
    portal_state = getMultiAdapter(
        (self.context, self.request), name=u'plone_portal_state')

    sl = plone_view.have_portlets('plone.leftcolumn', view=view)
    sr = plone_view.have_portlets('plone.rightcolumn', view=view)
    # NOTE(review): the grid classes turned out identical for LTR and RTL
    # in every branch, so isRTL no longer influences the result
    isRTL = portal_state.is_rtl()

    columns = dict(one="", content="", two="")
    if not sl and not sr:
        # no portlet columns: content spans the full width
        columns['content'] = "col-md-12"
    elif sl and sr:
        columns['one'] = "col-xs-12 col-md-2"
        columns['content'] = "col-xs-12 col-md-8"
        columns['two'] = "col-xs-12 col-md-2"
    elif sr and not sl:
        # bugfix: this case previously only fired for LTR layouts, so RTL
        # pages with only a right column got empty classes everywhere
        columns['content'] = "col-xs-12 col-md-10"
        columns['two'] = "col-xs-12 col-md-2"
    else:  # sl and not sr -- the former RTL/LTR branches were identical
        columns['one'] = "col-xs-12 col-md-2"
        columns['content'] = "col-xs-12 col-md-10"

    return columns
Determine whether a column should be shown . The left column is called plone . leftcolumn ; the right column is called plone . rightcolumn .
19,570
def setupitems(self):
    """Lookup the available setup items.

    Returns the direct children of the setup folder, skipping those
    excluded from navigation, sorted by translated title.
    """
    query = {
        "path": {
            "query": api.get_path(self.setup),
            "depth": 1,
        },
    }
    items = api.search(query, "portal_catalog")
    items = [item for item in items if not item.exclude_from_nav]
    # sort by the *translated* title; key= works on both Python 2 and 3,
    # unlike the former Python-2-only cmp= based sort
    return sorted(items, key=lambda brain: t(api.get_title(brain)))
Lookup available setup items
19,571
def content_type(self):
    """Return the content-type determined by the file extension (memoized)."""
    if hasattr(self, '_content_type'):
        return self._content_type
    extension = os.path.splitext(self._file_path)[1]
    known_types = {
        '.csv': 'text/csv',
        '.tsv': 'text/tab-separated-values',
    }
    # anything unrecognized is treated as plain text
    self._content_type = known_types.get(extension, 'text/plain')
    return self._content_type
Returns the content - type value determined by file extension .
19,572
def perform(self):
    """Execute the current TONUpload object.

    Small files are sent in a single request; larger files are uploaded
    in chunks, adapting the chunk size to observed response times.
    Returns the resource location of the uploaded file.
    """
    if self._file_size < self._SINGLE_UPLOAD_MAX:
        # single-shot upload
        resource = "{0}{1}".format(self._DEFAULT_RESOURCE, self.bucket)
        # bugfix: the file handle was previously never closed
        with open(self._file_path, 'rb') as f:
            response = self.__upload(resource, f.read())
        return response.headers['location']

    # chunked (resumable) upload
    response = self.__init_chunked_upload()
    min_chunk_size = int(response.headers['x-ton-min-chunk-size'])
    chunk_size = min_chunk_size * self._DEFAULT_CHUNK_SIZE
    location = response.headers['location']

    # bugfix: with-block guarantees the handle is closed even when a
    # chunk upload raises
    with open(self._file_path, 'rb') as f:
        bytes_read = 0
        while True:
            bytes = f.read(chunk_size)
            if not bytes:
                break
            bytes_start = bytes_read
            bytes_read += len(bytes)
            response = self.__upload_chunk(
                location, chunk_size, bytes, bytes_start, bytes_read)
            # scale the next chunk by how fast the server responded
            response_time = int(response.headers['x-response-time'])
            chunk_size = min_chunk_size * size(
                self._DEFAULT_CHUNK_SIZE, self._RESPONSE_TIME_MAX,
                response_time)

    # strip the query string from the location
    return location.split("?")[0]
Executes the current TONUpload object .
19,573
def __upload(self, resource, bytes):
    """Perform a single-request (non-chunked) upload."""
    expire = self.options.get('x-ton-expires', self._DEFAULT_EXPIRE)
    headers = {
        'x-ton-expires': http_time(expire),
        'content-length': str(self._file_size),
        'content-type': self.content_type,
    }
    return Request(
        self._client, 'post', resource,
        domain=self._DEFAULT_DOMAIN,
        headers=headers,
        body=bytes).perform()
Performs a single chunk upload .
19,574
def __init_chunked_upload(self):
    """Initialize a resumable (multi-chunk) upload session."""
    expire = self.options.get('x-ton-expires', self._DEFAULT_EXPIRE)
    headers = {
        'x-ton-content-type': self.content_type,
        'x-ton-content-length': str(self._file_size),
        'x-ton-expires': http_time(expire),
        # the init request carries no body
        'content-length': str(0),
        'content-type': self.content_type,
    }
    # NOTE(review): targets _DEFAULT_BUCKET rather than self.bucket -- confirm
    resource = "{0}{1}?resumable=true".format(
        self._DEFAULT_RESOURCE, self._DEFAULT_BUCKET)
    return Request(
        self._client, 'post', resource,
        domain=self._DEFAULT_DOMAIN,
        headers=headers).perform()
Initialization for a multi - chunk upload .
19,575
def __upload_chunk(self, resource, chunk_size, bytes, bytes_start, bytes_read):
    """Upload a single chunk of a multi-chunk upload."""
    # NOTE(review): length is derived from the remaining byte count rather
    # than len(bytes) -- confirm this matches the server's expectation
    remaining = self._file_size - bytes_read
    headers = {
        'content-type': self.content_type,
        'content-length': str(min([chunk_size, remaining])),
        'content-range': "bytes {0}-{1}/{2}".format(
            bytes_start, bytes_read - 1, self._file_size),
    }
    return Request(
        self._client, 'put', resource,
        domain=self._DEFAULT_DOMAIN,
        headers=headers,
        body=bytes).perform()
Uploads a single chunk of a multi - chunk upload .
19,576
def next(self):
    """Return the next item in the cursor (iterator protocol).

    Fetches the next server page when the local collection is exhausted;
    rewinds the index before raising StopIteration so the cursor can be
    iterated again.
    """
    while True:
        if self._current_index < len(self._collection):
            item = self._collection[self._current_index]
            self._current_index += 1
            return item
        if not self._next_cursor:
            # fully exhausted: rewind for a potential re-iteration
            self._current_index = 0
            raise StopIteration
        # more pages available on the server -- fetch and continue
        self.__fetch_next()
Returns the next item in the cursor .
19,577
def save(self):
    """Persist the current object instance.

    Only creation is supported: an instance that already has an ``id``
    cannot be updated through this endpoint (raises HTTPError).
    """
    params = self.to_params()
    # the creation endpoint expects a list under 'tweet_ids'
    if 'tweet_id' in params:
        params['tweet_ids'] = [params.pop('tweet_id')]
    if self.id:
        raise HTTPError("Method PUT not allowed.")
    resource = self.RESOURCE_COLLECTION.format(account_id=self.account.id)
    response = Request(
        self.account.client, 'post', resource, params=params).perform()
    return self.from_response(response.body['data'][0])
Saves or updates the current object instance depending on the presence of object . id .
19,578
def preview(self):
    """Return an HTML preview for a Scheduled Tweet.

    Returns None when the instance has not been persisted yet (no id).
    """
    if not self.id:
        return None
    resource = self.PREVIEW.format(account_id=self.account.id, id=self.id)
    response = Request(self.account.client, 'get', resource).perform()
    return response.body['data']
Returns an HTML preview for a Scheduled Tweet .
19,579
def load(klass, client, id, **kwargs):
    """Return an object instance for the resource identified by ``id``."""
    endpoint = klass.RESOURCE.format(id=id)
    request = Request(client, 'get', endpoint, params=kwargs)
    data = request.perform().body['data']
    return klass(client).from_response(data)
Returns an object instance for a given resource .
19,580
def all(klass, client, **kwargs):
    """Return a Cursor instance over the full resource collection."""
    request = Request(
        client, 'get', klass.RESOURCE_COLLECTION, params=kwargs)
    return Cursor(klass, request, init_with=[client])
Returns a Cursor instance for a given resource .
19,581
def features(self):
    """Return the collection of features available to the current account."""
    self._validate_loaded()
    endpoint = self.FEATURES.format(id=self.id)
    return Request(self.client, 'get', endpoint).perform().body['data']
Returns a collection of features available to the current account .
19,582
def scoped_timeline(self, *id, **kwargs):
    """Return the most recent promotable Tweets created by the given user(s)."""
    self._validate_loaded()
    # kwargs may deliberately override 'user_id'
    params = {'user_id': id}
    params.update(kwargs)
    endpoint = self.SCOPED_TIMELINE.format(id=self.id)
    response = Request(self.client, 'get', endpoint, params=params).perform()
    return response.body['data']
Returns the most recent promotable Tweets created by the specified Twitter user .
19,583
def get_version():
    """Return a string representation of the current SDK version.

    A trailing string element in ``VERSION`` (e.g. ``'rc1'``) is appended
    directly, without a separating dot.
    """
    parts = list(VERSION)
    suffix = ''
    if isinstance(parts[-1], str):
        suffix = parts.pop()
    return '.'.join(str(p) for p in parts) + suffix
Returns a string representation of the current SDK version .
19,584
def to_time(time, granularity):
    """Return a time string truncated/rounded per the given granularity."""
    if not granularity:
        # no granularity: date objects keep pure-date formatting
        if type(time) is datetime.date:
            return format_date(time)
        return format_time(time)
    if granularity == GRANULARITY.HOUR:
        return format_time(remove_minutes(time))
    if granularity == GRANULARITY.DAY:
        return format_date(remove_hours(time))
    # any other granularity: full time string
    return format_time(time)
Returns a truncated and rounded time string based on the specified granularity .
19,585
def http_time(time):
    """Format a datetime as an RFC 1123 compliant string (GMT)."""
    timestamp = mktime(time.timetuple())
    return formatdate(timeval=timestamp, localtime=False, usegmt=True)
Formats a datetime as an RFC 1123 compliant string .
19,586
def size(default_chunk_size, response_time_max, response_time_actual):
    """Scale the chunk size based on the observed response time.

    The faster the server responds relative to ``response_time_max``, the
    larger the chunk (capped at ``default_chunk_size``); the result is
    always at least 1.
    """
    if response_time_actual == 0:
        # avoid division by zero on sub-millisecond responses
        response_time_actual = 1
    # simplified from 1 / (actual / max): one division, and immune to the
    # Python-2 integer-division ZeroDivisionError when actual < max
    scale = response_time_max / response_time_actual
    size = int(default_chunk_size * scale)
    return min(max(size, 1), default_chunk_size)
Determines the chunk size based on response times .
19,587
def sandbox():
    """Build the property accessors that enable/disable sandbox mode.

    Intended for use as ``property(**sandbox())``.
    """
    def fget(self):
        # unset -> None
        return self._options.get('sandbox', None)

    def fset(self, value):
        self._options['sandbox'] = value

    return {'fget': fget, 'fset': fset}
Enables and disables sandbox mode .
19,588
def trace():
    """Build the property accessors that enable/disable request tracing.

    Intended for use as ``property(**trace())``.
    """
    def fget(self):
        # unset -> None
        return self._options.get('trace', None)

    def fset(self, value):
        self._options['trace'] = value

    return {'fget': fget, 'fset': fset}
Enables and disables request tracing .
19,589
def platform_versions(klass, account, **kwargs):
    """Return a cursor over the supported platform versions."""
    endpoint = klass.RESOURCE_OPTIONS + 'platform_versions'
    request = Request(account.client, 'get', endpoint, params=kwargs)
    # no model class: the cursor yields raw response items
    return Cursor(None, request)
Returns a list of supported platform versions
19,590
def targeting_criteria(self, id=None, **kwargs):
    """Return the targeting criteria for the current line item.

    With ``id`` given, loads that single criterion; otherwise returns
    the full collection.
    """
    self._validate_loaded()
    if id is not None:
        return TargetingCriteria.load(self.account, id, **kwargs)
    return TargetingCriteria.all(self.account, self.id, **kwargs)
Returns a collection of targeting criteria available to the current line item .
19,591
def preview(klass, account, **kwargs):
    """Return an HTML preview of a tweet, either new or existing."""
    params = dict(kwargs)
    media_ids = params.get('media_ids')
    if isinstance(media_ids, list):
        # the API expects a comma separated string of ids
        params['media_ids'] = ','.join(map(str, media_ids))
    if params.get('id'):
        endpoint = klass.TWEET_ID_PREVIEW
    else:
        endpoint = klass.TWEET_PREVIEW
    endpoint = endpoint.format(account_id=account.id, id=params.get('id'))
    response = Request(
        account.client, 'get', endpoint, params=params).perform()
    return response.body['data']
Returns an HTML preview of a tweet either new or existing .
19,592
def create(klass, account, **kwargs):
    """Create a Promoted-Only Tweet via the specialized Ads API endpoint."""
    params = dict(kwargs)
    media_ids = params.get('media_ids')
    if isinstance(media_ids, list):
        # the API expects a comma separated string of ids
        params['media_ids'] = ','.join(map(str, media_ids))
    endpoint = klass.TWEET_CREATE.format(account_id=account.id)
    response = Request(
        account.client, 'post', endpoint, params=params).perform()
    return response.body['data']
Creates a Promoted - Only Tweet using the specialized Ads API end point .
19,593
def resource_property(klass, name, **kwargs):
    """Attach a property called ``name`` to ``klass``.

    The kwargs are recorded in ``klass.PROPERTIES`` for later
    introspection.  Recognized options: ``default`` (value when the
    backing attribute is unset) and ``readonly`` (omit the setter).
    """
    klass.PROPERTIES[name] = kwargs
    attr = '_%s' % name
    default = kwargs.get('default', None)

    def getter(self):
        return getattr(self, attr, default)

    if kwargs.get('readonly', False):
        setattr(klass, name, property(getter))
        return

    def setter(self, value):
        setattr(self, attr, value)

    setattr(klass, name, property(getter, setter))
Builds a resource object property .
19,594
def from_response(self, response):
    """Populate this object's attributes from a parsed JSON API response.

    Applies the declared per-property transforms (currently only TIME,
    which parses the raw string into a datetime) and skips zero-valued
    integers so property defaults still apply.
    """
    for name in self.PROPERTIES:
        attr = '_{0}'.format(name)
        transform = self.PROPERTIES[name].get('transform', None)
        value = response.get(name, None)
        if transform and transform == TRANSFORM.TIME and value:
            setattr(self, attr, dateutil.parser.parse(value))
            # bugfix: previously execution fell through and the parsed
            # datetime was immediately overwritten with the raw string
            continue
        if isinstance(value, int) and value == 0:
            # skip zero placeholders
            continue
        setattr(self, attr, value)
    return self
Populates a given objects attributes from a parsed JSON API response . This helper handles all necessary type coercions as it assigns attribute values .
19,595
def to_params(self):
    """Generate a dict of request parameter values for the current object.

    Handles type coercions: datetimes are formatted, lists are joined
    with commas, and booleans are lower-cased strings.
    """
    params = {}
    for name in self.PROPERTIES:
        attr = '_{0}'.format(name)
        # prefer the raw backing attribute; fall back to the property
        # (NOTE(review): a falsy backing value such as 0 or '' also takes
        # the property fallback -- confirm this is intended)
        value = getattr(self, attr, None) or getattr(self, name, None)
        if value is None:
            # unset properties are omitted entirely
            continue
        if isinstance(value, datetime):
            params[name] = format_time(value)
        elif isinstance(value, list):
            params[name] = ','.join(map(str, value))
        elif isinstance(value, bool):
            params[name] = str(value).lower()
        else:
            params[name] = value
    return params
Generates a Hash of property values for the current object . This helper handles all necessary type coercions as it generates its output .
19,596
def stats(self, metrics, **kwargs):
    """Pull the requested metrics for this object instance."""
    klass = self.__class__
    return klass.all_stats(self.account, [self.id], metrics, **kwargs)
Pulls a list of metrics for the current object instance .
19,597
def _standard_params(klass, ids, metric_groups, **kwargs):
    """Build the standard parameter dict shared by all stats requests."""
    end_time = kwargs.get('end_time', datetime.utcnow())
    # default window: the previous 7 days (604800 seconds)
    start_time = kwargs.get(
        'start_time', end_time - timedelta(seconds=604800))
    granularity = kwargs.get('granularity', GRANULARITY.HOUR)
    placement = kwargs.get('placement', PLACEMENT.ALL_ON_TWITTER)

    return {
        'metric_groups': ','.join(metric_groups),
        'start_time': to_time(start_time, granularity),
        'end_time': to_time(end_time, granularity),
        'granularity': granularity.upper(),
        'entity': klass.ANALYTICS_MAP[klass.__name__],
        'placement': placement,
        'entity_ids': ','.join(ids),
    }
Sets the standard params for a stats request
19,598
def all_stats(klass, account, ids, metric_groups, **kwargs):
    """Pull metrics synchronously for the specified set of object IDs."""
    params = klass._standard_params(ids, metric_groups, **kwargs)
    endpoint = klass.RESOURCE_SYNC.format(account_id=account.id)
    response = Request(
        account.client, 'get', endpoint, params=params).perform()
    return response.body['data']
Pulls a list of metrics for a specified set of object IDs .
19,599
def queue_async_stats_job(klass, account, ids, metric_groups, **kwargs):
    """Queue an asynchronous stats job for the specified object IDs."""
    params = klass._standard_params(ids, metric_groups, **kwargs)
    # async-only segmentation options (explicitly None when not supplied)
    for key in ('platform', 'country', 'segmentation_type'):
        params[key] = kwargs.get(key, None)
    endpoint = klass.RESOURCE_ASYNC.format(account_id=account.id)
    response = Request(
        account.client, 'post', endpoint, params=params).perform()
    return response.body['data']
Queues a list of metrics for a specified set of object IDs asynchronously