docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Initializes an UUID (or GUID) data type map. Args: data_type_definition (DataTypeDefinition): data type definition.
def __init__(self, data_type_definition): super(UUIDMap, self).__init__(data_type_definition) self._byte_order = data_type_definition.byte_order
814,096
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
def FoldByteStream(self, mapped_value, **unused_kwargs): value = None try: if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN: value = mapped_value.bytes elif self._byte_order == definitions.BYTE_ORDER_LITTLE_ENDIAN: value = mapped_value.bytes_le except Exception as ...
814,097
Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: uuid.UUID: mapped value. Raises: MappingError: if the...
def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): data_type_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, data_type_size) try: if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN: mapped_v...
814,098
Initializes a sequence data type map. Args: data_type_definition (DataTypeDefinition): data type definition.
def __init__(self, data_type_definition): element_data_type_definition = self._GetElementDataTypeDefinition( data_type_definition) super(ElementSequenceDataTypeMap, self).__init__(data_type_definition) self._element_data_type_map = DataTypeMapFactory.CreateDataTypeMapByType( element_da...
814,099
Calculates the elements data size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: the elements data size or None if not available.
def _CalculateElementsDataSize(self, context): elements_data_size = None if self._HasElementsDataSize(): elements_data_size = self._EvaluateElementsDataSize(context) elif self._HasNumberOfElements(): element_byte_size = self._element_data_type_definition.GetByteSize() if element_byt...
814,100
Evaluates elements data size. Args: context (DataTypeMapContext): data type map context. Returns: int: elements data size. Raises: MappingError: if the elements data size cannot be determined.
def _EvaluateElementsDataSize(self, context): elements_data_size = None if self._data_type_definition.elements_data_size: elements_data_size = self._data_type_definition.elements_data_size elif self._data_type_definition.elements_data_size_expression: expression = self._data_type_definitio...
814,101
Evaluates number of elements. Args: context (DataTypeMapContext): data type map context. Returns: int: number of elements. Raises: MappingError: if the number of elements cannot be determined.
def _EvaluateNumberOfElements(self, context): number_of_elements = None if self._data_type_definition.number_of_elements: number_of_elements = self._data_type_definition.number_of_elements elif self._data_type_definition.number_of_elements_expression: expression = self._data_type_definitio...
814,102
Retrieves the element data type definition. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: DataTypeDefinition: element data type definition. Raises: FormatError: if the element data type cannot be determined from the data type definition.
def _GetElementDataTypeDefinition(self, data_type_definition): if not data_type_definition: raise errors.FormatError('Missing data type definition') element_data_type_definition = getattr( data_type_definition, 'element_data_type_definition', None) if not element_data_type_definition: ...
814,103
Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None.
def GetSizeHint(self, context=None, **unused_kwargs): context_state = getattr(context, 'state', {}) elements_data_size = self.GetByteSize() if elements_data_size: return elements_data_size try: elements_data_size = self._CalculateElementsDataSize(context) except errors.MappingErro...
814,104
Initializes a sequence data type map. Args: data_type_definition (DataTypeDefinition): data type definition.
def __init__(self, data_type_definition): super(SequenceMap, self).__init__(data_type_definition) self._fold_byte_stream = None self._map_byte_stream = None self._operation = None if (self._element_data_type_definition.IsComposite() or data_type_definition.elements_data_size_expression...
814,105
Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Rai...
def _CompositeMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): elements_data_size = None elements_terminator = None number_of_elements = None if self._HasElementsDataSize(): elements_data_size = self._EvaluateElementsDataSize(context) element_byte...
814,106
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
def _LinearFoldByteStream(self, mapped_value, **unused_kwargs): try: return self._operation.WriteTo(mapped_value) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exceptio...
814,107
Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: Map...
def _LinearMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): elements_data_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, elements_data_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_of...
814,108
Initializes a stream data type map. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: FormatError: if the data type map cannot be determined from the data type definition.
def __init__(self, data_type_definition): super(StreamMap, self).__init__(data_type_definition) self._fold_byte_stream = None self._map_byte_stream = None if self._element_data_type_definition.IsComposite(): raise errors.FormatError('Unsupported composite element data type')
814,110
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
def FoldByteStream(self, mapped_value, context=None, **unused_kwargs): elements_data_size = self._CalculateElementsDataSize(context) if elements_data_size is not None: if elements_data_size != len(mapped_value): raise errors.FoldingError( 'Mismatch between elements data size and m...
814,111
Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: MappingErr...
def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): context_state = getattr(context, 'state', {}) size_hints = context_state.get('size_hints', {}) elements_data_size = self._CalculateElementsDataSize(context) if elements_data_size is not None: self._Ch...
814,112
Initializes a padding data type map. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: FormatError: if the data type map cannot be determined from the data type definition.
def __init__(self, data_type_definition): super(PaddingMap, self).__init__(data_type_definition) self.byte_size = None
814,113
Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream.
def MapByteStream(self, byte_stream, byte_offset=0, **unused_kwargs): return byte_stream[byte_offset:byte_offset + self.byte_size]
814,114
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
def FoldByteStream(self, mapped_value, **kwargs): try: byte_stream = mapped_value.encode(self._data_type_definition.encoding) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.na...
814,115
Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: str: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream.
def MapByteStream(self, byte_stream, byte_offset=0, **kwargs): byte_stream = super(StringMap, self).MapByteStream( byte_stream, byte_offset=byte_offset, **kwargs) if self._HasElementsTerminator(): # Remove the elements terminator and any trailing data from # the byte stream. elem...
814,116
Initializes a structure data type map. Args: data_type_definition (DataTypeDefinition): data type definition.
def __init__(self, data_type_definition): super(StructureMap, self).__init__(data_type_definition) self._attribute_names = self._GetAttributeNames(data_type_definition) self._data_type_map_cache = {} self._data_type_maps = self._GetMemberDataTypeMaps( data_type_definition, self._data_type_m...
814,117
Determines if the data type definition needs a composite map. Args: data_type_definition (DataTypeDefinition): structure data type definition. Returns: bool: True if a composite map is needed, False otherwise. Raises: FormatError: if a composite map is needed cannot be determined from t...
def _CheckCompositeMap(self, data_type_definition): if not data_type_definition: raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if not members: raise errors.FormatError('Invalid data type definition missing members') i...
814,118
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
def _CompositeFoldByteStream( self, mapped_value, context=None, **unused_kwargs): context_state = getattr(context, 'state', {}) attribute_index = context_state.get('attribute_index', 0) subcontext = context_state.get('context', None) if not subcontext: subcontext = DataTypeMapContext(...
814,119
Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: Ma...
def _CompositeMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): context_state = getattr(context, 'state', {}) attribute_index = context_state.get('attribute_index', 0) mapped_values = context_state.get('mapped_values', None) subcontext = context_state.get('cont...
814,120
Determines the attribute (or field) names of the members. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: list[str]: attribute names. Raises: FormatError: if the attribute names cannot be determined from the data type definition.
def _GetAttributeNames(self, data_type_definition): if not data_type_definition: raise errors.FormatError('Missing data type definition') attribute_names = [] for member_definition in data_type_definition.members: attribute_names.append(member_definition.name) return attribute_names
814,121
Retrieves the member data type maps. Args: data_type_definition (DataTypeDefinition): data type definition. data_type_map_cache (dict[str, DataTypeMap]): cached data type maps. Returns: list[DataTypeMap]: member data type maps. Raises: FormatError: if the data type maps cannot be ...
def _GetMemberDataTypeMaps(self, data_type_definition, data_type_map_cache): if not data_type_definition: raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if not members: raise errors.FormatError('Invalid data type definition...
814,122
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
def _LinearFoldByteStream(self, mapped_value, **unused_kwargs): try: attribute_values = [ getattr(mapped_value, attribute_name, None) for attribute_name in self._attribute_names] attribute_values = [ value for value in attribute_values if value is not None] retur...
814,123
Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if...
def _LinearMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): members_data_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, members_data_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offs...
814,124
Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None.
def GetSizeHint(self, context=None, **unused_kwargs): context_state = getattr(context, 'state', {}) subcontext = context_state.get('context', None) if not subcontext: mapped_values = context_state.get('mapped_values', None) subcontext = DataTypeMapContext(values={ type(mapped_val...
814,125
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
def FoldByteStream(self, mapped_value, **unused_kwargs): # pylint: disable=redundant-returns-doc raise errors.FoldingError( 'Unable to fold {0:s} data type into byte stream'.format( self._data_type_definition.TYPE_INDICATOR))
814,127
Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream.
def MapByteStream(self, byte_stream, **unused_kwargs): # pylint: disable=redundant-returns-doc raise errors.MappingError( 'Unable to map {0:s} data type to byte stream'.format( self._data_type_definition.TYPE_INDICATOR))
814,128
Retrieves the name of an enumeration value by number. Args: number (int): number. Returns: str: name of the enumeration value or None if no corresponding enumeration value was found.
def GetName(self, number): value = self._data_type_definition.values_per_number.get(number, None) if not value: return None return value.name
814,129
Initializes a data type maps factory. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry.
def __init__(self, definitions_registry): super(DataTypeMapFactory, self).__init__() self._definitions_registry = definitions_registry
814,130
Creates a specific data type map by name. Args: definition_name (str): name of the data type definition. Returns: DataTypeMap: data type map or None if the date type definition is not available.
def CreateDataTypeMap(self, definition_name): data_type_definition = self._definitions_registry.GetDefinitionByName( definition_name) if not data_type_definition: return None return DataTypeMapFactory.CreateDataTypeMapByType(data_type_definition)
814,131
Creates a specific data type map by type indicator. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: DataTypeMap: data type map or None if the date type definition is not available.
def CreateDataTypeMapByType(cls, data_type_definition): data_type_map_class = cls._MAP_PER_DEFINITION.get( data_type_definition.TYPE_INDICATOR, None) if not data_type_map_class: return None return data_type_map_class(data_type_definition)
814,132
Initializes a data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None): super(DataTypeDefinition, self).__init__() self.aliases = aliases or [] self.description = description self.name = name self.urls = urls
814,133
Initializes a storage data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None): super(StorageDataTypeDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.byte_order = definitions.BYTE_ORDER_NATIVE
814,134
Initializes a fixed-size data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None): super(FixedSizeDataTypeDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.size = definitions.SIZE_NATIVE self.units = 'bytes'
814,135
Initializes a boolean data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. false_value (Optional[int]): value that represents false. urls (Optional[list[str]]): URLs.
def __init__( self, name, aliases=None, description=None, false_value=0, urls=None): super(BooleanDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.false_value = false_value self.true_value = None
814,137
Initializes an integer data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. maximum_value (Optional[int]): maximum allowed value of the integer data type. minimum_value (Optional[int]): minimum allowe...
def __init__( self, name, aliases=None, description=None, maximum_value=None, minimum_value=None, urls=None): super(IntegerDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.format = definitions.FORMAT_SIGNED self.maximum_value = maximum_valu...
814,138
Initializes an UUID data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None): super(UUIDDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.size = 16
814,139
Initializes a padding data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. alignment_size (Optional[int]): alignment size. description (Optional[str]): description. urls (Optional[list[str]]): URLs.
def __init__( self, name, aliases=None, alignment_size=None, description=None, urls=None): super(PaddingDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.alignment_size = alignment_size
814,140
Initializes a sequence data type definition. Args: name (str): name. data_type_definition (DataTypeDefinition): sequence element data type definition. aliases (Optional[list[str]]): aliases. data_type (Optional[str]): name of the sequence element data type. description (Opti...
def __init__( self, name, data_type_definition, aliases=None, data_type=None, description=None, urls=None): super(ElementSequenceDataTypeDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.byte_order = getattr( data_type_definition, 'byte_...
814,141
Initializes a string data type definition. Args: name (str): name. data_type_definition (DataTypeDefinition): string element data type definition. aliases (Optional[list[str]]): aliases. data_type (Optional[str]): name of the string element data type. description (Optional[s...
def __init__( self, name, data_type_definition, aliases=None, data_type=None, description=None, urls=None): super(StringDefinition, self).__init__( name, data_type_definition, aliases=aliases, data_type=data_type, description=description, urls=urls) self.encoding = 'ascii'
814,143
Initializes a data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None): super(DataTypeDefinitionWithMembers, self).__init__( name, aliases=aliases, description=description, urls=urls) self._byte_size = None self.members = [] self.sections = []
814,144
Adds a member definition. Args: member_definition (DataTypeDefinition): member data type definition.
def AddMemberDefinition(self, member_definition): self._byte_size = None self.members.append(member_definition) if self.sections: section_definition = self.sections[-1] section_definition.members.append(member_definition)
814,145
Initializes a member section definition. Args: name (str): name.
def __init__(self, name): super(MemberSectionDefinition, self).__init__() self.name = name self.members = []
814,148
Initializes a data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None): super(StructureDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.family_definition = None
814,149
Initializes an enumeration data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None): super(ConstantDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.value = None
814,152
Initializes an enumeration value. Args: name (str): name. number (int): number. aliases (Optional[list[str]]): aliases. description (Optional[str]): description.
def __init__(self, name, number, aliases=None, description=None): super(EnumerationValue, self).__init__() self.aliases = aliases or [] self.description = description self.name = name self.number = number
814,153
Initializes an enumeration data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None): super(EnumerationDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.values = [] self.values_per_alias = {} self.values_per_name = {} self.values_per_number = {}
814,154
Adds an enumeration value. Args: name (str): name. number (int): number. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. Raises: KeyError: if the enumeration value already exists.
def AddValue(self, name, number, aliases=None, description=None): if name in self.values_per_name: raise KeyError('Value with name: {0:s} already exists.'.format(name)) if number in self.values_per_number: raise KeyError('Value with number: {0!s} already exists.'.format(number)) for alias...
814,155
Initializes a format data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None): super(FormatDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.metadata = {}
814,156
Initializes a structure family data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None): super(StructureFamilyDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.members = [] self.runtime = None
814,157
Adds a member definition. Args: member_definition (DataTypeDefinition): member data type definition.
def AddMemberDefinition(self, member_definition): self.members.append(member_definition) member_definition.family_definition = self
814,158
Reads a boolean data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data typ...
def _ReadBooleanDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): return self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.BooleanDefinition, definition_name, self._SUPPORTED_ATTRIBU...
814,222
Reads a character data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data t...
def _ReadCharacterDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): return self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.CharacterDefinition, definition_name, self._SUPPORTED_ATT...
814,223
Reads a floating-point data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the d...
def _ReadFloatingPointDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): return self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.FloatingPointDefinition, definition_name, self._SUPPO...
814,230
Reads a data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. Returns: DataTypeDefinition: data type definition or None. Raises: DefinitionReaderError: if ...
def _ReadDefinition(self, definitions_registry, definition_values): if not definition_values: error_message = 'missing definition values' raise errors.DefinitionReaderError(None, error_message) name = definition_values.get('name', None) if not name: error_message = 'missing name' ...
814,245
Reads data type definitions from a file into the registry. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. path (str): path of the file to read from.
def ReadFile(self, definitions_registry, path): with open(path, 'r') as file_object: self.ReadFileObject(definitions_registry, file_object)
814,246
Retrieves a format error location. Args: yaml_definition (dict[str, object]): current YAML definition. last_definition_object (DataTypeDefinition): previous data type definition. Returns: str: format error location.
def _GetFormatErrorLocation( self, yaml_definition, last_definition_object): name = yaml_definition.get('name', None) if name: error_location = 'in: {0:s}'.format(name or '<NAMELESS>') elif last_definition_object: error_location = 'after: {0:s}'.format(last_definition_object.name) ...
814,247
Reads data type definitions from a file-like object into the registry. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. file_object (file): file-like object to read from. Raises: FormatError: if the definitions values are missing or if the f...
def ReadFileObject(self, definitions_registry, file_object): last_definition_object = None error_location = None error_message = None try: yaml_generator = yaml.safe_load_all(file_object) for yaml_definition in yaml_generator: definition_object = self._ReadDefinition( ...
814,248
Reads the organization given by identifier from HDX and returns Organization object Args: identifier (str): Identifier of organization configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[Organization]: Orga...
def read_from_hdx(identifier, configuration=None): # type: (str, Optional[Configuration]) -> Optional['Organization'] organization = Organization(configuration=configuration) result = organization._load_from_hdx('organization', identifier) if result: return organiza...
814,272
Returns the organization's users. Args: capacity (Optional[str]): Filter by capacity eg. member, admin. Defaults to None. Returns: List[User]: Organization's users.
def get_users(self, capacity=None): # type: (Optional[str]) -> List[User] users = list() usersdicts = self.data.get('users') if usersdicts is not None: for userdata in usersdicts: if capacity is not None and userdata['capacity'] != capacity: ...
814,273
Add new or update existing user in organization with new metadata. Capacity eg. member, admin must be supplied either within the User object or dictionary or using the capacity argument (which takes precedence). Args: user (Union[User,Dict,str]): Either a user id or user metadata ei...
def add_update_user(self, user, capacity=None): # type: (Union[hdx.data.user.User,Dict,str],Optional[str]) -> None if isinstance(user, str): user = hdx.data.user.User.read_from_hdx(user, configuration=self.configuration) elif isinstance(user, dict): user = hdx.da...
814,274
Add new or update existing users in organization with new metadata. Capacity eg. member, admin must be supplied either within the User object or dictionary or using the capacity argument (which takes precedence). Args: users (List[Union[User,Dict,str]]): A list of either user ids or...
def add_update_users(self, users, capacity=None): # type: (List[Union[hdx.data.user.User,Dict,str]],Optional[str]) -> None if not isinstance(users, list): raise HDXError('Users should be a list!') for user in users: self.add_update_user(user, capacity)
814,275
Makes a read call to HDX passing in given parameter. Args: object_type (str): Description of HDX object type (for messages) value (str): Value of HDX field fieldname (str): HDX field name. Defaults to id. action (Optional[str]): Replacement CKAN action url to use...
def _read_from_hdx(self, object_type, value, fieldname='id', action=None, **kwargs): # type: (str, str, str, Optional[str], Any) -> Tuple[bool, Union[Dict, str]] if not fieldname: raise HDXError('Empty %s field name!' % object_type) if action is None: ...
814,280
Helper method to load the HDX object given by identifier from HDX Args: object_type (str): Description of HDX object type (for messages) id_field (str): HDX object identifier Returns: bool: True if loaded, False if not
def _load_from_hdx(self, object_type, id_field): # type: (str, str) -> bool success, result = self._read_from_hdx(object_type, id_field) if success: self.old_data = self.data self.data = result return True logger.debug(result) return F...
814,281
Check metadata exists and contains HDX object identifier, and if so load HDX object Args: object_type (str): Description of HDX object type (for messages) id_field_name (str): Name of field containing HDX object identifier operation (str): Operation to report if error. Defau...
def _check_load_existing_object(self, object_type, id_field_name, operation='update'): # type: (str, str, str) -> None self._check_existing_object(object_type, id_field_name) if not self._load_from_hdx(object_type, self.data[id_field_name]): raise HDXError('No existing %s to...
814,283
Helper method to check that metadata for HDX object is complete Args: ignore_fields (List[str]): Any fields to ignore in the check Returns: None
def _check_required_fields(self, object_type, ignore_fields): # type: (str, List[str]) -> None for field in self.configuration[object_type]['required_fields']: if field not in self.data and field not in ignore_fields: raise HDXError('Field %s is missing in %s!' % (fi...
814,284
Helper method to check if HDX object exists and update it Args: object_type (str): Description of HDX object type (for messages) id_field_name (str): Name of field containing HDX object identifier file_to_upload (Optional[str]): File to upload to HDX **kwargs: Se...
def _merge_hdx_update(self, object_type, id_field_name, file_to_upload=None, **kwargs): # type: (str, str, Optional[str], Any) -> None merge_two_dictionaries(self.data, self.old_data) if 'batch_mode' in kwargs: # Whether or not CKAN should change groupings of datasets on /datasets page...
814,285
Helper method to check if HDX object exists in HDX and if so, update it Args: object_type (str): Description of HDX object type (for messages) id_field_name (str): Name of field containing HDX object identifier file_to_upload (Optional[str]): File to upload to HDX ...
def _update_in_hdx(self, object_type, id_field_name, file_to_upload=None, **kwargs): # type: (str, str, Optional[str], Any) -> None self._check_load_existing_object(object_type, id_field_name) # We load an existing object even thought it may well have been loaded already # to p...
814,286
Creates or updates an HDX object in HDX and return HDX object metadata dict Args: action (str): Action to perform eg. 'create', 'update' data (Dict): Data to write to HDX id_field_name (str): Name of field containing HDX object identifier or None file_to_upload (...
def _write_to_hdx(self, action, data, id_field_name, file_to_upload=None): # type: (str, Dict, str, Optional[str]) -> Dict file = None try: if file_to_upload: file = open(file_to_upload, 'rb') files = [('upload', file)] else: ...
814,287
Creates or updates an HDX object in HDX, saving current data and replacing with returned HDX object data from HDX Args: action (str): Action to perform: 'create' or 'update' id_field_name (str): Name of field containing HDX object identifier file_to_upload (Optional[...
def _save_to_hdx(self, action, id_field_name, file_to_upload=None): # type: (str, str, Optional[str]) -> None result = self._write_to_hdx(action, self.data, id_field_name, file_to_upload) self.old_data = self.data self.data = result
814,288
Helper method to check if resource exists in HDX and if so, update it, otherwise create it Args: object_type (str): Description of HDX object type (for messages) id_field_name (str): Name of field containing HDX object identifier name_field_name (str): Name of field contain...
def _create_in_hdx(self, object_type, id_field_name, name_field_name, file_to_upload=None): # type: (str, str, str, Optional[str]) -> None self.check_required_fields() if id_field_name in self.data and self._load_from_hdx(object_type, self.data[id_field_name]): ...
814,289
Helper method to delete a resource from HDX Args: object_type (str): Description of HDX object type (for messages) id_field_name (str): Name of field containing HDX object identifier Returns: None
def _delete_from_hdx(self, object_type, id_field_name): # type: (str, str) -> None if id_field_name not in self.data: raise HDXError('No %s field (mandatory) in %s!' % (id_field_name, object_type)) self._save_to_hdx('delete', id_field_name)
814,290
Remove an HDX object from a list within the parent HDX object Args: objlist (List[Union[T <= HDXObject,Dict]]): list of HDX objects obj (Union[T <= HDXObject,Dict,str]): Either an id or hdx object metadata either from an HDX object or a dictionary matchon (str): Field to mat...
def _remove_hdxobject(self, objlist, obj, matchon='id', delete=False): # type: (List[Union[HDXObjectUpperBound,Dict]], Union[HDXObjectUpperBound,Dict,str], str, bool) -> bool if objlist is None: return False if isinstance(obj, six.string_types): obj_id = obj ...
814,292
Helper function to convert supplied list of HDX objects to a list of dict Args: hdxobjects (List[T <= HDXObject]): List of HDX objects to convert Returns: List[Dict]: List of HDX objects converted to simple dictionaries
def _convert_hdxobjects(self, hdxobjects): # type: (List[HDXObjectUpperBound]) -> List[HDXObjectUpperBound] newhdxobjects = list() for hdxobject in hdxobjects: newhdxobjects.append(hdxobject.data) return newhdxobjects
814,293
Helper function to make a deep copy of a supplied list of HDX objects Args: hdxobjects (List[T <= HDXObject]): list of HDX objects to copy hdxobjectclass (type): Type of the HDX Objects to be copied attribute_to_copy (Optional[str]): An attribute to copy over from the HDX ob...
def _copy_hdxobjects(self, hdxobjects, hdxobjectclass, attribute_to_copy=None): # type: (List[HDXObjectUpperBound], type, Optional[str]) -> List[HDXObjectUpperBound] newhdxobjects = list() for hdxobject in hdxobjects: newhdxobjectdata = copy.deepcopy(hdxobject.data) ...
814,294
Add a tag Args: tag (str): Tag to add Returns: bool: True if tag added or False if tag already present
def _add_tag(self, tag): # type: (str) -> bool tags = self.data.get('tags', None) if tags: if tag in [x['name'] for x in tags]: return False else: tags = list() tags.append({'name': tag}) self.data['tags'] = tags re...
814,297
Add a list of tags Args: tags (List[str]): list of tags to add Returns: bool: True if all tags added or False if any already present.
def _add_tags(self, tags): # type: (List[str]) -> bool alltagsadded = True for tag in tags: if not self._add_tag(tag): alltagsadded = False return alltagsadded
814,298
Return list of strings from comma separated list Args: field (str): Field containing comma separated list Returns: List[str]: List of strings
def _get_stringlist_from_commastring(self, field): # type: (str) -> List[str] strings = self.data.get(field) if strings: return strings.split(',') else: return list()
814,299
Add a string to a comma separated list of strings Args: field (str): Field containing comma separated list string (str): String to add Returns: bool: True if string added or False if string already present
def _add_string_to_commastring(self, field, string): # type: (str, str) -> bool if string in self._get_stringlist_from_commastring(field): return False strings = '%s,%s' % (self.data.get(field, ''), string) if strings[0] == ',': strings = strings[1:] ...
814,300
Add a list of strings to a comma separated list of strings Args: field (str): Field containing comma separated list strings (List[str]): list of strings to add Returns: bool: True if all strings added or False if any already present.
def _add_strings_to_commastring(self, field, strings): # type: (str, List[str]) -> bool allstringsadded = True for string in strings: if not self._add_string_to_commastring(field, string): allstringsadded = False return allstringsadded
814,301
Remove a string from a comma separated list of strings Args: field (str): Field containing comma separated list string (str): String to remove Returns: bool: True if string removed or False if not
def _remove_string_from_commastring(self, field, string): # type: (str, str) -> bool commastring = self.data.get(field, '') if string in commastring: self.data[field] = commastring.replace(string, '') return True return False
814,302
Reads the resource given by identifier from HDX and returns Resource object Args: identifier (str): Identifier of resource configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[Resource]: Resource object if s...
def read_from_hdx(identifier, configuration=None): # type: (str, Optional[Configuration]) -> Optional['Resource'] if is_valid_uuid(identifier) is False: raise HDXError('%s is not a valid resource id!' % identifier) resource = Resource(configuration=configuration) re...
814,304
Delete any existing url and set the file uploaded to the local path provided Args: file_to_upload (str): Local path to file to upload Returns: None
def set_file_to_upload(self, file_to_upload):
    # type: (str) -> None
    """Delete any existing url and set the file uploaded to the local path
    provided.

    Args:
        file_to_upload (str): Local path to file to upload

    Returns:
        None
    """
    # A local file upload supersedes any remote url
    self.data.pop('url', None)
    self.file_to_upload = file_to_upload
814,305
Check if resource exists in HDX and if so, update it Args: **kwargs: See below operation (string): Operation to perform eg. patch. Defaults to update. Returns: None
def update_in_hdx(self, **kwargs):
    # type: (Any) -> None
    """Check if resource exists in HDX and if so, update it.

    Args:
        **kwargs: See below
        operation (string): Operation to perform eg. patch. Defaults to update.

    Returns:
        None
    """
    self._check_load_existing_object('resource', 'id')
    # A pending file upload supersedes any remote url
    if self.file_to_upload:
        self.data.pop('url', None)
    self._merge_hdx_update('resource', 'id', self.file_to_upload, **kwargs)
814,307
Download resource store to provided folder or temporary folder if no folder supplied Args: folder (Optional[str]): Folder to download resource to. Defaults to None. Returns: Tuple[str, str]: (URL downloaded, Path to downloaded file)
def download(self, folder=None): # type: (Optional[str]) -> Tuple[str, str] # Download the resource url = self.data.get('url', None) if not url: raise HDXError('No URL to download!') logger.debug('Downloading %s' % url) filename = self.data['name'] ...
814,311
Get list of resources that have a datastore returning their ids. Args: configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: List[str]: List of resource ids that are in the datastore
def get_all_resource_ids_in_datastore(configuration=None): # type: (Optional[Configuration]) -> List[str] resource = Resource(configuration=configuration) success, result = resource._read_from_hdx('datastore', '_table_metadata', 'resource_id', ...
814,312
For tabular data, create a resource in the HDX datastore which enables data preview in HDX using the built in YAML definition for a topline. If path is not supplied, the file is first downloaded from HDX. Args: delete_first (int): Delete datastore before creation. 0 = No, 1 = Yes, 2 = If no...
def create_datastore_for_topline(self, delete_first=0, path=None):
    # type: (int, Optional[str]) -> None
    """For tabular data, create a resource in the HDX datastore, which
    enables data preview in HDX, using the built in YAML definition for a
    topline. If path is not supplied, the file is first downloaded from HDX.

    Args:
        delete_first (int): Delete datastore before creation. 0 = No, 1 = Yes, 2 = If no resource id. Defaults to 0.
        path (Optional[str]): Local path to file that will be uploaded. Defaults to None.

    Returns:
        None
    """
    # Load the packaged topline datastore schema shipped next to this module
    schema = load_yaml(
        script_dir_plus_file(join('..', 'hdx_datasource_topline.yml'),
                             Resource))
    self.create_datastore_from_dict_schema(schema, delete_first, path=path)
814,319
Get resource view id Args: resource_view (Union[ResourceView,Dict]): ResourceView metadata from a ResourceView object or dictionary Returns: ResourceView: ResourceView object
def _get_resource_view(self, resource_view): # type: (Union[ResourceView,Dict]) -> ResourceView if isinstance(resource_view, dict): resource_view = ResourceView(resource_view, configuration=self.configuration) if isinstance(resource_view, ResourceView): return re...
814,321
Add new or update existing resource views in resource with new metadata. Args: resource_views (List[Union[ResourceView,Dict]]): A list of resource views metadata from ResourceView objects or dictionaries Returns: None
def add_update_resource_views(self, resource_views):
    # type: (List[Union[ResourceView,Dict]]) -> None
    """Add new or update existing resource views in resource with new
    metadata.

    Args:
        resource_views (List[Union[ResourceView,Dict]]): A list of resource views metadata from ResourceView objects or dictionaries

    Returns:
        None

    Raises:
        HDXError: if resource_views is not a list.
    """
    if not isinstance(resource_views, list):
        raise HDXError('ResourceViews should be a list!')
    for view in resource_views:
        self.add_update_resource_view(view)
814,322
Order resource views in resource. Args: resource_views (List[Union[ResourceView,Dict,str]]): A list of either resource view ids or resource views metadata from ResourceView objects or dictionaries Returns: None
def reorder_resource_views(self, resource_views): # type: (List[Union[ResourceView,Dict,str]]) -> None if not isinstance(resource_views, list): raise HDXError('ResourceViews should be a list!') ids = list() for resource_view in resource_views: if isinstan...
814,323
Delete a resource view from the resource and HDX Args: resource_view (Union[ResourceView,Dict,str]): Either a resource view id or resource view metadata either from a ResourceView object or a dictionary Returns: None
def delete_resource_view(self, resource_view): # type: (Union[ResourceView,Dict,str]) -> None if isinstance(resource_view, str): if is_valid_uuid(resource_view) is False: raise HDXError('%s is not a valid resource view id!' % resource_view) resource_view ...
814,324
Creates the class template. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: str: class template.
def _CreateClassTemplate(cls, data_type_definition): type_name = data_type_definition.name type_description = data_type_definition.description or type_name while type_description.endswith('.'): type_description = type_description[:-1] class_attributes_description = [] init_arguments = [...
814,326
Checks if a string contains an identifier. Args: string (str): string to check. Returns: bool: True if the string contains an identifier, False otherwise.
def _IsIdentifier(cls, string): return ( string and not string[0].isdigit() and all(character.isalnum() or character == '_' for character in string))
814,327
Validates the data type definition. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: ValueError: if the data type definition is not considered valid.
def _ValidateDataTypeDefinition(cls, data_type_definition): if not cls._IsIdentifier(data_type_definition.name): raise ValueError( 'Data type definition name: {0!s} not a valid identifier'.format( data_type_definition.name)) if keyword.iskeyword(data_type_definition.name): ...
814,328
Creates a new structure values class. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: class: structure values class.
def CreateClass(cls, data_type_definition): cls._ValidateDataTypeDefinition(data_type_definition) class_definition = cls._CreateClassTemplate(data_type_definition) namespace = { '__builtins__' : { 'object': builtins.object, 'super': builtins.super}, '__name__':...
814,329
Deregisters a data type definition. The data type definitions are identified based on their lower case name. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: KeyError: if a data type definition is not set for the corresponding name.
def DeregisterDefinition(self, data_type_definition):
  """Deregisters a data type definition.

  The data type definitions are identified based on their lower case name.

  Args:
    data_type_definition (DataTypeDefinition): data type definition.

  Raises:
    KeyError: if a data type definition is not set for the corresponding
        name.
  """
  lookup_name = data_type_definition.name.lower()
  try:
    del self._definitions[lookup_name]
  except KeyError:
    raise KeyError('Definition not set for name: {0:s}.'.format(
        data_type_definition.name))
814,331