text
stringlengths 0
828
|
|---|
Implementation of Parser API
|
Relies on the underlying registry of parsers to provide the best parsing plan
|
:param desired_type:
|
:param filesystem_object:
|
:param logger:
|
:param log_only_last: a flag to only log the last part of the file path (default False)
|
:return:
|
""""""
|
# find the parser for this object
|
t, combined_parser = self.build_parser_for_fileobject_and_desiredtype(filesystem_object, desired_type,
|
logger=logger)
|
# ask the parser for the parsing plan
|
return combined_parser.create_parsing_plan(t, filesystem_object, logger)"
|
def build_parser_for_fileobject_and_desiredtype(self, obj_on_filesystem: PersistedObject, object_type: Type[T],
                                                logger: Logger = None) -> Tuple[Type, Parser]:
    """
    Builds from the registry a parser to parse object obj_on_filesystem as an object of type object_type.

    To do that, it iterates through all registered parsers in the list in reverse order (last inserted first),
    and checks if they support the provided object format (single or multifile) and type.
    If several parsers match, it returns a CascadingParser that will try them in order.

    If several alternatives are requested (through a root Union type), this is done independently for each
    alternative.

    :param obj_on_filesystem: the persisted object to parse
    :param object_type: the desired type; may be a TypeVar or Union, which is resolved to concrete alternatives
    :param logger: an optional logger; failures per alternative are logged as warnings when provided
    :return: a type to use and a parser. The type to use is either directly the one provided, or a resolved one in
        case of TypeVar
    :raises NoParserFoundForUnionType: when none of the resolved alternative types has a matching parser
    """
    # First resolve TypeVars and Unions to get a list of compliant types
    object_types = get_alternate_types_resolving_forwardref_union_and_typevar(object_type)

    if len(object_types) == 1:
        # One type: proceed as usual. Any NoParserFound* exception propagates to the caller directly.
        parsers = self._build_parser_for_fileobject_and_desiredtype(obj_on_filesystem, object_typ=object_types[0],
                                                                    logger=logger)
        if len(parsers) > 1:
            return object_types[0], CascadingParser(parsers)
        else:
            # Single match: return the (type, parser) pair as-is
            return next(iter(parsers.items()))
    else:
        # Several alternate types are supported: try to build a parser for each.
        # Collect successes in `parsers` and failures in `errors`; only raise if nothing matched.
        parsers = OrderedDict()
        errors = OrderedDict()
        for typ in object_types:
            try:
                parsers.update(self._build_parser_for_fileobject_and_desiredtype(obj_on_filesystem, object_typ=typ,
                                                                                 logger=logger))
            except (NoParserFoundForObjectExt, NoParserFoundForObjectType) as e:
                # `logger` defaults to None: guard the warning so a missing logger does not
                # mask the real lookup failure with an AttributeError.
                if logger is not None:
                    logger.warning("{} - {}".format(type(e).__name__, e))
                errors[e] = e

        # Combine if there are remaining, otherwise raise
        if len(parsers) > 0:
            return object_type, CascadingParser(parsers)
        else:
            raise NoParserFoundForUnionType.create(obj_on_filesystem, object_type, errors)
|
501,"def _build_parser_for_fileobject_and_desiredtype(self, obj_on_filesystem: PersistedObject, object_typ: Type[T],
|
logger: Logger = None) -> Dict[Type, Parser]:
|
""""""
|
Builds a parser for each subtype of object_typ
|
:param obj_on_filesystem:
|
:param object_typ:
|
:param logger:
|
:return:
|
""""""
|
parsers = OrderedDict()
|
errors = OrderedDict()
|
try:
|
p = self.__build_parser_for_fileobject_and_desiredtype(obj_on_filesystem,
|
object_typ=object_typ,
|
logger=logger)
|
parsers[object_typ] = p
|
except NoParserFoundForObjectExt as e:
|
logger.warning(""{} - {}"".format(type(e).__name__, e))
|
errors[e] = e
|
except NoParserFoundForObjectType as f:
|
logger.warning(""{} - {}"".format(type(f).__name__, f))
|
errors[f] = f
|
# do not explore subclasses for collections
|
if is_collection(object_typ, strict=True):
|
if len(errors) > 0:
|
raise next(iter(errors.values()))
|
else:
|
return parsers
|
# Finally create one such parser for each subclass
|
subclasses = get_all_subclasses(object_typ)
|
# Then for each subclass also try (with a configurable limit in nb of subclasses)
|
for subclass in subclasses[0:GLOBAL_CONFIG.dict_to_object_subclass_limit]:
|
try:
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.