| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | """ |
| | Logging utilities for dill. |
| | |
| | The 'logger' object is dill's top-level logger. |
| | |
| | The 'adapter' object wraps the logger and implements a 'trace()' method that |
| | generates a detailed tree-style trace for the pickling call at log level INFO. |
| | |
| | The 'trace()' function sets and resets dill's logger log level, enabling and |
| | disabling the pickling trace. |
| | |
| | The trace shows a tree structure depicting the depth of each object serialized |
| | *with dill save functions*, but not the ones that use save functions from |
| | 'pickle._Pickler.dispatch'. If the information is available, it also displays |
| | the size in bytes that the object contributed to the pickle stream (including |
| | its child objects). Sample trace output: |
| | |
| | >>> import dill, dill.tests |
| | >>> dill.detect.trace(True) |
| | >>> dill.dump_session(main=dill.tests) |
┬ M1: <module 'dill.tests' from '.../dill/tests/__init__.py'>
├┬ F2: <function _import_module at 0x7f0d2dce1b80>
│└ # F2 [32 B]
├┬ D2: <dict object at 0x7f0d2e98a540>
│├┬ T4: <class '_frozen_importlib.ModuleSpec'>
││└ # T4 [35 B]
│├┬ D2: <dict object at 0x7f0d2ef0e8c0>
││├┬ T4: <class '_frozen_importlib_external.SourceFileLoader'>
│││└ # T4 [50 B]
││├┬ D2: <dict object at 0x7f0d2e988a40>
│││└ # D2 [84 B]
││└ # D2 [413 B]
│└ # D2 [763 B]
└ # M1 [813 B]
| | """ |
| |
|
# Public API of this module: the raw logger, its trace adapter, and trace().
__all__ = ['adapter', 'logger', 'trace']
| |
|
| | import codecs |
| | import contextlib |
| | import locale |
| | import logging |
| | import math |
| | import os |
| | from functools import partial |
| | from typing import TextIO, Union |
| |
|
| | import dill |
| |
|
| | |
# Translation table mapping the box-drawing characters used in the trace tree
# prefixes to ASCII fallbacks, for output streams that aren't UTF-8 capable.
# NOTE: the original line was mojibake'd so that all four keys collided into
# the same character; restored to the four distinct box-drawing glyphs.
ASCII_MAP = str.maketrans({"│": "|", "├": "|", "┬": "+", "└": "`"})
| |
|
| | |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
class TraceAdapter(logging.LoggerAdapter):
    """
    Tracks object tree depth and calculates pickled object size.

    A single instance of this wraps the module's logger, as the logging API
    doesn't allow setting it directly with a custom Logger subclass. The added
    'trace()' method receives a pickle instance as the first argument and
    creates extra values to be added in the LogRecord from it, then calls
    'info()'.

    Usage of logger with 'trace()' method:

    >>> from dill.logger import adapter as logger #NOTE: not dill.logger.logger
    >>> ...
    >>> def save_atype(pickler, obj):
    >>>     logger.trace(pickler, "Message with %s and %r etc. placeholders", 'text', obj)
    >>>     ...
    """
    def __init__(self, logger):
        # Deliberately skip LoggerAdapter.__init__: no static 'extra' mapping
        # is used; extras are built per call in trace().
        self.logger = logger
    def addHandler(self, handler):
        # Install a TraceFormatter on the handler so records render with the
        # tree prefix and size suffix fields.
        formatter = TraceFormatter("%(prefix)s%(message)s%(suffix)s", handler=handler)
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
    def removeHandler(self, handler):
        self.logger.removeHandler(handler)
    def process(self, msg, kwargs):
        # No-op override: extras are injected explicitly by trace().
        return msg, kwargs
    def trace_setup(self, pickler):
        # Initialize the trace state on a dill pickler before dumping.  When
        # tracing is disabled, '_trace_depth' is set to None so that trace()
        # returns early with minimal overhead.
        if not dill._dill.is_dill(pickler, child=False):
            return
        if self.isEnabledFor(logging.INFO):
            pickler._trace_depth = 1
            pickler._size_stack = []
        else:
            pickler._trace_depth = None
    def trace(self, pickler, msg, *args, **kwargs):
        # Log a trace message for 'pickler', tracking tree depth and, when the
        # stream position is available, the pickled size of each object.
        if not hasattr(pickler, '_trace_depth'):
            # trace_setup() wasn't run for this pickler; log a plain message.
            logger.info(msg, *args, **kwargs)
            return
        if pickler._trace_depth is None:
            # Tracing disabled for this pickler (see trace_setup()).
            return
        extra = kwargs.get('extra', {})
        # Messages beginning with '#' mark the completion ("pop") of an object.
        pushed_obj = msg.startswith('#')
        size = None
        try:
            # Streams are not required to be tellable, hence the outer except.
            size = pickler._file.tell()
            frame = pickler.framer.current_frame
            try:
                size += frame.tell()
            except AttributeError:
                # The current frame may be a bytes-like buffer without tell().
                size += len(frame)
        except (AttributeError, TypeError):
            pass
        if size is not None:
            if not pushed_obj:
                # Entering an object: remember the stream position...
                pickler._size_stack.append(size)
            else:
                # ...and on completion the difference is its pickled size.
                size -= pickler._size_stack.pop()
                extra['size'] = size
        if pushed_obj:
            pickler._trace_depth -= 1
        extra['depth'] = pickler._trace_depth
        kwargs['extra'] = extra
        self.info(msg, *args, **kwargs)
        if not pushed_obj:
            pickler._trace_depth += 1
| |
|
class TraceFormatter(logging.Formatter):
    """
    Generates message prefix and suffix from record.

    This Formatter adds prefix and suffix strings to the log message in trace
    mode (and also provides empty string defaults for normal logs).
    """
    def __init__(self, *args, handler=None, **kwargs):
        super().__init__(*args, **kwargs)
        # Determine whether the handler's stream can take the UTF-8
        # box-drawing characters; otherwise fall back to ASCII_MAP.
        try:
            encoding = handler.stream.encoding
            if encoding is None:
                raise AttributeError
        except AttributeError:
            encoding = locale.getpreferredencoding()
        try:
            # Normalize the encoding name (e.g. 'UTF8' -> 'utf-8').
            encoding = codecs.lookup(encoding).name
        except LookupError:
            self.is_utf8 = False
        else:
            self.is_utf8 = (encoding == codecs.lookup('utf-8').name)
    def format(self, record):
        fields = {'prefix': "", 'suffix': ""}
        if getattr(record, 'depth', 0) > 0:
            # Build the tree-drawing prefix from the record's depth.
            if record.msg.startswith("#"):
                # Object completion line: vertical bars, then a corner.
                prefix = (record.depth - 1)*"│" + "└"
            elif record.depth == 1:
                prefix = "┬"
            else:
                prefix = (record.depth - 2)*"│" + "├┬"
            if not self.is_utf8:
                prefix = prefix.translate(ASCII_MAP) + "-"
            fields['prefix'] = prefix + " "
        if hasattr(record, 'size') and record.size is not None and record.size >= 1:
            # Show object size in human-readable form with IEC binary prefixes.
            power = int(math.log(record.size, 2)) // 10
            size = record.size >> power*10
            # BUGFIX: index must be power-1 ("KMGTP"[0] == 'K' for KiB);
            # the previous "KMGTP"[power] printed "MiB" for kibibyte sizes.
            fields['suffix'] = " [%d %sB]" % (size, "KMGTP"[power-1] + "i" if power else "")
        vars(record).update(fields)
        return super().format(record)
| |
|
# Module-level singletons: dill's logger, its trace adapter, and the default
# handler writing to stderr.  Propagation is disabled so traces don't leak
# into the application's root logger handlers.
logger = logging.getLogger('dill')
logger.propagate = False
adapter = TraceAdapter(logger)
# NOTE(review): logging._StderrHandler is a private class; it always writes to
# the *current* sys.stderr (unlike StreamHandler(sys.stderr), which binds it).
stderr_handler = logging._StderrHandler()
adapter.addHandler(stderr_handler)
| |
|
def trace(arg: Union[bool, TextIO, str, os.PathLike] = None, *, mode: str = 'a') -> None:
    """print a trace through the stack when pickling; useful for debugging

    Passing a single boolean turns the tracing on (``True``) or off (``False``).

    Example usage:

    >>> import dill
    >>> dill.detect.trace(True)
    >>> dill.dump_session()

    Alternatively, ``trace()`` can be used as a context manager.  Called with
    no arguments, it merely restores the tracing state on exit.  A file
    handle, or a file name plus an optional file mode, may be specified
    instead to redirect the tracing output within the ``with`` block context.
    The manager yields a log function so the user can write extra information
    to the file.

    Example usage:

    >>> from dill import detect
    >>> D = {'a': 42, 'b': {'x': None}}
    >>> with detect.trace():
    >>>     dumps(D)
    ┬ D2: <dict object at 0x7f2721804800>
    ├┬ D2: <dict object at 0x7f27217f5c40>
    │└ # D2 [8 B]
    └ # D2 [22 B]
    >>> squared = lambda x: x**2
    >>> with detect.trace('output.txt', mode='w') as log:
    >>>     log("> D = %r", D)
    >>>     dumps(D)
    >>>     log("> squared = %r", squared)
    >>>     dumps(squared)

    Arguments:
        arg: a boolean value, or an optional file-like or path-like object for the context manager
        mode: mode string for ``open()`` if a file name is passed as the first argument
    """
    # A literal True/False toggles the log level directly; anything else
    # (None, a stream, or a path) configures the context-manager form.
    if repr(arg) in ('True', 'False'):
        logger.setLevel(logging.INFO if arg else logging.WARNING)
        return None
    return TraceManager(file=arg, mode=mode)
| |
|
class TraceManager(contextlib.AbstractContextManager):
    """context manager version of trace(); can redirect the trace to a file"""
    def __init__(self, file, mode):
        self.file = file
        self.mode = mode
        # Redirect only when an output target was actually supplied.
        self.redirect = file is not None
        # Anything exposing write() is treated as an already-open stream.
        self.file_is_stream = hasattr(file, 'write')
    def __enter__(self):
        if self.redirect:
            # Don't let buffered stderr output interleave with the file trace.
            stderr_handler.flush()
            # Wrap the target in the appropriate logging handler.
            if self.file_is_stream:
                self.handler = logging.StreamHandler(self.file)
            else:
                self.handler = logging.FileHandler(self.file, self.mode)
            adapter.removeHandler(stderr_handler)
            adapter.addHandler(self.handler)
        # Raise the level to INFO for the block, remembering the old one.
        self.old_level = adapter.getEffectiveLevel()
        adapter.setLevel(logging.INFO)
        # Yield the adapter's info() so callers can log extra lines.
        return adapter.info
    def __exit__(self, *exc_info):
        adapter.setLevel(self.old_level)
        if not self.redirect:
            return
        # Restore the default stderr handler and release the file one.
        adapter.removeHandler(self.handler)
        adapter.addHandler(stderr_handler)
        if not self.file_is_stream:
            self.handler.close()
| |
|