koichi12 commited on
Commit
c63d08b
·
verified ·
1 Parent(s): f64ba55

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .venv/lib/python3.11/site-packages/dill/__diff.py +234 -0
  2. .venv/lib/python3.11/site-packages/dill/__info__.py +291 -0
  3. .venv/lib/python3.11/site-packages/dill/__init__.py +119 -0
  4. .venv/lib/python3.11/site-packages/dill/__pycache__/__diff.cpython-311.pyc +0 -0
  5. .venv/lib/python3.11/site-packages/dill/__pycache__/__info__.cpython-311.pyc +0 -0
  6. .venv/lib/python3.11/site-packages/dill/__pycache__/__init__.cpython-311.pyc +0 -0
  7. .venv/lib/python3.11/site-packages/dill/__pycache__/_objects.cpython-311.pyc +0 -0
  8. .venv/lib/python3.11/site-packages/dill/__pycache__/_shims.cpython-311.pyc +0 -0
  9. .venv/lib/python3.11/site-packages/dill/__pycache__/detect.cpython-311.pyc +0 -0
  10. .venv/lib/python3.11/site-packages/dill/__pycache__/logger.cpython-311.pyc +0 -0
  11. .venv/lib/python3.11/site-packages/dill/__pycache__/objtypes.cpython-311.pyc +0 -0
  12. .venv/lib/python3.11/site-packages/dill/__pycache__/pointers.cpython-311.pyc +0 -0
  13. .venv/lib/python3.11/site-packages/dill/__pycache__/session.cpython-311.pyc +0 -0
  14. .venv/lib/python3.11/site-packages/dill/__pycache__/settings.cpython-311.pyc +0 -0
  15. .venv/lib/python3.11/site-packages/dill/__pycache__/source.cpython-311.pyc +0 -0
  16. .venv/lib/python3.11/site-packages/dill/__pycache__/temp.cpython-311.pyc +0 -0
  17. .venv/lib/python3.11/site-packages/dill/_dill.py +2226 -0
  18. .venv/lib/python3.11/site-packages/dill/_objects.py +541 -0
  19. .venv/lib/python3.11/site-packages/dill/_shims.py +193 -0
  20. .venv/lib/python3.11/site-packages/dill/detect.py +284 -0
  21. .venv/lib/python3.11/site-packages/dill/logger.py +285 -0
  22. .venv/lib/python3.11/site-packages/dill/objtypes.py +24 -0
  23. .venv/lib/python3.11/site-packages/dill/pointers.py +122 -0
  24. .venv/lib/python3.11/site-packages/dill/session.py +612 -0
  25. .venv/lib/python3.11/site-packages/dill/settings.py +25 -0
  26. .venv/lib/python3.11/site-packages/dill/source.py +1023 -0
  27. .venv/lib/python3.11/site-packages/dill/temp.py +252 -0
  28. .venv/lib/python3.11/site-packages/dill/tests/__main__.py +35 -0
  29. .venv/lib/python3.11/site-packages/dill/tests/test_check.py +62 -0
  30. .venv/lib/python3.11/site-packages/dill/tests/test_classdef.py +340 -0
  31. .venv/lib/python3.11/site-packages/dill/tests/test_detect.py +160 -0
  32. .venv/lib/python3.11/site-packages/dill/tests/test_diff.py +107 -0
  33. .venv/lib/python3.11/site-packages/dill/tests/test_extendpickle.py +53 -0
  34. .venv/lib/python3.11/site-packages/dill/tests/test_fglobals.py +55 -0
  35. .venv/lib/python3.11/site-packages/dill/tests/test_file.py +500 -0
  36. .venv/lib/python3.11/site-packages/dill/tests/test_functors.py +39 -0
  37. .venv/lib/python3.11/site-packages/dill/tests/test_logger.py +70 -0
  38. .venv/lib/python3.11/site-packages/dill/tests/test_mixins.py +121 -0
  39. .venv/lib/python3.11/site-packages/dill/tests/test_module.py +84 -0
  40. .venv/lib/python3.11/site-packages/dill/tests/test_moduledict.py +54 -0
  41. .venv/lib/python3.11/site-packages/dill/tests/test_nested.py +135 -0
  42. .venv/lib/python3.11/site-packages/dill/tests/test_objects.py +63 -0
  43. .venv/lib/python3.11/site-packages/dill/tests/test_properties.py +62 -0
  44. .venv/lib/python3.11/site-packages/dill/tests/test_pycapsule.py +45 -0
  45. .venv/lib/python3.11/site-packages/dill/tests/test_registered.py +64 -0
  46. .venv/lib/python3.11/site-packages/dill/tests/test_session.py +280 -0
  47. .venv/lib/python3.11/site-packages/dill/tests/test_source.py +173 -0
  48. .venv/lib/python3.11/site-packages/dill/tests/test_sources.py +190 -0
  49. .venv/lib/python3.11/site-packages/dill/tests/test_temp.py +103 -0
  50. .venv/lib/python3.11/site-packages/dill/tests/test_weakref.py +72 -0
.venv/lib/python3.11/site-packages/dill/__diff.py ADDED
@@ -0,0 +1,234 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ """
10
+ Module to show if an object has changed since it was memorised
11
+ """
12
+
13
+ import builtins
14
+ import os
15
+ import sys
16
+ import types
17
+ try:
18
+ import numpy.ma
19
+ HAS_NUMPY = True
20
+ except ImportError:
21
+ HAS_NUMPY = False
22
+
23
+ # pypy doesn't use reference counting
24
+ getrefcount = getattr(sys, 'getrefcount', lambda x:0)
25
+
26
+ # memo of objects indexed by id to a tuple (attributes, sequence items)
27
+ # attributes is a dict indexed by attribute name to attribute id
28
+ # sequence items is either a list of ids, of a dictionary of keys to ids
29
+ memo = {}
30
+ id_to_obj = {}
31
+ # types that cannot have changing attributes
32
+ builtins_types = set((str, list, dict, set, frozenset, int))
33
+ dont_memo = set(id(i) for i in (memo, sys.modules, sys.path_importer_cache,
34
+ os.environ, id_to_obj))
35
+
36
+
37
def get_attrs(obj):
    """Return the attribute dictionary (``__dict__``) of *obj*, or None.

    The immutable builtin types tracked in ``builtins_types`` (and direct
    instances of them) cannot grow interesting attributes, so None is
    returned for those to keep the memo small.
    """
    obj_type = type(obj)
    if obj_type in builtins_types:
        return None
    # a class object that IS one of the builtin types is also skipped
    if obj_type is type and obj in builtins_types:
        return None
    return getattr(obj, '__dict__', None)
45
+
46
+
47
def get_seq(obj, cache={str: False, frozenset: False, list: True, set: True,
                        dict: True, tuple: True, type: False,
                        types.ModuleType: False, types.FunctionType: False,
                        types.BuiltinFunctionType: False}):
    """
    Gets all the items in a sequence or return None
    """
    # NOTE: the mutable default ``cache`` is deliberate — it is a shared,
    # process-wide memo of which types are container-like, seeded with the
    # common builtin types so they never need probing.
    try:
        # prefer __class__ so proxy objects report their proxied class
        o_type = obj.__class__
    except AttributeError:
        o_type = type(obj)
    hsattr = hasattr  # local alias for faster repeated lookups below
    if o_type in cache:
        if cache[o_type]:
            # container type: hand back a shallow copy when possible so the
            # caller sees a stable snapshot of the items
            if hsattr(obj, "copy"):
                return obj.copy()
            return obj
    elif HAS_NUMPY and o_type in (numpy.ndarray, numpy.ma.core.MaskedConstant):
        # numpy arrays are treated as sequences only when non-empty;
        # empty/zero-size arrays yield an empty item list
        if obj.shape and obj.size:
            return obj
        else:
            return []
    elif hsattr(obj, "__contains__") and hsattr(obj, "__iter__") \
       and hsattr(obj, "__len__") and hsattr(o_type, "__contains__") \
       and hsattr(o_type, "__iter__") and hsattr(o_type, "__len__"):
        # duck-typed container check (on both instance and class);
        # remember the verdict for every future object of this type
        cache[o_type] = True
        if hsattr(obj, "copy"):
            return obj.copy()
        return obj
    else:
        # not a container — cache the negative result too
        cache[o_type] = False
        return None
79
+
80
+
81
def memorise(obj, force=False):
    """
    Adds an object to the memo, and recursively adds all the objects
    attributes, and if it is a container, its items. Use force=True to update
    an object already in the memo. Updating is not recursively done.
    """
    obj_id = id(obj)
    # skip objects already memoised (unless forced) and the bookkeeping
    # structures themselves (memo, id_to_obj, sys.modules, ...)
    if obj_id in memo and not force or obj_id in dont_memo:
        return
    id_ = id  # local alias for speed in the comprehensions below
    g = get_attrs(obj)
    if g is None:
        attrs_id = None
    else:
        # snapshot attribute identities: name -> id(value)
        attrs_id = dict((key,id_(value)) for key, value in g.items())

    s = get_seq(obj)
    if s is None:
        seq_id = None
    elif hasattr(s, "items"):
        # mapping: snapshot as id(key) -> id(value)
        seq_id = dict((id_(key),id_(value)) for key, value in s.items())
    elif not hasattr(s, "__len__"): #XXX: avoid TypeError from unexpected case
        seq_id = None
    else:
        # plain sequence: snapshot item identities in order
        seq_id = [id_(i) for i in s]

    memo[obj_id] = attrs_id, seq_id
    id_to_obj[obj_id] = obj
    mem = memorise
    # recurse into attribute values (list comprehensions used purely for
    # their side effects, matching the module's existing style)
    if g is not None:
        [mem(value) for key, value in g.items()]

    if s is not None:
        if hasattr(s, "items"):
            [(mem(key), mem(item))
             for key, item in s.items()]
        else:
            if hasattr(s, '__len__'):
                [mem(item) for item in s]
            else: mem(s)
121
+
122
+
123
def release_gone():
    """Drop memo entries for objects with no remaining outside references.

    An object referenced only by our two bookkeeping dicts plus the
    iteration temporaries has a refcount below 4, so it is considered
    gone and is evicted from both ``id_to_obj`` and ``memo``.
    """
    for obj_id, obj in list(id_to_obj.items()):
        if getrefcount(obj) < 4:  #XXX: correct for pypy?
            id_to_obj.pop(obj_id)
            memo.pop(obj_id)
127
+
128
+
129
def whats_changed(obj, seen=None, simple=False, first=True):
    """
    Check an object against the memo. Returns a list in the form
    (attribute changes, container changed). Attribute changes is a dict of
    attribute name to attribute value. container changed is a boolean.
    If simple is true, just returns a boolean. None for either item means
    that it has not been checked yet
    """
    # Special cases
    if first:
        # ignore the _ variable, which only appears in interactive sessions
        if "_" in builtins.__dict__:
            del builtins._
        if seen is None:
            seen = {}

    obj_id = id(obj)

    # already visited during this comparison: reuse the recorded verdict
    # (also breaks cycles in self-referential object graphs)
    if obj_id in seen:
        if simple:
            return any(seen[obj_id])
        return seen[obj_id]

    # Safety checks
    if obj_id in dont_memo:
        # bookkeeping structures are never reported as changed
        seen[obj_id] = [{}, False]
        if simple:
            return False
        return seen[obj_id]
    elif obj_id not in memo:
        # unknown object: in simple mode treat as changed, otherwise fail
        if simple:
            return True
        else:
            raise RuntimeError("Object not memorised " + str(obj))

    # provisional "unchanged" entry so recursive calls on cycles terminate
    seen[obj_id] = ({}, False)

    chngd = whats_changed  # local aliases for the recursive hot path
    id_ = id

    # compare attributes
    attrs = get_attrs(obj)
    if attrs is None:
        changed = {}
    else:
        obj_attrs = memo[obj_id][0]
        obj_get = obj_attrs.get
        # attributes that were deleted since memorisation map to None
        changed = dict((key,None) for key in obj_attrs if key not in attrs)
        for key, o in attrs.items():
            # changed if rebound to a new object, or mutated in place
            if id_(o) != obj_get(key, None) or chngd(o, seen, True, False):
                changed[key] = o

    # compare sequence
    items = get_seq(obj)
    seq_diff = False
    if (items is not None) and (hasattr(items, '__len__')):
        obj_seq = memo[obj_id][1]
        if (len(items) != len(obj_seq)):
            seq_diff = True
        elif hasattr(obj, "items"): # dict type obj
            obj_get = obj_seq.get
            for key, item in items.items():
                # a key or value that is new, rebound, or itself changed
                # marks the whole container as changed
                if id_(item) != obj_get(id_(key)) \
                   or chngd(key, seen, True, False) \
                   or chngd(item, seen, True, False):
                    seq_diff = True
                    break
        else:
            for i, j in zip(items, obj_seq): # list type obj
                if id_(i) != j or chngd(i, seen, True, False):
                    seq_diff = True
                    break
    # replace the provisional entry with the real verdict
    seen[obj_id] = changed, seq_diff
    if simple:
        return changed or seq_diff
    return changed, seq_diff
205
+
206
+
207
def has_changed(*args, **kwds):
    """Return a truthy value if the object differs from its memorised state.

    Convenience wrapper around :func:`whats_changed` that always requests
    the simple (boolean-style) result; any caller-supplied ``simple``
    keyword is overridden.
    """
    kwds = dict(kwds, simple=True)
    return whats_changed(*args, **kwds)
210
+
211
+ __import__ = __import__
212
+
213
+
214
def _imp(*args, **kwds):
    """
    Replaces the default __import__, to allow a module to be memorised
    before the user can change it
    """
    # snapshot the module names present before delegating to the saved
    # original __import__ (bound at module level)
    before = set(sys.modules.keys())
    mod = __import__(*args, **kwds)
    # memorise every module the import brought in, not just the one returned
    after = set(sys.modules.keys()).difference(before)
    for m in after:
        memorise(sys.modules[m])
    return mod
225
+
226
+ builtins.__import__ = _imp
227
+ if hasattr(builtins, "_"):
228
+ del builtins._
229
+
230
+ # memorise all already imported modules. This implies that this must be
231
+ # imported first for any changes to be recorded
232
+ for mod in list(sys.modules.values()):
233
+ memorise(mod)
234
+ release_gone()
.venv/lib/python3.11/site-packages/dill/__info__.py ADDED
@@ -0,0 +1,291 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+ '''
8
+ -----------------------------
9
+ dill: serialize all of Python
10
+ -----------------------------
11
+
12
+ About Dill
13
+ ==========
14
+
15
+ ``dill`` extends Python's ``pickle`` module for serializing and de-serializing
16
+ Python objects to the majority of the built-in Python types. Serialization
17
+ is the process of converting an object to a byte stream, and the inverse
18
+ of which is converting a byte stream back to a Python object hierarchy.
19
+
20
+ ``dill`` provides the user the same interface as the ``pickle`` module, and
21
+ also includes some additional features. In addition to pickling Python
22
+ objects, ``dill`` provides the ability to save the state of an interpreter
23
+ session in a single command. Hence, it would be feasible to save an
24
+ interpreter session, close the interpreter, ship the pickled file to
25
+ another computer, open a new interpreter, unpickle the session and
26
+ thus continue from the 'saved' state of the original interpreter
27
+ session.
28
+
29
+ ``dill`` can be used to store Python objects to a file, but the primary
30
+ usage is to send Python objects across the network as a byte stream.
31
+ ``dill`` is quite flexible, and allows arbitrary user defined classes
32
+ and functions to be serialized. Thus ``dill`` is not intended to be
33
+ secure against erroneously or maliciously constructed data. It is
34
+ left to the user to decide whether the data they unpickle is from
35
+ a trustworthy source.
36
+
37
+ ``dill`` is part of ``pathos``, a Python framework for heterogeneous computing.
38
+ ``dill`` is in active development, so any user feedback, bug reports, comments,
39
+ or suggestions are highly appreciated. A list of issues is located at
40
+ https://github.com/uqfoundation/dill/issues, with a legacy list maintained at
41
+ https://uqfoundation.github.io/project/pathos/query.
42
+
43
+
44
+ Major Features
45
+ ==============
46
+
47
+ ``dill`` can pickle the following standard types:
48
+
49
+ - none, type, bool, int, float, complex, bytes, str,
50
+ - tuple, list, dict, file, buffer, builtin,
51
+ - Python classes, namedtuples, dataclasses, metaclasses,
52
+ - instances of classes,
53
+ - set, frozenset, array, functions, exceptions
54
+
55
+ ``dill`` can also pickle more 'exotic' standard types:
56
+
57
+ - functions with yields, nested functions, lambdas,
58
+ - cell, method, unboundmethod, module, code, methodwrapper,
59
+ - methoddescriptor, getsetdescriptor, memberdescriptor, wrapperdescriptor,
60
+ - dictproxy, slice, notimplemented, ellipsis, quit
61
+
62
+ ``dill`` cannot yet pickle these standard types:
63
+
64
+ - frame, generator, traceback
65
+
66
+ ``dill`` also provides the capability to:
67
+
68
+ - save and load Python interpreter sessions
69
+ - save and extract the source code from functions and classes
70
+ - interactively diagnose pickling errors
71
+
72
+
73
+ Current Release
74
+ ===============
75
+
76
+ The latest released version of ``dill`` is available from:
77
+
78
+ https://pypi.org/project/dill
79
+
80
+ ``dill`` is distributed under a 3-clause BSD license.
81
+
82
+
83
+ Development Version
84
+ ===================
85
+
86
+ You can get the latest development version with all the shiny new features at:
87
+
88
+ https://github.com/uqfoundation
89
+
90
+ If you have a new contribution, please submit a pull request.
91
+
92
+
93
+ Installation
94
+ ============
95
+
96
+ ``dill`` can be installed with ``pip``::
97
+
98
+ $ pip install dill
99
+
100
+ To optionally include the ``objgraph`` diagnostic tool in the install::
101
+
102
+ $ pip install dill[graph]
103
+
104
+ To optionally include the ``gprof2dot`` diagnostic tool in the install::
105
+
106
+ $ pip install dill[profile]
107
+
108
+ For windows users, to optionally install session history tools::
109
+
110
+ $ pip install dill[readline]
111
+
112
+
113
+ Requirements
114
+ ============
115
+
116
+ ``dill`` requires:
117
+
118
+ - ``python`` (or ``pypy``), **>=3.8**
119
+ - ``setuptools``, **>=42**
120
+
121
+ Optional requirements:
122
+
123
+ - ``objgraph``, **>=1.7.2**
124
+ - ``gprof2dot``, **>=2022.7.29**
125
+ - ``pyreadline``, **>=1.7.1** (on windows)
126
+
127
+
128
+ Basic Usage
129
+ ===========
130
+
131
+ ``dill`` is a drop-in replacement for ``pickle``. Existing code can be
132
+ updated to allow complete pickling using::
133
+
134
+ >>> import dill as pickle
135
+
136
+ or::
137
+
138
+ >>> from dill import dumps, loads
139
+
140
+ ``dumps`` converts the object to a unique byte string, and ``loads`` performs
141
+ the inverse operation::
142
+
143
+ >>> squared = lambda x: x**2
144
+ >>> loads(dumps(squared))(3)
145
+ 9
146
+
147
+ There are a number of options to control serialization which are provided
148
+ as keyword arguments to several ``dill`` functions:
149
+
150
+ * with *protocol*, the pickle protocol level can be set. This uses the
151
+ same value as the ``pickle`` module, *DEFAULT_PROTOCOL*.
152
+ * with *byref=True*, ``dill`` to behave a lot more like pickle with
153
+ certain objects (like modules) pickled by reference as opposed to
154
+ attempting to pickle the object itself.
155
+ * with *recurse=True*, objects referred to in the global dictionary are
156
+ recursively traced and pickled, instead of the default behavior of
157
+ attempting to store the entire global dictionary.
158
+ * with *fmode*, the contents of the file can be pickled along with the file
159
+ handle, which is useful if the object is being sent over the wire to a
160
+ remote system which does not have the original file on disk. Options are
161
+ *HANDLE_FMODE* for just the handle, *CONTENTS_FMODE* for the file content
162
+ and *FILE_FMODE* for content and handle.
163
+ * with *ignore=False*, objects reconstructed with types defined in the
164
+ top-level script environment use the existing type in the environment
165
+ rather than a possibly different reconstructed type.
166
+
167
+ The default serialization can also be set globally in *dill.settings*.
168
+ Thus, we can modify how ``dill`` handles references to the global dictionary
169
+ locally or globally::
170
+
171
+ >>> import dill.settings
172
+ >>> dumps(absolute) == dumps(absolute, recurse=True)
173
+ False
174
+ >>> dill.settings['recurse'] = True
175
+ >>> dumps(absolute) == dumps(absolute, recurse=True)
176
+ True
177
+
178
+ ``dill`` also includes source code inspection, as an alternate to pickling::
179
+
180
+ >>> import dill.source
181
+ >>> print(dill.source.getsource(squared))
182
+ squared = lambda x:x**2
183
+
184
+ To aid in debugging pickling issues, use *dill.detect* which provides
185
+ tools like pickle tracing::
186
+
187
+ >>> import dill.detect
188
+ >>> with dill.detect.trace():
189
+ >>> dumps(squared)
190
+ ┬ F1: <function <lambda> at 0x7fe074f8c280>
191
+ ├┬ F2: <function _create_function at 0x7fe074c49c10>
192
+ │└ # F2 [34 B]
193
+ ├┬ Co: <code object <lambda> at 0x7fe07501eb30, file "<stdin>", line 1>
194
+ │├┬ F2: <function _create_code at 0x7fe074c49ca0>
195
+ ││└ # F2 [19 B]
196
+ │└ # Co [87 B]
197
+ ├┬ D1: <dict object at 0x7fe0750d4680>
198
+ │└ # D1 [22 B]
199
+ ├┬ D2: <dict object at 0x7fe074c5a1c0>
200
+ │└ # D2 [2 B]
201
+ ├┬ D2: <dict object at 0x7fe074f903c0>
202
+ │├┬ D2: <dict object at 0x7fe074f8ebc0>
203
+ ││└ # D2 [2 B]
204
+ │└ # D2 [23 B]
205
+ └ # F1 [180 B]
206
+
207
+ With trace, we see how ``dill`` stored the lambda (``F1``) by first storing
208
+ ``_create_function``, the underlying code object (``Co``) and ``_create_code``
209
+ (which is used to handle code objects), then we handle the reference to
210
+ the global dict (``D2``) plus other dictionaries (``D1`` and ``D2``) that
211
+ save the lambda object's state. A ``#`` marks when the object is actually stored.
212
+
213
+
214
+ More Information
215
+ ================
216
+
217
+ Probably the best way to get started is to look at the documentation at
218
+ http://dill.rtfd.io. Also see ``dill.tests`` for a set of scripts that
219
+ demonstrate how ``dill`` can serialize different Python objects. You can
220
+ run the test suite with ``python -m dill.tests``. The contents of any
221
+ pickle file can be examined with ``undill``. As ``dill`` conforms to
222
+ the ``pickle`` interface, the examples and documentation found at
223
+ http://docs.python.org/library/pickle.html also apply to ``dill``
224
+ if one will ``import dill as pickle``. The source code is also generally
225
+ well documented, so further questions may be resolved by inspecting the
226
+ code itself. Please feel free to submit a ticket on github, or ask a
227
+ question on stackoverflow (**@Mike McKerns**).
228
+ If you would like to share how you use ``dill`` in your work, please send
229
+ an email (to **mmckerns at uqfoundation dot org**).
230
+
231
+
232
+ Citation
233
+ ========
234
+
235
+ If you use ``dill`` to do research that leads to publication, we ask that you
236
+ acknowledge use of ``dill`` by citing the following in your publication::
237
+
238
+ M.M. McKerns, L. Strand, T. Sullivan, A. Fang, M.A.G. Aivazis,
239
+ "Building a framework for predictive science", Proceedings of
240
+ the 10th Python in Science Conference, 2011;
241
+ http://arxiv.org/pdf/1202.1056
242
+
243
+ Michael McKerns and Michael Aivazis,
244
+ "pathos: a framework for heterogeneous computing", 2010- ;
245
+ https://uqfoundation.github.io/project/pathos
246
+
247
+ Please see https://uqfoundation.github.io/project/pathos or
248
+ http://arxiv.org/pdf/1202.1056 for further information.
249
+
250
+ '''
251
+
252
+ __version__ = '0.3.9'
253
+ __author__ = 'Mike McKerns'
254
+
255
+ __license__ = '''
256
+ Copyright (c) 2004-2016 California Institute of Technology.
257
+ Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
258
+ All rights reserved.
259
+
260
+ This software is available subject to the conditions and terms laid
261
+ out below. By downloading and using this software you are agreeing
262
+ to the following conditions.
263
+
264
+ Redistribution and use in source and binary forms, with or without
265
+ modification, are permitted provided that the following conditions
266
+ are met:
267
+
268
+ - Redistributions of source code must retain the above copyright
269
+ notice, this list of conditions and the following disclaimer.
270
+
271
+ - Redistributions in binary form must reproduce the above copyright
272
+ notice, this list of conditions and the following disclaimer in the
273
+ documentation and/or other materials provided with the distribution.
274
+
275
+ - Neither the names of the copyright holders nor the names of any of
276
+ the contributors may be used to endorse or promote products derived
277
+ from this software without specific prior written permission.
278
+
279
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
280
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
281
+ TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
282
+ PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
283
+ CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
284
+ EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
285
+ PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
286
+ OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
287
+ WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
288
+ OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
289
+ ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
290
+
291
+ '''
.venv/lib/python3.11/site-packages/dill/__init__.py ADDED
@@ -0,0 +1,119 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ # author, version, license, and long description
10
+ try: # the package is installed
11
+ from .__info__ import __version__, __author__, __doc__, __license__
12
+ except: # pragma: no cover
13
+ import os
14
+ import sys
15
+ parent = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
16
+ sys.path.append(parent)
17
+ # get distribution meta info
18
+ from version import (__version__, __author__,
19
+ get_license_text, get_readme_as_rst)
20
+ __license__ = get_license_text(os.path.join(parent, 'LICENSE'))
21
+ __license__ = "\n%s" % __license__
22
+ __doc__ = get_readme_as_rst(os.path.join(parent, 'README.md'))
23
+ del os, sys, parent, get_license_text, get_readme_as_rst
24
+
25
+
26
+ from ._dill import (
27
+ dump, dumps, load, loads, copy,
28
+ Pickler, Unpickler, register, pickle, pickles, check,
29
+ DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, HANDLE_FMODE, CONTENTS_FMODE, FILE_FMODE,
30
+ PickleError, PickleWarning, PicklingError, PicklingWarning, UnpicklingError,
31
+ UnpicklingWarning,
32
+ )
33
+ from .session import (
34
+ dump_module, load_module, load_module_asdict,
35
+ dump_session, load_session # backward compatibility
36
+ )
37
+ from . import detect, logger, session, source, temp
38
+
39
+ # get global settings
40
+ from .settings import settings
41
+
42
+ # make sure "trace" is turned off
43
+ logger.trace(False)
44
+
45
+ objects = {}
46
+ # local import of dill._objects
47
+ #from . import _objects
48
+ #objects.update(_objects.succeeds)
49
+ #del _objects
50
+
51
+ # local import of dill.objtypes
52
+ from . import objtypes as types
53
+
54
def load_types(pickleable=True, unpickleable=True):
    """load pickleable and/or unpickleable types to ``dill.types``

    ``dill.types`` is meant to mimic the ``types`` module, providing a
    registry of object types. By default, the module is empty (for import
    speed purposes). Use the ``load_types`` function to load selected object
    types to the ``dill.types`` module.

    Args:
        pickleable (bool, default=True): if True, load pickleable types.
        unpickleable (bool, default=True): if True, load unpickleable types.

    Returns:
        None
    """
    from importlib import reload
    # local import of dill.objects
    from . import _objects
    if pickleable:
        objects.update(_objects.succeeds)
    else:
        # passing False removes the corresponding entries instead
        [objects.pop(obj,None) for obj in _objects.succeeds]
    if unpickleable:
        objects.update(_objects.failures)
    else:
        [objects.pop(obj,None) for obj in _objects.failures]
    objects.update(_objects.registered)
    del _objects
    # reset contents of types to 'empty'
    [types.__dict__.pop(obj) for obj in list(types.__dict__.keys()) \
        if obj.find('Type') != -1]
    # add corresponding types from objects to types
    reload(types)
87
+
88
def extend(use_dill=True):
    '''add (or remove) dill types to/from the pickle registry

    By default ``dill`` registers its types on ``pickle.Pickler.dispatch``,
    making them available after a plain ``import pickle``. Pass
    *use_dill=False* to remove all ``dill`` types from the ``pickle``
    dispatch again.

    Args:
        use_dill (bool, default=True): if True, extend the dispatch table.

    Returns:
        None
    '''
    from ._dill import _revert_extension, _extend
    if use_dill:
        _extend()
    else:
        _revert_extension()
    return
105
+
106
+ extend()
107
+
108
+
109
def license():
    """Print the dill license text (``__license__``) to stdout."""
    print(__license__)
113
+
114
def citation():
    """print citation"""
    # Slices the citation block out of the module docstring (__doc__).
    # NOTE(review): the hard-coded indices [-491:-118] assume the exact
    # layout of __info__'s docstring — re-check them if that text changes.
    print (__doc__[-491:-118])
    return
118
+
119
+ # end of file
.venv/lib/python3.11/site-packages/dill/__pycache__/__diff.cpython-311.pyc ADDED
Binary file (11 kB). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/__info__.cpython-311.pyc ADDED
Binary file (10.7 kB). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (5.8 kB). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/_objects.cpython-311.pyc ADDED
Binary file (26.6 kB). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/_shims.cpython-311.pyc ADDED
Binary file (8.05 kB). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/detect.cpython-311.pyc ADDED
Binary file (15.9 kB). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/logger.cpython-311.pyc ADDED
Binary file (13.4 kB). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/objtypes.cpython-311.pyc ADDED
Binary file (698 Bytes). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/pointers.cpython-311.pyc ADDED
Binary file (5.63 kB). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/session.cpython-311.pyc ADDED
Binary file (28.5 kB). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/settings.cpython-311.pyc ADDED
Binary file (400 Bytes). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/source.cpython-311.pyc ADDED
Binary file (49.1 kB). View file
 
.venv/lib/python3.11/site-packages/dill/__pycache__/temp.cpython-311.pyc ADDED
Binary file (10.5 kB). View file
 
.venv/lib/python3.11/site-packages/dill/_dill.py ADDED
@@ -0,0 +1,2226 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2015 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ dill: a utility for serialization of python objects
10
+
11
+ The primary functions in `dill` are :func:`dump` and
12
+ :func:`dumps` for serialization ("pickling") to a
13
+ file or to a string, respectively, and :func:`load`
14
+ and :func:`loads` for deserialization ("unpickling"),
15
+ similarly, from a file or from a string. Other notable
16
+ functions are :func:`~dill.dump_module` and
17
+ :func:`~dill.load_module`, which are used to save and
18
+ restore module objects, including an intepreter session.
19
+
20
+ Based on code written by Oren Tirosh and Armin Ronacher.
21
+ Extended to a (near) full set of the builtin types (in types module),
22
+ and coded to the pickle interface, by <mmckerns@caltech.edu>.
23
+ Initial port to python3 by Jonathan Dobson, continued by mmckerns.
24
+ Tested against "all" python types (Std. Lib. CH 1-15 @ 2.7) by mmckerns.
25
+ Tested against CH16+ Std. Lib. ... TBD.
26
+ """
27
+
28
+ from __future__ import annotations
29
+
30
+ __all__ = [
31
+ 'dump','dumps','load','loads','copy',
32
+ 'Pickler','Unpickler','register','pickle','pickles','check',
33
+ 'DEFAULT_PROTOCOL','HIGHEST_PROTOCOL','HANDLE_FMODE','CONTENTS_FMODE','FILE_FMODE',
34
+ 'PickleError','PickleWarning','PicklingError','PicklingWarning','UnpicklingError',
35
+ 'UnpicklingWarning',
36
+ ]
37
+
38
+ __module__ = 'dill'
39
+
40
+ import warnings
41
+ from .logger import adapter as logger
42
+ from .logger import trace as _trace
43
+ log = logger # backward compatibility (see issue #582)
44
+
45
+ import os
46
+ import sys
47
+ diff = None
48
+ _use_diff = False
49
+ OLD38 = (sys.hexversion < 0x3080000)
50
+ OLD39 = (sys.hexversion < 0x3090000)
51
+ OLD310 = (sys.hexversion < 0x30a0000)
52
+ OLD312a7 = (sys.hexversion < 0x30c00a7)
53
+ #XXX: get types from .objtypes ?
54
+ import builtins as __builtin__
55
+ from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler
56
+ from pickle import GLOBAL, POP
57
+ from _thread import LockType
58
+ from _thread import RLock as RLockType
59
+ try:
60
+ from _thread import _ExceptHookArgs as ExceptHookArgsType
61
+ except ImportError:
62
+ ExceptHookArgsType = None
63
+ try:
64
+ from _thread import _ThreadHandle as ThreadHandleType
65
+ except ImportError:
66
+ ThreadHandleType = None
67
+ #from io import IOBase
68
+ from types import CodeType, FunctionType, MethodType, GeneratorType, \
69
+ TracebackType, FrameType, ModuleType, BuiltinMethodType
70
+ BufferType = memoryview #XXX: unregistered
71
+ ClassType = type # no 'old-style' classes
72
+ EllipsisType = type(Ellipsis)
73
+ #FileType = IOBase
74
+ NotImplementedType = type(NotImplemented)
75
+ SliceType = slice
76
+ TypeType = type # 'new-style' classes #XXX: unregistered
77
+ XRangeType = range
78
+ from types import MappingProxyType as DictProxyType, new_class
79
+ from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError
80
+ import __main__ as _main_module
81
+ import marshal
82
+ import gc
83
+ # import zlib
84
+ import abc
85
+ import dataclasses
86
+ from weakref import ReferenceType, ProxyType, CallableProxyType
87
+ from collections import OrderedDict
88
+ from enum import Enum, EnumMeta
89
+ from functools import partial
90
+ from operator import itemgetter, attrgetter
91
+ GENERATOR_FAIL = False
92
+ import importlib.machinery
93
+ EXTENSION_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES)
94
+ try:
95
+ import ctypes
96
+ HAS_CTYPES = True
97
+ # if using `pypy`, pythonapi is not found
98
+ IS_PYPY = not hasattr(ctypes, 'pythonapi')
99
+ except ImportError:
100
+ HAS_CTYPES = False
101
+ IS_PYPY = False
102
+ NumpyUfuncType = None
103
+ NumpyDType = None
104
+ NumpyArrayType = None
105
+ try:
106
+ if not importlib.machinery.PathFinder().find_spec('numpy'):
107
+ raise ImportError("No module named 'numpy'")
108
+ NumpyUfuncType = True
109
+ NumpyDType = True
110
+ NumpyArrayType = True
111
+ except ImportError:
112
+ pass
113
+ def __hook__():
114
+ global NumpyArrayType, NumpyDType, NumpyUfuncType
115
+ from numpy import ufunc as NumpyUfuncType
116
+ from numpy import ndarray as NumpyArrayType
117
+ from numpy import dtype as NumpyDType
118
+ return True
119
+ if NumpyArrayType: # then has numpy
120
+ def ndarraysubclassinstance(obj_type):
121
+ if all((c.__module__, c.__name__) != ('numpy', 'ndarray') for c in obj_type.__mro__):
122
+ return False
123
+ # anything below here is a numpy array (or subclass) instance
124
+ __hook__() # import numpy (so the following works!!!)
125
+ # verify that __reduce__ has not been overridden
126
+ if obj_type.__reduce_ex__ is not NumpyArrayType.__reduce_ex__ \
127
+ or obj_type.__reduce__ is not NumpyArrayType.__reduce__:
128
+ return False
129
+ return True
130
+ def numpyufunc(obj_type):
131
+ return any((c.__module__, c.__name__) == ('numpy', 'ufunc') for c in obj_type.__mro__)
132
+ def numpydtype(obj_type):
133
+ if all((c.__module__, c.__name__) != ('numpy', 'dtype') for c in obj_type.__mro__):
134
+ return False
135
+ # anything below here is a numpy dtype
136
+ __hook__() # import numpy (so the following works!!!)
137
+ return obj_type is type(NumpyDType) # handles subclasses
138
+ else:
139
+ def ndarraysubclassinstance(obj): return False
140
+ def numpyufunc(obj): return False
141
+ def numpydtype(obj): return False
142
+
143
+ from types import GetSetDescriptorType, ClassMethodDescriptorType, \
144
+ WrapperDescriptorType, MethodDescriptorType, MemberDescriptorType, \
145
+ MethodWrapperType #XXX: unused
146
+
147
+ # make sure to add these 'hand-built' types to _typemap
148
+ CellType = type((lambda x: lambda y: x)(0).__closure__[0])
149
+ PartialType = type(partial(int, base=2))
150
+ SuperType = type(super(Exception, TypeError()))
151
+ ItemGetterType = type(itemgetter(0))
152
+ AttrGetterType = type(attrgetter('__repr__'))
153
+
154
+ try:
155
+ from functools import _lru_cache_wrapper as LRUCacheType
156
+ except ImportError:
157
+ LRUCacheType = None
158
+
159
+ if not isinstance(LRUCacheType, type):
160
+ LRUCacheType = None
161
+
162
+ def get_file_type(*args, **kwargs):
163
+ open = kwargs.pop("open", __builtin__.open)
164
+ f = open(os.devnull, *args, **kwargs)
165
+ t = type(f)
166
+ f.close()
167
+ return t
168
+
169
+ IS_PYODIDE = sys.platform == 'emscripten'
170
+
171
+ FileType = get_file_type('rb', buffering=0)
172
+ TextWrapperType = get_file_type('r', buffering=-1)
173
+ BufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1)
174
+ BufferedReaderType = get_file_type('rb', buffering=-1)
175
+ BufferedWriterType = get_file_type('wb', buffering=-1)
176
+ try:
177
+ from _pyio import open as _open
178
+ PyTextWrapperType = get_file_type('r', buffering=-1, open=_open)
179
+ PyBufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1, open=_open)
180
+ PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open)
181
+ PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open)
182
+ except ImportError:
183
+ PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None
184
+ from io import BytesIO as StringIO
185
+ InputType = OutputType = None
186
+ from socket import socket as SocketType
187
+ #FIXME: additionally calls ForkingPickler.register several times
188
+ from multiprocessing.reduction import _reduce_socket as reduce_socket
189
+ try: #pragma: no cover
190
+ IS_IPYTHON = __IPYTHON__ # is True
191
+ ExitType = None # IPython.core.autocall.ExitAutocall
192
+ IPYTHON_SINGLETONS = ('exit', 'quit', 'get_ipython')
193
+ except NameError:
194
+ IS_IPYTHON = False
195
+ try: ExitType = type(exit) # apparently 'exit' can be removed
196
+ except NameError: ExitType = None
197
+ IPYTHON_SINGLETONS = ()
198
+
199
+ import inspect
200
+ import typing
201
+
202
+
203
+ ### Shims for different versions of Python and dill
204
+ class Sentinel(object):
205
+ """
206
+ Create a unique sentinel object that is pickled as a constant.
207
+ """
208
+ def __init__(self, name, module_name=None):
209
+ self.name = name
210
+ if module_name is None:
211
+ # Use the calling frame's module
212
+ self.__module__ = inspect.currentframe().f_back.f_globals['__name__']
213
+ else:
214
+ self.__module__ = module_name # pragma: no cover
215
+ def __repr__(self):
216
+ return self.__module__ + '.' + self.name # pragma: no cover
217
+ def __copy__(self):
218
+ return self # pragma: no cover
219
+ def __deepcopy__(self, memo):
220
+ return self # pragma: no cover
221
+ def __reduce__(self):
222
+ return self.name
223
+ def __reduce_ex__(self, protocol):
224
+ return self.name
225
+
226
+ from . import _shims
227
+ from ._shims import Reduce, Getattr
228
+
229
+ ### File modes
230
+ #: Pickles the file handle, preserving mode. The position of the unpickled
231
+ #: object is as for a new file handle.
232
+ HANDLE_FMODE = 0
233
+ #: Pickles the file contents, creating a new file if on load the file does
234
+ #: not exist. The position = min(pickled position, EOF) and mode is chosen
235
+ #: as such that "best" preserves behavior of the original file.
236
+ CONTENTS_FMODE = 1
237
+ #: Pickles the entire file (handle and contents), preserving mode and position.
238
+ FILE_FMODE = 2
239
+
240
+ ### Shorthands (modified from python2.5/lib/pickle.py)
241
+ def copy(obj, *args, **kwds):
242
+ """
243
+ Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`).
244
+
245
+ See :func:`dumps` and :func:`loads` for keyword arguments.
246
+ """
247
+ ignore = kwds.pop('ignore', Unpickler.settings['ignore'])
248
+ return loads(dumps(obj, *args, **kwds), ignore=ignore)
249
+
250
+ def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
251
+ """
252
+ Pickle an object to a file.
253
+
254
+ See :func:`dumps` for keyword arguments.
255
+ """
256
+ from .settings import settings
257
+ protocol = settings['protocol'] if protocol is None else int(protocol)
258
+ _kwds = kwds.copy()
259
+ _kwds.update(dict(byref=byref, fmode=fmode, recurse=recurse))
260
+ Pickler(file, protocol, **_kwds).dump(obj)
261
+ return
262
+
263
+ def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
264
+ """
265
+ Pickle an object to a string.
266
+
267
+ *protocol* is the pickler protocol, as defined for Python *pickle*.
268
+
269
+ If *byref=True*, then dill behaves a lot more like pickle as certain
270
+ objects (like modules) are pickled by reference as opposed to attempting
271
+ to pickle the object itself.
272
+
273
+ If *recurse=True*, then objects referred to in the global dictionary
274
+ are recursively traced and pickled, instead of the default behavior
275
+ of attempting to store the entire global dictionary. This is needed for
276
+ functions defined via *exec()*.
277
+
278
+ *fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`,
279
+ or :const:`FILE_FMODE`) indicates how file handles will be pickled.
280
+ For example, when pickling a data file handle for transfer to a remote
281
+ compute service, *FILE_FMODE* will include the file contents in the
282
+ pickle and cursor position so that a remote method can operate
283
+ transparently on an object with an open file handle.
284
+
285
+ Default values for keyword arguments can be set in :mod:`dill.settings`.
286
+ """
287
+ file = StringIO()
288
+ dump(obj, file, protocol, byref, fmode, recurse, **kwds)#, strictio)
289
+ return file.getvalue()
290
+
291
+ def load(file, ignore=None, **kwds):
292
+ """
293
+ Unpickle an object from a file.
294
+
295
+ See :func:`loads` for keyword arguments.
296
+ """
297
+ return Unpickler(file, ignore=ignore, **kwds).load()
298
+
299
+ def loads(str, ignore=None, **kwds):
300
+ """
301
+ Unpickle an object from a string.
302
+
303
+ If *ignore=False* then objects whose class is defined in the module
304
+ *__main__* are updated to reference the existing class in *__main__*,
305
+ otherwise they are left to refer to the reconstructed type, which may
306
+ be different.
307
+
308
+ Default values for keyword arguments can be set in :mod:`dill.settings`.
309
+ """
310
+ file = StringIO(str)
311
+ return load(file, ignore, **kwds)
312
+
313
+ # def dumpzs(obj, protocol=None):
314
+ # """pickle an object to a compressed string"""
315
+ # return zlib.compress(dumps(obj, protocol))
316
+
317
+ # def loadzs(str):
318
+ # """unpickle an object from a compressed string"""
319
+ # return loads(zlib.decompress(str))
320
+
321
+ ### End: Shorthands ###
322
+
323
+ class MetaCatchingDict(dict):
324
+ def get(self, key, default=None):
325
+ try:
326
+ return self[key]
327
+ except KeyError:
328
+ return default
329
+
330
+ def __missing__(self, key):
331
+ if issubclass(key, type):
332
+ return save_type
333
+ else:
334
+ raise KeyError()
335
+
336
+ class PickleWarning(Warning, PickleError):
337
+ pass
338
+
339
+ class PicklingWarning(PickleWarning, PicklingError):
340
+ pass
341
+
342
+ class UnpicklingWarning(PickleWarning, UnpicklingError):
343
+ pass
344
+
345
+ ### Extend the Picklers
346
+ class Pickler(StockPickler):
347
+ """python's Pickler extended to interpreter sessions"""
348
+ dispatch: typing.Dict[type, typing.Callable[[Pickler, typing.Any], None]] \
349
+ = MetaCatchingDict(StockPickler.dispatch.copy())
350
+ """The dispatch table, a dictionary of serializing functions used
351
+ by Pickler to save objects of specific types. Use :func:`pickle`
352
+ or :func:`register` to associate types to custom functions.
353
+
354
+ :meta hide-value:
355
+ """
356
+ _session = False
357
+ from .settings import settings
358
+
359
+ def __init__(self, file, *args, **kwds):
360
+ settings = Pickler.settings
361
+ _byref = kwds.pop('byref', None)
362
+ #_strictio = kwds.pop('strictio', None)
363
+ _fmode = kwds.pop('fmode', None)
364
+ _recurse = kwds.pop('recurse', None)
365
+ StockPickler.__init__(self, file, *args, **kwds)
366
+ self._main = _main_module
367
+ self._diff_cache = {}
368
+ self._byref = settings['byref'] if _byref is None else _byref
369
+ self._strictio = False #_strictio
370
+ self._fmode = settings['fmode'] if _fmode is None else _fmode
371
+ self._recurse = settings['recurse'] if _recurse is None else _recurse
372
+ self._postproc = OrderedDict()
373
+ self._file = file
374
+
375
+ def save(self, obj, save_persistent_id=True):
376
+ # numpy hack
377
+ obj_type = type(obj)
378
+ if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch):
379
+ # register if the object is a numpy ufunc
380
+ # thanks to Paul Kienzle for pointing out ufuncs didn't pickle
381
+ if numpyufunc(obj_type):
382
+ @register(obj_type)
383
+ def save_numpy_ufunc(pickler, obj):
384
+ logger.trace(pickler, "Nu: %s", obj)
385
+ name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
386
+ StockPickler.save_global(pickler, obj, name=name)
387
+ logger.trace(pickler, "# Nu")
388
+ return
389
+ # NOTE: the above 'save' performs like:
390
+ # import copy_reg
391
+ # def udump(f): return f.__name__
392
+ # def uload(name): return getattr(numpy, name)
393
+ # copy_reg.pickle(NumpyUfuncType, udump, uload)
394
+ # register if the object is a numpy dtype
395
+ if numpydtype(obj_type):
396
+ @register(obj_type)
397
+ def save_numpy_dtype(pickler, obj):
398
+ logger.trace(pickler, "Dt: %s", obj)
399
+ pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj)
400
+ logger.trace(pickler, "# Dt")
401
+ return
402
+ # NOTE: the above 'save' performs like:
403
+ # import copy_reg
404
+ # def uload(name): return type(NumpyDType(name))
405
+ # def udump(f): return uload, (f.type,)
406
+ # copy_reg.pickle(NumpyDTypeType, udump, uload)
407
+ # register if the object is a subclassed numpy array instance
408
+ if ndarraysubclassinstance(obj_type):
409
+ @register(obj_type)
410
+ def save_numpy_array(pickler, obj):
411
+ logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype)
412
+ npdict = getattr(obj, '__dict__', None)
413
+ f, args, state = obj.__reduce__()
414
+ pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj)
415
+ logger.trace(pickler, "# Nu")
416
+ return
417
+ # end numpy hack
418
+
419
+ if GENERATOR_FAIL and obj_type is GeneratorType:
420
+ msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType
421
+ raise PicklingError(msg)
422
+ StockPickler.save(self, obj, save_persistent_id)
423
+
424
+ save.__doc__ = StockPickler.save.__doc__
425
+
426
+ def dump(self, obj): #NOTE: if settings change, need to update attributes
427
+ logger.trace_setup(self)
428
+ StockPickler.dump(self, obj)
429
+ dump.__doc__ = StockPickler.dump.__doc__
430
+
431
+ class Unpickler(StockUnpickler):
432
+ """python's Unpickler extended to interpreter sessions and more types"""
433
+ from .settings import settings
434
+ _session = False
435
+
436
+ def find_class(self, module, name):
437
+ if (module, name) == ('__builtin__', '__main__'):
438
+ return self._main.__dict__ #XXX: above set w/save_module_dict
439
+ elif (module, name) == ('__builtin__', 'NoneType'):
440
+ return type(None) #XXX: special case: NoneType missing
441
+ if module == 'dill.dill': module = 'dill._dill'
442
+ return StockUnpickler.find_class(self, module, name)
443
+
444
+ def __init__(self, *args, **kwds):
445
+ settings = Pickler.settings
446
+ _ignore = kwds.pop('ignore', None)
447
+ StockUnpickler.__init__(self, *args, **kwds)
448
+ self._main = _main_module
449
+ self._ignore = settings['ignore'] if _ignore is None else _ignore
450
+
451
+ def load(self): #NOTE: if settings change, need to update attributes
452
+ obj = StockUnpickler.load(self)
453
+ if type(obj).__module__ == getattr(_main_module, '__name__', '__main__'):
454
+ if not self._ignore:
455
+ # point obj class to main
456
+ try: obj.__class__ = getattr(self._main, type(obj).__name__)
457
+ except (AttributeError,TypeError): pass # defined in a file
458
+ #_main_module.__dict__.update(obj.__dict__) #XXX: should update globals ?
459
+ return obj
460
+ load.__doc__ = StockUnpickler.load.__doc__
461
+ pass
462
+
463
+ '''
464
+ def dispatch_table():
465
+ """get the dispatch table of registered types"""
466
+ return Pickler.dispatch
467
+ '''
468
+
469
+ pickle_dispatch_copy = StockPickler.dispatch.copy()
470
+
471
+ def pickle(t, func):
472
+ """expose :attr:`~Pickler.dispatch` table for user-created extensions"""
473
+ Pickler.dispatch[t] = func
474
+ return
475
+
476
+ def register(t):
477
+ """decorator to register types to Pickler's :attr:`~Pickler.dispatch` table"""
478
+ def proxy(func):
479
+ Pickler.dispatch[t] = func
480
+ return func
481
+ return proxy
482
+
483
+ def _revert_extension():
484
+ """drop dill-registered types from pickle's dispatch table"""
485
+ for type, func in list(StockPickler.dispatch.items()):
486
+ if func.__module__ == __name__:
487
+ del StockPickler.dispatch[type]
488
+ if type in pickle_dispatch_copy:
489
+ StockPickler.dispatch[type] = pickle_dispatch_copy[type]
490
+
491
+ def use_diff(on=True):
492
+ """
493
+ Reduces size of pickles by only including object which have changed.
494
+
495
+ Decreases pickle size but increases CPU time needed.
496
+ Also helps avoid some unpickleable objects.
497
+ MUST be called at start of script, otherwise changes will not be recorded.
498
+ """
499
+ global _use_diff, diff
500
+ _use_diff = on
501
+ if _use_diff and diff is None:
502
+ try:
503
+ from . import diff as d
504
+ except ImportError:
505
+ import diff as d
506
+ diff = d
507
+
508
+ def _create_typemap():
509
+ import types
510
+ d = dict(list(__builtin__.__dict__.items()) + \
511
+ list(types.__dict__.items())).items()
512
+ for key, value in d:
513
+ if getattr(value, '__module__', None) == 'builtins' \
514
+ and type(value) is type:
515
+ yield key, value
516
+ return
517
+ _reverse_typemap = dict(_create_typemap())
518
+ _reverse_typemap.update({
519
+ 'PartialType': PartialType,
520
+ 'SuperType': SuperType,
521
+ 'ItemGetterType': ItemGetterType,
522
+ 'AttrGetterType': AttrGetterType,
523
+ })
524
+ if sys.hexversion < 0x30800a2:
525
+ _reverse_typemap.update({
526
+ 'CellType': CellType,
527
+ })
528
+
529
+ # "Incidental" implementation specific types. Unpickling these types in another
530
+ # implementation of Python (PyPy -> CPython) is not guaranteed to work
531
+
532
+ # This dictionary should contain all types that appear in Python implementations
533
+ # but are not defined in https://docs.python.org/3/library/types.html#standard-interpreter-types
534
+ x=OrderedDict()
535
+ _incedental_reverse_typemap = {
536
+ 'FileType': FileType,
537
+ 'BufferedRandomType': BufferedRandomType,
538
+ 'BufferedReaderType': BufferedReaderType,
539
+ 'BufferedWriterType': BufferedWriterType,
540
+ 'TextWrapperType': TextWrapperType,
541
+ 'PyBufferedRandomType': PyBufferedRandomType,
542
+ 'PyBufferedReaderType': PyBufferedReaderType,
543
+ 'PyBufferedWriterType': PyBufferedWriterType,
544
+ 'PyTextWrapperType': PyTextWrapperType,
545
+ }
546
+
547
+ _incedental_reverse_typemap.update({
548
+ "DictKeysType": type({}.keys()),
549
+ "DictValuesType": type({}.values()),
550
+ "DictItemsType": type({}.items()),
551
+
552
+ "OdictKeysType": type(x.keys()),
553
+ "OdictValuesType": type(x.values()),
554
+ "OdictItemsType": type(x.items()),
555
+ })
556
+
557
+ if ExitType:
558
+ _incedental_reverse_typemap['ExitType'] = ExitType
559
+ if InputType:
560
+ _incedental_reverse_typemap['InputType'] = InputType
561
+ _incedental_reverse_typemap['OutputType'] = OutputType
562
+
563
+ '''
564
+ try:
565
+ import symtable
566
+ _incedental_reverse_typemap["SymtableEntryType"] = type(symtable.symtable("", "string", "exec")._table)
567
+ except: #FIXME: fails to pickle
568
+ pass
569
+
570
+ if sys.hexversion >= 0x30a00a0:
571
+ _incedental_reverse_typemap['LineIteratorType'] = type(compile('3', '', 'eval').co_lines())
572
+ '''
573
+
574
+ if sys.hexversion >= 0x30b00b0:
575
+ from types import GenericAlias
576
+ _incedental_reverse_typemap["GenericAliasIteratorType"] = type(iter(GenericAlias(list, (int,))))
577
+ '''
578
+ _incedental_reverse_typemap['PositionsIteratorType'] = type(compile('3', '', 'eval').co_positions())
579
+ '''
580
+
581
+ try:
582
+ import winreg
583
+ _incedental_reverse_typemap["HKEYType"] = winreg.HKEYType
584
+ except ImportError:
585
+ pass
586
+
587
+ _reverse_typemap.update(_incedental_reverse_typemap)
588
+ _incedental_types = set(_incedental_reverse_typemap.values())
589
+
590
+ del x
591
+
592
+ _typemap = dict((v, k) for k, v in _reverse_typemap.items())
593
+
594
+ def _unmarshal(string):
595
+ return marshal.loads(string)
596
+
597
+ def _load_type(name):
598
+ return _reverse_typemap[name]
599
+
600
+ def _create_type(typeobj, *args):
601
+ return typeobj(*args)
602
+
603
+ def _create_function(fcode, fglobals, fname=None, fdefaults=None,
604
+ fclosure=None, fdict=None, fkwdefaults=None):
605
+ # same as FunctionType, but enable passing __dict__ to new function,
606
+ # __dict__ is the storehouse for attributes added after function creation
607
+ func = FunctionType(fcode, fglobals or dict(), fname, fdefaults, fclosure)
608
+ if fdict is not None:
609
+ func.__dict__.update(fdict) #XXX: better copy? option to copy?
610
+ if fkwdefaults is not None:
611
+ func.__kwdefaults__ = fkwdefaults
612
+ # 'recurse' only stores referenced modules/objects in fglobals,
613
+ # thus we need to make sure that we have __builtins__ as well
614
+ if "__builtins__" not in func.__globals__:
615
+ func.__globals__["__builtins__"] = globals()["__builtins__"]
616
+ # assert id(fglobals) == id(func.__globals__)
617
+ return func
618
+
619
+ class match:
620
+ """
621
+ Make avaialable a limited structural pattern matching-like syntax for Python < 3.10
622
+
623
+ Patterns can be only tuples (without types) currently.
624
+ Inspired by the package pattern-matching-PEP634.
625
+
626
+ Usage:
627
+ >>> with match(args) as m:
628
+ >>> if m.case(('x', 'y')):
629
+ >>> # use m.x and m.y
630
+ >>> elif m.case(('x', 'y', 'z')):
631
+ >>> # use m.x, m.y and m.z
632
+
633
+ Equivalent native code for Python >= 3.10:
634
+ >>> match args:
635
+ >>> case (x, y):
636
+ >>> # use x and y
637
+ >>> case (x, y, z):
638
+ >>> # use x, y and z
639
+ """
640
+ def __init__(self, value):
641
+ self.value = value
642
+ self._fields = None
643
+ def __enter__(self):
644
+ return self
645
+ def __exit__(self, *exc_info):
646
+ return False
647
+ def case(self, args): # *args, **kwargs):
648
+ """just handles tuple patterns"""
649
+ if len(self.value) != len(args): # + len(kwargs):
650
+ return False
651
+ #if not all(isinstance(arg, pat) for arg, pat in zip(self.value[len(args):], kwargs.values())):
652
+ # return False
653
+ self.args = args # (*args, *kwargs)
654
+ return True
655
+ @property
656
+ def fields(self):
657
+ # Only bind names to values if necessary.
658
+ if self._fields is None:
659
+ self._fields = dict(zip(self.args, self.value))
660
+ return self._fields
661
+ def __getattr__(self, item):
662
+ return self.fields[item]
663
+
664
+ ALL_CODE_PARAMS = [
665
+ # Version New attribute CodeType parameters
666
+ ((3,11,'a'), 'co_endlinetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable endlinetable columntable exceptiontable freevars cellvars'),
667
+ ((3,11), 'co_exceptiontable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable exceptiontable freevars cellvars'),
668
+ ((3,10), 'co_linetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno linetable freevars cellvars'),
669
+ ((3,8), 'co_posonlyargcount', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
670
+ ((3,7), 'co_kwonlyargcount', 'argcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
671
+ ]
672
+ for version, new_attr, params in ALL_CODE_PARAMS:
673
+ if hasattr(CodeType, new_attr):
674
+ CODE_VERSION = version
675
+ CODE_PARAMS = params.split()
676
+ break
677
+ ENCODE_PARAMS = set(CODE_PARAMS).intersection(
678
+ ['code', 'lnotab', 'linetable', 'endlinetable', 'columntable', 'exceptiontable'])
679
+
680
+ def _create_code(*args):
681
+ if not isinstance(args[0], int): # co_lnotab stored from >= 3.10
682
+ LNOTAB, *args = args
683
+ else: # from < 3.10 (or pre-LNOTAB storage)
684
+ LNOTAB = b''
685
+
686
+ with match(args) as m:
687
+ # Python 3.11/3.12a (18 members)
688
+ if m.case((
689
+ 'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
690
+ 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
691
+ 'linetable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
692
+ )):
693
+ if CODE_VERSION == (3,11):
694
+ return CodeType(
695
+ *args[:6],
696
+ args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
697
+ *args[7:14],
698
+ args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
699
+ args[15].encode() if hasattr(args[15], 'encode') else args[15], # exceptiontable
700
+ args[16],
701
+ args[17],
702
+ )
703
+ fields = m.fields
704
+ # Python 3.10 or 3.8/3.9 (16 members)
705
+ elif m.case((
706
+ 'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
707
+ 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[6:13]
708
+ 'LNOTAB_OR_LINETABLE', 'freevars', 'cellvars' # args[13:]
709
+ )):
710
+ if CODE_VERSION == (3,10) or CODE_VERSION == (3,8):
711
+ return CodeType(
712
+ *args[:6],
713
+ args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
714
+ *args[7:13],
715
+ args[13].encode() if hasattr(args[13], 'encode') else args[13], # lnotab/linetable
716
+ args[14],
717
+ args[15],
718
+ )
719
+ fields = m.fields
720
+ if CODE_VERSION >= (3,10):
721
+ fields['linetable'] = m.LNOTAB_OR_LINETABLE
722
+ else:
723
+ fields['lnotab'] = LNOTAB if LNOTAB else m.LNOTAB_OR_LINETABLE
724
+ # Python 3.7 (15 args)
725
+ elif m.case((
726
+ 'argcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:5]
727
+ 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[5:12]
728
+ 'lnotab', 'freevars', 'cellvars' # args[12:]
729
+ )):
730
+ if CODE_VERSION == (3,7):
731
+ return CodeType(
732
+ *args[:5],
733
+ args[5].encode() if hasattr(args[5], 'encode') else args[5], # code
734
+ *args[6:12],
735
+ args[12].encode() if hasattr(args[12], 'encode') else args[12], # lnotab
736
+ args[13],
737
+ args[14],
738
+ )
739
+ fields = m.fields
740
+ # Python 3.11a (20 members)
741
+ elif m.case((
742
+ 'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
743
+ 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
744
+ 'linetable', 'endlinetable', 'columntable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
745
+ )):
746
+ if CODE_VERSION == (3,11,'a'):
747
+ return CodeType(
748
+ *args[:6],
749
+ args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
750
+ *args[7:14],
751
+ *(a.encode() if hasattr(a, 'encode') else a for a in args[14:18]), # linetable-exceptiontable
752
+ args[18],
753
+ args[19],
754
+ )
755
+ fields = m.fields
756
+ else:
757
+ raise UnpicklingError("pattern match for code object failed")
758
+
759
+ # The args format doesn't match this version.
760
+ fields.setdefault('posonlyargcount', 0) # from python <= 3.7
761
+ fields.setdefault('lnotab', LNOTAB) # from python >= 3.10
762
+ fields.setdefault('linetable', b'') # from python <= 3.9
763
+ fields.setdefault('qualname', fields['name']) # from python <= 3.10
764
+ fields.setdefault('exceptiontable', b'') # from python <= 3.10
765
+ fields.setdefault('endlinetable', None) # from python != 3.11a
766
+ fields.setdefault('columntable', None) # from python != 3.11a
767
+
768
+ args = (fields[k].encode() if k in ENCODE_PARAMS and hasattr(fields[k], 'encode') else fields[k]
769
+ for k in CODE_PARAMS)
770
+ return CodeType(*args)
771
+
772
+ def _create_ftype(ftypeobj, func, args, kwds):
773
+ if kwds is None:
774
+ kwds = {}
775
+ if args is None:
776
+ args = ()
777
+ return ftypeobj(func, *args, **kwds)
778
+
779
+ def _create_typing_tuple(argz, *args): #NOTE: workaround python/cpython#94245
780
+ if not argz:
781
+ return typing.Tuple[()].copy_with(())
782
+ if argz == ((),):
783
+ return typing.Tuple[()]
784
+ return typing.Tuple[argz]
785
+
786
if ThreadHandleType:
    def _create_thread_handle(ident, done, *args): #XXX: ignores 'blocking'
        """Recreate a threading thread handle for thread id *ident*,
        marking it done if the pickled handle was done.

        Only defined when the runtime provides ThreadHandleType
        (threading._make_thread_handle -- newer CPython).
        """
        from threading import _make_thread_handle
        handle = _make_thread_handle(ident)
        if done:
            handle._set_done()
        return handle
793
+
794
+ def _create_lock(locked, *args): #XXX: ignores 'blocking'
795
+ from threading import Lock
796
+ lock = Lock()
797
+ if locked:
798
+ if not lock.acquire(False):
799
+ raise UnpicklingError("Cannot acquire lock")
800
+ return lock
801
+
802
+ def _create_rlock(count, owner, *args): #XXX: ignores 'blocking'
803
+ lock = RLockType()
804
+ if owner is not None:
805
+ lock._acquire_restore((count, owner))
806
+ if owner and not lock._is_owned():
807
+ raise UnpicklingError("Cannot acquire lock")
808
+ return lock
809
+
810
# thanks to matsjoyce for adding all the different file modes
def _create_filehandle(name, mode, position, closed, open, strictio, fmode, fdata): # buffering=0
    """Recreate a file handle from its pickled (name, mode, position, ...).

    Behavior depends on *fmode*: HANDLE_FMODE reopens without restoring
    position, CONTENTS_FMODE reopens in place without truncating, and
    FILE_FMODE rewrites the file from the pickled *fdata*.  *strictio*
    turns missing-file / bad-position situations into errors instead of
    best-effort fallbacks.
    """
    # only pickles the handle, not the file contents... good? or StringIO(data)?
    # (for file contents see: http://effbot.org/librarybook/copy-reg.htm)
    # NOTE: handle special cases first (are there more special cases?)
    names = {'<stdin>':sys.__stdin__, '<stdout>':sys.__stdout__,
             '<stderr>':sys.__stderr__} #XXX: better fileno=(0,1,2) ?
    if name in list(names.keys()):
        f = names[name] #XXX: safer "f=sys.stdin"
    elif name == '<tmpfile>':
        f = os.tmpfile()
    elif name == '<fdopen>':
        import tempfile
        f = tempfile.TemporaryFile(mode)
    else:
        try:
            exists = os.path.exists(name)
        except Exception:
            exists = False
        if not exists:
            if strictio:
                raise FileNotFoundError("[Errno 2] No such file or directory: '%s'" % name)
            elif "r" in mode and fmode != FILE_FMODE:
                name = '<fdopen>' # or os.devnull?
            current_size = 0 # or maintain position?
        else:
            current_size = os.path.getsize(name)

        if position > current_size:
            if strictio:
                raise ValueError("invalid buffer size")
            elif fmode == CONTENTS_FMODE:
                position = current_size
        # try to open the file by name
        # NOTE: has different fileno
        try:
            #FIXME: missing: *buffering*, encoding, softspace
            if fmode == FILE_FMODE:
                # restore the pickled contents, then reopen in the saved mode
                f = open(name, mode if "w" in mode else "w")
                f.write(fdata)
                if "w" not in mode:
                    f.close()
                    f = open(name, mode)
            elif name == '<fdopen>': # file did not exist
                import tempfile
                f = tempfile.TemporaryFile(mode)
            # treat x mode as w mode
            elif fmode == CONTENTS_FMODE \
               and ("w" in mode or "x" in mode):
                # stop truncation when opening
                flags = os.O_CREAT
                if "+" in mode:
                    flags |= os.O_RDWR
                else:
                    flags |= os.O_WRONLY
                f = os.fdopen(os.open(name, flags), mode)
                # set name to the correct value
                r = getattr(f, "buffer", f)
                r = getattr(r, "raw", r)
                r.name = name
                assert f.name == name
            else:
                f = open(name, mode)
        except (IOError, FileNotFoundError):
            err = sys.exc_info()[1]
            raise UnpicklingError(err)
    if closed:
        f.close()
    elif position >= 0 and fmode != HANDLE_FMODE:
        f.seek(position)
    return f
881
+
882
+ def _create_stringi(value, position, closed):
883
+ f = StringIO(value)
884
+ if closed: f.close()
885
+ else: f.seek(position)
886
+ return f
887
+
888
+ def _create_stringo(value, position, closed):
889
+ f = StringIO()
890
+ if closed: f.close()
891
+ else:
892
+ f.write(value)
893
+ f.seek(position)
894
+ return f
895
+
896
+ class _itemgetter_helper(object):
897
+ def __init__(self):
898
+ self.items = []
899
+ def __getitem__(self, item):
900
+ self.items.append(item)
901
+ return
902
+
903
+ class _attrgetter_helper(object):
904
+ def __init__(self, attrs, index=None):
905
+ self.attrs = attrs
906
+ self.index = index
907
+ def __getattribute__(self, attr):
908
+ attrs = object.__getattribute__(self, "attrs")
909
+ index = object.__getattribute__(self, "index")
910
+ if index is None:
911
+ index = len(attrs)
912
+ attrs.append(attr)
913
+ else:
914
+ attrs[index] = ".".join([attrs[index], attr])
915
+ return type(self)(attrs, index)
916
+
917
+ class _dictproxy_helper(dict):
918
+ def __ror__(self, a):
919
+ return a
920
+
921
+ _dictproxy_helper_instance = _dictproxy_helper()
922
+
923
# Probe whether the __ror__ trick recovers the true mapping behind a proxy.
__d = {}
try:
    # In CPython 3.9 and later, this trick can be used to exploit the
    # implementation of the __or__ function of MappingProxyType to get the true
    # mapping referenced by the proxy. It may work for other implementations,
    # but is not guaranteed.
    MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance)
except Exception:
    # older Python / other implementations: fall back to copying proxies
    MAPPING_PROXY_TRICK = False
del __d
933
+
934
# _CELL_REF and _CELL_EMPTY are used to stay compatible with versions of dill
# whose _create_cell functions do not have a default value.
# _CELL_REF can be safely removed entirely (replaced by empty tuples for calls
# to _create_cell) once breaking changes are allowed.
_CELL_REF = None
# sentinel that marks "this closure cell had no contents when pickled"
_CELL_EMPTY = Sentinel('_CELL_EMPTY')
940
+
941
def _create_cell(contents=None):
    """Recreate a closure cell holding *contents*.

    If *contents* is the _CELL_EMPTY sentinel, the returned cell is left
    empty (its free variable is never bound, so cell_contents is unset).
    """
    if contents is _CELL_EMPTY:
        # 'value' stays unassigned here, yielding an empty cell
        return (lambda: value).__closure__[0]
    value = contents
    return (lambda: value).__closure__[0]
945
+
946
+ def _create_weakref(obj, *args):
947
+ from weakref import ref
948
+ if obj is None: # it's dead
949
+ from collections import UserDict
950
+ return ref(UserDict(), *args)
951
+ return ref(obj, *args)
952
+
953
+ def _create_weakproxy(obj, callable=False, *args):
954
+ from weakref import proxy
955
+ if obj is None: # it's dead
956
+ if callable: return proxy(lambda x:x, *args)
957
+ from collections import UserDict
958
+ return proxy(UserDict(), *args)
959
+ return proxy(obj, *args)
960
+
961
+ def _eval_repr(repr_str):
962
+ return eval(repr_str)
963
+
964
+ def _create_array(f, args, state, npdict=None):
965
+ #array = numpy.core.multiarray._reconstruct(*args)
966
+ array = f(*args)
967
+ array.__setstate__(state)
968
+ if npdict is not None: # we also have saved state in __dict__
969
+ array.__dict__.update(npdict)
970
+ return array
971
+
972
def _create_dtypemeta(scalar_type):
    """Recreate numpy's dtype metaclass (scalar_type None) or the concrete
    dtype subtype for *scalar_type*."""
    # NumpyDType is a module-level lazy placeholder; __hook__ performs the
    # deferred numpy import that replaces it with the real dtype type
    if NumpyDType is True: __hook__() # a bit hacky I think
    if scalar_type is None:
        return NumpyDType
    return type(NumpyDType(scalar_type))
977
+
978
def _create_namedtuple(name, fieldnames, modulename, defaults=None):
    """Recreate a namedtuple class: prefer the importable original if the
    module still provides it, otherwise rebuild an equivalent class."""
    existing = _import_module(modulename + '.' + name, safe=True)
    if existing is not None:
        return existing
    import collections
    return collections.namedtuple(name, fieldnames, defaults=defaults,
                                  module=modulename)
985
+
986
def _create_capsule(pointer, name, context, destructor):
    """Restore a PyCapsule: prefer locating the live capsule by its dotted
    *name*; otherwise rebuild one around the raw *pointer* (unsafe if the
    pointed-to C data is no longer in memory)."""
    attr_found = False
    try:
        # based on https://github.com/python/cpython/blob/f4095e53ab708d95e019c909d5928502775ba68f/Objects/capsule.c#L209-L231
        uname = name.decode('utf8')
        for i in range(1, uname.count('.')+1):
            names = uname.rsplit('.', i)
            try:
                module = __import__(names[0])
            except ImportError:
                # NOTE(review): 'module' may be unbound after this; the
                # resulting NameError is swallowed by the outer except
                pass
            obj = module
            for attr in names[1:]:
                obj = getattr(obj, attr)
            capsule = obj
            attr_found = True
            break
    except Exception:
        pass

    if attr_found:
        if _PyCapsule_IsValid(capsule, name):
            return capsule
        raise UnpicklingError("%s object exists at %s but a PyCapsule object was expected." % (type(capsule), name))
    else:
        #warnings.warn('Creating a new PyCapsule %s for a C data structure that may not be present in memory. Segmentation faults or other memory errors are possible.' % (name,), UnpicklingWarning)
        capsule = _PyCapsule_New(pointer, name, destructor)
        _PyCapsule_SetContext(capsule, context)
        return capsule
1015
+
1016
+ def _getattr(objclass, name, repr_str):
1017
+ # hack to grab the reference directly
1018
+ try: #XXX: works only for __builtin__ ?
1019
+ attr = repr_str.split("'")[3]
1020
+ return eval(attr+'.__dict__["'+name+'"]')
1021
+ except Exception:
1022
+ try:
1023
+ attr = objclass.__dict__
1024
+ if type(attr) is DictProxyType:
1025
+ attr = attr[name]
1026
+ else:
1027
+ attr = getattr(objclass,name)
1028
+ except (AttributeError, KeyError):
1029
+ attr = getattr(objclass,name)
1030
+ return attr
1031
+
1032
+ def _get_attr(self, name):
1033
+ # stop recursive pickling
1034
+ return getattr(self, name, None) or getattr(__builtin__, name)
1035
+
1036
+ def _import_module(import_name, safe=False):
1037
+ try:
1038
+ if import_name.startswith('__runtime__.'):
1039
+ return sys.modules[import_name]
1040
+ elif '.' in import_name:
1041
+ items = import_name.split('.')
1042
+ module = '.'.join(items[:-1])
1043
+ obj = items[-1]
1044
+ submodule = getattr(__import__(module, None, None, [obj]), obj)
1045
+ if isinstance(submodule, (ModuleType, type)):
1046
+ return submodule
1047
+ return __import__(import_name, None, None, [obj])
1048
+ else:
1049
+ return __import__(import_name)
1050
+ except (ImportError, AttributeError, KeyError):
1051
+ if safe:
1052
+ return None
1053
+ raise
1054
+
1055
+ # https://github.com/python/cpython/blob/a8912a0f8d9eba6d502c37d522221f9933e976db/Lib/pickle.py#L322-L333
1056
+ def _getattribute(obj, name):
1057
+ for subpath in name.split('.'):
1058
+ if subpath == '<locals>':
1059
+ raise AttributeError("Can't get local attribute {!r} on {!r}"
1060
+ .format(name, obj))
1061
+ try:
1062
+ parent = obj
1063
+ obj = getattr(obj, subpath)
1064
+ except AttributeError:
1065
+ raise AttributeError("Can't get attribute {!r} on {!r}"
1066
+ .format(name, obj))
1067
+ return obj, parent
1068
+
1069
def _locate_function(obj, pickler=None):
    """Return True if *obj* is importable from its declared module (and is
    the very same object), i.e. it is safe to pickle by reference."""
    module_name = getattr(obj, '__module__', None)
    # objects from __main__ (or a session's main module) are never by-ref
    if module_name in ['__main__', None] or \
            pickler and is_dill(pickler, child=False) and pickler._session and module_name == pickler._main.__name__:
        return False

    if hasattr(obj, '__qualname__'):
        module = _import_module(module_name, safe=True)
        try:
            found, _ = _getattribute(module, obj.__qualname__)
            return found is obj
        except AttributeError:
            return False
    else:
        found = _import_module(module_name + '.' + obj.__name__, safe=True)
        return found is obj
1084
+
1085
+
1086
+ def _setitems(dest, source):
1087
+ for k, v in source.items():
1088
+ dest[k] = v
1089
+
1090
+
1091
def _save_with_postproc(pickler, reduction, is_pickler_dill=None, obj=Getattr.NO_DEFAULT, postproc_list=None):
    """Save *obj* via *reduction*, then emit any postprocessing reductions
    queued for it (used to break reference cycles, e.g. in closure cells).

    The object's id is pushed onto pickler._postproc while its reduction is
    written; helpers encountered underneath append fix-up reductions that are
    written (in reverse) afterwards.  A re-entrant save of the same object
    falls back to save_global with a warning.
    """
    if obj is Getattr.NO_DEFAULT:
        obj = Reduce(reduction) # pragma: no cover

    if is_pickler_dill is None:
        is_pickler_dill = is_dill(pickler, child=True)
    if is_pickler_dill:
        # assert id(obj) not in pickler._postproc, str(obj) + ' already pushed on stack!'
        # if not hasattr(pickler, 'x'): pickler.x = 0
        # print(pickler.x*' ', 'push', obj, id(obj), pickler._recurse)
        # pickler.x += 1
        if postproc_list is None:
            postproc_list = []

        # Recursive object not supported. Default to a global instead.
        if id(obj) in pickler._postproc:
            name = '%s.%s ' % (obj.__module__, getattr(obj, '__qualname__', obj.__name__)) if hasattr(obj, '__module__') else ''
            warnings.warn('Cannot pickle %r: %shas recursive self-references that trigger a RecursionError.' % (obj, name), PicklingWarning)
            pickler.save_global(obj)
            return
        pickler._postproc[id(obj)] = postproc_list

    # TODO: Use state_setter in Python 3.8 to allow for faster cPickle implementations
    pickler.save_reduce(*reduction, obj=obj)

    if is_pickler_dill:
        # pickler.x -= 1
        # print(pickler.x*' ', 'pop', obj, id(obj))
        postproc = pickler._postproc.pop(id(obj))
        # assert postproc_list == postproc, 'Stack tampered!'
        for reduction in reversed(postproc):
            if reduction[0] is _setitems:
                # use the internal machinery of pickle.py to speedup when
                # updating a dictionary in postproc
                dest, source = reduction[1]
                if source:
                    pickler.write(pickler.get(pickler.memo[id(dest)][0]))
                    pickler._batch_setitems(iter(source.items()))
                else:
                    # Updating with an empty dictionary. Same as doing nothing.
                    continue
            else:
                pickler.save_reduce(*reduction)
            # pop None created by calling preprocessing step off stack
            pickler.write(POP)
1136
+
1137
+ #@register(CodeType)
1138
+ #def save_code(pickler, obj):
1139
+ # logger.trace(pickler, "Co: %s", obj)
1140
+ # pickler.save_reduce(_unmarshal, (marshal.dumps(obj),), obj=obj)
1141
+ # logger.trace(pickler, "# Co")
1142
+ # return
1143
+
1144
# The following function is based on 'save_codeobject' from 'cloudpickle'
# Copyright (c) 2012, Regents of the University of California.
# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
@register(CodeType)
def save_code(pickler, obj):
    """Pickle a code object by capturing its co_* members in the layout of
    the *running* interpreter; _create_code re-maps them to the unpickling
    interpreter's CodeType signature."""
    logger.trace(pickler, "Co: %s", obj)
    if hasattr(obj, "co_endlinetable"): # python 3.11a (20 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
            obj.co_firstlineno, obj.co_linetable, obj.co_endlinetable,
            obj.co_columntable, obj.co_exceptiontable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_exceptiontable"): # python 3.11 (18 args)
        # accessing co_lnotab raises DeprecationWarning on newer 3.12 alphas
        with warnings.catch_warnings():
            if not OLD312a7: # issue 597
                warnings.filterwarnings('ignore', category=DeprecationWarning)
            args = (
                obj.co_lnotab, # for < python 3.10 [not counted in args]
                obj.co_argcount, obj.co_posonlyargcount,
                obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
                obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
                obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
                obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable,
                obj.co_freevars, obj.co_cellvars
            )
    elif hasattr(obj, "co_linetable"): # python 3.10 (16 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_posonlyargcount"): # python 3.8 (16 args)
        args = (
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
            obj.co_cellvars
        )
    else: # python 3.7 (15 args)
        args = (
            obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
            obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
            obj.co_names, obj.co_varnames, obj.co_filename,
            obj.co_name, obj.co_firstlineno, obj.co_lnotab,
            obj.co_freevars, obj.co_cellvars
        )

    pickler.save_reduce(_create_code, args, obj=obj)
    logger.trace(pickler, "# Co")
    return
1206
+
1207
+ def _repr_dict(obj):
1208
+ """Make a short string representation of a dictionary."""
1209
+ return "<%s object at %#012x>" % (type(obj).__name__, id(obj))
1210
+
1211
@register(dict)
def save_module_dict(pickler, obj):
    """Pickle a dict, special-casing module __dict__s so they unpickle as a
    reference to the (re)imported module's namespace rather than by value.
    Raw GLOBAL opcodes ('c<module>\\n<name>\\n') are written directly."""
    if is_dill(pickler, child=False) and obj == pickler._main.__dict__ and \
            not (pickler._session and pickler._first_pass):
        logger.trace(pickler, "D1: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8'))
        logger.trace(pickler, "# D1")
    elif (not is_dill(pickler, child=False)) and (obj == _main_module.__dict__):
        logger.trace(pickler, "D3: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8')) #XXX: works in general?
        logger.trace(pickler, "# D3")
    elif '__name__' in obj and obj != _main_module.__dict__ \
            and type(obj['__name__']) is str \
            and obj is getattr(_import_module(obj['__name__'],True), '__dict__', None):
        # an importable module's namespace: reference it by module name
        logger.trace(pickler, "D4: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c%s\n__dict__\n' % obj['__name__'], 'UTF-8'))
        logger.trace(pickler, "# D4")
    else:
        logger.trace(pickler, "D2: %s", _repr_dict(obj)) # obj
        if is_dill(pickler, child=False) and pickler._session:
            # we only care about session the first pass thru
            pickler._first_pass = False
        StockPickler.save_dict(pickler, obj)
        logger.trace(pickler, "# D2")
    return
1236
+
1237
+
1238
if not OLD310 and MAPPING_PROXY_TRICK:
    def save_dict_view(dicttype):
        """Build (name, save-function) pairs for *dicttype*'s keys/values/items
        views, recovering the backing mapping via the mappingproxy trick."""
        def save_dict_view_for_function(func):
            def _save_dict_view(pickler, obj):
                logger.trace(pickler, "Dkvi: <%s>", obj)
                # obj.mapping is a mappingproxy; '|' with the helper yields
                # the true underlying dict (MAPPING_PROXY_TRICK)
                mapping = obj.mapping | _dictproxy_helper_instance
                pickler.save_reduce(func, (mapping,), obj=obj)
                logger.trace(pickler, "# Dkvi")
            return _save_dict_view
        return [
            (funcname, save_dict_view_for_function(getattr(dicttype, funcname)))
            for funcname in ('keys', 'values', 'items')
        ]
else:
    # The following functions are based on 'cloudpickle'
    # https://github.com/cloudpipe/cloudpickle/blob/5d89947288a18029672596a4d719093cc6d5a412/cloudpickle/cloudpickle.py#L922-L940
    # Copyright (c) 2012, Regents of the University of California.
    # Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
    # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
    def save_dict_view(dicttype):
        """Build (name, save-function) pairs for *dicttype*'s keys/values/items
        views by reconstructing an equivalent dict (fallback path)."""
        def save_dict_keys(pickler, obj):
            logger.trace(pickler, "Dk: <%s>", obj)
            dict_constructor = _shims.Reduce(dicttype.fromkeys, (list(obj),))
            pickler.save_reduce(dicttype.keys, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dk")

        def save_dict_values(pickler, obj):
            logger.trace(pickler, "Dv: <%s>", obj)
            dict_constructor = _shims.Reduce(dicttype, (enumerate(obj),))
            pickler.save_reduce(dicttype.values, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dv")

        def save_dict_items(pickler, obj):
            logger.trace(pickler, "Di: <%s>", obj)
            pickler.save_reduce(dicttype.items, (dicttype(obj),), obj=obj)
            logger.trace(pickler, "# Di")

        return (
            ('keys', save_dict_keys),
            ('values', save_dict_values),
            ('items', save_dict_items)
        )
1280
+
1281
# Register picklers for the dict-view types (dict_keys/values/items and the
# OrderedDict equivalents), but never override an existing dispatch entry.
for __dicttype in (
    dict,
    OrderedDict
):
    __obj = __dicttype()
    for __funcname, __savefunc in save_dict_view(__dicttype):
        # e.g. type({}.keys()) -> dict_keys
        __tview = type(getattr(__obj, __funcname)())
        if __tview not in Pickler.dispatch:
            Pickler.dispatch[__tview] = __savefunc
del __dicttype, __obj, __funcname, __tview, __savefunc
1291
+
1292
+
1293
@register(ClassType)
def save_classobj(pickler, obj): #FIXME: enable pickler._byref
    """Pickle a class: by value (name, bases, __dict__) when it cannot be
    located by import, otherwise by reference via save_global."""
    if not _locate_function(obj, pickler):
        logger.trace(pickler, "C1: %s", obj)
        pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__,
                                        obj.__dict__), obj=obj)
        #XXX: or obj.__dict__.copy()), obj=obj) ?
        logger.trace(pickler, "# C1")
    else:
        logger.trace(pickler, "C2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# C2")
    return
1307
+
1308
@register(typing._GenericAlias)
def save_generic_alias(pickler, obj):
    """Pickle typing generic aliases (e.g. List[int]).  Aliases whose
    __reduce__ is a bare name are saved as globals; empty Tuple aliases need
    the _create_typing_tuple workaround (python/cpython#94245)."""
    args = obj.__args__
    if type(obj.__reduce__()) is str:
        logger.trace(pickler, "Ga0: %s", obj)
        StockPickler.save_global(pickler, obj, name=obj.__reduce__())
        logger.trace(pickler, "# Ga0")
    elif obj.__origin__ is tuple and (not args or args == ((),)):
        logger.trace(pickler, "Ga1: %s", obj)
        pickler.save_reduce(_create_typing_tuple, (args,), obj=obj)
        logger.trace(pickler, "# Ga1")
    else:
        logger.trace(pickler, "Ga2: %s", obj)
        StockPickler.save_reduce(pickler, *obj.__reduce__(), obj=obj)
        logger.trace(pickler, "# Ga2")
    return
1324
+
1325
if ThreadHandleType:
    @register(ThreadHandleType)
    def save_thread_handle(pickler, obj):
        """Pickle a threading thread handle by its ident and done flag."""
        logger.trace(pickler, "Th: %s", obj)
        pickler.save_reduce(_create_thread_handle, (obj.ident, obj.is_done()), obj=obj)
        logger.trace(pickler, "# Th")
        return
1332
+
1333
@register(LockType) #XXX: copied Thread will have new Event (due to new Lock)
def save_lock(pickler, obj):
    """Pickle a threading.Lock by its locked state only."""
    logger.trace(pickler, "Lo: %s", obj)
    pickler.save_reduce(_create_lock, (obj.locked(),), obj=obj)
    logger.trace(pickler, "# Lo")
    return
1339
+
1340
@register(RLockType)
def save_rlock(pickler, obj):
    """Pickle an RLock by parsing its recursion count and owning thread id
    out of its repr (avoids _release_save, which would unlock the lock)."""
    logger.trace(pickler, "RL: %s", obj)
    r = obj.__repr__() # don't use _release_save as it unlocks the lock
    # repr looks like "<locked _thread.RLock object owner=... count=... at ...>"
    count = int(r.split('count=')[1].split()[0].rstrip('>'))
    owner = int(r.split('owner=')[1].split()[0])
    pickler.save_reduce(_create_rlock, (count,owner,), obj=obj)
    logger.trace(pickler, "# RL")
    return
1349
+
1350
#@register(SocketType) #FIXME: causes multiprocess test_pickling FAIL
def save_socket(pickler, obj):
    """Pickle a socket via reduce_socket (currently unregistered; see FIXME)."""
    logger.trace(pickler, "So: %s", obj)
    pickler.save_reduce(*reduce_socket(obj))
    logger.trace(pickler, "# So")
    return
1356
+
1357
def _save_file(pickler, obj, open_):
    """Common reducer for open file handles.

    Captures (name, mode, position, closed); when the pickler's fmode is
    FILE_FMODE the full file contents are captured too.  The std streams
    get position -1 so they are not seeked on unpickling.
    """
    if obj.closed:
        position = 0
    else:
        obj.flush()
        if obj in (sys.__stdout__, sys.__stderr__, sys.__stdin__):
            position = -1
        else:
            position = obj.tell()
    if is_dill(pickler, child=True) and pickler._fmode == FILE_FMODE:
        f = open_(obj.name, "r")
        fdata = f.read()
        f.close()
    else:
        fdata = ""
    if is_dill(pickler, child=True):
        strictio = pickler._strictio
        fmode = pickler._fmode
    else:
        strictio = False
        fmode = 0 # HANDLE_FMODE
    pickler.save_reduce(_create_filehandle, (obj.name, obj.mode, position,
                                             obj.closed, open_, strictio,
                                             fmode, fdata), obj=obj)
    return
1382
+
1383
+
1384
# Register _save_file for every concrete file/stream type; the C-implemented
# io types use the builtin open, the pure-python io types use _open.
@register(FileType) #XXX: in 3.x has buffer=0, needs different _create?
@register(BufferedReaderType)
@register(BufferedWriterType)
@register(TextWrapperType)
def save_file(pickler, obj):
    logger.trace(pickler, "Fi: %s", obj)
    f = _save_file(pickler, obj, open)
    logger.trace(pickler, "# Fi")
    return f

if BufferedRandomType:
    @register(BufferedRandomType)
    def save_file(pickler, obj):
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, open)
        logger.trace(pickler, "# Fi")
        return f

if PyTextWrapperType:
    @register(PyBufferedReaderType)
    @register(PyBufferedWriterType)
    @register(PyTextWrapperType)
    def save_file(pickler, obj):
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, _open)
        logger.trace(pickler, "# Fi")
        return f

    if PyBufferedRandomType:
        @register(PyBufferedRandomType)
        def save_file(pickler, obj):
            logger.trace(pickler, "Fi: %s", obj)
            f = _save_file(pickler, obj, _open)
            logger.trace(pickler, "# Fi")
            return f
1419
+
1420
+
1421
# The following two functions are based on 'saveCStringIoInput'
# and 'saveCStringIoOutput' from spickle
# Copyright (c) 2011 by science+computing ag
# License: http://www.apache.org/licenses/LICENSE-2.0
if InputType:
    @register(InputType)
    def save_stringi(pickler, obj):
        """Pickle a cStringIO input stream by value, position, closed."""
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringi, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return

    @register(OutputType)
    def save_stringo(pickler, obj):
        """Pickle a cStringIO output stream by value, position, closed."""
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringo, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return
1449
+
1450
if LRUCacheType is not None:
    from functools import lru_cache
    @register(LRUCacheType)
    def save_lru_cache(pickler, obj):
        """Pickle an lru_cache wrapper by re-wrapping its __wrapped__ with
        the same (maxsize, typed) parameters; the cache contents are lost."""
        logger.trace(pickler, "LRU: %s", obj)
        if OLD39:
            # cache_parameters() does not exist before 3.9
            kwargs = obj.cache_info()
            args = (kwargs.maxsize,)
        else:
            kwargs = obj.cache_parameters()
            args = (kwargs['maxsize'], kwargs['typed'])
        if args != lru_cache.__defaults__:
            wrapper = Reduce(lru_cache, args, is_callable=True)
        else:
            wrapper = lru_cache
        pickler.save_reduce(wrapper, (obj.__wrapped__,), obj=obj)
        logger.trace(pickler, "# LRU")
        return
1468
+
1469
@register(SuperType)
def save_super(pickler, obj):
    """Pickle a bound super object by (__thisclass__, __self__)."""
    logger.trace(pickler, "Su: %s", obj)
    pickler.save_reduce(super, (obj.__thisclass__, obj.__self__), obj=obj)
    logger.trace(pickler, "# Su")
    return
1475
+
1476
if IS_PYPY:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        """Pickle a bound method; PyPy builtin methods (non-CodeType __code__)
        are saved as a getattr on their instance instead."""
        code = getattr(obj.__func__, '__code__', None)
        if code is not None and type(code) is not CodeType \
                and getattr(obj.__self__, obj.__name__) == obj:
            # Some PyPy builtin functions have no module name
            logger.trace(pickler, "Me2: %s", obj)
            # TODO: verify that this works for all PyPy builtin methods
            pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
            logger.trace(pickler, "# Me2")
            return

        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
else:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        """Pickle a bound method as MethodType(__func__, __self__)."""
        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
1500
+
1501
if not IS_PYPY:
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    @register(MethodDescriptorType)
    @register(WrapperDescriptorType)
    @register(ClassMethodDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        """Pickle C-level descriptors via _getattr on (__objclass__, name, repr)."""
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
else:
    # PyPy exposes fewer distinct descriptor types
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        """Pickle C-level descriptors via _getattr on (__objclass__, name, repr)."""
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
1522
+
1523
@register(CellType)
def save_cell(pickler, obj):
    """Pickle a closure cell.  Empty cells and cells participating in
    reference cycles get special postprocessing so the cycle is re-knit
    after the cell itself exists."""
    try:
        f = obj.cell_contents
    except ValueError: # cell is empty
        logger.trace(pickler, "Ce3: %s", obj)
        # _shims._CELL_EMPTY is defined in _shims.py to support PyPy 2.7.
        # It unpickles to a sentinel object _dill._CELL_EMPTY, also created in
        # _shims.py. This object is not present in Python 3 because the cell's
        # contents can be deleted in newer versions of Python. The reduce object
        # will instead unpickle to None if unpickled in Python 3.

        # When breaking changes are made to dill, (_shims._CELL_EMPTY,) can
        # be replaced by () OR the delattr function can be removed repending on
        # whichever is more convienient.
        pickler.save_reduce(_create_cell, (_shims._CELL_EMPTY,), obj=obj)
        # Call the function _delattr on the cell's cell_contents attribute
        # The result of this function call will be None
        pickler.save_reduce(_shims._delattr, (obj, 'cell_contents'))
        # pop None created by calling _delattr off stack
        pickler.write(POP)
        logger.trace(pickler, "# Ce3")
        return
    if is_dill(pickler, child=True):
        if id(f) in pickler._postproc:
            # Already seen. Add to its postprocessing.
            postproc = pickler._postproc[id(f)]
        else:
            # Haven't seen it. Add to the highest possible object and set its
            # value as late as possible to prevent cycle.
            postproc = next(iter(pickler._postproc.values()), None)
        if postproc is not None:
            logger.trace(pickler, "Ce2: %s", obj)
            # _CELL_REF is defined in _shims.py to support older versions of
            # dill. When breaking changes are made to dill, (_CELL_REF,) can
            # be replaced by ()
            pickler.save_reduce(_create_cell, (_CELL_REF,), obj=obj)
            postproc.append((_shims._setattr, (obj, 'cell_contents', f)))
            logger.trace(pickler, "# Ce2")
            return
    logger.trace(pickler, "Ce1: %s", obj)
    pickler.save_reduce(_create_cell, (f,), obj=obj)
    logger.trace(pickler, "# Ce1")
    return
1567
+
1568
if MAPPING_PROXY_TRICK:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        """Pickle a mappingproxy, recovering its true backing mapping via
        the __ror__ trick (keeps identity with the owner's __dict__)."""
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        mapping = obj | _dictproxy_helper_instance
        pickler.save_reduce(DictProxyType, (mapping,), obj=obj)
        logger.trace(pickler, "# Mp")
        return
else:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        """Pickle a mappingproxy by copying it (fallback when the trick is
        unavailable)."""
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        pickler.save_reduce(DictProxyType, (obj.copy(),), obj=obj)
        logger.trace(pickler, "# Mp")
        return
1583
+
1584
@register(SliceType)
def save_slice(pickler, obj):
    """Pickle a ``slice`` by reducing it to ``slice(start, stop, step)``."""
    logger.trace(pickler, "Sl: %s", obj)
    bounds = (obj.start, obj.stop, obj.step)
    pickler.save_reduce(slice, bounds, obj=obj)
    logger.trace(pickler, "# Sl")
1590
+
1591
@register(XRangeType)
@register(EllipsisType)
@register(NotImplementedType)
def save_singleton(pickler, obj):
    """Pickle singleton-like builtins (``range``, ``Ellipsis``,
    ``NotImplemented``) by round-tripping their ``repr`` through
    ``_eval_repr`` at load time."""
    logger.trace(pickler, "Si: %s", obj)
    pickler.save_reduce(_eval_repr, (repr(obj),), obj=obj)
    logger.trace(pickler, "# Si")
1599
+
1600
def _proxy_helper(obj): # a dead proxy returns a reference to None
    """get memory address of proxy's reference object

    Parses the address out of ``str(obj)`` (the referent's repr for a live
    weakproxy); falls back to ``repr(obj)`` or a full gc scan on PyPy.
    Raises ReferenceError if no referent can be found.
    """
    _repr = repr(obj)
    try: _str = str(obj)
    except ReferenceError: # it's a dead proxy
        return id(None)
    # For a non-proxy, str and repr agree, so the object itself is the referent.
    if _str == _repr: return id(obj) # it's a repr
    try: # either way, it's a proxy from here
        # typical form: "<... object at 0xADDRESS>" -- take the hex suffix
        address = int(_str.rstrip('>').split(' at ')[-1], base=16)
    except ValueError: # special case: proxy of a 'type'
        if not IS_PYPY:
            address = int(_repr.rstrip('>').split(' at ')[-1], base=16)
        else:
            # PyPy reprs carry no address; search all gc-tracked objects for
            # one whose repr matches the proxy's str.
            objects = iter(gc.get_objects())
            for _obj in objects:
                if repr(_obj) == _str: return id(_obj)
            # all bad below... nothing found so throw ReferenceError
            msg = "Cannot reference object for proxy at '%s'" % id(obj)
            raise ReferenceError(msg)
    return address
1620
+
1621
+ def _locate_object(address, module=None):
1622
+ """get object located at the given memory address (inverse of id(obj))"""
1623
+ special = [None, True, False] #XXX: more...?
1624
+ for obj in special:
1625
+ if address == id(obj): return obj
1626
+ if module:
1627
+ objects = iter(module.__dict__.values())
1628
+ else: objects = iter(gc.get_objects())
1629
+ for obj in objects:
1630
+ if address == id(obj): return obj
1631
+ # all bad below... nothing found so throw ReferenceError or TypeError
1632
+ try: address = hex(address)
1633
+ except TypeError:
1634
+ raise TypeError("'%s' is not a valid memory address" % str(address))
1635
+ raise ReferenceError("Cannot reference object at '%s'" % address)
1636
+
1637
@register(ReferenceType)
def save_weakref(pickler, obj):
    """Pickle a ``weakref.ref`` by saving its referent (None if dead)."""
    logger.trace(pickler, "R1: %s", obj)
    # Dereference the weakref; a dead reference yields None, which
    # _create_weakref handles on the load side.
    referent = obj()
    pickler.save_reduce(_create_weakref, (referent,), obj=obj)
    logger.trace(pickler, "# R1")
1645
+
1646
@register(ProxyType)
@register(CallableProxyType)
def save_weakproxy(pickler, obj):
    """Pickle a weakref proxy by locating its referent in memory and
    recording whether the proxy was callable."""
    # Must do string substitution here and use %r to avoid ReferenceError.
    logger.trace(pickler, "R2: %r" % obj)
    # _proxy_helper extracts the referent's address; _locate_object maps it
    # back to the live object (or None for a dead proxy).
    refobj = _locate_object(_proxy_helper(obj))
    pickler.save_reduce(_create_weakproxy, (refobj, callable(obj)), obj=obj)
    logger.trace(pickler, "# R2")
    return
1655
+
1656
+ def _is_builtin_module(module):
1657
+ if not hasattr(module, "__file__"): return True
1658
+ if module.__file__ is None: return False
1659
+ # If a module file name starts with prefix, it should be a builtin
1660
+ # module, so should always be pickled as a reference.
1661
+ names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"]
1662
+ rp = os.path.realpath
1663
+ # See https://github.com/uqfoundation/dill/issues/566
1664
+ return (
1665
+ any(
1666
+ module.__file__.startswith(getattr(sys, name))
1667
+ or rp(module.__file__).startswith(rp(getattr(sys, name)))
1668
+ for name in names
1669
+ if hasattr(sys, name)
1670
+ )
1671
+ or module.__file__.endswith(EXTENSION_SUFFIXES)
1672
+ or 'site-packages' in module.__file__
1673
+ )
1674
+
1675
+ def _is_imported_module(module):
1676
+ return getattr(module, '__loader__', None) is not None or module in sys.modules.values()
1677
+
1678
@register(ModuleType)
def save_module(pickler, obj):
    """Pickle a module, either by reference (builtin/installed modules) or
    by value (runtime-created modules and the session __main__ module)."""
    if False: #_use_diff:
        # dead branch kept for the (disabled) module-diffing feature
        if obj.__name__.split('.', 1)[0] != "dill":
            try:
                changed = diff.whats_changed(obj, seen=pickler._diff_cache)[0]
            except RuntimeError: # not memorised module, probably part of dill
                pass
            else:
                logger.trace(pickler, "M2: %s with diff", obj)
                logger.info("Diff: %s", changed.keys())
                pickler.save_reduce(_import_module, (obj.__name__,), obj=obj,
                                    state=changed)
                logger.trace(pickler, "# M2")
                return

        logger.trace(pickler, "M1: %s", obj)
        pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
        logger.trace(pickler, "# M1")
    else:
        builtin_mod = _is_builtin_module(obj)
        # the module currently being saved as a session (dill pickler only)
        is_session_main = is_dill(pickler, child=True) and obj is pickler._main
        if (obj.__name__ not in ("builtins", "dill", "dill._dill") and not builtin_mod
                or is_session_main):
            # Save by value: pickle the module's dict as reconstruction state.
            logger.trace(pickler, "M1: %s", obj)
            # Hack for handling module-type objects in load_module().
            mod_name = obj.__name__ if _is_imported_module(obj) else '__runtime__.%s' % obj.__name__
            # Second references are saved as __builtin__.__main__ in save_module_dict().
            main_dict = obj.__dict__.copy()
            # drop entries that are recreated on import / not picklable
            for item in ('__builtins__', '__loader__'):
                main_dict.pop(item, None)
            for item in IPYTHON_SINGLETONS: #pragma: no cover
                if getattr(main_dict.get(item), '__module__', '').startswith('IPython'):
                    del main_dict[item]
            pickler.save_reduce(_import_module, (mod_name,), obj=obj, state=main_dict)
            logger.trace(pickler, "# M1")
        elif obj.__name__ == "dill._dill":
            # special-cased so dill's own internals unpickle consistently
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_global(obj, name="_dill")
            logger.trace(pickler, "# M2")
        else:
            # Save by reference: re-import by name at load time.
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
            logger.trace(pickler, "# M2")
        return
1723
+
1724
# The following function is based on '_extract_class_dict' from 'cloudpickle'
# Copyright (c) 2012, Regents of the University of California.
# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
def _get_typedict_type(cls, clsdict, attrs, postproc_list):
    """Retrieve a copy of the dict of a class without the inherited methods"""
    # Build the view of attributes inherited from the bases.  With multiple
    # bases, later (earlier-in-MRO) bases overwrite earlier ones, hence the
    # reversed() iteration.
    if len(cls.__bases__) == 1:
        inherited_dict = cls.__bases__[0].__dict__
    else:
        inherited_dict = {}
        for base in reversed(cls.__bases__):
            inherited_dict.update(base.__dict__)
    to_remove = []
    # dict.items/dict.pop are used directly in case clsdict overrides them.
    for name, value in dict.items(clsdict):
        try:
            base_value = inherited_dict[name]
            # Only drop entries that are the very same object as the base's
            # (identity, not equality) and look like named definitions.
            if value is base_value and hasattr(value, '__qualname__'):
                to_remove.append(name)
        except KeyError:
            pass
    for name in to_remove:
        dict.pop(clsdict, name)

    if issubclass(type(cls), type):
        # descriptors recreated automatically by the type constructor
        clsdict.pop('__dict__', None)
        clsdict.pop('__weakref__', None)
        # clsdict.pop('__prepare__', None)
    return clsdict, attrs
1752
+
1753
+ def _get_typedict_abc(obj, _dict, attrs, postproc_list):
1754
+ if hasattr(abc, '_get_dump'):
1755
+ (registry, _, _, _) = abc._get_dump(obj)
1756
+ register = obj.register
1757
+ postproc_list.extend((register, (reg(),)) for reg in registry)
1758
+ elif hasattr(obj, '_abc_registry'):
1759
+ registry = obj._abc_registry
1760
+ register = obj.register
1761
+ postproc_list.extend((register, (reg,)) for reg in registry)
1762
+ else:
1763
+ raise PicklingError("Cannot find registry of ABC %s", obj)
1764
+
1765
+ if '_abc_registry' in _dict:
1766
+ _dict.pop('_abc_registry', None)
1767
+ _dict.pop('_abc_cache', None)
1768
+ _dict.pop('_abc_negative_cache', None)
1769
+ # _dict.pop('_abc_negative_cache_version', None)
1770
+ else:
1771
+ _dict.pop('_abc_impl', None)
1772
+ return _dict, attrs
1773
+
1774
@register(TypeType)
def save_type(pickler, obj, postproc_list=None):
    """Pickle a class/type object.

    Dispatches between: known builtin types (T1), namedtuple classes (T6),
    singleton-ish types (T7), classes saved by value (T2), and classes saved
    by reference via the stock pickler (T4).
    """
    if obj in _typemap:
        # T1: a type dill knows by name -- save a reference into _typemap.
        logger.trace(pickler, "T1: %s", obj)
        # if obj in _incedental_types:
        #     warnings.warn('Type %r may only exist on this implementation of Python and cannot be unpickled in other implementations.' % (obj,), PicklingWarning)
        pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj)
        logger.trace(pickler, "# T1")
    elif obj.__bases__ == (tuple,) and all([hasattr(obj, attr) for attr in ('_fields','_asdict','_make','_replace')]):
        # special case: namedtuples
        logger.trace(pickler, "T6: %s", obj)

        obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        if obj.__name__ != obj_name:
            # preserve a qualname that differs from the plain name
            if postproc_list is None:
                postproc_list = []
            postproc_list.append((setattr, (obj, '__qualname__', obj_name)))

        if not obj._field_defaults:
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__)), obj=obj, postproc_list=postproc_list)
        else:
            # keep defaults in field order
            defaults = [obj._field_defaults[field] for field in obj._fields if field in obj._field_defaults]
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__, defaults)), obj=obj, postproc_list=postproc_list)
        logger.trace(pickler, "# T6")
        return

    # special cases: NoneType, NotImplementedType, EllipsisType, EnumMeta, etc
    elif obj is type(None):
        logger.trace(pickler, "T7: %s", obj)
        #XXX: pickler.save_reduce(type, (None,), obj=obj)
        pickler.write(GLOBAL + b'__builtin__\nNoneType\n')
        logger.trace(pickler, "# T7")
    elif obj is NotImplementedType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (NotImplemented,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EllipsisType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (Ellipsis,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EnumMeta:
        logger.trace(pickler, "T7: %s", obj)
        pickler.write(GLOBAL + b'enum\nEnumMeta\n')
        logger.trace(pickler, "# T7")
    elif obj is ExceptHookArgsType: #NOTE: must be after NoneType for pypy
        logger.trace(pickler, "T7: %s", obj)
        pickler.write(GLOBAL + b'threading\nExceptHookArgs\n')
        logger.trace(pickler, "# T7")

    else:
        _byref = getattr(pickler, '_byref', None)
        obj_recursive = id(obj) in getattr(pickler, '_postproc', ())
        incorrectly_named = not _locate_function(obj, pickler)
        if not _byref and not obj_recursive and incorrectly_named: # not a function, but the name was held over
            # T2: save the class by value (reconstruct it at load time).
            if postproc_list is None:
                postproc_list = []

            # thanks to Tom Stepleton pointing out pickler._session unneeded
            logger.trace(pickler, "T2: %s", obj)
            _dict, attrs = _get_typedict_type(obj, obj.__dict__.copy(), None, postproc_list) # copy dict proxy to a dict

            #print (_dict)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            slots = _dict.get('__slots__', ())
            if type(slots) == str:
                # __slots__ accepts a single string
                slots = (slots,)

            # slot descriptors are recreated by the type constructor
            for name in slots:
                _dict.pop(name, None)

            if isinstance(obj, abc.ABCMeta):
                logger.trace(pickler, "ABC: %s", obj)
                _dict, attrs = _get_typedict_abc(obj, _dict, attrs, postproc_list)
                logger.trace(pickler, "# ABC")

            qualname = getattr(obj, '__qualname__', None)
            if attrs is not None:
                for k, v in attrs.items():
                    postproc_list.append((setattr, (obj, k, v)))
                # TODO: Consider using the state argument to save_reduce?
            if qualname is not None:
                postproc_list.append((setattr, (obj, '__qualname__', qualname)))

            if not hasattr(obj, '__orig_bases__'):
                _save_with_postproc(pickler, (_create_type, (
                    type(obj), obj.__name__, obj.__bases__, _dict
                )), obj=obj, postproc_list=postproc_list)
            else:
                # Generic classes: rebuild with types.new_class so that
                # __orig_bases__ / __class_getitem__ machinery is honored.
                # This case will always work, but might be overkill.
                _metadict = {
                    'metaclass': type(obj)
                }

                if _dict:
                    _dict_update = PartialType(_setitems, source=_dict)
                else:
                    _dict_update = None

                _save_with_postproc(pickler, (new_class, (
                    obj.__name__, obj.__orig_bases__, _metadict, _dict_update
                )), obj=obj, postproc_list=postproc_list)
            logger.trace(pickler, "# T2")
        else:
            # T4: save the class by reference (global name lookup).
            obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
            logger.trace(pickler, "T4: %s", obj)
            if incorrectly_named:
                warnings.warn(
                    "Cannot locate reference to %r." % (obj,),
                    PicklingWarning,
                    stacklevel=3,
                )
            if obj_recursive:
                warnings.warn(
                    "Cannot pickle %r: %s.%s has recursive self-references that "
                    "trigger a RecursionError." % (obj, obj.__module__, obj_name),
                    PicklingWarning,
                    stacklevel=3,
                )
            #print (obj.__dict__)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            StockPickler.save_global(pickler, obj, name=obj_name)
            logger.trace(pickler, "# T4")
    return
1900
+
1901
@register(property)
@register(abc.abstractproperty)
def save_property(pickler, obj):
    """Pickle a property by reconstructing it from its accessors and doc."""
    logger.trace(pickler, "Pr: %s", obj)
    accessors = (obj.fget, obj.fset, obj.fdel, obj.__doc__)
    pickler.save_reduce(type(obj), accessors, obj=obj)
    logger.trace(pickler, "# Pr")
1908
+
1909
@register(staticmethod)
@register(classmethod)
@register(abc.abstractstaticmethod)
@register(abc.abstractclassmethod)
def save_classmethod(pickler, obj):
    """Pickle static/class method wrappers by re-wrapping the underlying
    function in the same descriptor type at load time."""
    logger.trace(pickler, "Cm: %s", obj)
    # NOTE: any __dict__ state attached to the wrapper itself is not
    # preserved -- only the wrapped function round-trips.
    pickler.save_reduce(type(obj), (obj.__func__,), obj=obj)
    logger.trace(pickler, "# Cm")
1927
+
1928
@register(FunctionType)
def save_function(pickler, obj):
    """Pickle a function: by reference when it can be located by name (F2),
    by value otherwise (F1), with a PyPy-builtin special case (F3)."""
    if not _locate_function(obj, pickler):
        if type(obj.__code__) is not CodeType:
            # Some PyPy builtin functions have no module name, and thus are not
            # able to be located
            module_name = getattr(obj, '__module__', None)
            if module_name is None:
                module_name = __builtin__.__name__
            module = _import_module(module_name, safe=True)
            _pypy_builtin = False
            try:
                found, _ = _getattribute(module, obj.__qualname__)
                if getattr(found, '__func__', None) is obj:
                    _pypy_builtin = True
            except AttributeError:
                pass

            if _pypy_builtin:
                # F3: rebuild by re-looking-up the bound wrapper's __func__.
                logger.trace(pickler, "F3: %s", obj)
                pickler.save_reduce(getattr, (found, '__func__'), obj=obj)
                logger.trace(pickler, "# F3")
                return

        # F1: save the function by value (code, globals, defaults, closure).
        logger.trace(pickler, "F1: %s", obj)
        _recurse = getattr(pickler, '_recurse', None)
        _postproc = getattr(pickler, '_postproc', None)
        _main_modified = getattr(pickler, '_main_modified', None)
        _original_main = getattr(pickler, '_original_main', __builtin__)#'None'
        postproc_list = []
        if _recurse:
            # recurse to get all globals referred to by obj
            from .detect import globalvars
            globs_copy = globalvars(obj, recurse=True, builtin=True)

            # Add the name of the module to the globs dictionary to prevent
            # the duplication of the dictionary. Pickle the unpopulated
            # globals dictionary and set the remaining items after the function
            # is created to correctly handle recursion.
            globs = {'__name__': obj.__module__}
        else:
            globs_copy = obj.__globals__

            # If the globals is the __dict__ from the module being saved as a
            # session, substitute it by the dictionary being actually saved.
            if _main_modified and globs_copy is _original_main.__dict__:
                globs_copy = getattr(pickler, '_main', _original_main).__dict__
                globs = globs_copy
            # If the globals is a module __dict__, do not save it in the pickle.
            elif globs_copy is not None and obj.__module__ is not None and \
                    getattr(_import_module(obj.__module__, True), '__dict__', None) is globs_copy:
                globs = globs_copy
            else:
                globs = {'__name__': obj.__module__}

        if globs_copy is not None and globs is not globs_copy:
            # In the case that the globals are copied, we need to ensure that
            # the globals dictionary is updated when all objects in the
            # dictionary are already created.
            glob_ids = {id(g) for g in globs_copy.values()}
            for stack_element in _postproc:
                if stack_element in glob_ids:
                    # defer the globals update to an enclosing object's
                    # postprocessing to break reference cycles
                    _postproc[stack_element].append((_setitems, (globs, globs_copy)))
                    break
            else:
                postproc_list.append((_setitems, (globs, globs_copy)))

        closure = obj.__closure__
        state_dict = {}
        # carry over optional function attributes that exist
        for fattrname in ('__doc__', '__kwdefaults__', '__annotations__'):
            fattr = getattr(obj, fattrname, None)
            if fattr is not None:
                state_dict[fattrname] = fattr
        if obj.__qualname__ != obj.__name__:
            state_dict['__qualname__'] = obj.__qualname__
        if '__name__' not in globs or obj.__module__ != globs['__name__']:
            state_dict['__module__'] = obj.__module__

        state = obj.__dict__
        if type(state) is not dict:
            # non-dict __dict__ (e.g. a subclass) is restored via state_dict
            state_dict['__dict__'] = state
            state = None
        if state_dict:
            state = state, state_dict

        _save_with_postproc(pickler, (_create_function, (
            obj.__code__, globs, obj.__name__, obj.__defaults__,
            closure
        ), state), obj=obj, postproc_list=postproc_list)

        # Lift closure cell update to earliest function (#458)
        if _postproc:
            topmost_postproc = next(iter(_postproc.values()), None)
            if closure and topmost_postproc:
                for cell in closure:
                    possible_postproc = (setattr, (cell, 'cell_contents', obj))
                    try:
                        topmost_postproc.remove(possible_postproc)
                    except ValueError:
                        continue

                    # Change the value of the cell
                    pickler.save_reduce(*possible_postproc)
                    # pop None created by calling preprocessing step off stack
                    pickler.write(POP)

        logger.trace(pickler, "# F1")
    else:
        # F2: the function is importable by name -- save a global reference.
        logger.trace(pickler, "F2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# F2")
    return
2041
+
2042
if HAS_CTYPES and hasattr(ctypes, 'pythonapi'):
    # Bind the PyCapsule C-API entry points through ctypes so capsules can
    # be inspected and (approximately) reconstructed.
    _PyCapsule_New = ctypes.pythonapi.PyCapsule_New
    _PyCapsule_New.argtypes = (ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p)
    _PyCapsule_New.restype = ctypes.py_object
    _PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer
    _PyCapsule_GetPointer.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_GetPointer.restype = ctypes.c_void_p
    _PyCapsule_GetDestructor = ctypes.pythonapi.PyCapsule_GetDestructor
    _PyCapsule_GetDestructor.argtypes = (ctypes.py_object,)
    _PyCapsule_GetDestructor.restype = ctypes.c_void_p
    _PyCapsule_GetContext = ctypes.pythonapi.PyCapsule_GetContext
    _PyCapsule_GetContext.argtypes = (ctypes.py_object,)
    _PyCapsule_GetContext.restype = ctypes.c_void_p
    _PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName
    _PyCapsule_GetName.argtypes = (ctypes.py_object,)
    _PyCapsule_GetName.restype = ctypes.c_char_p
    _PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid
    _PyCapsule_IsValid.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_IsValid.restype = ctypes.c_bool
    _PyCapsule_SetContext = ctypes.pythonapi.PyCapsule_SetContext
    _PyCapsule_SetContext.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetDestructor = ctypes.pythonapi.PyCapsule_SetDestructor
    _PyCapsule_SetDestructor.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetName = ctypes.pythonapi.PyCapsule_SetName
    _PyCapsule_SetName.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_SetPointer = ctypes.pythonapi.PyCapsule_SetPointer
    _PyCapsule_SetPointer.argtypes = (ctypes.py_object, ctypes.c_void_p)
    #from _socket import CAPI as _testcapsule
    # Create a throwaway capsule purely to obtain the (otherwise unexposed)
    # PyCapsule type object.
    _testcapsule_name = b'dill._dill._testcapsule'
    _testcapsule = _PyCapsule_New(
        ctypes.cast(_PyCapsule_New, ctypes.c_void_p),
        ctypes.c_char_p(_testcapsule_name),
        None
    )
    PyCapsuleType = type(_testcapsule)
    @register(PyCapsuleType)
    def save_capsule(pickler, obj):
        """Pickle a PyCapsule by recording its raw pointer, name, context and
        destructor.  NOTE: the pointed-to C data itself is NOT serialized."""
        logger.trace(pickler, "Cap: %s", obj)
        name = _PyCapsule_GetName(obj)
        #warnings.warn('Pickling a PyCapsule (%s) does not pickle any C data structures and could cause segmentation faults or other memory errors when unpickling.' % (name,), PicklingWarning)
        pointer = _PyCapsule_GetPointer(obj, name)
        context = _PyCapsule_GetContext(obj)
        destructor = _PyCapsule_GetDestructor(obj)
        pickler.save_reduce(_create_capsule, (pointer, name, context, destructor), obj=obj)
        logger.trace(pickler, "# Cap")
    # expose the capsule type through dill's type maps
    _incedental_reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _incedental_types.add(PyCapsuleType)
else:
    # no C API available (e.g. PyPy): capsules are unsupported
    _testcapsule = None
2092
+
2093
+
2094
#############################
# A quick fix for issue #500
# This should be removed when a better solution is found.
# The dataclasses module uses private sentinel singletons; pickle each one
# as a GLOBAL reference back into the dataclasses module so identity is
# preserved across the round trip.

if hasattr(dataclasses, "_HAS_DEFAULT_FACTORY_CLASS"):
    @register(dataclasses._HAS_DEFAULT_FACTORY_CLASS)
    def save_dataclasses_HAS_DEFAULT_FACTORY_CLASS(pickler, obj):
        """Pickle the dataclasses._HAS_DEFAULT_FACTORY sentinel by reference."""
        logger.trace(pickler, "DcHDF: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\n_HAS_DEFAULT_FACTORY\n")
        logger.trace(pickler, "# DcHDF")

if hasattr(dataclasses, "MISSING"):
    @register(type(dataclasses.MISSING))
    def save_dataclasses_MISSING_TYPE(pickler, obj):
        """Pickle the dataclasses.MISSING sentinel by reference."""
        logger.trace(pickler, "DcM: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\nMISSING\n")
        logger.trace(pickler, "# DcM")

if hasattr(dataclasses, "KW_ONLY"):
    @register(type(dataclasses.KW_ONLY))
    def save_dataclasses_KW_ONLY_TYPE(pickler, obj):
        """Pickle the dataclasses.KW_ONLY sentinel by reference."""
        logger.trace(pickler, "DcKWO: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\nKW_ONLY\n")
        logger.trace(pickler, "# DcKWO")

if hasattr(dataclasses, "_FIELD_BASE"):
    @register(dataclasses._FIELD_BASE)
    def save_dataclasses_FIELD_BASE(pickler, obj):
        """Pickle _FIELD/_FIELD_CLASSVAR/_FIELD_INITVAR sentinels by name."""
        logger.trace(pickler, "DcFB: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\n" + obj.name.encode() + b"\n")
        logger.trace(pickler, "# DcFB")

#############################
2127
+
2128
# quick sanity checking
def pickles(obj,exact=False,safe=False,**kwds):
    """
    Quick check if object pickles with dill.

    If *exact=True* then an equality test is done to check if the reconstructed
    object matches the original object.

    If *safe=True* then any exception will raised in copy signal that the
    object is not picklable, otherwise only pickling errors will be trapped.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    if safe: exceptions = (Exception,) # RuntimeError, ValueError
    else:
        exceptions = (TypeError, AssertionError, NotImplementedError, PicklingError, UnpicklingError)
    try:
        # round-trip through dill (dumps + loads)
        pik = copy(obj, **kwds)
        #FIXME: should check types match first, then check content if "exact"
        try:
            # numpy-style arrays: compare via .all() to avoid ambiguous truth
            #FIXME: should be "(pik == obj).all()" for numpy comparison, though that'll fail if shapes differ
            result = bool(pik.all() == obj.all())
        except (AttributeError, TypeError):
            # suppress comparison warnings, then undo the filter we added
            warnings.filterwarnings('ignore') #FIXME: be specific
            result = pik == obj
            if warnings.filters: del warnings.filters[0]
        if hasattr(result, 'toarray'): # for unusual types like sparse matrix
            result = result.toarray().all()
        if result: return True
        if not exact:
            # fall back to a type-level comparison
            result = type(pik) == type(obj)
            if result: return result
            # class instances might have been dumped with byref=False
            return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType?
        return False
    except exceptions:
        return False
2165
+
2166
def check(obj, *args, **kwds):
    """
    Check pickling of an object across another process.

    *python* is the path to the python interpreter (defaults to sys.executable)

    Set *verbose=True* to print the unpickled object in the other process.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.

    Raises TypeError if *python* is not a string.
    """
    # == undocumented ==
    # python -- the string path or executable name of the selected python
    # verbose -- if True, be verbose about printing warning messages
    # all other args and kwds are passed to dill.dumps #FIXME: ignore on load
    verbose = kwds.pop('verbose', False)
    python = kwds.pop('python', None)
    if python is None:
        import sys
        python = sys.executable
    # BUG FIX: a bare ``isinstance(python, str)`` discarded its result and
    # validated nothing; actually enforce the type check.
    if not isinstance(python, str):
        raise TypeError("'%s' is not a valid python interpreter" % str(python))
    import subprocess
    fail = True
    try:
        _obj = dumps(obj, *args, **kwds)
        fail = False
    finally:
        if fail and verbose:
            print("DUMP FAILED")
    #FIXME: fails if python interpreter path contains spaces
    # Use the following instead (which also processes the 'ignore' keyword):
    # ignore = kwds.pop('ignore', None)
    # unpickle = "dill.loads(%s, ignore=%s)"%(repr(_obj), repr(ignore))
    # cmd = [python, "-c", "import dill; print(%s)"%unpickle]
    # msg = "SUCCESS" if not subprocess.call(cmd) else "LOAD FAILED"
    msg = "%s -c import dill; print(dill.loads(%s))" % (python, repr(_obj))
    msg = "SUCCESS" if not subprocess.call(msg.split(None,2)) else "LOAD FAILED"
    if verbose:
        print(msg)
    return
2206
+
2207
+ # use to protect against missing attributes
2208
+ def is_dill(pickler, child=None):
2209
+ "check the dill-ness of your pickler"
2210
+ if child is False or not hasattr(pickler.__class__, 'mro'):
2211
+ return 'dill' in pickler.__module__
2212
+ return Pickler in pickler.__class__.mro()
2213
+
2214
def _extend():
    """extend pickle with all of dill's registered types"""
    # need to have pickle not choke on _main_module?  use is_dill(pickler)
    for t, func in Pickler.dispatch.items():
        try:
            StockPickler.dispatch[t] = func
        except Exception: #TypeError, PicklingError, UnpicklingError
            # BUG FIX: this branch referenced an undefined name 'pickler'
            # (logger.trace(pickler, ...)), which would raise NameError if it
            # ever fired; log the skipped type through the module logger.
            logger.info("skip: %s", t)
    return

del diff, _use_diff, use_diff
2225
+
2226
+ # EOF
.venv/lib/python3.11/site-packages/dill/_objects.py ADDED
@@ -0,0 +1,541 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ all Python Standard Library objects (currently: CH 1-15 @ 2.7)
10
+ and some other common objects (i.e. numpy.ndarray)
11
+ """
12
+
13
+ __all__ = ['registered','failures','succeeds']
14
+
15
+ # helper imports
16
+ import warnings; warnings.filterwarnings("ignore", category=DeprecationWarning)
17
+ import sys
18
+ import queue as Queue
19
+ #import dbm as anydbm #XXX: delete foo
20
+ from io import BytesIO as StringIO
21
+ import re
22
+ import array
23
+ import collections
24
+ import codecs
25
+ import struct
26
+ import dataclasses
27
+ import datetime
28
+ import calendar
29
+ import weakref
30
+ import pprint
31
+ import decimal
32
+ import numbers
33
+ import functools
34
+ import itertools
35
+ import operator
36
+ import tempfile
37
+ import shelve
38
+ import zlib
39
+ import gzip
40
+ import zipfile
41
+ import tarfile
42
+ import csv
43
+ import hashlib
44
+ import hmac
45
+ import os
46
+ import logging
47
+ import logging.handlers
48
+ import optparse
49
+ #import __hello__
50
+ import threading
51
+ import socket
52
+ import contextlib
53
+ try:
54
+ import bz2
55
+ import sqlite3
56
+ import dbm.ndbm as dbm
57
+ HAS_ALL = True
58
+ except ImportError: # Ubuntu
59
+ HAS_ALL = False
60
+ try:
61
+ #import curses
62
+ #from curses import textpad, panel
63
+ HAS_CURSES = True
64
+ except ImportError: # Windows
65
+ HAS_CURSES = False
66
+ try:
67
+ import ctypes
68
+ HAS_CTYPES = True
69
+ # if using `pypy`, pythonapi is not found
70
+ IS_PYPY = not hasattr(ctypes, 'pythonapi')
71
+ except ImportError: # MacPorts
72
+ HAS_CTYPES = False
73
+ IS_PYPY = False
74
+
75
+ IS_PYODIDE = sys.platform == 'emscripten'
76
+
77
# helper objects
class _class:
    # fixture: class with a plain instance method (for MethodType et al.)
    def _method(self):
        pass
#    @classmethod
#    def _clsmethod(cls): #XXX: test me
#        pass
#    @staticmethod
#    def _static(self): #XXX: test me
#        pass
class _class2:
    # fixture: class whose instances are callable
    def __call__(self):
        pass
_instance2 = _class2()
class _newclass(object):
    # fixture: explicit new-style class with a plain instance method
    def _method(self):
        pass
#    @classmethod
#    def _clsmethod(cls): #XXX: test me
#        pass
#    @staticmethod
#    def _static(self): #XXX: test me
#        pass
class _newclass2(object):
    # fixture: class using __slots__ (yields member-descriptor objects)
    __slots__ = ['descriptor']
102
# fixture: generator function
def _function(x): yield x
# fixture: produce a live (exception instance, traceback) pair; the bare
# ``raise`` with no active exception raises RuntimeError, which is caught
# immediately and captured via sys.exc_info()
def _function2():
    try: raise
    except Exception:
        from sys import exc_info
        e, er, tb = exc_info()
        return er, tb
109
if HAS_CTYPES:
    # fixture: self-referential ctypes Structure; _fields_ is assigned after
    # the class statement so the POINTER can reference the class itself
    class _Struct(ctypes.Structure):
        pass
    _Struct._fields_ = [("_field", ctypes.c_int),("next", ctypes.POINTER(_Struct))]
_filedescrip, _tempfile = tempfile.mkstemp('r') # deleted in cleanup
if sys.hexversion < 0x30d00a1:
    _tmpf = tempfile.TemporaryFile('w') # emits OSError 9 in python 3.13
else:
    _tmpf = tempfile.NamedTemporaryFile('w').file # for > python 3.9
118
+
119
# Registries of example objects, keyed by a descriptive type name.  The short
# aliases (d, x, a) are used throughout the assignments below.
# objects used by dill for type declaration
registered = d = {}
# objects dill fails to pickle
failures = x = {}
# all other type objects
succeeds = a = {}
125
+
126
+ # types module (part of CH 8)
127
+ a['BooleanType'] = bool(1)
128
+ a['BuiltinFunctionType'] = len
129
+ a['BuiltinMethodType'] = a['BuiltinFunctionType']
130
+ a['BytesType'] = _bytes = codecs.latin_1_encode('\x00')[0] # bytes(1)
131
+ a['ClassType'] = _class
132
+ a['ComplexType'] = complex(1)
133
+ a['DictType'] = _dict = {}
134
+ a['DictionaryType'] = a['DictType']
135
+ a['FloatType'] = float(1)
136
+ a['FunctionType'] = _function
137
+ a['InstanceType'] = _instance = _class()
138
+ a['IntType'] = _int = int(1)
139
+ a['ListType'] = _list = []
140
+ a['NoneType'] = None
141
+ a['ObjectType'] = object()
142
+ a['StringType'] = _str = str(1)
143
+ a['TupleType'] = _tuple = ()
144
+ a['TypeType'] = type
145
+ a['LongType'] = _int
146
+ a['UnicodeType'] = _str
147
+ # built-in constants (CH 4)
148
+ a['CopyrightType'] = copyright
149
+ # built-in types (CH 5)
150
+ a['ClassObjectType'] = _newclass # <type 'type'>
151
+ a['ClassInstanceType'] = _newclass() # <type 'class'>
152
+ a['SetType'] = _set = set()
153
+ a['FrozenSetType'] = frozenset()
154
+ # built-in exceptions (CH 6)
155
+ a['ExceptionType'] = _exception = _function2()[0]
156
+ # string services (CH 7)
157
+ a['SREPatternType'] = _srepattern = re.compile('')
158
+ # data types (CH 8)
159
+ a['ArrayType'] = array.array("f")
160
+ a['DequeType'] = collections.deque([0])
161
+ a['DefaultDictType'] = collections.defaultdict(_function, _dict)
162
+ a['TZInfoType'] = datetime.tzinfo()
163
+ a['DateTimeType'] = datetime.datetime.today()
164
+ a['CalendarType'] = calendar.Calendar()
165
+ # numeric and mathematical types (CH 9)
166
+ a['DecimalType'] = decimal.Decimal(1)
167
+ a['CountType'] = itertools.count(0)
168
+ # data compression and archiving (CH 12)
169
+ a['TarInfoType'] = tarfile.TarInfo()
170
+ # generic operating system services (CH 15)
171
+ a['LoggerType'] = _logger = logging.getLogger()
172
+ a['FormatterType'] = logging.Formatter() # pickle ok
173
+ a['FilterType'] = logging.Filter() # pickle ok
174
+ a['LogRecordType'] = logging.makeLogRecord(_dict) # pickle ok
175
+ a['OptionParserType'] = _oparser = optparse.OptionParser() # pickle ok
176
+ a['OptionGroupType'] = optparse.OptionGroup(_oparser,"foo") # pickle ok
177
+ a['OptionType'] = optparse.Option('--foo') # pickle ok
178
+ if HAS_CTYPES:
179
+ z = x if IS_PYPY else a
180
+ z['CCharType'] = _cchar = ctypes.c_char()
181
+ z['CWCharType'] = ctypes.c_wchar() # fail == 2.6
182
+ z['CByteType'] = ctypes.c_byte()
183
+ z['CUByteType'] = ctypes.c_ubyte()
184
+ z['CShortType'] = ctypes.c_short()
185
+ z['CUShortType'] = ctypes.c_ushort()
186
+ z['CIntType'] = ctypes.c_int()
187
+ z['CUIntType'] = ctypes.c_uint()
188
+ z['CLongType'] = ctypes.c_long()
189
+ z['CULongType'] = ctypes.c_ulong()
190
+ z['CLongLongType'] = ctypes.c_longlong()
191
+ z['CULongLongType'] = ctypes.c_ulonglong()
192
+ z['CFloatType'] = ctypes.c_float()
193
+ z['CDoubleType'] = ctypes.c_double()
194
+ z['CSizeTType'] = ctypes.c_size_t()
195
+ del z
196
+ a['CLibraryLoaderType'] = ctypes.cdll
197
+ a['StructureType'] = _Struct
198
+ # if not IS_PYPY:
199
+ # a['BigEndianStructureType'] = ctypes.BigEndianStructure()
200
+ #NOTE: also LittleEndianStructureType and UnionType... abstract classes
201
+ #NOTE: remember for ctypesobj.contents creates a new python object
202
+ #NOTE: ctypes.c_int._objects is memberdescriptor for object's __dict__
203
+ #NOTE: base class of all ctypes data types is non-public _CData
204
+
205
+ import fractions
206
+ import io
207
+ from io import StringIO as TextIO
208
+ # built-in functions (CH 2)
209
+ a['ByteArrayType'] = bytearray([1])
210
+ # numeric and mathematical types (CH 9)
211
+ a['FractionType'] = fractions.Fraction()
212
+ a['NumberType'] = numbers.Number()
213
+ # generic operating system services (CH 15)
214
+ a['IOBaseType'] = io.IOBase()
215
+ a['RawIOBaseType'] = io.RawIOBase()
216
+ a['TextIOBaseType'] = io.TextIOBase()
217
+ a['BufferedIOBaseType'] = io.BufferedIOBase()
218
+ a['UnicodeIOType'] = TextIO() # the new StringIO
219
+ a['LoggerAdapterType'] = logging.LoggerAdapter(_logger,_dict) # pickle ok
220
+ if HAS_CTYPES:
221
+ z = x if IS_PYPY else a
222
+ z['CBoolType'] = ctypes.c_bool(1)
223
+ z['CLongDoubleType'] = ctypes.c_longdouble()
224
+ del z
225
+ import argparse
226
+ # data types (CH 8)
227
+ a['OrderedDictType'] = collections.OrderedDict(_dict)
228
+ a['CounterType'] = collections.Counter(_dict)
229
+ if HAS_CTYPES:
230
+ z = x if IS_PYPY else a
231
+ z['CSSizeTType'] = ctypes.c_ssize_t()
232
+ del z
233
+ # generic operating system services (CH 15)
234
+ a['NullHandlerType'] = logging.NullHandler() # pickle ok # new 2.7
235
+ a['ArgParseFileType'] = argparse.FileType() # pickle ok
236
+
237
+ # -- pickle fails on all below here -----------------------------------------
238
+ # types module (part of CH 8)
239
+ a['CodeType'] = compile('','','exec')
240
+ a['DictProxyType'] = type.__dict__
241
+ a['DictProxyType2'] = _newclass.__dict__
242
+ a['EllipsisType'] = Ellipsis
243
+ a['ClosedFileType'] = open(os.devnull, 'wb', buffering=0).close()
244
+ a['GetSetDescriptorType'] = array.array.typecode
245
+ a['LambdaType'] = _lambda = lambda x: lambda y: x #XXX: works when not imported!
246
+ a['MemberDescriptorType'] = _newclass2.descriptor
247
+ if not IS_PYPY:
248
+ a['MemberDescriptorType2'] = datetime.timedelta.days
249
+ a['MethodType'] = _method = _class()._method #XXX: works when not imported!
250
+ a['ModuleType'] = datetime
251
+ a['NotImplementedType'] = NotImplemented
252
+ a['SliceType'] = slice(1)
253
+ a['UnboundMethodType'] = _class._method #XXX: works when not imported!
254
+ d['TextWrapperType'] = open(os.devnull, 'r') # same as mode='w','w+','r+'
255
+ if not IS_PYODIDE:
256
+ d['BufferedRandomType'] = open(os.devnull, 'r+b') # same as mode='w+b'
257
+ d['BufferedReaderType'] = open(os.devnull, 'rb') # (default: buffering=-1)
258
+ d['BufferedWriterType'] = open(os.devnull, 'wb')
259
+ try: # oddities: deprecated
260
+ from _pyio import open as _open
261
+ d['PyTextWrapperType'] = _open(os.devnull, 'r', buffering=-1)
262
+ if not IS_PYODIDE:
263
+ d['PyBufferedRandomType'] = _open(os.devnull, 'r+b', buffering=-1)
264
+ d['PyBufferedReaderType'] = _open(os.devnull, 'rb', buffering=-1)
265
+ d['PyBufferedWriterType'] = _open(os.devnull, 'wb', buffering=-1)
266
+ except ImportError:
267
+ pass
268
+ # other (concrete) object types
269
+ z = d if sys.hexversion < 0x30800a2 else a
270
+ z['CellType'] = (_lambda)(0).__closure__[0]
271
+ del z
272
+ a['XRangeType'] = _xrange = range(1)
273
+ a['MethodDescriptorType'] = type.__dict__['mro']
274
+ a['WrapperDescriptorType'] = type.__repr__
275
+ #a['WrapperDescriptorType2'] = type.__dict__['__module__']#XXX: GetSetDescriptor
276
+ a['ClassMethodDescriptorType'] = type.__dict__['__prepare__']
277
+ # built-in functions (CH 2)
278
+ _methodwrap = (1).__lt__
279
+ a['MethodWrapperType'] = _methodwrap
280
+ a['StaticMethodType'] = staticmethod(_method)
281
+ a['ClassMethodType'] = classmethod(_method)
282
+ a['PropertyType'] = property()
283
+ d['SuperType'] = super(Exception, _exception)
284
+ # string services (CH 7)
285
+ _in = _bytes
286
+ a['InputType'] = _cstrI = StringIO(_in)
287
+ a['OutputType'] = _cstrO = StringIO()
288
+ # data types (CH 8)
289
+ a['WeakKeyDictionaryType'] = weakref.WeakKeyDictionary()
290
+ a['WeakValueDictionaryType'] = weakref.WeakValueDictionary()
291
+ a['ReferenceType'] = weakref.ref(_instance)
292
+ a['DeadReferenceType'] = weakref.ref(_class())
293
+ a['ProxyType'] = weakref.proxy(_instance)
294
+ a['DeadProxyType'] = weakref.proxy(_class())
295
+ a['CallableProxyType'] = weakref.proxy(_instance2)
296
+ a['DeadCallableProxyType'] = weakref.proxy(_class2())
297
+ a['QueueType'] = Queue.Queue()
298
+ # numeric and mathematical types (CH 9)
299
+ d['PartialType'] = functools.partial(int,base=2)
300
+ a['IzipType'] = zip('0','1')
301
+ a['ChainType'] = itertools.chain('0','1')
302
+ d['ItemGetterType'] = operator.itemgetter(0)
303
+ d['AttrGetterType'] = operator.attrgetter('__repr__')
304
+ # file and directory access (CH 10)
305
+ _fileW = _cstrO
306
+ # data persistence (CH 11)
307
+ if HAS_ALL:
308
+ x['ConnectionType'] = _conn = sqlite3.connect(':memory:')
309
+ x['CursorType'] = _conn.cursor()
310
+ a['ShelveType'] = shelve.Shelf({})
311
+ # data compression and archiving (CH 12)
312
+ if HAS_ALL:
313
+ x['BZ2FileType'] = bz2.BZ2File(os.devnull)
314
+ x['BZ2CompressorType'] = bz2.BZ2Compressor()
315
+ x['BZ2DecompressorType'] = bz2.BZ2Decompressor()
316
+ #x['ZipFileType'] = _zip = zipfile.ZipFile(os.devnull,'w')
317
+ #_zip.write(_tempfile,'x') [causes annoying warning/error printed on import]
318
+ #a['ZipInfoType'] = _zip.getinfo('x')
319
+ a['TarFileType'] = tarfile.open(fileobj=_fileW,mode='w')
320
+ # file formats (CH 13)
321
+ x['DialectType'] = csv.get_dialect('excel')
322
+ if sys.hexversion < 0x30d00a1:
323
+ import xdrlib
324
+ a['PackerType'] = xdrlib.Packer()
325
+ # optional operating system services (CH 16)
326
+ a['LockType'] = threading.Lock()
327
+ a['RLockType'] = threading.RLock()
328
+ # generic operating system services (CH 15) # also closed/open and r/w/etc...
329
+ a['NamedLoggerType'] = _logger = logging.getLogger(__name__)
330
+ #a['FrozenModuleType'] = __hello__ #FIXME: prints "Hello world..."
331
+ # interprocess communication (CH 17)
332
+ x['SocketType'] = _socket = socket.socket()
333
+ x['SocketPairType'] = socket.socketpair()[0]
334
+ # python runtime services (CH 27)
335
+ a['GeneratorContextManagerType'] = contextlib.contextmanager(max)([1])
336
+
337
+ try: # ipython
338
+ __IPYTHON__ is True # is ipython
339
+ except NameError:
340
+ # built-in constants (CH 4)
341
+ a['QuitterType'] = quit
342
+ d['ExitType'] = a['QuitterType']
343
+ try: # numpy #FIXME: slow... 0.05 to 0.1 sec to import numpy
344
+ from numpy import ufunc as _numpy_ufunc
345
+ from numpy import array as _numpy_array
346
+ from numpy import int32 as _numpy_int32
347
+ a['NumpyUfuncType'] = _numpy_ufunc
348
+ a['NumpyArrayType'] = _numpy_array
349
+ a['NumpyInt32Type'] = _numpy_int32
350
+ except ImportError:
351
+ pass
352
+ # numeric and mathematical types (CH 9)
353
+ a['ProductType'] = itertools.product('0','1')
354
+ # generic operating system services (CH 15)
355
+ a['FileHandlerType'] = logging.FileHandler(os.devnull)
356
+ a['RotatingFileHandlerType'] = logging.handlers.RotatingFileHandler(os.devnull)
357
+ a['SocketHandlerType'] = logging.handlers.SocketHandler('localhost',514)
358
+ a['MemoryHandlerType'] = logging.handlers.MemoryHandler(1)
359
+ # data types (CH 8)
360
+ a['WeakSetType'] = weakref.WeakSet() # 2.7
361
+ # generic operating system services (CH 15) [errors when dill is imported]
362
+ #a['ArgumentParserType'] = _parser = argparse.ArgumentParser('PROG')
363
+ #a['NamespaceType'] = _parser.parse_args() # pickle ok
364
+ #a['SubParsersActionType'] = _parser.add_subparsers()
365
+ #a['MutuallyExclusiveGroupType'] = _parser.add_mutually_exclusive_group()
366
+ #a['ArgumentGroupType'] = _parser.add_argument_group()
367
+
368
+ # -- dill fails in some versions below here ---------------------------------
369
+ # types module (part of CH 8)
370
+ d['FileType'] = open(os.devnull, 'rb', buffering=0) # same 'wb','wb+','rb+'
371
+ # built-in functions (CH 2)
372
+ # Iterators:
373
+ a['ListIteratorType'] = iter(_list) # empty vs non-empty
374
+ a['SetIteratorType'] = iter(_set) #XXX: empty vs non-empty #FIXME: list_iterator
375
+ a['TupleIteratorType']= iter(_tuple) # empty vs non-empty
376
+ a['XRangeIteratorType'] = iter(_xrange) # empty vs non-empty
377
+ a["BytesIteratorType"] = iter(b'')
378
+ a["BytearrayIteratorType"] = iter(bytearray(b''))
379
+ z = x if IS_PYPY else a
380
+ z["CallableIteratorType"] = iter(iter, None)
381
+ del z
382
+ x["MemoryIteratorType"] = iter(memoryview(b''))
383
+ a["ListReverseiteratorType"] = reversed([])
384
+ X = a['OrderedDictType']
385
+ d["OdictKeysType"] = X.keys()
386
+ d["OdictValuesType"] = X.values()
387
+ d["OdictItemsType"] = X.items()
388
+ a["OdictIteratorType"] = iter(X.keys()) #FIXME: list_iterator
389
+ del X
390
+ #FIXME: list_iterator
391
+ a['DictionaryItemIteratorType'] = iter(type.__dict__.items())
392
+ a['DictionaryKeyIteratorType'] = iter(type.__dict__.keys())
393
+ a['DictionaryValueIteratorType'] = iter(type.__dict__.values())
394
+ if sys.hexversion >= 0x30800a0:
395
+ a["DictReversekeyiteratorType"] = reversed({}.keys())
396
+ a["DictReversevalueiteratorType"] = reversed({}.values())
397
+ a["DictReverseitemiteratorType"] = reversed({}.items())
398
+
399
+ try:
400
+ import symtable
401
+ #FIXME: fails to pickle
402
+ x["SymtableEntryType"] = symtable.symtable("", "string", "exec")._table
403
+ except ImportError:
404
+ pass
405
+
406
+ if sys.hexversion >= 0x30a00a0 and not IS_PYPY:
407
+ x['LineIteratorType'] = compile('3', '', 'eval').co_lines()
408
+
409
+ if sys.hexversion >= 0x30b00b0:
410
+ from types import GenericAlias
411
+ d["GenericAliasIteratorType"] = iter(GenericAlias(list, (int,)))
412
+ x['PositionsIteratorType'] = compile('3', '', 'eval').co_positions()
413
+
414
+ # data types (CH 8)
415
+ a['PrettyPrinterType'] = pprint.PrettyPrinter()
416
+ # numeric and mathematical types (CH 9)
417
+ a['CycleType'] = itertools.cycle('0')
418
+ # file and directory access (CH 10)
419
+ a['TemporaryFileType'] = _tmpf
420
+ # data compression and archiving (CH 12)
421
+ x['GzipFileType'] = gzip.GzipFile(fileobj=_fileW)
422
+ # generic operating system services (CH 15)
423
+ a['StreamHandlerType'] = logging.StreamHandler()
424
+ # numeric and mathematical types (CH 9)
425
+ a['PermutationsType'] = itertools.permutations('0')
426
+ a['CombinationsType'] = itertools.combinations('0',1)
427
+ a['RepeatType'] = itertools.repeat(0)
428
+ a['CompressType'] = itertools.compress('0',[1])
429
+ #XXX: ...and etc
430
+
431
+ # -- dill fails on all below here -------------------------------------------
432
+ # types module (part of CH 8)
433
+ x['GeneratorType'] = _generator = _function(1) #XXX: priority
434
+ x['FrameType'] = _generator.gi_frame #XXX: inspect.currentframe()
435
+ x['TracebackType'] = _function2()[1] #(see: inspect.getouterframes,getframeinfo)
436
+ # other (concrete) object types
437
+ # (also: Capsule / CObject ?)
438
+ # built-in functions (CH 2)
439
+ # built-in types (CH 5)
440
+ # string services (CH 7)
441
+ x['StructType'] = struct.Struct('c')
442
+ x['CallableIteratorType'] = _srepattern.finditer('')
443
+ x['SREMatchType'] = _srepattern.match('')
444
+ x['SREScannerType'] = _srepattern.scanner('')
445
+ x['StreamReader'] = codecs.StreamReader(_cstrI) #XXX: ... and etc
446
+ # python object persistence (CH 11)
447
+ # x['DbShelveType'] = shelve.open('foo','n')#,protocol=2) #XXX: delete foo
448
+ if HAS_ALL:
449
+ z = a if IS_PYPY else x
450
+ z['DbmType'] = dbm.open(_tempfile,'n')
451
+ del z
452
+ # x['DbCursorType'] = _dbcursor = anydbm.open('foo','n') #XXX: delete foo
453
+ # x['DbType'] = _dbcursor.db
454
+ # data compression and archiving (CH 12)
455
+ x['ZlibCompressType'] = zlib.compressobj()
456
+ x['ZlibDecompressType'] = zlib.decompressobj()
457
+ # file formats (CH 13)
458
+ x['CSVReaderType'] = csv.reader(_cstrI)
459
+ x['CSVWriterType'] = csv.writer(_cstrO)
460
+ x['CSVDictReaderType'] = csv.DictReader(_cstrI)
461
+ x['CSVDictWriterType'] = csv.DictWriter(_cstrO,{})
462
+ # cryptographic services (CH 14)
463
+ x['HashType'] = hashlib.md5()
464
+ if (sys.hexversion < 0x30800a1):
465
+ x['HMACType'] = hmac.new(_in)
466
+ else:
467
+ x['HMACType'] = hmac.new(_in, digestmod='md5')
468
+ # generic operating system services (CH 15)
469
+ if HAS_CURSES: pass
470
+ #x['CursesWindowType'] = _curwin = curses.initscr() #FIXME: messes up tty
471
+ #x['CursesTextPadType'] = textpad.Textbox(_curwin)
472
+ #x['CursesPanelType'] = panel.new_panel(_curwin)
473
+ if HAS_CTYPES:
474
+ x['CCharPType'] = ctypes.c_char_p()
475
+ x['CWCharPType'] = ctypes.c_wchar_p()
476
+ x['CVoidPType'] = ctypes.c_void_p()
477
+ if sys.platform[:3] == 'win':
478
+ x['CDLLType'] = _cdll = ctypes.cdll.msvcrt
479
+ else:
480
+ x['CDLLType'] = _cdll = ctypes.CDLL(None)
481
+ if not IS_PYPY:
482
+ x['PyDLLType'] = _pydll = ctypes.pythonapi
483
+ x['FuncPtrType'] = _cdll._FuncPtr()
484
+ x['CCharArrayType'] = ctypes.create_string_buffer(1)
485
+ x['CWCharArrayType'] = ctypes.create_unicode_buffer(1)
486
+ x['CParamType'] = ctypes.byref(_cchar)
487
+ x['LPCCharType'] = ctypes.pointer(_cchar)
488
+ x['LPCCharObjType'] = _lpchar = ctypes.POINTER(ctypes.c_char)
489
+ x['NullPtrType'] = _lpchar()
490
+ x['NullPyObjectType'] = ctypes.py_object()
491
+ x['PyObjectType'] = ctypes.py_object(lambda :None)
492
+ z = a if IS_PYPY else x
493
+ z['FieldType'] = _field = _Struct._field
494
+ z['CFUNCTYPEType'] = _cfunc = ctypes.CFUNCTYPE(ctypes.c_char)
495
+ if sys.hexversion < 0x30c00b3:
496
+ x['CFunctionType'] = _cfunc(str)
497
+ del z
498
+ # numeric and mathematical types (CH 9)
499
+ a['MethodCallerType'] = operator.methodcaller('mro') # 2.6
500
+ # built-in types (CH 5)
501
+ x['MemoryType'] = memoryview(_in) # 2.7
502
+ x['MemoryType2'] = memoryview(bytearray(_in)) # 2.7
503
+ d['DictItemsType'] = _dict.items() # 2.7
504
+ d['DictKeysType'] = _dict.keys() # 2.7
505
+ d['DictValuesType'] = _dict.values() # 2.7
506
+ # generic operating system services (CH 15)
507
+ a['RawTextHelpFormatterType'] = argparse.RawTextHelpFormatter('PROG')
508
+ a['RawDescriptionHelpFormatterType'] = argparse.RawDescriptionHelpFormatter('PROG')
509
+ a['ArgDefaultsHelpFormatterType'] = argparse.ArgumentDefaultsHelpFormatter('PROG')
510
+ z = a if IS_PYPY else x
511
+ z['CmpKeyType'] = _cmpkey = functools.cmp_to_key(_methodwrap) # 2.7, >=3.2
512
+ z['CmpKeyObjType'] = _cmpkey('0') #2.7, >=3.2
513
+ del z
514
+ # oddities: removed, etc
515
+ x['BufferType'] = x['MemoryType']
516
+
517
+ from dill._dill import _testcapsule
518
+ if _testcapsule is not None:
519
+ d['PyCapsuleType'] = _testcapsule
520
+ del _testcapsule
521
+
522
+ if hasattr(dataclasses, '_HAS_DEFAULT_FACTORY'):
523
+ a['DataclassesHasDefaultFactoryType'] = dataclasses._HAS_DEFAULT_FACTORY
524
+
525
+ if hasattr(dataclasses, 'MISSING'):
526
+ a['DataclassesMissingType'] = dataclasses.MISSING
527
+
528
+ if hasattr(dataclasses, 'KW_ONLY'):
529
+ a['DataclassesKWOnlyType'] = dataclasses.KW_ONLY
530
+
531
+ if hasattr(dataclasses, '_FIELD_BASE'):
532
+ a['DataclassesFieldBaseType'] = dataclasses._FIELD
533
+
534
# -- cleanup ----------------------------------------------------------------
# everything dill registers is also expected to succeed at pickling
a.update(d) # registered also succeed
if sys.platform[:3] == 'win':
    # windows keeps the low-level descriptor open, which blocks the remove below
    os.close(_filedescrip) # required on win32
os.remove(_tempfile)  # temp file created by tempfile.mkstemp above
539
+
540
+
541
+ # EOF
.venv/lib/python3.11/site-packages/dill/_shims.py ADDED
@@ -0,0 +1,193 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Author: Anirudh Vegesana (avegesan@cs.stanford.edu)
5
+ # Copyright (c) 2021-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ Provides shims for compatibility between versions of dill and Python.
10
+
11
+ Compatibility shims should be provided in this file. Here are two simple example
12
+ use cases.
13
+
14
+ Deprecation of constructor function:
15
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
16
+ Assume that we were transitioning _import_module in _dill.py to
17
+ the builtin function importlib.import_module when present.
18
+
19
+ @move_to(_dill)
20
+ def _import_module(import_name):
21
+ ... # code already in _dill.py
22
+
23
+ _import_module = Getattr(importlib, 'import_module', Getattr(_dill, '_import_module', None))
24
+
25
+ The code will attempt to find import_module in the importlib module. If not
26
+ present, it will use the _import_module function in _dill.
27
+
28
+ Emulate new Python behavior in older Python versions:
29
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
30
+ CellType.cell_contents behaves differently in Python 3.6 and 3.7. It is
31
+ read-only in Python 3.6 and writable and deletable in 3.7.
32
+
33
+ if _dill.OLD37 and _dill.HAS_CTYPES and ...:
34
+ @move_to(_dill)
35
+ def _setattr(object, name, value):
36
+ if type(object) is _dill.CellType and name == 'cell_contents':
37
+ _PyCell_Set.argtypes = (ctypes.py_object, ctypes.py_object)
38
+ _PyCell_Set(object, value)
39
+ else:
40
+ setattr(object, name, value)
41
+ ... # more cases below
42
+
43
+ _setattr = Getattr(_dill, '_setattr', setattr)
44
+
45
+ _dill._setattr will be used when present to emulate Python 3.7 functionality in
46
+ older versions of Python while defaulting to the standard setattr in 3.7+.
47
+
48
+ See this PR for the discussion that lead to this system:
49
+ https://github.com/uqfoundation/dill/pull/443
50
+ """
51
+
52
+ import inspect
53
+ import sys
54
+
55
+ _dill = sys.modules['dill._dill']
56
+
57
+
58
class Reduce(object):
    """
    Reduce objects are wrappers used for compatibility enforcement during
    unpickle-time. They should only be used in calls to pickler.save and
    other Reduce objects. They are only evaluated within unpickler.load.

    Pickling a Reduce object makes the two implementations equivalent:

    pickler.save(Reduce(*reduction))

    pickler.save_reduce(*reduction, obj=reduction)
    """
    __slots__ = ['reduction']

    def __new__(cls, *reduction, **kwargs):
        """
        Args:
            *reduction: a tuple that matches the format given here:
                https://docs.python.org/3/library/pickle.html#object.__reduce__
            is_callable: a bool to indicate that the object created by
                unpickling `reduction` is callable. If true, the current Reduce
                is allowed to be used as the function in further save_reduce calls
                or Reduce objects.
        """
        # callable reductions get the _CallableReduce subclass so that
        # pickler.save_reduce accepts them where a function is expected
        target = _CallableReduce if kwargs.get('is_callable', False) else Reduce
        instance = object.__new__(target)
        instance.reduction = reduction
        return instance

    def __repr__(self):
        return 'Reduce%s' % (self.reduction,)

    # Reduce wrappers are immutable stand-ins; copying returns the same object
    def __copy__(self):
        return self # pragma: no cover

    def __deepcopy__(self, memo):
        return self # pragma: no cover

    # pickling a Reduce emits exactly the wrapped reduction tuple
    def __reduce__(self):
        return self.reduction

    def __reduce_ex__(self, protocol):
        return self.__reduce__()
98
+
99
class _CallableReduce(Reduce):
    # A version of Reduce for functions. Used to trick pickler.save_reduce into
    # thinking that Reduce objects of functions are themselves meaningful functions.
    def __call__(self, *args, **kwargs):
        # rebuild the wrapped object from its reduction, then invoke it
        func, f_args = self.__reduce__()[:2]
        return func(*f_args)(*args, **kwargs)
108
+
109
# module-private sentinel: distinguishes "no default supplied" from default=None
__NO_DEFAULT = _dill.Sentinel('Getattr.NO_DEFAULT')

def Getattr(object, name, default=__NO_DEFAULT):
    """
    A Reduce object that represents the getattr operation. When unpickled, the
    Getattr will access an attribute 'name' of 'object' and return the value
    stored there. If the attribute doesn't exist, the default value will be
    returned if present.

    The following statements are equivalent:

    Getattr(collections, 'OrderedDict')
    Getattr(collections, 'spam', None)
    Getattr(*args)

    Reduce(getattr, (collections, 'OrderedDict'))
    Reduce(getattr, (collections, 'spam', None))
    Reduce(getattr, args)

    During unpickling, the first two will result in collections.OrderedDict and
    None respectively because the first attribute exists and the second one does
    not, forcing it to use the default value given in the third argument.
    """

    # only pass the default through to getattr when one was actually given,
    # so a missing attribute raises AttributeError as getattr normally would
    if default is Getattr.NO_DEFAULT:
        reduction = (getattr, (object, name))
    else:
        reduction = (getattr, (object, name, default))

    # a callable default implies the looked-up object is callable too
    return Reduce(*reduction, is_callable=callable(default))

# publish the sentinel as a function attribute, then drop the module-level name
Getattr.NO_DEFAULT = __NO_DEFAULT
del __NO_DEFAULT
142
+
143
def move_to(module, name=None):
    """Decorator factory that publishes the decorated function on *module*.

    The function is stored in the module's namespace under *name* (or its own
    __name__ when *name* is None) and its __module__ is rebound to match, so
    it appears to have been defined there. Returns the function unchanged.
    """
    def decorator(func):
        target = name if name is not None else func.__name__
        module.__dict__[target] = func
        func.__module__ = module.__name__
        return func
    return decorator
153
+
154
def register_shim(name, default):
    """
    An easier to understand and more compact way of "softly" defining a function.
    These two pieces of code are equivalent:

    if _dill.OLD3X:
        def _create_class():
            ...
    _create_class = register_shim('_create_class', types.new_class)

    if _dill.OLD3X:
        @move_to(_dill)
        def _create_class():
            ...
    _create_class = Getattr(_dill, '_create_class', types.new_class)

    Intuitively, it creates a function or object in the versions of dill/python
    that require special reimplementations, and use a core library or default
    implementation if that function or object does not exist.
    """
    # if a special implementation was conditionally defined in this module,
    # move it onto _dill (mirrors what @move_to(_dill) would have done)
    func = globals().get(name)
    if func is not None:
        _dill.__dict__[name] = func
        func.__module__ = _dill.__name__

    # build the same lazy getattr reduction that Getattr(_dill, name, default)
    # would produce, resolved against _dill at unpickle-time
    if default is Getattr.NO_DEFAULT:
        reduction = (getattr, (_dill, name))
    else:
        reduction = (getattr, (_dill, name, default))

    return Reduce(*reduction, is_callable=callable(default))

######################
## Compatibility Shims are defined below
######################

# sentinel for an empty closure cell (no special implementation in this view)
_CELL_EMPTY = register_shim('_CELL_EMPTY', None)

# setattr/delattr replacements; fall back to the builtins when _dill provides
# no version-specific implementation
_setattr = register_shim('_setattr', setattr)
_delattr = register_shim('_delattr', delattr)
.venv/lib/python3.11/site-packages/dill/detect.py ADDED
@@ -0,0 +1,284 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ Methods for detecting objects leading to pickling failures.
10
+ """
11
+
12
+ import dis
13
+ from inspect import ismethod, isfunction, istraceback, isframe, iscode
14
+
15
+ from .pointers import parent, reference, at, parents, children
16
+ from .logger import trace
17
+
18
+ __all__ = ['baditems','badobjects','badtypes','code','errors','freevars',
19
+ 'getmodule','globalvars','nestedcode','nestedglobals','outermost',
20
+ 'referredglobals','referrednested','trace','varnames']
21
+
22
def getmodule(object, _filename=None, force=False):
    """get the module of the object"""
    import inspect
    module = inspect.getmodule(object, _filename)
    if module is not None or not force:
        return module
    # forced fallback: resolve the object's name and check the builtins namespace
    import builtins
    from .source import getname
    name = getname(object, force=True)
    return builtins if name in vars(builtins) else None
31
+
32
def outermost(func): # is analogous to getsource(func,enclosing=True)
    """get outermost enclosing object (i.e. the outer function in a closure)

    NOTE: this is the object-equivalent of getsource(func, enclosing=True)
    """
    # candidates for the enclosing object live in the function's globals
    if ismethod(func):
        _globals = func.__func__.__globals__ or {}
    elif isfunction(func):
        _globals = func.__globals__ or {}
    else:
        return #XXX: or raise? no matches
    _globals = _globals.items()
    # get the enclosing source
    from .source import getsourcelines
    try: lines,lnum = getsourcelines(func, enclosing=True)
    except Exception: #TypeError, IOError
        lines,lnum = [],None
    code = ''.join(lines)
    # get all possible names,objects that are named in the enclosing source
    _locals = ((name,obj) for (name,obj) in _globals if name in code)
    # now only save the objects that generate the enclosing block
    # (an object whose own source equals the enclosing source IS the encloser)
    for name,obj in _locals: #XXX: don't really need 'name'
        try:
            if getsourcelines(obj) == (lines,lnum): return obj
        except Exception: #TypeError, IOError
            pass
    return #XXX: or raise? no matches
59
+
60
def nestedcode(func, recurse=True): #XXX: or return dict of {co_name: co} ?
    """get the code objects for any nested functions (e.g. in a closure)"""
    func = code(func)
    if not iscode(func):
        return [] #XXX: or raise? no matches
    # nested functions live as code objects inside co_consts
    found = set()
    for const in func.co_consts:
        if const is None:
            continue
        child = code(const)
        if not child:
            continue
        found.add(child)
        if recurse:
            # descend into the child for arbitrarily deep nesting
            found.update(nestedcode(child, recurse=True))
    return list(found)
72
+
73
def code(func):
    """get the code object for the given function or method

    NOTE: use dill.source.getsource(CODEOBJ) to get the source code
    """
    # progressively unwrap: method -> function -> code; traceback -> frame -> code
    if ismethod(func):
        func = func.__func__
    if isfunction(func):
        func = func.__code__
    if istraceback(func):
        func = func.tb_frame
    if isframe(func):
        func = func.f_code
    # anything that didn't reduce to a code object yields None
    return func if iscode(func) else None
84
+
85
#XXX: ugly: parse dis.dis for name after "<code object" in line and in globals?
def referrednested(func, recurse=True): #XXX: return dict of {__name__: obj} ?
    """get functions defined inside of func (e.g. inner functions in a closure)

    NOTE: results may differ if the function has been executed or not.
    If len(nestedcode(func)) > len(referrednested(func)), try calling func().
    If possible, python builds code objects, but delays building functions
    until func() is called.
    """
    import gc
    funcs = set()
    # get the code objects, and try to track down by reference
    for co in nestedcode(func, recurse):
        # look for function objects that refer to the code object
        # (gc.get_referrers returns every live object holding a reference)
        for obj in gc.get_referrers(co):
            # get methods
            _ = getattr(obj, '__func__', None) # ismethod
            if getattr(_, '__code__', None) is co: funcs.add(obj)
            # get functions
            elif getattr(obj, '__code__', None) is co: funcs.add(obj)
            # get frame objects
            elif getattr(obj, 'f_code', None) is co: funcs.add(obj)
            # get code objects
            elif hasattr(obj, 'co_code') and obj is co: funcs.add(obj)
    # frameobjs => func.__code__.co_varnames not in func.__code__.co_cellvars
    # funcobjs => func.__code__.co_cellvars not in func.__code__.co_varnames
    # frameobjs are not found, however funcobjs are...
    # (see: test_mixins.quad ... and test_mixins.wtf)
    # after execution, code objects get compiled, and then may be found by gc
    return list(funcs)
115
+
116
+
117
def freevars(func):
    """get objects defined in enclosing code that are referred to by func

    returns a dict of {name:object}"""
    if ismethod(func):
        func = func.__func__
    if not isfunction(func):
        return {}  # only plain functions (or their methods) carry closures
    cells = func.__closure__ or ()
    names = func.__code__.co_freevars
    contents = {}
    # co_freevars and __closure__ are parallel sequences: pair each free
    # variable name with its cell, skipping cells that were never filled
    for name, cell in zip(names, cells):
        try:
            contents[name] = cell.cell_contents
        except ValueError: # cell is empty
            continue
    return contents
137
+
138
+ # thanks to Davies Liu for recursion of globals
139
def nestedglobals(func, recurse=True):
    """get the names of any globals found within func"""
    func = code(func)
    if func is None:
        return []
    import sys
    from .temp import capture
    # LOAD_GLOBAL operands may carry a NULL marker on 3.11a7+ bytecode
    nullable = sys.hexversion >= 0x30b00a7
    names = set()
    with capture('stdout') as stream:
        dis.dis(func) #XXX: dis.dis(None) disassembles last traceback
    for line in stream.getvalue().splitlines():
        if '_GLOBAL' not in line:
            continue
        name = line.split('(')[-1].split(')')[0]
        if nullable:
            name = name.replace('NULL + ', '').replace(' + NULL', '')
        names.add(name)
    if recurse:
        # also search any code objects nested in the constants
        for const in getattr(func, 'co_consts', ()):
            if const and iscode(const):
                names.update(nestedglobals(const, recurse=True))
    return list(names)
160
+
161
def referredglobals(func, recurse=True, builtin=False):
    """get the names of objects in the global scope referred to by func"""
    referred = globalvars(func, recurse, builtin)
    return referred.keys()
164
+
165
def globalvars(func, recurse=True, builtin=False):
    """get objects defined in global scope that are referred to by func

    return a dict of {name:object}

    Args:
        func: a function, method, or code object to inspect
        recurse: if True, also collect globals referenced by nested functions
        builtin: if True, include the builtins module in the search space
    """
    if ismethod(func): func = func.__func__
    if isfunction(func):
        globs = vars(getmodule(sum)).copy() if builtin else {}
        # get references from within closure
        orig_func, func = func, set()
        for obj in orig_func.__closure__ or {}:
            try:
                cell_contents = obj.cell_contents
            except ValueError: # cell is empty
                pass
            else:
                _vars = globalvars(cell_contents, recurse, builtin) or {}
                func.update(_vars) #XXX: (above) be wary of infinte recursion?
                globs.update(_vars)
        # get globals
        globs.update(orig_func.__globals__ or {})
        # get names of references
        if not recurse:
            func.update(orig_func.__code__.co_names)
        else:
            func.update(nestedglobals(orig_func.__code__))
            # find globals for all entries of func
            for key in func.copy(): #XXX: unnecessary...?
                nested_func = globs.get(key)
                if nested_func is orig_func:
                    #func.remove(key) if key in func else None
                    continue #XXX: globalvars(func, False)?
                func.update(globalvars(nested_func, True, builtin))
    elif iscode(func):
        globs = vars(getmodule(sum)).copy() if builtin else {}
        #globs.update(globals())
        if not recurse:
            func = func.co_names # get names
        else:
            orig_func = func.co_name # to stop infinite recursion
            func = set(nestedglobals(func))
            # find globals for all entries of func
            for key in func.copy(): #XXX: unnecessary...?
                # BUGFIX: orig_func is a *string* (co_name); compare by value
                # with '==', not identity with 'is' -- string interning is an
                # implementation detail and must not be relied upon.
                if key == orig_func:
                    #func.remove(key) if key in func else None
                    continue #XXX: globalvars(func, False)?
                nested_func = globs.get(key)
                func.update(globalvars(nested_func, True, builtin))
    else:
        return {}
    #NOTE: if name not in __globals__, then we skip it...
    return dict((name,globs[name]) for name in func if name in globs)
216
+
217
+
218
def varnames(func):
    """get names of variables defined by func

    returns a tuple (local vars, local vars referrenced by nested functions)"""
    codeobj = code(func)
    if not iscode(codeobj):
        return () #XXX: better ((),())? or None?
    return codeobj.co_varnames, codeobj.co_cellvars
226
+
227
+
228
def baditems(obj, exact=False, safe=False): #XXX: obj=globals() ?
    """get items in object that fail to pickle

    Returns a list of the members of obj that fail to pickle; for a
    non-iterable obj, a one-element list holding obj itself if it fails.
    """
    if not hasattr(obj, '__iter__'): # is not iterable
        return [j for j in (badobjects(obj, 0, exact, safe),) if j is not None]
    obj = obj.values() if getattr(obj, 'values', None) else obj
    _obj = [] # can't use a set, as items may be unhashable
    # IDIOM FIX: was a list comprehension used purely for its side effect;
    # an explicit loop states the intent (conditionally accumulate results).
    for item in obj:
        # membership test uses ==, as in the original, since items may be
        # unhashable; it skips items already recorded in the results
        if item not in _obj:
            _obj.append(badobjects(item, 0, exact, safe))
    return [j for j in _obj if j is not None]
236
+
237
+
238
def badobjects(obj, depth=0, exact=False, safe=False):
    """get objects that fail to pickle"""
    from dill import pickles
    if not depth:
        # at depth 0, report obj itself (or None if it pickles cleanly)
        return None if pickles(obj, exact, safe) else obj
    return {attr: badobjects(getattr(obj, attr), depth-1, exact, safe)
            for attr in dir(obj) if not pickles(getattr(obj, attr), exact, safe)}
246
+
247
def badtypes(obj, depth=0, exact=False, safe=False):
    """get types for objects that fail to pickle"""
    from dill import pickles
    if not depth:
        # at depth 0, report obj's type (or None if it pickles cleanly)
        return None if pickles(obj, exact, safe) else type(obj)
    return {attr: badtypes(getattr(obj, attr), depth-1, exact, safe)
            for attr in dir(obj) if not pickles(getattr(obj, attr), exact, safe)}
255
+
256
def errors(obj, depth=0, exact=False, safe=False):
    """get errors for objects that fail to pickle

    Returns the exception raised when round-tripping obj through dill (or
    None on success); with depth > 0, returns a dict mapping attribute names
    of obj to the error found for each unpicklable attribute.
    """
    import sys  # hoisted: was re-imported inside each exception handler
    from dill import pickles, copy
    if not depth:
        try:
            pik = copy(obj)
            if exact:
                assert pik == obj, \
                    "Unpickling produces %s instead of %s" % (pik, obj)
            assert type(pik) == type(obj), \
                "Unpickling produces %s instead of %s" % (type(pik), type(obj))
            return None
        except Exception:
            return sys.exc_info()[1]
    _dict = {}
    for attr in dir(obj):
        try:
            _attr = getattr(obj, attr)
        except Exception:
            # even *accessing* the attribute failed; record that error
            _dict[attr] = sys.exc_info()[1]
            continue
        if not pickles(_attr, exact, safe):
            _dict[attr] = errors(_attr, depth-1, exact, safe)
    return _dict
282
+
283
+
284
+ # EOF
.venv/lib/python3.11/site-packages/dill/logger.py ADDED
@@ -0,0 +1,285 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # -*- coding: utf-8 -*-
3
+ #
4
+ # Author: Leonardo Gama (@leogama)
5
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ Logging utilities for dill.
10
+
11
+ The 'logger' object is dill's top-level logger.
12
+
13
+ The 'adapter' object wraps the logger and implements a 'trace()' method that
14
+ generates a detailed tree-style trace for the pickling call at log level INFO.
15
+
16
+ The 'trace()' function sets and resets dill's logger log level, enabling and
17
+ disabling the pickling trace.
18
+
19
+ The trace shows a tree structure depicting the depth of each object serialized
20
+ *with dill save functions*, but not the ones that use save functions from
21
+ 'pickle._Pickler.dispatch'. If the information is available, it also displays
22
+ the size in bytes that the object contributed to the pickle stream (including
23
+ its child objects). Sample trace output:
24
+
25
+ >>> import dill, dill.tests
26
+ >>> dill.detect.trace(True)
27
+ >>> dill.dump_session(main=dill.tests)
28
+ ┬ M1: <module 'dill.tests' from '.../dill/tests/__init__.py'>
29
+ ├┬ F2: <function _import_module at 0x7f0d2dce1b80>
30
+ │└ # F2 [32 B]
31
+ ├┬ D2: <dict object at 0x7f0d2e98a540>
32
+ │├┬ T4: <class '_frozen_importlib.ModuleSpec'>
33
+ ││└ # T4 [35 B]
34
+ │├┬ D2: <dict object at 0x7f0d2ef0e8c0>
35
+ ││├┬ T4: <class '_frozen_importlib_external.SourceFileLoader'>
36
+ │││└ # T4 [50 B]
37
+ ││├┬ D2: <dict object at 0x7f0d2e988a40>
38
+ │││└ # D2 [84 B]
39
+ ││└ # D2 [413 B]
40
+ │└ # D2 [763 B]
41
+ └ # M1 [813 B]
42
+ """
43
+
44
+ __all__ = ['adapter', 'logger', 'trace']
45
+
46
+ import codecs
47
+ import contextlib
48
+ import locale
49
+ import logging
50
+ import math
51
+ import os
52
+ from functools import partial
53
+ from typing import TextIO, Union
54
+
55
+ import dill
56
+
57
+ # Tree drawing characters: Unicode to ASCII map.
58
+ ASCII_MAP = str.maketrans({"│": "|", "├": "|", "┬": "+", "└": "`"})
59
+
60
+ ## Notes about the design choices ##
61
+
62
+ # Here is some domumentation of the Standard Library's logging internals that
63
+ # can't be found completely in the official documentation. dill's logger is
64
+ # obtained by calling logging.getLogger('dill') and therefore is an instance of
65
+ # logging.getLoggerClass() at the call time. As this is controlled by the user,
66
+ # in order to add some functionality to it it's necessary to use a LoggerAdapter
67
+ # to wrap it, overriding some of the adapter's methods and creating new ones.
68
+ #
69
+ # Basic calling sequence
70
+ # ======================
71
+ #
72
+ # Python's logging functionality can be conceptually divided into five steps:
73
+ # 0. Check logging level -> abort if call level is greater than logger level
74
+ # 1. Gather information -> construct a LogRecord from passed arguments and context
75
+ # 2. Filter (optional) -> discard message if the record matches a filter
76
+ # 3. Format -> format message with args, then format output string with message plus record
77
+ # 4. Handle -> write the formatted string to output as defined in the handler
78
+ #
79
+ # dill.logging.logger.log -> # or logger.info, etc.
80
+ # Logger.log -> \
81
+ # Logger._log -> }- accept 'extra' parameter for custom record entries
82
+ # Logger.makeRecord -> /
83
+ # LogRecord.__init__
84
+ # Logger.handle ->
85
+ # Logger.callHandlers ->
86
+ # Handler.handle ->
87
+ # Filterer.filter ->
88
+ # Filter.filter
89
+ # StreamHandler.emit ->
90
+ # Handler.format ->
91
+ # Formatter.format ->
92
+ # LogRecord.getMessage # does: record.message = msg % args
93
+ # Formatter.formatMessage ->
94
+ # PercentStyle.format # does: self._fmt % vars(record)
95
+ #
96
+ # NOTE: All methods from the second line on are from logging.__init__.py
97
+
98
class TraceAdapter(logging.LoggerAdapter):
    """
    Tracks object tree depth and calculates pickled object size.

    A single instance of this wraps the module's logger, as the logging API
    doesn't allow setting it directly with a custom Logger subclass. The added
    'trace()' method receives a pickle instance as the first argument and
    creates extra values to be added in the LogRecord from it, then calls
    'info()'.

    Usage of logger with 'trace()' method:

    >>> from dill.logger import adapter as logger #NOTE: not dill.logger.logger
    >>> ...
    >>> def save_atype(pickler, obj):
    >>>     logger.trace(pickler, "Message with %s and %r etc. placeholders", 'text', obj)
    >>>     ...
    """
    def __init__(self, logger):
        self.logger = logger
    def addHandler(self, handler):
        # Every handler attached through the adapter gets the trace formatter,
        # which renders the tree-drawing prefix and the size suffix.
        formatter = TraceFormatter("%(prefix)s%(message)s%(suffix)s", handler=handler)
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
    def removeHandler(self, handler):
        self.logger.removeHandler(handler)
    def process(self, msg, kwargs):
        # A no-op override, as we don't have self.extra.
        return msg, kwargs
    def trace_setup(self, pickler):
        # Called by Pickler.dump(). Initializes per-pickler trace state:
        # _trace_depth is the current tree depth (None disables tracing),
        # _size_stack records stream positions to compute per-object sizes.
        if not dill._dill.is_dill(pickler, child=False):
            return
        if self.isEnabledFor(logging.INFO):
            pickler._trace_depth = 1
            pickler._size_stack = []
        else:
            pickler._trace_depth = None
    def trace(self, pickler, msg, *args, **kwargs):
        # Emit one trace line; messages starting with '#' mark the *end* of
        # an object (pop), all others mark the beginning (push).
        if not hasattr(pickler, '_trace_depth'):
            # trace_setup() never ran (e.g. non-dill pickler): plain log
            logger.info(msg, *args, **kwargs)
            return
        if pickler._trace_depth is None:
            # tracing disabled for this pickler
            return
        extra = kwargs.get('extra', {})
        pushed_obj = msg.startswith('#')
        size = None
        try:
            # Streams are not required to be tellable.
            size = pickler._file.tell()
            frame = pickler.framer.current_frame
            try:
                size += frame.tell()
            except AttributeError:
                # PyPy may use a BytesBuilder as frame
                size += len(frame)
        except (AttributeError, TypeError):
            pass
        if size is not None:
            if not pushed_obj:
                # push: remember where this object started in the stream
                pickler._size_stack.append(size)
            else:
                # pop: size contributed = current position - start position
                size -= pickler._size_stack.pop()
            extra['size'] = size
        if pushed_obj:
            pickler._trace_depth -= 1
        extra['depth'] = pickler._trace_depth
        kwargs['extra'] = extra
        self.info(msg, *args, **kwargs)
        if not pushed_obj:
            pickler._trace_depth += 1
169
+
170
class TraceFormatter(logging.Formatter):
    """
    Generates message prefix and suffix from record.

    This Formatter adds prefix and suffix strings to the log message in trace
    mode (and also provides empty string defaults for normal logs).
    """
    def __init__(self, *args, handler=None, **kwargs):
        super().__init__(*args, **kwargs)
        # Detect whether the target stream can take the Unicode box-drawing
        # characters; otherwise format() falls back to the ASCII_MAP chars.
        try:
            encoding = handler.stream.encoding
            if encoding is None:
                raise AttributeError
        except AttributeError:
            encoding = locale.getpreferredencoding()
        try:
            encoding = codecs.lookup(encoding).name
        except LookupError:
            self.is_utf8 = False
        else:
            self.is_utf8 = (encoding == codecs.lookup('utf-8').name)
    def format(self, record):
        fields = {'prefix': "", 'suffix': ""}
        if getattr(record, 'depth', 0) > 0:
            # '#'-messages close an object: draw the corner; otherwise open
            # a new branch at the recorded depth.
            if record.msg.startswith("#"):
                prefix = (record.depth - 1)*"│" + "└"
            elif record.depth == 1:
                prefix = "┬"
            else:
                prefix = (record.depth - 2)*"│" + "├┬"
            if not self.is_utf8:
                prefix = prefix.translate(ASCII_MAP) + "-"
            fields['prefix'] = prefix + " "
        if hasattr(record, 'size') and record.size is not None and record.size >= 1:
            # Show object size in human-readable form.
            power = int(math.log(record.size, 2)) // 10
            size = record.size >> power*10
            # BUGFIX: the unit index is power-1 ('K' for power 1, 'M' for 2,
            # ...); indexing with 'power' rendered KiB sizes as 'Mi' and
            # would raise IndexError for sizes >= 1 PiB. power == 0 yields a
            # plain 'B' via the conditional expression.
            fields['suffix'] = " [%d %sB]" % (size, "KMGTP"[power-1] + "i" if power else "")
        vars(record).update(fields)
        return super().format(record)
210
+
211
# Module-level logger/adapter pair shared by all of dill's trace output.
logger = logging.getLogger('dill')
logger.propagate = False  # keep trace lines from also reaching the root logger
adapter = TraceAdapter(logger)
# NOTE(review): logging._StderrHandler is private API; unlike
# StreamHandler(sys.stderr) it always writes to the *current* sys.stderr.
stderr_handler = logging._StderrHandler()
adapter.addHandler(stderr_handler)
216
+
217
def trace(arg: Union[bool, TextIO, str, os.PathLike] = None, *, mode: str = 'a') -> None:
    """print a trace through the stack when pickling; useful for debugging

    With a single boolean argument, enable or disable the tracing.

    Example usage:

    >>> import dill
    >>> dill.detect.trace(True)
    >>> dill.dump_session()

    Alternatively, ``trace()`` can be used as a context manager. With no
    arguments, it just takes care of restoring the tracing state on exit.
    Either a file handle, or a file name and (optionally) a file mode may be
    specified to redirect the tracing output in the ``with`` block context. A
    log function is yielded by the manager so the user can write extra
    information to the file.

    Example usage:

    >>> from dill import detect
    >>> D = {'a': 42, 'b': {'x': None}}
    >>> with detect.trace():
    >>>     dumps(D)
    ┬ D2: <dict object at 0x7f2721804800>
    ├┬ D2: <dict object at 0x7f27217f5c40>
    │└ # D2 [8 B]
    └ # D2 [22 B]
    >>> squared = lambda x: x**2
    >>> with detect.trace('output.txt', mode='w') as log:
    >>>     log("> D = %r", D)
    >>>     dumps(D)
    >>>     log("> squared = %r", squared)
    >>>     dumps(squared)

    Arguments:
        arg: a boolean value, or an optional file-like or path-like object for the context manager
        mode: mode string for ``open()`` if a file name is passed as the first argument
    """
    # repr() distinguishes genuine booleans from truthy stream/path objects
    if repr(arg) in ('False', 'True'):
        logger.setLevel(logging.INFO if arg else logging.WARNING)
        return
    return TraceManager(file=arg, mode=mode)
259
+
260
class TraceManager(contextlib.AbstractContextManager):
    """context manager version of trace(); can redirect the trace to a file"""
    def __init__(self, file, mode):
        # file: None (no redirect), a writable stream, or a path-like object;
        # mode: open() mode used when 'file' is a path
        self.file = file
        self.mode = mode
        self.redirect = file is not None
        self.file_is_stream = hasattr(file, 'write')
    def __enter__(self):
        if self.redirect:
            stderr_handler.flush()
            # swap the default stderr handler for one targeting the file/stream
            if self.file_is_stream:
                self.handler = logging.StreamHandler(self.file)
            else:
                self.handler = logging.FileHandler(self.file, self.mode)
            adapter.removeHandler(stderr_handler)
            adapter.addHandler(self.handler)
        self.old_level = adapter.getEffectiveLevel()
        adapter.setLevel(logging.INFO)
        # yield the log function so callers can add extra lines to the trace
        return adapter.info
    def __exit__(self, *exc_info):
        adapter.setLevel(self.old_level)
        if self.redirect:
            adapter.removeHandler(self.handler)
            adapter.addHandler(stderr_handler)
            # only close handlers we opened ourselves (file paths); streams
            # passed in by the caller remain the caller's responsibility
            if not self.file_is_stream:
                self.handler.close()
.venv/lib/python3.11/site-packages/dill/objtypes.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ all Python Standard Library object types (currently: CH 1-15 @ 2.7)
10
+ and some other common object types (i.e. numpy.ndarray)
11
+
12
+ to load more objects and types, use dill.load_types()
13
+ """
14
+
15
+ # non-local import of dill.objects
16
+ from dill import objects
17
+ for _type in objects.keys():
18
+ exec("%s = type(objects['%s'])" % (_type,_type))
19
+
20
+ del objects
21
+ try:
22
+ del _type
23
+ except NameError:
24
+ pass
.venv/lib/python3.11/site-packages/dill/pointers.py ADDED
@@ -0,0 +1,122 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ __all__ = ['parent', 'reference', 'at', 'parents', 'children']
10
+
11
+ import gc
12
+ import sys
13
+
14
+ from ._dill import _proxy_helper as reference
15
+ from ._dill import _locate_object as at
16
+
17
def parent(obj, objtype, ignore=()):
    """
    >>> listiter = iter([4,5,6,7])
    >>> obj = parent(listiter, list)
    >>> obj == [4,5,6,7] # actually 'is', but don't have handle any longer
    True

    NOTE: objtype can be a single type (e.g. int or list) or a tuple of types.

    WARNING: if obj is a sequence (e.g. list), may produce unexpected results.
    Parent finds *one* parent (e.g. the last member of the sequence).
    """
    # depth 1: only the immediate parent is of interest
    chain = parents(obj, objtype, 1, ignore)
    found = chain.pop()
    # parents() returns a chain ending in obj itself when nothing matched
    return None if found is obj else found
35
+
36
+
37
def parents(obj, objtype, depth=1, ignore=()): #XXX: objtype=object ?
    """Find the chain of referents for obj. Chain will end with obj.

    objtype: an object type or tuple of types to search for
    depth: search depth (e.g. depth=2 is 'grandparents')
    ignore: an object or tuple of objects to ignore in the search
    """
    edge_func = gc.get_referents # looking for refs, not back_refs
    predicate = lambda x: isinstance(x, objtype) # looking for parent type
    #if objtype is None: predicate = lambda x: True #XXX: in obj.mro() ?
    ignore = (ignore,) if not hasattr(ignore, '__len__') else ignore
    ignore = (id(obj) for obj in ignore)
    # BUGFIX: 'ignore' was computed but never forwarded to find_chain, so the
    # documented ignore argument had no effect; pass it through, matching
    # the behavior of children().
    chain = find_chain(obj, predicate, edge_func, depth, ignore)[::-1]
    #XXX: should pop off obj... ?
    return chain
52
+
53
+
54
def children(obj, objtype, depth=1, ignore=()): #XXX: objtype=object ?
    """Find the chain of referrers for obj. Chain will start with obj.

    objtype: an object type or tuple of types to search for
    depth: search depth (e.g. depth=2 is 'grandchildren')
    ignore: an object or tuple of objects to ignore in the search

    NOTE: a common thing to ignore is all globals, 'ignore=(globals(),)'

    NOTE: repeated calls may yield different results, as python stores
    the last value in the special variable '_'; thus, it is often good
    to execute something to replace '_' (e.g. >>> 1+1).
    """
    # gc.get_referrers walks back-references (objects that point *at* obj)
    is_child = lambda x: isinstance(x, objtype)
    #if objtype is None: is_child = lambda x: True #XXX: in obj.mro() ?
    if not hasattr(ignore, '__len__'):
        ignore = (ignore,)
    skip_ids = (id(item) for item in ignore)
    chain = find_chain(obj, is_child, gc.get_referrers, depth, skip_ids)
    #XXX: should pop off obj... ?
    return chain
75
+
76
+
77
+ # more generic helper function (cut-n-paste from objgraph)
78
+ # Source at http://mg.pov.lt/objgraph/
79
+ # Copyright (c) 2008-2010 Marius Gedminas <marius@pov.lt>
80
+ # Copyright (c) 2010 Stefano Rivera <stefano@rivera.za.net>
81
+ # Released under the MIT licence (see objgraph/objgrah.py)
82
+
83
def find_chain(obj, predicate, edge_func, max_depth=20, extra_ignore=()):
    """Breadth-first search from obj along edge_func until predicate matches.

    Returns the path from the first matching object back to obj (the list
    ends with obj), or [obj] if no match exists within max_depth edges.
    edge_func is gc.get_referents or gc.get_referrers; extra_ignore is an
    iterable of object ids to exclude from the search.
    """
    queue = [obj]
    depth = {id(obj): 0}  # id -> distance (in edges) from obj
    parent = {id(obj): None}  # id -> predecessor on the BFS path
    ignore = set(extra_ignore)
    ignore.add(id(extra_ignore))
    # exclude this search's own bookkeeping objects, which also refer to obj
    ignore.add(id(queue))
    ignore.add(id(depth))
    ignore.add(id(parent))
    ignore.add(id(ignore))
    ignore.add(id(sys._getframe())) # this function
    ignore.add(id(sys._getframe(1))) # find_chain/find_backref_chain, likely
    # drop collectable garbage so it cannot show up in referrer chains
    gc.collect()
    while queue:
        target = queue.pop(0)
        if predicate(target):
            # found a match: walk the parent links back to obj
            chain = [target]
            while parent[id(target)] is not None:
                target = parent[id(target)]
                chain.append(target)
            return chain
        tdepth = depth[id(target)]
        if tdepth < max_depth:
            referrers = edge_func(target)
            # the freshly created referrers list itself refers to target
            ignore.add(id(referrers))
            for source in referrers:
                if id(source) in ignore:
                    continue
                if id(source) not in depth:
                    depth[id(source)] = tdepth + 1
                    parent[id(source)] = target
                    queue.append(source)
    return [obj] # not found
116
+
117
+
118
+ # backward compatibility
119
+ refobject = at
120
+
121
+
122
+ # EOF
.venv/lib/python3.11/site-packages/dill/session.py ADDED
@@ -0,0 +1,612 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Author: Leonardo Gama (@leogama)
5
+ # Copyright (c) 2008-2015 California Institute of Technology.
6
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
7
+ # License: 3-clause BSD. The full license text is available at:
8
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
9
+ """
10
+ Pickle and restore the intepreter session.
11
+ """
12
+
13
+ __all__ = [
14
+ 'dump_module', 'load_module', 'load_module_asdict',
15
+ 'dump_session', 'load_session' # backward compatibility
16
+ ]
17
+
18
+ import re
19
+ import os
20
+ import sys
21
+ import warnings
22
+ import pathlib
23
+ import tempfile
24
+
25
+ TEMPDIR = pathlib.PurePath(tempfile.gettempdir())
26
+
27
+ # Type hints.
28
+ from typing import Optional, Union
29
+
30
+ from dill import _dill, Pickler, Unpickler
31
+ from ._dill import (
32
+ BuiltinMethodType, FunctionType, MethodType, ModuleType, TypeType,
33
+ _import_module, _is_builtin_module, _is_imported_module, _main_module,
34
+ _reverse_typemap, __builtin__, UnpicklingError,
35
+ )
36
+
37
def _module_map():
    """get map of imported modules"""
    from collections import defaultdict
    from types import SimpleNamespace
    mapping = SimpleNamespace(
        by_name=defaultdict(list),  # object name -> [(object, module name)]
        by_id=defaultdict(list),    # id(object) -> [(object, obj name, module name)]
        top_level={},               # id(module) -> undotted module name
    )
    for mod_name, mod in sys.modules.items():
        # skip the session's own namespaces and anything not a real module
        if mod_name in ('__main__', '__mp_main__') or not isinstance(mod, ModuleType):
            continue
        if '.' not in mod_name:
            mapping.top_level[id(mod)] = mod_name
        for attr_name, attr in mod.__dict__.items():
            mapping.by_name[attr_name].append((attr, mod_name))
            mapping.by_id[id(attr)].append((attr, attr_name, mod_name))
    return mapping
55
+
56
# Objects of these types are candidates for being matched by identity when
# deciding whether a name in the module was imported under a different name.
IMPORTED_AS_TYPES = (ModuleType, TypeType, FunctionType, MethodType, BuiltinMethodType)
if 'PyCapsuleType' in _reverse_typemap:
    # PyCapsule is only registered on some builds/platforms
    IMPORTED_AS_TYPES += (_reverse_typemap['PyCapsuleType'],)
# Objects whose __module__ fully matches one of these patterns are also
# treated as imported, regardless of their type.
IMPORTED_AS_MODULES = ('ctypes', 'typing', 'subprocess', 'threading',
    r'concurrent\.futures(\.\w+)?', r'multiprocessing(\.\w+)?')
IMPORTED_AS_MODULES = tuple(re.compile(x) for x in IMPORTED_AS_MODULES)
62
+
63
def _lookup_module(modmap, name, obj, main_module):
    """lookup name or id of obj if module is imported"""
    # First pass: the same object bound to the same name in another module.
    for candidate, modname in modmap.by_name[name]:
        if candidate is obj and sys.modules[modname] is not main_module:
            return modname, name
    # Second pass: for "importable" objects only, allow a match found under
    # a different name (matched purely by object identity).
    source = getattr(obj, '__module__', None)
    importable = isinstance(obj, IMPORTED_AS_TYPES) or (
        source is not None
        and any(pattern.fullmatch(source) for pattern in IMPORTED_AS_MODULES)
    )
    if importable:
        for candidate, objname, modname in modmap.by_id[id(obj)]:
            if sys.modules[modname] is not main_module:
                return modname, objname
    return None, None
75
+
76
def _stash_modules(main_module):
    """Return a copy of main_module with importable objects marked for save-by-reference.

    Splits main_module.__dict__ into objects that can be restored by importing
    them again (recorded in the __dill_imported* lists, consumed later by
    _restore_modules) and objects that must be pickled by value. If nothing
    can be saved by reference, main_module is returned unchanged.
    """
    modmap = _module_map()
    newmod = ModuleType(main_module.__name__)

    imported = []
    imported_as = []
    imported_top_level = [] # keep separated for backward compatibility
    original = {}
    for name, obj in main_module.__dict__.items():
        if obj is main_module:
            original[name] = newmod # self-reference
        elif obj is main_module.__dict__:
            original[name] = newmod.__dict__
        # Avoid incorrectly matching a singleton value in another package (ex.: __doc__).
        elif any(obj is singleton for singleton in (None, False, True)) \
                or isinstance(obj, ModuleType) and _is_builtin_module(obj): # always saved by ref
            original[name] = obj
        else:
            # try to find the object in some other imported module
            source_module, objname = _lookup_module(modmap, name, obj, main_module)
            if source_module is not None:
                if objname == name:
                    imported.append((source_module, name))
                else:
                    # imported under a different name, e.g. "import numpy as np"
                    imported_as.append((source_module, objname, name))
            else:
                try:
                    # obj may itself be a top-level module imported by name
                    imported_top_level.append((modmap.top_level[id(obj)], name))
                except KeyError:
                    # not found anywhere importable: must be saved by value
                    original[name] = obj

    if len(original) < len(main_module.__dict__):
        # at least one object will be saved by reference
        newmod.__dict__.update(original)
        newmod.__dill_imported = imported
        newmod.__dill_imported_as = imported_as
        newmod.__dill_imported_top_level = imported_top_level
        if getattr(newmod, '__loader__', None) is None and _is_imported_module(main_module):
            # Trick _is_imported_module() to force saving as an imported module.
            newmod.__loader__ = True # will be discarded by save_module()
        return newmod
    else:
        return main_module
117
+
118
def _restore_modules(unpickler, main_module):
    """re-bind objects that _stash_modules() recorded as saved-by-reference"""
    main_dict = main_module.__dict__
    try:
        for modname, name in main_dict.pop('__dill_imported'):
            main_dict[name] = unpickler.find_class(modname, name)
        for modname, objname, name in main_dict.pop('__dill_imported_as'):
            # imported under a different name: look up objname, bind as name
            main_dict[name] = unpickler.find_class(modname, objname)
        for modname, name in main_dict.pop('__dill_imported_top_level'):
            main_dict[name] = __import__(modname)
    except KeyError:
        # module was not saved with refimported=True; nothing to restore
        pass
128
+
129
+ #NOTE: 06/03/15 renamed main_module to main
130
+ def dump_module(
131
+ filename: Union[str, os.PathLike] = None,
132
+ module: Optional[Union[ModuleType, str]] = None,
133
+ refimported: bool = False,
134
+ **kwds
135
+ ) -> None:
136
+ """Pickle the current state of :py:mod:`__main__` or another module to a file.
137
+
138
+ Save the contents of :py:mod:`__main__` (e.g. from an interactive
139
+ interpreter session), an imported module, or a module-type object (e.g.
140
+ built with :py:class:`~types.ModuleType`), to a file. The pickled
141
+ module can then be restored with the function :py:func:`load_module`.
142
+
143
+ Args:
144
+ filename: a path-like object or a writable stream. If `None`
145
+ (the default), write to a named file in a temporary directory.
146
+ module: a module object or the name of an importable module. If `None`
147
+ (the default), :py:mod:`__main__` is saved.
148
+ refimported: if `True`, all objects identified as having been imported
149
+ into the module's namespace are saved by reference. *Note:* this is
150
+ similar but independent from ``dill.settings[`byref`]``, as
151
+ ``refimported`` refers to virtually all imported objects, while
152
+ ``byref`` only affects select objects.
153
+ **kwds: extra keyword arguments passed to :py:class:`Pickler()`.
154
+
155
+ Raises:
156
+ :py:exc:`PicklingError`: if pickling fails.
157
+
158
+ Examples:
159
+
160
+ - Save current interpreter session state:
161
+
162
+ >>> import dill
163
+ >>> squared = lambda x: x*x
164
+ >>> dill.dump_module() # save state of __main__ to /tmp/session.pkl
165
+
166
+ - Save the state of an imported/importable module:
167
+
168
+ >>> import dill
169
+ >>> import pox
170
+ >>> pox.plus_one = lambda x: x+1
171
+ >>> dill.dump_module('pox_session.pkl', module=pox)
172
+
173
+ - Save the state of a non-importable, module-type object:
174
+
175
+ >>> import dill
176
+ >>> from types import ModuleType
177
+ >>> foo = ModuleType('foo')
178
+ >>> foo.values = [1,2,3]
179
+ >>> import math
180
+ >>> foo.sin = math.sin
181
+ >>> dill.dump_module('foo_session.pkl', module=foo, refimported=True)
182
+
183
+ - Restore the state of the saved modules:
184
+
185
+ >>> import dill
186
+ >>> dill.load_module()
187
+ >>> squared(2)
188
+ 4
189
+ >>> pox = dill.load_module('pox_session.pkl')
190
+ >>> pox.plus_one(1)
191
+ 2
192
+ >>> foo = dill.load_module('foo_session.pkl')
193
+ >>> [foo.sin(x) for x in foo.values]
194
+ [0.8414709848078965, 0.9092974268256817, 0.1411200080598672]
195
+
196
+ - Use `refimported` to save imported objects by reference:
197
+
198
+ >>> import dill
199
+ >>> from html.entities import html5
200
+ >>> type(html5), len(html5)
201
+ (dict, 2231)
202
+ >>> import io
203
+ >>> buf = io.BytesIO()
204
+ >>> dill.dump_module(buf) # saves __main__, with html5 saved by value
205
+ >>> len(buf.getvalue()) # pickle size in bytes
206
+ 71665
207
+ >>> buf = io.BytesIO()
208
+ >>> dill.dump_module(buf, refimported=True) # html5 saved by reference
209
+ >>> len(buf.getvalue())
210
+ 438
211
+
212
+ *Changed in version 0.3.6:* Function ``dump_session()`` was renamed to
213
+ ``dump_module()``. Parameters ``main`` and ``byref`` were renamed to
214
+ ``module`` and ``refimported``, respectively.
215
+
216
+ Note:
217
+ Currently, ``dill.settings['byref']`` and ``dill.settings['recurse']``
218
+ don't apply to this function.
219
+ """
220
+ for old_par, par in [('main', 'module'), ('byref', 'refimported')]:
221
+ if old_par in kwds:
222
+ message = "The argument %r has been renamed %r" % (old_par, par)
223
+ if old_par == 'byref':
224
+ message += " to distinguish it from dill.settings['byref']"
225
+ warnings.warn(message + ".", PendingDeprecationWarning)
226
+ if locals()[par]: # the defaults are None and False
227
+ raise TypeError("both %r and %r arguments were used" % (par, old_par))
228
+ refimported = kwds.pop('byref', refimported)
229
+ module = kwds.pop('main', module)
230
+
231
+ from .settings import settings
232
+ protocol = settings['protocol']
233
+ main = module
234
+ if main is None:
235
+ main = _main_module
236
+ elif isinstance(main, str):
237
+ main = _import_module(main)
238
+ if not isinstance(main, ModuleType):
239
+ raise TypeError("%r is not a module" % main)
240
+ if hasattr(filename, 'write'):
241
+ file = filename
242
+ else:
243
+ if filename is None:
244
+ filename = str(TEMPDIR/'session.pkl')
245
+ file = open(filename, 'wb')
246
+ try:
247
+ pickler = Pickler(file, protocol, **kwds)
248
+ pickler._original_main = main
249
+ if refimported:
250
+ main = _stash_modules(main)
251
+ pickler._main = main #FIXME: dill.settings are disabled
252
+ pickler._byref = False # disable pickling by name reference
253
+ pickler._recurse = False # disable pickling recursion for globals
254
+ pickler._session = True # is best indicator of when pickling a session
255
+ pickler._first_pass = True
256
+ pickler._main_modified = main is not pickler._original_main
257
+ pickler.dump(main)
258
+ finally:
259
+ if file is not filename: # if newly opened file
260
+ file.close()
261
+ return
262
+
263
+ # Backward compatibility.
264
+ def dump_session(filename=None, main=None, byref=False, **kwds):
265
+ warnings.warn("dump_session() has been renamed dump_module()", PendingDeprecationWarning)
266
+ dump_module(filename, module=main, refimported=byref, **kwds)
267
+ dump_session.__doc__ = dump_module.__doc__
268
+
269
+ class _PeekableReader:
270
+ """lightweight stream wrapper that implements peek()"""
271
+ def __init__(self, stream):
272
+ self.stream = stream
273
+ def read(self, n):
274
+ return self.stream.read(n)
275
+ def readline(self):
276
+ return self.stream.readline()
277
+ def tell(self):
278
+ return self.stream.tell()
279
+ def close(self):
280
+ return self.stream.close()
281
+ def peek(self, n):
282
+ stream = self.stream
283
+ try:
284
+ if hasattr(stream, 'flush'): stream.flush()
285
+ position = stream.tell()
286
+ stream.seek(position) # assert seek() works before reading
287
+ chunk = stream.read(n)
288
+ stream.seek(position)
289
+ return chunk
290
+ except (AttributeError, OSError):
291
+ raise NotImplementedError("stream is not peekable: %r", stream) from None
292
+
293
+ def _make_peekable(stream):
294
+ """return stream as an object with a peek() method"""
295
+ import io
296
+ if hasattr(stream, 'peek'):
297
+ return stream
298
+ if not (hasattr(stream, 'tell') and hasattr(stream, 'seek')):
299
+ try:
300
+ return io.BufferedReader(stream)
301
+ except Exception:
302
+ pass
303
+ return _PeekableReader(stream)
304
+
305
+ def _identify_module(file, main=None):
306
+ """identify the name of the module stored in the given file-type object"""
307
+ from pickletools import genops
308
+ UNICODE = {'UNICODE', 'BINUNICODE', 'SHORT_BINUNICODE'}
309
+ found_import = False
310
+ try:
311
+ for opcode, arg, pos in genops(file.peek(256)):
312
+ if not found_import:
313
+ if opcode.name in ('GLOBAL', 'SHORT_BINUNICODE') and \
314
+ arg.endswith('_import_module'):
315
+ found_import = True
316
+ else:
317
+ if opcode.name in UNICODE:
318
+ return arg
319
+ else:
320
+ raise UnpicklingError("reached STOP without finding main module")
321
+ except (NotImplementedError, ValueError) as error:
322
+ # ValueError occours when the end of the chunk is reached (without a STOP).
323
+ if isinstance(error, NotImplementedError) and main is not None:
324
+ # file is not peekable, but we have main.
325
+ return None
326
+ raise UnpicklingError("unable to identify main module") from error
327
+
328
+ def load_module(
329
+ filename: Union[str, os.PathLike] = None,
330
+ module: Optional[Union[ModuleType, str]] = None,
331
+ **kwds
332
+ ) -> Optional[ModuleType]:
333
+ """Update the selected module (default is :py:mod:`__main__`) with
334
+ the state saved at ``filename``.
335
+
336
+ Restore a module to the state saved with :py:func:`dump_module`. The
337
+ saved module can be :py:mod:`__main__` (e.g. an interpreter session),
338
+ an imported module, or a module-type object (e.g. created with
339
+ :py:class:`~types.ModuleType`).
340
+
341
+ When restoring the state of a non-importable module-type object, the
342
+ current instance of this module may be passed as the argument ``main``.
343
+ Otherwise, a new instance is created with :py:class:`~types.ModuleType`
344
+ and returned.
345
+
346
+ Args:
347
+ filename: a path-like object or a readable stream. If `None`
348
+ (the default), read from a named file in a temporary directory.
349
+ module: a module object or the name of an importable module;
350
+ the module name and kind (i.e. imported or non-imported) must
351
+ match the name and kind of the module stored at ``filename``.
352
+ **kwds: extra keyword arguments passed to :py:class:`Unpickler()`.
353
+
354
+ Raises:
355
+ :py:exc:`UnpicklingError`: if unpickling fails.
356
+ :py:exc:`ValueError`: if the argument ``main`` and module saved
357
+ at ``filename`` are incompatible.
358
+
359
+ Returns:
360
+ A module object, if the saved module is not :py:mod:`__main__` or
361
+ a module instance wasn't provided with the argument ``main``.
362
+
363
+ Examples:
364
+
365
+ - Save the state of some modules:
366
+
367
+ >>> import dill
368
+ >>> squared = lambda x: x*x
369
+ >>> dill.dump_module() # save state of __main__ to /tmp/session.pkl
370
+ >>>
371
+ >>> import pox # an imported module
372
+ >>> pox.plus_one = lambda x: x+1
373
+ >>> dill.dump_module('pox_session.pkl', module=pox)
374
+ >>>
375
+ >>> from types import ModuleType
376
+ >>> foo = ModuleType('foo') # a module-type object
377
+ >>> foo.values = [1,2,3]
378
+ >>> import math
379
+ >>> foo.sin = math.sin
380
+ >>> dill.dump_module('foo_session.pkl', module=foo, refimported=True)
381
+
382
+ - Restore the state of the interpreter:
383
+
384
+ >>> import dill
385
+ >>> dill.load_module() # updates __main__ from /tmp/session.pkl
386
+ >>> squared(2)
387
+ 4
388
+
389
+ - Load the saved state of an importable module:
390
+
391
+ >>> import dill
392
+ >>> pox = dill.load_module('pox_session.pkl')
393
+ >>> pox.plus_one(1)
394
+ 2
395
+ >>> import sys
396
+ >>> pox in sys.modules.values()
397
+ True
398
+
399
+ - Load the saved state of a non-importable module-type object:
400
+
401
+ >>> import dill
402
+ >>> foo = dill.load_module('foo_session.pkl')
403
+ >>> [foo.sin(x) for x in foo.values]
404
+ [0.8414709848078965, 0.9092974268256817, 0.1411200080598672]
405
+ >>> import math
406
+ >>> foo.sin is math.sin # foo.sin was saved by reference
407
+ True
408
+ >>> import sys
409
+ >>> foo in sys.modules.values()
410
+ False
411
+
412
+ - Update the state of a non-importable module-type object:
413
+
414
+ >>> import dill
415
+ >>> from types import ModuleType
416
+ >>> foo = ModuleType('foo')
417
+ >>> foo.values = ['a','b']
418
+ >>> foo.sin = lambda x: x*x
419
+ >>> dill.load_module('foo_session.pkl', module=foo)
420
+ >>> [foo.sin(x) for x in foo.values]
421
+ [0.8414709848078965, 0.9092974268256817, 0.1411200080598672]
422
+
423
+ *Changed in version 0.3.6:* Function ``load_session()`` was renamed to
424
+ ``load_module()``. Parameter ``main`` was renamed to ``module``.
425
+
426
+ See also:
427
+ :py:func:`load_module_asdict` to load the contents of module saved
428
+ with :py:func:`dump_module` into a dictionary.
429
+ """
430
+ if 'main' in kwds:
431
+ warnings.warn(
432
+ "The argument 'main' has been renamed 'module'.",
433
+ PendingDeprecationWarning
434
+ )
435
+ if module is not None:
436
+ raise TypeError("both 'module' and 'main' arguments were used")
437
+ module = kwds.pop('main')
438
+ main = module
439
+ if hasattr(filename, 'read'):
440
+ file = filename
441
+ else:
442
+ if filename is None:
443
+ filename = str(TEMPDIR/'session.pkl')
444
+ file = open(filename, 'rb')
445
+ try:
446
+ file = _make_peekable(file)
447
+ #FIXME: dill.settings are disabled
448
+ unpickler = Unpickler(file, **kwds)
449
+ unpickler._session = True
450
+
451
+ # Resolve unpickler._main
452
+ pickle_main = _identify_module(file, main)
453
+ if main is None and pickle_main is not None:
454
+ main = pickle_main
455
+ if isinstance(main, str):
456
+ if main.startswith('__runtime__.'):
457
+ # Create runtime module to load the session into.
458
+ main = ModuleType(main.partition('.')[-1])
459
+ else:
460
+ main = _import_module(main)
461
+ if main is not None:
462
+ if not isinstance(main, ModuleType):
463
+ raise TypeError("%r is not a module" % main)
464
+ unpickler._main = main
465
+ else:
466
+ main = unpickler._main
467
+
468
+ # Check against the pickle's main.
469
+ is_main_imported = _is_imported_module(main)
470
+ if pickle_main is not None:
471
+ is_runtime_mod = pickle_main.startswith('__runtime__.')
472
+ if is_runtime_mod:
473
+ pickle_main = pickle_main.partition('.')[-1]
474
+ error_msg = "can't update{} module{} %r with the saved state of{} module{} %r"
475
+ if is_runtime_mod and is_main_imported:
476
+ raise ValueError(
477
+ error_msg.format(" imported", "", "", "-type object")
478
+ % (main.__name__, pickle_main)
479
+ )
480
+ if not is_runtime_mod and not is_main_imported:
481
+ raise ValueError(
482
+ error_msg.format("", "-type object", " imported", "")
483
+ % (pickle_main, main.__name__)
484
+ )
485
+ if main.__name__ != pickle_main:
486
+ raise ValueError(error_msg.format("", "", "", "") % (main.__name__, pickle_main))
487
+
488
+ # This is for find_class() to be able to locate it.
489
+ if not is_main_imported:
490
+ runtime_main = '__runtime__.%s' % main.__name__
491
+ sys.modules[runtime_main] = main
492
+
493
+ loaded = unpickler.load()
494
+ finally:
495
+ if not hasattr(filename, 'read'): # if newly opened file
496
+ file.close()
497
+ try:
498
+ del sys.modules[runtime_main]
499
+ except (KeyError, NameError):
500
+ pass
501
+ assert loaded is main
502
+ _restore_modules(unpickler, main)
503
+ if main is _main_module or main is module:
504
+ return None
505
+ else:
506
+ return main
507
+
508
+ # Backward compatibility.
509
+ def load_session(filename=None, main=None, **kwds):
510
+ warnings.warn("load_session() has been renamed load_module().", PendingDeprecationWarning)
511
+ load_module(filename, module=main, **kwds)
512
+ load_session.__doc__ = load_module.__doc__
513
+
514
+ def load_module_asdict(
515
+ filename: Union[str, os.PathLike] = None,
516
+ update: bool = False,
517
+ **kwds
518
+ ) -> dict:
519
+ """
520
+ Load the contents of a saved module into a dictionary.
521
+
522
+ ``load_module_asdict()`` is the near-equivalent of::
523
+
524
+ lambda filename: vars(dill.load_module(filename)).copy()
525
+
526
+ however, does not alter the original module. Also, the path of
527
+ the loaded module is stored in the ``__session__`` attribute.
528
+
529
+ Args:
530
+ filename: a path-like object or a readable stream. If `None`
531
+ (the default), read from a named file in a temporary directory.
532
+ update: if `True`, initialize the dictionary with the current state
533
+ of the module prior to loading the state stored at filename.
534
+ **kwds: extra keyword arguments passed to :py:class:`Unpickler()`
535
+
536
+ Raises:
537
+ :py:exc:`UnpicklingError`: if unpickling fails
538
+
539
+ Returns:
540
+ A copy of the restored module's dictionary.
541
+
542
+ Note:
543
+ If ``update`` is True, the corresponding module may first be imported
544
+ into the current namespace before the saved state is loaded from
545
+ filename to the dictionary. Note that any module that is imported into
546
+ the current namespace as a side-effect of using ``update`` will not be
547
+ modified by loading the saved module in filename to a dictionary.
548
+
549
+ Example:
550
+ >>> import dill
551
+ >>> alist = [1, 2, 3]
552
+ >>> anum = 42
553
+ >>> dill.dump_module()
554
+ >>> anum = 0
555
+ >>> new_var = 'spam'
556
+ >>> main = dill.load_module_asdict()
557
+ >>> main['__name__'], main['__session__']
558
+ ('__main__', '/tmp/session.pkl')
559
+ >>> main is globals() # loaded objects don't reference globals
560
+ False
561
+ >>> main['alist'] == alist
562
+ True
563
+ >>> main['alist'] is alist # was saved by value
564
+ False
565
+ >>> main['anum'] == anum # changed after the session was saved
566
+ False
567
+ >>> new_var in main # would be True if the option 'update' was set
568
+ False
569
+ """
570
+ if 'module' in kwds:
571
+ raise TypeError("'module' is an invalid keyword argument for load_module_asdict()")
572
+ if hasattr(filename, 'read'):
573
+ file = filename
574
+ else:
575
+ if filename is None:
576
+ filename = str(TEMPDIR/'session.pkl')
577
+ file = open(filename, 'rb')
578
+ try:
579
+ file = _make_peekable(file)
580
+ main_name = _identify_module(file)
581
+ old_main = sys.modules.get(main_name)
582
+ main = ModuleType(main_name)
583
+ if update:
584
+ if old_main is None:
585
+ old_main = _import_module(main_name)
586
+ main.__dict__.update(old_main.__dict__)
587
+ else:
588
+ main.__builtins__ = __builtin__
589
+ sys.modules[main_name] = main
590
+ load_module(file, **kwds)
591
+ finally:
592
+ if not hasattr(filename, 'read'): # if newly opened file
593
+ file.close()
594
+ try:
595
+ if old_main is None:
596
+ del sys.modules[main_name]
597
+ else:
598
+ sys.modules[main_name] = old_main
599
+ except NameError: # failed before setting old_main
600
+ pass
601
+ main.__session__ = str(filename)
602
+ return main.__dict__
603
+
604
+
605
+ # Internal exports for backward compatibility with dill v0.3.5.1
606
+ # Can't be placed in dill._dill because of circular import problems.
607
+ for name in (
608
+ '_lookup_module', '_module_map', '_restore_modules', '_stash_modules',
609
+ 'dump_session', 'load_session' # backward compatibility functions
610
+ ):
611
+ setattr(_dill, name, globals()[name])
612
+ del name
.venv/lib/python3.11/site-packages/dill/settings.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ global settings for Pickler
10
+ """
11
+
12
+ from pickle import DEFAULT_PROTOCOL
13
+
14
+ settings = {
15
+ #'main' : None,
16
+ 'protocol' : DEFAULT_PROTOCOL,
17
+ 'byref' : False,
18
+ #'strictio' : False,
19
+ 'fmode' : 0, #HANDLE_FMODE
20
+ 'recurse' : False,
21
+ 'ignore' : False,
22
+ }
23
+
24
+ del DEFAULT_PROTOCOL
25
+
.venv/lib/python3.11/site-packages/dill/source.py ADDED
@@ -0,0 +1,1023 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ #
9
+ # inspired by inspect.py from Python-2.7.6
10
+ # inspect.py author: 'Ka-Ping Yee <ping@lfw.org>'
11
+ # inspect.py merged into original dill.source by Mike McKerns 4/13/14
12
+ """
13
+ Extensions to python's 'inspect' module, which can be used
14
+ to retrieve information from live python objects. The methods
15
+ defined in this module are augmented to facilitate access to
16
+ source code of interactively defined functions and classes,
17
+ as well as provide access to source code for objects defined
18
+ in a file.
19
+ """
20
+
21
+ __all__ = ['findsource', 'getsourcelines', 'getsource', 'indent', 'outdent', \
22
+ '_wrap', 'dumpsource', 'getname', '_namespace', 'getimport', \
23
+ '_importable', 'importable','isdynamic', 'isfrommain']
24
+
25
+ import linecache
26
+ import re
27
+ from inspect import (getblock, getfile, getmodule, getsourcefile, indentsize,
28
+ isbuiltin, isclass, iscode, isframe, isfunction, ismethod,
29
+ ismodule, istraceback)
30
+ from tokenize import TokenError
31
+
32
+ from ._dill import IS_IPYTHON
33
+
34
+
35
+ def isfrommain(obj):
36
+ "check if object was built in __main__"
37
+ module = getmodule(obj)
38
+ if module and module.__name__ == '__main__':
39
+ return True
40
+ return False
41
+
42
+
43
+ def isdynamic(obj):
44
+ "check if object was built in the interpreter"
45
+ try: file = getfile(obj)
46
+ except TypeError: file = None
47
+ if file == '<stdin>' and isfrommain(obj):
48
+ return True
49
+ return False
50
+
51
+
52
+ def _matchlambda(func, line):
53
+ """check if lambda object 'func' matches raw line of code 'line'"""
54
+ from .detect import code as getcode
55
+ from .detect import freevars, globalvars, varnames
56
+ dummy = lambda : '__this_is_a_big_dummy_function__'
57
+ # process the line (removing leading whitespace, etc)
58
+ lhs,rhs = line.split('lambda ',1)[-1].split(":", 1) #FIXME: if !1 inputs
59
+ try: #FIXME: unsafe
60
+ _ = eval("lambda %s : %s" % (lhs,rhs), globals(),locals())
61
+ except Exception: _ = dummy
62
+ # get code objects, for comparison
63
+ _, code = getcode(_).co_code, getcode(func).co_code
64
+ # check if func is in closure
65
+ _f = [line.count(i) for i in freevars(func).keys()]
66
+ if not _f: # not in closure
67
+ # check if code matches
68
+ if _ == code: return True
69
+ return False
70
+ # weak check on freevars
71
+ if not all(_f): return False #XXX: VERY WEAK
72
+ # weak check on varnames and globalvars
73
+ _f = varnames(func)
74
+ _f = [line.count(i) for i in _f[0]+_f[1]]
75
+ if _f and not all(_f): return False #XXX: VERY WEAK
76
+ _f = [line.count(i) for i in globalvars(func).keys()]
77
+ if _f and not all(_f): return False #XXX: VERY WEAK
78
+ # check if func is a double lambda
79
+ if (line.count('lambda ') > 1) and (lhs in freevars(func).keys()):
80
+ _lhs,_rhs = rhs.split('lambda ',1)[-1].split(":",1) #FIXME: if !1 inputs
81
+ try: #FIXME: unsafe
82
+ _f = eval("lambda %s : %s" % (_lhs,_rhs), globals(),locals())
83
+ except Exception: _f = dummy
84
+ # get code objects, for comparison
85
+ _, code = getcode(_f).co_code, getcode(func).co_code
86
+ if len(_) != len(code): return False
87
+ #NOTE: should be same code same order, but except for 't' and '\x88'
88
+ _ = set((i,j) for (i,j) in zip(_,code) if i != j)
89
+ if len(_) != 1: return False #('t','\x88')
90
+ return True
91
+ # check indentsize
92
+ if not indentsize(line): return False #FIXME: is this a good check???
93
+ # check if code 'pattern' matches
94
+ #XXX: or pattern match against dis.dis(code)? (or use uncompyle2?)
95
+ _ = _.split(_[0]) # 't' #XXX: remove matching values if starts the same?
96
+ _f = code.split(code[0]) # '\x88'
97
+ #NOTE: should be same code different order, with different first element
98
+ _ = dict(re.match(r'([\W\D\S])(.*)', _[i]).groups() for i in range(1,len(_)))
99
+ _f = dict(re.match(r'([\W\D\S])(.*)', _f[i]).groups() for i in range(1,len(_f)))
100
+ if (_.keys() == _f.keys()) and (sorted(_.values()) == sorted(_f.values())):
101
+ return True
102
+ return False
103
+
104
+
105
+ def findsource(object):
106
+ """Return the entire source file and starting line number for an object.
107
+ For interactively-defined objects, the 'file' is the interpreter's history.
108
+
109
+ The argument may be a module, class, method, function, traceback, frame,
110
+ or code object. The source code is returned as a list of all the lines
111
+ in the file and the line number indexes a line in that list. An IOError
112
+ is raised if the source code cannot be retrieved, while a TypeError is
113
+ raised for objects where the source code is unavailable (e.g. builtins)."""
114
+
115
+ module = getmodule(object)
116
+ try: file = getfile(module)
117
+ except TypeError: file = None
118
+ is_module_main = (module and module.__name__ == '__main__' and not file)
119
+ if IS_IPYTHON and is_module_main:
120
+ #FIXME: quick fix for functions and classes in IPython interpreter
121
+ try:
122
+ file = getfile(object)
123
+ sourcefile = getsourcefile(object)
124
+ except TypeError:
125
+ if isclass(object):
126
+ for object_method in filter(isfunction, object.__dict__.values()):
127
+ # look for a method of the class
128
+ file_candidate = getfile(object_method)
129
+ if not file_candidate.startswith('<ipython-input-'):
130
+ continue
131
+ file = file_candidate
132
+ sourcefile = getsourcefile(object_method)
133
+ break
134
+ if file:
135
+ lines = linecache.getlines(file)
136
+ else:
137
+ # fallback to use history
138
+ history = '\n'.join(get_ipython().history_manager.input_hist_parsed)
139
+ lines = [line + '\n' for line in history.splitlines()]
140
+ # use readline when working in interpreter (i.e. __main__ and not file)
141
+ elif is_module_main:
142
+ try:
143
+ import readline
144
+ err = ''
145
+ except ImportError:
146
+ import sys
147
+ err = sys.exc_info()[1].args[0]
148
+ if sys.platform[:3] == 'win':
149
+ err += ", please install 'pyreadline'"
150
+ if err:
151
+ raise IOError(err)
152
+ lbuf = readline.get_current_history_length()
153
+ lines = [readline.get_history_item(i)+'\n' for i in range(1,lbuf+1)]
154
+ else:
155
+ try: # special handling for class instances
156
+ if not isclass(object) and isclass(type(object)): # __class__
157
+ file = getfile(module)
158
+ sourcefile = getsourcefile(module)
159
+ else: # builtins fail with a TypeError
160
+ file = getfile(object)
161
+ sourcefile = getsourcefile(object)
162
+ except (TypeError, AttributeError): # fail with better error
163
+ file = getfile(object)
164
+ sourcefile = getsourcefile(object)
165
+ if not sourcefile and file[:1] + file[-1:] != '<>':
166
+ raise IOError('source code not available')
167
+ file = sourcefile if sourcefile else file
168
+
169
+ module = getmodule(object, file)
170
+ if module:
171
+ lines = linecache.getlines(file, module.__dict__)
172
+ else:
173
+ lines = linecache.getlines(file)
174
+
175
+ if not lines:
176
+ raise IOError('could not extract source code')
177
+
178
+ #FIXME: all below may fail if exec used (i.e. exec('f = lambda x:x') )
179
+ if ismodule(object):
180
+ return lines, 0
181
+
182
+ #NOTE: beneficial if search goes from end to start of buffer history
183
+ name = pat1 = obj = ''
184
+ pat2 = r'^(\s*@)'
185
+ # pat1b = r'^(\s*%s\W*=)' % name #FIXME: finds 'f = decorate(f)', not exec
186
+ if ismethod(object):
187
+ name = object.__name__
188
+ if name == '<lambda>': pat1 = r'(.*(?<!\w)lambda(:|\s))'
189
+ else: pat1 = r'^(\s*def\s)'
190
+ object = object.__func__
191
+ if isfunction(object):
192
+ name = object.__name__
193
+ if name == '<lambda>':
194
+ pat1 = r'(.*(?<!\w)lambda(:|\s))'
195
+ obj = object #XXX: better a copy?
196
+ else: pat1 = r'^(\s*def\s)'
197
+ object = object.__code__
198
+ if istraceback(object):
199
+ object = object.tb_frame
200
+ if isframe(object):
201
+ object = object.f_code
202
+ if iscode(object):
203
+ if not hasattr(object, 'co_firstlineno'):
204
+ raise IOError('could not find function definition')
205
+ stdin = object.co_filename == '<stdin>'
206
+ if stdin:
207
+ lnum = len(lines) - 1 # can't get lnum easily, so leverage pat
208
+ if not pat1: pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)'
209
+ else:
210
+ lnum = object.co_firstlineno - 1
211
+ pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)'
212
+ pat1 = re.compile(pat1); pat2 = re.compile(pat2)
213
+ #XXX: candidate_lnum = [n for n in range(lnum) if pat1.match(lines[n])]
214
+ while lnum > 0: #XXX: won't find decorators in <stdin> ?
215
+ line = lines[lnum]
216
+ if pat1.match(line):
217
+ if not stdin: break # co_firstlineno does the job
218
+ if name == '<lambda>': # hackery needed to confirm a match
219
+ if _matchlambda(obj, line): break
220
+ else: # not a lambda, just look for the name
221
+ if name in line: # need to check for decorator...
222
+ hats = 0
223
+ for _lnum in range(lnum-1,-1,-1):
224
+ if pat2.match(lines[_lnum]): hats += 1
225
+ else: break
226
+ lnum = lnum - hats
227
+ break
228
+ lnum = lnum - 1
229
+ return lines, lnum
230
+
231
+ try: # turn instances into classes
232
+ if not isclass(object) and isclass(type(object)): # __class__
233
+ object = object.__class__ #XXX: sometimes type(class) is better?
234
+ #XXX: we don't find how the instance was built
235
+ except AttributeError: pass
236
+ if isclass(object):
237
+ name = object.__name__
238
+ pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
239
+ # make some effort to find the best matching class definition:
240
+ # use the one with the least indentation, which is the one
241
+ # that's most probably not inside a function definition.
242
+ candidates = []
243
+ for i in range(len(lines)-1,-1,-1):
244
+ match = pat.match(lines[i])
245
+ if match:
246
+ # if it's at toplevel, it's already the best one
247
+ if lines[i][0] == 'c':
248
+ return lines, i
249
+ # else add whitespace to candidate list
250
+ candidates.append((match.group(1), i))
251
+ if candidates:
252
+ # this will sort by whitespace, and by line number,
253
+ # less whitespace first #XXX: should sort high lnum before low
254
+ candidates.sort()
255
+ return lines, candidates[0][1]
256
+ else:
257
+ raise IOError('could not find class definition')
258
+ raise IOError('could not find code object')
259
+
260
+
261
+ def getblocks(object, lstrip=False, enclosing=False, locate=False):
262
+ """Return a list of source lines and starting line number for an object.
263
+ Interactively-defined objects refer to lines in the interpreter's history.
264
+
265
+ If enclosing=True, then also return any enclosing code.
266
+ If lstrip=True, ensure there is no indentation in the first line of code.
267
+ If locate=True, then also return the line number for the block of code.
268
+
269
+ DEPRECATED: use 'getsourcelines' instead
270
+ """
271
+ lines, lnum = findsource(object)
272
+
273
+ if ismodule(object):
274
+ if lstrip: lines = _outdent(lines)
275
+ return ([lines], [0]) if locate is True else [lines]
276
+
277
+ #XXX: 'enclosing' means: closures only? or classes and files?
278
+ indent = indentsize(lines[lnum])
279
+ block = getblock(lines[lnum:]) #XXX: catch any TokenError here?
280
+
281
+ if not enclosing or not indent:
282
+ if lstrip: block = _outdent(block)
283
+ return ([block], [lnum]) if locate is True else [block]
284
+
285
+ pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))'; pat1 = re.compile(pat1)
286
+ pat2 = r'^(\s*@)'; pat2 = re.compile(pat2)
287
+ #pat3 = r'^(\s*class\s)'; pat3 = re.compile(pat3) #XXX: enclosing class?
288
+ #FIXME: bound methods need enclosing class (and then instantiation)
289
+ # *or* somehow apply a partial using the instance
290
+
291
+ skip = 0
292
+ line = 0
293
+ blocks = []; _lnum = []
294
+ target = ''.join(block)
295
+ while line <= lnum: #XXX: repeat lnum? or until line < lnum?
296
+ # see if starts with ('def','lambda') and contains our target block
297
+ if pat1.match(lines[line]):
298
+ if not skip:
299
+ try: code = getblock(lines[line:])
300
+ except TokenError: code = [lines[line]]
301
+ if indentsize(lines[line]) > indent: #XXX: should be >= ?
302
+ line += len(code) - skip
303
+ elif target in ''.join(code):
304
+ blocks.append(code) # save code block as the potential winner
305
+ _lnum.append(line - skip) # save the line number for the match
306
+ line += len(code) - skip
307
+ else:
308
+ line += 1
309
+ skip = 0
310
+ # find skip: the number of consecutive decorators
311
+ elif pat2.match(lines[line]):
312
+ try: code = getblock(lines[line:])
313
+ except TokenError: code = [lines[line]]
314
+ skip = 1
315
+ for _line in code[1:]: # skip lines that are decorators
316
+ if not pat2.match(_line): break
317
+ skip += 1
318
+ line += skip
319
+ # no match: reset skip and go to the next line
320
+ else:
321
+ line +=1
322
+ skip = 0
323
+
324
+ if not blocks:
325
+ blocks = [block]
326
+ _lnum = [lnum]
327
+ if lstrip: blocks = [_outdent(block) for block in blocks]
328
+ # return last match
329
+ return (blocks, _lnum) if locate is True else blocks
330
+
331
+
332
def getsourcelines(object, lstrip=False, enclosing=False):
    """Return (source_lines, start_lineno) for an object.
    Interactively-defined objects refer to lines in the interpreter's history.

    The argument may be a module, class, method, function, traceback, frame,
    or code object. The source is returned as a list of lines, and the line
    number locates the first line in the original source file. An IOError is
    raised if the source code cannot be retrieved, while a TypeError is
    raised for objects where the source code is unavailable (e.g. builtins).

    If lstrip=True, ensure there is no indentation in the first line of code.
    If enclosing=True, then also return any enclosing code."""
    # getblocks returns all candidate blocks plus their line numbers;
    # the last match is taken as the authoritative definition.
    blocks, starts = getblocks(object, lstrip=lstrip, enclosing=enclosing,
                               locate=True)
    return blocks[-1], starts[-1]
347
+
348
+
349
+ #NOTE: broke backward compatibility 4/16/14 (was lstrip=True, force=True)
350
def getsource(object, alias='', lstrip=False, enclosing=False, \
              force=False, builtin=False):
    """Return the text of the source code for an object. The source code for
    interactively-defined objects are extracted from the interpreter's history.

    The argument may be a module, class, method, function, traceback, frame,
    or code object. The source code is returned as a single string. An
    IOError is raised if the source code cannot be retrieved, while a
    TypeError is raised for objects where the source code is unavailable
    (e.g. builtins).

    If alias is provided, then add a line of code that renames the object.
    If lstrip=True, ensure there is no indentation in the first line of code.
    If enclosing=True, then also return any enclosing code.
    If force=True, catch (TypeError,IOError) and try to use import hooks.
    If builtin=True, force an import for any builtins
    """
    # hascode denotes a callable
    hascode = _hascode(object)
    # is a class instance type (and not in builtins)
    instance = _isinstance(object)

    # get source lines; if fail, try to 'force' an import
    try: # fails for builtins, and other assorted object types
        lines, lnum = getsourcelines(object, enclosing=enclosing)
    except (TypeError, IOError): # failed to get source, resort to import hooks
        if not force: # don't try to get types that findsource can't get
            raise
        if not getmodule(object): # get things like 'None' and '1'
            if not instance: return getimport(object, alias, builtin=builtin)
            # special handling (numpy arrays, ...)
            # emit "import ...; alias = name" so the instance is reachable
            _import = getimport(object, builtin=builtin)
            name = getname(object, force=True)
            _alias = "%s = " % alias if alias else ""
            if alias == name: _alias = ""
            return _import+_alias+"%s\n" % name
        else: #FIXME: could use a good bit of cleanup, since using getimport...
            if not instance: return getimport(object, alias, builtin=builtin)
            # now we are dealing with an instance...
            name = object.__class__.__name__
            module = object.__module__
            if module in ['builtins','__builtin__']:
                return getimport(object, alias, builtin=builtin)
            else: #FIXME: leverage getimport? use 'from module import name'?
                # import the class, then fall through to fetch its source
                lines, lnum = ["%s = __import__('%s', fromlist=['%s']).%s\n" % (name,module,name,name)], 0
                # NOTE(review): str.lstrip strips a *character set*, not a
                # prefix — fragile if class-name characters overlap ' =';
                # eval of a constructed string is also a code-exec hazard.
                obj = eval(lines[0].lstrip(name + ' = '))
                lines, lnum = getsourcelines(obj, enclosing=enclosing)

    # strip leading indent (helps ensure can be imported)
    if lstrip or alias:
        lines = _outdent(lines)

    # instantiate, if there's a nice repr #XXX: BAD IDEA???
    if instance: #and force: #XXX: move into findsource or getsourcelines ?
        # a '(' in the repr suggests an eval-able constructor-style repr
        if '(' in repr(object): lines.append('%r\n' % object)
       #else: #XXX: better to somehow to leverage __reduce__ ?
       #    reconstructor,args = object.__reduce__()
       #    _ = reconstructor(*args)
        else: # fall back to serialization #XXX: bad idea?
            #XXX: better not duplicate work? #XXX: better new/enclose=True?
            lines = dumpsource(object, alias='', new=force, enclose=False)
            lines, lnum = [line+'\n' for line in lines.split('\n')][:-1], 0
       #else: object.__code__ # raise AttributeError

    # add an alias to the source code
    if alias:
        if hascode:
            skip = 0
            for line in lines: # skip lines that are decorators
                if not line.startswith('@'): break
                skip += 1
            #XXX: use regex from findsource / getsourcelines ?
            if lines[skip].lstrip().startswith('def '): # we have a function
                if alias != object.__name__:
                    lines.append('\n%s = %s\n' % (alias, object.__name__))
            elif 'lambda ' in lines[skip]: # we have a lambda
                if alias != lines[skip].split('=')[0].strip():
                    lines[skip] = '%s = %s' % (alias, lines[skip])
            else: # ...try to use the object's name
                if alias != object.__name__:
                    lines.append('\n%s = %s\n' % (alias, object.__name__))
        else: # class or class instance
            if instance:
                # alias the last line (the instantiating repr appended above)
                if alias != lines[-1].split('=')[0].strip():
                    lines[-1] = ('%s = ' % alias) + lines[-1]
            else:
                name = getname(object, force=True) or object.__name__
                if alias != name:
                    lines.append('\n%s = %s\n' % (alias, name))
    return ''.join(lines)
440
+
441
+
442
+ def _hascode(object):
443
+ '''True if object has an attribute that stores it's __code__'''
444
+ return getattr(object,'__code__',None) or getattr(object,'func_code',None)
445
+
446
def _isinstance(object):
    '''True if object is a class instance type (and is not a builtin)'''
    # callables, classes, and modules are never treated as "instances"
    if _hascode(object) or isclass(object) or ismodule(object):
        return False
    # neither are interpreter-internal objects
    if istraceback(object) or isframe(object) or iscode(object):
        return False
    # special handling (numpy arrays, ...)
    # numpy instances have no module of their own, but their *type* does
    if not getmodule(object) and getmodule(type(object)).__name__ in ['numpy']:
        return True
   ## check if is instance of a builtin
   #if not getmodule(object) and getmodule(type(object)).__name__ in ['__builtin__','builtins']:
   #    return False
    # accept only objects whose type repr looks like a class
    # (old-style "<type 'instance'>" kept for historical py2 pickles)
    _types = ('<class ',"<type 'instance'>")
    if not repr(type(object)).startswith(_types): #FIXME: weak hack
        return False
    # reject builtins and 'array' (stdlib array.array gets special treatment)
    if not getmodule(object) or object.__module__ in ['builtins','__builtin__'] or getname(object, force=True) in ['array']:
        return False
    return True # by process of elimination... it's what we want
464
+
465
+
466
+ def _intypes(object):
467
+ '''check if object is in the 'types' module'''
468
+ import types
469
+ # allow user to pass in object or object.__name__
470
+ if type(object) is not type(''):
471
+ object = getname(object, force=True)
472
+ if object == 'ellipsis': object = 'EllipsisType'
473
+ return True if hasattr(types, object) else False
474
+
475
+
476
+ def _isstring(object): #XXX: isstringlike better?
477
+ '''check if object is a string-like type'''
478
+ return isinstance(object, (str, bytes))
479
+
480
+
481
def indent(code, spaces=4):
    """Indent a block of code with whitespace (default is 4 spaces).

    *code* is a (possibly multiline) string. *spaces* is either an integer
    count of spaces or a literal prefix string (e.g. a tab to indent with
    tabs). Lines indented less than the first line (typically blank lines)
    are left untouched; a whitespace-only final line is emptied so a
    trailing newline survives cleanly. Returns the indented string.
    """
    base = indentsize(code)  # indentation of the first line of the block
    from numbers import Integral
    if isinstance(spaces, Integral): spaces = ' '*spaces
    # blank lines (etc) need to be ignored
    lines = code.split('\n')
    for i in range(len(lines)):
        #FIXME: works... but shouldn't indent 2nd+ lines of multiline doc
        if base > indentsize(lines[i]): continue  # skip under-indented lines
        lines[i] = spaces+lines[i]
    # NOTE: dead local 'nspaces = indentsize(spaces)' removed (never used)
    if lines[-1].strip() == '': lines[-1] = ''
    return '\n'.join(lines)
510
+
511
+
512
+ def _outdent(lines, spaces=None, all=True):
513
+ '''outdent lines of code, accounting for docs and line continuations'''
514
+ indent = indentsize(lines[0])
515
+ if spaces is None or spaces > indent or spaces < 0: spaces = indent
516
+ for i in range(len(lines) if all else 1):
517
+ #FIXME: works... but shouldn't outdent 2nd+ lines of multiline doc
518
+ _indent = indentsize(lines[i])
519
+ if spaces > _indent: _spaces = _indent
520
+ else: _spaces = spaces
521
+ lines[i] = lines[i][_spaces:]
522
+ return lines
523
+
524
def outdent(code, spaces=None, all=True):
    """Outdent a block of code (default strips the first line's full indent).

    *spaces* caps how much whitespace is removed; out-of-range values fall
    back to the indentation of the first line. With all=False only the
    leading characters of the string are sliced off."""
    limit = indentsize(code)
    if spaces is None or not 0 <= spaces <= limit:
        spaces = limit
    #XXX: will this delete '\n' in some cases?
    if not all:
        return code[spaces:]
    return '\n'.join(_outdent(code.split('\n'), spaces=spaces, all=all))
531
+
532
+
533
# _wrap provides an wrapper to correctly exec and load into locals
# NOTE: these module-level snapshots are the exec target below; at module
# scope globals() and locals() are the same mapping.
__globals__ = globals()
__locals__ = locals()
def _wrap(f):
    """ encapsulate a function and it's __import__ """
    def func(*args, **kwds):
        # re-create f on demand: exec its importable source with alias '_',
        # binding '_' into this module's namespace, then call it.
        try:
            # _ = eval(getsource(f, force=True)) #XXX: safer but less robust
            exec(getimportable(f, alias='_'), __globals__, __locals__)
        except Exception:
            raise ImportError('cannot import name ' + f.__name__)
        # '_' was bound by the exec above (found via the module namespace)
        return _(*args, **kwds)
    # preserve the wrapped function's identity for introspection
    func.__name__ = f.__name__
    func.__doc__ = f.__doc__
    return func
548
+
549
+
550
def _enclose(object, alias=''): #FIXME: needs alias to hold returned object
    """Build source that wraps *object*'s code in a throwaway function
    enclosure, calls it, and deletes it — minimizing namespace pollution.
    If alias is given, the enclosure's return value is bound to it."""
    #XXX: dummy and stub should append a random string
    dummy = '__this_is_a_big_dummy_enclosing_function__'
    stub = '__this_is_a_stub_variable__'
    parts = ['def %s():\n' % dummy]
    # the object's source is indented into the function body, bound to the
    # stub name, and returned from the enclosure
    parts.append(indent(getsource(object, alias=stub, lstrip=True, force=True)))
    parts.append(indent('return %s\n' % stub))
    if alias:
        parts.append('%s = ' % alias)
    # invoke the enclosure once, then remove it from the namespace
    parts.append('%s(); del %s\n' % (dummy, dummy))
    #code += "globals().pop('%s',lambda :None)()\n" % dummy
    return ''.join(parts)
562
+
563
+
564
def dumpsource(object, alias='', new=False, enclose=True):
    """'dump to source', where the code includes a pickled object.

    If new=True and object is a class instance, then create a new
    instance using the unpacked class source code. If enclose, then
    create the object inside a function enclosure (thus minimizing
    any global namespace pollution).
    """
    from dill import dumps
    # embed the pickle bytes literally in the generated code
    pik = repr(dumps(object))
    code = 'import dill\n'
    if enclose:
        stub = '__this_is_a_stub_variable__' #XXX: *must* be same _enclose.stub
        pre = '%s = ' % stub
        new = False #FIXME: new=True doesn't work with enclose=True
    else:
        stub = alias
        pre = '%s = ' % stub if alias else alias

    # if a 'new' instance is not needed, then just dump and load
    if not new or not _isinstance(object):
        code += pre + 'dill.loads(%s)\n' % pik
    else: #XXX: other cases where source code is needed???
        # emit the class source, then load the pickle with the module name
        # in the pickle bytes rewritten to the executing module's __name__
        code += getsource(object.__class__, alias='', lstrip=True, force=True)
        mod = repr(object.__module__) # should have a module (no builtins here)
        code += pre + 'dill.loads(%s.replace(b%s,bytes(__name__,"UTF-8")))\n' % (pik,mod)
        #code += 'del %s' % object.__class__.__name__ #NOTE: kills any existing!

    if enclose:
        # generation of the 'enclosure'
        dummy = '__this_is_a_big_dummy_object__'
        dummy = _enclose(dummy, alias=alias)
        # hack to replace the 'dummy' with the 'real' code:
        # keep the enclosure's 'def' line and its last three lines
        # (return/call/delete), splicing the indented payload between them
        dummy = dummy.split('\n')
        code = dummy[0]+'\n' + indent(code) + '\n'.join(dummy[-3:])

    return code #XXX: better 'dumpsourcelines', returning list of lines?
601
+
602
+
603
def getname(obj, force=False, fqn=False): #XXX: throw(?) to raise error on fail?
    """get the name of the object. for lambdas, get the name of the pointer """
    if fqn: return '.'.join(_namespace(obj)) #NOTE: returns 'type'
    module = getmodule(obj)
    if not module: # things like "None" and "1"
        if not force: return None #NOTE: returns 'instance' NOT 'type' #FIXME?
        # handle some special cases
        # zero-dim numpy-style scalars: emit an eval-able constructor string
        # (assumes obj has .shape/.tolist when .dtype exists — numpy protocol)
        if hasattr(obj, 'dtype') and not obj.shape:
            return getname(obj.__class__) + "(" + repr(obj.tolist()) + ")"
        return repr(obj)
    try:
        #XXX: 'wrong' for decorators and curried functions ?
        #     if obj.func_closure: ...use logic from getimportable, etc ?
        name = obj.__name__
        if name == '<lambda>':
            # for a lambda, report the variable it was assigned to,
            # recovered from the source text left of the first '='
            return getsource(obj).split('=',1)[0].strip()
        # handle some special cases
        if module.__name__ in ['builtins','__builtin__']:
            if name == 'ellipsis': name = 'EllipsisType'
        return name
    except AttributeError: #XXX: better to just throw AttributeError ?
        if not force: return None
        name = repr(obj)
        # '<...>'-style reprs are not usable as names
        if name.startswith('<'): # or name.split('('):
            return None
        return name
629
+
630
+
631
def _namespace(obj):
    """_namespace(obj); return namespace hierarchy (as a list of names)
    for the given object.  For an instance, find the class hierarchy.

    For example:

    >>> from functools import partial
    >>> p = partial(int, base=2)
    >>> _namespace(p)
    [\'functools\', \'partial\']
    """
    # mostly for functions and modules and such
    #FIXME: 'wrong' for decorators and curried functions
    try: #XXX: needs some work and testing on different types
        # parse the module name out of str(module), i.e. "<module 'x' ...>"
        # NOTE(review): fragile string surgery — relies on repr format
        module = qual = str(getmodule(obj)).split()[1].strip('>').strip('"').strip("'")
        qual = qual.split('.')
        if ismodule(obj):
            return qual
        # get name of a lambda, function, etc
        name = getname(obj) or obj.__name__ # failing, raise AttributeError
        # check special cases (NoneType, ...)
        if module in ['builtins','__builtin__']: # BuiltinFunctionType
            if _intypes(name): return ['types'] + [name]
        return qual + [name] #XXX: can be wrong for some aliased objects
    except Exception: pass
    # special case: numpy.inf and numpy.nan (we don't want them as floats)
    if str(obj) in ['inf','nan','Inf','NaN']: # is more, but are they needed?
        return ['numpy'] + [str(obj)]
    # mostly for classes and class instances and such
    module = getattr(obj.__class__, '__module__', None)
    qual = str(obj.__class__)
    # extract the dotted path from "<class 'pkg.mod.Name'>"
    try: qual = qual[qual.index("'")+1:-2]
    except ValueError: pass # str(obj.__class__) made the 'try' unnecessary
    qual = qual.split(".")
    if module in ['builtins','__builtin__']:
        # check special cases (NoneType, Ellipsis, ...)
        if qual[-1] == 'ellipsis': qual[-1] = 'EllipsisType'
        if _intypes(qual[-1]): module = 'types' #XXX: BuiltinFunctionType
        qual = [module] + qual
    return qual
671
+
672
+
673
+ #NOTE: 05/25/14 broke backward compatibility: added 'alias' as 3rd argument
674
+ def _getimport(head, tail, alias='', verify=True, builtin=False):
675
+ """helper to build a likely import string from head and tail of namespace.
676
+ ('head','tail') are used in the following context: "from head import tail"
677
+
678
+ If verify=True, then test the import string before returning it.
679
+ If builtin=True, then force an import for builtins where possible.
680
+ If alias is provided, then rename the object on import.
681
+ """
682
+ # special handling for a few common types
683
+ if tail in ['Ellipsis', 'NotImplemented'] and head in ['types']:
684
+ head = len.__module__
685
+ elif tail in ['None'] and head in ['types']:
686
+ _alias = '%s = ' % alias if alias else ''
687
+ if alias == tail: _alias = ''
688
+ return _alias+'%s\n' % tail
689
+ # we don't need to import from builtins, so return ''
690
+ # elif tail in ['NoneType','int','float','long','complex']: return '' #XXX: ?
691
+ if head in ['builtins','__builtin__']:
692
+ # special cases (NoneType, Ellipsis, ...) #XXX: BuiltinFunctionType
693
+ if tail == 'ellipsis': tail = 'EllipsisType'
694
+ if _intypes(tail): head = 'types'
695
+ elif not builtin:
696
+ _alias = '%s = ' % alias if alias else ''
697
+ if alias == tail: _alias = ''
698
+ return _alias+'%s\n' % tail
699
+ else: pass # handle builtins below
700
+ # get likely import string
701
+ if not head: _str = "import %s" % tail
702
+ else: _str = "from %s import %s" % (head, tail)
703
+ _alias = " as %s\n" % alias if alias else "\n"
704
+ if alias == tail: _alias = "\n"
705
+ _str += _alias
706
+ # FIXME: fails on most decorators, currying, and such...
707
+ # (could look for magic __wrapped__ or __func__ attr)
708
+ # (could fix in 'namespace' to check obj for closure)
709
+ if verify and not head.startswith('dill.'):# weird behavior for dill
710
+ #print(_str)
711
+ try: exec(_str) #XXX: check if == obj? (name collision)
712
+ except ImportError: #XXX: better top-down or bottom-up recursion?
713
+ _head = head.rsplit(".",1)[0] #(or get all, then compare == obj?)
714
+ if not _head: raise
715
+ if _head != head:
716
+ _str = _getimport(_head, tail, alias, verify)
717
+ return _str
718
+
719
+
720
+ #XXX: rename builtin to force? vice versa? verify to force? (as in getsource)
721
+ #NOTE: 05/25/14 broke backward compatibility: added 'alias' as 2nd argument
722
def getimport(obj, alias='', verify=True, builtin=False, enclosing=False):
    """get the likely import string for the given object

    obj is the object to inspect
    If verify=True, then test the import string before returning it.
    If builtin=True, then force an import for builtins where possible.
    If enclosing=True, get the import for the outermost enclosing callable.
    If alias is provided, then rename the object on import.
    """
    if enclosing:
        from .detect import outermost
        _obj = outermost(obj)
        obj = _obj if _obj else obj
    # get the namespace
    qual = _namespace(obj)
    head = '.'.join(qual[:-1])
    tail = qual[-1]
    # for named things... with a nice repr #XXX: move into _namespace?
    try: # look for '<...>' and be mindful it might be in lists, dicts, etc...
        # if repr contains a '<...>' segment, the object is not named nicely
        name = repr(obj).split('<',1)[1].split('>',1)[1]
        name = None # we have a 'object'-style repr
    except Exception: # it's probably something 'importable'
        if head in ['builtins','__builtin__']:
            name = repr(obj) #XXX: catch [1,2], (1,2), set([1,2])... others?
        elif _isinstance(obj):
            name = getname(obj, force=True).split('(')[0]
        else:
            name = repr(obj).split('(')[0]
   #if not repr(obj).startswith('<'): name = repr(obj).split('(')[0]
   #else: name = None
    if name: # try using name instead of tail
        try: return _getimport(head, name, alias, verify, builtin)
        except ImportError: pass
        except SyntaxError:
            # name is not an identifier (e.g. a literal like '[1, 2]');
            # for builtins that literal is itself valid code
            if head in ['builtins','__builtin__']:
                _alias = '%s = ' % alias if alias else ''
                if alias == name: _alias = ''
                return _alias+'%s\n' % name
            else: pass
    try:
       #if type(obj) is type(abs): _builtin = builtin # BuiltinFunctionType
       #else: _builtin = False
        return _getimport(head, tail, alias, verify, builtin)
    except ImportError:
        raise # could do some checking against obj
    except SyntaxError:
        if head in ['builtins','__builtin__']:
            _alias = '%s = ' % alias if alias else ''
            if alias == tail: _alias = ''
            return _alias+'%s\n' % tail
        raise # could do some checking against obj
773
+
774
+
775
def _importable(obj, alias='', source=None, enclosing=False, force=True, \
                builtin=True, lstrip=True):
    """get an import string (or the source code) for the given object

    This function will attempt to discover the name of the object, or the repr
    of the object, or the source code for the object. To attempt to force
    discovery of the source code, use source=True, to attempt to force the
    use of an import, use source=False; otherwise an import will be sought
    for objects not defined in __main__. The intent is to build a string
    that can be imported from a python file. obj is the object to inspect.
    If alias is provided, then rename the object with the given alias.

    If source=True, use these options:
      If enclosing=True, then also return any enclosing code.
      If force=True, catch (TypeError,IOError) and try to use import hooks.
      If lstrip=True, ensure there is no indentation in the first line of code.

    If source=False, use these options:
      If enclosing=True, get the import for the outermost enclosing callable.
      If force=True, then don't test the import string before returning it.
      If builtin=True, then force an import for builtins where possible.
    """
    # default: prefer source for __main__ objects, imports otherwise
    if source is None:
        source = True if isfrommain(obj) else False
    if source: # first try to get the source
        try:
            return getsource(obj, alias, enclosing=enclosing, \
                             force=force, lstrip=lstrip, builtin=builtin)
        except Exception: pass
    try:
        if not _isinstance(obj):
            return getimport(obj, alias, enclosing=enclosing, \
                             verify=(not force), builtin=builtin)
        # first 'get the import', then 'get the instance'
        _import = getimport(obj, enclosing=enclosing, \
                            verify=(not force), builtin=builtin)
        name = getname(obj, force=True)
        if not name:
            # (sic: 'atribute' typo kept — this is a runtime string)
            raise AttributeError("object has no atribute '__name__'")
        _alias = "%s = " % alias if alias else ""
        if alias == name: _alias = ""
        return _import+_alias+"%s\n" % name

    except Exception: pass
    if not source: # try getsource, only if it hasn't been tried yet
        try:
            return getsource(obj, alias, enclosing=enclosing, \
                             force=force, lstrip=lstrip, builtin=builtin)
        except Exception: pass
    # get the name (of functions, lambdas, and classes)
    # or hope that obj can be built from the __repr__
    #XXX: what to do about class instances and such?
    obj = getname(obj, force=force)
    # we either have __repr__ or __name__ (or None)
    if not obj or obj.startswith('<'):
        # (sic: 'atribute' typo kept — this is a runtime string)
        raise AttributeError("object has no atribute '__name__'")
    _alias = '%s = ' % alias if alias else ''
    if alias == obj: _alias = ''
    return _alias+'%s\n' % obj
834
+ #XXX: possible failsafe... (for example, for instances when source=False)
835
+ # "import dill; result = dill.loads(<pickled_object>); # repr(<object>)"
836
+
837
def _closuredimport(func, alias='', builtin=False):
    """get import for closured objects; return a dict of 'name' and 'import'"""
    import re
    from .detect import freevars, outermost
    free_vars = freevars(func)
    func_vars = {}
    # split into 'funcs' and 'non-funcs'
    for name,obj in list(free_vars.items()):
        if not isfunction(obj): continue
        # get import for 'funcs'
        fobj = free_vars.pop(name)
        src = getsource(fobj)
        if src.lstrip().startswith('@'): # we have a decorator
            src = getimport(fobj, alias=alias, builtin=builtin)
        else: # we have to "hack" a bit... and maybe be lucky
            encl = outermost(func)
            # pattern: 'func = enclosing(fobj'
            pat = r'.*[\w\s]=\s*'+getname(encl)+r'\('+getname(fobj)
            mod = getname(getmodule(encl))
            #HACK: get file containing 'outer' function; is func there?
            lines,_ = findsource(encl)
            candidate = [line for line in lines if getname(encl) in line and \
                         re.match(pat, line)]
            if not candidate:
                mod = getname(getmodule(fobj))
                #HACK: get file containing 'inner' function; is func there?
                lines,_ = findsource(fobj)
                candidate = [line for line in lines \
                             if getname(fobj) in line and re.match(pat, line)]
            if not len(candidate): raise TypeError('import could not be found')
            # the last matching assignment wins; its LHS is the bound name
            candidate = candidate[-1]
            name = candidate.split('=',1)[0].split()[-1].strip()
            src = _getimport(mod, name, alias=alias, builtin=builtin)
        func_vars[name] = src
    if not func_vars:
        # no function-valued free variables: import the (enclosed) func itself
        name = outermost(func)
        mod = getname(getmodule(name))
        if not mod or name is func: # then it can be handled by getimport
            name = getname(func, force=True) #XXX: better key?
            src = getimport(func, alias=alias, builtin=builtin)
        else:
            lines,_ = findsource(name)
            # pattern: 'func = enclosing('
            candidate = [line for line in lines if getname(name) in line and \
                         re.match(r'.*[\w\s]=\s*'+getname(name)+r'\(', line)]
            if not len(candidate): raise TypeError('import could not be found')
            candidate = candidate[-1]
            name = candidate.split('=',1)[0].split()[-1].strip()
            src = _getimport(mod, name, alias=alias, builtin=builtin)
        func_vars[name] = src
    return func_vars
888
+
889
+ #XXX: should be able to use __qualname__
890
def _closuredsource(func, alias=''):
    """get source code for closured objects; return a dict of 'name'
    and 'code blocks'"""
    #FIXME: this entire function is a messy messy HACK
    #      - pollutes global namespace
    #      - fails if name of freevars are reused
    #      - can unnecessarily duplicate function code
    from .detect import freevars
    free_vars = freevars(func)
    func_vars = {}
    # split into 'funcs' and 'non-funcs'
    for name,obj in list(free_vars.items()):
        if not isfunction(obj):
            # get source for 'non-funcs'
            free_vars[name] = getsource(obj, force=True, alias=name)
            continue
        # get source for 'funcs'
        fobj = free_vars.pop(name)
        src = getsource(fobj, alias) # DO NOT include dependencies
        # if source doesn't start with '@', use name as the alias
        if not src.lstrip().startswith('@'): #FIXME: 'enclose' in dummy;
            src = importable(fobj,alias=name)# wrong ref 'name'
            org = getsource(func, alias, enclosing=False, lstrip=True)
            src = (src, org) # undecorated first, then target
        else: #NOTE: reproduces the code!
            org = getsource(func, enclosing=True, lstrip=False)
            src = importable(fobj, alias, source=True) # include dependencies
            src = (org, src) # target first, then decorated
        func_vars[name] = src
    # concatenated source for the non-function free variables
    src = ''.join(free_vars.values())
    if not func_vars: #FIXME: 'enclose' in dummy; wrong ref 'name'
        org = getsource(func, alias, force=True, enclosing=False, lstrip=True)
        src = (src, org) # variables first, then target
    else:
        src = (src, None) # just variables  (better '' instead of None?)
    # key None holds the free-variable block (consumed first by importable)
    func_vars[None] = src
    # FIXME: remove duplicates (however, order is important...)
    return func_vars
928
+
929
def importable(obj, alias='', source=None, builtin=True):
    """get an importable string (i.e. source code or the import string)
    for the given object, including any required objects from the enclosing
    and global scope

    This function will attempt to discover the name of the object, or the repr
    of the object, or the source code for the object. To attempt to force
    discovery of the source code, use source=True, to attempt to force the
    use of an import, use source=False; otherwise an import will be sought
    for objects not defined in __main__. The intent is to build a string
    that can be imported from a python file.

    obj is the object to inspect. If alias is provided, then rename the
    object with the given alias. If builtin=True, then force an import for
    builtins where possible.
    """
    #NOTE: we always 'force', and 'lstrip' as necessary
    #NOTE: for 'enclosing', use importable(outermost(obj))
    if source is None:
        source = True if isfrommain(obj) else False
    elif builtin and isbuiltin(obj):
        source = False
    # try one strategy, then the other; raise only once both have failed
    tried_source = tried_import = False
    while True:
        if not source: # we want an import
            try:
                if _isinstance(obj): # for instances, punt to _importable
                    return _importable(obj, alias, source=False, builtin=builtin)
                src = _closuredimport(obj, alias=alias, builtin=builtin)
                if len(src) == 0:
                    raise NotImplementedError('not implemented')
                if len(src) > 1:
                    raise NotImplementedError('not implemented')
                return list(src.values())[0]
            except Exception:
                if tried_source: raise
                tried_import = True
        # we want the source
        try:
            src = _closuredsource(obj, alias=alias)
            if len(src) == 0:
                raise NotImplementedError('not implemented')
            # groan... an inline code stitcher
            def _code_stitcher(block):
                "stitch together the strings in tuple 'block'"
                if block[0] and block[-1]: block = '\n'.join(block)
                elif block[0]: block = block[0]
                elif block[-1]: block = block[-1]
                else: block = ''
                return block
            # get free_vars first
            _src = _code_stitcher(src.pop(None))
            _src = [_src] if _src else []
            # get func_vars
            for xxx in src.values():
                xxx = _code_stitcher(xxx)
                if xxx: _src.append(xxx)
            # make a single source string
            if not len(_src):
                src = ''
            elif len(_src) == 1:
                src = _src[0]
            else:
                src = '\n'.join(_src)
            # get source code of objects referred to by obj in global scope
            from .detect import globalvars
            obj = globalvars(obj) #XXX: don't worry about alias? recurse? etc?
            obj = list(getsource(_obj,name,force=True) for (name,_obj) in obj.items() if not isbuiltin(_obj))
            obj = '\n'.join(obj) if obj else ''
            # combine all referred-to source (global then enclosing)
            if not obj: return src
            if not src: return obj
            return obj + src
        except Exception:
            if tried_import: raise
            tried_source = True
            source = not source  # flip strategy and loop once more
    # should never get here
    return
1008
+
1009
+
1010
# backward compatibility
def getimportable(obj, alias='', byname=True, explicit=False):
    # legacy API: byname=True -> prefer an import string; explicit maps to
    # builtin-forcing in importable()
    return importable(obj,alias,source=(not byname),builtin=explicit)
   #return outdent(_importable(obj,alias,source=(not byname),builtin=explicit))
def likely_import(obj, passive=False, explicit=False):
    # legacy API: passive=True skips verification of the import string
    return getimport(obj, verify=(not passive), builtin=explicit)
def _likely_import(first, last, passive=False, explicit=True):
    # legacy API wrapper around _getimport (head/tail spelled first/last)
    return _getimport(first, last, verify=(not passive), builtin=explicit)
_get_name = getname  # legacy alias
getblocks_from_history = getblocks  # legacy alias
1020
+
1021
+
1022
+
1023
+ # EOF
.venv/lib/python3.11/site-packages/dill/temp.py ADDED
@@ -0,0 +1,252 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ Methods for serialized objects (or source code) stored in temporary files
10
+ and file-like objects.
11
+ """
12
+ #XXX: better instead to have functions write to any given file-like object ?
13
+ #XXX: currently, all file-like objects are created by the function...
14
+
15
+ __all__ = ['dump_source', 'dump', 'dumpIO_source', 'dumpIO',\
16
+ 'load_source', 'load', 'loadIO_source', 'loadIO',\
17
+ 'capture']
18
+
19
+ import contextlib
20
+
21
+
22
@contextlib.contextmanager
def capture(stream='stdout'):
    """builds a context that temporarily replaces the given stream name

    The named attribute of ``sys`` (e.g. 'stdout' or 'stderr') is swapped
    for a fresh ``StringIO`` for the duration of the context, and restored
    afterward. The ``StringIO`` is yielded so output can be inspected.

    >>> with capture('stdout') as out:
    ...   print ("foo!")
    ...
    >>> print (out.getvalue())
    foo!

    """
    import sys
    from io import StringIO
    saved = getattr(sys, stream)          # remember the real stream
    setattr(sys, stream, StringIO())      # install the capture buffer
    try:
        yield getattr(sys, stream)
    finally:
        setattr(sys, stream, saved)       # always restore the original
41
+
42
+
43
def b(x): # deal with b'foo' versus 'foo'
    "encode a text string to latin-1 bytes"
    import codecs
    encoded, _length = codecs.latin_1_encode(x)
    return encoded
46
+
47
def load_source(file, **kwds):
    """load an object that was stored with dill.temp.dump_source

    file: filehandle
    alias: string name of stored object
    mode: mode to open the file, one of: {'r', 'rb'}

    >>> f = lambda x: x**2
    >>> pyfile = dill.temp.dump_source(f, alias='_f')
    >>> _f = dill.temp.load_source(pyfile)
    >>> _f(4)
    16
    """
    alias = kwds.pop('alias', None)
    mode = kwds.pop('mode', 'r')
    fname = getattr(file, 'name', file) # fname=file.name or fname=file (if str)
    # use a context manager so the handle is closed (was leaked before)
    with open(fname, mode=mode, **kwds) as handle:
        source = handle.read()
    if not alias:
        # recover the name from the "#NAME: <alias>" tag written by dump_source
        tag = source.strip().splitlines()[-1].split()
        if tag[0] != '#NAME:':
            stub = source.splitlines()[0]
            raise IOError("unknown name for code: %s" % stub)
        alias = tag[-1]
    local = {}
    exec(source, local)  # execute the stored source, then pull out the object
    _ = eval("%s" % alias, local)
    return _
74
+
75
def dump_source(object, **kwds):
    """write object source to a NamedTemporaryFile (instead of dill.dump)
    Loads with "import" or "dill.temp.load_source". Returns the filehandle.

    >>> f = lambda x: x**2
    >>> pyfile = dill.temp.dump_source(f, alias='_f')
    >>> _f = dill.temp.load_source(pyfile)
    >>> _f(4)
    16

    >>> f = lambda x: x**2
    >>> pyfile = dill.temp.dump_source(f, dir='.')
    >>> modulename = os.path.basename(pyfile.name).split('.py')[0]
    >>> exec('from %s import f as _f' % modulename)
    >>> _f(4)
    16

    Optional kwds:
        If 'alias' is specified, the object will be renamed to the given string.

        If 'prefix' is specified, the file name will begin with that prefix,
        otherwise a default prefix is used.

        If 'dir' is specified, the file will be created in that directory,
        otherwise a default directory is used.

        If 'text' is specified and true, the file is opened in text
        mode.  Else (the default) the file is opened in binary mode.  On
        some operating systems, this makes no difference.

    NOTE: Keep the return value for as long as you want your file to exist !
    """ #XXX: write a "load_source"?
    from .source import importable, getname
    import tempfile
    kwds.setdefault('delete', True)
    kwds.pop('suffix', '') # this is *always* '.py'
    alias = kwds.pop('alias', '') #XXX: include an alias so a name is known
    label = str(alias) or getname(object)
    tag = "\n#NAME: %s\n" % label  # name tag so load_source can find the object
    #XXX: assumes kwds['dir'] is writable and on $PYTHONPATH
    handle = tempfile.NamedTemporaryFile(suffix='.py', **kwds)
    handle.write(b(''.join([importable(object, alias=alias),tag])))
    handle.flush()
    return handle
119
+
120
def load(file, **kwds):
    """load an object that was stored with dill.temp.dump

    file: filehandle
    mode: mode to open the file, one of: {'r', 'rb'}

    >>> dumpfile = dill.temp.dump([1, 2, 3, 4, 5])
    >>> dill.temp.load(dumpfile)
    [1, 2, 3, 4, 5]
    """
    import dill as pickle
    mode = kwds.pop('mode', 'rb')
    name = getattr(file, 'name', file) # name=file.name or name=file (if str)
    # use a context manager so the handle is closed (was leaked before)
    with open(name, mode=mode, **kwds) as handle:
        return pickle.load(handle)
134
+
135
def dump(object, **kwds):
    """dill.dump of object to a NamedTemporaryFile.
    Loads with "dill.temp.load".  Returns the filehandle.

    >>> dumpfile = dill.temp.dump([1, 2, 3, 4, 5])
    >>> dill.temp.load(dumpfile)
    [1, 2, 3, 4, 5]

    Optional kwds:
        If 'suffix' is specified, the file name will end with that suffix,
        otherwise there will be no suffix.

        If 'prefix' is specified, the file name will begin with that prefix,
        otherwise a default prefix is used.

        If 'dir' is specified, the file will be created in that directory,
        otherwise a default directory is used.

        If 'text' is specified and true, the file is opened in text
        mode.  Else (the default) the file is opened in binary mode.  On
        some operating systems, this makes no difference.

    NOTE: Keep the return value for as long as you want your file to exist !
    """
    import dill as pickle
    import tempfile
    kwds.setdefault('delete', True)  # file is removed when the handle dies
    handle = tempfile.NamedTemporaryFile(**kwds)
    pickle.dump(object, handle)
    handle.flush()
    return handle
166
+
167
def loadIO(buffer, **kwds):
    """load an object that was stored with dill.temp.dumpIO

    buffer: buffer object

    >>> dumpfile = dill.temp.dumpIO([1, 2, 3, 4, 5])
    >>> dill.temp.loadIO(dumpfile)
    [1, 2, 3, 4, 5]
    """
    import dill as pickle
    from io import BytesIO
    # accept either a buffer object (use its getvalue) or raw bytes
    value = getattr(buffer, 'getvalue', buffer) # value or buffer.getvalue
    if value != buffer:
        value = value() # buffer.getvalue()
    return pickle.load(BytesIO(value))
181
+
182
def dumpIO(object, **kwds):
    """dill.dump of object to a buffer.
    Loads with "dill.temp.loadIO".  Returns the buffer object.

    >>> dumpfile = dill.temp.dumpIO([1, 2, 3, 4, 5])
    >>> dill.temp.loadIO(dumpfile)
    [1, 2, 3, 4, 5]
    """
    import dill as pickle
    from io import BytesIO
    stream = BytesIO()
    pickle.dump(object, stream)
    stream.flush()
    return stream
196
+
197
def loadIO_source(buffer, **kwds):
    """load an object that was stored with dill.temp.dumpIO_source

    buffer: buffer object
    alias: string name of stored object

    >>> f = lambda x:x**2
    >>> pyfile = dill.temp.dumpIO_source(f, alias='_f')
    >>> _f = dill.temp.loadIO_source(pyfile)
    >>> _f(4)
    16
    """
    alias = kwds.pop('alias', None)
    # accept either a buffer object (use its getvalue) or raw bytes
    source = getattr(buffer, 'getvalue', buffer) # source or buffer.getvalue
    if source != buffer:
        source = source() # buffer.getvalue()
    source = source.decode() # buffer to string
    if not alias:
        # recover the name from the "#NAME: <alias>" tag written by dumpIO_source
        tag = source.strip().splitlines()[-1].split()
        if tag[0] != '#NAME:':
            stub = source.splitlines()[0]
            raise IOError("unknown name for code: %s" % stub)
        alias = tag[-1]
    scope = {}
    exec(source, scope)  # execute the stored source, then pull out the object
    return eval("%s" % alias, scope)
223
+
224
def dumpIO_source(object, **kwds):
    """write object source to a buffer (instead of dill.dump)
    Loads by with dill.temp.loadIO_source.  Returns the buffer object.

    >>> f = lambda x:x**2
    >>> pyfile = dill.temp.dumpIO_source(f, alias='_f')
    >>> _f = dill.temp.loadIO_source(pyfile)
    >>> _f(4)
    16

    Optional kwds:
        If 'alias' is specified, the object will be renamed to the given string.
    """
    from .source import importable, getname
    from io import BytesIO
    alias = kwds.pop('alias', '') #XXX: include an alias so a name is known
    label = str(alias) or getname(object)
    tag = "\n#NAME: %s\n" % label  # name tag so loadIO_source can find the object
    #XXX: assumes kwds['dir'] is writable and on $PYTHONPATH
    stream = BytesIO()
    stream.write(b(''.join([importable(object, alias=alias),tag])))
    stream.flush()
    return stream
247
+
248
+
249
+ del contextlib
250
+
251
+
252
+ # EOF
.venv/lib/python3.11/site-packages/dill/tests/__main__.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2018-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD.  The full license text is available at:
#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
"""run every test_*.py script in this directory, each in its own subprocess,
and exit nonzero if any of them fails"""

import glob
import os
import sys
import subprocess as sp
python = sys.executable  # default: the interpreter running this script
try:
    import pox
    python = pox.which_python(version=True) or python  # prefer pox's versioned python if available
except ImportError:
    pass
shell = sys.platform[:3] == 'win'  # subprocess needs shell=True on Windows

suite = os.path.dirname(__file__) or os.path.curdir
tests = glob.glob(suite + os.path.sep + 'test_*.py')  # all sibling test scripts


if __name__ == '__main__':

    failed = 0
    for test in tests:
        p = sp.Popen([python, test], shell=shell).wait()  # p is the exit code
        if p:
            print('F', end='', flush=True)  # mark failure but keep running
            failed = 1
        else:
            print('.', end='', flush=True)
    print('')
    exit(failed)
.venv/lib/python3.11/site-packages/dill/tests/test_check.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ from dill import check
10
+ import sys
11
+
12
+ from dill.temp import capture
13
+
14
+
15
#FIXME: this doesn't catch output... it's from the internal call
def raise_check(func, **kwds):
    """run dill.check(func, **kwds); convert any failure (an exception, or a
    Traceback printed to captured stdout) into an AssertionError"""
    try:
        with capture('stdout') as out:
            check(func, **kwds)
    except Exception:
        e = sys.exc_info()[1]
        raise AssertionError(str(e))
    else:
        assert 'Traceback' not in out.getvalue()
    finally:
        out.close()


f = lambda x:x**2  # pickling target used by every test below


def test_simple(verbose=None):
    # default dill.check behavior
    raise_check(f, verbose=verbose)


def test_recurse(verbose=None):
    # pass recurse=True through to dill.check
    raise_check(f, recurse=True, verbose=verbose)


def test_byref(verbose=None):
    # pass byref=True through to dill.check
    raise_check(f, byref=True, verbose=verbose)


def test_protocol(verbose=None):
    # pass protocol=True through to dill.check
    raise_check(f, protocol=True, verbose=verbose)


def test_python(verbose=None):
    # pass python=None through to dill.check
    raise_check(f, python=None, verbose=verbose)


#TODO: test incompatible versions
#TODO: test dump failure
#TODO: test load failure


if __name__ == '__main__':
    test_simple()
    test_recurse()
    test_byref()
    test_protocol()
    test_python()
.venv/lib/python3.11/site-packages/dill/tests/test_classdef.py ADDED
@@ -0,0 +1,340 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import dill
10
+ from enum import EnumMeta
11
+ import sys
12
dill.settings['recurse'] = True

# test classdefs
class _class:
    def _method(self):
        pass
    def ok(self):
        return True

class _class2:
    def __call__(self):
        pass
    def ok(self):
        return True

class _newclass(object):
    def _method(self):
        pass
    def ok(self):
        return True

class _newclass2(object):
    def __call__(self):
        pass
    def ok(self):
        return True

class _meta(type):
    pass

def __call__(self):
    pass
def ok(self):
    return True

# a class built through a metaclass, with methods injected from module scope
_mclass = _meta("_mclass", (object,), {"__call__": __call__, "ok": ok})

del __call__
del ok

# one instance of each class flavor
o = _class()
oc = _class2()
n = _newclass()
nc = _newclass2()
m = _mclass()

if sys.hexversion < 0x03090000:
    import typing
    class customIntList(typing.List[int]):
        pass
else:
    # builtin generics (list[int]) exist from python 3.9
    class customIntList(list[int]):
        pass

# test pickles for class instances
def test_class_instances():
    assert dill.pickles(o)
    assert dill.pickles(oc)
    assert dill.pickles(n)
    assert dill.pickles(nc)
    assert dill.pickles(m)

def test_class_objects():
    clslist = [_class,_class2,_newclass,_newclass2,_mclass]
    objlist = [o,oc,n,nc,m]
    _clslist = [dill.dumps(obj) for obj in clslist]
    _objlist = [dill.dumps(obj) for obj in objlist]

    # remove the originals from the module so loads must rebuild them
    for obj in clslist:
        globals().pop(obj.__name__)
    del clslist
    for obj in ['o','oc','n','nc']:
        globals().pop(obj)
    del objlist
    del obj

    for obj,cls in zip(_objlist,_clslist):
        _cls = dill.loads(cls)
        _obj = dill.loads(obj)
        assert _obj.ok()
        assert _cls.ok(_cls())
        if _cls.__name__ == "_mclass":
            assert type(_cls).__name__ == "_meta"

# test NoneType
def test_specialtypes():
    assert dill.pickles(type(None))
    assert dill.pickles(type(NotImplemented))
    assert dill.pickles(type(Ellipsis))
    assert dill.pickles(type(EnumMeta))

from collections import namedtuple
Z = namedtuple("Z", ['a','b'])
Zi = Z(0,1)
X = namedtuple("Y", ['a','b'])
X.__name__ = "X"
X.__qualname__ = "X" #XXX: name must 'match' or fails to pickle
Xi = X(0,1)
Bad = namedtuple("FakeName", ['a','b'])
Badi = Bad(0,1)
Defaults = namedtuple('Defaults', ['x', 'y'], defaults=[1])
Defaultsi = Defaults(2)

# test namedtuple
def test_namedtuple():
    assert Z is dill.loads(dill.dumps(Z))
    assert Zi == dill.loads(dill.dumps(Zi))
    assert X is dill.loads(dill.dumps(X))
    assert Xi == dill.loads(dill.dumps(Xi))
    assert Defaults is dill.loads(dill.dumps(Defaults))
    assert Defaultsi == dill.loads(dill.dumps(Defaultsi))
    # mismatched name: round-trip yields a copy, not the same object
    assert Bad is not dill.loads(dill.dumps(Bad))
    assert Bad._fields == dill.loads(dill.dumps(Bad))._fields
    assert tuple(Badi) == tuple(dill.loads(dill.dumps(Badi)))

    class A:
        class B(namedtuple("C", ["one", "two"])):
            '''docstring'''
        B.__module__ = 'testing'

    a = A()
    assert dill.copy(a)

    assert dill.copy(A.B).__name__ == 'B'
    assert dill.copy(A.B).__qualname__.endswith('.<locals>.A.B')
    assert dill.copy(A.B).__doc__ == 'docstring'
    assert dill.copy(A.B).__module__ == 'testing'

    from typing import NamedTuple

    def A():
        class B(NamedTuple):
            x: int
        return B

    assert type(dill.copy(A()(8))).__qualname__ == type(A()(8)).__qualname__

def test_dtype():
    try:
        import numpy as np

        dti = np.dtype('int')
        assert np.dtype == dill.copy(np.dtype)
        assert dti == dill.copy(dti)
    except ImportError: pass


def test_array_nested():
    try:
        import numpy as np

        x = np.array([1])
        y = (x,)
        assert y == dill.copy(y)

    except ImportError: pass


def test_array_subclass():
    try:
        import numpy as np

        class TestArray(np.ndarray):
            def __new__(cls, input_array, color):
                obj = np.asarray(input_array).view(cls)
                obj.color = color
                return obj
            def __array_finalize__(self, obj):
                if obj is None:
                    return
                if isinstance(obj, type(self)):
                    self.color = obj.color
            def __getnewargs__(self):
                return np.asarray(self), self.color

        a1 = TestArray(np.zeros(100), color='green')
        if not dill._dill.IS_PYPY:
            assert dill.pickles(a1)
            assert a1.__dict__ == dill.copy(a1).__dict__

        a2 = a1[0:9]
        if not dill._dill.IS_PYPY:
            assert dill.pickles(a2)
            assert a2.__dict__ == dill.copy(a2).__dict__

        class TestArray2(np.ndarray):
            color = 'blue'

        a3 = TestArray2([1,2,3,4,5])
        a3.color = 'green'
        if not dill._dill.IS_PYPY:
            assert dill.pickles(a3)
            assert a3.__dict__ == dill.copy(a3).__dict__

    except ImportError: pass


def test_method_decorator():
    class A(object):
        @classmethod
        def test(cls):
            pass

    a = A()

    res = dill.dumps(a)
    new_obj = dill.loads(res)
    new_obj.__class__.test()

# test slots
class Y(object):
    __slots__ = ('y', '__weakref__')
    def __init__(self, y):
        self.y = y

value = 123
y = Y(value)

class Y2(object):
    __slots__ = 'y'
    def __init__(self, y):
        self.y = y

def test_slots():
    assert dill.pickles(Y)
    assert dill.pickles(y)
    assert dill.pickles(Y.y)
    assert dill.copy(y).y == value
    assert dill.copy(Y2(value)).y == value

def test_origbases():
    assert dill.copy(customIntList).__orig_bases__ == customIntList.__orig_bases__

def test_attr():
    # requires the third-party 'attr' package; not run from __main__ below
    import attr
    @attr.s
    class A:
        a = attr.ib()

    v = A(1)
    assert dill.copy(v) == v

def test_metaclass():
    class metaclass_with_new(type):
        def __new__(mcls, name, bases, ns, **kwds):
            cls = super().__new__(mcls, name, bases, ns, **kwds)
            assert mcls is not None
            assert cls.method(mcls)
            return cls
        def method(cls, mcls):
            return isinstance(cls, mcls)

    l = locals()
    exec("""class subclass_with_new(metaclass=metaclass_with_new):
    def __new__(cls):
        self = super().__new__(cls)
        return self""", None, l)
    subclass_with_new = l['subclass_with_new']

    assert dill.copy(subclass_with_new())

def test_enummeta():
    from http import HTTPStatus
    import enum
    assert dill.copy(HTTPStatus.OK) is HTTPStatus.OK
    assert dill.copy(enum.EnumMeta) is enum.EnumMeta

def test_inherit(): #NOTE: see issue #612
    class Foo:
        w = 0
        x = 1
        y = 1.1
        a = ()
        b = (1,)
        n = None

    class Bar(Foo):
        w = 2
        x = 1
        y = 1.1
        z = 0.2
        a = ()
        b = (1,)
        c = (2,)
        n = None

    Baz = dill.copy(Bar)

    import platform
    is_pypy = platform.python_implementation() == 'PyPy'
    assert Bar.__dict__ == Baz.__dict__
    # ints
    assert 'w' in Bar.__dict__ and 'w' in Baz.__dict__
    assert Bar.__dict__['w'] is Baz.__dict__['w']
    assert 'x' in Bar.__dict__ and 'x' in Baz.__dict__
    assert Bar.__dict__['x'] is Baz.__dict__['x']
    # floats
    assert 'y' in Bar.__dict__ and 'y' in Baz.__dict__
    same = Bar.__dict__['y'] is Baz.__dict__['y']
    assert same if is_pypy else not same
    assert 'z' in Bar.__dict__ and 'z' in Baz.__dict__
    same = Bar.__dict__['z'] is Baz.__dict__['z']
    assert same if is_pypy else not same
    # tuples
    assert 'a' in Bar.__dict__ and 'a' in Baz.__dict__
    assert Bar.__dict__['a'] is Baz.__dict__['a']
    assert 'b' in Bar.__dict__ and 'b' in Baz.__dict__
    assert Bar.__dict__['b'] is not Baz.__dict__['b']
    assert 'c' in Bar.__dict__ and 'c' in Baz.__dict__
    assert Bar.__dict__['c'] is not Baz.__dict__['c']
    # None
    assert 'n' in Bar.__dict__ and 'n' in Baz.__dict__
    assert Bar.__dict__['n'] is Baz.__dict__['n']


if __name__ == '__main__':
    test_class_instances()
    test_class_objects()
    test_specialtypes()
    test_namedtuple()
    test_dtype()
    test_array_nested()
    test_array_subclass()
    test_method_decorator()
    test_slots()
    test_origbases()
    test_metaclass()
    test_enummeta()
    test_inherit()
.venv/lib/python3.11/site-packages/dill/tests/test_detect.py ADDED
@@ -0,0 +1,160 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ from dill.detect import baditems, badobjects, badtypes, errors, parent, at, globalvars
10
+ from dill import settings
11
+ from dill._dill import IS_PYPY
12
+ from pickle import PicklingError
13
+
14
+ import inspect
15
+ import sys
16
+ import os
17
+
18
def test_bad_things():
    f = inspect.currentframe()  # a frame object: known unpicklable
    assert baditems(f) == [f]
    #assert baditems(globals()) == [f] #XXX
    assert badobjects(f) is f
    assert badtypes(f) == type(f)
    assert type(errors(f)) is TypeError
    d = badtypes(f, 1)
    assert isinstance(d, dict)
    assert list(badobjects(f, 1).keys()) == list(d.keys())
    assert list(errors(f, 1).keys()) == list(d.keys())
    s = set([(err.__class__.__name__,err.args[0]) for err in list(errors(f, 1).values())])
    a = dict(s)
    if not os.environ.get('COVERAGE'): #XXX: travis-ci
        proxy = 0 if type(f.f_locals) is dict else 1
        assert len(s) == len(a) + proxy # TypeError (and possibly PicklingError)
        n = 2
        assert len(a) is n if 'PicklingError' in a.keys() else n-1

def test_parent():
    x = [4,5,6,7]
    listiter = iter(x)
    obj = parent(listiter, list)
    assert obj is x

    if IS_PYPY: assert parent(obj, int) is None
    else: assert parent(obj, int) is x[-1] # python oddly? finds last int
    assert at(id(at)) is at

# module-level globals referenced by the closures below
a, b, c = 1, 2, 3

def squared(x):
    return a+x**2

def foo(x):
    def bar(y):
        return squared(x)+y
    return bar

class _class:
    def _method(self):
        pass
    def ok(self):
        return True

def test_globals():
    # nested closures referencing the module globals a, b, c
    # NOTE(review): reconstructed nesting — confirm against upstream dill
    def f():
        a
        def g():
            b
            def h():
                c
    assert globalvars(f) == dict(a=1, b=2, c=3)

    res = globalvars(foo, recurse=True)
    assert set(res) == set(['squared', 'a'])
    res = globalvars(foo, recurse=False)
    assert res == {}
    zap = foo(2)
    res = globalvars(zap, recurse=True)
    assert set(res) == set(['squared', 'a'])
    res = globalvars(zap, recurse=False)
    assert set(res) == set(['squared'])
    del zap
    res = globalvars(squared)
    assert set(res) == set(['a'])
    # FIXME: should find referenced __builtins__
    #res = globalvars(_class, recurse=True)
    #assert set(res) == set(['True'])
    #res = globalvars(_class, recurse=False)
    #assert res == {}
    #res = globalvars(_class.ok, recurse=True)
    #assert set(res) == set(['True'])
    #res = globalvars(_class.ok, recurse=False)
    #assert set(res) == set(['True'])


#98 dill ignores __getstate__ in interactive lambdas
bar = [0]  # counter incremented by Foo.__getstate__

class Foo(object):
    def __init__(self):
        pass
    def __getstate__(self):
        bar[0] = bar[0]+1
        return {}
    def __setstate__(self, data):
        pass

f = Foo()

def test_getstate():
    from dill import dumps, loads
    dumps(f)
    b = bar[0]
    dumps(lambda: f, recurse=False) # doesn't call __getstate__
    assert bar[0] == b
    dumps(lambda: f, recurse=True) # calls __getstate__
    assert bar[0] == b + 1

#97 serialize lambdas in test files
def test_deleted():
    global sin
    from dill import dumps, loads
    from math import sin, pi

    def sinc(x):
        return sin(x)/x

    settings['recurse'] = True
    _sinc = dumps(sinc)
    sin = globals().pop('sin')
    sin = 1
    del sin
    sinc_ = loads(_sinc) # no NameError... pickling preserves 'sin'
    res = sinc_(1)
    from math import sin
    assert sinc(1) == res


def test_lambdify():
    try:
        from sympy import symbols, lambdify
    except ImportError:
        return
    settings['recurse'] = True
    x = symbols("x")
    y = x**2
    f = lambdify([x], y)
    z = min
    d = globals()
    globalvars(f, recurse=True, builtin=True)
    # globalvars must not clobber builtins or the module globals
    assert z is min
    assert d is globals()


if __name__ == '__main__':
    test_bad_things()
    test_parent()
    test_globals()
    test_getstate()
    test_deleted()
    test_lambdify()
.venv/lib/python3.11/site-packages/dill/tests/test_diff.py ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ from dill import __diff as diff
10
+
11
import sys
IS_PYPY = not hasattr(sys, 'getrefcount')  # PyPy does not provide sys.getrefcount

class A:
    pass

def test_diff():
    # change tracking through an attribute chain a -> b -> c
    a = A()
    b = A()
    c = A()
    a.a = b
    b.a = c
    diff.memorise(a)
    assert not diff.has_changed(a)
    c.a = 1
    assert diff.has_changed(a)
    diff.memorise(c, force=True)
    assert not diff.has_changed(a)
    c.a = 2
    assert diff.has_changed(a)
    changed = diff.whats_changed(a)
    assert list(changed[0].keys()) == ["a"]
    assert not changed[1]

    # change tracking through nested lists
    a2 = []
    b2 = [a2]
    c2 = [b2]
    diff.memorise(c2)
    assert not diff.has_changed(c2)
    a2.append(1)
    assert diff.has_changed(c2)
    changed = diff.whats_changed(c2)
    assert changed[0] == {}
    assert changed[1]

    # change tracking through nested dicts
    a3 = {}
    b3 = {1: a3}
    c3 = {1: b3}
    diff.memorise(c3)
    assert not diff.has_changed(c3)
    a3[1] = 1
    assert diff.has_changed(c3)
    changed = diff.whats_changed(c3)
    assert changed[0] == {}
    assert changed[1]

    if not IS_PYPY:
        import abc
        # make sure the "_abc_invaldation_counter" doesn't make test fail
        diff.memorise(abc.ABCMeta, force=True)
        assert not diff.has_changed(abc)
        abc.ABCMeta.zzz = 1
        assert diff.has_changed(abc)
        changed = diff.whats_changed(abc)
        assert list(changed[0].keys()) == ["ABCMeta"]
        assert not changed[1]

    '''
    import Queue
    diff.memorise(Queue, force=True)
    assert not diff.has_changed(Queue)
    Queue.Queue.zzz = 1
    assert diff.has_changed(Queue)
    changed = diff.whats_changed(Queue)
    assert list(changed[0].keys()) == ["Queue"]
    assert not changed[1]

    import math
    diff.memorise(math, force=True)
    assert not diff.has_changed(math)
    math.zzz = 1
    assert diff.has_changed(math)
    changed = diff.whats_changed(math)
    assert list(changed[0].keys()) == ["zzz"]
    assert not changed[1]
    '''

    # attribute deletion is detected as a change
    a = A()
    b = A()
    c = A()
    a.a = b
    b.a = c
    diff.memorise(a)
    assert not diff.has_changed(a)
    c.a = 1
    assert diff.has_changed(a)
    diff.memorise(c, force=True)
    assert not diff.has_changed(a)
    del c.a
    assert diff.has_changed(a)
    changed = diff.whats_changed(a)
    assert list(changed[0].keys()) == ["a"]
    assert not changed[1]


if __name__ == '__main__':
    test_diff()
.venv/lib/python3.11/site-packages/dill/tests/test_extendpickle.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import dill as pickle
10
+ from io import BytesIO as StringIO
11
+
12
+
13
def my_fn(x):
    "multiply by 17 (target function for the lambda pickled below)"
    return x * 17


def test_extend():
    # round-trip a lambda through dill's Pickler/Unpickler classes
    obj = lambda : my_fn(34)
    assert obj() == 578

    obj_io = StringIO()
    pickler = pickle.Pickler(obj_io)
    pickler.dump(obj)

    obj_str = obj_io.getvalue()

    obj2_io = StringIO(obj_str)
    unpickler = pickle.Unpickler(obj2_io)
    obj2 = unpickler.load()

    assert obj2() == 578


def test_isdill():
    # is_dill distinguishes dill's Pickler from the stock pickler
    obj_io = StringIO()
    pickler = pickle.Pickler(obj_io)
    assert pickle._dill.is_dill(pickler) is True

    pickler = pickle._dill.StockPickler(obj_io)
    assert pickle._dill.is_dill(pickler) is False

    try:
        # multiprocess's ForkingPickler subclasses dill's Pickler;
        # only counted as dill when child classes are allowed
        import multiprocess as mp
        pickler = mp.reduction.ForkingPickler(obj_io)
        assert pickle._dill.is_dill(pickler, child=True) is True
        assert pickle._dill.is_dill(pickler, child=False) is False
    except Exception:
        pass


if __name__ == '__main__':
    test_extend()
    test_isdill()
.venv/lib/python3.11/site-packages/dill/tests/test_fglobals.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2021-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+
8
+ import dill
9
+ dill.settings['recurse'] = True
10
+
11
+ def get_fun_with_strftime():
12
+ def fun_with_strftime():
13
+ import datetime
14
+ return datetime.datetime.strptime("04-01-1943", "%d-%m-%Y").strftime(
15
+ "%Y-%m-%d %H:%M:%S"
16
+ )
17
+ return fun_with_strftime
18
+
19
+
20
+ def get_fun_with_strftime2():
21
+ import datetime
22
+ return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
23
+
24
+
25
+ def test_doc_dill_issue_219():
26
+ back_fn = dill.loads(dill.dumps(get_fun_with_strftime()))
27
+ assert back_fn() == "1943-01-04 00:00:00"
28
+ dupl = dill.loads(dill.dumps(get_fun_with_strftime2))
29
+ assert dupl() == get_fun_with_strftime2()
30
+
31
+
32
+ def get_fun_with_internal_import():
33
+ def fun_with_import():
34
+ import re
35
+ return re.compile("$")
36
+ return fun_with_import
37
+
38
+
39
+ def test_method_with_internal_import_should_work():
40
+ import re
41
+ back_fn = dill.loads(dill.dumps(get_fun_with_internal_import()))
42
+ import inspect
43
+ if hasattr(inspect, 'getclosurevars'):
44
+ vars = inspect.getclosurevars(back_fn)
45
+ assert vars.globals == {}
46
+ assert vars.nonlocals == {}
47
+ assert back_fn() == re.compile("$")
48
+ assert "__builtins__" in back_fn.__globals__
49
+
50
+
51
+ if __name__ == "__main__":
52
+ import sys
53
+ if (sys.version_info[:3] != (3,10,0) or sys.version_info[3] != 'alpha'):
54
+ test_doc_dill_issue_219()
55
+ test_method_with_internal_import_should_work()
.venv/lib/python3.11/site-packages/dill/tests/test_file.py ADDED
@@ -0,0 +1,500 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import os
10
+ import sys
11
+ import string
12
+ import random
13
+
14
+ import dill
15
+
16
+
17
+ dill.settings['recurse'] = True
18
+
19
+ fname = "_test_file.txt"
20
+ rand_chars = list(string.ascii_letters) + ["\n"] * 40 # bias newline
21
+
22
+ buffer_error = ValueError("invalid buffer size")
23
+ dne_error = FileNotFoundError("[Errno 2] No such file or directory: '%s'" % fname)
24
+
25
+
26
+ def write_randomness(number=200):
27
+ f = open(fname, "w")
28
+ for i in range(number):
29
+ f.write(random.choice(rand_chars))
30
+ f.close()
31
+ f = open(fname, "r")
32
+ contents = f.read()
33
+ f.close()
34
+ return contents
35
+
36
+
37
+ def trunc_file():
38
+ open(fname, "w").close()
39
+
40
+
41
+ def throws(op, args, exc):
42
+ try:
43
+ op(*args)
44
+ except type(exc):
45
+ return sys.exc_info()[1].args == exc.args
46
+ else:
47
+ return False
48
+
49
+
50
+ def teardown_module():
51
+ if os.path.exists(fname):
52
+ os.remove(fname)
53
+
54
+
55
+ def bench(strictio, fmode, skippypy):
56
+ import platform
57
+ if skippypy and platform.python_implementation() == 'PyPy':
58
+ # Skip for PyPy...
59
+ return
60
+
61
+ # file exists, with same contents
62
+ # read
63
+
64
+ write_randomness()
65
+
66
+ f = open(fname, "r")
67
+ _f = dill.loads(dill.dumps(f, fmode=fmode))#, strictio=strictio))
68
+ assert _f.mode == f.mode
69
+ assert _f.tell() == f.tell()
70
+ assert _f.read() == f.read()
71
+ f.close()
72
+ _f.close()
73
+
74
+ # write
75
+
76
+ f = open(fname, "w")
77
+ f.write("hello")
78
+ f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio)
79
+ f1mode = f.mode
80
+ ftell = f.tell()
81
+ f.close()
82
+ f2 = dill.loads(f_dumped) #FIXME: fails due to pypy/issues/1233
83
+ # TypeError: expected py_object instance instead of str
84
+ f2mode = f2.mode
85
+ f2tell = f2.tell()
86
+ f2name = f2.name
87
+ f2.write(" world!")
88
+ f2.close()
89
+
90
+ if fmode == dill.HANDLE_FMODE:
91
+ assert open(fname).read() == " world!"
92
+ assert f2mode == f1mode
93
+ assert f2tell == 0
94
+ elif fmode == dill.CONTENTS_FMODE:
95
+ assert open(fname).read() == "hello world!"
96
+ assert f2mode == f1mode
97
+ assert f2tell == ftell
98
+ assert f2name == fname
99
+ elif fmode == dill.FILE_FMODE:
100
+ assert open(fname).read() == "hello world!"
101
+ assert f2mode == f1mode
102
+ assert f2tell == ftell
103
+ else:
104
+ raise RuntimeError("Unknown file mode '%s'" % fmode)
105
+
106
+ # append
107
+
108
+ trunc_file()
109
+
110
+ f = open(fname, "a")
111
+ f.write("hello")
112
+ f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio)
113
+ f1mode = f.mode
114
+ ftell = f.tell()
115
+ f.close()
116
+ f2 = dill.loads(f_dumped)
117
+ f2mode = f2.mode
118
+ f2tell = f2.tell()
119
+ f2.write(" world!")
120
+ f2.close()
121
+
122
+ assert f2mode == f1mode
123
+ if fmode == dill.CONTENTS_FMODE:
124
+ assert open(fname).read() == "hello world!"
125
+ assert f2tell == ftell
126
+ elif fmode == dill.HANDLE_FMODE:
127
+ assert open(fname).read() == "hello world!"
128
+ assert f2tell == ftell
129
+ elif fmode == dill.FILE_FMODE:
130
+ assert open(fname).read() == "hello world!"
131
+ assert f2tell == ftell
132
+ else:
133
+ raise RuntimeError("Unknown file mode '%s'" % fmode)
134
+
135
+ # file exists, with different contents (smaller size)
136
+ # read
137
+
138
+ write_randomness()
139
+
140
+ f = open(fname, "r")
141
+ fstr = f.read()
142
+ f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio)
143
+ f1mode = f.mode
144
+ ftell = f.tell()
145
+ f.close()
146
+ _flen = 150
147
+ _fstr = write_randomness(number=_flen)
148
+
149
+ if strictio: # throw error if ftell > EOF
150
+ assert throws(dill.loads, (f_dumped,), buffer_error)
151
+ else:
152
+ f2 = dill.loads(f_dumped)
153
+ assert f2.mode == f1mode
154
+ if fmode == dill.CONTENTS_FMODE:
155
+ assert f2.tell() == _flen
156
+ assert f2.read() == ""
157
+ f2.seek(0)
158
+ assert f2.read() == _fstr
159
+ assert f2.tell() == _flen # 150
160
+ elif fmode == dill.HANDLE_FMODE:
161
+ assert f2.tell() == 0
162
+ assert f2.read() == _fstr
163
+ assert f2.tell() == _flen # 150
164
+ elif fmode == dill.FILE_FMODE:
165
+ assert f2.tell() == ftell # 200
166
+ assert f2.read() == ""
167
+ f2.seek(0)
168
+ assert f2.read() == fstr
169
+ assert f2.tell() == ftell # 200
170
+ else:
171
+ raise RuntimeError("Unknown file mode '%s'" % fmode)
172
+ f2.close()
173
+
174
+ # write
175
+
176
+ write_randomness()
177
+
178
+ f = open(fname, "w")
179
+ f.write("hello")
180
+ f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio)
181
+ f1mode = f.mode
182
+ ftell = f.tell()
183
+ f.close()
184
+ fstr = open(fname).read()
185
+
186
+ f = open(fname, "w")
187
+ f.write("h")
188
+ _ftell = f.tell()
189
+ f.close()
190
+
191
+ if strictio: # throw error if ftell > EOF
192
+ assert throws(dill.loads, (f_dumped,), buffer_error)
193
+ else:
194
+ f2 = dill.loads(f_dumped)
195
+ f2mode = f2.mode
196
+ f2tell = f2.tell()
197
+ f2.write(" world!")
198
+ f2.close()
199
+ if fmode == dill.CONTENTS_FMODE:
200
+ assert open(fname).read() == "h world!"
201
+ assert f2mode == f1mode
202
+ assert f2tell == _ftell
203
+ elif fmode == dill.HANDLE_FMODE:
204
+ assert open(fname).read() == " world!"
205
+ assert f2mode == f1mode
206
+ assert f2tell == 0
207
+ elif fmode == dill.FILE_FMODE:
208
+ assert open(fname).read() == "hello world!"
209
+ assert f2mode == f1mode
210
+ assert f2tell == ftell
211
+ else:
212
+ raise RuntimeError("Unknown file mode '%s'" % fmode)
213
+ f2.close()
214
+
215
+ # append
216
+
217
+ trunc_file()
218
+
219
+ f = open(fname, "a")
220
+ f.write("hello")
221
+ f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio)
222
+ f1mode = f.mode
223
+ ftell = f.tell()
224
+ f.close()
225
+ fstr = open(fname).read()
226
+
227
+ f = open(fname, "w")
228
+ f.write("h")
229
+ _ftell = f.tell()
230
+ f.close()
231
+
232
+ if strictio: # throw error if ftell > EOF
233
+ assert throws(dill.loads, (f_dumped,), buffer_error)
234
+ else:
235
+ f2 = dill.loads(f_dumped)
236
+ f2mode = f2.mode
237
+ f2tell = f2.tell()
238
+ f2.write(" world!")
239
+ f2.close()
240
+ assert f2mode == f1mode
241
+ if fmode == dill.CONTENTS_FMODE:
242
+ # position of writes cannot be changed on some OSs
243
+ assert open(fname).read() == "h world!"
244
+ assert f2tell == _ftell
245
+ elif fmode == dill.HANDLE_FMODE:
246
+ assert open(fname).read() == "h world!"
247
+ assert f2tell == _ftell
248
+ elif fmode == dill.FILE_FMODE:
249
+ assert open(fname).read() == "hello world!"
250
+ assert f2tell == ftell
251
+ else:
252
+ raise RuntimeError("Unknown file mode '%s'" % fmode)
253
+ f2.close()
254
+
255
+ # file does not exist
256
+ # read
257
+
258
+ write_randomness()
259
+
260
+ f = open(fname, "r")
261
+ fstr = f.read()
262
+ f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio)
263
+ f1mode = f.mode
264
+ ftell = f.tell()
265
+ f.close()
266
+
267
+ os.remove(fname)
268
+
269
+ if strictio: # throw error if file DNE
270
+ assert throws(dill.loads, (f_dumped,), dne_error)
271
+ else:
272
+ f2 = dill.loads(f_dumped)
273
+ assert f2.mode == f1mode
274
+ if fmode == dill.CONTENTS_FMODE:
275
+ # FIXME: this fails on systems where f2.tell() always returns 0
276
+ # assert f2.tell() == ftell # 200
277
+ assert f2.read() == ""
278
+ f2.seek(0)
279
+ assert f2.read() == ""
280
+ assert f2.tell() == 0
281
+ elif fmode == dill.FILE_FMODE:
282
+ assert f2.tell() == ftell # 200
283
+ assert f2.read() == ""
284
+ f2.seek(0)
285
+ assert f2.read() == fstr
286
+ assert f2.tell() == ftell # 200
287
+ elif fmode == dill.HANDLE_FMODE:
288
+ assert f2.tell() == 0
289
+ assert f2.read() == ""
290
+ assert f2.tell() == 0
291
+ else:
292
+ raise RuntimeError("Unknown file mode '%s'" % fmode)
293
+ f2.close()
294
+
295
+ # write
296
+
297
+ write_randomness()
298
+
299
+ f = open(fname, "w+")
300
+ f.write("hello")
301
+ f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio)
302
+ ftell = f.tell()
303
+ f1mode = f.mode
304
+ f.close()
305
+
306
+ os.remove(fname)
307
+
308
+ if strictio: # throw error if file DNE
309
+ assert throws(dill.loads, (f_dumped,), dne_error)
310
+ else:
311
+ f2 = dill.loads(f_dumped)
312
+ f2mode = f2.mode
313
+ f2tell = f2.tell()
314
+ f2.write(" world!")
315
+ f2.close()
316
+ if fmode == dill.CONTENTS_FMODE:
317
+ assert open(fname).read() == " world!"
318
+ assert f2mode == 'w+'
319
+ assert f2tell == 0
320
+ elif fmode == dill.HANDLE_FMODE:
321
+ assert open(fname).read() == " world!"
322
+ assert f2mode == f1mode
323
+ assert f2tell == 0
324
+ elif fmode == dill.FILE_FMODE:
325
+ assert open(fname).read() == "hello world!"
326
+ assert f2mode == f1mode
327
+ assert f2tell == ftell
328
+ else:
329
+ raise RuntimeError("Unknown file mode '%s'" % fmode)
330
+
331
+ # append
332
+
333
+ trunc_file()
334
+
335
+ f = open(fname, "a")
336
+ f.write("hello")
337
+ f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio)
338
+ ftell = f.tell()
339
+ f1mode = f.mode
340
+ f.close()
341
+
342
+ os.remove(fname)
343
+
344
+ if strictio: # throw error if file DNE
345
+ assert throws(dill.loads, (f_dumped,), dne_error)
346
+ else:
347
+ f2 = dill.loads(f_dumped)
348
+ f2mode = f2.mode
349
+ f2tell = f2.tell()
350
+ f2.write(" world!")
351
+ f2.close()
352
+ assert f2mode == f1mode
353
+ if fmode == dill.CONTENTS_FMODE:
354
+ assert open(fname).read() == " world!"
355
+ assert f2tell == 0
356
+ elif fmode == dill.HANDLE_FMODE:
357
+ assert open(fname).read() == " world!"
358
+ assert f2tell == 0
359
+ elif fmode == dill.FILE_FMODE:
360
+ assert open(fname).read() == "hello world!"
361
+ assert f2tell == ftell
362
+ else:
363
+ raise RuntimeError("Unknown file mode '%s'" % fmode)
364
+
365
+ # file exists, with different contents (larger size)
366
+ # read
367
+
368
+ write_randomness()
369
+
370
+ f = open(fname, "r")
371
+ fstr = f.read()
372
+ f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio)
373
+ f1mode = f.mode
374
+ ftell = f.tell()
375
+ f.close()
376
+ _flen = 250
377
+ _fstr = write_randomness(number=_flen)
378
+
379
+ # XXX: no safe_file: no way to be 'safe'?
380
+
381
+ f2 = dill.loads(f_dumped)
382
+ assert f2.mode == f1mode
383
+ if fmode == dill.CONTENTS_FMODE:
384
+ assert f2.tell() == ftell # 200
385
+ assert f2.read() == _fstr[ftell:]
386
+ f2.seek(0)
387
+ assert f2.read() == _fstr
388
+ assert f2.tell() == _flen # 250
389
+ elif fmode == dill.HANDLE_FMODE:
390
+ assert f2.tell() == 0
391
+ assert f2.read() == _fstr
392
+ assert f2.tell() == _flen # 250
393
+ elif fmode == dill.FILE_FMODE:
394
+ assert f2.tell() == ftell # 200
395
+ assert f2.read() == ""
396
+ f2.seek(0)
397
+ assert f2.read() == fstr
398
+ assert f2.tell() == ftell # 200
399
+ else:
400
+ raise RuntimeError("Unknown file mode '%s'" % fmode)
401
+ f2.close() # XXX: other alternatives?
402
+
403
+ # write
404
+
405
+ f = open(fname, "w")
406
+ f.write("hello")
407
+ f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio)
408
+ f1mode = f.mode
409
+ ftell = f.tell()
410
+
411
+ fstr = open(fname).read()
412
+
413
+ f.write(" and goodbye!")
414
+ _ftell = f.tell()
415
+ f.close()
416
+
417
+ # XXX: no safe_file: no way to be 'safe'?
418
+
419
+ f2 = dill.loads(f_dumped)
420
+ f2mode = f2.mode
421
+ f2tell = f2.tell()
422
+ f2.write(" world!")
423
+ f2.close()
424
+ if fmode == dill.CONTENTS_FMODE:
425
+ assert open(fname).read() == "hello world!odbye!"
426
+ assert f2mode == f1mode
427
+ assert f2tell == ftell
428
+ elif fmode == dill.HANDLE_FMODE:
429
+ assert open(fname).read() == " world!"
430
+ assert f2mode == f1mode
431
+ assert f2tell == 0
432
+ elif fmode == dill.FILE_FMODE:
433
+ assert open(fname).read() == "hello world!"
434
+ assert f2mode == f1mode
435
+ assert f2tell == ftell
436
+ else:
437
+ raise RuntimeError("Unknown file mode '%s'" % fmode)
438
+ f2.close()
439
+
440
+ # append
441
+
442
+ trunc_file()
443
+
444
+ f = open(fname, "a")
445
+ f.write("hello")
446
+ f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio)
447
+ f1mode = f.mode
448
+ ftell = f.tell()
449
+ fstr = open(fname).read()
450
+
451
+ f.write(" and goodbye!")
452
+ _ftell = f.tell()
453
+ f.close()
454
+
455
+ # XXX: no safe_file: no way to be 'safe'?
456
+
457
+ f2 = dill.loads(f_dumped)
458
+ f2mode = f2.mode
459
+ f2tell = f2.tell()
460
+ f2.write(" world!")
461
+ f2.close()
462
+ assert f2mode == f1mode
463
+ if fmode == dill.CONTENTS_FMODE:
464
+ assert open(fname).read() == "hello and goodbye! world!"
465
+ assert f2tell == ftell
466
+ elif fmode == dill.HANDLE_FMODE:
467
+ assert open(fname).read() == "hello and goodbye! world!"
468
+ assert f2tell == _ftell
469
+ elif fmode == dill.FILE_FMODE:
470
+ assert open(fname).read() == "hello world!"
471
+ assert f2tell == ftell
472
+ else:
473
+ raise RuntimeError("Unknown file mode '%s'" % fmode)
474
+ f2.close()
475
+
476
+
477
+ def test_nostrictio_handlefmode():
478
+ bench(False, dill.HANDLE_FMODE, False)
479
+ teardown_module()
480
+
481
+
482
+ def test_nostrictio_filefmode():
483
+ bench(False, dill.FILE_FMODE, False)
484
+ teardown_module()
485
+
486
+
487
+ def test_nostrictio_contentsfmode():
488
+ bench(False, dill.CONTENTS_FMODE, True)
489
+ teardown_module()
490
+
491
+
492
+ #bench(True, dill.HANDLE_FMODE, False)
493
+ #bench(True, dill.FILE_FMODE, False)
494
+ #bench(True, dill.CONTENTS_FMODE, True)
495
+
496
+
497
+ if __name__ == '__main__':
498
+ test_nostrictio_handlefmode()
499
+ test_nostrictio_filefmode()
500
+ test_nostrictio_contentsfmode()
.venv/lib/python3.11/site-packages/dill/tests/test_functors.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import functools
10
+ import dill
11
+ dill.settings['recurse'] = True
12
+
13
+
14
+ def f(a, b, c): # without keywords
15
+ pass
16
+
17
+
18
+ def g(a, b, c=2): # with keywords
19
+ pass
20
+
21
+
22
+ def h(a=1, b=2, c=3): # without args
23
+ pass
24
+
25
+
26
+ def test_functools():
27
+ fp = functools.partial(f, 1, 2)
28
+ gp = functools.partial(g, 1, c=2)
29
+ hp = functools.partial(h, 1, c=2)
30
+ bp = functools.partial(int, base=2)
31
+
32
+ assert dill.pickles(fp, safe=True)
33
+ assert dill.pickles(gp, safe=True)
34
+ assert dill.pickles(hp, safe=True)
35
+ assert dill.pickles(bp, safe=True)
36
+
37
+
38
+ if __name__ == '__main__':
39
+ test_functools()
.venv/lib/python3.11/site-packages/dill/tests/test_logger.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+
3
+ # Author: Leonardo Gama (@leogama)
4
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+
8
+ import logging
9
+ import re
10
+ import tempfile
11
+
12
+ import dill
13
+ from dill import detect
14
+ from dill.logger import stderr_handler, adapter as logger
15
+
16
+ try:
17
+ from StringIO import StringIO
18
+ except ImportError:
19
+ from io import StringIO
20
+
21
+ test_obj = {'a': (1, 2), 'b': object(), 'f': lambda x: x**2, 'big': list(range(10))}
22
+
23
+ def test_logging(should_trace):
24
+ buffer = StringIO()
25
+ handler = logging.StreamHandler(buffer)
26
+ logger.addHandler(handler)
27
+ try:
28
+ dill.dumps(test_obj)
29
+ if should_trace:
30
+ regex = re.compile(r'(\S*┬ \w.*[^)]' # begin pickling object
31
+ r'|│*└ # \w.* \[\d+ (\wi)?B])' # object written (with size)
32
+ )
33
+ for line in buffer.getvalue().splitlines():
34
+ assert regex.fullmatch(line)
35
+ return buffer.getvalue()
36
+ else:
37
+ assert buffer.getvalue() == ""
38
+ finally:
39
+ logger.removeHandler(handler)
40
+ buffer.close()
41
+
42
+ def test_trace_to_file(stream_trace):
43
+ file = tempfile.NamedTemporaryFile(mode='r')
44
+ with detect.trace(file.name, mode='w'):
45
+ dill.dumps(test_obj)
46
+ file_trace = file.read()
47
+ file.close()
48
+ # Apparently, objects can change location in memory...
49
+ reghex = re.compile(r'0x[0-9A-Za-z]+')
50
+ file_trace, stream_trace = reghex.sub('0x', file_trace), reghex.sub('0x', stream_trace)
51
+ # PyPy prints dictionary contents with repr(dict)...
52
+ regdict = re.compile(r'(dict\.__repr__ of ).*')
53
+ file_trace, stream_trace = regdict.sub(r'\1{}>', file_trace), regdict.sub(r'\1{}>', stream_trace)
54
+ assert file_trace == stream_trace
55
+
56
+ if __name__ == '__main__':
57
+ logger.removeHandler(stderr_handler)
58
+ test_logging(should_trace=False)
59
+ detect.trace(True)
60
+ test_logging(should_trace=True)
61
+ detect.trace(False)
62
+ test_logging(should_trace=False)
63
+
64
+ loglevel = logging.ERROR
65
+ logger.setLevel(loglevel)
66
+ with detect.trace():
67
+ stream_trace = test_logging(should_trace=True)
68
+ test_logging(should_trace=False)
69
+ assert logger.getEffectiveLevel() == loglevel
70
+ test_trace_to_file(stream_trace)
.venv/lib/python3.11/site-packages/dill/tests/test_mixins.py ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import dill
10
+ dill.settings['recurse'] = True
11
+
12
+
13
+ def wtf(x,y,z):
14
+ def zzz():
15
+ return x
16
+ def yyy():
17
+ return y
18
+ def xxx():
19
+ return z
20
+ return zzz,yyy
21
+
22
+
23
+ def quad(a=1, b=1, c=0):
24
+ inverted = [False]
25
+ def invert():
26
+ inverted[0] = not inverted[0]
27
+ def dec(f):
28
+ def func(*args, **kwds):
29
+ x = f(*args, **kwds)
30
+ if inverted[0]: x = -x
31
+ return a*x**2 + b*x + c
32
+ func.__wrapped__ = f
33
+ func.invert = invert
34
+ func.inverted = inverted
35
+ return func
36
+ return dec
37
+
38
+
39
+ @quad(a=0,b=2)
40
+ def double_add(*args):
41
+ return sum(args)
42
+
43
+
44
+ fx = sum([1,2,3])
45
+
46
+
47
+ ### to make it interesting...
48
+ def quad_factory(a=1,b=1,c=0):
49
+ def dec(f):
50
+ def func(*args,**kwds):
51
+ fx = f(*args,**kwds)
52
+ return a*fx**2 + b*fx + c
53
+ return func
54
+ return dec
55
+
56
+
57
+ @quad_factory(a=0,b=4,c=0)
58
+ def quadish(x):
59
+ return x+1
60
+
61
+
62
+ quadratic = quad_factory()
63
+
64
+
65
+ def doubler(f):
66
+ def inner(*args, **kwds):
67
+ fx = f(*args, **kwds)
68
+ return 2*fx
69
+ return inner
70
+
71
+
72
+ @doubler
73
+ def quadruple(x):
74
+ return 2*x
75
+
76
+
77
+ def test_mixins():
78
+ # test mixins
79
+ assert double_add(1,2,3) == 2*fx
80
+ double_add.invert()
81
+ assert double_add(1,2,3) == -2*fx
82
+
83
+ _d = dill.copy(double_add)
84
+ assert _d(1,2,3) == -2*fx
85
+ #_d.invert() #FIXME: fails seemingly randomly
86
+ #assert _d(1,2,3) == 2*fx
87
+
88
+ assert _d.__wrapped__(1,2,3) == fx
89
+
90
+ # XXX: issue or feature? in python3.4, inverted is linked through copy
91
+ if not double_add.inverted[0]:
92
+ double_add.invert()
93
+
94
+ # test some stuff from source and pointers
95
+ ds = dill.source
96
+ dd = dill.detect
97
+ assert ds.getsource(dd.freevars(quadish)['f']) == '@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n'
98
+ assert ds.getsource(dd.freevars(quadruple)['f']) == '@doubler\ndef quadruple(x):\n return 2*x\n'
99
+ assert ds.importable(quadish, source=False) == 'from %s import quadish\n' % __name__
100
+ assert ds.importable(quadruple, source=False) == 'from %s import quadruple\n' % __name__
101
+ assert ds.importable(quadratic, source=False) == 'from %s import quadratic\n' % __name__
102
+ assert ds.importable(double_add, source=False) == 'from %s import double_add\n' % __name__
103
+ assert ds.importable(quadruple, source=True) == 'def doubler(f):\n def inner(*args, **kwds):\n fx = f(*args, **kwds)\n return 2*fx\n return inner\n\n@doubler\ndef quadruple(x):\n return 2*x\n'
104
+ #***** #FIXME: this needs work
105
+ result = ds.importable(quadish, source=True)
106
+ a,b,c,_,result = result.split('\n',4)
107
+ assert result == 'def quad_factory(a=1,b=1,c=0):\n def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n return dec\n\n@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n'
108
+ assert set([a,b,c]) == set(['a = 0', 'c = 0', 'b = 4'])
109
+ result = ds.importable(quadratic, source=True)
110
+ a,b,c,result = result.split('\n',3)
111
+ assert result == '\ndef dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n'
112
+ assert set([a,b,c]) == set(['a = 1', 'c = 0', 'b = 1'])
113
+ result = ds.importable(double_add, source=True)
114
+ a,b,c,d,_,result = result.split('\n',5)
115
+ assert result == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n\n@quad(a=0,b=2)\ndef double_add(*args):\n return sum(args)\n'
116
+ assert set([a,b,c,d]) == set(['a = 0', 'c = 0', 'b = 2', 'inverted = [True]'])
117
+ #*****
118
+
119
+
120
+ if __name__ == '__main__':
121
+ test_mixins()
.venv/lib/python3.11/site-packages/dill/tests/test_module.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import sys
10
+ import dill
11
+ import test_mixins as module
12
+ from importlib import reload
13
+ dill.settings['recurse'] = True
14
+
15
+ cached = (module.__cached__ if hasattr(module, "__cached__")
16
+ else module.__file__.split(".", 1)[0] + ".pyc")
17
+
18
+ module.a = 1234
19
+
20
+ pik_mod = dill.dumps(module)
21
+
22
+ module.a = 0
23
+
24
+ # remove module
25
+ del sys.modules[module.__name__]
26
+ del module
27
+
28
+ module = dill.loads(pik_mod)
29
+ def test_attributes():
30
+ #assert hasattr(module, "a") and module.a == 1234 #FIXME: -m dill.tests
31
+ assert module.double_add(1, 2, 3) == 2 * module.fx
32
+
33
+ # Restart, and test use_diff
34
+
35
+ reload(module)
36
+
37
+ try:
38
+ dill.use_diff()
39
+
40
+ module.a = 1234
41
+
42
+ pik_mod = dill.dumps(module)
43
+
44
+ module.a = 0
45
+
46
+ # remove module
47
+ del sys.modules[module.__name__]
48
+ del module
49
+
50
+ module = dill.loads(pik_mod)
51
+ def test_diff_attributes():
52
+ assert hasattr(module, "a") and module.a == 1234
53
+ assert module.double_add(1, 2, 3) == 2 * module.fx
54
+
55
+ except AttributeError:
56
+ def test_diff_attributes():
57
+ pass
58
+
59
+ # clean up
60
+ import os
61
+ if os.path.exists(cached):
62
+ os.remove(cached)
63
+ pycache = os.path.join(os.path.dirname(module.__file__), "__pycache__")
64
+ if os.path.exists(pycache) and not os.listdir(pycache):
65
+ os.removedirs(pycache)
66
+
67
+
68
+ # test when module is None
69
+ import math
70
+
71
+ def get_lambda(str, **kwarg):
72
+ return eval(str, kwarg, None)
73
+
74
+ obj = get_lambda('lambda x: math.exp(x)', math=math)
75
+
76
+ def test_module_is_none():
77
+ assert obj.__module__ is None
78
+ assert dill.copy(obj)(3) == obj(3)
79
+
80
+
81
+ if __name__ == '__main__':
82
+ test_attributes()
83
+ test_diff_attributes()
84
+ test_module_is_none()
.venv/lib/python3.11/site-packages/dill/tests/test_moduledict.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import dill
10
+ dill.settings['recurse'] = True
11
+
12
+ def f(func):
13
+ def w(*args):
14
+ return f(*args)
15
+ return w
16
+
17
+ @f
18
+ def f2(): pass
19
+
20
+ # check when __main__ and on import
21
+ def test_decorated():
22
+ assert dill.pickles(f2)
23
+
24
+
25
+ import doctest
26
+ import logging
27
+ logging.basicConfig(level=logging.DEBUG)
28
+
29
+ class SomeUnreferencedUnpicklableClass(object):
30
+ def __reduce__(self):
31
+ raise Exception
32
+
33
+ unpicklable = SomeUnreferencedUnpicklableClass()
34
+
35
+ # This works fine outside of Doctest:
36
+ def test_normal():
37
+ serialized = dill.dumps(lambda x: x)
38
+
39
+ # should not try to pickle unpicklable object in __globals__
40
+ def tests():
41
+ """
42
+ >>> serialized = dill.dumps(lambda x: x)
43
+ """
44
+ return
45
+
46
+ #print("\n\nRunning Doctest:")
47
+ def test_doctest():
48
+ doctest.testmod()
49
+
50
+
51
+ if __name__ == '__main__':
52
+ test_decorated()
53
+ test_normal()
54
+ test_doctest()
.venv/lib/python3.11/site-packages/dill/tests/test_nested.py ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ test dill's ability to handle nested functions
10
+ """
11
+
12
+ import os
13
+ import math
14
+
15
+ import dill as pickle
16
+ pickle.settings['recurse'] = True
17
+
18
+
19
+ # the nested function: pickle should fail here, but dill is ok.
20
+ def adder(augend):
21
+ zero = [0]
22
+
23
+ def inner(addend):
24
+ return addend + augend + zero[0]
25
+ return inner
26
+
27
+
28
+ # rewrite the nested function using a class: standard pickle should work here.
29
+ class cadder(object):
30
+ def __init__(self, augend):
31
+ self.augend = augend
32
+ self.zero = [0]
33
+
34
+ def __call__(self, addend):
35
+ return addend + self.augend + self.zero[0]
36
+
37
+
38
+ # rewrite again, but as an old-style class
39
+ class c2adder:
40
+ def __init__(self, augend):
41
+ self.augend = augend
42
+ self.zero = [0]
43
+
44
+ def __call__(self, addend):
45
+ return addend + self.augend + self.zero[0]
46
+
47
+
48
+ # some basic class stuff
49
+ class basic(object):
50
+ pass
51
+
52
+
53
+ class basic2:
54
+ pass
55
+
56
+
57
+ x = 5
58
+ y = 1
59
+
60
+
61
+ def test_basic():
62
+ a = [0, 1, 2]
63
+ pa = pickle.dumps(a)
64
+ pmath = pickle.dumps(math) #XXX: FAILS in pickle
65
+ pmap = pickle.dumps(map)
66
+ # ...
67
+ la = pickle.loads(pa)
68
+ lmath = pickle.loads(pmath)
69
+ lmap = pickle.loads(pmap)
70
+ assert list(map(math.sin, a)) == list(lmap(lmath.sin, la))
71
+
72
+
73
+ def test_basic_class():
74
+ pbasic2 = pickle.dumps(basic2)
75
+ _pbasic2 = pickle.loads(pbasic2)()
76
+ pbasic = pickle.dumps(basic)
77
+ _pbasic = pickle.loads(pbasic)()
78
+
79
+
80
+ def test_c2adder():
81
+ pc2adder = pickle.dumps(c2adder)
82
+ pc2add5 = pickle.loads(pc2adder)(x)
83
+ assert pc2add5(y) == x+y
84
+
85
+
86
+ def test_pickled_cadder():
87
+ pcadder = pickle.dumps(cadder)
88
+ pcadd5 = pickle.loads(pcadder)(x)
89
+ assert pcadd5(y) == x+y
90
+
91
+
92
+ def test_raw_adder_and_inner():
93
+ add5 = adder(x)
94
+ assert add5(y) == x+y
95
+
96
+
97
+ def test_pickled_adder():
98
+ padder = pickle.dumps(adder)
99
+ padd5 = pickle.loads(padder)(x)
100
+ assert padd5(y) == x+y
101
+
102
+
103
+ def test_pickled_inner():
104
+ add5 = adder(x)
105
+ pinner = pickle.dumps(add5) #XXX: FAILS in pickle
106
+ p5add = pickle.loads(pinner)
107
+ assert p5add(y) == x+y
108
+
109
+
110
+ def test_moduledict_where_not_main():
111
+ try:
112
+ from . import test_moduledict
113
+ except ImportError:
114
+ import test_moduledict
115
+ name = 'test_moduledict.py'
116
+ if os.path.exists(name) and os.path.exists(name+'c'):
117
+ os.remove(name+'c')
118
+
119
+ if os.path.exists(name) and hasattr(test_moduledict, "__cached__") \
120
+ and os.path.exists(test_moduledict.__cached__):
121
+ os.remove(getattr(test_moduledict, "__cached__"))
122
+
123
+ if os.path.exists("__pycache__") and not os.listdir("__pycache__"):
124
+ os.removedirs("__pycache__")
125
+
126
+
127
+ if __name__ == '__main__':
128
+ test_basic()
129
+ test_basic_class()
130
+ test_c2adder()
131
+ test_pickled_cadder()
132
+ test_raw_adder_and_inner()
133
+ test_pickled_adder()
134
+ test_pickled_inner()
135
+ test_moduledict_where_not_main()
.venv/lib/python3.11/site-packages/dill/tests/test_objects.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ demonstrate dill's ability to pickle different python types
10
+ test pickling of all Python Standard Library objects (currently: CH 1-14 @ 2.7)
11
+ """
12
+
13
+ import dill as pickle
14
+ pickle.settings['recurse'] = True
15
+ #pickle.detect.trace(True)
16
+ #import pickle
17
+
18
+ # get all objects for testing
19
+ from dill import load_types, objects, extend
20
+ load_types(pickleable=True,unpickleable=False)
21
+
22
+ # uncomment the next two lines to test cloudpickle
23
+ #extend(False)
24
+ #import cloudpickle as pickle
25
+
26
+ # helper objects
27
+ class _class:
28
+ def _method(self):
29
+ pass
30
+
31
+ # objects that *fail* if imported
32
+ special = {}
33
+ special['LambdaType'] = _lambda = lambda x: lambda y: x
34
+ special['MethodType'] = _method = _class()._method
35
+ special['UnboundMethodType'] = _class._method
36
+ objects.update(special)
37
+
38
+ def pickles(name, exact=False, verbose=True):
39
+ """quick check if object pickles with dill"""
40
+ obj = objects[name]
41
+ try:
42
+ pik = pickle.loads(pickle.dumps(obj))
43
+ if exact:
44
+ try:
45
+ assert pik == obj
46
+ except AssertionError:
47
+ assert type(obj) == type(pik)
48
+ if verbose: print ("weak: %s %s" % (name, type(obj)))
49
+ else:
50
+ assert type(obj) == type(pik)
51
+ except Exception:
52
+ if verbose: print ("fails: %s %s" % (name, type(obj)))
53
+
54
+
55
+ def test_objects(verbose=True):
56
+ for member in objects.keys():
57
+ #pickles(member, exact=True, verbose=verbose)
58
+ pickles(member, exact=False, verbose=verbose)
59
+
60
+ if __name__ == '__main__':
61
+ import warnings
62
+ warnings.simplefilter('ignore')
63
+ test_objects(verbose=False)
.venv/lib/python3.11/site-packages/dill/tests/test_properties.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import sys
10
+
11
+ import dill
12
+ dill.settings['recurse'] = True
13
+
14
+
15
+ class Foo(object):
16
+ def __init__(self):
17
+ self._data = 1
18
+
19
+ def _get_data(self):
20
+ return self._data
21
+
22
+ def _set_data(self, x):
23
+ self._data = x
24
+
25
+ data = property(_get_data, _set_data)
26
+
27
+
28
+ def test_data_not_none():
29
+ FooS = dill.copy(Foo)
30
+ assert FooS.data.fget is not None
31
+ assert FooS.data.fset is not None
32
+ assert FooS.data.fdel is None
33
+
34
+
35
+ def test_data_unchanged():
36
+ FooS = dill.copy(Foo)
37
+ try:
38
+ res = FooS().data
39
+ except Exception:
40
+ e = sys.exc_info()[1]
41
+ raise AssertionError(str(e))
42
+ else:
43
+ assert res == 1
44
+
45
+
46
+ def test_data_changed():
47
+ FooS = dill.copy(Foo)
48
+ try:
49
+ f = FooS()
50
+ f.data = 1024
51
+ res = f.data
52
+ except Exception:
53
+ e = sys.exc_info()[1]
54
+ raise AssertionError(str(e))
55
+ else:
56
+ assert res == 1024
57
+
58
+
59
+ if __name__ == '__main__':
60
+ test_data_not_none()
61
+ test_data_unchanged()
62
+ test_data_changed()
.venv/lib/python3.11/site-packages/dill/tests/test_pycapsule.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Author: Anirudh Vegesana (avegesan@cs.stanford.edu)
5
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ test pickling a PyCapsule object
10
+ """
11
+
12
+ import dill
13
+ import warnings
14
+
15
+ test_pycapsule = None
16
+
17
+ if dill._dill._testcapsule is not None:
18
+ import ctypes
19
+ def test_pycapsule():
20
+ name = ctypes.create_string_buffer(b'dill._testcapsule')
21
+ capsule = dill._dill._PyCapsule_New(
22
+ ctypes.cast(dill._dill._PyCapsule_New, ctypes.c_void_p),
23
+ name,
24
+ None
25
+ )
26
+ with warnings.catch_warnings():
27
+ warnings.simplefilter("ignore")
28
+ dill.copy(capsule)
29
+ dill._testcapsule = capsule
30
+ with warnings.catch_warnings():
31
+ warnings.simplefilter("ignore")
32
+ dill.copy(capsule)
33
+ dill._testcapsule = None
34
+ try:
35
+ with warnings.catch_warnings():
36
+ warnings.simplefilter("ignore", dill.PicklingWarning)
37
+ dill.copy(capsule)
38
+ except dill.UnpicklingError:
39
+ pass
40
+ else:
41
+ raise AssertionError("Expected a different error")
42
+
43
+ if __name__ == '__main__':
44
+ if test_pycapsule is not None:
45
+ test_pycapsule()
.venv/lib/python3.11/site-packages/dill/tests/test_registered.py ADDED
@@ -0,0 +1,64 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+ """
8
+ test pickling registered objects
9
+ """
10
+
11
+ import dill
12
+ from dill._objects import failures, registered, succeeds
13
+ import warnings
14
+ warnings.filterwarnings('ignore')
15
+
16
+ def check(d, ok=True):
17
+ res = []
18
+ for k,v in d.items():
19
+ try:
20
+ z = dill.copy(v)
21
+ if ok: res.append(k)
22
+ except:
23
+ if not ok: res.append(k)
24
+ return res
25
+
26
+ fails = check(failures)
27
+ try:
28
+ assert not bool(fails)
29
+ except AssertionError as e:
30
+ print("FAILS: %s" % fails)
31
+ raise e from None
32
+
33
+ register = check(registered, ok=False)
34
+ try:
35
+ assert not bool(register)
36
+ except AssertionError as e:
37
+ print("REGISTER: %s" % register)
38
+ raise e from None
39
+
40
+ success = check(succeeds, ok=False)
41
+ try:
42
+ assert not bool(success)
43
+ except AssertionError as e:
44
+ print("SUCCESS: %s" % success)
45
+ raise e from None
46
+
47
+ import builtins
48
+ import types
49
+ q = dill._dill._reverse_typemap
50
+ p = {k:v for k,v in q.items() if k not in vars(builtins) and k not in vars(types)}
51
+
52
+ diff = set(p.keys()).difference(registered.keys())
53
+ try:
54
+ assert not bool(diff)
55
+ except AssertionError as e:
56
+ print("DIFF: %s" % diff)
57
+ raise e from None
58
+
59
+ miss = set(registered.keys()).difference(p.keys())
60
+ try:
61
+ assert not bool(miss)
62
+ except AssertionError as e:
63
+ print("MISS: %s" % miss)
64
+ raise e from None
.venv/lib/python3.11/site-packages/dill/tests/test_session.py ADDED
@@ -0,0 +1,280 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+
3
+ # Author: Leonardo Gama (@leogama)
4
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+
8
+ import atexit
9
+ import os
10
+ import sys
11
+ import __main__
12
+ from contextlib import suppress
13
+ from io import BytesIO
14
+
15
+ import dill
16
+
17
+ session_file = os.path.join(os.path.dirname(__file__), 'session-refimported-%s.pkl')
18
+
19
+ ###################
20
+ # Child process #
21
+ ###################
22
+
23
+ def _error_line(error, obj, refimported):
24
+ import traceback
25
+ line = traceback.format_exc().splitlines()[-2].replace('[obj]', '['+repr(obj)+']')
26
+ return "while testing (with refimported=%s): %s" % (refimported, line.lstrip())
27
+
28
+ if __name__ == '__main__' and len(sys.argv) >= 3 and sys.argv[1] == '--child':
29
+ # Test session loading in a fresh interpreter session.
30
+ refimported = (sys.argv[2] == 'True')
31
+ dill.load_module(session_file % refimported, module='__main__')
32
+
33
+ def test_modules(refimported):
34
+ # FIXME: In this test setting with CPython 3.7, 'calendar' is not included
35
+ # in sys.modules, independent of the value of refimported. Tried to
36
+ # run garbage collection just before loading the session with no luck. It
37
+ # fails even when preceding them with 'import calendar'. Needed to run
38
+ # these kinds of tests in a subprocess. Failing test sample:
39
+ # assert globals()['day_name'] is sys.modules['calendar'].__dict__['day_name']
40
+ try:
41
+ for obj in ('json', 'url', 'local_mod', 'sax', 'dom'):
42
+ assert globals()[obj].__name__ in sys.modules
43
+ assert 'calendar' in sys.modules and 'cmath' in sys.modules
44
+ import calendar, cmath
45
+
46
+ for obj in ('Calendar', 'isleap'):
47
+ assert globals()[obj] is sys.modules['calendar'].__dict__[obj]
48
+ assert __main__.day_name.__module__ == 'calendar'
49
+ if refimported:
50
+ assert __main__.day_name is calendar.day_name
51
+
52
+ assert __main__.complex_log is cmath.log
53
+
54
+ except AssertionError as error:
55
+ error.args = (_error_line(error, obj, refimported),)
56
+ raise
57
+
58
+ test_modules(refimported)
59
+ sys.exit()
60
+
61
+ ####################
62
+ # Parent process #
63
+ ####################
64
+
65
+ # Create various kinds of objects to test different internal logics.
66
+
67
+ ## Modules.
68
+ import json # top-level module
69
+ import urllib as url # top-level module under alias
70
+ from xml import sax # submodule
71
+ import xml.dom.minidom as dom # submodule under alias
72
+ import test_dictviews as local_mod # non-builtin top-level module
73
+
74
+ ## Imported objects.
75
+ from calendar import Calendar, isleap, day_name # class, function, other object
76
+ from cmath import log as complex_log # imported with alias
77
+
78
+ ## Local objects.
79
+ x = 17
80
+ empty = None
81
+ names = ['Alice', 'Bob', 'Carol']
82
+ def squared(x): return x**2
83
+ cubed = lambda x: x**3
84
+ class Person:
85
+ def __init__(self, name, age):
86
+ self.name = name
87
+ self.age = age
88
+ person = Person(names[0], x)
89
+ class CalendarSubclass(Calendar):
90
+ def weekdays(self):
91
+ return [day_name[i] for i in self.iterweekdays()]
92
+ cal = CalendarSubclass()
93
+ selfref = __main__
94
+
95
+ # Setup global namespace for session saving tests.
96
+ class TestNamespace:
97
+ test_globals = globals().copy()
98
+ def __init__(self, **extra):
99
+ self.extra = extra
100
+ def __enter__(self):
101
+ self.backup = globals().copy()
102
+ globals().clear()
103
+ globals().update(self.test_globals)
104
+ globals().update(self.extra)
105
+ return self
106
+ def __exit__(self, *exc_info):
107
+ globals().clear()
108
+ globals().update(self.backup)
109
+
110
+ def _clean_up_cache(module):
111
+ cached = module.__file__.split('.', 1)[0] + '.pyc'
112
+ cached = module.__cached__ if hasattr(module, '__cached__') else cached
113
+ pycache = os.path.join(os.path.dirname(module.__file__), '__pycache__')
114
+ for remove, file in [(os.remove, cached), (os.removedirs, pycache)]:
115
+ with suppress(OSError):
116
+ remove(file)
117
+
118
+ atexit.register(_clean_up_cache, local_mod)
119
+
120
+ def _test_objects(main, globals_copy, refimported):
121
+ try:
122
+ main_dict = __main__.__dict__
123
+ global Person, person, Calendar, CalendarSubclass, cal, selfref
124
+
125
+ for obj in ('json', 'url', 'local_mod', 'sax', 'dom'):
126
+ assert globals()[obj].__name__ == globals_copy[obj].__name__
127
+
128
+ for obj in ('x', 'empty', 'names'):
129
+ assert main_dict[obj] == globals_copy[obj]
130
+
131
+ for obj in ['squared', 'cubed']:
132
+ assert main_dict[obj].__globals__ is main_dict
133
+ assert main_dict[obj](3) == globals_copy[obj](3)
134
+
135
+ assert Person.__module__ == __main__.__name__
136
+ assert isinstance(person, Person)
137
+ assert person.age == globals_copy['person'].age
138
+
139
+ assert issubclass(CalendarSubclass, Calendar)
140
+ assert isinstance(cal, CalendarSubclass)
141
+ assert cal.weekdays() == globals_copy['cal'].weekdays()
142
+
143
+ assert selfref is __main__
144
+
145
+ except AssertionError as error:
146
+ error.args = (_error_line(error, obj, refimported),)
147
+ raise
148
+
149
+ def test_session_main(refimported):
150
+ """test dump/load_module() for __main__, both in this process and in a subprocess"""
151
+ extra_objects = {}
152
+ if refimported:
153
+ # Test unpickleable imported object in main.
154
+ from sys import flags
155
+ extra_objects['flags'] = flags
156
+
157
+ with TestNamespace(**extra_objects) as ns:
158
+ try:
159
+ # Test session loading in a new session.
160
+ dill.dump_module(session_file % refimported, refimported=refimported)
161
+ from dill.tests.__main__ import python, shell, sp
162
+ error = sp.call([python, __file__, '--child', str(refimported)], shell=shell)
163
+ if error: sys.exit(error)
164
+ finally:
165
+ with suppress(OSError):
166
+ os.remove(session_file % refimported)
167
+
168
+ # Test session loading in the same session.
169
+ session_buffer = BytesIO()
170
+ dill.dump_module(session_buffer, refimported=refimported)
171
+ session_buffer.seek(0)
172
+ dill.load_module(session_buffer, module='__main__')
173
+ ns.backup['_test_objects'](__main__, ns.backup, refimported)
174
+
175
+ def test_session_other():
176
+ """test dump/load_module() for a module other than __main__"""
177
+ import test_classdef as module
178
+ atexit.register(_clean_up_cache, module)
179
+ module.selfref = module
180
+ dict_objects = [obj for obj in module.__dict__.keys() if not obj.startswith('__')]
181
+
182
+ session_buffer = BytesIO()
183
+ dill.dump_module(session_buffer, module)
184
+
185
+ for obj in dict_objects:
186
+ del module.__dict__[obj]
187
+
188
+ session_buffer.seek(0)
189
+ dill.load_module(session_buffer, module)
190
+
191
+ assert all(obj in module.__dict__ for obj in dict_objects)
192
+ assert module.selfref is module
193
+
194
+ def test_runtime_module():
195
+ from types import ModuleType
196
+ modname = '__runtime__'
197
+ runtime = ModuleType(modname)
198
+ runtime.x = 42
199
+
200
+ mod = dill.session._stash_modules(runtime)
201
+ if mod is not runtime:
202
+ print("There are objects to save by reference that shouldn't be:",
203
+ mod.__dill_imported, mod.__dill_imported_as, mod.__dill_imported_top_level,
204
+ file=sys.stderr)
205
+
206
+ # This is also for code coverage, tests the use case of dump_module(refimported=True)
207
+ # without imported objects in the namespace. It's a contrived example because
208
+ # even dill can't be in it. This should work after fixing #462.
209
+ session_buffer = BytesIO()
210
+ dill.dump_module(session_buffer, module=runtime, refimported=True)
211
+ session_dump = session_buffer.getvalue()
212
+
213
+ # Pass a new runtime created module with the same name.
214
+ runtime = ModuleType(modname) # empty
215
+ return_val = dill.load_module(BytesIO(session_dump), module=runtime)
216
+ assert return_val is None
217
+ assert runtime.__name__ == modname
218
+ assert runtime.x == 42
219
+ assert runtime not in sys.modules.values()
220
+
221
+ # Pass nothing as main. load_module() must create it.
222
+ session_buffer.seek(0)
223
+ runtime = dill.load_module(BytesIO(session_dump))
224
+ assert runtime.__name__ == modname
225
+ assert runtime.x == 42
226
+ assert runtime not in sys.modules.values()
227
+
228
+ def test_refimported_imported_as():
229
+ import collections
230
+ import concurrent.futures
231
+ import types
232
+ import typing
233
+ mod = sys.modules['__test__'] = types.ModuleType('__test__')
234
+ dill.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
235
+ mod.Dict = collections.UserDict # select by type
236
+ mod.AsyncCM = typing.AsyncContextManager # select by __module__
237
+ mod.thread_exec = dill.executor # select by __module__ with regex
238
+
239
+ session_buffer = BytesIO()
240
+ dill.dump_module(session_buffer, mod, refimported=True)
241
+ session_buffer.seek(0)
242
+ mod = dill.load(session_buffer)
243
+ del sys.modules['__test__']
244
+
245
+ assert set(mod.__dill_imported_as) == {
246
+ ('collections', 'UserDict', 'Dict'),
247
+ ('typing', 'AsyncContextManager', 'AsyncCM'),
248
+ ('dill', 'executor', 'thread_exec'),
249
+ }
250
+
251
+ def test_load_module_asdict():
252
+ with TestNamespace():
253
+ session_buffer = BytesIO()
254
+ dill.dump_module(session_buffer)
255
+
256
+ global empty, names, x, y
257
+ x = y = 0 # change x and create y
258
+ del empty
259
+ globals_state = globals().copy()
260
+
261
+ session_buffer.seek(0)
262
+ main_vars = dill.load_module_asdict(session_buffer)
263
+
264
+ assert main_vars is not globals()
265
+ assert globals() == globals_state
266
+
267
+ assert main_vars['__name__'] == '__main__'
268
+ assert main_vars['names'] == names
269
+ assert main_vars['names'] is not names
270
+ assert main_vars['x'] != x
271
+ assert 'y' not in main_vars
272
+ assert 'empty' in main_vars
273
+
274
+ if __name__ == '__main__':
275
+ test_session_main(refimported=False)
276
+ test_session_main(refimported=True)
277
+ test_session_other()
278
+ test_runtime_module()
279
+ test_refimported_imported_as()
280
+ test_load_module_asdict()
.venv/lib/python3.11/site-packages/dill/tests/test_source.py ADDED
@@ -0,0 +1,173 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ from dill.source import getsource, getname, _wrap, getimport
10
+ from dill.source import importable
11
+ from dill._dill import IS_PYPY
12
+
13
+ import sys
14
+ PY310b = 0x30a00b1
15
+
16
+ f = lambda x: x**2
17
+ def g(x): return f(x) - x
18
+
19
+ def h(x):
20
+ def g(x): return x
21
+ return g(x) - x
22
+
23
+ class Foo(object):
24
+ def bar(self, x):
25
+ return x*x+x
26
+ _foo = Foo()
27
+
28
+ def add(x,y):
29
+ return x+y
30
+
31
+ # yes, same as 'f', but things are tricky when it comes to pointers
32
+ squared = lambda x:x**2
33
+
34
+ class Bar:
35
+ pass
36
+ _bar = Bar()
37
+
38
+ # inspect.getsourcelines # dill.source.getblocks
39
+ def test_getsource():
40
+ assert getsource(f) == 'f = lambda x: x**2\n'
41
+ assert getsource(g) == 'def g(x): return f(x) - x\n'
42
+ assert getsource(h) == 'def h(x):\n def g(x): return x\n return g(x) - x\n'
43
+ assert getname(f) == 'f'
44
+ assert getname(g) == 'g'
45
+ assert getname(h) == 'h'
46
+ assert _wrap(f)(4) == 16
47
+ assert _wrap(g)(4) == 12
48
+ assert _wrap(h)(4) == 0
49
+
50
+ assert getname(Foo) == 'Foo'
51
+ assert getname(Bar) == 'Bar'
52
+ assert getsource(Bar) == 'class Bar:\n pass\n'
53
+ assert getsource(Foo) == 'class Foo(object):\n def bar(self, x):\n return x*x+x\n'
54
+ #XXX: add getsource for _foo, _bar
55
+
56
+ # test itself
57
+ def test_itself():
58
+ assert getimport(getimport)=='from dill.source import getimport\n'
59
+
60
+ # builtin functions and objects
61
+ def test_builtin():
62
+ assert getimport(pow) == 'pow\n'
63
+ assert getimport(100) == '100\n'
64
+ assert getimport(True) == 'True\n'
65
+ assert getimport(pow, builtin=True) == 'from builtins import pow\n'
66
+ assert getimport(100, builtin=True) == '100\n'
67
+ assert getimport(True, builtin=True) == 'True\n'
68
+ # this is kinda BS... you can't import a None
69
+ assert getimport(None) == 'None\n'
70
+ assert getimport(None, builtin=True) == 'None\n'
71
+
72
+
73
+ # other imported functions
74
+ def test_imported():
75
+ from math import sin
76
+ assert getimport(sin) == 'from math import sin\n'
77
+
78
+ # interactively defined functions
79
+ def test_dynamic():
80
+ assert getimport(add) == 'from %s import add\n' % __name__
81
+ # interactive lambdas
82
+ assert getimport(squared) == 'from %s import squared\n' % __name__
83
+
84
+ # classes and class instances
85
+ def test_classes():
86
+ from io import BytesIO as StringIO
87
+ y = "from _io import BytesIO\n"
88
+ x = y if (IS_PYPY or sys.hexversion >= PY310b) else "from io import BytesIO\n"
89
+ s = StringIO()
90
+
91
+ assert getimport(StringIO) == x
92
+ assert getimport(s) == y
93
+ # interactively defined classes and class instances
94
+ assert getimport(Foo) == 'from %s import Foo\n' % __name__
95
+ assert getimport(_foo) == 'from %s import Foo\n' % __name__
96
+
97
+
98
+ # test importable
99
+ def test_importable():
100
+ assert importable(add, source=False) == 'from %s import add\n' % __name__
101
+ assert importable(squared, source=False) == 'from %s import squared\n' % __name__
102
+ assert importable(Foo, source=False) == 'from %s import Foo\n' % __name__
103
+ assert importable(Foo.bar, source=False) == 'from %s import bar\n' % __name__
104
+ assert importable(_foo.bar, source=False) == 'from %s import bar\n' % __name__
105
+ assert importable(None, source=False) == 'None\n'
106
+ assert importable(100, source=False) == '100\n'
107
+
108
+ assert importable(add, source=True) == 'def add(x,y):\n return x+y\n'
109
+ assert importable(squared, source=True) == 'squared = lambda x:x**2\n'
110
+ assert importable(None, source=True) == 'None\n'
111
+ assert importable(Bar, source=True) == 'class Bar:\n pass\n'
112
+ assert importable(Foo, source=True) == 'class Foo(object):\n def bar(self, x):\n return x*x+x\n'
113
+ assert importable(Foo.bar, source=True) == 'def bar(self, x):\n return x*x+x\n'
114
+ assert importable(Foo.bar, source=False) == 'from %s import bar\n' % __name__
115
+ assert importable(Foo.bar, alias='memo', source=False) == 'from %s import bar as memo\n' % __name__
116
+ assert importable(Foo, alias='memo', source=False) == 'from %s import Foo as memo\n' % __name__
117
+ assert importable(squared, alias='memo', source=False) == 'from %s import squared as memo\n' % __name__
118
+ assert importable(squared, alias='memo', source=True) == 'memo = squared = lambda x:x**2\n'
119
+ assert importable(add, alias='memo', source=True) == 'def add(x,y):\n return x+y\n\nmemo = add\n'
120
+ assert importable(None, alias='memo', source=True) == 'memo = None\n'
121
+ assert importable(100, alias='memo', source=True) == 'memo = 100\n'
122
+ assert importable(add, builtin=True, source=False) == 'from %s import add\n' % __name__
123
+ assert importable(squared, builtin=True, source=False) == 'from %s import squared\n' % __name__
124
+ assert importable(Foo, builtin=True, source=False) == 'from %s import Foo\n' % __name__
125
+ assert importable(Foo.bar, builtin=True, source=False) == 'from %s import bar\n' % __name__
126
+ assert importable(_foo.bar, builtin=True, source=False) == 'from %s import bar\n' % __name__
127
+ assert importable(None, builtin=True, source=False) == 'None\n'
128
+ assert importable(100, builtin=True, source=False) == '100\n'
129
+
130
+
131
+ def test_numpy():
132
+ try:
133
+ import numpy as np
134
+ y = np.array
135
+ x = y([1,2,3])
136
+ assert importable(x, source=False) == 'from numpy import array\narray([1, 2, 3])\n'
137
+ assert importable(y, source=False) == 'from %s import array\n' % y.__module__
138
+ assert importable(x, source=True) == 'from numpy import array\narray([1, 2, 3])\n'
139
+ assert importable(y, source=True) == 'from %s import array\n' % y.__module__
140
+ y = np.int64
141
+ x = y(0)
142
+ assert importable(x, source=False) == 'from numpy import int64\nint64(0)\n'
143
+ assert importable(y, source=False) == 'from %s import int64\n' % y.__module__
144
+ assert importable(x, source=True) == 'from numpy import int64\nint64(0)\n'
145
+ assert importable(y, source=True) == 'from %s import int64\n' % y.__module__
146
+ y = np.bool_
147
+ x = y(0)
148
+ import warnings
149
+ with warnings.catch_warnings():
150
+ warnings.filterwarnings('ignore', category=FutureWarning)
151
+ warnings.filterwarnings('ignore', category=DeprecationWarning)
152
+ if hasattr(np, 'bool'): b = 'bool_' if np.bool is bool else 'bool'
153
+ else: b = 'bool_'
154
+ assert importable(x, source=False) == 'from numpy import %s\n%s(False)\n' % (b,b)
155
+ assert importable(y, source=False) == 'from %s import %s\n' % (y.__module__,b)
156
+ assert importable(x, source=True) == 'from numpy import %s\n%s(False)\n' % (b,b)
157
+ assert importable(y, source=True) == 'from %s import %s\n' % (y.__module__,b)
158
+ except ImportError: pass
159
+
160
+ #NOTE: if before getimport(pow), will cause pow to throw AssertionError
161
+ def test_foo():
162
+ assert importable(_foo, source=True).startswith("import dill\nclass Foo(object):\n def bar(self, x):\n return x*x+x\ndill.loads(")
163
+
164
+ if __name__ == '__main__':
165
+ test_getsource()
166
+ test_itself()
167
+ test_builtin()
168
+ test_imported()
169
+ test_dynamic()
170
+ test_classes()
171
+ test_importable()
172
+ test_numpy()
173
+ test_foo()
.venv/lib/python3.11/site-packages/dill/tests/test_sources.py ADDED
@@ -0,0 +1,190 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @uqfoundation)
4
+ # Copyright (c) 2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+ """
8
+ check that dill.source performs as expected with changes to locals in 3.13.0b1
9
+ see: https://github.com/python/cpython/issues/118888
10
+ """
11
+ # repeat functions from test_source.py
12
+ f = lambda x: x**2
13
+ def g(x): return f(x) - x
14
+
15
+ def h(x):
16
+ def g(x): return x
17
+ return g(x) - x
18
+
19
+ class Foo(object):
20
+ def bar(self, x):
21
+ return x*x+x
22
+ _foo = Foo()
23
+
24
+ def add(x,y):
25
+ return x+y
26
+
27
+ squared = lambda x:x**2
28
+
29
+ class Bar:
30
+ pass
31
+ _bar = Bar()
32
+
33
+ # repeat, but from test_source.py
34
+ import test_source as ts
35
+
36
+ # test objects created in other test modules
37
+ import test_mixins as tm
38
+
39
+ import dill.source as ds
40
+
41
+
42
+ def test_isfrommain():
43
+ assert ds.isfrommain(add) == True
44
+ assert ds.isfrommain(squared) == True
45
+ assert ds.isfrommain(Bar) == True
46
+ assert ds.isfrommain(_bar) == True
47
+ assert ds.isfrommain(ts.add) == False
48
+ assert ds.isfrommain(ts.squared) == False
49
+ assert ds.isfrommain(ts.Bar) == False
50
+ assert ds.isfrommain(ts._bar) == False
51
+ assert ds.isfrommain(tm.quad) == False
52
+ assert ds.isfrommain(tm.double_add) == False
53
+ assert ds.isfrommain(tm.quadratic) == False
54
+ assert ds.isdynamic(add) == False
55
+ assert ds.isdynamic(squared) == False
56
+ assert ds.isdynamic(ts.add) == False
57
+ assert ds.isdynamic(ts.squared) == False
58
+ assert ds.isdynamic(tm.double_add) == False
59
+ assert ds.isdynamic(tm.quadratic) == False
60
+
61
+
62
+ def test_matchlambda():
63
+ assert ds._matchlambda(f, 'f = lambda x: x**2\n')
64
+ assert ds._matchlambda(squared, 'squared = lambda x:x**2\n')
65
+ assert ds._matchlambda(ts.f, 'f = lambda x: x**2\n')
66
+ assert ds._matchlambda(ts.squared, 'squared = lambda x:x**2\n')
67
+
68
+
69
+ def test_findsource():
70
+ lines, lineno = ds.findsource(add)
71
+ assert lines[lineno] == 'def add(x,y):\n'
72
+ lines, lineno = ds.findsource(ts.add)
73
+ assert lines[lineno] == 'def add(x,y):\n'
74
+ lines, lineno = ds.findsource(squared)
75
+ assert lines[lineno] == 'squared = lambda x:x**2\n'
76
+ lines, lineno = ds.findsource(ts.squared)
77
+ assert lines[lineno] == 'squared = lambda x:x**2\n'
78
+ lines, lineno = ds.findsource(Bar)
79
+ assert lines[lineno] == 'class Bar:\n'
80
+ lines, lineno = ds.findsource(ts.Bar)
81
+ assert lines[lineno] == 'class Bar:\n'
82
+ lines, lineno = ds.findsource(_bar)
83
+ assert lines[lineno] == 'class Bar:\n'
84
+ lines, lineno = ds.findsource(ts._bar)
85
+ assert lines[lineno] == 'class Bar:\n'
86
+ lines, lineno = ds.findsource(tm.quad)
87
+ assert lines[lineno] == 'def quad(a=1, b=1, c=0):\n'
88
+ lines, lineno = ds.findsource(tm.double_add)
89
+ assert lines[lineno] == ' def func(*args, **kwds):\n'
90
+ lines, lineno = ds.findsource(tm.quadratic)
91
+ assert lines[lineno] == ' def dec(f):\n'
92
+
93
+
94
+ def test_getsourcelines():
95
+ assert ''.join(ds.getsourcelines(add)[0]) == 'def add(x,y):\n return x+y\n'
96
+ assert ''.join(ds.getsourcelines(ts.add)[0]) == 'def add(x,y):\n return x+y\n'
97
+ assert ''.join(ds.getsourcelines(squared)[0]) == 'squared = lambda x:x**2\n'
98
+ assert ''.join(ds.getsourcelines(ts.squared)[0]) == 'squared = lambda x:x**2\n'
99
+ assert ''.join(ds.getsourcelines(Bar)[0]) == 'class Bar:\n pass\n'
100
+ assert ''.join(ds.getsourcelines(ts.Bar)[0]) == 'class Bar:\n pass\n'
101
+ assert ''.join(ds.getsourcelines(_bar)[0]) == 'class Bar:\n pass\n' #XXX: ?
102
+ assert ''.join(ds.getsourcelines(ts._bar)[0]) == 'class Bar:\n pass\n' #XXX: ?
103
+ assert ''.join(ds.getsourcelines(tm.quad)[0]) == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n'
104
+ assert ''.join(ds.getsourcelines(tm.quadratic)[0]) == ' def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n'
105
+ assert ''.join(ds.getsourcelines(tm.quadratic, lstrip=True)[0]) == 'def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n'
106
+ assert ''.join(ds.getsourcelines(tm.quadratic, enclosing=True)[0]) == 'def quad_factory(a=1,b=1,c=0):\n def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n return dec\n'
107
+ assert ''.join(ds.getsourcelines(tm.double_add)[0]) == ' def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n'
108
+ assert ''.join(ds.getsourcelines(tm.double_add, enclosing=True)[0]) == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n'
109
+
110
+
111
+ def test_indent():
112
+ assert ds.outdent(''.join(ds.getsourcelines(tm.quadratic)[0])) == ''.join(ds.getsourcelines(tm.quadratic, lstrip=True)[0])
113
+ assert ds.indent(''.join(ds.getsourcelines(tm.quadratic, lstrip=True)[0]), 2) == ''.join(ds.getsourcelines(tm.quadratic)[0])
114
+
115
+
116
+ def test_dumpsource():
117
+ local = {}
118
+ exec(ds.dumpsource(add, alias='raw'), {}, local)
119
+ exec(ds.dumpsource(ts.add, alias='mod'), {}, local)
120
+ assert local['raw'](1,2) == local['mod'](1,2)
121
+ exec(ds.dumpsource(squared, alias='raw'), {}, local)
122
+ exec(ds.dumpsource(ts.squared, alias='mod'), {}, local)
123
+ assert local['raw'](3) == local['mod'](3)
124
+ assert ds._wrap(add)(1,2) == ds._wrap(ts.add)(1,2)
125
+ assert ds._wrap(squared)(3) == ds._wrap(ts.squared)(3)
126
+
127
+
128
+ def test_name():
129
+ assert ds._namespace(add) == ds.getname(add, fqn=True).split('.')
130
+ assert ds._namespace(ts.add) == ds.getname(ts.add, fqn=True).split('.')
131
+ assert ds._namespace(squared) == ds.getname(squared, fqn=True).split('.')
132
+ assert ds._namespace(ts.squared) == ds.getname(ts.squared, fqn=True).split('.')
133
+ assert ds._namespace(Bar) == ds.getname(Bar, fqn=True).split('.')
134
+ assert ds._namespace(ts.Bar) == ds.getname(ts.Bar, fqn=True).split('.')
135
+ assert ds._namespace(tm.quad) == ds.getname(tm.quad, fqn=True).split('.')
136
+ #XXX: the following also works, however behavior may be wrong for nested functions
137
+ #assert ds._namespace(tm.double_add) == ds.getname(tm.double_add, fqn=True).split('.')
138
+ #assert ds._namespace(tm.quadratic) == ds.getname(tm.quadratic, fqn=True).split('.')
139
+ assert ds.getname(add) == 'add'
140
+ assert ds.getname(ts.add) == 'add'
141
+ assert ds.getname(squared) == 'squared'
142
+ assert ds.getname(ts.squared) == 'squared'
143
+ assert ds.getname(Bar) == 'Bar'
144
+ assert ds.getname(ts.Bar) == 'Bar'
145
+ assert ds.getname(tm.quad) == 'quad'
146
+ assert ds.getname(tm.double_add) == 'func' #XXX: ?
147
+ assert ds.getname(tm.quadratic) == 'dec' #XXX: ?
148
+
149
+
150
+ def test_getimport():
151
+ local = {}
152
+ exec(ds.getimport(add, alias='raw'), {}, local)
153
+ exec(ds.getimport(ts.add, alias='mod'), {}, local)
154
+ assert local['raw'](1,2) == local['mod'](1,2)
155
+ exec(ds.getimport(squared, alias='raw'), {}, local)
156
+ exec(ds.getimport(ts.squared, alias='mod'), {}, local)
157
+ assert local['raw'](3) == local['mod'](3)
158
+ exec(ds.getimport(Bar, alias='raw'), {}, local)
159
+ exec(ds.getimport(ts.Bar, alias='mod'), {}, local)
160
+ assert ds.getname(local['raw']) == ds.getname(local['mod'])
161
+ exec(ds.getimport(tm.quad, alias='mod'), {}, local)
162
+ assert local['mod']()(sum)([1,2,3]) == tm.quad()(sum)([1,2,3])
163
+ #FIXME: wrong results for nested functions (e.g. tm.double_add, tm.quadratic)
164
+
165
+
166
+ def test_importable():
167
+ assert ds.importable(add, source=False) == ds.getimport(add)
168
+ assert ds.importable(add) == ds.getsource(add)
169
+ assert ds.importable(squared, source=False) == ds.getimport(squared)
170
+ assert ds.importable(squared) == ds.getsource(squared)
171
+ assert ds.importable(Bar, source=False) == ds.getimport(Bar)
172
+ assert ds.importable(Bar) == ds.getsource(Bar)
173
+ assert ds.importable(ts.add) == ds.getimport(ts.add)
174
+ assert ds.importable(ts.add, source=True) == ds.getsource(ts.add)
175
+ assert ds.importable(ts.squared) == ds.getimport(ts.squared)
176
+ assert ds.importable(ts.squared, source=True) == ds.getsource(ts.squared)
177
+ assert ds.importable(ts.Bar) == ds.getimport(ts.Bar)
178
+ assert ds.importable(ts.Bar, source=True) == ds.getsource(ts.Bar)
179
+
180
+
181
+ if __name__ == '__main__':
182
+ test_isfrommain()
183
+ test_matchlambda()
184
+ test_findsource()
185
+ test_getsourcelines()
186
+ test_indent()
187
+ test_dumpsource()
188
+ test_name()
189
+ test_getimport()
190
+ test_importable()
.venv/lib/python3.11/site-packages/dill/tests/test_temp.py ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import sys
10
+ from dill.temp import dump, dump_source, dumpIO, dumpIO_source
11
+ from dill.temp import load, load_source, loadIO, loadIO_source
12
+ WINDOWS = sys.platform[:3] == 'win'
13
+
14
+
15
+ f = lambda x: x**2
16
+ x = [1,2,3,4,5]
17
+
18
+ # source code to tempfile
19
+ def test_code_to_tempfile():
20
+ if not WINDOWS: #see: https://bugs.python.org/issue14243
21
+ pyfile = dump_source(f, alias='_f')
22
+ _f = load_source(pyfile)
23
+ assert _f(4) == f(4)
24
+
25
+ # source code to stream
26
+ def test_code_to_stream():
27
+ pyfile = dumpIO_source(f, alias='_f')
28
+ _f = loadIO_source(pyfile)
29
+ assert _f(4) == f(4)
30
+
31
+ # pickle to tempfile
32
+ def test_pickle_to_tempfile():
33
+ if not WINDOWS: #see: https://bugs.python.org/issue14243
34
+ dumpfile = dump(x)
35
+ _x = load(dumpfile)
36
+ assert _x == x
37
+
38
+ # pickle to stream
39
+ def test_pickle_to_stream():
40
+ dumpfile = dumpIO(x)
41
+ _x = loadIO(dumpfile)
42
+ assert _x == x
43
+
44
+ ### now testing the objects ###
45
+ f = lambda x: x**2
46
+ def g(x): return f(x) - x
47
+
48
+ def h(x):
49
+ def g(x): return x
50
+ return g(x) - x
51
+
52
+ class Foo(object):
53
+ def bar(self, x):
54
+ return x*x+x
55
+ _foo = Foo()
56
+
57
+ def add(x,y):
58
+ return x+y
59
+
60
+ # yes, same as 'f', but things are tricky when it comes to pointers
61
+ squared = lambda x:x**2
62
+
63
+ class Bar:
64
+ pass
65
+ _bar = Bar()
66
+
67
+
68
+ # test function-type objects that take 2 args
69
+ def test_two_arg_functions():
70
+ for obj in [add]:
71
+ pyfile = dumpIO_source(obj, alias='_obj')
72
+ _obj = loadIO_source(pyfile)
73
+ assert _obj(4,2) == obj(4,2)
74
+
75
+ # test function-type objects that take 1 arg
76
+ def test_one_arg_functions():
77
+ for obj in [g, h, squared]:
78
+ pyfile = dumpIO_source(obj, alias='_obj')
79
+ _obj = loadIO_source(pyfile)
80
+ assert _obj(4) == obj(4)
81
+
82
+ # test instance-type objects
83
+ #for obj in [_bar, _foo]:
84
+ # pyfile = dumpIO_source(obj, alias='_obj')
85
+ # _obj = loadIO_source(pyfile)
86
+ # assert type(_obj) == type(obj)
87
+
88
+ # test the rest of the objects
89
+ def test_the_rest():
90
+ for obj in [Bar, Foo, Foo.bar, _foo.bar]:
91
+ pyfile = dumpIO_source(obj, alias='_obj')
92
+ _obj = loadIO_source(pyfile)
93
+ assert _obj.__name__ == obj.__name__
94
+
95
+
96
+ if __name__ == '__main__':
97
+ test_code_to_tempfile()
98
+ test_code_to_stream()
99
+ test_pickle_to_tempfile()
100
+ test_pickle_to_stream()
101
+ test_two_arg_functions()
102
+ test_one_arg_functions()
103
+ test_the_rest()
.venv/lib/python3.11/site-packages/dill/tests/test_weakref.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import dill
10
+ dill.settings['recurse'] = True
11
+ import weakref
12
+
13
+ class _class:
14
+ def _method(self):
15
+ pass
16
+
17
+ class _callable_class:
18
+ def __call__(self):
19
+ pass
20
+
21
+ def _function():
22
+ pass
23
+
24
+
25
+ def test_weakref():
26
+ o = _class()
27
+ oc = _callable_class()
28
+ f = _function
29
+ x = _class
30
+
31
+ # ReferenceType
32
+ r = weakref.ref(o)
33
+ d_r = weakref.ref(_class())
34
+ fr = weakref.ref(f)
35
+ xr = weakref.ref(x)
36
+
37
+ # ProxyType
38
+ p = weakref.proxy(o)
39
+ d_p = weakref.proxy(_class())
40
+
41
+ # CallableProxyType
42
+ cp = weakref.proxy(oc)
43
+ d_cp = weakref.proxy(_callable_class())
44
+ fp = weakref.proxy(f)
45
+ xp = weakref.proxy(x)
46
+
47
+ objlist = [r,d_r,fr,xr, p,d_p, cp,d_cp,fp,xp]
48
+ #dill.detect.trace(True)
49
+
50
+ for obj in objlist:
51
+ res = dill.detect.errors(obj)
52
+ if res:
53
+ print ("%r:\n %s" % (obj, res))
54
+ # else:
55
+ # print ("PASS: %s" % obj)
56
+ assert not res
57
+
58
+ def test_dictproxy():
59
+ from dill._dill import DictProxyType
60
+ try:
61
+ m = DictProxyType({"foo": "bar"})
62
+ except Exception:
63
+ m = type.__dict__
64
+ mp = dill.copy(m)
65
+ assert mp.items() == m.items()
66
+
67
+
68
+ if __name__ == '__main__':
69
+ test_weakref()
70
+ from dill._dill import IS_PYPY
71
+ if not IS_PYPY:
72
+ test_dictproxy()