forked from panda3d/panda3d
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathpickle.py
More file actions
160 lines (129 loc) · 5.26 KB
/
pickle.py
File metadata and controls
160 lines (129 loc) · 5.26 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
""" This module extends standard Python's pickle module so that it is
capable of writing more efficient pickle files that contain Panda
objects with shared pointers. In particular, a single Python
structure that contains many NodePaths into the same scene graph will
write the NodePaths correctly when used with this pickle module, so
that when it is unpickled later, the NodePaths will still reference
into the same scene graph together.
If you use the standard pickle module instead, the NodePaths will each
duplicate its own copy of its scene graph.
This is necessary because the standard pickle module doesn't provide a
mechanism for sharing context between different objects written to the
same pickle stream, so each NodePath has to write itself without
knowing about the other NodePaths that will also be writing to the
same stream. This replacement module solves this problem by defining
a ``__reduce_persist__()`` replacement method for ``__reduce__()``,
which accepts a pointer to the Pickler object itself, allowing for
shared context between all objects written by that Pickler.
Unfortunately, cPickle cannot be supported, because it does not
support extensions of this nature. """
import sys
from panda3d.core import BamWriter, BamReader
if sys.version_info >= (3, 0):
from copyreg import dispatch_table
else:
from copy_reg import dispatch_table
# A funny replacement for "import pickle" so we don't get confused
# with the local pickle.py.
pickle = __import__('pickle')
class Pickler(pickle.Pickler):
    """A Pickler subclass that supplies a shared BamWriter to every
    object written to the stream, so that Panda objects implementing
    __reduce_persist__() can share context (e.g. NodePaths into the
    same scene graph are written once and shared, not duplicated)."""

    def __init__(self, *args, **kw):
        # One BamWriter shared by every object written to this stream;
        # __reduce_persist__() implementations reach it via the Pickler
        # instance passed to them in save() below.
        self.bamWriter = BamWriter()
        pickle.Pickler.__init__(self, *args, **kw)

    # We have to duplicate most of the save() method, so we can add
    # support for __reduce_persist__().
    def save(self, obj):
        """Write obj to the pickle stream.  This is a copy of the
        Python-2-era pickle.Pickler.save(), extended to consult a
        __reduce_persist__() method (which receives this Pickler)
        before falling back to __reduce_ex__()/__reduce__()."""

        # Check for persistent id (defined by a subclass)
        pid = self.persistent_id(obj)
        if pid:
            self.save_pers(pid)
            return

        # Check the memo: an object already written is emitted as a
        # get-reference instead of being written again.
        x = self.memo.get(id(obj))
        if x:
            self.write(self.get(x[0]))
            return

        # Check the type dispatch table
        t = type(obj)
        f = self.dispatch.get(t)
        if f:
            f(self, obj)  # Call unbound method with explicit self
            return

        # Check for a class with a custom metaclass; treat as regular class
        try:
            issc = issubclass(t, type)
        except TypeError:  # t is not a class (old Boost; see SF #502085)
            issc = 0
        if issc:
            self.save_global(obj)
            return

        # Check copy_reg.dispatch_table
        reduce = dispatch_table.get(t)
        if reduce:
            rv = reduce(obj)
        else:
            # New code: check for a __reduce_persist__ method, then
            # fall back to standard methods.  Passing self here is the
            # whole point of this module: the object gains access to
            # this Pickler's shared bamWriter.
            reduce = getattr(obj, "__reduce_persist__", None)
            if reduce:
                rv = reduce(self)
            else:
                # Check for a __reduce_ex__ method, fall back to __reduce__
                reduce = getattr(obj, "__reduce_ex__", None)
                if reduce:
                    rv = reduce(self.proto)
                else:
                    reduce = getattr(obj, "__reduce__", None)
                    if reduce:
                        rv = reduce()
                    else:
                        # Fixed: the bare name PicklingError is never
                        # imported in this module; qualify it with the
                        # pickle module so the intended exception (and
                        # not a NameError) is raised.
                        raise pickle.PicklingError("Can't pickle %r object: %r" %
                                                   (t.__name__, obj))

        # Check for string returned by reduce(), meaning "save as global"
        if type(rv) is str:
            self.save_global(obj, rv)
            return

        # Assert that reduce() returned a tuple
        if type(rv) is not tuple:
            raise pickle.PicklingError("%s must return string or tuple" % reduce)

        # Assert that it returned an appropriately sized tuple
        l = len(rv)
        if not (2 <= l <= 5):
            raise pickle.PicklingError("Tuple returned by %s must have "
                                       "two to five elements" % reduce)

        # Save the reduce() output and finally memoize the object
        self.save_reduce(obj=obj, *rv)
class Unpickler(pickle.Unpickler):
    """An Unpickler subclass that supplies a shared BamReader, so that
    reduction functions whose names end in "Persist" can share context
    while reconstituting objects from the same stream (e.g. NodePaths
    resolving back into one common scene graph)."""

    def __init__(self, *args, **kw):
        # One BamReader shared by every object read from this stream;
        # "...Persist" reduction functions reach it via the Unpickler
        # instance passed to them in load_reduce() below.
        self.bamReader = BamReader()
        pickle.Unpickler.__init__(self, *args, **kw)

    # Duplicate the load_reduce() function, to provide a special case
    # for the reduction function.
    def load_reduce(self):
        # Handler for the REDUCE opcode: pop the argument tuple, peek
        # at the callable beneath it, and replace the callable on the
        # stack with the result of the call.
        stack = self.stack
        args = stack.pop()
        func = stack[-1]
        # If the function name ends with "Persist", then assume the
        # function wants the Unpickler as the first parameter.
        if func.__name__.endswith('Persist'):
            value = func(self, *args)
        else:
            # Otherwise, use the existing pickle convention.
            value = func(*args)
        stack[-1] = value

    #FIXME: how to replace in Python 3?
    # Class-level patch (Python 2 only): install our load_reduce as the
    # dispatch handler for the REDUCE opcode.  Python 3's C Unpickler
    # exposes no such dispatch table, hence the FIXME above.
    if sys.version_info < (3, 0):
        pickle.Unpickler.dispatch[pickle.REDUCE] = load_reduce
# Shorthands
from io import BytesIO
def dump(obj, file, protocol=None):
    """Write a pickled representation of obj to the given open file
    object, using this module's shared-context Pickler."""
    pickler = Pickler(file, protocol)
    pickler.dump(obj)
def dumps(obj, protocol=None):
    """Return the pickled representation of obj as a bytes object,
    using this module's shared-context Pickler."""
    buffer = BytesIO()
    Pickler(buffer, protocol).dump(obj)
    return buffer.getvalue()
def load(file):
    """Read a pickled object from the given open file object and
    return it, using this module's shared-context Unpickler."""
    unpickler = Unpickler(file)
    return unpickler.load()
def loads(str):
    """Reconstitute and return the object pickled in the given bytes
    string, using this module's shared-context Unpickler.

    Note: the parameter name shadows the builtin str; it is kept
    unchanged for backward compatibility with keyword callers.
    """
    return Unpickler(BytesIO(str)).load()