[Zope3-checkins] CVS: Zope3/src/zodbcode - __init__.py:1.1
class_.py:1.1 function.py:1.1 interfaces.py:1.1 module.py:1.1
module.txt:1.1 patch.py:1.1
Fred L. Drake, Jr.
fred at zope.com
Fri Feb 20 17:02:24 EST 2004
Update of /cvs-repository/Zope3/src/zodbcode
In directory cvs.zope.org:/tmp/cvs-serv14949/src/zodbcode
Added Files:
__init__.py class_.py function.py interfaces.py module.py
module.txt patch.py
Log Message:
convert the zodb.code package to be zodbcode
=== Added File Zope3/src/zodbcode/__init__.py ===
#
# This file is necessary to make this directory a package.
=== Added File Zope3/src/zodbcode/class_.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Persistent Classes."""
__metaclass__ = type
from zope.interface import implements
from persistent.cPersistence import UPTODATE, CHANGED, GHOST
from persistent.interfaces import IPersistent
from zodbcode.function import PersistentFunction
import time
# XXX There is a lot of magic here to give classes and instances
# separate sets of attributes. This code should be documented, as it
# is quite delicate, and it should be moved to a separate module.
class SimpleDescriptor(object):
missing = object()
def __init__(self, value):
self._value = value
def __get__(self, obj, cls):
if self._value is self.missing:
raise AttributeError
return self._value
def __set__(self, obj, value):
self._value = value
def __delete__(self, obj):
if self._value is self.missing:
raise AttributeError
del self._value
class ExtClassDescr:
"""Maintains seperate class and instance descriptors for an attribute.
This allows a class to provide methods and attributes without
intefering with normal use of instances. The class and its
instances can each have methods with the same name.
This does interfere with introspection on the class.
"""
def __init__(self, name, instdescr):
self.name = name
self.instdescr = instdescr
def __get__(self, obj, cls):
if obj is None:
return self.clsget(cls)
else:
return self.instdescr.__get__(obj, cls)
def __set__(self, obj, val):
if obj is None:
self.clsset(val)
else:
if self.instdescr is None:
raise AttributeError, self.name
return self.instdescr.__set__(obj, val)
def __delete__(self, obj):
if self.instdescr is None:
raise AttributeError, self.name
return self.instdescr.__delete__(obj)
# subclass should override
def clsget(self, cls):
pass
def clsset(self, val):
pass
def clsdelete(self):
pass
class MethodMixin:
def __init__(self, name, descr, func):
if not hasattr(descr, "__get__"):
# If the object defined in the metaclass is not a descriptor,
# create one for it.
descr = SimpleDescriptor(descr)
super(MethodMixin, self).__init__(name, descr)
self.func = func
def clsget(self, cls):
def f(*args, **kwargs):
try:
return self.func(cls, *args, **kwargs)
except TypeError:
print `self.func`, `cls`, `args`, `kwargs`
raise
return f
class DataMixin:
def __init__(self, name, descr, val):
if not hasattr(descr, "__get__"):
# If the object defined in the metaclass is not a descriptor,
# create one for it.
descr = SimpleDescriptor(descr)
super(DataMixin, self).__init__(name, descr)
self.val = val
def clsget(self, cls):
return self.val
def clsset(self, val):
self.val = val
def clsdelete(self):
del self.val
class ExtClassMethodDescr(MethodMixin, ExtClassDescr):
pass
class ExtClassDataDescr(DataMixin, ExtClassDescr):
pass
class ExtClassHookDataDescr(ExtClassDataDescr):
# Calls a hook when clsset() is called.
def __init__(self, name, descr, val, hook):
super(ExtClassHookDataDescr, self).__init__(name, descr, val)
self.hook = hook
def clsset(self, val):
self.val = val
self.hook()
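# Illustrative sketch (hypothetical; not used by this package): the
# extended descriptors give a class and its instances independent
# values for the same attribute name.
#
#     class Example(object):
#         pass
#     descr = ExtClassDataDescr("attr", SimpleDescriptor("inst"), "cls")
#     setattr(Example, "attr", descr)
#     Example.attr      # -> "cls", via clsget()
#     Example().attr    # -> "inst", via the wrapped instance descriptor
#     Example.__dict__["attr"].clsset("new")   # rebind the class-level value
#
# Note that plain "Example.attr = ..." would replace the descriptor
# itself; PersistentClassMetaClass below routes such assignments through
# the descriptor's __set__.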
# The next three classes conspire to make a PersistentFunction
# behave like a method when found in a class's __dict__.
class PersistentMethod:
"""Make PersistentFunctions into methods."""
def __init__(self, klass, inst, func):
self.im_class = klass
self.im_self = inst
self.im_func = func
def __repr__(self):
if self.im_self is None:
fmt = "<persistent unbound method %s.%s>"
else:
fmt = "<persistent bound method %%s.%%s of %s>" % (self.im_self,)
return fmt % (self.im_class.__name__, self.im_func.__name__)
    def __call__(self, *args, **kwargs):
        if self.im_self is None:
            if not isinstance(args[0], self.im_class):
                raise TypeError("unbound method %s() must be called "
                                "with %s instance as first argument ("
                                "got %s instead)" % (self.im_func.__name__,
                                                     self.im_class.__name__,
                                                     type(args[0]).__name__))
            return self.im_func(*args, **kwargs)
        else:
            return self.im_func(self.im_self, *args, **kwargs)
class PersistentDescriptor:
def __init__(self, objclass, func):
self.__name__ = func.__name__
self.__doc__ = func.__doc__
self.__objclass__ = objclass
self._func = func
# Delegate __getstate__ and __setstate__ to the persistent func.
# The patch module will use these methods to update persistent
# methods in place.
self.__getstate__ = func.__getstate__
self.__setstate__ = func.__setstate__
def __repr__(self):
return "<persistent descriptor %s.%s>" % (self.__objclass__.__name__,
self.__name__)
def __get__(self, object, klass=None):
if object is None:
return PersistentMethod(klass or self.__objclass__, None,
self._func)
else:
return PersistentMethod(klass or self.__objclass__, object,
self._func)
_missing = object()
def findattr(cls, attr, default):
"""Walk the mro of cls to find attr."""
for c in cls.__mro__:
o = c.__dict__.get(attr, _missing)
if o is not _missing:
return o
return default
class StateChangeDataDescr(ExtClassDataDescr):
# A data descriptor for _p_changed.
pass
class PersistentClassMetaClass(type):
# An attempt to make persistent classes look just like other
# persistent objects by providing class attributes and methods
# that behave like the persistence machinery.
# The chief limitation of this approach is that class.attr won't
# always behave the way it does for normal classes
# A persistent class can never be a ghost, because there are too
# many places where Python will attempt to inspect the class
# without using getattr(). As a result, it would be impossible to
# guarantee that the class would be unghostified at the right
# time. It's really difficult to guarantee this property without
# help from the connection, because a ghost can't be unghosted
# until after the connection sets its _p_jar.
# The hack solution is to have a hook for _p_jar that activates
# the object the first time it is set.
#implements(IPersistent)
__implements__ = IPersistent
# A class is normally created in the UPTODATE state, but when a
# new ghost is created for it the serialization machinery passes
# GHOST instead of UPTODATE. See __getnewargs__().
def __new__(meta, name, bases, dict, state=UPTODATE):
if "__dict__" in dict:
del dict["__dict__"]
cls = super(PersistentClassMetaClass, meta).__new__(
meta, name, bases, dict)
cls._pc_init = False
# helper functions
def extend_attr(attr, v):
prev = findattr(cls, attr, SimpleDescriptor.missing)
setattr(cls, attr, ExtClassDataDescr(attr, prev, v))
def extend_meth(attr, m):
prev = findattr(cls, attr, SimpleDescriptor.missing)
setattr(cls, attr, ExtClassMethodDescr(attr, prev, m))
extend_attr("_p_oid", None)
extend_attr("_p_atime", time.time() % 86400)
extend_attr("_p_state", state)
extend_attr("_p_changed", None)
extend_meth("_p_activate", meta._p_activate)
extend_meth("_p_deactivate", meta._p_deactivate)
# XXX _p_invalidate
# Create a descriptor that calls _p_activate() when _p_jar is set.
inst_jar_descr = findattr(cls, "_p_jar", None)
setattr(cls, "_p_jar",
ExtClassHookDataDescr("_p_jar", inst_jar_descr, None,
getattr(cls, "_p_activate")))
for k, v in dict.items():
if isinstance(v, PersistentFunction):
setattr(cls, k, PersistentDescriptor(cls, v))
# A class could define any of these attributes, thus we
# need to create extended descriptors so that the class
# and its instances have separate versions.
extend_meth("__getstate__", meta.__getstate__)
extend_meth("__setstate__", meta.__setstate__)
# Don't need this with interface geddon
# extend_attr("__implements__", meta.__implements__)
cls._pc_init = True
return cls
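    # Sketch (hypothetical usage; normally the patch module creates these
    # classes while converting a module's dict):
    #
    #     PC = PersistentClassMetaClass("PC", (object,), {"x": 1})
    #     PC._p_state    # -> UPTODATE, served by an ExtClassDataDescr
    #     PC.x           # -> 1; the access also refreshes _p_atime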
def __getattribute__(cls, name):
# XXX I'm not sure I understand this code any more.
super_meth = super(PersistentClassMetaClass, cls).__getattribute__
        # If we are initializing the class, don't try to check variables
# like _p_state, since they may not be initialized.
if not super_meth("_pc_init"):
return super_meth(name)
if (name[0] != "_" or
not (name.startswith("_p_") or name.startswith("_pc_") or
name == "__dict__")):
if cls._p_state == GHOST:
cls._p_activate()
cls._p_atime = int(time.time() % 86400)
return super_meth(name)
# XXX There needs to be an _p_changed flag so that classes get
# registered with the txn when they are modified.
def __setattr__(cls, attr, val):
if not attr.startswith("_pc_") and cls._pc_init:
descr = cls.__dict__.get(attr)
if descr is not None:
set = getattr(descr, "__set__", None)
if set is not None:
set(None, val)
## cls._p_changed = True
return
super(PersistentClassMetaClass, cls).__setattr__(attr, val)
def __delattr__(cls, attr):
if attr.startswith('_p_'):
# XXX what should happen with these?
return
super(PersistentClassMetaClass, cls).__delattr__(attr)
def __repr__(cls):
return "<persistent class %s.%s>" % (cls.__module__,
cls.__name__)
# It should be possible for getstate / setstate to deal with
# arbitrary class attributes. That goal is hard to achieve,
# because there are several funny descriptors that need to
# be handled specially.
def __getstate__(cls):
dict = {}
for k in cls.__dict__.keys():
v = getattr(cls, k)
if isinstance(v, PersistentMethod):
dict[k] = v.im_func
continue
if (k in ["__module__", "__weakref__", "__dict__"]
or k.startswith("_p_") or k.startswith("_pc_")):
continue
# XXX The following test isn't right because overriding
# must be allowed, but I haven't figured that out yet.
# __getstate__ and __setstate__ might be overridden
# __implements__ might be overridden
if k in ["__getstate__", "__setstate__", "__implements__"]:
continue
dict[k] = v
return dict
def __setstate__(cls, dict):
for k, v in dict.items():
if isinstance(v, PersistentFunction):
setattr(cls, k, PersistentDescriptor(cls, v))
else:
setattr(cls, k, v)
# XXX Should the object get marked as a ghost when it is, in fact,
# not a ghost? The most obvious answer is no. But if we don't
# then we need some other attribute that can be used to handle
# invalidations of classes and make _p_activate() work as expected.
# Need to decide on a good answer.
def _p_deactivate(cls):
# do nothing but mark the state change for now
cls._p_state = GHOST
def _p_activate(cls):
# The logic here is:
# If the class hasn't finished executing __new__(), don't
# try to load its state.
# If the class has a jar but no oid, it's a new object
# and doesn't have state in the database.
if cls._p_state == GHOST and cls._pc_init:
dm = cls._p_jar
if dm is not None and cls._p_oid:
cls._p_state = CHANGED
try:
# XXX Make sure the object is in the cache before
# calling setstate().
dm._cache[cls._p_oid] = cls
dm.setstate(cls)
finally:
# XXX Should really put in special inconsistent state
cls._p_state = UPTODATE
else:
print id(cls), "dm", dm, "oid", cls._p_oid
# Methods below here are not wrapped to be class-only attributes.
# They are available as methods of classes using this metaclass.
def __getnewargs__(cls):
# XXX This should really be _p_getnewargs() or something like that.
# If the class is later loaded and unghostified, the arguments
# passed to __new__() won't have an __module__. It seems that
        # the module gets set to zodbcode.class_ in that case, which
# is wrong.
return (cls.__name__, cls.__bases__,
{"__module__": cls.__module__}, GHOST)
def _p_newstate(cls, acls):
# Update a class's __dict__ in place. Must use setattr and
# delattr because __dict__ is a read-only proxy.
# XXX This doesn't handle __methods__ correctly.
# XXX I'm not sure how this is supposed to handle the
# ExtClassDataDescrs. As a hack, I'm deleting _p_oid
# and _p_jar from the keys dict, because I know they
# will be descrs and they won't change as a result of
# update. It appears that if the new class has a descr
# that isn't set on the class, it will stomp on the old
# class's value. Not sure if this is a problem in general.
def getkeys(cls):
L = [n for n in cls.__dict__.keys()
if (not (n.startswith("__") and n.endswith("__"))
and not n.startswith("_p_"))
]
d = {}
for elt in L:
d[elt] = True
return d
oldnames = getkeys(cls)
newnames = getkeys(acls)
for name in oldnames:
if not name in newnames:
delattr(cls, name)
for name in newnames:
setattr(cls, name, acls.__dict__[name])
=== Added File Zope3/src/zodbcode/function.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Persistent functions."""
import dis
import new
import sys
# in 2.3, this will be spelled new.function and new.code
from types import FunctionType as function, CodeType as code
from persistent import Persistent
_STORE_GLOBAL = chr(dis.opname.index("STORE_GLOBAL"))
def has_side_effect(func):
    # Note: this may match the byte as an oparg, not just as an opcode,
    # so it can report a false positive.
return _STORE_GLOBAL in func.func_code.co_code
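# For example (a sketch; the function names are made up):
#
#     def bump():
#         global counter
#         counter = counter + 1
#     has_side_effect(bump)    # -> True; bytecode contains STORE_GLOBAL
#
#     def pure(x):
#         return x + 1
#     has_side_effect(pure)    # -> normally False, though a matching
#                              # oparg byte can yield a false positive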
class CodeWrapper:
"""Package a code object so that it can be pickled."""
nested = 0
def __init__(self, co):
consts = co.co_consts
nested = [(i, c) for i, c in zip(range(len(consts)), consts)
if isinstance(c, code)]
if nested:
self.nested = 1
L = list(consts)
for i, c in nested:
L[i] = CodeWrapper(c)
consts = tuple(L)
# args stores the arguments to new.code in order
self.args = [co.co_argcount,
co.co_nlocals,
co.co_stacksize,
co.co_flags,
co.co_code,
consts,
co.co_names,
co.co_varnames,
co.co_filename,
co.co_name,
co.co_firstlineno,
co.co_lnotab,
co.co_freevars,
co.co_cellvars]
def ascode(self):
if self.nested:
L = list(self.args[5])
for i, elt in zip(range(len(L)), L):
if isinstance(elt, CodeWrapper):
L[i] = elt.ascode()
self.args[5] = tuple(L)
return new.code(*self.args)
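# Sketch: a CodeWrapper survives pickling, unlike a raw code object
# (assumes f is some plain function defined in this interpreter):
#
#     import pickle
#     cw = pickle.loads(pickle.dumps(CodeWrapper(f.func_code)))
#     newcode = cw.ascode()    # a freshly built, equivalent code object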
def get_code_args(co):
"""Return args from code object suitable for passing to constructor."""
class PersistentFunction(Persistent):
def __init__(self, func, module):
# Use _pf_ as the prefix to minimize the possibility that
# these attribute names will conflict with function attributes
# found in user code. It would have been nice to use _p_
        # since it's already a reserved attribute prefix, but the
        # base persistent getattr function does not unghostify an
        # object on references to _p_ attributes.
self._pf_func = func
self._v_side_effect = has_side_effect(func)
self._pf_module = module
self._pf_code = {}
        # Python doesn't provide enough rope to recreate a closure. The
        # cell objects are opaque, which means Python code can't extract
        # the objects from them or recreate them on unpickling. In
        # principle this could be fixed with C code, but the fix belongs
        # in Python itself, not in Zope.
if func.func_code.co_freevars:
raise TypeError, "persistent function can not have free variables"
def __repr__(self):
return "<PersistentFunction %s.%s>" % (self._pf_module.__name__,
self._pf_func.func_name)
# We need attribute hooks to handle access to _pf_ attributes in a
# special way. All other attributes should be looked up on
# _pf_func.
def __getattr__(self, attr):
# If it wasn't found in __dict__, then it must be a function
# attribute.
if attr == '_pf_func':
raise AttributeError, attr
return getattr(self._pf_func, attr)
def __setattr__(self, attr, value):
if not self._p_setattr(attr, value):
# the persistence machinery didn't handle this attribute,
# it must be ours
if attr.startswith('_pf_'):
self.__dict__[attr] = value
if attr == "_pf_func":
self._v_side_effect = has_side_effect(self._pf_func)
else:
setattr(self._pf_func, attr, value)
if not attr.startswith('_v_'):
self._p_changed = 1
def __delattr__(self, attr):
if not self._p_delattr(attr):
# the persistence machinery didn't handle this attribute,
# it must be ours
if attr.startswith('_pf_'):
del self.__dict__[attr]
else:
delattr(self._pf_func, attr)
if not attr.startswith('_v_'):
self._p_changed = 1
def __call__(self, *args, **kwargs):
# We must make sure that _module is loaded when func is
# executed because the function may reference a global
# variable and that global variable must be in the module's
# __dict__. We can't use a PersistentDict because the
# interpreter requires that globals be a real dict.
self._pf_module._p_activate()
# XXX What if the function module is deactivated while the
# function is executing? It seems like we need to expose
# refcounts at the Python level to guarantee that this will
# work.
try:
return self._pf_func(*args, **kwargs)
finally:
# If the func has a side-effect, the module must be marked
# as changed. We use the conservative approximation that
# any function with a STORE_GLOBAL opcode has a
            # side-effect, regardless of whether a particular call
# of the function actually executes STORE_GLOBAL.
# XXX Is this sufficient?
if self._v_side_effect:
self._pf_module._p_changed = True
def __getstate__(self):
# If func_dict is empty, store None to avoid creating a dict
# unnecessarily when the function is unpickled
# XXX new.function doesn't accept a closure
func = self._pf_func
func_state = func.func_defaults, func.func_dict or None
# Store the code separately from the function
code = func.func_code
        # The code object can only be reused in an interpreter
# running the same version of Python and with the same
# __debug__ value. Store code in a dict keyed by these two values.
key = sys.version_info, __debug__
if key not in self._pf_code:
self._pf_code[key] = CodeWrapper(code)
return func_state, self._pf_code, self._pf_module
def __setstate__(self, (func, code, mod)):
self._pf_code = code
self._pf_module = mod
# recreate the code object
code = None
key = sys.version_info, __debug__
cowrap = self._pf_code.get(key, None)
if cowrap is None:
assert False, "not implemented yet"
else:
code = cowrap.ascode()
func_defaults, func_dict = func
if func_defaults:
func = new.function(code, mod.__dict__, None, func_defaults)
else:
func = new.function(code, mod.__dict__)
if func_dict:
func.func_dict.update(func_dict)
self._pf_func = func
self._v_side_effect = has_side_effect(func)
=== Added File Zope3/src/zodbcode/interfaces.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
from zope.interface import Interface, Attribute
class IPersistentModuleImportRegistry(Interface):
def findModule(name):
"""Return module registered under name or None."""
def modules():
"""Return a list of module names in the registry."""
class IPersistentModuleUpdateRegistry(IPersistentModuleImportRegistry):
def setModule(name, module):
"""Register module under name.
Raises ValueError if module is already registered.
"""
def delModule(name):
"""Unregister module registered under name.
        Raises KeyError if the module is not registered.
"""
class IPersistentModuleManager(Interface):
def new(name, source):
"""Create and register a new named module from source."""
def update(src):
"""Update the source of the existing module."""
def remove():
"""Unregister the module and forget about it."""
name = Attribute("Absolute module name")
source = Attribute("Module source string")
=== Added File Zope3/src/zodbcode/module.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Persistent Module."""
__metaclass__ = type
from zope.interface import implements
from persistent import Persistent
from persistent.cPersistence import GHOST
from zodbcode.interfaces import IPersistentModuleManager
from zodbcode.interfaces \
import IPersistentModuleImportRegistry, IPersistentModuleUpdateRegistry
from zodbcode.patch import NameFinder, convert
# builtins are explicitly assigned when a module is unpickled
import __builtin__
# Modules aren't picklable by default, but we'd like them to be
# pickled just like classes (by name).
import copy_reg
def _pickle_module(mod):
return _unpickle_module, (mod.__name__,)
def _unpickle_module(modname):
mod = __import__(modname)
if "." in modname:
parts = modname.split(".")[1:]
for part in parts:
mod = getattr(mod, part)
return mod
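# For a dotted name, __import__ returns the top-level package and the
# getattr walk descends to the leaf. For example (stdlib module,
# illustrative only):
#
#     _unpickle_module("logging.handlers")    # -> the logging.handlers module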
copy_reg.pickle(type(copy_reg), _pickle_module, _unpickle_module)
# XXX Is this comment still relevant?
#
# There seems to be something seriously wrong with a module pickle
# that contains objects pickled via save_global(). These objects are
# pickled using references to the module. It appears that unpickling the
# object in the module causes the persistence machinery to fail.
#
# My suspicion is that the assignment to _p_state before trying to
# load the state confuses things. The first call to setstate attempts
# to reference an attribute of the module. That getattr() fails because
# the module is not a ghost, but does have an empty dict. Since
# that getattr() fails, its state can't be unpickled.
#
# Not sure what to do about this.
class PersistentModule(Persistent):
def __init__(self, name):
self.__name__ = name
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.__name__)
# XXX need getattr &c. hooks to update _p_changed?
# XXX what about code that modifies __dict__ directly?
# XXX one example is a function that rebinds a global
def __getstate__(self):
d = self.__dict__.copy()
try:
del d["__builtins__"]
except KeyError:
pass
return d
def __setstate__(self, state):
state["__builtins__"] = __builtin__
self.__dict__.update(state)
class PersistentPackage(PersistentModule):
# XXX Is it okay that these packages don't have __path__?
# A PersistentPackage can exist in a registry without a manager.
# It only gets a manager if someone creates an __init__ module for
# the package.
def __init__(self, name):
self.__name__ = name
__persistent_module_registry__ = "__persistent_module_registry__"
def newModule(registry, name, source):
"""Return a manager object for a newly created module."""
mgr = PersistentModuleManager(registry)
mgr.new(name, source)
return mgr
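# Sketch of a round trip (a hypothetical in-memory session, outside any
# database; the module name and source are made up):
#
#     registry = PersistentModuleRegistry()
#     mgr = newModule(registry, "pmod", "def f(x):\n    return x + 1\n")
#     mod = registry.findModule("pmod")
#     mod.f(1)    # -> 2; f was converted to a PersistentFunction
#     mgr.update("def f(x):\n    return x + 2\n")
#     mod.f(1)    # -> 3; the same function object, updated in place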
def compileModule(module, registry, source):
# Try to prevent compilation errors from files without trailing
# newlines.
if source and source[-1] != "\n":
source += "\n"
module._p_changed = True
moddict = module.__dict__
old_names = NameFinder(module)
moddict[__persistent_module_registry__] = registry
# XXX need to be able to replace sys.std{in,out,err} at this point
exec source in moddict
# XXX and restore them here.
del moddict[__persistent_module_registry__]
new_names = NameFinder(module)
replacements = new_names.replacements(old_names)
convert(module, replacements)
class PersistentModuleManager(Persistent):
implements(IPersistentModuleManager)
def __init__(self, registry):
self._registry = registry
self._module = None
self.name = None
self.source = None
def new(self, name, source):
"""Return a new module from a name and source text."""
if self._module is not None:
raise ValueError, "module already exists"
if "." in name:
parent = self._new_package(name)
else:
parent = None
self._module = PersistentModule(name)
try:
self._registry.setModule(name, self._module)
except ValueError:
self._module = None
raise
self.name = name
try:
self.update(source)
except:
self._registry.delModule(name)
raise
if parent is not None:
modname = name.split(".")[-1]
setattr(parent, modname, self._module)
def update(self, source):
compileModule(self._module, self._registry, source)
self.source = source
def remove(self):
self._registry.delModule(self._module.__name__)
self._module = None
def _new_package(self, name):
parent = self._get_parent(name)
modname = name.split(".")[-1]
if modname == "__init__":
self._module = parent
return None
else:
self._module = PersistentModule(name)
return parent
def _get_parent(self, name):
# If a module is being created in a package, automatically
        # create parent packages that do not already exist.
parts = name.split(".")[:-1]
parent = None
for i in range(len(parts)):
if parts[i] == "__init__":
raise ValueError, "__init__ can not be a package"
pname = ".".join(parts[:i+1])
package = self._registry.findModule(pname)
if package is None:
package = PersistentPackage(pname)
self._registry.setModule(pname, package)
if parent is not None:
setattr(parent, parts[i], package)
elif not isinstance(package, PersistentPackage):
raise ValueError, "%s is module" % pname
parent = package
return parent
class PersistentModuleImporter:
"""An import hook that loads persistent modules.
The importer cooperates with other objects to make sure imports of
persistent modules work correctly. The default importer depends
on finding a persistent module registry in the globals passed to
__import__(). It looks for the name __persistent_module_registry__.
A PersistentModuleManager places its registry in the globals used
to exec module source.
It is important that the registry be activated before it is used
to handle imports. If a ghost registry is used for importing, a
circular import occurs. The second import occurs when the
machinery searches for the class of the registry. It will re-use
the registry and fail, because the registry will be marked as
changed but not yet have its state loaded.
XXX There ought to be a way to deal with this.
"""
# The import hook doesn't use sys.modules, because Zope might want
# to have placeful registries. That is, a particular module might
# execute in a context where there is more than one persistent
# module registry active. In this case, it isn't safe to use
# sys.modules, because each registry could have a different binding
# for a particular name.
_saved_import = None
def install(self):
if self._saved_import is not None:
raise TypeError("Already installed!")
self._saved_import = __builtin__.__import__
__builtin__.__import__ = self.__import__
    def uninstall(self):
        if self._saved_import is None:
            raise TypeError("Not installed!")
        __builtin__.__import__ = self._saved_import
        self._saved_import = None
def _import(self, registry, name, parent, fromlist):
mod = None
if parent is not None:
fullname = "%s.%s" % (parent, name)
mod = registry.findModule(fullname)
if mod is None:
parent = None
if mod is None: # no parent or didn't find in parent
mod = registry.findModule(name)
if mod is None:
return None
if fromlist:
if isinstance(mod, PersistentPackage):
self._import_fromlist(registry, mod, fromlist)
return mod
else:
i = name.find(".")
if i == -1:
return mod
name = name[:i]
if parent:
name = "%s.%s" % (parent, name)
top = registry.findModule(name)
assert top is not None, "No package for module %s" % name
return top
def _import_fromlist(self, registry, mod, fromlist):
for name in fromlist:
if not hasattr(mod, name):
fullname = "%s.%s" % (mod.__name__, name)
self._import(registry, fullname, None, [])
def __import__(self, name, globals={}, locals={}, fromlist=[]):
registry = globals.get(__persistent_module_registry__)
if registry is not None:
mod = self._import(registry, name, self._get_parent(globals),
fromlist)
if mod is not None:
return mod
return self._saved_import(name, globals, locals, fromlist)
def _get_parent(self, globals):
name = globals.get("__name__")
if name is None or "." not in name:
return None
i = name.rfind(".")
return name[:i]
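# Sketch of installing the hook (hypothetical; Zope wires this up as
# part of its startup):
#
#     importer = PersistentModuleImporter()
#     importer.install()
#     try:
#         pass  # execute code whose globals carry
#               # __persistent_module_registry__
#     finally:
#         importer.uninstall()
#
# Imports fall through to the saved builtin __import__ unless the
# importing globals contain a persistent module registry.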
class PersistentModuleRegistry(Persistent):
"""A collection of persistent modules.
The registry is similar in purpose to sys.modules. A persistent
module manager stores its modules in a registry, and the importer
looks for them there.
"""
implements(IPersistentModuleImportRegistry,
IPersistentModuleUpdateRegistry)
def __init__(self):
self._modules = {}
def findModule(self, name):
assert self._p_changed is not None
return self._modules.get(name)
def setModule(self, name, module):
if name in self._modules:
# The name is already in use.
# XXX should raise a better error
raise ValueError, name
self._p_changed = True
self._modules[name] = module
def delModule(self, name):
self._p_changed = True
del self._modules[name]
def modules(self):
"""Return a list of the modules in the registry."""
return self._modules.keys()
class ManagedRegistry(PersistentModuleRegistry):
"""A collection of persistent modules and their managers.
An extension of the persistent module registry that also collects
the managers. For persistent modules to be useful, the managers
must be stored in the database. This registry stores managers
as well as their modules, so that all objects related to the modules
in the registry are reachable from the registry.
"""
def __init__(self):
super(ManagedRegistry, self).__init__()
self._mgrs = {}
def newModule(self, name, source):
mgr = PersistentModuleManager(self)
mgr.new(name, source)
self._p_changed = True
self._mgrs[name] = mgr
def updateModule(self, name, source):
self._mgrs[name].update(source)
def removeModule(self, name):
self._mgrs[name].remove()
self._p_changed = True
del self._mgrs[name]
=== Added File Zope3/src/zodbcode/module.txt ===
Persistent Modules
Document Overview
This document seeks to capture technical information about
persistent modules to guide and document their design.
Goals
These goals largely come from Zope 3. It would be worthwhile
considering other applications.
- Persistent modules are used to support management of software
using the ZODB.
- Software can be updated using network
clients, such as web browsers and file-synchronization tools.
- Application-server clusters can be updated
transactionally without requiring server restarts.
- Persistent modules leverage a familiar model, modules, for
managing Python software.
- Persistent modules can be synchronized to a file-system using
the Zope file-system synchronization framework. Persistent
modules are synchronized for purposes including:
o Use of traditional tools such as editors and code-analysis
tools
o Revision control
Ideally, the file-system representation would consist of a
Python source file.
Use cases
- Create classes and functions that implement Zope 3 components.
o Utility, Adapter, View, and service classes and factories.
o Content components, which are typically persistent and/or
pickleable.
- Define interfaces, including schema
- Import classes, functions, and interfaces from other modules.
- Import classes, functions, and interfaces from other persistent
objects. For example, an adapter registration object might have
a direct reference to a persistent-module-defined class.
- Change module source
- Changes are reflected in module state
- Changes are reflected in objects imported into other modules.
- Synchronize modules with a file-system representation.
Edge cases
???
Fundamental dilemma
Python modules were not designed to change at run time. The
source of a Python module normally doesn't change while a Python
program is running. There is a crude reload tool that allows
modules to be manually reloaded to handle source changes.
Python modules contain mutable state. A module has a dictionary
that may be mutated by application code. It may contain mutable
data that is modified at run time. This is typically used to
implement global registries.
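For example (illustrative only), a module might maintain a registry
that other code mutates at run time; none of these mutations are
reflected in the module's source:

    handlers = {}

    def register(name, handler):
        handlers[name] = handler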
When a module is reloaded, it is reexecuted with a dictionary that
includes the results of the previous execution.
Programs using the ZODB may be said to have logical lifetimes that
exceed the lifetimes of individual processes. In addition, the
program might exist as multiple individual processes with
overlapping run-times.
The lifetime of a persistent program is long enough that it is
likely that module source code will change during the life time
of the program.
Issues
- Should the state of a module be represented solely by the module
source?
Consider the possibilities:
A. Module state is represented solely by its source.
- This would be a departure from the behavior of standard
Python modules. Standard Python modules retain a module
dictionary that is not overwritten by reloads. Python
modules may be mutated from outside and may contain mutable
data structures that are modified at run time.
OTOH, a regular module's state is not persistent or shared
across processes.
For standard Python modules, one could view the module
source as an expression of the initial state of the
module. (This isn't quite right either, since some modules
are written in such a way that they anticipate module
reloads.)
- Deleting variables from a module's source that have been
imported by other modules or objects will cause the imported
values to become disconnected from the module's source.
Even if the variables are added back later, the
previously-imported values will be disconnected.
It is tempting to introduce a data structure to record
imports made from a module. For example, suppose module M1
imports X from M2. It's tempting to record that fact in M2,
so that we could disallow M2 being removed or being changed in
such a way that M2 no longer defines X. Unfortunately, that
would introduce state that isn't captured by M2's source.
- Persistent modules could only be used for software. You
wouldn't be able to use them to store mutable data, such as
registries or counters, that are updated outside of the
execution of the module source.
B. Module state isn't represented solely by its source.
- It would become possible to allow mutable data, such as
registries in persistent modules.
- It could be very difficult to see what a module's state is.
If a module contained mutable data, you'd need some way to
get to that data so you could inspect and manipulate it.
- When a module is synchronized to the file system, you'd need
to synchronize its source and you'd also need to synchronize its
contents in some way. Synchronization of the contents could
be done using an XML pickle, but management of the data
using file-system-based tools would be cumbersome.
You'd end up with data duplicated between the two
representations. It would be cumbersome to manage the
duplicated data in a consistent way.
C. Module state is represented solely by its source, but
additional metadata is allowed.
This is the same as option A, except that we support metadata
management. The metadata could include dependency
information. We'd keep track of external usage (import) of
module variables to influence whether deletion of the module
or defined variables is allowed, or whether to issue warnings
when variables are deleted.
Note that the management of the metadata need not be the
responsibility of the module. This could be done via some
application-defined facility, in which case the module
facility would need to provide an API for implementing hooks
for managing this information.
Special cases
This section contains examples that may introduce challenges for
persistent modules or that might motivate or highlight issues
described above.
- Persistent classes
Persistent classes include data that are not represented by the
class sources. A class caches slot definitions inherited from
base classes. This is information that is only indirectly
represented by its source. Similarly, a class manages a
collection of its subclasses. This allows a class to
invalidate cached slots in subclasses when a new slot definition
is assigned (via a setattr). The cached slots and collection of
subclasses are not part of a persistent class's state. They aren't
saved in the database, but are recomputed when the class is
loaded into memory or when its subclasses are loaded into memory.
Consider two persistent modules, M1, which defines class C1,
and M2, which defines class C2. C2 subclasses C1. C1 defines a
__getitem__ slot, which is inherited and cached by C2.
Suppose we have a process, P1, which has M1 and M2 in memory.
C2 in P1 has a (cached) __getitem__ slot filled with the
definition inherited from C1 in P1. C1 in P1 has C2 in its
collection of subclasses. In P1, we modify M1, by editing and
recompiling its source. When we recompile M1's source, we'll
update the state of C1 by calling its __setstate__ method,
passing the new class dictionary. The __setstate__ method will,
in turn, use setattr to assign the values from the new
dictionary. If we set a slot attribute, the __setattr__
method in C1 will notify each of its subclasses that the slot
has changed. Now, suppose that we've added a __len__ slot
definition when we modified the source. When we set the __len__
attribute in C1, C2 will be notified that there is a new slot
definition for __len__.
Suppose we have a process P2, which also has M1 and M2 loaded
into memory. As in P1, C2 in P2 caches the __getitem__ slot and
C1 in P2 has C2 in P2 in its collection of subclasses. Now,
when M1 in P1 is modified and the corresponding transaction is
committed, an invalidation for M1 and all of the persistent
objects it defines, including C1, is sent to all other
processes. When P2 gets the invalidation for C1, it invalidates
C1. It happens that persistent classes are not allowed to be
ghosts. When a persistent class is invalidated, it immediately
reloads its state, rather than converting itself into a
ghost. When C1's state is reloaded in P2, we assign its
attributes from the new class dictionary. When we assign slots,
we notify its subclasses, including C2 in P2.
Suppose we have a process P3, that only has M1 in memory. In
P3, M2 is not in memory, nor are any of its subobjects. In P3,
C2 is not in the collection of subclasses of C1, because C2 is
not in memory and the collection of subclasses is volatile data
for C1. When we modify C1 in P1 and commit the transaction, the
state of C1 in P3 will be updated, but the state of C2 is not
affected in P3, because it's not in memory.
Finally, consider a process, P4 that has M2, but not M1 in
memory. M2 is not a ghost, so C2 is in memory. Now, since C2 is
in memory, C1 must be in memory, even though M1 is not in
memory, because C2 has a reference to C1. Further, C1 cannot
be a ghost, because persistent classes are not allowed to be
ghosts. When we commit the transaction in P1 that updates M1, an
invalidation for C1 is sent to P4 and C1 is updated. When C1 is
updated, its subclasses (in P4), including C2, are notified, so
that their cached slot definitions are updated.
When we modify M1, all copies in memory of C1 and C2 are updated
properly, even though the data they cache is not cached
persistently. This works, and only works, because persistent
classes are never ghosts. If a class could be a ghost, then
invalidating it would have no effect and non-ghost dependent
classes would not be updated.
- Persistent interfaces
Like classes, Zope interfaces cache certain information. An
interface maintains a set of all of the interfaces that it
extends. In addition, interfaces maintain a collection of all
of their sub-interfaces. The collection of subinterfaces is
used to notify sub-interfaces when an interface changes.
(Interfaces are a special case of a more general class of
objects, called "specifications", that include both interfaces
and interface declarations. Similar caching is performed for
other specifications and related data structures. To simplify
the discussion, however, we'll limit ourselves to interfaces.)
When designing persistent interfaces, we have alternative
approaches to consider:
A. We could take the same approach as that taken with persistent
classes. We would not save cached data persistently. We
would compute it as objects are moved into memory.
To take this approach, we'd need to also make persistent
interfaces non-ghostifiable. This is necessary to properly
propagate object changes.
One could argue that non-ghostifiability of classes is a
necessary wart forced on us by details of Python classes that
are beyond our control, and that we should avoid creating new
kinds of objects that require non-ghostifiability.
B. We could store the cached data persistently. For example, we
could store the set of extended interfaces and the set of
subinterfaces in persistent dictionaries.
A significant disadvantage of this approach is that
persistent interfaces would accumulate state that is not
reflected in their source code; however, it's worth noting
that, while the dependency and cache data cannot be derived
from a single module source, it *can* be derived from the
sources of all of the modules in the system. We can
implement persistent interfaces in such a way that execution
of module code causes all dependencies among module-defined
interfaces to be recomputed correctly.
(This is, to me, Jim, an interesting case: state that can be
computed during deserialization from other serialized
state. This should not be surprising, as we are essentially
talking about cached data used for optimization purposes.)
Proposals
- A module's state must be represented, directly or indirectly,
by its source. The state may also include information, such as
caching data, that is derivable from its source-represented
state.
It is unclear if or how we will enforce this. Perhaps it will
be just a guideline. The module-synchronization adapters used
in Zope will only synchronize the module source. If a module
defines state that is not represented by or derivable from its
source, then that data will be lost in synchronization. Of
course, applications that don't use the synchronization
framework would be unaffected by this limitation. Alternatively,
one could develop custom module-synchronization adapters that
handled extra module data, however, development of such adapters
will be outside the scope of the Zope project.
Notes
- When we invalidate a persistent class, we need to delete all of
the attributes defined by its old dictionary that are not
defined by the new class dictionary.
=== Added File Zope3/src/zodbcode/patch.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Patch references to auto-persistent objects in a module.
When a persistent module is compiled, all classes and functions should
be converted to persistent classes and functions. When a module is
updated, it is compiled and its persistent functions and classes are
updated in place so that clients of the module see the update.
The specific semantics of the convert and update-in-place operations
are still being determined. Here are some rough notes:
- Classes and functions are not converted in place. New objects are
created to replace the builtin functions and classes.
- Every function object is converted to a PersistentFunction.
- Every class is converted to a new class that is created by calling
the PersistentClassMetaClass with the name, bases, and dict of the
class being converted.
- The conversion operation must preserve object identity. If an
object created by a def or class statement is referenced elsewhere
in the module, all references must be replaced with references to
the converted object.
Implementation notes:
The conversion operation is implemented using a pickler. It wasn't
possible to use the copy module, because it isn't possible to extend
the copy module in a safe way. The copy module depends on module globals.
The pickler uses a Wrapper object that creates the appropriate new
object or updates an old one when it is unpickled. The wrapper also
causes parts of the wrapped object's state to be traversed by the
pickler, for example the func_defaults of a function object. This
traversal is necessary because references to convertible objects could
be contained in the state and must be updated to refer to the new
objects.
What semantics do we want for update-in-place in the presence of aliases?
Semantics based on per-namespace updates don't work in the presence of
aliases. If an update changes an alias, then the old binding will be
updated with the state of the new binding.
Semantics based on containing namespaces seem to work. The outermost
namespace that contains a name is updated in place. Aliases are
simple rebinding operations that do not update in place.
The containment approach seems to have a problem with bound methods,
where an instance can stash a copy of a bound method created via an
alias. When the class is updated, the alias changes, but the bound
method isn't. Then the bound method can invoke an old method on a new
object, which may not be legal. It might be sufficient to outlaw this case.
XXX Open issues
Can we handle metaclasses within this framework? That is, what if an
object's type is not type, but a subclass of type.
How do we handle things like staticmethods? We'd like the code to be
able to use them, but Python doesn't expose an introspection on them.
What if the same object is bound to two different names in the same
namespace? Example:
x = lambda: 1
y = x
If the module is updated to:
x = lambda: 1
y = lambda: 2
What are the desired semantics?
"""
__metaclass__ = type
from copy_reg import dispatch_table
from cStringIO import StringIO
import pickle
import sys
from types import *
from zodbcode.class_ import PersistentClassMetaClass, PersistentDescriptor
from zodbcode.function import PersistentFunction
class Wrapper:
"""Implement pickling reduce protocol for update-able object.
The Pickler creates a Wrapper instance and uses it as the reduce
function. The Unpickler calls the instance to recreate the
object.
"""
__safe_for_unpickling__ = True
def __init__(self, obj, module, replace=None):
self._obj = obj
self._module = module
self._replace = replace
def __call__(self, *args):
new = self.unwrap(*args)
if self._replace is not None:
# XXX Hack: Use _p_newstate for persistent classes, because
# a persistent class's persistent state is a fairly limited
# subset of the dict and we really want to replace everything.
if hasattr(self._replace, "_p_newstate"):
self._replace._p_newstate(new)
else:
self._replace.__setstate__(new.__getstate__())
return self._replace
else:
return new
class FunctionWrapper(Wrapper):
def unwrap(self, defaults, dict):
self._obj.func_defaults = defaults
self._obj.func_dict.update(dict)
return PersistentFunction(self._obj, self._module)
class TypeWrapper(Wrapper):
def unwrap(self, bases, dict):
return PersistentClassMetaClass(self._obj.__name__, bases, dict)
def registerWrapper(atype, wrapper, unwrap_args, getstate=None):
"""Register a patch wrapper for an external object type."""
Pickler.dispatch[atype] = Pickler.save_external
Pickler.external[atype] = wrapper, unwrap_args, getstate
marker = object()
_module_cache = {}
def whichmodule(func, funcname):
"""Return a likely candidate for the module that defines obj,
where context is the name of the module in which obj was found.
Use a trick suggested by Guido to make sure we found the right
module: Compare the function's globals with the module's globals.
You've found the right module only when they match.
"""
mod = getattr(func, "__module__", None)
if mod is not None:
return mod
mod = _module_cache.get(func)
if mod is not None:
return mod
for name, module in sys.modules.items():
if module is None:
continue # skip dummy package entries
if getattr(module, funcname, None) is func:
if module.__dict__ is func.func_globals:
break
else:
name = '__main__'
_module_cache[func] = name
return name
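# Note: a function compiled by exec'ing source in a persistent module's
# __dict__ gets its __module__ from that dict's __name__, so the fast
# path above normally applies. The sys.modules scan and the "__main__"
# fallback matter only for functions lacking a __module__.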
class Pickler(pickle.Pickler):
dispatch = pickle.Pickler.dispatch.copy()
def __init__(self, file, module, memo, replacements):
# The pickler must be created in binary mode, because
# it pickles instances using the OBJ code. The text-mode
# pickler uses a different strategy that explicitly
# stores the name of the instance's class which defeats
# the desire to replace references to classes with
# persistent classes.
pickle.Pickler.__init__(self, file, bin=True)
self._pmemo = memo
self._wrapped = {} # set of objects already wrapped
self._module = module
self._module_name = module.__name__
self._repl = replacements
self._builtins = module.__builtins__
def wrap(self, wrapperclass, obj):
return wrapperclass(obj, self._module, self._repl.get(id(obj)))
def persistent_id(self, obj, force=False):
if (isinstance(obj, Wrapper)
or isinstance(obj, ModuleType)
or obj is self._builtins
or force):
oid = id(obj)
self._pmemo[oid] = obj
return oid
else:
# If the object is a real persistent object, patch it by
# persistent id, too. This case is specifically intended
# to catch persistent classes imported from other modules.
# They are classes, but can't be pickled as globals because
# pickle looks in sys.modules and the persistent import
# doesn't use sys.modules.
# If we find a class, pickle it via save_type()
if isinstance(obj, PersistentClassMetaClass):
return None
# XXX Is this safe in all cases?
oid = getattr(obj, "_p_oid", marker)
if oid is marker:
return None
elif oid is None:
# It's a persistent object, but it's newly created.
oid = object()
descr = getattr(oid, "__get__", None)
if descr is not None:
return None
self._pmemo[oid] = obj
return oid
def save_type(self, atype):
if atype.__module__ == self._module_name:
self.save_reduce(self.wrap(TypeWrapper, atype),
(atype.__bases__, atype.__dict__),
obj=atype)
else:
if isinstance(atype, PersistentClassMetaClass):
self.save_pers(self.persistent_id(atype, True))
else:
self.save_global(atype)
dispatch[TypeType] = save_type
dispatch[ClassType] = save_type
dispatch[type] = save_type
dispatch[PersistentClassMetaClass] = save_type
def save_function(self, func):
modname = whichmodule(func, func.__name__)
if modname == self._module_name or modname == "__main__":
self.save_reduce(self.wrap(FunctionWrapper, func),
(func.func_defaults, func.func_dict),
obj=func)
else:
self.save_global(func)
dispatch[FunctionType] = save_function
external = {}
def save_external(self, obj):
# XXX Will this object always have an __module__?
if obj.__module__ == self._module_name:
# Save an external type registered through registerWrapper
objtype = type(obj)
wrapper, unwrap_args, getstate = self.external[objtype]
if getstate is not None:
self.save_reduce(self.wrap(wrapper, obj), unwrap_args(obj),
getstate(obj),
obj=obj)
else:
self.save_reduce(self.wrap(wrapper, obj), unwrap_args(obj),
obj=obj)
else:
# In general, we don't know how to pickle this object,
# so pickle it by reference to the original.
self.save_pers(self.persistent_id(obj, True))
# New-style classes don't have real dicts. They have dictproxies.
# There's no official way to spell the dictproxy type, so we have
# to get it by using type() on an example.
dispatch[type(Wrapper.__dict__)] = pickle.Pickler.save_dict
def save(self, obj, ignore=None):
# Override the save() implementation from pickle.py, because
# we don't ever want to invoke __reduce__() on builtin types
# that aren't picklable. Instead, we'd like to pickle all of
# those objects using the persistent_id() mechanism. There's
# no need to cover every type with this pickler, because it
        # isn't being used for persistence, just to create a copy.
        # The ignored parameter is for compatibility with Python 2.2,
# which has the old inst_persistent_id feature.
pid = self.persistent_id(obj)
if pid is not None:
self.save_pers(pid)
return
d = id(obj)
t = type(obj)
if (t is TupleType) and (len(obj) == 0):
if self.bin:
self.save_empty_tuple(obj)
else:
self.save_tuple(obj)
return
if d in self.memo:
self.write(self.get(self.memo[d][0]))
return
try:
f = self.dispatch[t]
except KeyError:
try:
issc = issubclass(t, TypeType)
except TypeError: # t is not a class
issc = 0
if issc:
self.save_global(obj)
return
try:
reduce = dispatch_table[t]
except KeyError:
self.save_pers(self.persistent_id(obj, True))
return
else:
tup = reduce(obj)
if type(tup) is StringType:
self.save_global(obj, tup)
return
if type(tup) is not TupleType:
raise pickle.PicklingError("Value returned by %s must be a "
"tuple" % reduce)
l = len(tup)
if (l != 2) and (l != 3):
raise pickle.PicklingError("tuple returned by %s must "
"contain only two or three "
"elements" % reduce)
callable = tup[0]
arg_tup = tup[1]
if l > 2:
state = tup[2]
else:
state = None
if type(arg_tup) is not TupleType and arg_tup is not None:
raise pickle.PicklingError("Second element of tuple "
"returned by %s must be a "
"tuple" % reduce)
self.save_reduce(callable, arg_tup, state, obj=obj)
return
f(self, obj)
def save_reduce(self, callable, arg_tup, state = None, obj = None):
write = self.write
save = self.save
save(callable)
save(arg_tup)
write(pickle.REDUCE)
if obj is not None:
memo_len = len(self.memo)
self.write(self.put(memo_len))
self.memo[id(obj)] = (memo_len, obj)
if state is not None:
save(state)
write(pickle.BUILD)
class Unpickler(pickle.Unpickler):
def __init__(self, file, pmemo):
pickle.Unpickler.__init__(self, file)
self._pmemo = pmemo
def persistent_load(self, oid):
return self._pmemo[oid]
class NameFinder:
"""Find a canonical name for each update-able object."""
# XXX should we try to handle descriptors? If it looks like a
# descriptor, try calling it and passing the class object?
classTypes = {
TypeType: True,
ClassType: True,
PersistentClassMetaClass: True,
}
types = {
FunctionType: True,
PersistentFunction: True,
PersistentDescriptor: True,
}
types.update(classTypes)
def __init__(self, module):
self._names = {} # map object ids to (canonical name, obj) pairs
self.walkModule(module)
def names(self):
return [n for n, o in self._names.itervalues()]
def _walk(self, obj, name, fmt):
classes = []
for k, v in obj.__dict__.items():
aType = type(v)
anId = id(v)
if aType in self.types and not anId in self._names:
self._names[anId] = fmt % (name, k), v
if aType in self.classTypes:
classes.append((v, k))
for _klass, _name in classes:
self.walkClass(_klass, fmt % (name, _name))
def walkModule(self, mod):
self._walk(mod, "", "%s%s")
def walkClass(self, klass, name):
self._walk(klass, name, "%s.%s")
def replacements(self, aFinder):
"""Return a dictionary of replacements.
self and aFinder are two NameFinder instances. Return a dict
of all the objects in the two that share the same name. The
keys are the ids in self and the values are the objects in
aFinder.
"""
temp = {}
result = {}
for anId, (name, obj) in self._names.iteritems():
temp[name] = anId
for anId, (name, obj) in aFinder._names.iteritems():
if name in temp:
result[temp[name]] = obj
return result
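# Sketch: how compileModule() in zodbcode.module uses two NameFinders to
# update objects in place across a re-execution of module source:
#
#     old = NameFinder(module)         # names bound before re-exec
#     exec new_source in module.__dict__
#     new = NameFinder(module)         # names bound after re-exec
#     repl = new.replacements(old)     # id(new obj) -> existing old obj
#     convert(module, repl)            # old objects updated, identity kept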
def convert(module, replacements):
"""Convert object to persistent objects in module.
Use replacements dictionary to determine which objects to update
in place.
"""
f = StringIO()
memo = {}
p = Pickler(f, module, memo, replacements)
moddict = module.__dict__
p.dump(moddict)
f.seek(0)
u = Unpickler(f, memo)
newdict = u.load()
module.__dict__.clear()
module.__dict__.update(newdict)
if __name__ == "__main__":
pass