[Zope3-checkins] CVS: Zope3/src/zodb/code - __init__.py:1.2 class_.py:1.2 function.py:1.2 interfaces.py:1.2 module.py:1.2 patch.py:1.2
Jim Fulton
jim@zope.com
Wed, 25 Dec 2002 09:13:49 -0500
Update of /cvs-repository/Zope3/src/zodb/code
In directory cvs.zope.org:/tmp/cvs-serv15352/src/zodb/code
Added Files:
__init__.py class_.py function.py interfaces.py module.py
patch.py
Log Message:
Grand renaming:
- Renamed most files (especially python modules) to lower case.
- Moved views and interfaces into separate hierarchies within each
project, where each top-level directory under the zope package
is a separate project.
- Moved everything to src from lib/python.
lib/python will eventually go away. I need access to the cvs
repository to make this happen, however.
There are probably some bits that are broken. All tests pass
and zope runs, but I haven't tried everything. There are a number
of cleanups I'll work on tomorrow.
=== Zope3/src/zodb/code/__init__.py 1.1 => 1.2 ===
--- /dev/null Wed Dec 25 09:13:48 2002
+++ Zope3/src/zodb/code/__init__.py Wed Dec 25 09:12:18 2002
@@ -0,0 +1,2 @@
+#
+# This file is necessary to make this directory a package.
=== Zope3/src/zodb/code/class_.py 1.1 => 1.2 ===
--- /dev/null Wed Dec 25 09:13:48 2002
+++ Zope3/src/zodb/code/class_.py Wed Dec 25 09:12:18 2002
@@ -0,0 +1,335 @@
+##############################################################################
+#
+# Copyright (c) 2002 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Persistent Classes."""
+
+from persistence import Persistent, PersistentMetaClass
+from persistence._persistence import UPTODATE, CHANGED, STICKY, GHOST
+from persistence.interfaces import IPersistent
+from zodb.code.function import PersistentFunction
+
+import new
+from types import FunctionType as function
+import time
+
+# XXX There is a lot of magic here to give classes and instances
+# separate sets of attributes. This code should be documented, as it
+# is quite delicate, and it should be moved to a separate module.
+
+__metaclass__ = type
+
+class ExtClassDescr:
+ """Maintains seperate class and instance descriptors for an attribute.
+
+ This allows a class to provide methods and attributes without
+ interfering with normal use of instances. The class and its
+ instances can each have methods with the same name.
+
+ This does interfere with introspection on the class.
+ """
+
+ def __init__(self, name, instdescr):
+ self.name = name
+ self.instdescr = instdescr
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self.clsget(cls)
+ else:
+ return self.instdescr.__get__(obj, cls)
+
+ def __set__(self, obj, val):
+ if obj is None:
+ self.clsset(val)
+ else:
+ if self.instdescr is None:
+ raise AttributeError, self.name
+ return self.instdescr.__set__(obj, val)
+
+ def __delete__(self, obj):
+ if self.instdescr is None:
+ raise AttributeError, self.name
+ return self.instdescr.__delete__(obj)
+
+ # subclass should override
+
+ def clsget(self, cls):
+ pass
+
+ def clsset(self, val):
+ pass
+
+ def clsdelete(self):
+ pass
+
+class MethodMixin:
+
+ def __init__(self, name, descr, func):
+ super(MethodMixin, self).__init__(name, descr)
+ self.func = func
+
+ def clsget(self, cls):
+ def f(*args, **kwargs):
+ try:
+ return self.func(cls, *args, **kwargs)
+ except TypeError:
+ print `self.func`, `cls`, `args`, `kwargs`
+ raise
+ return f
+
+class DataMixin:
+
+ def __init__(self, name, descr, val):
+ super(DataMixin, self).__init__(name, descr)
+ self.val = val
+
+ def clsget(self, cls):
+ return self.val
+
+ def clsset(self, val):
+ self.val = val
+
+ def clsdelete(self):
+ del self.val
+
+class ExtClassObject:
+
+ _missing = object()
+
+ def __init__(self, name, instdescr):
+ self.name = name
+ self.instdescr = instdescr
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self.clsget(cls)
+ else:
+ return self.instdescr.__get__(obj, cls)
+
+ def __set__(self, obj, cls):
+ if obj is None:
+ return self.clsset(cls)
+ else:
+ if self.instdescr is None:
+ raise AttributeError, self.name
+ return self.instdescr.__set__(obj, cls)
+
+ def __delete__(self, obj, cls):
+ if obj is None:
+ return self.clsdelete(cls)
+ else:
+ if self.instdescr is None:
+ raise AttributeError, self.name
+ return self.instdescr.__delete__(obj, cls)
+
+class ExtClassMethodDescr(MethodMixin, ExtClassDescr):
+ pass
+
+class ExtClassDataDescr(DataMixin, ExtClassDescr):
+ pass
+
+# The next three classes conspire to make a PersistentFunction
+# behave like a method when found in a class's __dict__.
+
+class PersistentMethod:
+ """Make PersistentFunctions into methods."""
+ def __init__(self, klass, inst, func):
+ self.im_class = klass
+ self.im_self = inst
+ self.im_func = func
+
+ def __repr__(self):
+ if self.im_self is None:
+ kind = "unbound"
+ else:
+ kind = "bound"
+ return ("<persistent %s method %s.%s of %s>"
+ % (kind, self.im_class.__name__, self.im_func.__name__,
+ self.im_self))
+
+ def __call__(self, *args, **kwargs):
+ if self.im_self is None:
+ if not isinstance(args[0], self.im_class):
+ raise TypeError("unbound method %s() must be called "
+ "with %s instance as first argument ("
+ "got %s instead)" % (self.im_func.__name__,
+ self.im_class.__name__,
+ type(args[0]).__name__))
+ return self.im_func(*args, **kwargs)
+ else:
+ return self.im_func(self.im_self, *args, **kwargs)
+
+class PersistentDescriptor:
+
+ def __init__(self, objclass, func):
+ self.__name__ = func.__name__
+ self.__doc__ = func.__doc__
+ self.__objclass__ = objclass
+ self._func = func
+ # Delegate __getstate__ and __setstate__ to the persistent func.
+ # The patch module will use these methods to update persistent
+ # methods in place.
+ self.__getstate__ = func.__getstate__
+ self.__setstate__ = func.__setstate__
+
+ def __repr__(self):
+ return "<descriptor %s.%s>" % (self.__objclass__.__name__,
+ self.__name__)
+
+ def __get__(self, object, klass=None):
+ if object is None:
+ return PersistentMethod(klass or self.__objclass__, None,
+ self._func)
+ else:
+ return PersistentMethod(klass or self.__objclass__, object,
+ self._func)
+
+
+# XXX is missing necessary for findattr?
+# None might be sufficient
+_missing = object()
+
+def findattr(cls, attr, default):
+ """Walk the mro of cls to find attr."""
+ for c in cls.__mro__:
+ o = c.__dict__.get(attr, _missing)
+ if o is not _missing:
+ return o
+ return default
+
+class PersistentClassMetaClass(PersistentMetaClass):
+
+ # an attempt to make persistent classes look just like other
+ # persistent objects by providing class attributes and methods
+ # that behave like the persistence machinery.
+
+ # the chief limitation of this approach is that class.attr won't
+ # always behave the way it does for normal classes
+
+ __implements__ = IPersistent
+
+ _pc_init = False
+
+ def __new__(meta, name, bases, dict):
+ cls = super(PersistentClassMetaClass, meta).__new__(
+ meta, name, bases, dict)
+ # helper functions
+ def extend_attr(attr, v):
+ prev = findattr(cls, attr, None)
+ setattr(cls, attr, ExtClassDataDescr(attr, prev, v))
+
+ def extend_meth(attr, m):
+ prev = findattr(cls, attr, None)
+ setattr(cls, attr, ExtClassMethodDescr(attr, prev, m))
+
+ extend_attr("_p_oid", None)
+ extend_attr("_p_jar", None)
+ extend_attr("_p_state", UPTODATE)
+ extend_meth("_p_activate", meta._p_activate)
+ extend_meth("_p_deactivate", meta._p_activate)
+ extend_meth("__getstate__", meta.__getstate__)
+ extend_meth("__setstate__", meta.__setstate__)
+ extend_attr("__implements__", meta.__implements__)
+
+ for k, v in dict.items():
+ if isinstance(v, PersistentFunction):
+ setattr(cls, k, PersistentDescriptor(cls, v))
+
+ cls._pc_init = True
+ return cls
+
+ def fixup(cls, mod):
+ for k, v in cls.__dict__.items():
+ if isinstance(v, function):
+ setattr(cls, k, PersistentFunction(v, mod))
+
+ def __getattribute__(cls, name):
+ # XXX I'm not sure I understand this code any more.
+ super_meth = super(PersistentClassMetaClass, cls).__getattribute__
+
+ # If we are initializing the class, don't try to check variables
+ # like _p_state, since they may not be initialized.
+ if not super_meth("_pc_init"):
+ return super_meth(name)
+ if (name[0] == "_" and
+ not (name.startswith("_p_") or name.startswith("_pc_") or
+ name == "__dict__")):
+ if cls._p_state == GHOST:
+ cls._p_activate()
+ cls._p_atime = int(time.time() % 86400)
+ return super_meth(name)
+
+ def __setattr__(cls, attr, val):
+ if not attr.startswith("_pc_") and cls._pc_init:
+ descr = cls.__dict__.get(attr)
+ if descr is not None:
+ set = getattr(descr, "__set__", None)
+ if set is not None:
+ set(None, val)
+ return
+ super(PersistentClassMetaClass, cls).__setattr__(attr, val)
+
+ def __delattr__(cls, attr):
+ if attr.startswith('_p_'):
+ if attr == "_p_changed":
+ # this means something special
+ pass
+ else:
+ return
+ super(PersistentClassMetaClass, cls).__delattr__(attr)
+
+ def __getstate__(cls):
+ dict = {}
+ for k, v in cls.__dict__.items():
+ if hasattr(v, '_p_oid'):
+ dict[k] = v
+ return dict
+
+ def __setstate__(cls, dict):
+ for k, v in dict.items():
+ setattr(cls, k, v)
+
+ def _p_deactivate(cls):
+ # do nothing but mark the state change for now
+ cls._p_state = GHOST
+
+ def _p_activate(cls):
+ if cls._p_state is None:
+ dm = cls._p_jar
+ if dm is not None:
+ # reactivate
+ cls._p_state = UPTODATE
+
+ # Methods below here are not wrapped to be class-only attributes.
+ # They are available as methods of classes using this metaclass.
+
+ def __getnewargs__(cls):
+ return cls.__name__, cls.__bases__, {}
+
+ def _p_newstate(cls, acls):
+ # Update a class's __dict__ in place. Must use setattr and
+ # delattr because __dict__ is a read-only proxy.
+ # XXX This doesn't handle __methods__ correctly.
+ def getkeys(cls):
+ L = [n for n in cls.__dict__.keys()
+ if not (n.startswith("__") and n.endswith("__"))]
+ d = {}
+ for elt in L:
+ d[elt] = True
+ return d
+ oldnames = getkeys(cls)
+ newnames = getkeys(acls)
+ for name in oldnames:
+ if not name in newnames:
+ delattr(cls, name)
+ for name in newnames:
+ setattr(cls, name, acls.__dict__[name])
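
For orientation, here is a small sketch of what the findattr() helper above does. The toy classes A and B are purely illustrative and are not part of the checkin; findattr() walks the MRO and returns the first class-dict entry it finds, or the default:

    from zodb.code.class_ import findattr

    class A(object):
        attr = 1

    class B(A):
        pass

    # B does not define attr itself, but A (next in B.__mro__) does.
    assert findattr(B, "attr", None) == 1
    # No class in the MRO defines "missing", so the default comes back.
    assert findattr(B, "missing", "default") == "default"

The metaclass uses this to locate any inherited descriptor for a name before shadowing it with an ExtClassDataDescr or ExtClassMethodDescr.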
=== Zope3/src/zodb/code/function.py 1.1 => 1.2 ===
--- /dev/null Wed Dec 25 09:13:49 2002
+++ Zope3/src/zodb/code/function.py Wed Dec 25 09:12:18 2002
@@ -0,0 +1,205 @@
+##############################################################################
+#
+# Copyright (c) 2002 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Persistent functions."""
+
+import dis
+import new
+import sys
+# in 2.3, this will be spelled new.function and new.code
+from types import FunctionType as function, CodeType as code
+
+from persistence import Persistent
+
+_STORE_GLOBAL = chr(dis.opname.index("STORE_GLOBAL"))
+
+def has_side_effect(func):
+ # This scans the raw bytecode, so the byte may match as an oparg as
+ # well as an opcode; such false positives are conservative and harmless.
+ return _STORE_GLOBAL in func.func_code.co_code
+
+class CodeWrapper:
+ """Package a code object so that it can be pickled."""
+
+ nested = 0
+
+ def __init__(self, co):
+ consts = co.co_consts
+ nested = [(i, c) for i, c in zip(range(len(consts)), consts)
+ if isinstance(c, code)]
+ if nested:
+ self.nested = 1
+ L = list(consts)
+ for i, c in nested:
+ L[i] = CodeWrapper(c)
+ consts = tuple(L)
+
+ # args stores the arguments to new.code in order
+ self.args = [co.co_argcount,
+ co.co_nlocals,
+ co.co_stacksize,
+ co.co_flags,
+ co.co_code,
+ consts,
+ co.co_names,
+ co.co_varnames,
+ co.co_filename,
+ co.co_name,
+ co.co_firstlineno,
+ co.co_lnotab,
+ co.co_freevars,
+ co.co_cellvars]
+
+ def ascode(self):
+ if self.nested:
+ L = list(self.args[5])
+ for i, elt in zip(range(len(L)), L):
+ if isinstance(elt, CodeWrapper):
+ L[i] = elt.ascode()
+ self.args[5] = tuple(L)
+ return new.code(*self.args)
+
+def get_code_args(co):
+ """Return args from code object suitable for passing to constructor."""
+
+class PersistentFunction(Persistent):
+
+ def __init__(self, func, module):
+ # Use _pf_ as the prefix to minimize the possibility that
+ # these attribute names will conflict with function attributes
+ # found in user code. It would have been nice to use _p_
+ # since it's already a reserved attribute prefix, but the
+ # base persistent getattr function does not unghostify an
+ # object on references to _p_ attributes.
+ self._pf_func = func
+ self._v_side_effect = has_side_effect(func)
+ self._pf_module = module
+ self._pf_code = {}
+ self._fixup_contained()
+
+ def __repr__(self):
+ return "<PersistentFunction %s.%s>" % (self._pf_module.__name__,
+ self._pf_func.func_name)
+
+ def _fixup_contained(self):
+ # The function object may contain other function objects as a
+ # default value for an argument. These functions are
+ # converted to persistent objects, but are not updated in
+ # place when the containing module is changed.
+ new = {}
+ defaults = self._pf_func.func_defaults
+ if defaults is None:
+ return
+ for i in range(len(defaults)):
+ obj = defaults[i]
+ if isinstance(obj, function):
+ new[i] = PersistentFunction(obj, self._pf_module)
+ if new:
+ new_defs = list(defaults)
+ for i, pf in new.items():
+ new_defs[i] = pf
+ self._pf_func.func_defaults = tuple(new_defs)
+
+ # We need attribute hooks to handle access to _pf_ attributes in a
+ # special way. All other attributes should be looked up on
+ # _pf_func.
+
+ def __getattr__(self, attr):
+ # If it wasn't found in __dict__, then it must be a function
+ # attribute.
+ return getattr(self._pf_func, attr)
+
+ def __setattr__(self, attr, value):
+ if not self._p_setattr(attr, value):
+ # the persistence machinery didn't handle this attribute,
+ # it must be ours
+ if attr.startswith('_pf_'):
+ self.__dict__[attr] = value
+ if attr == "_pf_func":
+ self._v_side_effect = has_side_effect(self._pf_func)
+ else:
+ setattr(self._pf_func, attr, value)
+
+ def __delattr__(self, attr):
+ if not self._p_delattr(attr):
+ # the persistence machinery didn't handle this attribute,
+ # it must be ours
+ if attr.startswith('_pf_'):
+ del self.__dict__[attr]
+ else:
+ delattr(self._pf_func, attr)
+
+ def __call__(self, *args, **kwargs):
+ # We must make sure that _pf_module is loaded when the function is
+ # executed because the function may reference a global
+ # variable and that global variable must be in the module's
+ # __dict__. We can't use a PersistentDict because the
+ # interpreter requires that globals be a real dict.
+ self._pf_module._p_activate()
+
+ # XXX What if the function module is deactivated while the
+ # function is executing? It seems like we need to expose
+ # refcounts at the Python level to guarantee that this will
+ # work.
+
+ try:
+ return self._pf_func(*args, **kwargs)
+ finally:
+ # If the func has a side-effect, the module must be marked
+ # as changed. We use the conservative approximation that
+ # any function with a STORE_GLOBAL opcode has a
+ # side-effect, regardless of whether a particular call
+ # of the function actually executes STORE_GLOBAL.
+
+ # XXX Is this sufficient?
+ if self._v_side_effect:
+ self._pf_module._p_changed = 1
+
+ def __getstate__(self):
+ # If func_dict is empty, store None to avoid creating a dict
+ # unnecessarily when the function is unpickled
+ # XXX new.function doesn't accept a closure
+ func = self._pf_func
+ func_state = func.func_defaults, func.func_dict or None
+
+ # Store the code separately from the function
+ code = func.func_code
+
+ # The code object can only be reused in an interpreter
+ # running the same version of Python and with the same
+ # __debug__ value. Store code in a dict keyed by these two values.
+
+ key = sys.version_info, __debug__
+ if key not in self._pf_code:
+ self._pf_code[key] = CodeWrapper(code)
+
+ return func_state, self._pf_code, self._pf_module
+
+ def __setstate__(self, (func, code, mod)):
+ self._pf_code = code
+ self._pf_module = mod
+
+ # recreate the code object
+ code = None
+ key = sys.version_info, __debug__
+ cowrap = self._pf_code.get(key, None)
+ if cowrap is None:
+ assert False, "not implemented yet"
+ else:
+ code = cowrap.ascode()
+
+ func_defaults, func_dict = func
+ # new.function(code, globals, name, argdefs): pass None for the name so
+ # it is taken from the code object, and the defaults as argdefs.
+ func = new.function(code, mod.__dict__, None, func_defaults)
+ if func_dict:
+ func.func_dict.update(func_dict)
+ self._pf_func = func
+ self._v_side_effect = has_side_effect(func)
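
The has_side_effect() heuristic above just scans the raw bytecode string for the STORE_GLOBAL opcode byte. A minimal sketch of how it behaves; the two sample functions are hypothetical and not part of the checkin:

    from zodb.code.function import has_side_effect

    def read_only(x):
        return x + 1                 # no STORE_GLOBAL emitted

    def rebinds_global(x):
        global counter
        counter = x                  # compiles to a STORE_GLOBAL

    assert not has_side_effect(read_only)
    assert has_side_effect(rebinds_global)

Because the scan looks at raw bytes rather than decoded opcodes, a match can in principle come from an oparg byte; that kind of false positive is harmless here, since it only causes an extra _p_changed marking on the module.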
=== Zope3/src/zodb/code/interfaces.py 1.1 => 1.2 ===
--- /dev/null Wed Dec 25 09:13:49 2002
+++ Zope3/src/zodb/code/interfaces.py Wed Dec 25 09:12:18 2002
@@ -0,0 +1,57 @@
+##############################################################################
+#
+# Copyright (c) 2002 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+try:
+ from zope.interface import Interface, Attribute
+except ImportError:
+ # Fall back to minimal stand-ins if zope.interface is unavailable.
+ class Interface:
+ pass
+ def Attribute(x):
+ return x
+
+class IPersistentModuleImportRegistry(Interface):
+
+ def findModule(name):
+ """Return module registered under name or None."""
+
+class IPersistentModuleUpdateRegistry(IPersistentModuleImportRegistry):
+
+ def setModule(name, module):
+ """Register module under name.
+
+ Raises ValueError if module is already registered.
+ """
+
+ def delModule(name):
+ """Unregister module registered under name.
+
+ Raises KeyError if the module is not registered.
+ """
+
+
+class IPersistentModuleManager(Interface):
+
+ def new(name, source):
+ """Create and register a new named module from source."""
+
+ def update(src):
+ """Update the source of the existing module."""
+
+ def remove():
+ """Unregister the module and forget about it."""
+
+ name = Attribute("Absolute module name")
+ source = Attribute("Module source string")
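
To make the registry contract concrete, here is a hedged sketch of a trivial, in-memory registry that satisfies both registry interfaces. It is illustrative only; the real persistent implementation is PersistentModuleRegistry in zodb.code.module:

    from zodb.code.interfaces import IPersistentModuleUpdateRegistry

    class SimpleModuleRegistry:
        """Non-persistent registry providing findModule/setModule/delModule."""

        __implements__ = IPersistentModuleUpdateRegistry

        def __init__(self):
            self._modules = {}

        def findModule(self, name):
            # Return the module registered under name, or None.
            return self._modules.get(name)

        def setModule(self, name, module):
            if name in self._modules:
                raise ValueError(name)
            self._modules[name] = module

        def delModule(self, name):
            del self._modules[name]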
=== Zope3/src/zodb/code/module.py 1.1 => 1.2 ===
--- /dev/null Wed Dec 25 09:13:49 2002
+++ Zope3/src/zodb/code/module.py Wed Dec 25 09:12:18 2002
@@ -0,0 +1,299 @@
+##############################################################################
+#
+# Copyright (c) 2002 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Persistent Module."""
+
+__metaclass__ = type
+
+import __builtin__
+# in 2.3, this will be spelled new.function
+from types import FunctionType as function
+import sys
+
+from persistence import Persistent
+from persistence._persistence import GHOST
+from zodb.code.class_ import PersistentClassMetaClass
+from zodb.code.function import PersistentFunction
+from zodb.code.interfaces import IPersistentModuleManager
+from zodb.code.interfaces \
+ import IPersistentModuleImportRegistry, IPersistentModuleUpdateRegistry
+from zodb.code.patch import NameFinder, convert
+
+from transaction import get_transaction
+
+# builtins are explicitly assigned when a module is unpickled
+import __builtin__
+
+# Modules aren't picklable by default, but we'd like them to be
+# pickled just like classes (by name).
+import copy_reg
+
+def _pickle_module(mod):
+ return mod.__name__
+
+def _unpickle_module(modname):
+ mod = __import__(modname)
+ if "." in modname:
+ parts = modname.split(".")[1:]
+ for part in parts:
+ mod = getattr(mod, part)
+ return mod
+
+copy_reg.pickle(type(copy_reg), _pickle_module, _unpickle_module)
+
+# XXX Is this comment still relevant?
+#
+# There seems to be something seriously wrong with a module pickle
+# that contains objects pickled via save_global(). These objects are
+# pickled using references to the module. It appears that unpickling the
+# object in the module causes the persistence machinery to fail.
+#
+# My suspicion is that the assignment to po_state before trying to
+# load the state confuses things. The first call to setstate attempts
+# to reference an attribute of the module. That getattr() fails because
+# the module is not a ghost, but has an empty dict. Since
+# that getattr() fails, its state can't be unpickled.
+#
+# Not sure what to do about this.
+
+class PersistentModule(Persistent):
+
+ def __init__(self, name):
+ self.__name__ = name
+
+ def __repr__(self):
+ return "<%s %s>" % (self.__class__.__name__, self.__name__)
+
+ # XXX need getattr &c. hooks to update _p_changed?
+ # XXX what about code that modifies __dict__ directly?
+ # XXX one example is a function that rebinds a global
+
+ def __getstate__(self):
+ d = self.__dict__.copy()
+ try:
+ del d["__builtins__"]
+ except KeyError:
+ pass
+ return d
+
+ def __setstate__(self, state):
+ state["__builtins__"] = __builtin__
+ self.__dict__.update(state)
+
+class PersistentPackage(PersistentModule):
+ # XXX Is it okay that these packages don't have __path__?
+
+ # A PersistentPackage can exist in a registry without a manager.
+ # It only gets a manager if someone creates an __init__ module for
+ # the package.
+
+ def __init__(self, name):
+ self.__name__ = name
+
+__persistent_module_registry__ = "__persistent_module_registry__"
+
+class PersistentModuleManager(Persistent):
+
+ __implements__ = IPersistentModuleManager
+
+ def __init__(self, registry):
+ self._registry = registry
+ self._module = None
+ self.name = None
+ self.source = None
+
+ def new(self, name, source):
+ if self._module is not None:
+ raise ValueError, "module already exists"
+ if "." in name:
+ parent = self._new_package(name)
+ else:
+ parent = None
+ self._module = PersistentModule(name)
+ try:
+ self._registry.setModule(name, self._module)
+ except ValueError, err:
+ self._module = None
+ raise
+ self.name = name
+ self.update(source)
+ if parent is not None:
+ modname = name.split(".")[-1]
+ setattr(parent, modname, self._module)
+
+ def update(self, source):
+ self._module._p_changed = True
+ moddict = self._module.__dict__
+ old_names = NameFinder(self._module)
+ moddict[__persistent_module_registry__] = self._registry
+ exec source in moddict
+ del moddict[__persistent_module_registry__]
+ new_names = NameFinder(self._module)
+ replacements = new_names.replacements(old_names)
+ convert(self._module, replacements)
+ self.source = source
+
+ def remove(self):
+ self._registry.delModule(self._module.__name__)
+ self._module = None
+
+ def _fixup(self, new, old, module):
+ # Update persistent objects in place, and
+ # convert new functions to persistent functions
+ # XXX should convert classes, too
+
+ for k, v in new.items():
+ if isinstance(v, function):
+ v = new[k] = PersistentFunction(v, module)
+ elif isinstance(v.__class__, PersistentClassMetaClass):
+ v.__class__.fixup(module)
+ # XXX need to check for classes that are not persistent!
+
+ old_v = old.get(k)
+ if old_v is not None:
+ # XXX the type test below works for functions, but may
+ # not work for classes or other objects
+ if (isinstance(old_v, Persistent)
+ and type(old_v) == type(v)):
+ state = v.__getstate__()
+ old_v.__setstate__(state)
+ new[k] = old_v
+
+ def _new_package(self, name):
+ parent = self._get_parent(name)
+ modname = name.split(".")[-1]
+ if modname == "__init__":
+ self._module = parent
+ return None
+ else:
+ self._module = PersistentModule(name)
+ return parent
+
+ def _get_parent(self, name):
+ # If a module is being created in a package, automatically
+ # create parent packages that do not already exist.
+ parts = name.split(".")[:-1]
+ parent = None
+ for i in range(len(parts)):
+ if parts[i] == "__init__":
+ raise ValueError, "__init__ can not be a package"
+ pname = ".".join(parts[:i+1])
+ package = self._registry.findModule(pname)
+ if package is None:
+ package = PersistentPackage(pname)
+ self._registry.setModule(pname, package)
+ if parent is not None:
+ setattr(parent, parts[i], package)
+ elif not isinstance(package, PersistentPackage):
+ raise ValueError, "%s is module" % pname
+ parent = package
+ return parent
+
+class PersistentModuleImporter:
+ """An import hook that loads persistent modules.
+
+ The importer cooperates with other objects to make sure imports of
+ persistent modules work correctly. The default importer depends
+ on finding a persistent module registry in the globals passed to
+ __import__(). It looks for the name __persistent_module_registry__.
+ A PersistentModuleManager places its registry in the globals used
+ to exec module source.
+
+ It is important that the registry be activated before it is used
+ to handle imports. If a ghost registry is used for importing, a
+ circular import occurs. The second import occurs when the
+ machinery searches for the class of the registry. It will re-use
+ the registry and fail, because the registry will be marked as
+ changed but not yet have its state loaded. XXX There ought to be
+ a way to deal with this.
+ """
+
+ def __init__(self):
+ self._saved_import = None
+
+ def install(self):
+ self._saved_import = __builtin__.__import__
+ __builtin__.__import__ = self.__import__
+
+ def uninstall(self):
+ __builtin__.__import__ = self._saved_import
+
+ def _import(self, registry, name, parent, fromlist):
+ mod = None
+ if parent is not None:
+ fullname = "%s.%s" % (parent, name)
+ mod = registry.findModule(fullname)
+ if mod is None:
+ parent = None
+ if mod is None: # no parent or didn't find in parent
+ mod = registry.findModule(name)
+ if mod is None:
+ return None
+ if fromlist:
+ if isinstance(mod, PersistentPackage):
+ self._import_fromlist(registry, mod, fromlist)
+ return mod
+ else:
+ i = name.find(".")
+ if i == -1:
+ return mod
+ name = name[:i]
+ if parent:
+ name = "%s.%s" % (parent, name)
+ top = registry.findModule(name)
+ assert top is not None, "No package for module %s" % name
+ return top
+
+ def _import_fromlist(self, registry, mod, fromlist):
+ for name in fromlist:
+ if not hasattr(mod, name):
+ fullname = "%s.%s" % (mod.__name__, name)
+ self._import(registry, fullname, None, [])
+
+ def __import__(self, name, globals={}, locals={}, fromlist=[]):
+ registry = globals.get(__persistent_module_registry__)
+ if registry is not None:
+ mod = self._import(registry, name, self._get_parent(globals),
+ fromlist)
+ if mod is not None:
+ return mod
+ return self._saved_import(name, globals, locals, fromlist)
+
+ def _get_parent(self, globals):
+ name = globals.get("__name__")
+ if name is None or "." not in name:
+ return None
+ i = name.rfind(".")
+ return name[:i]
+
+class PersistentModuleRegistry(Persistent):
+
+ __implements__ = (IPersistentModuleImportRegistry,
+ IPersistentModuleUpdateRegistry)
+
+ def __init__(self):
+ self.__modules = {}
+
+ def findModule(self, name):
+ assert self._p_state != GHOST
+ return self.__modules.get(name)
+
+ def setModule(self, name, module):
+ if name in self.__modules:
+ raise ValueError, name
+ self._p_changed = True
+ self.__modules[name] = module
+
+ def delModule(self, name):
+ self._p_changed = True
+ del self.__modules[name]
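
Putting the pieces together, a rough usage sketch of the registry, manager, and importer defined above. The module name and source strings are made up, and persistence details (a real ZODB connection, transaction boundaries) are glossed over:

    from zodb.code.module import PersistentModuleRegistry, PersistentModuleManager
    from zodb.code.module import PersistentModuleImporter

    registry = PersistentModuleRegistry()
    importer = PersistentModuleImporter()
    importer.install()            # handles imports inside persistent source
    try:
        mgr = PersistentModuleManager(registry)
        mgr.new("pmod", "def double(x):\n    return x * 2\n")
        mod = registry.findModule("pmod")
        f = mod.double            # converted to a PersistentFunction
        print f(21)               # 42
        mgr.update("def double(x):\n    return x * 3\n")
        print f(21)               # 63: the same object was updated in place
    finally:
        importer.uninstall()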
=== Zope3/src/zodb/code/patch.py 1.1 => 1.2 ===
--- /dev/null Wed Dec 25 09:13:49 2002
+++ Zope3/src/zodb/code/patch.py Wed Dec 25 09:12:18 2002
@@ -0,0 +1,358 @@
+##############################################################################
+#
+# Copyright (c) 2002 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Patch references to auto-persistent objects in a module.
+
+When a persistent module is compiled, all classes and functions should
+be converted to persistent classes and functions. When a module is
+updated, it is compiled and its persistent functions and classes are
+updated in place so that clients of the module see the update.
+
+The specific semantics of the convert and update-in-place operations
+are still being determined. Here are some rough notes:
+
+- Classes and functions are not converted in place. New objects are
+ created to replace the builtin functions and classes.
+
+- Every function object is converted to a PersistentFunction.
+
+- Every class is converted to a new class that is created by calling
+ the PersistentClassMetaClass with the name, bases, and dict of the
+ class being converted.
+
+- The conversion operation must preserve object identity. If an
+ object created by a def or class statement is referenced elsewhere
+ in the module, all references must be replaced with references to
+ the converted object.
+
+Implementation notes:
+
+The conversion operation is implemented using a pickler. It wasn't
+possible to use the copy module, because it isn't possible to extend
+the copy module in a safe way. The copy module depends on module globals.
+
+What semantics do we want for update-in-place in the presence of aliases?
+
+Semantics based on per-namespace updates don't work in the presence of
+aliases. If an update changes an alias, then the old binding will be
+updated with the state of the new binding.
+
+Semantics based on containing namespaces seem to work. The outermost
+namespace that contains a name is updated in place. Aliases are
+simple rebinding operations that do not update in place.
+
+The containment approach seems to have a problem with bound methods,
+where an instance can stash a copy of a bound method created via an
+alias. When the class is updated, the alias changes, but the bound
+method isn't. Then the bound method can invoke an old method on a new
+object, which may not be legal. It might be sufficient to outlaw this case.
+
+XXX Open issues
+
+Can we handle metaclasses within this framework? That is, what if an
+object's type is not type, but a subclass of type.
+
+How do we handle things like staticmethods? We'd like the code to be
+able to use them, but Python doesn't expose introspection for them.
+
+What if the same object is bound to two different names in the same
+namespace? Example:
+ x = lambda: 1
+ y = x
+If the module is updated to:
+ x = lambda: 1
+ y = lambda: 2
+What are the desired semantics?
+"""
+
+__metaclass__ = type
+
+from copy_reg import dispatch_table
+from cStringIO import StringIO
+import pickle
+from types import *
+
+from zodb.code.class_ import PersistentClassMetaClass, PersistentDescriptor
+from zodb.code.function import PersistentFunction
+from persistence import Persistent
+
+class Wrapper:
+ """Implement pickling reduce protocol for update-able object.
+
+ The Pickler creates a Wrapper instance and uses it as the reduce
+ function. The Unpickler calls the instance to recreate the
+ object.
+ """
+ __safe_for_unpickling__ = True
+
+ def __init__(self, obj, module, replace=None):
+ self._obj = obj
+ self._module = module
+ self._replace = replace
+
+ def __call__(self, *args):
+ new = self.unwrap(*args)
+ if self._replace is not None:
+ # XXX Hack: Use _p_newstate for persistent classes, because
+ # a persistent class's persistent state is a fairly limited
+ # subset of the dict and we really want to replace everything.
+ if hasattr(self._replace, "_p_newstate"):
+ self._replace._p_newstate(new)
+ else:
+ self._replace.__setstate__(new.__getstate__())
+ return self._replace
+ else:
+ return new
+
+class FunctionWrapper(Wrapper):
+
+ def unwrap(self, defaults, dict):
+ self._obj.func_defaults = defaults
+ self._obj.func_dict.update(dict)
+ return PersistentFunction(self._obj, self._module)
+
+class TypeWrapper(Wrapper):
+
+ def unwrap(self, bases, dict):
+ # XXX Add Persistent to the list of bases so that type (the
+ # base class of PersistentClassMetaClass) will create the
+ # correct C layout.
+
+ # We must maintain a linearizable MRO when adding Persistent
+ # to the list of bases. In particular, object is in Persistent's
+ # __bases__, so Persistent must occur before object in the
+ # new class's __bases__.
+
+ if not Persistent in bases:
+ if object in bases:
+ L = list(bases)
+ i = L.index(object)
+ newbases = bases[:i] + (Persistent,) + bases[i:]
+ else:
+ newbases = bases + (Persistent,)
+ else:
+ # Persistent is already a base; keep the bases unchanged.
+ newbases = bases
+
+ return PersistentClassMetaClass(self._obj.__name__, newbases, dict)
+
+class Pickler(pickle.Pickler):
+
+ dispatch = {}
+ dispatch.update(pickle.Pickler.dispatch)
+
+ def __init__(self, file, module, memo, replacements):
+ pickle.Pickler.__init__(self, file, bin=True)
+ self._pmemo = memo
+ self._module = module
+ self._repl = replacements
+ self._builtins = module.__builtins__
+
+ def wrap(self, wrapperclass, object):
+ return wrapperclass(object, self._module, self._repl.get(id(object)))
+
+ def persistent_id(self, object, force=False):
+ if isinstance(object, Wrapper) or object is self._builtins or force:
+ oid = id(object)
+ self._pmemo[oid] = object
+ return oid
+ else:
+ return None
+
+ def save_type(self, atype):
+ if atype.__module__ == "__builtin__":
+ self.save_global(atype)
+ else:
+ self.save_reduce(self.wrap(TypeWrapper, atype),
+ (atype.__bases__, atype.__dict__))
+
+ dispatch[TypeType] = save_type
+ dispatch[ClassType] = save_type
+
+ def save_function(self, func):
+ self.save_reduce(self.wrap(FunctionWrapper, func),
+ (func.func_defaults, func.func_dict))
+
+ dispatch[FunctionType] = save_function
+
+ # New-style classes don't have real dicts. They have dictproxies.
+ # There's no official way to spell the dictproxy type, so we have
+ # to get it by using type() on an example.
+ dispatch[type(Wrapper.__dict__)] = pickle.Pickler.save_dict
+
+ def save(self, object, ignore=None):
+ # Override the save() implementation from pickle.py, because
+ # we don't ever want to invoke __reduce__() on builtin types
+ # that aren't picklable. Instead, we'd like to pickle all of
+ # those objects using the persistent_id() mechanism. There's
+ # no need to cover every type with this pickler, because it
+ # isn't being used for persistence, just to create a copy.
+
+ # The ignored parameter is for compatibility with Python 2.2,
+ # which has the old inst_persistent_id feature.
+ pid = self.persistent_id(object)
+ if pid is not None:
+ self.save_pers(pid)
+ return
+
+ d = id(object)
+ t = type(object)
+ if (t is TupleType) and (len(object) == 0):
+ if self.bin:
+ self.save_empty_tuple(object)
+ else:
+ self.save_tuple(object)
+ return
+
+ if d in self.memo:
+ self.write(self.get(self.memo[d][0]))
+ return
+
+ try:
+ f = self.dispatch[t]
+ except KeyError:
+ try:
+ issc = issubclass(t, TypeType)
+ except TypeError: # t is not a class
+ issc = 0
+ if issc:
+ self.save_global(object)
+ return
+
+ try:
+ reduce = dispatch_table[t]
+ except KeyError:
+ self.save_pers(self.persistent_id(object, True))
+ return
+ else:
+ tup = reduce(object)
+
+ if type(tup) is StringType:
+ self.save_global(object, tup)
+ return
+ if type(tup) is not TupleType:
+ raise pickle.PicklingError("Value returned by %s must be a "
+ "tuple" % reduce)
+
+ l = len(tup)
+ if (l != 2) and (l != 3):
+ raise pickle.PicklingError("tuple returned by %s must "
+ "contain only two or three "
+ "elements" % reduce)
+
+ callable = tup[0]
+ arg_tup = tup[1]
+ if l > 2:
+ state = tup[2]
+ else:
+ state = None
+
+ if type(arg_tup) is not TupleType and arg_tup is not None:
+ raise pickle.PicklingError("Second element of tuple "
+ "returned by %s must be a "
+ "tuple" % reduce)
+
+ self.save_reduce(callable, arg_tup, state)
+ memo_len = len(self.memo)
+ self.write(self.put(memo_len))
+ self.memo[d] = (memo_len, object)
+ return
+
+ f(self, object)
+
+class Unpickler(pickle.Unpickler):
+
+ def __init__(self, file, pmemo):
+ pickle.Unpickler.__init__(self, file)
+ self._pmemo = pmemo
+
+ def persistent_load(self, oid):
+ return self._pmemo[oid]
+
+class NameFinder:
+ """Find a canonical name for each update-able object."""
+
+ # XXX should we try to handle descriptors? If it looks like a
+ # descriptor, try calling it and passing the class object?
+
+ classTypes = {
+ TypeType: True,
+ ClassType: True,
+ PersistentClassMetaClass: True,
+ }
+
+ types = {
+ FunctionType: True,
+ PersistentFunction: True,
+ PersistentDescriptor: True,
+ }
+ types.update(classTypes)
+
+ def __init__(self, module):
+ self._names = {} # map object ids to (canonical name, obj) pairs
+ self.walkModule(module)
+
+ def names(self):
+ return [n for n, o in self._names.itervalues()]
+
+ def _walk(self, obj, name, fmt):
+ classes = []
+ for k, v in obj.__dict__.items():
+ aType = type(v)
+ anId = id(v)
+ if aType in self.types and not anId in self._names:
+ self._names[anId] = fmt % (name, k), v
+ if aType in self.classTypes:
+ classes.append((v, k))
+ for _klass, _name in classes:
+ self.walkClass(_klass, fmt % (name, _name))
+
+ def walkModule(self, mod):
+ self._walk(mod, "", "%s%s")
+
+ def walkClass(self, klass, name):
+ self._walk(klass, name, "%s.%s")
+
+ def replacements(self, aFinder):
+ """Return a dictionary of replacements.
+
+ self and aFinder are two NameFinder instances. Return a dict
+ of all the objects in the two that share the same name. The
+ keys are the ids in self and the values are the objects in
+ aFinder.
+ """
+ temp = {}
+ result = {}
+ for anId, (name, obj) in self._names.iteritems():
+ temp[name] = anId
+ for anId, (name, obj) in aFinder._names.iteritems():
+ if name in temp:
+ result[temp[name]] = obj
+ return result
+
+def convert(module, replacements):
+ """Convert object to persistent objects in module.
+
+ Use replacements dictionary to determine which objects to update
+ in place.
+ """
+ f = StringIO()
+ memo = {}
+ p = Pickler(f, module, memo, replacements)
+ moddict = module.__dict__
+ p.dump(moddict)
+ f.reset()
+ u = Unpickler(f, memo)
+ newdict = u.load()
+ module.__dict__.clear()
+ module.__dict__.update(newdict)
+
+if __name__ == "__main__":
+ pass
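
For reference, this is roughly the sequence PersistentModuleManager.update() in module.py drives through the machinery in this file; mod and new_source are placeholders for an existing PersistentModule and its new source string:

    old_names = NameFinder(mod)          # canonical names before re-exec
    exec new_source in mod.__dict__      # rebind functions and classes
    new_names = NameFinder(mod)
    replacements = new_names.replacements(old_names)
    convert(mod, replacements)           # new defs become persistent objects;
                                         # anything that kept its name is
                                         # updated in place, preserving identity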