[Zope-CVS] CVS: Products/Ape/lib/apelib/zodb3 - __init__.py:1.1 connection.py:1.1 consts.py:1.1 db.py:1.1 interfaces.py:1.1 oidencoder.py:1.1 resource.py:1.1 serializers.py:1.1 storage.py:1.1 utils.py:1.1
Shane Hathaway
shane@zope.com
Wed, 9 Apr 2003 23:09:59 -0400
Update of /cvs-repository/Products/Ape/lib/apelib/zodb3
In directory cvs.zope.org:/tmp/cvs-serv32010/lib/apelib/zodb3
Added Files:
__init__.py connection.py consts.py db.py interfaces.py
oidencoder.py resource.py serializers.py storage.py utils.py
Log Message:
Moved apelib into a "lib" subdirectory. This simplified the
Python hacking required to make apelib a top-level package. Sorry
about the flood of checkins, but CVS makes a move like this quite painful.
=== Added File Products/Ape/lib/apelib/zodb3/__init__.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""apelib.zodb3 package
$Id: __init__.py,v 1.1 2003/04/10 03:09:58 shane Exp $
"""
=== Added File Products/Ape/lib/apelib/zodb3/connection.py === (472/572 lines abridged)
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Extension of the ZODB Connection class
$Id: connection.py,v 1.1 2003/04/10 03:09:58 shane Exp $
"""
import sys
from time import time
from types import StringType, TupleType
from cStringIO import StringIO
from cPickle import Unpickler, Pickler
from Acquisition import aq_base
from ZODB import Persistent
from ZODB.POSException \
import ConflictError, ReadConflictError, InvalidObjectReference, \
StorageError
from ZODB.Connection import Connection
from ZODB.ConflictResolution import ResolvedSerial
from zLOG import LOG, ERROR
from consts import HASH0, DEBUG
from apelib.core.interfaces import IKeyedObjectSystem
from apelib.core.events import SerializationEvent, DeserializationEvent
class ApeConnection (Connection):
"""Mapper-driven Connection
Uses a mapper to serialize the state of objects before
pickling, and to deserialize objects based on the pickled
state.
The mapper might, for example, serialize all objects as
tabular records.
"""
_root_mapper = None
[-=- -=- -=- 472 lines omitted -=- -=- -=-]
obj = self._cache.get(oid, None)
if obj is None:
return
if serial == ResolvedSerial:
obj._p_changed = None
else:
if change:
obj._p_changed = 0
#obj._p_serial = serial
self.setSerial(obj, serial)
else:
for oid, serial in store_return:
if not isinstance(serial, StringType):
raise serial
obj = self._cache.get(oid, None)
if obj is None:
continue
if serial == ResolvedSerial:
obj._p_changed = None
else:
if change:
obj._p_changed = 0
#obj._p_serial = serial
self.setSerial(obj, serial)
class UnmanagedJar:
"""Special jar for unmanaged persistent objects.
There is one such jar for each unmanaged persistent object. All
it does is notify the managed persistent object of changes.
Note that unmanaged persistent objects should never be ghosted!
Instead, when the managed persistent object gets ghosted, it
usually removes the last reference to the unmanaged object, which
is then deallocated.
"""
def __init__(self, real_jar, real_oid):
self.real_jar = real_jar
self.real_oid = real_oid
def register(self, ob):
o = self.real_jar[self.real_oid]
o._p_changed = 1
def modifiedInVersion(self, oid):
# XXX PersistentExtra wants this
return ''
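
# A minimal sketch of how UnmanagedJar is meant to be wired up.  The
# stand-in classes below are hypothetical; in practice the "real jar"
# is an ApeConnection and the owner is a managed persistent object.

class _SketchOwner:
    """Stand-in for a managed persistent object."""
    _p_changed = 0

class _SketchJar:
    """Stand-in for the managed object's connection."""
    def __init__(self, objects):
        self._objects = objects

    def __getitem__(self, oid):
        return self._objects[oid]

def _sketch_unmanaged_jar():
    owner = _SketchOwner()
    real_jar = _SketchJar({'\0' * 8: owner})
    jar = UnmanagedJar(real_jar, '\0' * 8)
    # Touching an unmanaged subobject registers it with its UnmanagedJar,
    # which marks the managed owner as changed.
    jar.register(None)
    assert owner._p_changed == 1
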
=== Added File Products/Ape/lib/apelib/zodb3/consts.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Constants for this package.
$Id: consts.py,v 1.1 2003/04/10 03:09:58 shane Exp $
"""
import os
if os.environ.get('APE_MAPPER_DEBUG'):
DEBUG = 1
else:
DEBUG = 0
ROOT_OID = '\0' * 8
HASH0 = '\0' * 8
HASH1 = '\0' * 7 + '\001'
=== Added File Products/Ape/lib/apelib/zodb3/db.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Extension of the ZODB DB class
$Id: db.py,v 1.1 2003/04/10 03:09:58 shane Exp $
"""
from ZODB.DB import DB, Transaction, cPickle, cStringIO, allocate_lock
from apelib.core.interfaces import IMapper
from connection import ApeConnection
from storage import ApeStorage
from oidencoder import OIDEncoder
from resource import StaticResource
from interfaces import IResourceAccess, IOIDEncoder
def callMapperFactory(factory, kw):
"""Returns (mapper, tpc_conns) given the name of a factory and arguments.
"""
pos = factory.rfind('.')
if pos < 0:
raise ValueError('factory must be a string containing <module>.<name>')
module = factory[:pos]
name = factory[pos + 1:]
m = __import__(module, {}, {}, (name,))
f = getattr(m, name)
return f(**kw)
class ApeDB (DB):
"""Mapper-driven Database
"""
klass = ApeConnection
# SDH: two extra args.
def __init__(self, storage,
mapper_resource=None,
factory=None,
oid_encoder=None,
pool_size=7,
cache_size=400,
cache_deactivate_after=60,
version_pool_size=3,
version_cache_size=100,
version_cache_deactivate_after=10,
):
"""Create an object database.
"""
if mapper_resource is None:
if factory is not None:
                # Use a mapper factory.  Note that no keyword
                # arguments are forwarded from this constructor.
                mapper, tpc_conns = callMapperFactory(factory, {})
assert IMapper.isImplementedBy(mapper)
mapper_resource = StaticResource(mapper)
else:
if isinstance(storage, ApeStorage):
# Use the mapper from the storage
mapper_resource = storage.getMapperResource()
else:
raise RuntimeError('No mapper or factory specified')
else:
# mapper_resource was specified
assert IResourceAccess.isImplementedBy(mapper_resource)
assert factory is None
# Allocate locks:
l=allocate_lock()
self._a=l.acquire
self._r=l.release
# Setup connection pools and cache info
self._pools={},[]
self._temps=[]
self._pool_size=pool_size
self._cache_size=cache_size
self._cache_deactivate_after = cache_deactivate_after
self._version_pool_size=version_pool_size
self._version_cache_size=version_cache_size
self._version_cache_deactivate_after = version_cache_deactivate_after
self._miv_cache={}
# Setup storage
self._storage=storage
storage.registerDB(self, None)
if not hasattr(storage,'tpc_vote'): storage.tpc_vote=lambda *args: None
if oid_encoder is None:
oid_encoder = OIDEncoder()
else:
assert IOIDEncoder.isImplementedBy(oid_encoder)
self._oid_encoder = oid_encoder
self._mapper_resource = mapper_resource
# Pass through methods:
for m in ('history',
'supportsUndo', 'supportsVersions', 'undoLog',
'versionEmpty', 'versions'):
setattr(self, m, getattr(storage, m))
if hasattr(storage, 'undoInfo'):
self.undoInfo=storage.undoInfo
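
# A minimal usage sketch.  'mymappers.configure' is a hypothetical
# dotted name; it must resolve to a callable returning (mapper,
# tpc_conns), which is what callMapperFactory() expects.

def _sketch_open_db():
    mapper, tpc_conns = callMapperFactory('mymappers.configure', {})
    resource = StaticResource(mapper)
    storage = ApeStorage(mapper_resource=resource, tpc_conns=tpc_conns)
    db = ApeDB(storage, mapper_resource=resource)
    conn = db.open()
    try:
        root = conn.root()
        # ... work with the root object here ...
    finally:
        conn.close()
    return db
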
=== Added File Products/Ape/lib/apelib/zodb3/interfaces.py ===
##############################################################################
#
# Copyright (c) 2003 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Interfaces for apelib.zodb3.
$Id: interfaces.py,v 1.1 2003/04/10 03:09:58 shane Exp $
"""
from Interface import Interface
class IOIDEncoder (Interface):
def decode(oid):
"Returns a keychain (a tuple) given an OID"
def encode(keychain):
"Returns an OID (a string) given a keychain"
class IResourceAccess (Interface):
"""Provides access to a resource that may need periodic updates.
"""
def access(consumer):
"""Returns the resource.
"""
def release(consumer):
"""Indicates the given consumer is finished with the resource.
The implementation may take an opportunity to update the resource.
"""
=== Added File Products/Ape/lib/apelib/zodb3/oidencoder.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Default OID encoder.
$Id: oidencoder.py,v 1.1 2003/04/10 03:09:58 shane Exp $
"""
from types import TupleType
from marshal import dumps, loads
from consts import ROOT_OID
from interfaces import IOIDEncoder
class OIDEncoder:
"""Simple OID encoder
"""
__implements__ = IOIDEncoder
def decode(self, oid):
"""Returns a keychain."""
if oid == ROOT_OID:
return ()
keychain = loads(oid)
assert isinstance(keychain, TupleType)
return keychain
def encode(self, keychain):
"""Returns an OID."""
assert isinstance(keychain, TupleType)
if keychain == ():
return ROOT_OID
return dumps(keychain)
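
# A quick round-trip sketch with the default encoder: the empty
# keychain maps to ROOT_OID, everything else is marshalled.  The
# keychain values here are arbitrary examples.

def _sketch_oid_round_trip():
    e = OIDEncoder()
    assert e.encode(()) == ROOT_OID
    oid = e.encode(('folder', 'doc1'))
    assert e.decode(oid) == ('folder', 'doc1')
    return oid
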
=== Added File Products/Ape/lib/apelib/zodb3/resource.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Resource access
$Id: resource.py,v 1.1 2003/04/10 03:09:58 shane Exp $
"""
from interfaces import IResourceAccess
class StaticResource:
"""Simple, static resource"""
__implements__ = IResourceAccess
def __init__(self, r):
self.r = r
def access(self, consumer):
return self.r
def release(self, consumer):
pass
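
# A sketch of a non-static variant: the release() hook refreshes the
# resource, which IResourceAccess explicitly allows.  The refresh_func
# callable is hypothetical.

class _SketchRefreshingResource:
    __implements__ = IResourceAccess

    def __init__(self, initial, refresh_func):
        self.r = initial
        self.refresh_func = refresh_func

    def access(self, consumer):
        return self.r

    def release(self, consumer):
        # Take the opportunity to update the resource for the
        # next consumer.
        self.r = self.refresh_func(self.r)
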
=== Added File Products/Ape/lib/apelib/zodb3/serializers.py ===
##############################################################################
#
# Copyright (c) 2003 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Serializers specific to ZODB3.
$Id: serializers.py,v 1.1 2003/04/10 03:09:58 shane Exp $
"""
import os
from cStringIO import StringIO
from cPickle import Pickler, Unpickler, UnpickleableError
import time
from types import DictType
from Persistence import Persistent, PersistentMapping
from ZODB.TimeStamp import TimeStamp
from apelib.core.interfaces \
import ISerializer, IFullSerializationEvent, \
IFullDeserializationEvent
from apelib.core.events import SerializationEvent, DeserializationEvent
from apelib.core.exceptions import SerializationError
from apelib.core.schemas import FieldSchema
class FixedPersistentMapping:
"""Unchanging persistent mapping.
Generally used for a ZODB root object."""
__implements__ = ISerializer
def __init__(self):
# map: { name -> (keychain, mapper) }
self.map = {}
def add(self, name, keychain, mapper_names=None):
self.map[name] = (keychain, mapper_names)
def getSchema(self):
return None # No storage
def canSerialize(self, object):
return isinstance(object, PersistentMapping)
def serialize(self, object, event):
names = object.keys()
names.sort()
expected = self.map.keys()
expected.sort()
assert names == expected, '%s != %s' % (names, expected)
for name in names:
keychain, mapper_names = self.map[name]
subob = object[name]
event.notifySerializedRef(name, subob, 0, keychain)
        # One of the two will work; which attribute PersistentMapping
        # uses depends on the ZODB version. ;-)
event.ignoreAttribute('data')
event.ignoreAttribute('_container')
def deserialize(self, object, event, state):
assert state is None
data = {}
for name, (keychain, mapper_names) in self.map.items():
subob = event.dereference(name, keychain,
{'mapper_names': mapper_names})
data[name] = subob
# The PersistentMapping doesn't have its data or _container
# attribute yet, and we don't know what its name should be
# since PersistentMapping's internal structure is not fixed.
# So call the PersistentMapping's constructor.
object.__init__(data)
class RollCall:
"""Helps ensure all parts of an object get serialized.
Designed for debugging purposes.
"""
__implements__ = ISerializer
def getSchema(self):
return None # No storage
def canSerialize(self, object):
return 1
def serialize(self, object, event):
assert IFullSerializationEvent.isImplementedBy(event)
attrs = event.getSerializedAttributeNames()
attrs_map = {}
for attr in attrs:
attrs_map[attr] = 1
missed = []
for k in object.__dict__.keys():
if not k.startswith('_v_') and not attrs_map.has_key(k):
missed.append(repr(k))
if missed:
raise SerializationError(
'Attribute(s) %s of object at %s not serialized' %
(', '.join(missed), repr(event.getKeychain())))
return None
def deserialize(self, object, event, state):
assert state is None
class RemainingState:
"""(De)serializes the remaining state of a Persistent object"""
__implements__ = ISerializer
schema = FieldSchema('data', 'string')
def getSchema(self):
return self.schema
def canSerialize(self, object):
try:
return isinstance(object, Persistent)
except TypeError:
# XXX Python 2.1 thinks Persistent is not a class
return 0
def serialize(self, object, event):
assert IFullSerializationEvent.isImplementedBy(event)
assert isinstance(object, Persistent)
# Allow pickling of cyclic references to the object.
event.notifySerialized('self', object, 0)
# Ignore previously serialized attributes
state = object.__dict__.copy()
for key in state.keys():
if key.startswith('_v_'):
del state[key]
for attrname in event.getSerializedAttributeNames():
if state.has_key(attrname):
del state[attrname]
if not state:
# No data needs to be stored
return ''
outfile = StringIO()
p = Pickler(outfile)
unmanaged = []
def persistent_id(ob, getInternalRef=event.getInternalRef,
unmanaged=unmanaged):
ref = getInternalRef(ob)
if ref is None:
if hasattr(ob, '_p_oid'):
# Persistent objects that end up in the remainder
# are unmanaged. Tell ZODB about them so that
# ZODB can deal with them specially.
unmanaged.append(ob)
return ref
p.persistent_id = persistent_id
try:
p.dump(state)
except UnpickleableError, exc:
# Try to reveal which attribute is unpickleable.
attrname = None
attrvalue = None
for key, value in state.items():
del unmanaged[:]
outfile.seek(0)
outfile.truncate()
p = Pickler(outfile)
p.persistent_id = persistent_id
try:
p.dump(value)
except UnpickleableError:
attrname = key
attrvalue = value
break
if attrname is not None:
# Provide a more informative exception.
if os.environ.get('APE_TRACE_UNPICKLEABLE'):
# Provide an opportunity to examine
# the "attrvalue" attribute.
import pdb
pdb.set_trace()
raise RuntimeError(
'Unable to pickle the %s attribute, %s, '
'of %s at %s. %s.' % (
repr(attrname), repr(attrvalue), repr(object),
repr(event.getKeychain()), str(exc)))
else:
# Couldn't help.
raise
p.dump(unmanaged)
s = outfile.getvalue()
event.addUnmanagedPersistentObjects(unmanaged)
return s
def deserialize(self, object, event, state):
assert IFullDeserializationEvent.isImplementedBy(event)
assert isinstance(object, Persistent)
# Set up to recover cyclic references to the object.
event.notifyDeserialized('self', object)
if state:
infile = StringIO(state)
u = Unpickler(infile)
u.persistent_load = event.loadInternalRef
s = u.load()
object.__dict__.update(s)
try:
unmanaged = u.load()
except EOFError:
# old pickle with no list of unmanaged objects
pass
else:
event.addUnmanagedPersistentObjects(unmanaged)
class ModTimeAttribute:
"""Sets the _p_mtime attribute."""
__implements__ = ISerializer
schema = FieldSchema('mtime', 'int')
def getSchema(self):
return self.schema
def canSerialize(self, obj):
try:
return isinstance(obj, Persistent)
except TypeError:
# XXX Python 2.1 thinks Persistent is not a class
return 0
def setTime(self, obj, t):
"""Sets the last modification time of a Persistent obj to float t.
"""
args = time.gmtime(t)[:5] + (t%60,)
obj._p_serial = repr(TimeStamp(*args))
def serialize(self, obj, event):
now = long(time.time())
if obj._p_changed:
# Indicate that this object just changed. Note that the time
# is a guess.
self.setTime(obj, now)
return now
def deserialize(self, obj, event, state):
self.setTime(obj, state)
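
# A standalone sketch of the persistent_id technique used by
# RemainingState.serialize() above: the Pickler hook diverts references
# the mapper already knows about and collects any other persistent
# objects as "unmanaged".  The managed_refs dict (id(ob) -> reference)
# is a hypothetical stand-in for event.getInternalRef().

def _sketch_pickle_with_refs(state, managed_refs):
    outfile = StringIO()
    p = Pickler(outfile)
    unmanaged = []
    def persistent_id(ob, managed_refs=managed_refs, unmanaged=unmanaged):
        ref = managed_refs.get(id(ob))
        if ref is None and hasattr(ob, '_p_oid'):
            # A persistent object the mapper is not tracking.
            unmanaged.append(ob)
        return ref
    p.persistent_id = persistent_id
    p.dump(state)
    return outfile.getvalue(), unmanaged
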
=== Added File Products/Ape/lib/apelib/zodb3/storage.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Storage implementation that loads/stores using a mapper.
$Id: storage.py,v 1.1 2003/04/10 03:09:58 shane Exp $
"""
import md5
from cPickle import Pickler, Unpickler
from cStringIO import StringIO
from ZODB import POSException, BaseStorage
from apelib.core.events import MapperEvent, LoadEvent, StoreEvent
from apelib.core.interfaces import ITPCConnection
from apelib.core.exceptions import NoStateFoundError
from consts import HASH0, HASH1, DEBUG
from oidencoder import OIDEncoder
from interfaces import IResourceAccess, IOIDEncoder
class ApeStorage(BaseStorage.BaseStorage):
def __init__(self, mapper_resource, tpc_conns=(),
oid_encoder=None, name=''):
assert IResourceAccess.isImplementedBy(mapper_resource)
self._mapper_resource = mapper_resource
if oid_encoder is None:
oid_encoder = OIDEncoder()
else:
assert IOIDEncoder.isImplementedBy(oid_encoder)
self._oid_encoder = oid_encoder
self._tpc_conns = tpc_conns
sort_keys = []
names = []
try:
opened = []
for c in tpc_conns:
if not ITPCConnection.isImplementedBy(c):
raise RuntimeError('%s is not an ITPCConnection' % repr(c))
c.connect()
opened.append(c)
sort_keys.append(c.sortKey())
names.append(c.getName())
except:
for c in opened:
c.close()
raise
self._sort_key = tuple(sort_keys)
if not name:
name = 'ApeStorage: ' + ', '.join(names)
BaseStorage.BaseStorage.__init__(self, name)
def __len__(self):
return 1
def getSize(self):
# Stub
return 1
def sortKey(self):
return self._sort_key
def getMapperResource(self):
return self._mapper_resource
def hash64(self, value):
"""Returns an 8-byte hash value.
"""
h = '%08x' % hash(value)
if h == HASH0:
# Avoid the special zero hash.
h = HASH1
if DEBUG:
print '64-bit hash of %r is %r' % (value, h)
return h
def _load(self, root_mapper, keychain, hash_only=0):
mapper = root_mapper
mapper_names = []
# Follow the keychain to find the right mapper.
classification = None
for i in range(len(keychain)):
k = keychain[:i + 1]
cfr = mapper.getClassifier()
assert cfr is not None, keychain
event = LoadEvent(mapper, k)
classification, sub_mapper_name = cfr.classifyState(event)
mapper_names.append(sub_mapper_name)
mapper = mapper.getSubMapper(sub_mapper_name)
event = LoadEvent(mapper, keychain)
if hash_only:
event.hash_only = 1
full_state, hash_value = mapper.getGateway().load(event)
return full_state, hash_value, classification, mapper_names
def load(self, oid, version):
if version:
raise POSException.Unsupported, "Versions aren't supported"
self._lock_acquire()
try:
keychain = self._oid_encoder.decode(oid)
root_mapper = self._mapper_resource.access(self)
full_state, hash_value, classification, mapper_names = self._load(
root_mapper, keychain)
file = StringIO()
p = Pickler(file)
p.dump((classification, mapper_names))
p.dump(full_state)
data = file.getvalue()
h = self.hash64(hash_value)
if DEBUG:
print 'loaded', `oid`, `h`
return data, h
finally:
self._lock_release()
def store(self, oid, h64, data, version, transaction):
if transaction is not self._transaction:
raise POSException.StorageTransactionError(self, transaction)
if version:
raise POSException.Unsupported, "Versions aren't supported"
self._lock_acquire()
try:
root_mapper = self._mapper_resource.access(self)
keychain = self._oid_encoder.decode(oid)
# First detect conflicts.
# The "h64" argument, if its value is not 0,
# was previously generated by hash64().
if DEBUG:
print 'storing', `oid`, `h64`
if h64 != HASH0:
# Overwriting an old object. Use the hash to verify
# that the new data was derived from the old data.
info = self._load(root_mapper, keychain, 1)
old_state, old_hash = info[:2]
old_h64 = self.hash64(old_hash)
if h64 != old_h64:
raise POSException.ConflictError(
"Storing %s based on old data. %s != %s" % (
repr(keychain),
repr(h64), repr(old_h64)))
else:
# A new object. Attempts to load should lead to
# NoStateFoundError or a hash of None, otherwise
# there's a conflict.
try:
info = self._load(root_mapper, keychain, 1)
except NoStateFoundError:
pass
else:
old_hash = info[1]
if old_hash is not None:
raise POSException.ConflictError(
"%s already exists" % repr(keychain))
# Now unpickle and store the data.
file = StringIO(data)
u = Unpickler(file)
classification, mapper_names = u.load()
state = u.load()
assert len(keychain) == len(mapper_names)
mapper = root_mapper
cfr = mapper.getClassifier()
for mapper_name in mapper_names:
cfr = mapper.getClassifier()
mapper = mapper.getSubMapper(mapper_name)
event = StoreEvent(mapper, keychain)
new_hash = mapper.getGateway().store(event, state)
if cfr is not None:
cfr.store(event, classification)
new_h64 = self.hash64(new_hash)
finally:
self._lock_release()
if DEBUG:
print 'stored', `oid`, `h64`, `new_h64`
return new_h64
def new_oid(self):
# Try to use the root keychain generator to make a keychain.
root_mapper = self._mapper_resource.access(self)
kgen = root_mapper.getKeychainGenerator()
event = MapperEvent(root_mapper, ())
keychain = kgen.makeKeychain(event, None, 1)
return self._oid_encoder.encode(keychain)
def _clear_temp(self):
pass
def _abort(self):
for c in self._tpc_conns:
c.abort()
def _begin(self, tid, u, d, e):
for c in self._tpc_conns:
c.begin()
def _finish(self, tid, user, desc, ext):
for c in self._tpc_conns:
c.finish()
def _vote(self):
for c in self._tpc_conns:
c.vote()
def pack(self, t, referencesf):
pass
def _splat(self):
"""Spit out a string showing state.
"""
return ''
def close(self):
for c in self._tpc_conns:
c.close()
self._mapper_resource.release(self)
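
# A reduced sketch of the optimistic conflict check in store() above.
# The serial handed out by load() is just hash64() of the stored
# state's hash value, so a mismatch at store time means another
# transaction wrote the object in between.  current_hash_of is a
# hypothetical callable that re-loads the state and returns its hash.

def _sketch_conflict_check(storage, keychain, h64, current_hash_of):
    if h64 == HASH0:
        return  # a new object; nothing to compare against
    old_h64 = storage.hash64(current_hash_of(keychain))
    if h64 != old_h64:
        raise POSException.ConflictError(
            'Storing %s based on old data' % repr(keychain))
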
=== Added File Products/Ape/lib/apelib/zodb3/utils.py ===
##############################################################################
#
# Copyright (c) 2003 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Utilities for handling ZODB objects.
$Id: utils.py,v 1.1 2003/04/10 03:09:58 shane Exp $
"""
from cStringIO import StringIO
from cPickle import Pickler, Unpickler
from types import StringType
def copyOf(object):
"""Copies a ZODB object, loading subobjects as needed.
Re-ghostifies objects along the way to save memory.
"""
former_ghosts = []
zclass_refs = {}
def persistent_id(ob, former_ghosts=former_ghosts,
zclass_refs=zclass_refs):
if getattr(ob, '_p_changed', 0) is None:
# Load temporarily.
former_ghosts.append(ob)
ob._p_changed = 0
if hasattr(ob, '__bases__'):
m = getattr(ob, '__module__', None)
if (m is not None
and isinstance(m, StringType)
and m.startswith('*')):
n = getattr(ob, '__name__', None)
if n is not None:
# Pickling a ZClass instance. Store the reference to
# the ZClass class separately, so that the pickler
# and unpickler don't trip over the apparently
# missing module.
ref = (m, n)
zclass_refs[ref] = ob
return ref
return None
def persistent_load(ref, zclass_refs=zclass_refs):
return zclass_refs[ref]
stream = StringIO()
p = Pickler(stream, 1)
p.persistent_id = persistent_id
p.dump(object)
if former_ghosts:
for g in former_ghosts:
g._p_changed = None
del former_ghosts[:]
stream.seek(0)
u = Unpickler(stream)
u.persistent_load = persistent_load
return u.load()
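
# A small usage sketch: copyOf() yields an independent copy, so
# mutating the copy leaves the original untouched.  PersistentMapping
# is used here only as a convenient persistent container.

def _sketch_copy_of():
    from Persistence import PersistentMapping
    original = PersistentMapping({'title': 'original'})
    duplicate = copyOf(original)
    duplicate['title'] = 'changed'
    assert original['title'] == 'original'
    return duplicate
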