[Zope-CVS] CVS: Products/Ape/lib/apelib/zodb3 - connection.py:1.5 storage.py:1.5
Shane Hathaway
shane@zope.com
Mon, 26 May 2003 16:20:09 -0400
Update of /cvs-repository/Products/Ape/lib/apelib/zodb3
In directory cvs.zope.org:/tmp/cvs-serv19924/zodb3
Modified Files:
connection.py storage.py
Log Message:
Changed the ZODB 3 support to use GatewayIO and ObjectSystemIO, moving
a lot of common logic into a single place. In theory, the ZODB 4 support
will be able to use GatewayIO and ObjectSystemIO also.
=== Products/Ape/lib/apelib/zodb3/connection.py 1.4 => 1.5 ===
--- Products/Ape/lib/apelib/zodb3/connection.py:1.4 Mon May 26 15:33:16 2003
+++ Products/Ape/lib/apelib/zodb3/connection.py Mon May 26 16:20:09 2003
@@ -32,8 +32,8 @@
from zLOG import LOG, ERROR
from consts import HASH0, DEBUG
+from apelib.core.io import ObjectSystemIO, ClassifiedState
from apelib.core.interfaces import IKeyedObjectSystem
-from apelib.core.events import SerializationEvent, DeserializationEvent
class ApeConnection (Connection):
@@ -46,17 +46,18 @@
The mapper might, for example, serialize all objects as
tabular records.
"""
- _root_mapper = None
+ _osio = None
__implements__ = (IKeyedObjectSystem,
getattr(Connection, '__implements__', ()))
- def getRootMapper(self):
- root_mapper = self._root_mapper
- if root_mapper is None:
+ def getObjectSystemIO(self):
+ osio = self._osio
+ if osio is None:
root_mapper = self._db._mapper_resource.access(self)
- self._root_mapper = root_mapper
- return root_mapper
+ osio = ObjectSystemIO(root_mapper, self)
+ self._osio = osio
+ return osio
def close(self):
@@ -64,8 +65,8 @@
try:
Connection.close(self)
finally:
- if db is not None and self._root_mapper is not None:
- self._root_mapper = None
+ if db is not None and self._osio is not None:
+ self._osio = None
db._mapper_resource.release(self)
@@ -82,16 +83,12 @@
# unpickler.persistent_load=self._persistent_load
try:
- classification, mapper_names = unpickler.load()
+ classified_state = unpickler.load()
except:
raise "Could not load oid %s, pickled data in traceback info may\
contain clues" % (oid)
-
- mapper = self.getRootMapper()
- for mapper_name in mapper_names:
- mapper = mapper.getSubMapper(mapper_name)
- object = mapper.getSerializer().createEmptyInstance(
- self, classification=classification)
+ osio = self.getObjectSystemIO()
+ object = osio.newObject(classified_state)
assert object is not None
object._p_oid=oid
@@ -115,12 +112,9 @@
if hints:
mapper_names = hints.get('mapper_names')
if mapper_names is not None:
- mapper = self.getRootMapper()
- for mapper_name in mapper_names:
- mapper = mapper.getSubMapper(mapper_name)
- ser = mapper.getSerializer()
-
- object = ser.createEmptyInstance(self)
+ classified_state = ClassifiedState(None, None, mapper_names)
+ osio = self.getObjectSystemIO()
+ object = osio.newObject(classified_state)
if object is not None:
object._p_oid=oid
object._p_jar=self
@@ -219,35 +213,10 @@
# SDH: hook in the serializer.
# state=object.__getstate__()
- keychain = self._db._oid_encoder.decode(oid)
- mapper = self.getRootMapper()
- mapper_names = []
oid_encoder = self._db._oid_encoder
- classification = None
- if keychain:
- # Use classification to discover what mapper to use
- # for storage.
- # classify the parents.
- for i in range(1, len(keychain)):
- k = keychain[:i]
- o = self[oid_encoder.encode(k)]
- cfr = mapper.getClassifier()
- classification, sub_mapper_name = \
- cfr.classifyObject(o, k)
- mapper_names.append(sub_mapper_name)
- mapper = mapper.getSubMapper(sub_mapper_name)
- # Now classify the object being stored.
- cfr = mapper.getClassifier()
- classification, sub_mapper_name = cfr.classifyObject(
- object, keychain)
- mapper_names.append(sub_mapper_name)
- mapper = mapper.getSubMapper(sub_mapper_name)
-
- ser = mapper.getSerializer()
- if DEBUG:
- print 'serializing', repr(oid), repr(serial)
- event = SerializationEvent(self, mapper, keychain, object)
- state = ser.serialize(object, event)
+ keychain = oid_encoder.decode(oid)
+ osio = self.getObjectSystemIO()
+ event, classified_state = osio.serialize(keychain, object)
ext_refs = event.getExternalRefs()
if ext_refs:
for (ext_keychain, ext_ref) in ext_refs:
@@ -274,8 +243,7 @@
seek(0)
clear_memo()
- dump((classification, mapper_names))
- dump(state)
+ dump(classified_state)
p=file(1)
s=dbstore(oid,serial,p,version,transaction)
self._store_count = self._store_count + 1
@@ -340,8 +308,7 @@
unpickler=Unpickler(file)
# SDH: external references are reassembled elsewhere.
# unpickler.persistent_load=self._persistent_load
- classification, mapper_names = unpickler.load()
- state = unpickler.load()
+ classified_state = unpickler.load()
# SDH: Let the object mapper do the state setting.
# if hasattr(object, '__setstate__'):
@@ -350,15 +317,8 @@
# d=object.__dict__
# for k,v in state.items(): d[k]=v
keychain = self._db._oid_encoder.decode(oid)
- assert len(keychain) == len(mapper_names)
- mapper = self.getRootMapper()
- for mapper_name in mapper_names:
- mapper = mapper.getSubMapper(mapper_name)
- ser = mapper.getSerializer()
- if DEBUG:
- print 'deserializing', repr(oid), repr(serial)
- event = DeserializationEvent(self, mapper, keychain, object)
- ser.deserialize(object, event, state)
+ osio = self.getObjectSystemIO()
+ event = osio.deserialize(keychain, object, classified_state)
unmanaged = event.getUnmanagedPersistentObjects()
if unmanaged:
=== Products/Ape/lib/apelib/zodb3/storage.py 1.4 => 1.5 ===
--- Products/Ape/lib/apelib/zodb3/storage.py:1.4 Mon May 19 15:32:35 2003
+++ Products/Ape/lib/apelib/zodb3/storage.py Mon May 26 16:20:09 2003
@@ -22,9 +22,7 @@
from ZODB import POSException, BaseStorage
-from apelib.core.events \
- import MapperEvent, GatewayEvent, LoadEvent, StoreEvent, DatabaseInitEvent
-from apelib.core.interfaces import ITPCConnection
+from apelib.core.io import GatewayIO
from apelib.core.exceptions import NoStateFoundError, ConfigurationError
from consts import HASH0, HASH1, DEBUG
from oidencoder import OIDEncoder
@@ -48,33 +46,17 @@
else:
assert IOIDEncoder.isImplementedBy(oid_encoder)
self._oid_encoder = oid_encoder
- self._conn_map = connections
- sort_keys = []
+ gwio = GatewayIO(mapper_resource.access(self), connections)
+ self._gwio = gwio
+ self._conn_list = gwio.getConnectionList()
+ gwio.openConnections()
+ gwio.initDatabases(clear_all)
names = []
- try:
- opened = [] # [(sort_key, conn),]
- for c in connections.values():
- if not ITPCConnection.isImplementedBy(c):
- raise ConfigurationError(
- '%s does not implement ITPCConnection' % repr(c))
- sort_key = c.sortKey()
- sort_keys.append(sort_key)
- c.connect()
- opened.append((sort_key, c))
- names.append(c.getName())
- self.initDatabases(clear_all)
- except:
- for sort_key, c in opened:
- c.close()
- raise
+ sort_keys = []
+ for c in gwio.getConnectionList():
+ names.append(c.getName())
+ sort_keys.append(c.sortKey())
self._sort_key = tuple(sort_keys)
-
- opened.sort()
- conn_list = []
- for sort_key, c in opened:
- conn_list.append(c)
- self._conn_list = conn_list
-
if not name:
name = 'ApeStorage: ' + ', '.join(names)
BaseStorage.BaseStorage.__init__(self, name)
@@ -93,25 +75,7 @@
return self._mapper_resource
def initDatabases(self, clear_all=0):
- """Creates tables, etc.
- """
- root_mapper = self._mapper_resource.access(self)
- # Find all initializers, eliminating duplicates.
- initializers = {} # obj -> 1
- todo = [root_mapper]
- while todo:
- mapper = todo.pop()
- for obj in mapper.getInitializers():
- initializers[obj] = 1
- sub = mapper.listSubMapperNames()
- if sub:
- for name in sub:
- m = mapper.getSubMapper(name)
- todo.append(m)
- # Now call them.
- for initializer in initializers.keys():
- event = DatabaseInitEvent(self._conn_map, clear_all)
- initializer.init(event)
+ self._gwio.initDatabases(clear_all=clear_all)
def hash64(self, value):
"""Returns an 8-byte hash value.
@@ -124,39 +88,17 @@
print '64-bit hash of %r is %r' % (value, h)
return h
- def _load(self, root_mapper, keychain, hash_only=0):
- mapper = root_mapper
- mapper_names = []
- # Follow the keychain to find the right mapper.
- classification = None
- for i in range(len(keychain)):
- k = keychain[:i + 1]
- cfr = mapper.getClassifier()
- assert cfr is not None, keychain
- event = LoadEvent(mapper, k, self._conn_map)
- classification, sub_mapper_name = cfr.classifyState(event)
- mapper_names.append(sub_mapper_name)
- mapper = mapper.getSubMapper(sub_mapper_name)
- event = LoadEvent(mapper, keychain, self._conn_map)
- if hash_only:
- event.hash_only = 1
- full_state, hash_value = mapper.getGateway().load(event)
- return full_state, hash_value, classification, mapper_names
-
-
def load(self, oid, version):
if version:
raise POSException.Unsupported, "Versions aren't supported"
self._lock_acquire()
try:
+ self._mapper_resource.access(self) # Update mapper
keychain = self._oid_encoder.decode(oid)
- root_mapper = self._mapper_resource.access(self)
- full_state, hash_value, classification, mapper_names = self._load(
- root_mapper, keychain)
+ classified_state, hash_value = self._gwio.load(keychain)
file = StringIO()
p = Pickler(file)
- p.dump((classification, mapper_names))
- p.dump(full_state)
+ p.dump(classified_state)
data = file.getvalue()
h = self.hash64(hash_value)
if DEBUG:
@@ -174,7 +116,7 @@
self._lock_acquire()
try:
- root_mapper = self._mapper_resource.access(self)
+ self._mapper_resource.access(self) # Update mapper
keychain = self._oid_encoder.decode(oid)
# First detect conflicts.
@@ -185,8 +127,7 @@
if h64 != HASH0:
# Overwriting an old object. Use the hash to verify
# that the new data was derived from the old data.
- info = self._load(root_mapper, keychain, 1)
- old_state, old_hash = info[:2]
+ old_cs, old_hash = self._gwio.load(keychain, 1)
old_h64 = self.hash64(old_hash)
if h64 != old_h64:
raise POSException.ConflictError(
@@ -198,11 +139,10 @@
# NoStateFoundError or a hash of None, otherwise
# there's a conflict.
try:
- info = self._load(root_mapper, keychain, 1)
+ cs, old_hash = self._gwio.load(keychain, 1)
except NoStateFoundError:
pass
else:
- old_hash = info[1]
if old_hash is not None:
raise POSException.ConflictError(
"%s already exists" % repr(keychain))
@@ -210,18 +150,8 @@
# Now unpickle and store the data.
file = StringIO(data)
u = Unpickler(file)
- classification, mapper_names = u.load()
- state = u.load()
- assert len(keychain) == len(mapper_names)
- mapper = root_mapper
- cfr = mapper.getClassifier()
- for mapper_name in mapper_names:
- cfr = mapper.getClassifier()
- mapper = mapper.getSubMapper(mapper_name)
- event = StoreEvent(mapper, keychain, self._conn_map)
- new_hash = mapper.getGateway().store(event, state)
- if cfr is not None:
- cfr.store(event, classification)
+ classified_state = u.load()
+ new_hash = self._gwio.store(keychain, classified_state)
new_h64 = self.hash64(new_hash)
finally:
self._lock_release()
@@ -231,11 +161,7 @@
return new_h64
def new_oid(self):
- # Try to use the root keychain generator to make a keychain.
- root_mapper = self._mapper_resource.access(self)
- kgen = root_mapper.getKeychainGenerator()
- event = GatewayEvent(root_mapper, (), self._conn_map)
- keychain = kgen.makeKeychain(event, None, 1)
+ keychain = self._gwio.newKeychain()
return self._oid_encoder.encode(keychain)
def _clear_temp(self):
@@ -269,5 +195,4 @@
for c in self._conn_list:
c.close()
self._mapper_resource.release(self)
-