[Zope-CVS] CVS: Products/Ape/lib/apelib/zodb3 - gateways.py:1.3.2.1 scanner.py:1.2.2.1 serializers.py:1.3.2.1 storage.py:1.8.2.1
Shane Hathaway
shane at zope.com
Wed Dec 17 23:44:24 EST 2003
Update of /cvs-repository/Products/Ape/lib/apelib/zodb3
In directory cvs.zope.org:/tmp/cvs-serv13372/lib/apelib/zodb3
Modified Files:
Tag: ape-0_8-branch
gateways.py scanner.py serializers.py storage.py
Log Message:
Continued renaming and refactoring.
tmp/map.py contains notes on the new names for things.
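For quick reference, the renames visible in the hunks below follow a consistent pattern. The summary here is reconstructed from the diff itself; tmp/map.py in the tree remains the authoritative list:

    # Old name -> new name, as seen in this diff (not exhaustive).
    RENAMES = {
        'keychain':                      'oid (a plain string)',
        'getSchema()':                   'schema (a class attribute)',
        'getSources':                    'getPollSources',
        'setSources':                    'setPollSources',
        'freshen':                       'poll',
        'notifySerializedRef':           'referenced',
        'notifySerialized':              'serialized',
        'notifyDeserialized':            'deserialized',
        'dereference':                   'resolve',
        'getInternalRef':                'identifyInternal',
        'loadInternalRef':               'resolveInternal',
        'addUnmanagedPersistentObjects': 'upos.extend',
        'ignoreAttribute':               'ignore (takes a sequence)',
        'mapper_resource':               'conf_resource',
    }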
=== Products/Ape/lib/apelib/zodb3/gateways.py 1.3 => 1.3.2.1 ===
--- Products/Ape/lib/apelib/zodb3/gateways.py:1.3 Wed Jul 30 17:33:12 2003
+++ Products/Ape/lib/apelib/zodb3/gateways.py Wed Dec 17 23:43:54 2003
@@ -30,21 +30,18 @@
schema = RowSequenceSchema()
schema.addField('key', 'string', 1)
- schema.addField('keychain', 'keychain')
+ schema.addField('oid', 'string')
- def __init__(self, keychains=None):
- if keychains is None:
- keychains = {}
- self.keychains = keychains
+ def __init__(self, oids=None):
+ if oids is None:
+ oids = {}
+ self.oids = oids
- def set(self, key, keychain):
- self.keychains[key] = keychain
-
- def getSchema(self):
- return self.schema
+ def set(self, key, oid):
+ self.oids[key] = oid
def load(self, event):
- items = self.keychains.items()
+ items = self.oids.items()
items.sort()
return items, None
@@ -56,6 +53,6 @@
% (repr(data), repr(expect)))
return None
- def getSources(self, event):
+ def getPollSources(self, event):
return None
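Reassembled, the gateway above now reads roughly as follows. The enclosing class name and the import are not visible in the hunks, so ExampleGateway and the import path are assumptions; the method bodies come straight from the diff:

    # Sketch of the gateway after this patch (reassembled, not copied
    # from the tree).  It now maps string keys to string oids instead
    # of keychains.
    from apelib.core.schemas import RowSequenceSchema  # assumed path

    class ExampleGateway:
        schema = RowSequenceSchema()
        schema.addField('key', 'string', 1)
        schema.addField('oid', 'string')

        def __init__(self, oids=None):
            if oids is None:
                oids = {}
            self.oids = oids

        def set(self, key, oid):
            self.oids[key] = oid

        def load(self, event):
            items = self.oids.items()
            items.sort()        # stable ordering for hash comparison
            return items, None

        def getPollSources(self, event):
            return None         # nothing to poll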
=== Products/Ape/lib/apelib/zodb3/scanner.py 1.2 => 1.2.2.1 ===
--- Products/Ape/lib/apelib/zodb3/scanner.py:1.2 Wed Jul 30 17:33:12 2003
+++ Products/Ape/lib/apelib/zodb3/scanner.py Wed Dec 17 23:43:54 2003
@@ -122,7 +122,7 @@
self.storage = None
def setStorage(self, s):
- # This is needed for calling storage.getSources().
+ # This is needed for calling storage.getPollSources().
self.storage = s
def setOIDs(self, oids):
@@ -149,7 +149,7 @@
LOG('Ape', DEBUG, 'Getting sources for %d oids.'
% len(new_sources))
for oid in new_sources.keys():
- new_sources[oid] = self.storage.getSources(oid)
+ new_sources[oid] = self.storage.getPollSources(oid)
else:
LOG('Ape', DEBUG, "Can't get sources for %d oids. "
"Assuming no sources!" % len(new_sources))
@@ -169,7 +169,7 @@
self.lock.release()
- def setSources(self, oid, sources):
+ def setPollSources(self, oid, sources):
if sources is None:
sources = {}
self.lock.acquire()
@@ -206,7 +206,7 @@
self.lock.release()
changes = {}
for repo, d in to_scan.items():
- c = repo.freshen(d)
+ c = repo.poll(d)
if c:
changes.update(c)
if changes:
@@ -256,7 +256,7 @@
to_scan.setdefault(repo, {})[source] = state
changes = {}
for repo, d in to_scan.items():
- c = repo.freshen(d)
+ c = repo.poll(d)
if c:
changes.update(c)
for oid, sources in t.items():
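With freshen() renamed to poll(), the scanner's polling loop reads as below. This is condensed from the two hunks above; the locking, the exact shape of the sources mapping, and the rest of the bookkeeping are assumptions:

    # Group watched sources by repository, then poll each repository
    # once per batch.  poll() returns new states for changed sources.
    to_scan = {}    # repository -> {source: last_known_state}
    for (repo, source), state in sources.items():
        to_scan.setdefault(repo, {})[source] = state

    changes = {}
    for repo, d in to_scan.items():
        c = repo.poll(d)
        if c:
            changes.update(c)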
=== Products/Ape/lib/apelib/zodb3/serializers.py 1.3 => 1.3.2.1 ===
--- Products/Ape/lib/apelib/zodb3/serializers.py:1.3 Tue Sep 16 17:00:07 2003
+++ Products/Ape/lib/apelib/zodb3/serializers.py Wed Dec 17 23:43:54 2003
@@ -35,38 +35,35 @@
class BasicPersistentMapping:
"""Basic PersistentMapping (de)serializer
- This version assumes the PM maps string keys to object references.
+ This version assumes the PM maps string keys to first-class
+ persistent objects.
"""
__implements__ = ISerializer
schema = RowSequenceSchema()
schema.addField('key', 'string', 1)
- schema.addField('keychain', 'keychain')
+ schema.addField('oid', 'string')
- def getSchema(self):
- return self.schema
+ def canSerialize(self, obj):
+ return isinstance(obj, PersistentMapping)
- def canSerialize(self, object):
- return isinstance(object, PersistentMapping)
-
- def serialize(self, obj, event):
- assert self.canSerialize(obj)
+ def serialize(self, event):
+ assert self.canSerialize(event.obj)
res = []
- for key, value in obj.items():
- keychain = event.identifyObject(value)
- if keychain is None:
- keychain = event.makeKeychain(key, 1)
- event.notifySerializedRef(key, value, 0, keychain)
- res.append((key, keychain))
- event.ignoreAttribute('data')
- event.ignoreAttribute('_container')
+ for key, value in event.obj.items():
+ oid = event.obj_db.identify(value)
+ if oid is None:
+ oid = event.conf.oid_gen.new_oid(event, key, True)
+ event.referenced(key, value, False, oid)
+ res.append((key, oid))
+ event.ignore(('data', '_container'))
return res
def deserialize(self, obj, event, state):
assert self.canSerialize(obj)
data = {}
- for (key, keychain) in state:
- value = event.dereference(key, keychain)
+ for (key, oid) in state:
+ value = event.resolve(key, oid)
data[key] = value
obj.__init__(data)
@@ -78,49 +75,42 @@
"""
__implements__ = ISerializer
+ schema = None # No storage
def __init__(self):
- # map: { name -> (keychain, mapper) }
+ # map: { name -> (oid, mapper) }
self.map = {}
- def add(self, name, keychain, mapper_names=None):
- self.map[name] = (keychain, mapper_names)
+ def add(self, name, oid, mapper_name=None):
+ self.map[name] = (oid, mapper_name)
- def getSchema(self):
- return None # No storage
+ def canSerialize(self, obj):
+ return isinstance(obj, PersistentMapping)
- def canSerialize(self, object):
- return isinstance(object, PersistentMapping)
-
- def serialize(self, object, event):
- names = object.keys()
+ def serialize(self, obj, event):
+ names = obj.keys()
names.sort()
expected = self.map.keys()
expected.sort()
assert names == expected, '%s != %s' % (names, expected)
-
for name in names:
- keychain, mapper_names = self.map[name]
- subob = object[name]
- event.notifySerializedRef(name, subob, 0, keychain)
-
- # One of the two will work. ;-)
- event.ignoreAttribute('data')
- event.ignoreAttribute('_container')
-
+ oid, mapper_name = self.map[name]
+ subob = obj[name]
+ event.referenced(name, subob, False, oid)
+ event.ignore(('data', '_container'))
- def deserialize(self, object, event, state):
+ def deserialize(self, obj, event, state):
assert state is None
data = {}
- for name, (keychain, mapper_names) in self.map.items():
- subob = event.dereference(name, keychain,
- {'mapper_names': mapper_names})
+ for name, (oid, mapper_name) in self.map.items():
+ subob = event.resolve(
+ name, oid, {'mapper_name': mapper_name})
data[name] = subob
# The PersistentMapping doesn't have its data or _container
# attribute yet, and we don't know what its name should be
# since PersistentMapping's internal structure is not fixed.
# So call the PersistentMapping's constructor.
- object.__init__(data)
+ obj.__init__(data)
class RollCall:
@@ -129,30 +119,28 @@
Designed for debugging purposes.
"""
__implements__ = ISerializer
+ schema = None # No storage
- def getSchema(self):
- return None # No storage
-
- def canSerialize(self, object):
+ def canSerialize(self, obj):
return 1
- def serialize(self, object, event):
+ def serialize(self, obj, event):
assert IFullSerializationEvent.isImplementedBy(event)
attrs = event.getSerializedAttributeNames()
attrs_map = {}
for attr in attrs:
attrs_map[attr] = 1
missed = []
- for k in object.__dict__.keys():
+ for k in obj.__dict__.keys():
if not k.startswith('_v_') and not attrs_map.has_key(k):
missed.append(repr(k))
if missed:
raise SerializationError(
'Attribute(s) %s of object at %s not serialized' %
- (', '.join(missed), repr(event.getKeychain())))
+ (', '.join(missed), repr(event.getOid())))
return None
- def deserialize(self, object, event, state):
+ def deserialize(self, obj, event, state):
assert state is None
@@ -163,26 +151,23 @@
schema = FieldSchema('data', 'string')
- def getSchema(self):
- return self.schema
-
- def canSerialize(self, object):
+ def canSerialize(self, obj):
try:
- return isinstance(object, Persistent)
+ return isinstance(obj, Persistent)
except TypeError:
# XXX Python 2.1 thinks Persistent is not a class
return 0
- def serialize(self, object, event):
+ def serialize(self, obj, event):
assert IFullSerializationEvent.isImplementedBy(event)
- assert isinstance(object, Persistent)
+ assert isinstance(obj, Persistent)
# Allow pickling of cyclic references to the object.
- event.notifySerialized('self', object, 0)
+ event.serialized('self', obj, False)
# Ignore previously serialized attributes
- state = object.__dict__.copy()
+ state = obj.__dict__.copy()
for key in state.keys():
if key.startswith('_v_'):
del state[key]
@@ -197,9 +182,9 @@
p = Pickler(outfile)
unmanaged = []
- def persistent_id(ob, getInternalRef=event.getInternalRef,
+ def persistent_id(ob, identifyInternal=event.identifyInternal,
unmanaged=unmanaged):
- ref = getInternalRef(ob)
+ ref = identifyInternal(ob)
if ref is None:
if hasattr(ob, '_p_oid'):
# Persistent objects that end up in the remainder
@@ -237,8 +222,8 @@
raise RuntimeError(
'Unable to pickle the %s attribute, %s, '
'of %s at %s. %s.' % (
- repr(attrname), repr(attrvalue), repr(object),
- repr(event.getKeychain()), str(exc)))
+ repr(attrname), repr(attrvalue), repr(obj),
+ repr(event.oid), str(exc)))
else:
# Couldn't help.
raise
@@ -246,41 +231,42 @@
p.persistent_id = lambda ob: None # Stop recording references
p.dump(unmanaged)
s = outfile.getvalue()
- event.addUnmanagedPersistentObjects(unmanaged)
+ event.upos.extend(unmanaged)
return s
- def deserialize(self, object, event, state):
+ def deserialize(self, obj, event, state):
assert IFullDeserializationEvent.isImplementedBy(event)
- assert isinstance(object, Persistent)
+ assert isinstance(obj, Persistent)
- # Set up to recover cyclic references to the object.
- event.notifyDeserialized('self', object)
+ # Set up to resolve cyclic references to the object.
+ event.deserialized('self', obj)
if state:
infile = StringIO(state)
u = Unpickler(infile)
- u.persistent_load = event.loadInternalRef
+ u.persistent_load = event.resolveInternal
s = u.load()
- object.__dict__.update(s)
+ obj.__dict__.update(s)
try:
unmanaged = u.load()
except EOFError:
# old pickle with no list of unmanaged objects
pass
else:
- event.addUnmanagedPersistentObjects(unmanaged)
+ event.upos.extend(unmanaged)
class ModTimeAttribute:
- """Sets the _p_mtime attribute."""
+ """Sets the _p_mtime attribute.
+
+ XXX Due to a ZODB limitation, this class has to set the _p_mtime
+ by setting _p_serial.
+ """
__implements__ = ISerializer
schema = FieldSchema('mtime', 'int')
-
- def getSchema(self):
- return self.schema
def canSerialize(self, obj):
try:
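Two API shifts run through all of the serializer hunks above: getSchema() methods are replaced by a plain schema class attribute (None meaning "no storage"), and BasicPersistentMapping now receives only the event, reading the target object from event.obj. Condensed into one sketch (RefMapSerializer is a stand-in name; the event attributes are taken from the BasicPersistentMapping hunks):

    class RefMapSerializer:
        __implements__ = ISerializer
        schema = RowSequenceSchema()        # was: def getSchema(self)
        schema.addField('key', 'string', 1)
        schema.addField('oid', 'string')    # oids are plain strings now

        def serialize(self, event):
            res = []
            for key, value in event.obj.items():
                oid = event.obj_db.identify(value)  # already stored?
                if oid is None:                     # no: allocate an oid
                    oid = event.conf.oid_gen.new_oid(event, key, True)
                event.referenced(key, value, False, oid)
                res.append((key, oid))
            return res

        def deserialize(self, obj, event, state):
            data = {}
            for key, oid in state:
                data[key] = event.resolve(key, oid)  # was: dereference
            obj.__init__(data)

Note that the other serializers in this file (FixedPersistentMapping, RollCall, RemainingState) still take (obj, event) in this commit.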
=== Products/Ape/lib/apelib/zodb3/storage.py 1.8 => 1.8.2.1 ===
--- Products/Ape/lib/apelib/zodb3/storage.py:1.8 Tue Sep 16 17:00:07 2003
+++ Products/Ape/lib/apelib/zodb3/storage.py Wed Dec 17 23:43:54 2003
@@ -11,7 +11,7 @@
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
-"""Storage implementation that loads/stores using a mapper.
+"""Storage implementation that loads/stores using Ape mappers.
$Id$
"""
@@ -23,30 +23,21 @@
from ZODB import POSException, BaseStorage
from apelib.core.io import GatewayIO
-from apelib.core.exceptions import NoStateFoundError, ConfigurationError
from consts import HASH0, HASH1, DEBUG
-from oidencoder import OIDEncoder
-from interfaces import IResourceAccess, IOIDEncoder
+from interfaces import IResourceAccess
class ApeStorage(BaseStorage.BaseStorage):
- def __init__(self, mapper_resource, connections,
- oid_encoder=None, name='', clear_all=0):
+ def __init__(self, conf_resource, connections, name='', clear_all=0):
"""Initializes an ApeStorage.
- mapper_resource is a resource for loading the mapper.
+ conf_resource is a resource for loading the IMapperConfiguration.
connections is a mapping that maps names to ITPCConnections.
- oid_encoder is an IOIDEncoder.
"""
- assert IResourceAccess.isImplementedBy(mapper_resource)
- self._mapper_resource = mapper_resource
- if oid_encoder is None:
- oid_encoder = OIDEncoder()
- else:
- assert IOIDEncoder.isImplementedBy(oid_encoder)
- self._oid_encoder = oid_encoder
- gwio = GatewayIO(mapper_resource.access(self), connections)
+ assert IResourceAccess.isImplementedBy(conf_resource)
+ self._conf_resource = conf_resource
+ gwio = GatewayIO(conf_resource.access(self), connections)
self._gwio = gwio
self._conn_list = gwio.getConnectionList()
gwio.openConnections()
@@ -76,9 +67,6 @@
def sortKey(self):
return self._sort_key
- def getMapperResource(self):
- return self._mapper_resource
-
def initDatabases(self, clear_all=0):
self._gwio.initDatabases(clear_all=clear_all)
@@ -102,9 +90,8 @@
raise POSException.Unsupported, "Versions aren't supported"
self._lock_acquire()
try:
- self._mapper_resource.access(self) # Update mapper
- keychain = self._oid_encoder.decode(oid)
- event, classified_state, hash_value = self._gwio.load(keychain)
+ self._conf_resource.access(self) # Update configuration
+ event, classified_state, hash_value = self._gwio.load(oid)
file = StringIO()
p = Pickler(file)
p.dump(classified_state)
@@ -113,9 +100,8 @@
if DEBUG:
print 'loaded', `oid`, `h`
if self._scanner is not None:
- gw = event.getMapper().getGateway()
- sources = gw.getSources(event)
- self._scanner.setSources(oid, sources)
+ sources = event.mapper.gateway.getPollSources(event)
+ self._scanner.setPollSources(oid, sources)
return data, h
finally:
self._lock_release()
@@ -129,46 +115,39 @@
self._lock_acquire()
try:
- self._mapper_resource.access(self) # Update mapper
- keychain = self._oid_encoder.decode(oid)
+ self._conf_resource.access(self) # Update configuration
# First detect conflicts.
# The "h64" argument, if its value is not 0,
# was previously generated by hash64().
if DEBUG:
print 'storing', `oid`, `h64`
- if h64 != HASH0:
+ if h64 == HASH0:
+ # Writing a new object. Ask the gateway not to
+ # overwrite existing data.
+ overwrite = False
+ else:
# Overwriting an old object. Use the hash to verify
# that the new data was derived from the old data.
- event, old_cs, old_hash = self._gwio.load(keychain)
+ # If the test passes, allow the gateway to overwrite
+ # the existing data.
+ overwrite = True
+ event, old_cs, old_hash = self._gwio.load(oid)
old_h64 = self.hash64(old_hash)
if h64 != old_h64:
raise POSException.ConflictError(
"Storing %s based on old data. %s != %s" % (
- repr(keychain),
+ repr(oid),
repr(h64), repr(old_h64)))
- else:
- # A new object. Attempts to load should lead to
- # NoStateFoundError or a hash of None, otherwise
- # there's a conflict.
- try:
- event, cs, old_hash = self._gwio.load(keychain)
- except NoStateFoundError:
- pass
- else:
- if old_hash is not None:
- raise POSException.ConflictError(
- "%s already exists" % repr(keychain))
# Now unpickle and store the data.
file = StringIO(data)
u = Unpickler(file)
classified_state = u.load()
- event, new_hash = self._gwio.store(keychain, classified_state)
+ event, new_hash = self._gwio.store(oid, classified_state, overwrite)
new_h64 = self.hash64(new_hash)
if self._scanner is not None:
- gw = event.getMapper().getGateway()
- sources = gw.getSources(event)
+ sources = event.mapper.gateway.getPollSources(event)
self._scanner.setUncommittedSources(self._serial, oid, sources)
finally:
self._lock_release()
@@ -177,17 +156,15 @@
print 'stored', `oid`, `h64`, `new_h64`
return new_h64
- def getSources(self, oid):
- keychain = self._oid_encoder.decode(oid)
+ def getPollSources(self, oid):
self._lock_acquire()
try:
- return self._gwio.getSources(keychain)
+ return self._gwio.getPollSources(oid)
finally:
self._lock_release()
def new_oid(self):
- keychain = self._gwio.newKeychain()
- return self._oid_encoder.encode(keychain)
+ return self._gwio.new_oid()
def lastTransaction(self):
return self._ltid
@@ -227,5 +204,5 @@
def close(self):
for c in self._conn_list:
c.close()
- self._mapper_resource.release(self)
+ self._conf_resource.release(self)
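The store() change above replaces the old try/except NoStateFoundError dance with an explicit overwrite flag passed to the gateway. Condensed from the hunks (only the conflict check is shown):

    # HASH0 is the sentinel h64 value meaning the client never loaded
    # this oid, i.e. it is storing a brand-new object.
    if h64 == HASH0:
        overwrite = False   # gateway must not clobber existing data
    else:
        # Verify the new data was derived from the old data.
        overwrite = True
        event, old_cs, old_hash = self._gwio.load(oid)
        if h64 != self.hash64(old_hash):
            raise POSException.ConflictError(
                "Storing %s based on old data." % repr(oid))
    event, new_hash = self._gwio.store(oid, classified_state, overwrite)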