[Zope-CVS] CVS: Products/Ape/lib/apelib/core - oidgen.py:1.1.2.1
events.py:1.6.2.2 gateways.py:1.7.2.2 interfaces.py:1.9.2.2
io.py:1.6.2.2 mapper.py:1.4.4.1 schemas.py:1.4.4.1
serializers.py:1.4.2.1 keygen.py:NONE
Shane Hathaway
shane at zope.com
Sat Dec 13 23:25:17 EST 2003
Update of /cvs-repository/Products/Ape/lib/apelib/core
In directory cvs.zope.org:/tmp/cvs-serv28325/core
Modified Files:
Tag: ape-0_8-branch
events.py gateways.py interfaces.py io.py mapper.py schemas.py
serializers.py
Added Files:
Tag: ape-0_8-branch
oidgen.py
Removed Files:
Tag: ape-0_8-branch
keygen.py
Log Message:
Continued refactoring to fit new names.
=== Added File Products/Ape/lib/apelib/core/oidgen.py ===
##############################################################################
#
# Copyright (c) 2003 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Standard oid generators.
$Id: oidgen.py,v 1.1.2.1 2003/12/14 04:24:46 shane Exp $
"""
from apelib.core.interfaces import IOIDGenerator, MappingError
class NullOIDGenerator:
    """OID generator that disallows the creation of new OIDs.

    Intended for configurations whose mappers must never mint OIDs;
    any attempt to do so is reported as a mapping error.
    """
    __implements__ = IOIDGenerator

    def new_oid(self, event, name, stored):
        """Always refuse to generate an OID.

        Raises MappingError unconditionally, regardless of the event,
        name, or stored flag.
        """
        raise MappingError("Null OID generator")
class PathOIDGenerator:
    """OID generator that builds filesystem-like path OIDs.

    A new OID is the parent's OID (taken from ``event.oid``) joined
    with the subobject's name using ``'/'`` as the separator.
    """
    __implements__ = IOIDGenerator

    def new_oid(self, event, name, stored):
        """Return the path OID for *name* under the parent ``event.oid``.

        Raises MappingError if no name is given (path OIDs cannot be
        anonymous) and ValueError if the name itself contains a slash,
        which would corrupt the path structure.
        """
        if name is None:
            raise MappingError('Path OIDs require a name')
        if '/' in name:
            # Use exception call syntax, consistent with the raises
            # above and in the rest of this module (the old
            # "raise ValueError, msg" statement form is deprecated).
            raise ValueError('%s is not a legal name' % name)
        parent = event.oid  # parent OID
        if parent.endswith('/'):
            # Parent is the root (or already slash-terminated):
            # append directly to avoid a doubled slash.
            return parent + name
        return '%s/%s' % (parent, name)
=== Products/Ape/lib/apelib/core/events.py 1.6.2.1 => 1.6.2.2 ===
--- Products/Ape/lib/apelib/core/events.py:1.6.2.1 Sat Dec 13 12:08:06 2003
+++ Products/Ape/lib/apelib/core/events.py Sat Dec 13 23:24:46 2003
@@ -34,8 +34,7 @@
__implements__ = interfaces.IDatabaseInitEvent
- def __init__(self, connection, connections, clear_all):
- self.connection = connection
+ def __init__(self, connections, clear_all):
self.connections = connections
self.clear_all = clear_all
@@ -53,9 +52,8 @@
__implements__ = interfaces.IGatewayEvent
- def __init__(self, mapper, oid, connection, connections, classification):
+ def __init__(self, mapper, oid, connections, classification):
MapperEvent.__init__(self, mapper, oid)
- self.connection = connection
self.connections = connections
self.classification = classification
=== Products/Ape/lib/apelib/core/gateways.py 1.7.2.1 => 1.7.2.2 ===
--- Products/Ape/lib/apelib/core/gateways.py:1.7.2.1 Sat Dec 13 12:08:06 2003
+++ Products/Ape/lib/apelib/core/gateways.py Sat Dec 13 23:24:46 2003
@@ -18,8 +18,7 @@
import time
-from interfaces import IGateway
-import exceptions
+from interfaces import IGateway, NoStateFoundError
class CompositeGateway:
@@ -115,7 +114,7 @@
try:
return self.data[event.oid]
except KeyError:
- raise exceptions.NoStateFoundError(event.oid)
+ raise NoStateFoundError(event.oid)
def store(self, event, data):
h = time.time()
=== Products/Ape/lib/apelib/core/interfaces.py 1.9.2.1 => 1.9.2.2 ===
--- Products/Ape/lib/apelib/core/interfaces.py:1.9.2.1 Sat Dec 13 12:08:06 2003
+++ Products/Ape/lib/apelib/core/interfaces.py Sat Dec 13 23:24:46 2003
@@ -104,7 +104,7 @@
class IDatabaseInitEvent (Interface):
"""Interface for events involved in initializing databases."""
- connection = Attribute(description="The current database connection")
+ connections = Attribute(description="A mapping of database connections")
clear_all = Attribute(
description="""True if the database is to be cleared.
@@ -124,11 +124,6 @@
class IGatewayEvent (IMapperEvent):
"""Interface for events used by gateways."""
-# TODO: figure out how to set the connection attribute even when
-# multiple connections exist. Resume work in io.py.
-
- connection = Attribute(description="The current database connection")
-
connections = Attribute(description="A mapping of database connections")
classification = Attribute(description="The classification of the object.")
@@ -381,8 +376,8 @@
'extension' - matches a filename extension
'generic' - matches when no other condition is met. The
generic types depend on the classifier, but
- usually include 'file', 'directory', 'fileish_object',
- and 'folderish_object'.
+ usually include 'file', 'directory', 'file_object',
+ and 'folder_object'.
"""
@@ -413,22 +408,11 @@
class IMapper (Interface):
"""A hub for mapping a certain kind of object.
"""
-
serializer = Attribute(description="The IObjectSerializer for this mapper")
gateway = Attribute(description="The IGateway for this mapper")
- classifier = Attribute(
- description="""The IClassifier for this mapper.
-
- If this mapper references no other OIDs, this may be None.""")
-
- oid_gen = Attribute(
- description="""The IOIDGenerator for this mapper.
-
- If this mapper references no other OIDs, this may be None.""")
-
- initializers = Attribute(description="A list of IDatabaseInitializers.")
+ initializers = Attribute(description="A list of IDatabaseInitializers")
class IConfigurableMapper (IMapper):
=== Products/Ape/lib/apelib/core/io.py 1.6.2.1 => 1.6.2.2 ===
--- Products/Ape/lib/apelib/core/io.py:1.6.2.1 Sat Dec 13 12:08:06 2003
+++ Products/Ape/lib/apelib/core/io.py Sat Dec 13 23:24:46 2003
@@ -35,12 +35,19 @@
self.mapper_name = mapper_name
+class MapperConfiguration:
+ def __init__(self, mappers, classifier, oid_gen):
+ self.mappers = mappers
+ self.classifier = classifier
+ self.oid_gen = oid_gen
+
+
class GatewayIO:
"""Gateway operations facade."""
- def __init__(self, mappers, connections):
- self._mappers = mappers
- self._conn_map = connections
+ def __init__(self, conf, connections):
+ self.conf = conf
+ self.conn_map = connections
# Sort the connections by sort key. Use an extra index to avoid
# using connections as sort keys.
items = [] # [(sort_key, index, conn)]
@@ -54,12 +61,12 @@
conn_list = []
for sort_key, index, c in items:
conn_list.append(c)
- self._conn_list = conn_list
+ self.conn_list = conn_list
def openConnections(self):
try:
opened = []
- for c in self._conn_list:
+ for c in self.conn_list:
c.connect()
opened.append(c)
except:
@@ -68,240 +75,180 @@
raise
def closeConnections(self):
- for conn in self._conn_list:
+ for conn in self.conn_list:
conn.close()
def getConnectionList(self):
- return self._conn_list
+ return self.conn_list
def getConnectionMap(self):
- return self._conn_map
-
+ return self.conn_map
def initDatabases(self, clear_all=0):
"""Creates tables, etc.
"""
# Find all initializers, eliminating duplicates.
initializers = {} # obj -> 1
- for mapper in self._mappers.values():
+ for mapper in self.conf.mappers.values():
for obj in mapper.getInitializers():
initializers[obj] = 1
- todo = [self._root_mapper]
- while todo:
- mapper = todo.pop()
- sub = mapper.listSubMapperNames()
- if sub:
- for name in sub:
- m = mapper.getSubMapper(name)
- todo.append(m)
# Now call them.
- event = DatabaseInitEvent(self._conn_map, clear_all)
+ event = DatabaseInitEvent(self.conn_map, clear_all)
for initializer in initializers.keys():
initializer.init(event)
-
- def classifyState(self, keychain):
- mapper = self._root_mapper
- mapper_names = []
- # Follow the keychain to find the right mapper.
- classification = None
- for i in range(len(keychain)):
- k = keychain[:i + 1]
- cfr = mapper.getClassifier()
- assert cfr is not None, keychain
- event = LoadEvent(mapper, k, self._conn_map, classification)
- classification, sub_mapper_name = cfr.classifyState(event)
- mapper_names.append(sub_mapper_name)
- mapper = mapper.getSubMapper(sub_mapper_name)
- return classification, mapper_names, mapper
-
-
- def load(self, keychain):
- classification, mapper_names, mapper = self.classifyState(keychain)
- event = LoadEvent(mapper, keychain, self._conn_map, classification)
- state, hash_value = mapper.getGateway().load(event)
- cs = ClassifiedState(state, classification, mapper_names)
+ def classifyState(self, oid):
+ event = LoadEvent(None, oid, self.conn_map, None)
+ # Return (classification, mapper_name)
+ return self.conf.classifier.classifyState(event)
+
+ def load(self, oid):
+ classification, mapper_names = self.classifyState(oid)
+ mapper = self.conf.mappers[mapper_name]
+ event = LoadEvent(mapper, oid, self.conn_map, classification)
+ state, hash_value = mapper.gateway.load(event)
+ cs = ClassifiedState(state, classification, mapper_name)
return event, cs, hash_value
-
- def store(self, keychain, classified_state):
- classification = classified_state.classification
- mapper_names = classified_state.mapper_names
- assert len(keychain) == len(mapper_names)
- mapper = self._root_mapper
- prev_mapper = mapper
- for mapper_name in mapper_names:
- prev_mapper = mapper
- mapper = mapper.getSubMapper(mapper_name)
- cfr = prev_mapper.getClassifier()
- event = StoreEvent(mapper, keychain, self._conn_map, classification)
- new_hash = mapper.getGateway().store(event, classified_state.state)
- if cfr is not None:
- cfr.store(event, classification)
+ def store(self, oid, classified_state):
+ mapper = self.conf.mappers[classified_state.mapper_name]
+ event = StoreEvent(mapper, oid, self.conn_map,
+ classified_state.classification)
+ new_hash = mapper.gateway.store(event, classified_state.state)
+ self.conf.classifier.store(event, classification)
return event, new_hash
+ def getPollSources(self, oid):
+ classification, mapper_names = self.classifyState(oid)
+ event = LoadEvent(mapper, oid, self.conn_map, classification)
+ return mapper.gateway.getPollSources(event)
+
+ def new_oid(self):
+ event = GatewayEvent(None, None, self.conn_map, None)
+ return self.oid_gen.new_oid(event, None, 1)
- def getSources(self, keychain):
- classification, mapper_names, mapper = self.classifyState(keychain)
- event = LoadEvent(mapper, keychain, self._conn_map, classification)
- return mapper.getGateway().getSources(event)
-
-
- def newKeychain(self):
- # Try to use the root keychain generator to make a keychain.
- kgen = self._root_mapper.getKeychainGenerator()
- event = GatewayEvent(self._root_mapper, (), self._conn_map, None)
- return kgen.makeKeychain(event, None, 1)
class ObjectSystemIO:
"""Object system (de)serialization facade."""
- def __init__(self, root_mapper, kos):
- self._root_mapper = root_mapper
- self._kos = kos
-
-
- def classifyObject(self, obj, keychain):
- mapper = self._root_mapper
- mapper_names = []
- classification = None
- if keychain:
- # Classify the parents first to discover what mapper to
- # use for storage.
- for i in range(1, len(keychain)):
- k = keychain[:i]
- o = self._kos.getObject(k)
- cfr = mapper.getClassifier()
- classification, sub_mapper_name = cfr.classifyObject(o, k)
- mapper_names.append(sub_mapper_name)
- mapper = mapper.getSubMapper(sub_mapper_name)
- # Now classify the object being stored.
- cfr = mapper.getClassifier()
- classification, sub_mapper_name = cfr.classifyObject(obj, keychain)
- mapper_names.append(sub_mapper_name)
- mapper = mapper.getSubMapper(sub_mapper_name)
- return classification, mapper_names, mapper
-
-
- def serialize(self, keychain, obj):
- classification, mapper_names, mapper = self.classifyObject(
- obj, keychain)
- # Now serialize.
- ser = mapper.getSerializer()
- event = SerializationEvent(self._kos, mapper, keychain, obj)
- state = ser.serialize(obj, event)
- cs = ClassifiedState(state, classification, mapper_names)
+ def __init__(self, conf, obj_db):
+ self.conf = conf
+ self.obj_db = obj_db
+
+ def classifyObject(self, obj, oid):
+ event = SerializationEvent(self.obj_db, None, oid, obj)
+ # Returns (classification, mapper_name)
+ return self.conf.classifier.classifyObject(event)
+
+ def serialize(self, oid, obj):
+ classification, mapper_name = self.classifyObject(obj, oid)
+ ser = self.conf.mappers[mapper_name].serializer
+ event = SerializationEvent(self.obj_db, mapper, oid, obj)
+ state = ser.serialize(event)
+ cs = ClassifiedState(state, classification, mapper_name)
return event, cs
-
- def deserialize(self, keychain, obj, classified_state):
- mapper = self._root_mapper
- assert len(keychain) == len(classified_state.mapper_names)
- for mapper_name in classified_state.mapper_names:
- mapper = mapper.getSubMapper(mapper_name)
- ser = mapper.getSerializer()
- event = DeserializationEvent(self._kos, mapper, keychain, obj)
+ def deserialize(self, oid, obj, classified_state):
+ ser = self.conf.mappers[classified_state.mapper_name].serializer
+ event = DeserializationEvent(self.obj_db, mapper, keychain, obj)
ser.deserialize(obj, event, classified_state.state)
return event
-
def newObject(self, classified_state):
- mapper = self._root_mapper
- for mapper_name in classified_state.mapper_names:
- mapper = mapper.getSubMapper(mapper_name)
- ser = mapper.getSerializer()
+ ser = self.conf.mappers[classified_state.mapper_name].serializer
return ser.createEmptyInstance(
- self._kos, classification=classified_state.classification)
+ self.obj_db, classification=classified_state.classification)
class ExportImport:
"""Simple import/export facade.
"""
-
__implements__ = IObjectDatabase
- def __init__(self, root_mapper, connections, class_factory=None):
- self._objects = {} # { keychain -> obj }
- self._keychains = {} # { id(obj) -> keychain }
- # _incomplete contains the keychains of objects not yet
+ def __init__(self, conf, connections, class_factory=None):
+ self._objects = {} # { oid -> obj }
+ self._oids = {} # { id(obj) -> oid }
+ # _incomplete contains the oids of objects not yet
# imported fully.
- self._incomplete = {} # { keychain -> 1 }
+ self._incomplete = {} # { oid -> 1 }
self._class_factory = class_factory
# Avoid a circular reference by making a weakref proxy
- self.obj_io = ObjectSystemIO(root_mapper, proxy(self))
- self.gw_io = GatewayIO(root_mapper, connections)
+ self.obj_io = ObjectSystemIO(conf, proxy(self))
+ self.gw_io = GatewayIO(conf, connections)
- def _register(self, keychain, obj):
+ def _register(self, oid, obj):
"""Registers obj in the temporary object index.
Returns true if the object was added to the index for the first
time. If the registration conflicts, raises an exception.
"""
is_new = 0
- if self._objects.has_key(keychain):
- if self._objects[keychain] is not obj:
+ if self._objects.has_key(oid):
+ if self._objects[oid] is not obj:
raise ValueError, (
- "Multiple objects for keychain %s" % repr(keychain))
+ "Multiple objects for oid %s" % repr(oid))
else:
- self._objects[keychain] = obj
+ self._objects[oid] = obj
is_new = 1
obj_id = id(obj)
- if self._keychains.has_key(obj_id):
- if self._keychains[obj_id] != keychain:
+ if self._oids.has_key(obj_id):
+ if self._oids[obj_id] != oid:
raise ValueError, (
- "Multiple keychains for object %s" % repr(obj))
+ "Multiple oids for object %s" % repr(obj))
else:
- self._keychains[obj_id] = keychain
+ self._oids[obj_id] = oid
is_new = 1
return is_new
- def exportObject(self, src_obj, dest_keychain=None, deactivate_func=None):
+ def exportObject(self, src_obj, dest_oid=None, deactivate_func=None):
count = 0
- if dest_keychain is None:
- dest_keychain = (self.newKey(),)
- self._register(dest_keychain, src_obj)
- todo = [(dest_keychain, src_obj)]
+ if dest_oid is None:
+ dest_oid = self.new_oid()
+ self._register(dest_oid, src_obj)
+ # Export subobjects.
+ todo = [(dest_oid, src_obj)]
while todo:
- keychain, obj = todo.pop()
- event, classified_state = self.obj_io.serialize(keychain, obj)
+ oid, obj = todo.pop()
+ event, classified_state = self.obj_io.serialize(oid, obj)
count += 1
if deactivate_func is not None:
deactivate_func(obj, count)
- self.gw_io.store(keychain, classified_state)
- ext_refs = event.getExternalRefs()
+ self.gw_io.store(oid, classified_state)
+ ext_refs = event.external
if ext_refs:
- for ext_keychain, ext_obj in ext_refs:
- if self._register(ext_keychain, ext_obj):
- todo.append((ext_keychain, ext_obj))
+ for ext_oid, ext_obj in ext_refs:
+ if self._register(ext_oid, ext_obj):
+ todo.append((ext_oid, ext_obj))
- def importObject(self, src_keychain, dest_obj=None, commit_func=None):
+ def importObject(self, src_oid, dest_obj=None, commit_func=None):
count = 0
if dest_obj is None:
- dest_obj = self.getObject(src_keychain)
+ dest_obj = self.getObject(src_oid)
root_obj = dest_obj
- self._register(src_keychain, dest_obj)
- todo = [(src_keychain, dest_obj)]
+ self._register(src_oid, dest_obj)
+ # Import subobjects.
+ todo = [(src_oid, dest_obj)]
while todo:
- keychain, obj = todo.pop()
- e, classified_state, hash_value = self.gw_io.load(keychain)
- event = self.obj_io.deserialize(keychain, obj, classified_state)
- if self._incomplete.has_key(keychain):
- del self._incomplete[keychain]
+ oid, obj = todo.pop()
+ e, classified_state, hash_value = self.gw_io.load(oid)
+ event = self.obj_io.deserialize(oid, obj, classified_state)
+ if self._incomplete.has_key(oid):
+ del self._incomplete[oid]
count += 1
if commit_func is not None:
commit_func(obj, count)
- ext_refs = event.getExternalRefs()
+ ext_refs = event.external
if ext_refs:
- for ext_keychain, ext_obj in ext_refs:
- if (self._register(ext_keychain, ext_obj)
- or self._incomplete.has_key(ext_keychain)):
- todo.append((ext_keychain, ext_obj))
+ for ext_oid, ext_obj in ext_refs:
+ if (self._register(ext_oid, ext_obj)
+ or self._incomplete.has_key(ext_oid)):
+ todo.append((ext_oid, ext_obj))
return root_obj
@@ -315,27 +262,27 @@
m = __import__(module, {}, {}, ('__doc__',))
return getattr(m, name)
- def getObject(self, keychain, hints=None):
+ def getObject(self, oid, hints=None):
# Should be called only while importing
try:
- return self._objects[keychain]
+ return self._objects[oid]
except KeyError:
# This object has not been loaded yet. Make a stub.
- e, classified_state, hash_value = self.gw_io.load(keychain)
+ e, classified_state, hash_value = self.gw_io.load(oid)
obj = self.obj_io.newObject(classified_state)
# Don't fill in the state yet, to avoid infinite
# recursion. Just register it.
- self._incomplete[keychain] = 1
- self._register(keychain, obj)
+ self._incomplete[oid] = 1
+ self._register(oid, obj)
return obj
loadStub = getObject
def identifyObject(self, obj):
# Normally called only while exporting
- return self._keychains.get(id(obj))
+ return self._oids.get(id(obj))
- def newKey(self):
+ def new_oid(self):
# Should be called only while exporting
- return self.gw_io.newKeychain()[-1]
+ return self.gw_io.new_oid()
=== Products/Ape/lib/apelib/core/mapper.py 1.4 => 1.4.4.1 ===
--- Products/Ape/lib/apelib/core/mapper.py:1.4 Wed Jul 9 11:39:59 2003
+++ Products/Ape/lib/apelib/core/mapper.py Sat Dec 13 23:24:46 2003
@@ -19,7 +19,7 @@
from types import DictType
import interfaces
-from exceptions import ConfigurationError
+from interfaces import ConfigurationError
class Mapper:
@@ -27,63 +27,32 @@
__implements__ = interfaces.IConfigurableMapper
- def __init__(self,
- parent=None,
- serializer=None,
- gateway=None,
- classifier=None,
- kgen=None):
- self._sub_mappers = {}
- self._parent = parent
- self._serializer = serializer
- self._gateway = gateway
- self._classifier = classifier
- self._kgen = kgen
- self._initializers = []
+ def __init__(self, serializer, gateway):
+ self.serializer = serializer
+ self.gateway = gateway
+ self.initializers = []
# IConfigurableMapper implementation
- def setParent(self, p):
- self._parent = p
-
- def setSerializer(self, s):
- self._serializer = s
-
- def setGateway(self, g):
- self._gateway = g
-
- def setClassifier(self, c):
- self._classifier = c
-
- def setKeychainGenerator(self, k):
- self._kgen = k
-
- def addSubMapper(self, name, m=None, replace=0):
- if not replace and self._sub_mappers.has_key(name):
- raise KeyError('mapper name %s already in use' % name)
- if m is None:
- m = Mapper(self)
- self._sub_mappers[name] = m
- return m
-
def addInitializer(self, obj):
- self._initializers.append(obj)
+ self.initializers.append(obj)
- def checkConfiguration(self, path='root', recursive=1):
- s = self._serializer
+ def checkConfiguration(self, my_name):
+ s = self.serializer
if s is None:
raise ConfigurationError(
- 'No serializer configured for mapper %s' % repr(path))
+ 'Mapper %s: No serializer configured' % my_name)
if not interfaces.IFullObjectSerializer.isImplementedBy(s):
raise ConfigurationError(
- 'Not an IFullObjectSerializer: %s' % repr(s))
- g = self._gateway
+ 'Mapper %s: Serializer is not an IFullObjectSerializer'
+ % my_name)
+ g = self.gateway
if g is None:
raise ConfigurationError(
- 'No gateway configured for mapper %s' % repr(path))
+ 'Mapper %s: No gateway configured' % my_name)
if not interfaces.IGateway.isImplementedBy(g):
raise ConfigurationError(
- 'Not an IGateway: %s' % repr(g))
+ 'Mapper %s: Gateway is not an IGateway' % my_name)
if s.getSchema() != g.getSchema():
# Try to show a descriptive error
ss = s.getSchema()
@@ -107,61 +76,4 @@
if msg is None:
msg = '%s != %s' % (ss, gs)
raise ConfigurationError(
- 'Mismatched schemas in mapper "%s": %s' % (path, msg))
- if self._parent is None:
- if self._classifier is None:
- raise ConfigurationError('No root classifier configured')
- if self._kgen is None:
- raise ConfigurationError(
- 'No root keychain generator configured')
- else:
- if not interfaces.IMapper.isImplementedBy(self._parent):
- raise ConfigurationError(
- 'Not an IMapper: %s' % repr(self._parent))
- if (self._classifier is not None
- and not interfaces.IClassifier.isImplementedBy(self._classifier)):
- raise ConfigurationError(
- 'Not an IClassifier: %s' % repr(self._classifier))
- if (self._kgen is not None
- and not interfaces.IKeychainGenerator.isImplementedBy(self._kgen)):
- raise ConfigurationError(
- 'Not an IKeychainGenerator: %s' % repr(self._kgen))
-
- if recursive:
- for n, m in self._sub_mappers.items():
- if not interfaces.IMapper.isImplementedBy(m):
- raise ConfigurationError(
- 'Not an IMapper: %s' % repr(m))
- m.checkConfiguration(('%s/%s' % (path, n)), recursive)
-
- # IMapper implementation
-
- def getSerializer(self):
- return self._serializer
-
- def getGateway(self):
- return self._gateway
-
- def getSubMapper(self, name):
- return self._sub_mappers[name]
-
- def listSubMapperNames(self):
- return self._sub_mappers.keys()
-
- def getClassifier(self):
- if self._classifier is not None:
- return self._classifier
- if self._parent is not None:
- return self._parent.getClassifier()
- return None
-
- def getKeychainGenerator(self):
- if self._kgen is not None:
- return self._kgen
- if self._parent is not None:
- return self._parent.getKeychainGenerator()
- return None
-
- def getInitializers(self):
- return self._initializers
-
+ 'Mapper %s: Mismatched schemas. %s' % (my_name, msg))
=== Products/Ape/lib/apelib/core/schemas.py 1.4 => 1.4.4.1 ===
--- Products/Ape/lib/apelib/core/schemas.py:1.4 Wed Jul 9 11:39:59 2003
+++ Products/Ape/lib/apelib/core/schemas.py Sat Dec 13 23:24:46 2003
@@ -18,8 +18,6 @@
from types import StringType
-from interfaces import IRelationalSchema
-
ok_types = ['unicode', 'string', 'int', 'float', 'bool', 'object',
'classification', 'keychain', 'string:list', 'blob']
@@ -33,7 +31,7 @@
class FieldSchema:
"""Defines the schema of one field."""
- __implements__ = IRelationalSchema
+# __implements__ = IRelationalSchema
def __init__(self, name, type='string', unique=0):
assert type in ok_types, type
@@ -62,7 +60,7 @@
class RowSchema:
"""Defines an ordered set of fields for exactly one row.
"""
- __implements__ = IRelationalSchema
+# __implements__ = IRelationalSchema
def __init__(self, fields=()):
self.fields = []
@@ -101,7 +99,6 @@
class RowSequenceSchema (RowSchema):
"""Defines a schema for a sequence of rows, including row count limits.
"""
- __implements__ = IRelationalSchema
def __init__(self, fields=(), min_rows=0, max_rows=0):
# max_rows == 0 means unlimited.
=== Products/Ape/lib/apelib/core/serializers.py 1.4 => 1.4.2.1 ===
--- Products/Ape/lib/apelib/core/serializers.py:1.4 Wed Jul 30 18:11:40 2003
+++ Products/Ape/lib/apelib/core/serializers.py Sat Dec 13 23:24:46 2003
@@ -19,14 +19,13 @@
from types import StringType
from interfaces import ISerializer, IFullObjectSerializer
-from exceptions import DeserializationError, SerializationError
+from interfaces import DeserializationError, SerializationError
from schemas import FieldSchema
class CompositeSerializer:
"""Full serializer based on partial serializers.
"""
-
__implements__ = IFullObjectSerializer
def __init__(self, module, name, base=None):
@@ -42,6 +41,14 @@
self._part_names.update(base._part_names)
self._parts[:] = base._parts
self._final_parts[:] = base._final_parts
+ self._updateSchema()
+
+ def _updateSchema(self):
+ self.schema = {}
+ for name, serializer in self.getSerializers():
+ s = serializer.schema
+ if s is not None:
+ self.schema[name] = s
def addSerializer(self, name, serializer, force=0, final=0):
if self._part_names.has_key(name):
@@ -53,6 +60,7 @@
else:
self._parts.append((name, serializer))
self._part_names[name] = 1
+ self._updateSchema()
def removeSerializer(self, name):
if not self._part_names.has_key(name):
@@ -63,6 +71,7 @@
del lst[i]
break
del self._part_names[name]
+ self._updateSchema()
def hasSerializer(self, name):
return self._part_names.has_key(name)
@@ -70,34 +79,26 @@
def getSerializers(self):
return self._parts + self._final_parts
- def getSchema(self):
- res = {}
- for name, serializer in self.getSerializers():
- s = serializer.getSchema()
- if s is not None:
- res[name] = s
- return res
-
- def canSerialize(self, object):
- if not hasattr(object, '__class__'):
+ def canSerialize(self, obj):
+ if not hasattr(obj, '__class__'):
return 0
- c = object.__class__
+ c = obj.__class__
return (c.__module__ == self._module and c.__name__ == self._name)
- def serialize(self, object, event):
+ def serialize(self, event):
full_state = {}
- for name, serializer in self.getSerializers():
- event.setSerializerName(name)
- state = serializer.serialize(object, event)
+ for name, s in self.getSerializers():
+ event.serializer_name = name
+ state = s.serialize(event)
if state is not None:
full_state[name] = state
return full_state
- def deserialize(self, object, event, full_state):
- for name, serializer in self.getSerializers():
+ def deserialize(self, event, full_state):
+ for name, s in self.getSerializers():
state = full_state.get(name)
- event.setSerializerName(name)
- serializer.deserialize(object, event, state)
+ event.serializer_name = name
+ s.deserialize(event, state)
def createEmptyInstance(self, class_factory, classification=None):
c = class_factory.getClass(self._module, self._name)
@@ -107,13 +108,12 @@
class AnyObjectSerializer (CompositeSerializer):
"""Full serializer that's not tied to a specific class
"""
-
__implements__ = IFullObjectSerializer
def __init__(self, base=None):
self.init(base)
- def canSerialize(self, object):
+ def canSerialize(self, obj):
return 1
def createEmptyInstance(self, class_factory, classification=None):
@@ -127,7 +127,10 @@
module = cn[:pos]
name = cn[pos + 1:]
c = class_factory.getClass(module, name)
- return c.__basicnew__()
+ if hasattr(c, "__basicnew__"): # ExtensionClass
+ return c.__basicnew__()
+ else:
+ return c.__new__()
class FullState:
@@ -137,17 +140,14 @@
schema = FieldSchema('data', 'object')
- def getSchema(self):
- return self.schema
-
- def canSerialize(self, object):
+ def canSerialize(self, obj):
return 1
- def serialize(self, object, event):
- return object.__getstate__()
+ def serialize(self, event):
+ return event.obj.__getstate__()
- def deserialize(self, object, event, state):
- object.__setstate__(state)
+ def deserialize(self, event, state):
+ event.obj.__setstate__(state)
@@ -159,18 +159,17 @@
def __init__(self, attrname):
self.attrname = attrname
- def getSchema(self):
- return None # No storage
+ schema = None # No storage
- def canSerialize(self, object):
+ def canSerialize(self, obj):
return 1
- def serialize(self, object, event):
- event.ignoreAttribute(self.attrname)
+ def serialize(self, event):
+ event.ignore(self.attrname)
return None
- def deserialize(self, object, event, state):
- assert state is None
+ def deserialize(self, event, state):
+ assert state is None, state
class OptionalSerializer:
@@ -182,27 +181,25 @@
def __init__(self, real, default_state=None):
self._real = real
self._default_state = default_state
+ self.schema = real.schema
- def getSchema(self):
- return self._real.getSchema()
-
- def canSerialize(self, object):
+ def canSerialize(self, obj):
return 1
- def serialize(self, object, event):
- if self._real.canSerialize(object):
- return self._real.serialize(object, event)
+ def serialize(self, event):
+ if self._real.canSerialize(event.obj):
+ return self._real.serialize(event)
else:
return self._default_state
- def deserialize(self, object, event, state):
- if self._real.canSerialize(object):
- self._real.deserialize(object, event, state)
+ def deserialize(self, event, state):
+ if self._real.canSerialize(event.obj):
+ self._real.deserialize(event, state)
else:
if state is not None and state != self._default_state:
raise DeserializationError(
"Optional serializer unable to install state %s into %s" %
- (repr(state), repr(object)))
+ (repr(state), repr(event.obj)))
class StringDataAttribute:
@@ -215,24 +212,21 @@
def __init__(self, attrname):
self.attrname = attrname
- def getSchema(self):
- return self.schema
-
def canSerialize(self, object):
return 1
- def serialize(self, object, event):
+ def serialize(self, event):
attrname = self.attrname
assert attrname
- v = getattr(object, attrname)
+ v = getattr(event.obj, attrname)
assert isinstance(v, StringType)
- event.notifySerialized(attrname, v, 1)
+ event.serialized(attrname, v, 1)
return v
- def deserialize(self, object, event, state):
+ def deserialize(self, event, state):
attrname = self.attrname
assert attrname
assert isinstance(state, StringType)
- setattr(object, attrname, state)
- event.notifyDeserialized(attrname, state)
+ setattr(event.obj, attrname, state)
+ event.deserialized(attrname, state)
=== Removed File Products/Ape/lib/apelib/core/keygen.py ===
More information about the Zope-CVS
mailing list