[Zope-CVS] CVS: Products/AdaptableStorage/zodb - ExportImport.py:1.1 ASConnection.py:1.11
Shane Hathaway
shane@zope.com
Wed, 25 Dec 2002 00:27:36 -0500
Update of /cvs-repository/Products/AdaptableStorage/zodb
In directory cvs.zope.org:/tmp/cvs-serv13179/zodb
Modified Files:
ASConnection.py
Added Files:
ExportImport.py
Log Message:
Got copy/paste working using a patch. Also attempted ZEXP import/export, but
it's a bugger to get right, so I added a module with my experimental code.
Merry Christmas!
=== Added File Products/AdaptableStorage/zodb/ExportImport.py ===
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Experimental code for ZEXP export/import.
$Id: ExportImport.py,v 1.1 2002/12/25 05:27:36 shane Exp $
"""
from __future__ import nested_scopes
##from ZODB.Connection import ExtensionKlass
##from ZODB.utils import p64
##from tempfile import TemporaryFile
## def _exportObject(self, ob, new_oid, pickler, buffer, file):
## """Exports in ZEXP format."""
## klass = ob.__class__
## if klass is ExtensionKlass:
## # Yee Ha!
## dict={}
## dict.update(ob.__dict__)
## del dict['_p_jar']
## args=ob.__name__, ob.__bases__, dict
## state=None
## else:
## if hasattr(klass, '__getinitargs__'):
## args = ob.__getinitargs__()
## len(args) # XXX Assert it's a sequence
## else:
## args = None # New no-constructor protocol!
## module=getattr(klass,'__module__','')
## if module: klass=module, klass.__name__
## __traceback_info__=klass, new_oid, self._version
## state=ob.__getstate__()
## buffer.seek(0)
## buffer.truncate()
## pickler.clear_memo()
## pickler.dump((klass, args))
## pickler.dump(state)
## s = buffer.getvalue()
## file.write(new_oid)
## file.write(p64(len(s)))
## file.write(s)
## def exportFile(self, oid, file=None):
## """Export to ZEXP format"""
## if file is None:
## file = TemporaryFile()
## elif type(file) is StringType:
## file = open(file, 'w+b')
## file.write('ZEXP')
## version = self._version
## new_oid = p64(1)
## todo = [oid]
## unmanaged = [] # [(ob, new_oid)]
## translation = {oid: new_oid} # { old_oid -> new_oid }
## done = {} # { old_oid -> 1 }
## next_seq = 2
## last_ghost = None
## def persistent_id(ob):
## _old_oid = getattr(ob, '_p_oid', None)
## if _old_oid is not None:
## _new_oid = translation.get(_old_oid)
## if _new_oid is None:
## _new_oid = p64(next_seq)
## next_seq += 1
## if ob._p_jar is self:
## # managed
## translation[_old_oid] = _new_oid
## if not done.has_key(_old_oid):
## todo.append(_old_oid)
## else:
## # unmanaged
## unmanaged.append((ob, _new_oid))
## return _new_oid
## return None
## buffer = StringIO()
## pickler = Pickler(buffer, 1)
## pickler.persistent_id = persistent_id
## root_mapper = self.getRootMapper()
## while todo:
## old_oid = todo[0]
## del todo[0]
## done[old_oid] = 1
## data, serial = self._storage.load(old_oid, self._version)
## unpickler = Unpickler(StringIO(data))
## classification, mapper_names = unpickler.load()
## state = unpickler.load()
## mapper = root_mapper
## for mapper_name in mapper_names:
## mapper = mapper.getSubMapper(mapper_name)
## ser = mapper.getSerializer()
## ob = ser.createEmptyInstance(classification)
## keychain = self._db._oid_encoder.decode(old_oid)
## # XXX This will make a bunch of unreferenced ghosts
## event = DeserializationEvent(self, mapper, keychain, ob)
## ser.deserialize(ob, event, state)
## self._exportObject(
## ob, translation[old_oid], pickler, buffer, file)
## while unmanaged:
## ob, new_oid = unmanaged.pop()
## self._exportObject(ob, new_oid, pickler, buffer, file)
## file.write(export_end_marker)
## return file
=== Products/AdaptableStorage/zodb/ASConnection.py 1.10 => 1.11 ===
--- Products/AdaptableStorage/zodb/ASConnection.py:1.10 Mon Dec 23 23:29:34 2002
+++ Products/AdaptableStorage/zodb/ASConnection.py Wed Dec 25 00:27:36 2002
@@ -22,7 +22,7 @@
from ZODB import Persistent
from ZODB.Connection import Connection, StringIO, Unpickler, Pickler, \
- ConflictError, ReadConflictError, ExtensionKlass, LOG, ERROR
+ ConflictError, ReadConflictError, LOG, ERROR
from consts import SERIAL0, DEBUG
from serial_public import IKeyedObjectSystem, SerializationEvent, \
@@ -206,73 +206,58 @@
raise ConflictError(object=object)
self._invalidating.append(oid)
- klass = object.__class__
-
- if klass is ExtensionKlass:
- # SDH: not supported.
- raise NotImplementedError, "Unable to store ZClass instances"
- else:
- if hasattr(klass, '__getinitargs__'):
- args = object.__getinitargs__()
- len(args) # XXX Assert it's a sequence
- else:
- args = None # New no-constructor protocol!
-
- module=getattr(klass,'__module__','')
- if module: klass=module, klass.__name__
- __traceback_info__=klass, oid, self._version
- # SDH: hook in the serializer.
- # state=object.__getstate__()
- keychain = self._db._oid_encoder.decode(oid)
- mapper = self.getRootMapper()
- mapper_names = []
- oid_encoder = self._db._oid_encoder
- classification = None
- if keychain:
- # Use classification to discover what mapper to use
- # for storage.
- # classify the parents.
- for i in range(1, len(keychain)):
- k = keychain[:i]
- o = self[oid_encoder.encode(k)]
- cfr = mapper.getClassifier()
- classification, sub_mapper_name = \
- cfr.classifyObject(o, k)
- mapper_names.append(sub_mapper_name)
- mapper = mapper.getSubMapper(sub_mapper_name)
- # Now classify the object being stored.
+ # SDH: hook in the serializer.
+ # state=object.__getstate__()
+ keychain = self._db._oid_encoder.decode(oid)
+ mapper = self.getRootMapper()
+ mapper_names = []
+ oid_encoder = self._db._oid_encoder
+ classification = None
+ if keychain:
+ # Use classification to discover what mapper to use
+ # for storage.
+ # classify the parents.
+ for i in range(1, len(keychain)):
+ k = keychain[:i]
+ o = self[oid_encoder.encode(k)]
cfr = mapper.getClassifier()
- classification, sub_mapper_name = cfr.classifyObject(
- object, keychain)
+ classification, sub_mapper_name = \
+ cfr.classifyObject(o, k)
mapper_names.append(sub_mapper_name)
mapper = mapper.getSubMapper(sub_mapper_name)
+ # Now classify the object being stored.
+ cfr = mapper.getClassifier()
+ classification, sub_mapper_name = cfr.classifyObject(
+ object, keychain)
+ mapper_names.append(sub_mapper_name)
+ mapper = mapper.getSubMapper(sub_mapper_name)
- ser = mapper.getSerializer()
- if DEBUG:
- print 'serializing', repr(oid), repr(serial)
- event = SerializationEvent(self, mapper, keychain, object)
- state = ser.serialize(object, event)
- ext_refs = event.getExternalRefs()
- if ext_refs:
- for (ext_keychain, ext_ref) in ext_refs:
- if (not ext_ref._p_serial
- or ext_ref._p_serial == SERIAL0):
- ext_oid = oid_encoder.encode(ext_keychain)
- if ext_ref._p_jar:
- if ext_ref._p_jar != self:
- raise InvalidObjectReference
- else:
- ext_ref._p_jar = self
- if ext_ref._p_oid:
- if ext_ref._p_oid != ext_oid:
- raise StorageError('Conflicting OIDs')
- else:
- ext_ref._p_oid = ext_oid
- stack.append(ext_ref)
-
- unmanaged = event.getUnmanagedPersistentObjects()
- if unmanaged:
- self.handleUnmanaged(object, unmanaged)
+ ser = mapper.getSerializer()
+ if DEBUG:
+ print 'serializing', repr(oid), repr(serial)
+ event = SerializationEvent(self, mapper, keychain, object)
+ state = ser.serialize(object, event)
+ ext_refs = event.getExternalRefs()
+ if ext_refs:
+ for (ext_keychain, ext_ref) in ext_refs:
+ if (not ext_ref._p_serial
+ or ext_ref._p_serial == SERIAL0):
+ ext_oid = oid_encoder.encode(ext_keychain)
+ if ext_ref._p_jar:
+ if ext_ref._p_jar != self:
+ raise InvalidObjectReference
+ else:
+ ext_ref._p_jar = self
+ if ext_ref._p_oid:
+ if ext_ref._p_oid != ext_oid:
+ raise StorageError('Conflicting OIDs')
+ else:
+ ext_ref._p_oid = ext_oid
+ stack.append(ext_ref)
+
+ unmanaged = event.getUnmanagedPersistentObjects()
+ if unmanaged:
+ self.handleUnmanaged(object, unmanaged)
seek(0)
clear_memo()
@@ -294,7 +279,6 @@
self._handle_serial(s, oid)
-
def setstate(self, object):
oid=object._p_oid
@@ -417,7 +401,7 @@
return keychain[-1]
- ### Volitalile object expiration ###
+ ### Volatile object expiration ###
def invalidateVolatileObjects(self):
"""Requests invalidation of the loaded volatile objects.
@@ -447,6 +431,12 @@
self.invalidateVolatileObjects()
Connection.sync(self)
+
+ def exportFile(self, oid, file=None):
+ raise NotImplementedError, 'ZEXP Export not implemented'
+
+ def importFile(self, file, clue='', customImporters=None):
+ raise NotImplementedError, 'ZEXP Import not implemented'
class UnmanagedJar: