[Zope-CVS] CVS: Products/Ape/lib/apelib/fs - annotated.py:1.2
base.py:1.6 classification.py:1.4 connection.py:1.6
interfaces.py:1.3 properties.py:1.4 security.py:1.3
structure.py:1.5 cache.py:NONE exceptions.py:NONE
Shane Hathaway
shane at zope.com
Mon Feb 2 10:07:51 EST 2004
Update of /cvs-repository/Products/Ape/lib/apelib/fs
In directory cvs.zope.org:/tmp/cvs-serv26672/lib/apelib/fs
Modified Files:
base.py classification.py connection.py interfaces.py
properties.py security.py structure.py
Added Files:
annotated.py
Removed Files:
cache.py exceptions.py
Log Message:
Moved ape-0_8-branch to the HEAD.
From CHANGES.txt:
- Major restructuring to reduce the number of concepts in
Ape. Keychains and keys have been replaced with simple string OIDs.
There is now a flat namespace of mappers instead of a tree. Only
one classifier and one OID generator are used in any object
database.
- The ZODB root object is now stored on the filesystem.
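In gateway code, the change from keychains to OIDs looks roughly like this
(both forms are taken verbatim from the diffs below):

    # Before: keychain-based API
    p = event.getKey()
    keychain = event.makeKeychain(name, 0)

    # After: simple string OIDs
    p = event.oid
    oid = event.conf.oid_gen.new_oid(event, name, False)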
=== Products/Ape/lib/apelib/fs/annotated.py 1.1 => 1.2 ===
--- /dev/null Mon Feb 2 10:07:51 2004
+++ Products/Ape/lib/apelib/fs/annotated.py Mon Feb 2 10:07:20 2004
@@ -0,0 +1,263 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""AnnotatedFilesystem class.
+
+$Id$
+"""
+
+import re
+from time import time
+from types import StringType
+
+
+# This expression matches "\n[sectionname]...\n", where len(sectionname) > 0.
+section_re = re.compile(r'^\[([^\[\]\n]+)\][^\r\n]*(?:\r\n|\r|\n)',
+ re.MULTILINE)
+
+properties_ext = 'properties'
+remainder_ext = 'remainder'
+
+# Match 'foo.properties', 'foo.remainder', 'properties', or 'remainder'.
+# This is for filtering out annotation filenames.
+annotation_re = re.compile('(|.+[.])(%s|%s)$' % (
+ properties_ext, remainder_ext))
+
+# Names of annotations handled by this module
+remainder_ann = 'remainder' # The value is a binary string.
+object_names_ann = 'object_names'
+
+
+class AnnotatedFilesystem:
+ """Filesystem abstraction that adds annotations and automatic extensions.
+
+ Annotations are stored in .properties and .remainder files.
+ """
+
+ def __init__(self, ops, annotation_prefix='.', hidden_filenames='_'):
+ self.ops = ops
+ self.annotation_prefix = annotation_prefix
+ self.hidden_re = re.compile(hidden_filenames)
+ # _anns_cache: { path -> annotations }
+ self._anns_cache = ShortLivedCache()
+ # _dir_cache: { path -> directory info }
+ self._dir_cache = ShortLivedCache()
+
+ def clearCache(self):
+ """Clears the cache of annotations and automatic filename extensions.
+
+ Useful after writing to the filesystem.
+ """
+ self._anns_cache.clear()
+ self._dir_cache.clear()
+
+ def invalidate(self, path):
+ """Invalidates info about a path being written.
+ """
+ self._anns_cache.invalidate(path)
+ self._dir_cache.invalidate(path)
+
+ def getAnnotationPaths(self, path):
+ """Returns the property and remainder paths for a path.
+ """
+ ops = self.ops
+ if ops.isdir(path):
+ base_fn = ops.join(path, self.annotation_prefix)
+ else:
+ dirname, filename = ops.split(path)
+ base_fn = ops.join(dirname, '%s%s.' % (
+ self.annotation_prefix, filename))
+ return (base_fn + properties_ext, base_fn + remainder_ext)
+
+ def getAnnotations(self, path):
+ """Reads the annotations for a path."""
+ res = self._anns_cache.get(path)
+ if res is not None:
+ return res
+ props_fn, rem_fn = self.getAnnotationPaths(path)
+ res = {}
+ try:
+ data = self.ops.readfile(rem_fn, 0)
+ except IOError:
+ # The remainder file apparently does not exist
+ pass
+ else:
+ res[remainder_ann] = data
+ # Note properties file can override the remainder.
+ try:
+ data = self.ops.readfile(props_fn, 1)
+ except IOError:
+ # The properties file apparently does not exist
+ self._anns_cache.set(path, res)
+ return res
+ pos = 0
+ prev_section_name = None
+ while 1:
+ match = section_re.search(data, pos)
+ if match is None:
+ endpos = len(data)
+ else:
+ endpos = match.start()
+ if prev_section_name is not None:
+ # get the data and decode.
+ section = data[pos:endpos].replace('[[', '[')
+ res[prev_section_name] = section
+ if match is None:
+ break
+ else:
+ prev_section_name = match.group(1)
+ pos = match.end()
+ self._anns_cache.set(path, res)
+ return res
+
+ def checkAnnotationName(self, ann_name):
+ if (not isinstance(ann_name, StringType)
+ or not ann_name
+ or '[' in ann_name
+ or ']' in ann_name
+ or '\n' in ann_name):
+ raise ValueError(ann_name)
+
+ def writeAnnotations(self, path, anns):
+ props_fn, rem_fn = self.getAnnotationPaths(path)
+ props_data = ''
+ rem_data = ''
+ items = anns.items()
+ items.sort()
+ for name, value in items:
+ if name == remainder_ann:
+ # Write to the remainder file.
+ rem_data = value
+ else:
+ # Write a section of the properties file.
+ props_data += self.formatSection(name, value)
+ self.writeOrRemove(props_fn, 1, props_data)
+ self.writeOrRemove(rem_fn, 0, rem_data)
+ self._anns_cache.invalidate(path)
+ # The file might be new, so invalidate the directory.
+ self._dir_cache.invalidate(self.ops.dirname(path))
+
+ def formatSection(self, name, text):
+ s = '[%s]\n%s\n' % (name, text.replace('[', '[['))
+ if not text.endswith('\n'):
+ s += '\n'
+ return s
+
+ def writeOrRemove(self, fn, as_text, data):
+ """If data is provided, write it. Otherwise remove the file.
+ """
+ ops = self.ops
+ if data:
+ ops.writefile(fn, as_text, data)
+ else:
+ if ops.exists(fn):
+ ops.remove(fn)
+
+ def isLegalFilename(self, fn):
+ ap = self.annotation_prefix
+ if (not fn or
+ (fn.startswith(ap) and annotation_re.match(fn, len(ap)))
+ or self.hidden_re.match(fn) is not None):
+ return 0
+ return 1
+
+ def computeDirectoryContents(self, path, allow_missing=0):
+ """Returns the name translations for a directory. Caches the results.
+
+ Returns ({filename: name}, {name: filename}).
+ """
+ res = self._dir_cache.get(path)
+ if res is not None:
+ return res
+
+ try:
+ fns = self.ops.listdir(path)
+ except OSError:
+ if allow_missing:
+ return {}, {}
+ raise
+
+ obj_list = [] # [name]
+ trans = {} # { base name -> filename with extension or None }
+ filenames = filter(self.isLegalFilename, fns)
+ anns = self.getAnnotations(path)
+ text = anns.get(object_names_ann)
+ if text:
+ # Prepare a dictionary of translations from basename to filename.
+ for fn in filenames:
+ if '.' in fn:
+ base, ext = fn.split('.', 1)
+ if trans.has_key(base):
+ # Name collision: two or more files have the same base
+ # name. Don't strip the extensions for any of them.
+ trans[base] = None
+ else:
+ trans[base] = fn
+ else:
+ trans[fn] = None
+ obj_list = [line.strip() for line in text.split('\n')]
+ for obj_name in obj_list:
+ if '.' in obj_name:
+ # An object name uses an extension. Don't translate
+ # any name that uses the same base name.
+ base, ext = obj_name.split('.', 1)
+ trans[base] = None
+
+ fn_to_name = {}
+ for fn in filenames:
+ fn_to_name[fn] = fn
+ # Translate the file names to object names.
+ for obj_name in obj_list:
+ fn = trans.get(obj_name)
+ if fn:
+ fn_to_name[fn] = obj_name
+ name_to_fn = {}
+ for fn, name in fn_to_name.items():
+ name_to_fn[name] = fn
+ res = (fn_to_name, name_to_fn)
+ self._dir_cache.set(path, res)
+ return res
+
+
+class ShortLivedCache:
+ """Simple short-lived object cache.
+ """
+ def __init__(self, lifetime=1):
+ # The default lifetime is 1 second.
+ self.lifetime = lifetime
+ self.data = {}
+ self.expiration = time() + lifetime
+
+ def get(self, key, default=None):
+ now = time()
+ if now >= self.expiration:
+ self.data.clear()
+ return default
+ res = self.data.get(key, default)
+ return res
+
+ def set(self, key, value):
+ now = time()
+ if now >= self.expiration:
+ self.data.clear()
+ self.expiration = now + self.lifetime
+ self.data[key] = value
+
+ def invalidate(self, key):
+ try:
+ del self.data[key]
+ except KeyError:
+ pass
+
+ def clear(self):
+ self.data.clear()
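To illustrate the annotation format, assuming the default annotation_prefix
of '.' and a hypothetical file 'index.html' (afs being an AnnotatedFilesystem
instance), a call like

    afs.writeAnnotations('index.html', {
        'classification': 'node_type=f\ntype=File',
        'remainder': some_binary_string,
    })

produces a sidecar file '.index.html.properties' containing

    [classification]
    node_type=f
    type=File

while the 'remainder' value is written verbatim to '.index.html.remainder'.
Literal '[' characters in annotation values are doubled to '[[' on write and
collapsed back by getAnnotations(). For a directory, the sidecar files are
simply '.properties' and '.remainder' inside the directory.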
=== Products/Ape/lib/apelib/fs/base.py 1.5 => 1.6 ===
--- Products/Ape/lib/apelib/fs/base.py:1.5 Wed Jul 30 17:33:02 2003
+++ Products/Ape/lib/apelib/fs/base.py Mon Feb 2 10:07:20 2004
@@ -27,11 +27,8 @@
def __init__(self, conn_name='fs'):
self.conn_name = conn_name
- def getSchema(self):
- return self.schema
-
def getConnection(self, event):
- return event.getConnection(self.conn_name)
+ return event.connections[self.conn_name]
- def getSources(self, event):
+ def getPollSources(self, event):
return None
=== Products/Ape/lib/apelib/fs/classification.py 1.3 => 1.4 ===
--- Products/Ape/lib/apelib/fs/classification.py:1.3 Wed Jul 30 17:33:02 2003
+++ Products/Ape/lib/apelib/fs/classification.py Mon Feb 2 10:07:20 2004
@@ -11,18 +11,18 @@
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
-"""Filesystem classification section.
+"""Filesystem classification annotation.
$Id$
"""
-from apelib.core.interfaces import IGateway
+from apelib.core.interfaces import IGateway, LoadError, OIDConflictError
from apelib.core.schemas import FieldSchema
from base import FSGatewayBase
-class FSClassificationSection(FSGatewayBase):
+class FSClassificationAnnotation(FSGatewayBase):
"""Gateway for storing classification data."""
__implements__ = IGateway
@@ -31,9 +31,9 @@
def load(self, event):
fs_conn = self.getConnection(event)
- p = event.getKey()
+ p = event.oid
classification = {'node_type': fs_conn.readNodeType(p)}
- text = fs_conn.readSection(p, 'classification', '')
+ text = fs_conn.readAnnotation(p, 'classification', '')
if text:
lines = text.split('\n')
for line in lines:
@@ -46,7 +46,17 @@
def store(self, event, state):
# state is a classification
fs_conn = self.getConnection(event)
- p = event.getKey()
+ p = event.oid
+ if event.is_new:
+ # Don't overwrite existing data
+ try:
+ fs_conn.readNodeType(p)
+ except LoadError:
+ # Nothing exists yet.
+ pass
+ else:
+ # Something exists. Don't overwrite it.
+ raise OIDConflictError(p)
items = state.items()
items.sort()
text = []
@@ -56,5 +66,5 @@
else:
text.append('%s=%s' % (k, v))
text = '\n'.join(text)
- fs_conn.writeSection(p, 'classification', text)
+ fs_conn.writeAnnotation(p, 'classification', text)
return text.strip()
=== Products/Ape/lib/apelib/fs/connection.py 1.5 => 1.6 ===
--- Products/Ape/lib/apelib/fs/connection.py:1.5 Mon Sep 22 07:15:36 2003
+++ Products/Ape/lib/apelib/fs/connection.py Mon Feb 2 10:07:20 2004
@@ -16,149 +16,53 @@
$Id$
"""
-import re
-from types import StringType
-
-from apelib.core.interfaces import ITPCConnection, ISourceRepository
-from apelib.core.exceptions import NoStateFoundError
-from interfaces import IFSConnection
-from exceptions import FSWriteError
-from cache import ShortLivedCache
+from apelib.core.interfaces import ITPCConnection, ISourceRepository, LoadError
+from interfaces import IFSConnection, FSWriteError
from fileops import StandardFileOperations
+from annotated import AnnotatedFilesystem, object_names_ann
+
+# For a node_type_ann, the value is 'f' (file) or 'd' (directory)
+node_type_ann = '@node_type'
+
+# data_ann holds the content of a file. It is not valid for directories.
+data_ann = '@data'
+# file_list_ann holds the content of a directory. It is not valid for files.
+file_list_ann = '@files'
-# Try to decipher this regular expression ;-)
-# It basically matches "\n[sectionname]...\n", where len(sectionname) > 0.
-section_re = re.compile(r'^\[([^\[\]\n]+)\][^\r\n]*(?:\r\n|\r|\n)',
- re.MULTILINE)
-
-# For a NODE_TYPE_SECTION, the value is 'f' (file) or 'd' (directory)
-NODE_TYPE_SECTION = '@node_type'
-
-# For a DATA_SECTION, the value is a two-item tuple containing a
-# string (file) or list of names (directory) and the as_text flag.
-DATA_SECTION = '@data'
-
-SUGGESTED_EXTENSION_SECTION = '@s_ext' # The suggested filename extension.
-OBJECT_NAMES_SECTION = 'object_names' # For directories. The value is text.
-REMAINDER_SECTION = 'remainder' # The value is a binary string.
-
-PROPERTIES_EXTENSION = 'properties'
-REMAINDER_EXTENSION = 'remainder'
-
-# Match 'foo.properties', 'foo.remainder', 'properties', or 'remainder'.
-# This is for filtering out metadata filenames.
-metadata_re = re.compile('(|.+[.])(%s|%s)$' % (
- PROPERTIES_EXTENSION, REMAINDER_EXTENSION))
+# The suggested filename extension.
+suggested_extension_ann = '@s_ext'
class FSConnection:
- """Reads / writes files with 'sections'.
+ """Reads / writes files with annotations.
- The required 'type' section specifies whether the object is a file or
- a directory. The optional 'data' section specifies either the main
- file contents or the names of the files in the directory. All other
- sections get stored in a '.properties' file. The properties file uses
- square-bracket section headers and encodes sections by doubling
- left-square brackets.
+ The required 'type' annotation specifies whether the object is a file
+ or a directory. The optional 'data' annotation specifies either the
+ main file contents or the names of the files in the directory.
+ All other annotations get stored in the '.properties' and
+ '.remainder' files. The properties file uses square-bracket
+ annotation headers and encodes annotations by doubling left-square
+ brackets.
"""
__implements__ = IFSConnection, ITPCConnection, ISourceRepository
basepath = ''
- def __init__(self, basepath, metadata_prefix='.', hidden_filenames='_',
+ def __init__(self, basepath, annotation_prefix='.', hidden_filenames='_',
ops=None):
self.basepath = basepath
- self.metadata_prefix = metadata_prefix
- self.hidden_re = re.compile(hidden_filenames)
- self._final = 0
- # _pending holds the data to be written.
- # _pending: { subpath string -> { section_name -> data } }
- self._pending = {}
- self._props_cache = ShortLivedCache()
- self._dir_cache = ShortLivedCache()
if ops is None:
ops = StandardFileOperations()
self.ops = ops
+ self.afs = AnnotatedFilesystem(
+ ops, annotation_prefix, hidden_filenames)
+ self._final = 0
+ # _pending holds the data to be written.
+ # _pending: { subpath string -> { annotation_name -> data } }
+ self._pending = {}
-
- def _isLegalFilename(self, fn):
- mp = self.metadata_prefix
- if (not fn or
- (fn.startswith(mp) and metadata_re.match(fn, len(mp)))
- or self.hidden_re.match(fn) is not None):
- return 0
- return 1
-
-
- def _computeDirectoryContents(self, path, ignore_error=0):
- """Computes and returns intermediate directory contents info.
-
- Returns (filenames, object_names, translations). The results
- are cached for a short time.
- """
- res = self._dir_cache.get(path)
- if res is not None:
- return res
-
- obj_names = []
- trans = {} # { base name -> filename with extension or None }
- try:
- fns = self.ops.listdir(path)
- except OSError:
- if ignore_error:
- return ([], obj_names, trans)
- raise
-
- filenames = filter(self._isLegalFilename, fns)
- props = self._getPropertiesFromFile(path)
- text = props.get(OBJECT_NAMES_SECTION)
- if text:
- # Prepare a dictionary of translations.
- for fn in filenames:
- if '.' in fn:
- base, ext = fn.split('.', 1)
- if trans.has_key(base):
- # Name collision: two or more files have the same base
- # name. Don't use an extension for this name.
- trans[base] = None
- else:
- trans[base] = fn
- else:
- trans[fn] = None
- obj_names = [line.strip() for line in text.split('\n')]
- for obj_name in obj_names:
- if '.' in obj_name:
- base, ext = obj_name.split('.', 1)
- trans[base] = None
-
- res = (filenames, obj_names, trans)
- self._dir_cache.set(path, res)
- return res
-
-
- def _listDirectoryAsMapping(self, path, ignore_error=0):
- """Returns the translated filenames at path.
-
- The ignore_error flag makes this method return an empty
- dictionary if the directory is not found.
-
- Returns {filename -> obj_name}.
- """
- filenames, obj_names, trans = self._computeDirectoryContents(
- path, ignore_error)
- res = {}
- for fn in filenames:
- res[fn] = fn
- # Translate names.
- for obj_name in obj_names:
- fn = trans.get(obj_name)
- if fn:
- res[fn] = obj_name
- return res
-
-
- def _expandPath(self, subpath):
+ def getPath(self, subpath):
if self.basepath:
while subpath.startswith('/') or subpath.startswith('\\'):
subpath = subpath[1:]
@@ -170,64 +74,22 @@
dir_path, obj_name = self.ops.split(path)
if '.' not in obj_name:
# This object might have an automatic filename extension.
- filenames, obj_names, trans = self._computeDirectoryContents(
- dir_path, 1)
- fn = trans.get(obj_name)
- if fn is not None:
+ contents = self.afs.computeDirectoryContents(dir_path, 1)
+ fn_to_name, name_to_fn = contents
+ fn = name_to_fn.get(obj_name)
+ if fn:
# Use the filename with an extension.
path = self.ops.join(dir_path, fn)
return path
-
- def _checkSectionName(self, section_name):
- if (not isinstance(section_name, StringType)
- or not section_name
- or '[' in section_name
- or ']' in section_name
- or '\n' in section_name
- or section_name.startswith('@')
- or section_name == OBJECT_NAMES_SECTION):
- raise ValueError, section_name
-
-
- def writeSection(self, subpath, section_name, data):
- self._checkSectionName(section_name)
- self._queue(subpath, section_name, data)
-
-
- def writeNodeType(self, subpath, data):
- self._queue(subpath, NODE_TYPE_SECTION, data)
-
-
- def writeData(self, subpath, data, as_text=0):
- self._queue(subpath, DATA_SECTION, (data, as_text))
-
-
- def suggestExtension(self, subpath, ext):
- self._queue(subpath, SUGGESTED_EXTENSION_SECTION, ext)
-
-
- def readSection(self, subpath, section_name, default=None):
- self._checkSectionName(section_name)
- path = self._expandPath(subpath)
- sections = self._getPropertiesFromFile(path)
- return sections.get(section_name, default)
-
-
def readNodeType(self, subpath):
- path = self._expandPath(subpath)
+ path = self.getPath(subpath)
if not self.ops.exists(path):
- raise NoStateFoundError(subpath)
+ raise LoadError("%s does not exist" % path)
return self.ops.isdir(path) and 'd' or 'f'
-
def readData(self, subpath, allow_missing=0, as_text=0):
- path = self._expandPath(subpath)
- isdir = self.ops.isdir(path)
- # Read either the directory listing or the file contents.
- if isdir:
- # Return a sequence of object names.
- return self._listDirectoryAsMapping(path).values()
+ path = self.getPath(subpath)
# Return a string.
try:
return self.ops.readfile(path, as_text)
@@ -236,12 +98,39 @@
return None
raise
+ def readDirectory(self, subpath, allow_missing=0):
+ path = self.getPath(subpath)
+ # Return a sequence of object names.
+ contents = self.afs.computeDirectoryContents(path, allow_missing)
+ fn_to_name, name_to_fn = contents
+ return name_to_fn.keys()
+
+ def readAnnotation(self, subpath, name, default=None):
+ self.afs.checkAnnotationName(name)
+ path = self.getPath(subpath)
+ annotations = self.afs.getAnnotations(path)
+ return annotations.get(name, default)
+
+ def writeNodeType(self, subpath, data):
+ self._queue(subpath, node_type_ann, data)
+
+ def writeData(self, subpath, data, as_text=0):
+ self._queue(subpath, data_ann, (data, as_text))
+
+ def writeDirectory(self, subpath, names):
+ self._queue(subpath, file_list_ann, names)
+
+ def writeAnnotation(self, subpath, name, data):
+ self.afs.checkAnnotationName(name)
+ self._queue(subpath, name, data)
def getExtension(self, subpath):
- path = self._expandPath(subpath)
+ path = self.getPath(subpath)
stuff, ext = self.ops.splitext(path)
return ext
+ def suggestExtension(self, subpath, ext):
+ self._queue(subpath, suggested_extension_ann, ext)
def getModTime(self, subpath, default=0):
"""Returns the time an object was last modified.
@@ -250,10 +139,10 @@
implementation returns the modification time of the most
recently modified of the three.
"""
- path = self._expandPath(subpath)
- props, remainder = self._getPropertyPaths(path)
+ path = self.getPath(subpath)
+ extra = self.afs.getAnnotationPaths(path)
maxtime = -1
- for p in (path, props, remainder):
+ for p in (path,) + tuple(extra):
try:
t = self.ops.getmtime(p)
except OSError:
@@ -266,70 +155,12 @@
return maxtime
- def _getPropertyPaths(self, path):
- """Returns the property and remainder paths for a path."""
- if self.ops.isdir(path):
- base_fn = self.ops.join(path, self.metadata_prefix)
- else:
- dirname, filename = self.ops.split(path)
- base_fn = self.ops.join(dirname, '%s%s.' % (
- self.metadata_prefix, filename))
- return (base_fn + PROPERTIES_EXTENSION, base_fn + REMAINDER_EXTENSION)
-
-
- def _getPropertiesFromFile(self, path):
- """Reads the properties and remainder for a path."""
- res = self._props_cache.get(path)
- if res is not None:
- return res
-
- props_fn, rem_fn = self._getPropertyPaths(path)
-
- res = {}
- try:
- data = self.ops.readfile(rem_fn, 0)
- except IOError:
- # The remainder file apparently does not exist
- pass
- else:
- res[REMAINDER_SECTION] = data
- # Note that the remainder can be overridden by the properties
- # file. Perhaps that should be prevented in the future.
-
- try:
- data = self.ops.readfile(props_fn, 1)
- except IOError:
- # The properties file apparently does not exist
- self._props_cache.set(path, res)
- return res
-
- pos = 0
- prev_section_name = None
- while 1:
- match = section_re.search(data, pos)
- if match is None:
- endpos = len(data)
- else:
- endpos = match.start()
- if prev_section_name is not None:
- # get the data and decode.
- section = data[pos:endpos].replace('[[', '[')
- res[prev_section_name] = section
- if match is None:
- break
- else:
- prev_section_name = match.group(1)
- pos = match.end()
-
- self._props_cache.set(path, res)
- return res
-
-
- def _writeFinal(self, subpath, sections):
- """Performs an actual write of a file or directory to disk."""
- # sections is a mapping.
- path = self._expandPath(subpath)
- t = sections[NODE_TYPE_SECTION]
+ def _writeFinal(self, subpath, anns):
+ """Performs an actual write of a file or directory to disk.
+ """
+ # anns is a mapping.
+ path = self.getPath(subpath)
+ t = anns[node_type_ann]
if not self.ops.exists(path):
if t == 'd':
self.ops.mkdir(path)
@@ -337,7 +168,7 @@
fn = self.ops.split(path)[1]
if '.' not in fn:
# This object has no extension and doesn't yet exist.
- ext = sections.get(SUGGESTED_EXTENSION_SECTION)
+ ext = anns.get(suggested_extension_ann)
if ext:
# Try to use the suggested extension.
if not ext.startswith('.'):
@@ -347,59 +178,24 @@
# No file is in the way.
# Use the suggested extension.
path = p
- props_fn, rem_fn = self._getPropertyPaths(path)
- props_data = ''
- rem_data = ''
- items = sections.items()
- items.sort()
- try:
- for name, value in items:
- if name == NODE_TYPE_SECTION:
- continue
- elif name == DATA_SECTION:
- data, as_text = value
- if t == 'd':
- # Change the list of subobjects.
- self._removeUnlinkedItems(path, data)
- props_data += self._formatSection(
- OBJECT_NAMES_SECTION, '\n'.join(data))
- self._disableConflictingExtensions(subpath, data)
- self._dir_cache.invalidate(path)
- else:
- # Change the file contents.
- self.ops.writefile(path, as_text, data)
- elif name == SUGGESTED_EXTENSION_SECTION:
- # This doesn't need to be written.
- pass
- elif name == REMAINDER_SECTION:
- # Write to the remainder file.
- rem_data = value
- else:
- # Write a metadata section.
- props_data += self._formatSection(name, value)
- finally:
- self._writeOrRemove(props_fn, 1, props_data)
- self._writeOrRemove(rem_fn, 0, rem_data)
- self._props_cache.invalidate(path)
- # The file might be new, so invalidate the directory.
- self._dir_cache.invalidate(self.ops.dirname(path))
-
-
- def _formatSection(self, name, text):
- s = '[%s]\n%s\n' % (name, text.replace('[', '[['))
- if not text.endswith('\n'):
- s += '\n'
- return s
-
-
- def _writeOrRemove(self, fn, as_text, data):
- """If data is provided, write it. Otherwise remove the file.
- """
- if data:
- self.ops.writefile(fn, as_text, data)
- else:
- if self.ops.exists(fn):
- self.ops.remove(fn)
+ to_write = {}
+ for name, value in anns.items():
+ if (name == node_type_ann
+ or name == suggested_extension_ann):
+ # Doesn't need to be written.
+ continue
+ elif name == data_ann:
+ data, as_text = value
+ self.ops.writefile(path, as_text, data)
+ elif name == file_list_ann:
+ # Change the list of subobjects.
+ self._removeUnlinkedItems(path, value)
+ to_write[object_names_ann] = '\n'.join(value)
+ self._disableConflictingExtensions(subpath, value)
+ self.afs.invalidate(path)
+ else:
+ to_write[name] = value
+ self.afs.writeAnnotations(path, to_write)
def _removeUnlinkedItems(self, path, names):
@@ -407,26 +203,26 @@
linked = {}
for name in names:
linked[name] = 1
- for fn, obj_name in self._listDirectoryAsMapping(path).items():
+ fn_to_name, name_to_fn = self.afs.computeDirectoryContents(path)
+ for fn, obj_name in fn_to_name.items():
if not linked.get(obj_name):
item_fn = self.ops.join(path, fn)
if self.ops.isdir(item_fn):
self.ops.rmtree(item_fn)
else:
self.ops.remove(item_fn)
- props_fn, rem_fn = self._getPropertyPaths(item_fn)
- if self.ops.exists(props_fn):
- self.ops.remove(props_fn)
- if self.ops.exists(rem_fn):
- self.ops.remove(rem_fn)
+ extra_paths = self.afs.getAnnotationPaths(item_fn)
+ for p in extra_paths:
+ if self.ops.exists(p):
+ self.ops.remove(p)
def _disableConflictingExtensions(self, subpath, obj_names):
"""Fixes collisions before writing files in a directory.
- Enforces the rule: if 'foo.*' is in the
- database, 'foo' may not have an automatic extension.
- Enforces by un-queuing suggested extensions.
+ Enforces the rule: if 'foo.*' is in the database, 'foo' may
+ not have an automatic extension. Enforces by un-queuing
+ suggested extensions.
"""
reserved = {} # { object name without extension -> 1 }
for obj_name in obj_names:
@@ -436,14 +232,13 @@
if not reserved:
# No objects have extensions.
return
-
while subpath.endswith('/'):
subpath = subpath[:-1]
for obj_name in obj_names:
if reserved.has_key(obj_name):
# Prevent obj_name from using an automatic extension.
child_subpath = '%s/%s' % (subpath, obj_name)
- self._queue(child_subpath, SUGGESTED_EXTENSION_SECTION,
+ self._queue(child_subpath, suggested_extension_ann,
'', force=1)
@@ -454,44 +249,52 @@
transaction commit.
"""
non_containers = {}
- for subpath, sections in items:
- path = self._expandPath(subpath)
+ for subpath, anns in items:
+ path = self.getPath(subpath)
exists = self.ops.exists(path)
if exists and not self.ops.canwrite(path):
raise FSWriteError(
"Can't get write access to %s" % subpath)
# type must be provided and must always be either 'd' or 'f'.
- if (not sections.has_key(NODE_TYPE_SECTION)
- or not sections.has_key(DATA_SECTION)):
+ if not anns.has_key(node_type_ann):
raise FSWriteError(
- 'Data or node type not specified for %s' % subpath)
- t = sections[NODE_TYPE_SECTION]
+ 'Node type not specified for %s' % subpath)
+ t = anns[node_type_ann]
dir = self.ops.dirname(subpath)
if non_containers.get(dir):
raise FSWriteError(
"Not a directory: %s" % dir)
- data, as_text = sections[DATA_SECTION]
if t == 'f':
+ data, as_text = anns[data_ann]
+ if anns.has_key(file_list_ann):
+ raise FSWriteError(
+ "Files can't have directory contents. %s"
+ % subpath)
if exists and self.ops.isdir(path):
raise FSWriteError(
"Can't write file data to directory at %s"
% subpath)
non_containers[subpath] = 1
- if not isinstance(data, StringType):
+ if not isinstance(data, type('')):
raise FSWriteError(
'Data for a file must be a string at %s'
% subpath)
elif t == 'd':
+ data = anns[file_list_ann]
+ if anns.has_key(data_ann):
+ raise FSWriteError(
+ "Directories can't have file data. %s"
+ % subpath)
if exists and not self.ops.isdir(path):
raise FSWriteError(
"Can't write directory contents to file at %s"
% subpath)
- if isinstance(data, StringType):
+ if isinstance(data, type('')):
raise FSWriteError(
'Data for a directory must be a list or tuple at %s'
% subpath)
for item in data:
- if not self._isLegalFilename(item):
+ if not self.afs.isLegalFilename(item):
raise FSWriteError(
'Not a legal object name: %s' % repr(item))
else:
@@ -499,20 +302,20 @@
'Node type must be "d" or "f" at %s' % subpath)
- def _queue(self, subpath, section_name, data, force=0):
+ def _queue(self, subpath, name, data, force=0):
"""Queues data to be written at commit time"""
m = self._pending
- sections = m.get(subpath)
- if sections is None:
- sections = {}
- m[subpath] = sections
- if sections.has_key(section_name) and not force:
- if sections[section_name] != data:
+ anns = m.get(subpath)
+ if anns is None:
+ anns = {}
+ m[subpath] = anns
+ if anns.has_key(name) and not force:
+ if anns[name] != data:
raise FSWriteError(
'Conflicting data storage at %s (%s)' %
- (subpath, section_name))
+ (subpath, name))
else:
- sections[section_name] = data
+ anns[name] = data
#
@@ -530,8 +333,7 @@
self.ops.makedirs(self.basepath)
def begin(self):
- self._props_cache.clear()
- self._dir_cache.clear()
+ self.afs.clearCache()
def vote(self):
"""Do some early verification
@@ -546,8 +348,7 @@
def reset(self):
self._final = 0
self._pending.clear()
- self._props_cache.clear()
- self._dir_cache.clear()
+ self.afs.clearCache()
def abort(self):
self.reset()
@@ -557,8 +358,8 @@
try:
items = self._pending.items()
items.sort() # Ensure that base directories come first.
- for subpath, sections in items:
- self._writeFinal(subpath, sections)
+ for subpath, anns in items:
+ self._writeFinal(subpath, anns)
finally:
self.reset()
@@ -575,16 +376,14 @@
t.append(None)
return t
-
- def getSources(self, subpath):
- p = self._expandPath(subpath)
- props, remainder = self._getPropertyPaths(p)
- paths = (p, props, remainder)
+ def getPollSources(self, subpath):
+ path = self.getPath(subpath)
+ extra = self.afs.getAnnotationPaths(path)
+ paths = (path,) + tuple(extra)
t = self._get_paths_mtime(paths)
return {(self, paths): t}
-
- def freshen(self, sources):
+ def poll(self, sources):
"""ISourceRepository implementation.
Returns the changed items.
@@ -597,5 +396,3 @@
if t != new_t:
res[source] = new_t
return res
-
-
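To illustrate the reworked write API, a gateway storing a single file ends
up queuing calls like these (the subpath and values are hypothetical; the
method names match the gateways below):

    fs_conn.writeNodeType('/docs/index.html', 'f')
    fs_conn.writeData('/docs/index.html', body_text, as_text=1)
    fs_conn.writeAnnotation('/docs/index.html', 'classification', 'type=Page')

Nothing touches the disk at this point: _queue() just records the values in
_pending, a verification pass checks that node types, file data, and
directory listings are consistent, and _writeFinal() writes everything out
when the transaction commits.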
=== Products/Ape/lib/apelib/fs/interfaces.py 1.2 => 1.3 ===
--- Products/Ape/lib/apelib/fs/interfaces.py:1.2 Wed Jul 30 17:33:02 2003
+++ Products/Ape/lib/apelib/fs/interfaces.py Mon Feb 2 10:07:20 2004
@@ -19,66 +19,83 @@
from Interface import Interface
+class FSWriteError (Exception):
+ """Unable to write data"""
+
+
class IFSConnection (Interface):
- """Simple filesystem connection (with textual annotations).
+ """Simple filesystem connection with annotations.
"""
- def writeSection(subpath, section_name, data):
- """Writes a text-based metadata section for a filesystem node."""
-
- def writeNodeType(subpath, data):
- """Writes the node type for a filesystem node.
+ def getPath(subpath):
+ """Returns the filesystem path for a subpath.
- 'd' (directory) and 'f' (file) are supported.
+ May automatically append an extension if the file already
+ exists.
"""
- def writeData(subpath, data, as_text=0):
- """Writes data to a filesystem node.
+ def readNodeType(subpath):
+ """Reads the node type of a filesystem node.
+ """
- In the case of directories, expects a tuple containing the names
- of the files that should be in the directory. In the case of
- files, expects a string.
+ def readData(subpath, allow_missing=0, as_text=0):
+ """Reads the main data stream from a file.
- If as_text is true, the file is written in text mode. The
- as_text flag is ignored for directories.
+ If the allow_missing flag is specified, this method returns
+ None if no such file is found. If as_text is true, the file
+ is read in text mode.
"""
- def suggestExtension(subpath, ext):
- """Suggests a filename extension for a filesystem node.
+ def readDirectory(subpath, allow_missing=0):
+ """Reads the contents of a directory.
- The IFSConnection may use this information to store the file
- with an automatically appended filename extension.
+ Returns a list of object names. If the allow_missing flag is
+ specified, this method returns None if no such directory is
+ found.
"""
- def readSection(subpath, section_name, default=None):
- """Reads a text-based metadata section.
+ def readAnnotation(subpath, name, default=None):
+ """Reads a text-based annotation for a file.
"""
- def readNodeType(subpath):
- """Reads the node type of a filesystem node.
+ def writeNodeType(subpath, data):
+ """Writes the node type for a filesystem node.
+
+ 'd' (directory) and 'f' (file) are supported.
"""
- def readData(subpath, allow_missing=0, as_text=0):
- """Reads the data from a filesystem node.
+ def writeData(subpath, data, as_text=0):
+ """Writes string data to a filesystem node.
- For files, this reads the main data stream. For directories,
- this returns a list of names. If the allow_missing flag is
- specified, this method returns None if no filesystem node is
- found.
+ If 'as_text' is true, the file is written in text mode.
+ """
+
+ def writeDirectory(subpath, names):
+ """Writes data to a directory.
- If as_text is true, the file is read in text mode. The
- as_text flag is ignored for directories.
+ 'names' is a sequence of object names used for determining filenames.
+ """
+
+ def writeAnnotation(subpath, name, data):
+ """Writes a text-based annotation for a filesystem node.
"""
def getExtension(subpath):
- """Returns the filename extension used for a filesystem node.
+ """Returns the filename extension for a subpath.
+ """
+
+ def suggestExtension(subpath, ext):
+ """Suggests a filename extension for a filesystem node.
+
+ The IFSConnection may use this information to store the file
+ with an automatically appended filename extension.
"""
def getModTime(subpath, default=0):
- """Returns the modification time of a file.
+ """Returns the last-modified time of a file.
"""
- def getSources(subpath):
+ def getPollSources(subpath):
"""Returns source information for a subpath.
The source information is a mapping that maps
=== Products/Ape/lib/apelib/fs/properties.py 1.3 => 1.4 ===
--- Products/Ape/lib/apelib/fs/properties.py:1.3 Wed Jul 9 11:40:03 2003
+++ Products/Ape/lib/apelib/fs/properties.py Mon Feb 2 10:07:20 2004
@@ -58,7 +58,8 @@
class FSProperties (FSGatewayBase):
- """Simple properties to filesystem property section gateway."""
+ """Simple properties to filesystem properties annotation gateway.
+ """
__implements__ = IGateway
@@ -67,14 +68,14 @@
schema.addField('type', 'string')
schema.addField('data', 'string')
- def __init__(self, section='properties', conn_name='fs'):
- self.section = str(section)
+ def __init__(self, annotation='properties', conn_name='fs'):
+ self.annotation = str(annotation)
FSGatewayBase.__init__(self, conn_name)
def load(self, event):
- p = event.getKey()
+ p = event.oid
fs_conn = self.getConnection(event)
- text = fs_conn.readSection(p, self.section, '')
+ text = fs_conn.readAnnotation(p, self.annotation, '')
res = []
if text:
lines = text.split('\n')
@@ -95,29 +96,29 @@
lines.append('%s:%s=%s' % (k, t, escape_string(v)))
lines.sort()
text = '\n'.join(lines)
- p = event.getKey()
+ p = event.oid
fs_conn = self.getConnection(event)
- fs_conn.writeSection(p, self.section, text)
+ fs_conn.writeAnnotation(p, self.annotation, text)
state = list(state)
state.sort()
return tuple(state)
-class FSSectionData (FSGatewayBase):
- """Text to filesystem property section gateway."""
+class FSAnnotationData (FSGatewayBase):
+ """Text to filesystem property annotation gateway."""
__implements__ = IGateway
schema = FieldSchema('data', 'string')
- def __init__(self, section, conn_name='fs'):
- self.section = str(section)
+ def __init__(self, annotation, conn_name='fs'):
+ self.annotation = str(annotation)
FSGatewayBase.__init__(self, conn_name)
def load(self, event):
fs_conn = self.getConnection(event)
- p = event.getKey()
- state = fs_conn.readSection(p, self.section, '').strip()
+ p = event.oid
+ state = fs_conn.readAnnotation(p, self.annotation, '').strip()
return state, state
def store(self, event, state):
@@ -125,8 +126,8 @@
raise ValueError('Not a string: %s' % repr(state))
state = state.strip()
if state:
- p = event.getKey()
+ p = event.oid
fs_conn = self.getConnection(event)
- fs_conn.writeSection(p, self.section, state)
+ fs_conn.writeAnnotation(p, self.annotation, state)
return state
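As an example of how these gateways ride on the annotation layer, a
hypothetical FSAnnotationData('title') instance boils down to

    # store()
    fs_conn.writeAnnotation(event.oid, 'title', state.strip())
    # load()
    state = fs_conn.readAnnotation(event.oid, 'title', '').strip()

so the text shows up as a [title] section in the object's .properties file
(see annotated.py above).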
=== Products/Ape/lib/apelib/fs/security.py 1.2 => 1.3 ===
--- Products/Ape/lib/apelib/fs/security.py:1.2 Tue Apr 29 18:11:50 2003
+++ Products/Ape/lib/apelib/fs/security.py Mon Feb 2 10:07:20 2004
@@ -16,9 +16,8 @@
$Id$
"""
-from apelib.core.interfaces import IGateway
+from apelib.core.interfaces import IGateway, MappingError
from apelib.core.schemas import RowSequenceSchema
-from apelib.core.exceptions import MappingError
from params import stringToParams, paramsToString
from base import FSGatewayBase
@@ -35,14 +34,13 @@
schema.addField('permission', 'string')
schema.addField('username', 'string')
- def __init__(self, section='security', conn_name='fs'):
- self.section = section
+ def __init__(self, annotation='security', conn_name='fs'):
+ self.annotation = annotation
FSGatewayBase.__init__(self, conn_name)
def load(self, event):
- key = event.getKey()
fs_conn = self.getConnection(event)
- text = fs_conn.readSection(key, self.section, '')
+ text = fs_conn.readAnnotation(event.oid, self.annotation, '')
res = []
if text:
lines = text.split('\n')
@@ -68,7 +66,7 @@
else:
raise ValueError(
"Could not read security declaration "
- "%s for %s" % (repr(line), repr(key)))
+ "%s for %s" % (repr(line), repr(event.oid)))
res.append(tuple(row))
res.sort()
return res, tuple(res)
@@ -90,7 +88,7 @@
lines.sort()
text = '\n'.join(lines)
fs_conn = self.getConnection(event)
- fs_conn.writeSection(event.getKey(), self.section, text)
+ fs_conn.writeAnnotation(event.oid, self.annotation, text)
state = list(state)
state.sort()
return tuple(state)
@@ -110,7 +108,7 @@
def load(self, event):
c = self.getConnection(event)
- p = event.getKey()
+ p = event.oid
assert c.readNodeType(p) == 'f'
text = c.readData(p)
res = []
@@ -158,7 +156,7 @@
domainlist = self._joinList(domains)
to_write = '%s:%s:%s:%s' % (id, password, rolelist, domainlist)
replace_lines[id] = to_write
- p = event.getKey()
+ p = event.oid
fs_conn = self.getConnection(event)
fs_conn.writeNodeType(p, 'f')
text = fs_conn.readData(p, allow_missing=1)
=== Products/Ape/lib/apelib/fs/structure.py 1.4 => 1.5 ===
--- Products/Ape/lib/apelib/fs/structure.py:1.4 Wed Jul 30 17:33:02 2003
+++ Products/Ape/lib/apelib/fs/structure.py Mon Feb 2 10:07:20 2004
@@ -18,7 +18,7 @@
from types import StringType
-from apelib.core.interfaces import IGateway
+from apelib.core.interfaces import IGateway, LoadError
from apelib.core.schemas import FieldSchema, RowSequenceSchema
from base import FSGatewayBase
@@ -42,7 +42,7 @@
def load(self, event):
c = self.getConnection(event)
- p = event.getKey()
+ p = event.oid
assert c.readNodeType(p) == 'f'
state = c.readData(p, as_text=self.text)
return state, state
@@ -51,7 +51,7 @@
if not isinstance(state, StringType):
raise ValueError('Not a string: %s' % repr(state))
c = self.getConnection(event)
- p = event.getKey()
+ p = event.oid
c.writeNodeType(p, 'f')
c.writeData(p, state, as_text=self.text)
return state
@@ -65,12 +65,12 @@
schema = FieldSchema('id', 'string')
def getIdFrom(self, event):
- path = event.getKey()
- pos = path.rfind('/')
+ p = event.oid
+ pos = p.rfind('/')
if pos >= 0:
- return path[pos + 1:]
+ return p[pos + 1:]
else:
- return path
+ return p
def load(self, event):
id = self.getIdFrom(event)
@@ -79,13 +79,13 @@
def store(self, event, state):
id = self.getIdFrom(event)
if state != id:
- raise ValueError('Mismatched file ID')
+ raise ValueError('Mismatched object name: %s != %s' %
+ (state, id))
return id
- def getSources(self, event):
+ def getPollSources(self, event):
fs_conn = self.getConnection(event)
- return fs_conn.getSources(event.getKey())
-
+ return fs_conn.getPollSources(event.oid)
class FSDirectoryItems (FSGatewayBase):
@@ -94,36 +94,36 @@
__implements__ = IGateway
schema = RowSequenceSchema()
- schema.addField('id', 'string', 1)
- schema.addField('keychain', 'keychain')
+ schema.addField('key', 'string', 1)
+ schema.addField('oid', 'string')
def load(self, event):
- p = event.getKey()
+ p = event.oid
c = self.getConnection(event)
assert c.readNodeType(p) == 'd'
- names = c.readData(p)
+ names = c.readDirectory(p)
names.sort()
res = []
for name in names:
- keychain = event.makeKeychain(name, 0)
- res.append((name, keychain))
+ oid = event.conf.oid_gen.new_oid(event, name, False)
+ res.append((name, oid))
res = tuple(res)
return res, res
def store(self, event, state):
- p = event.getKey()
+ p = event.oid
c = self.getConnection(event)
c.writeNodeType(p, 'd')
state = list(state)
state.sort()
if __debug__:
- for name, keychain in state:
- expect = event.makeKeychain(name, 0)
- assert expect == keychain, (
- "Child of %s named %s must use keychain %s, but used %s" %
- (event.getKeychain(), name, expect, keychain))
+ for name, oid in state:
+ expect = event.conf.oid_gen.new_oid(event, name, False)
+ assert expect == oid, (
+ "Child of %s named %s must use OID %s, but used %s" %
+ (event.oid, name, expect, oid))
names = [row[0] for row in state]
- c.writeData(p, names)
+ c.writeDirectory(p, names)
return tuple(state)
@@ -135,7 +135,7 @@
schema = FieldSchema('mtime', 'int')
def load(self, event):
- p = event.getKey()
+ p = event.oid
fs_conn = self.getConnection(event)
state = long(fs_conn.getModTime(p))
return state, None # Use None as the hash (see store())
@@ -145,3 +145,57 @@
# time of a file. Ignore by returning None as the hash.
return None
+
+class RootDirectoryItems (FSGatewayBase):
+ """Read/write the root object.
+
+ The root object is stored as a normal directory with one special feature:
+ the name 'Application' is always present and points to the OID '/'. This
+ allows the root object to be stored inside the application object.
+ """
+
+ __implements__ = IGateway
+
+ schema = RowSequenceSchema()
+ schema.addField('key', 'string', 1)
+ schema.addField('oid', 'string')
+
+ def load(self, event):
+ p = event.oid
+ c = self.getConnection(event)
+ try:
+ t = c.readNodeType(p)
+ except LoadError:
+ # The root object doesn't exist, but it's reasonable
+ # to infer a state anyway.
+ names = []
+ else:
+ assert t == 'd', 'The root object must be a directory'
+ names = c.readDirectory(p)
+ names.sort()
+ res = [('Application', '/')]
+ for name in names:
+ if name != 'Application':
+ oid = event.conf.oid_gen.new_oid(event, name, False)
+ res.append((name, oid))
+ res = tuple(res)
+ return res, res
+
+ def store(self, event, state):
+ p = event.oid
+ c = self.getConnection(event)
+ c.writeNodeType(p, 'd')
+ state = list(state)
+ state.sort()
+ names = []
+ for name, oid in state:
+ if name == 'Application':
+ expect = '/'
+ else:
+ expect = event.conf.oid_gen.new_oid(event, name, False)
+ names.append(name)
+ assert expect == oid, (
+ "Child of %s named %s must use OID %s, but used %s" %
+ (event.oid, name, expect, oid))
+ c.writeDirectory(p, names)
+ return tuple(state)
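For example, with a filesystem root containing 'Application' and a
hypothetical 'temp_folder', RootDirectoryItems.load() returns a state along
the lines of

    (('Application', '/'), ('temp_folder', <oid for temp_folder>))

where the second OID comes from event.conf.oid_gen.new_oid(); the exact form
of generated OIDs depends on the OID generator, which is not part of this
checkin.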
=== Removed File Products/Ape/lib/apelib/fs/cache.py ===
=== Removed File Products/Ape/lib/apelib/fs/exceptions.py ===