[Zope-Checkins] CVS: ZODB3/Tools - zodbload.py:1.2
zeoserverlog.py:1.2 timeout.py:1.2 README.txt:1.2
zeoup.py:1.14 zeoreplay.py:1.4 zeoqueue.py:1.5 zeopack.py:1.9
repozo.py:1.6 parsezeolog.py:1.5 netspace.py:1.2
migrate.py:1.2 fstest.py:1.10 fsrefs.py:1.8
checkbtrees.py:1.2 analyze.py:1.2 space.py:NONE
Jeremy Hylton
jeremy at zope.com
Mon Sep 15 12:29:51 EDT 2003
Update of /cvs-repository/ZODB3/Tools
In directory cvs.zope.org:/tmp/cvs-serv29167/Tools
Modified Files:
zeoup.py zeoreplay.py zeoqueue.py zeopack.py repozo.py
parsezeolog.py netspace.py migrate.py fstest.py fsrefs.py
checkbtrees.py analyze.py
Added Files:
zodbload.py zeoserverlog.py timeout.py README.txt
Removed Files:
space.py
Log Message:
Merge changes from ZODB3-3_2-branch to Zope-2_7-branch.
Please make all future changes on the Zope-2_7-branch instead.
=== ZODB3/Tools/zodbload.py 1.1 => 1.2 ===
--- /dev/null Mon Sep 15 12:29:51 2003
+++ ZODB3/Tools/zodbload.py Mon Sep 15 12:29:19 2003
@@ -0,0 +1,768 @@
+#!python
+##############################################################################
+#
+# Copyright (c) 2003 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Test script for testing ZODB under a heavy zope-like load.
+
+Note that, to be as realistic as possible with ZEO, you should run this
+script multiple times, to simulate multiple clients.
+
+Here's how this works.
+
+The script starts some number of threads. Each thread sequentially
+executes jobs supplied by a job producer.
+
+Input data are provided by a mail producer that hands out messages from
+a mailbox.
+
+Execution continues until there is an error, which will normally occur
+when the mailbox is exhausted.
+
+Command-line options are used to provide job definitions. Job
+definitions have parameters of the form name=value. Jobs have two
+standard parameters:
+
+ frequency=integer
+
+ The frequency of the job. The default is 1.
+
+ sleep=float
+
+    The number of seconds to sleep before performing the job. The
+ default is 0.
+
+Usage: zodbload.py [options]
+
+ Options:
+
+ -edit [frequency=integer] [sleep=float]
+
+ Define an edit job. An edit job edits a random already-saved
+ email message, deleting and inserting a random number of words.
+
+ After editing the message, the message is (re)cataloged.
+
+ -insert [number=int] [frequency=integer] [sleep=float]
+
+ Insert some number of email messages.
+
+ -index [number=int] [frequency=integer] [sleep=float]
+
+ Insert and index (catalog) some number of email messages.
+
+ -search [terms='word1 word2 ...'] [frequency=integer] [sleep=float]
+
+      Search the catalog. A query is given with one or more terms, as
+      would be entered into a typical search box. If no query is
+      given, queries are selected at random from a built-in word
+      list.
+
+ -setup
+
+ Set up the database. This will delete any existing Data.fs
+      file. (Of course, this may have no effect if there is a
+      custom_zodb that defines a different storage.) It also adds a
+ mail folder and a catalog.
+
+ -options file
+
+      Read options from the given file. The file should be a Python
+ source file that defines a sequence of options named 'options'.
+
+ -threads n
+
+      Specify the number of threads to execute. If not specified, or
+      fewer than 2, jobs are run in a single (main) thread.
+
+ -mbox filename
+
+ Specify the mailbox for getting input data.
+
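+For example, hypothetical invocations that first set up a fresh
+database and then run a mix of jobs against it (the mailbox path is
+illustrative):
+
+    zodbload.py -setup -mbox /tmp/mail.mbox
+    zodbload.py -mbox /tmp/mail.mbox -threads 4 \
+        -insert number=10 frequency=10 -search frequency=5 -edit
+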
+$Id$
+"""
+
+import mailbox
+import math
+import os
+import random
+import re
+import sys
+import threading
+import time
+
+class JobProducer:
+
+ def __init__(self):
+ self.jobs = []
+
+ def add(self, callable, frequency, sleep, repeatp=0):
+ self.jobs.extend([(callable, sleep, repeatp)] * int(frequency))
+ random.shuffle(self.jobs)
+
+ def next(self):
+ factory, sleep, repeatp = random.choice(self.jobs)
+ time.sleep(sleep)
+ callable, args = factory.create()
+ return factory, callable, args, repeatp
+
+ def __nonzero__(self):
+ return not not self.jobs
+
+
+
+class MBox:
+
+ def __init__(self, filename):
+ if ' ' in filename:
+ filename, min, max = filename.split()
+ min = int(min)
+ max = int(max)
+ else:
+ min = max = 0
+
+ if filename.endswith('.bz2'):
+ f = os.popen("bunzip2 <"+filename, 'r')
+            filename = filename[:-4]  # strip the .bz2 suffix
+ else:
+ f = open(filename)
+
+ self._mbox = mb = mailbox.UnixMailbox(f)
+
+ self.number = min
+ while min:
+ mb.next()
+ min -= 1
+
+ self._lock = threading.Lock()
+ self.__name__ = os.path.splitext(os.path.split(filename)[1])[0]
+ self._max = max
+
+ def next(self):
+ self._lock.acquire()
+ try:
+ if self._max > 0 and self.number >= self._max:
+ raise IndexError(self.number + 1)
+ message = self._mbox.next()
+ message.body = message.fp.read()
+ message.headers = list(message.headers)
+ self.number += 1
+ message.number = self.number
+ message.mbox = self.__name__
+ return message
+ finally:
+ self._lock.release()
+
+bins = 9973
+#bins = 11
+def mailfolder(app, mboxname, number):
+ mail = getattr(app, mboxname, None)
+ if mail is None:
+ app.manage_addFolder(mboxname)
+ mail = getattr(app, mboxname)
+ from BTrees.Length import Length
+ mail.length = Length()
+ for i in range(bins):
+ mail.manage_addFolder('b'+str(i))
+ bin = hash(str(number))%bins
+ return getattr(mail, 'b'+str(bin))
+
+
+def VmSize():
+
+ try:
+ f = open('/proc/%s/status' % os.getpid())
+ except:
+ return 0
+ else:
+ l = filter(lambda l: l[:7] == 'VmSize:', f.readlines())
+ if l:
+ l = l[0][7:].strip().split()[0]
+ return int(l)
+ return 0
+
+def setup(lib_python):
+ try:
+ os.remove(os.path.join(lib_python, '..', '..', 'var', 'Data.fs'))
+ except:
+ pass
+ import Zope
+ import Products
+ import AccessControl.SecurityManagement
+ app=Zope.app()
+
+ Products.ZCatalog.ZCatalog.manage_addZCatalog(app, 'cat', '')
+
+ from Products.ZCTextIndex.ZCTextIndex import PLexicon
+ from Products.ZCTextIndex.Lexicon import Splitter, CaseNormalizer
+
+ app.cat._setObject('lex',
+ PLexicon('lex', '', Splitter(), CaseNormalizer())
+ )
+
+ class extra:
+ doc_attr = 'PrincipiaSearchSource'
+ lexicon_id = 'lex'
+ index_type = 'Okapi BM25 Rank'
+
+ app.cat.addIndex('PrincipiaSearchSource', 'ZCTextIndex', extra)
+
+ get_transaction().commit()
+
+ system = AccessControl.SpecialUsers.system
+ AccessControl.SecurityManagement.newSecurityManager(None, system)
+
+ app._p_jar.close()
+
+def do(db, f, args):
+ """Do something in a transaction, retrying of necessary
+
+ Measure the speed of both the compurartion and the commit
+ """
+ from ZODB.POSException import ConflictError
+ wcomp = ccomp = wcommit = ccommit = 0.0
+ rconflicts = wconflicts = 0
+ start = time.time()
+
+ while 1:
+ connection = db.open()
+ try:
+ get_transaction().begin()
+ t=time.time()
+ c=time.clock()
+ try:
+ try:
+ r = f(connection, *args)
+ except ConflictError:
+ rconflicts += 1
+ get_transaction().abort()
+ continue
+ finally:
+ wcomp += time.time() - t
+ ccomp += time.clock() - c
+
+ t=time.time()
+ c=time.clock()
+ try:
+ try:
+ get_transaction().commit()
+ break
+ except ConflictError:
+ wconflicts += 1
+ get_transaction().abort()
+ continue
+ finally:
+ wcommit += time.time() - t
+ ccommit += time.clock() - c
+ finally:
+ connection.close()
+
+ return start, wcomp, ccomp, rconflicts, wconflicts, wcommit, ccommit, r
+
+def run1(tid, db, factory, job, args):
+ (start, wcomp, ccomp, rconflicts, wconflicts, wcommit, ccommit, r
+ ) = do(db, job, args)
+ start = "%.4d-%.2d-%.2d %.2d:%.2d:%.2d" % time.localtime(start)[:6]
+ print "%s %s %8.3g %8.3g %s %s\t%8.3g %8.3g %s %r" % (
+ start, tid, wcomp, ccomp, rconflicts, wconflicts, wcommit, ccommit,
+ factory.__name__, r)
+
+def run(jobs, tid=''):
+ import Zope
+ while 1:
+ factory, job, args, repeatp = jobs.next()
+ run1(tid, Zope.DB, factory, job, args)
+ if repeatp:
+ while 1:
+ i = random.randint(0,100)
+ if i > repeatp:
+ break
+ run1(tid, Zope.DB, factory, job, args)
+
+
+def index(connection, messages, catalog):
+ app = connection.root()['Application']
+ for message in messages:
+ mail = mailfolder(app, message.mbox, message.number)
+ docid = 'm'+str(message.number)
+ mail.manage_addDTMLDocument(docid, file=message.body)
+
+        # increment the message count for this mailbox
+ getattr(app, message.mbox).length.change(1)
+
+ doc = mail[docid]
+ for h in message.headers:
+ h = h.strip()
+ l = h.find(':')
+ if l <= 0:
+ continue
+ name = h[:l].lower()
+ if name=='subject':
+ name='title'
+ v = h[l+1:].strip()
+ type='string'
+
+ if name=='title':
+ doc.manage_changeProperties(title=h)
+ else:
+ try:
+ doc.manage_addProperty(name, v, type)
+ except:
+ pass
+ if catalog:
+ app.cat.catalog_object(doc)
+
+ return message.number
+
+class IndexJob:
+ needs_mbox = 1
+ catalog = 1
+ prefix = 'index'
+
+ def __init__(self, mbox, number=1):
+ self.__name__ = "%s%s_%s" % (self.prefix, number, mbox.__name__)
+ self.mbox, self.number = mbox, int(number)
+
+ def create(self):
+ messages = [self.mbox.next() for i in range(self.number)]
+ return index, (messages, self.catalog)
+
+
+class InsertJob(IndexJob):
+ catalog = 0
+ prefix = 'insert'
+
+wordre = re.compile(r'(\w{3,20})')
+stop = 'and', 'not'
+def edit(connection, mbox, catalog=1):
+ app = connection.root()['Application']
+ mail = getattr(app, mbox.__name__, None)
+ if mail is None:
+ time.sleep(1)
+ return "No mailbox %s" % mbox.__name__
+
+ nmessages = mail.length()
+ if nmessages < 2:
+ time.sleep(1)
+ return "No messages to edit in %s" % mbox.__name__
+
+ # find a message to edit:
+ while 1:
+ number = random.randint(1, nmessages-1)
+ did = 'm' + str(number)
+
+ mail = mailfolder(app, mbox.__name__, number)
+ doc = getattr(mail, did, None)
+ if doc is not None:
+ break
+
+ text = doc.raw.split()
+ norig = len(text)
+ if norig > 10:
+ ndel = int(math.exp(random.randint(0, int(math.log(norig)))))
+ nins = int(math.exp(random.randint(0, int(math.log(norig)))))
+ else:
+ ndel = 0
+ nins = 10
+
+ for j in range(ndel):
+ j = random.randint(0,len(text)-1)
+ word = text[j]
+ m = wordre.search(word)
+ if m:
+ word = m.group(1).lower()
+ if (not wordsd.has_key(word)) and word not in stop:
+ words.append(word)
+ wordsd[word] = 1
+ del text[j]
+
+ for j in range(nins):
+ word = random.choice(words)
+ text.append(word)
+
+ doc.raw = ' '.join(text)
+
+ if catalog:
+ app.cat.catalog_object(doc)
+
+ return norig, ndel, nins
+
+class EditJob:
+ needs_mbox = 1
+ prefix = 'edit'
+ catalog = 1
+
+ def __init__(self, mbox):
+ self.__name__ = "%s_%s" % (self.prefix, mbox.__name__)
+ self.mbox = mbox
+
+ def create(self):
+ return edit, (self.mbox, self.catalog)
+
+class ModifyJob(EditJob):
+ prefix = 'modify'
+ catalog = 0
+
+
+def search(connection, terms, number):
+ app = connection.root()['Application']
+ cat = app.cat
+ n = 0
+
+ for i in number:
+ term = random.choice(terms)
+
+ results = cat(PrincipiaSearchSource=term)
+ n += len(results)
+ for result in results:
+ did = result.getObject().getId()
+
+ return n
+
+class SearchJob:
+
+ def __init__(self, terms='', number=10):
+
+ if terms:
+ terms = terms.split()
+ self.__name__ = "search_" + '_'.join(terms)
+ self.terms = terms
+ else:
+ self.__name__ = 'search'
+ self.terms = words
+
+ number = min(int(number), len(self.terms))
+ self.number = range(number)
+
+ def create(self):
+ return search, (self.terms, self.number)
+
+
+words=['banishment', 'indirectly', 'imprecise', 'peeks',
+'opportunely', 'bribe', 'sufficiently', 'Occidentalized', 'elapsing',
+'fermenting', 'listen', 'orphanage', 'younger', 'draperies', 'Ida',
+'cuttlefish', 'mastermind', 'Michaels', 'populations', 'lent',
+'cater', 'attentional', 'hastiness', 'dragnet', 'mangling',
+'scabbards', 'princely', 'star', 'repeat', 'deviation', 'agers',
+'fix', 'digital', 'ambitious', 'transit', 'jeeps', 'lighted',
+'Prussianizations', 'Kickapoo', 'virtual', 'Andrew', 'generally',
+'boatsman', 'amounts', 'promulgation', 'Malay', 'savaging',
+'courtesan', 'nursed', 'hungered', 'shiningly', 'ship', 'presides',
+'Parke', 'moderns', 'Jonas', 'unenlightening', 'dearth', 'deer',
+'domesticates', 'recognize', 'gong', 'penetrating', 'dependents',
+'unusually', 'complications', 'Dennis', 'imbalances', 'nightgown',
+'attached', 'testaments', 'congresswoman', 'circuits', 'bumpers',
+'braver', 'Boreas', 'hauled', 'Howe', 'seethed', 'cult', 'numismatic',
+'vitality', 'differences', 'collapsed', 'Sandburg', 'inches', 'head',
+'rhythmic', 'opponent', 'blanketer', 'attorneys', 'hen', 'spies',
+'indispensably', 'clinical', 'redirection', 'submit', 'catalysts',
+'councilwoman', 'kills', 'topologies', 'noxious', 'exactions',
+'dashers', 'balanced', 'slider', 'cancerous', 'bathtubs', 'legged',
+'respectably', 'crochets', 'absenteeism', 'arcsine', 'facility',
+'cleaners', 'bobwhite', 'Hawkins', 'stockade', 'provisional',
+'tenants', 'forearms', 'Knowlton', 'commit', 'scornful',
+'pediatrician', 'greets', 'clenches', 'trowels', 'accepts',
+'Carboloy', 'Glenn', 'Leigh', 'enroll', 'Madison', 'Macon', 'oiling',
+'entertainingly', 'super', 'propositional', 'pliers', 'beneficiary',
+'hospitable', 'emigration', 'sift', 'sensor', 'reserved',
+'colonization', 'shrilled', 'momentously', 'stevedore', 'Shanghaiing',
+'schoolmasters', 'shaken', 'biology', 'inclination', 'immoderate',
+'stem', 'allegory', 'economical', 'daytime', 'Newell', 'Moscow',
+'archeology', 'ported', 'scandals', 'Blackfoot', 'leery', 'kilobit',
+'empire', 'obliviousness', 'productions', 'sacrificed', 'ideals',
+'enrolling', 'certainties', 'Capsicum', 'Brookdale', 'Markism',
+'unkind', 'dyers', 'legislates', 'grotesquely', 'megawords',
+'arbitrary', 'laughing', 'wildcats', 'thrower', 'sex', 'devils',
+'Wehr', 'ablates', 'consume', 'gossips', 'doorways', 'Shari',
+'advanced', 'enumerable', 'existentially', 'stunt', 'auctioneers',
+'scheduler', 'blanching', 'petulance', 'perceptibly', 'vapors',
+'progressed', 'rains', 'intercom', 'emergency', 'increased',
+'fluctuating', 'Krishna', 'silken', 'reformed', 'transformation',
+'easter', 'fares', 'comprehensible', 'trespasses', 'hallmark',
+'tormenter', 'breastworks', 'brassiere', 'bladders', 'civet', 'death',
+'transformer', 'tolerably', 'bugle', 'clergy', 'mantels', 'satin',
+'Boswellizes', 'Bloomington', 'notifier', 'Filippo', 'circling',
+'unassigned', 'dumbness', 'sentries', 'representativeness', 'souped',
+'Klux', 'Kingstown', 'gerund', 'Russell', 'splices', 'bellow',
+'bandies', 'beefers', 'cameramen', 'appalled', 'Ionian', 'butterball',
+'Portland', 'pleaded', 'admiringly', 'pricks', 'hearty', 'corer',
+'deliverable', 'accountably', 'mentors', 'accorded',
+'acknowledgement', 'Lawrenceville', 'morphology', 'eucalyptus',
+'Rena', 'enchanting', 'tighter', 'scholars', 'graduations', 'edges',
+'Latinization', 'proficiency', 'monolithic', 'parenthesizing', 'defy',
+'shames', 'enjoyment', 'Purdue', 'disagrees', 'barefoot', 'maims',
+'flabbergast', 'dishonorable', 'interpolation', 'fanatics', 'dickens',
+'abysses', 'adverse', 'components', 'bowl', 'belong', 'Pipestone',
+'trainees', 'paw', 'pigtail', 'feed', 'whore', 'conditioner',
+'Volstead', 'voices', 'strain', 'inhabits', 'Edwin', 'discourses',
+'deigns', 'cruiser', 'biconvex', 'biking', 'depreciation', 'Harrison',
+'Persian', 'stunning', 'agar', 'rope', 'wagoner', 'elections',
+'reticulately', 'Cruz', 'pulpits', 'wilt', 'peels', 'plants',
+'administerings', 'deepen', 'rubs', 'hence', 'dissension', 'implored',
+'bereavement', 'abyss', 'Pennsylvania', 'benevolent', 'corresponding',
+'Poseidon', 'inactive', 'butchers', 'Mach', 'woke', 'loading',
+'utilizing', 'Hoosier', 'undo', 'Semitization', 'trigger', 'Mouthe',
+'mark', 'disgracefully', 'copier', 'futility', 'gondola', 'algebraic',
+'lecturers', 'sponged', 'instigators', 'looted', 'ether', 'trust',
+'feeblest', 'sequencer', 'disjointness', 'congresses', 'Vicksburg',
+'incompatibilities', 'commend', 'Luxembourg', 'reticulation',
+'instructively', 'reconstructs', 'bricks', 'attache', 'Englishman',
+'provocation', 'roughen', 'cynic', 'plugged', 'scrawls', 'antipode',
+'injected', 'Daedalus', 'Burnsides', 'asker', 'confronter',
+'merriment', 'disdain', 'thicket', 'stinker', 'great', 'tiers',
+'oust', 'antipodes', 'Macintosh', 'tented', 'packages',
+'Mediterraneanize', 'hurts', 'orthodontist', 'seeder', 'readying',
+'babying', 'Florida', 'Sri', 'buckets', 'complementary',
+'cartographer', 'chateaus', 'shaves', 'thinkable', 'Tehran',
+'Gordian', 'Angles', 'arguable', 'bureau', 'smallest', 'fans',
+'navigated', 'dipole', 'bootleg', 'distinctive', 'minimization',
+'absorbed', 'surmised', 'Malawi', 'absorbent', 'close', 'conciseness',
+'hopefully', 'declares', 'descent', 'trick', 'portend', 'unable',
+'mildly', 'Morse', 'reference', 'scours', 'Caribbean', 'battlers',
+'astringency', 'likelier', 'Byronizes', 'econometric', 'grad',
+'steak', 'Austrian', 'ban', 'voting', 'Darlington', 'bison', 'Cetus',
+'proclaim', 'Gilbertson', 'evictions', 'submittal', 'bearings',
+'Gothicizer', 'settings', 'McMahon', 'densities', 'determinants',
+'period', 'DeKastere', 'swindle', 'promptness', 'enablers', 'wordy',
+'during', 'tables', 'responder', 'baffle', 'phosgene', 'muttering',
+'limiters', 'custodian', 'prevented', 'Stouffer', 'waltz', 'Videotex',
+'brainstorms', 'alcoholism', 'jab', 'shouldering', 'screening',
+'explicitly', 'earner', 'commandment', 'French', 'scrutinizing',
+'Gemma', 'capacitive', 'sheriff', 'herbivore', 'Betsey', 'Formosa',
+'scorcher', 'font', 'damming', 'soldiers', 'flack', 'Marks',
+'unlinking', 'serenely', 'rotating', 'converge', 'celebrities',
+'unassailable', 'bawling', 'wording', 'silencing', 'scotch',
+'coincided', 'masochists', 'graphs', 'pernicious', 'disease',
+'depreciates', 'later', 'torus', 'interject', 'mutated', 'causer',
+'messy', 'Bechtel', 'redundantly', 'profoundest', 'autopsy',
+'philosophic', 'iterate', 'Poisson', 'horridly', 'silversmith',
+'millennium', 'plunder', 'salmon', 'missioner', 'advances', 'provers',
+'earthliness', 'manor', 'resurrectors', 'Dahl', 'canto', 'gangrene',
+'gabler', 'ashore', 'frictionless', 'expansionism', 'emphasis',
+'preservations', 'Duane', 'descend', 'isolated', 'firmware',
+'dynamites', 'scrawled', 'cavemen', 'ponder', 'prosperity', 'squaw',
+'vulnerable', 'opthalmic', 'Simms', 'unite', 'totallers', 'Waring',
+'enforced', 'bridge', 'collecting', 'sublime', 'Moore', 'gobble',
+'criticizes', 'daydreams', 'sedate', 'apples', 'Concordia',
+'subsequence', 'distill', 'Allan', 'seizure', 'Isadore', 'Lancashire',
+'spacings', 'corresponded', 'hobble', 'Boonton', 'genuineness',
+'artifact', 'gratuities', 'interviewee', 'Vladimir', 'mailable',
+'Bini', 'Kowalewski', 'interprets', 'bereave', 'evacuated', 'friend',
+'tourists', 'crunched', 'soothsayer', 'fleetly', 'Romanizations',
+'Medicaid', 'persevering', 'flimsy', 'doomsday', 'trillion',
+'carcasses', 'guess', 'seersucker', 'ripping', 'affliction',
+'wildest', 'spokes', 'sheaths', 'procreate', 'rusticates', 'Schapiro',
+'thereafter', 'mistakenly', 'shelf', 'ruination', 'bushel',
+'assuredly', 'corrupting', 'federation', 'portmanteau', 'wading',
+'incendiary', 'thing', 'wanderers', 'messages', 'Paso', 'reexamined',
+'freeings', 'denture', 'potting', 'disturber', 'laborer', 'comrade',
+'intercommunicating', 'Pelham', 'reproach', 'Fenton', 'Alva', 'oasis',
+'attending', 'cockpit', 'scout', 'Jude', 'gagging', 'jailed',
+'crustaceans', 'dirt', 'exquisitely', 'Internet', 'blocker', 'smock',
+'Troutman', 'neighboring', 'surprise', 'midscale', 'impart',
+'badgering', 'fountain', 'Essen', 'societies', 'redresses',
+'afterwards', 'puckering', 'silks', 'Blakey', 'sequel', 'greet',
+'basements', 'Aubrey', 'helmsman', 'album', 'wheelers', 'easternmost',
+'flock', 'ambassadors', 'astatine', 'supplant', 'gird', 'clockwork',
+'foxes', 'rerouting', 'divisional', 'bends', 'spacer',
+'physiologically', 'exquisite', 'concerts', 'unbridled', 'crossing',
+'rock', 'leatherneck', 'Fortescue', 'reloading', 'Laramie', 'Tim',
+'forlorn', 'revert', 'scarcer', 'spigot', 'equality', 'paranormal',
+'aggrieves', 'pegs', 'committeewomen', 'documented', 'interrupt',
+'emerald', 'Battelle', 'reconverted', 'anticipated', 'prejudices',
+'drowsiness', 'trivialities', 'food', 'blackberries', 'Cyclades',
+'tourist', 'branching', 'nugget', 'Asilomar', 'repairmen', 'Cowan',
+'receptacles', 'nobler', 'Nebraskan', 'territorial', 'chickadee',
+'bedbug', 'darted', 'vigilance', 'Octavia', 'summands', 'policemen',
+'twirls', 'style', 'outlawing', 'specifiable', 'pang', 'Orpheus',
+'epigram', 'Babel', 'butyrate', 'wishing', 'fiendish', 'accentuate',
+'much', 'pulsed', 'adorned', 'arbiters', 'counted', 'Afrikaner',
+'parameterizes', 'agenda', 'Americanism', 'referenda', 'derived',
+'liquidity', 'trembling', 'lordly', 'Agway', 'Dillon', 'propellers',
+'statement', 'stickiest', 'thankfully', 'autograph', 'parallel',
+'impulse', 'Hamey', 'stylistic', 'disproved', 'inquirer', 'hoisting',
+'residues', 'variant', 'colonials', 'dequeued', 'especial', 'Samoa',
+'Polaris', 'dismisses', 'surpasses', 'prognosis', 'urinates',
+'leaguers', 'ostriches', 'calculative', 'digested', 'divided',
+'reconfigurer', 'Lakewood', 'illegalities', 'redundancy',
+'approachability', 'masterly', 'cookery', 'crystallized', 'Dunham',
+'exclaims', 'mainline', 'Australianizes', 'nationhood', 'pusher',
+'ushers', 'paranoia', 'workstations', 'radiance', 'impedes',
+'Minotaur', 'cataloging', 'bites', 'fashioning', 'Alsop', 'servants',
+'Onondaga', 'paragraph', 'leadings', 'clients', 'Latrobe',
+'Cornwallis', 'excitingly', 'calorimetric', 'savior', 'tandem',
+'antibiotics', 'excuse', 'brushy', 'selfish', 'naive', 'becomes',
+'towers', 'popularizes', 'engender', 'introducing', 'possession',
+'slaughtered', 'marginally', 'Packards', 'parabola', 'utopia',
+'automata', 'deterrent', 'chocolates', 'objectives', 'clannish',
+'aspirin', 'ferociousness', 'primarily', 'armpit', 'handfuls',
+'dangle', 'Manila', 'enlivened', 'decrease', 'phylum', 'hardy',
+'objectively', 'baskets', 'chaired', 'Sepoy', 'deputy', 'blizzard',
+'shootings', 'breathtaking', 'sticking', 'initials', 'epitomized',
+'Forrest', 'cellular', 'amatory', 'radioed', 'horrified', 'Neva',
+'simultaneous', 'delimiter', 'expulsion', 'Himmler', 'contradiction',
+'Remus', 'Franklinizations', 'luggage', 'moisture', 'Jews',
+'comptroller', 'brevity', 'contradictions', 'Ohio', 'active',
+'babysit', 'China', 'youngest', 'superstition', 'clawing', 'raccoons',
+'chose', 'shoreline', 'helmets', 'Jeffersonian', 'papered',
+'kindergarten', 'reply', 'succinct', 'split', 'wriggle', 'suitcases',
+'nonce', 'grinders', 'anthem', 'showcase', 'maimed', 'blue', 'obeys',
+'unreported', 'perusing', 'recalculate', 'rancher', 'demonic',
+'Lilliputianize', 'approximation', 'repents', 'yellowness',
+'irritates', 'Ferber', 'flashlights', 'booty', 'Neanderthal',
+'someday', 'foregoes', 'lingering', 'cloudiness', 'guy', 'consumer',
+'Berkowitz', 'relics', 'interpolating', 'reappearing', 'advisements',
+'Nolan', 'turrets', 'skeletal', 'skills', 'mammas', 'Winsett',
+'wheelings', 'stiffen', 'monkeys', 'plainness', 'braziers', 'Leary',
+'advisee', 'jack', 'verb', 'reinterpret', 'geometrical', 'trolleys',
+'arboreal', 'overpowered', 'Cuzco', 'poetical', 'admirations',
+'Hobbes', 'phonemes', 'Newsweek', 'agitator', 'finally', 'prophets',
+'environment', 'easterners', 'precomputed', 'faults', 'rankly',
+'swallowing', 'crawl', 'trolley', 'spreading', 'resourceful', 'go',
+'demandingly', 'broader', 'spiders', 'Marsha', 'debris', 'operates',
+'Dundee', 'alleles', 'crunchier', 'quizzical', 'hanging', 'Fisk']
+
+wordsd = {}
+for word in words:
+ wordsd[word] = 1
+
+
+def collect_options(args, jobs, options):
+
+ while args:
+ arg = args.pop(0)
+ if arg.startswith('-'):
+ name = arg[1:]
+ if name == 'options':
+ fname = args.pop(0)
+ d = {}
+ execfile(fname, d)
+ collect_options(list(d['options']), jobs, options)
+ elif options.has_key(name):
+ v = args.pop(0)
+ if options[name] != None:
+ raise ValueError(
+ "Duplicate values for %s, %s and %s"
+ % (name, v, options[name])
+ )
+ options[name] = v
+ elif name == 'setup':
+ options['setup'] = 1
+ elif globals().has_key(name.capitalize()+'Job'):
+ job = name
+ kw = {}
+ while args and args[0].find("=") > 0:
+ arg = args.pop(0).split('=')
+ name, v = arg[0], '='.join(arg[1:])
+ if kw.has_key(name):
+ raise ValueError(
+ "Duplicate parameter %s for job %s"
+ % (name, job)
+ )
+ kw[name]=v
+ if kw.has_key('frequency'):
+ frequency = kw['frequency']
+ del kw['frequency']
+ else:
+ frequency = 1
+
+ if kw.has_key('sleep'):
+ sleep = float(kw['sleep'])
+ del kw['sleep']
+ else:
+ sleep = 0.0001
+
+ if kw.has_key('repeat'):
+ repeatp = float(kw['repeat'])
+ del kw['repeat']
+ else:
+ repeatp = 0
+
+ jobs.append((job, kw, frequency, sleep, repeatp))
+ else:
+ raise ValueError("not an option or job", name)
+ else:
+ raise ValueError("Expected an option", arg)
+
+
+def find_lib_python():
+ for b in os.getcwd(), os.path.split(sys.argv[0])[0]:
+ for i in range(6):
+ d = ['..']*i + ['lib', 'python']
+ p = os.path.join(b, *d)
+ if os.path.isdir(p):
+ return p
+ raise ValueError("Couldn't find lib/python")
+
+def main(args=None):
+ lib_python = find_lib_python()
+ sys.path.insert(0, lib_python)
+
+ if args is None:
+ args = sys.argv[1:]
+ if not args:
+ print __doc__
+ sys.exit(0)
+
+ print args
+ random.seed(hash(tuple(args))) # always use the same for the given args
+
+ options = {"mbox": None, "threads": None}
+ jobdefs = []
+ collect_options(args, jobdefs, options)
+
+ mboxes = {}
+ if options["mbox"]:
+ mboxes[options["mbox"]] = MBox(options["mbox"])
+
+ if options.has_key('setup'):
+ setup(lib_python)
+ else:
+ import Zope
+ Zope.startup()
+
+ #from ThreadedAsync.LoopCallback import loop
+ #threading.Thread(target=loop, args=(), name='asyncore').start()
+
+ jobs = JobProducer()
+ for job, kw, frequency, sleep, repeatp in jobdefs:
+ Job = globals()[job.capitalize()+'Job']
+ if getattr(Job, 'needs_mbox', 0):
+ if not kw.has_key("mbox"):
+ if not options["mbox"]:
+ raise ValueError(
+ "no mailbox (mbox option) file specified")
+ kw['mbox'] = mboxes[options["mbox"]]
+ else:
+                if not mboxes.has_key(kw["mbox"]):
+                    mboxes[kw['mbox']] = MBox(kw['mbox'])
+ kw["mbox"] = mboxes[kw['mbox']]
+ jobs.add(Job(**kw), frequency, sleep, repeatp)
+
+ if not jobs:
+ print "No jobs to execute"
+ return
+
+ threads = int(options['threads'] or '0')
+ if threads > 1:
+ threads = [threading.Thread(target=run, args=(jobs, i), name=str(i))
+ for i in range(threads)]
+ for thread in threads:
+ thread.start()
+ for thread in threads:
+ thread.join()
+ else:
+ run(jobs)
+
+
+if __name__ == '__main__':
+ main()
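
The do() function above is the classic ZODB retry loop: run the
computation and the commit inside "while 1", aborting and retrying on
ConflictError. A minimal sketch of the same pattern with the timing
bookkeeping stripped out (the Data.fs path and the increment() helper
are illustrative only; get_transaction() is the builtin that importing
ZODB installs):

    import ZODB
    from ZODB.FileStorage import FileStorage
    from ZODB.POSException import ConflictError

    def run_with_retry(db, work):
        # Apply work() to an open connection and commit; on a
        # ConflictError in either step, abort and start over.
        while 1:
            connection = db.open()
            try:
                get_transaction().begin()
                try:
                    result = work(connection)
                    get_transaction().commit()
                    return result
                except ConflictError:
                    get_transaction().abort()
            finally:
                connection.close()

    def increment(connection):
        root = connection.root()
        root['counter'] = root.get('counter', 0) + 1
        return root['counter']

    db = ZODB.DB(FileStorage('Data.fs'))
    print run_with_retry(db, increment)
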
=== ZODB3/Tools/zeoserverlog.py 1.1 => 1.2 ===
--- /dev/null Mon Sep 15 12:29:51 2003
+++ ZODB3/Tools/zeoserverlog.py Mon Sep 15 12:29:19 2003
@@ -0,0 +1,532 @@
+#!python
+##############################################################################
+#
+# Copyright (c) 2003 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Tools for analyzing ZEO Server logs.
+
+This script contains a number of commands, implemented by command
+functions. To run a command, give the command name and it's arguments
+as arguments to this script.
+
+Commands:
+
+ blocked_times file threshold
+
+    Output a summary of episodes where transactions were blocked
+ when the episode lasted at least threshold seconds.
+
+ The file may be a file name or - to read from standard input.
+ The file may also be a command:
+
+ script blocked_times 'bunzip2 <foo.log.bz2' 60
+
+ If the file is a command, it must contain at least a single
+ space.
+
+ The columns of output are:
+
+ - The time the episode started
+
+ - The seconds from the start of the episode until the blocking
+ transaction finished.
+
+ - The client id (host and port) of the blocking transaction.
+
+ - The seconds from the start of the episode until the end of the
+ episode.
+
+ time_calls file threshold
+
+ Time how long calls took. Note that this is normally combined
+    with grep to time just a particular kind of call:
+
+ script time_calls 'bunzip2 <foo.log.bz2 | grep tpc_finish' 10
+
+ The columns of output are:
+
+ - The time of the call invocation
+
+ - The seconds from the call to the return
+
+ - The client that made the call.
+
+ time_trans file threshold
+
+ Output a summary of transactions that held the global transaction
+ lock for at least threshold seconds. (This is the time from when
+ voting starts until the transaction is completed by the server.)
+
+ The columns of output are:
+
+ - time that the vote started.
+
+ - client id
+
+ - number of objects written / number of objects updated
+
+ - seconds from tpc_begin to vote start
+
+ - seconds spent voting
+
+ - vote status: n=normal, d=delayed, e=error
+
+    - seconds waiting between vote return and finish call
+
+ - time spent finishing or 'abort' if the transaction aborted
+
+ minute file
+
+ Compute production statistics by minute
+
+ The columns of output are:
+
+ - date/time
+
+ - Number of active clients
+
+ - number of reads
+
+ - number of stores
+
+ - number of commits (finish)
+
+ - number of aborts
+
+ - number of transactions (commits + aborts)
+
+ Summary statistics are printed at the end
+
+ minutes file
+
+ Show just the summary statistics for production by minute.
+
+ hour file
+
+ Compute production statistics by hour
+
+ hours file
+
+ Show just the summary statistics for production by hour.
+
+ day file
+
+ Compute production statistics by day
+
+ days file
+
+ Show just the summary statistics for production by day.
+
+ verify file
+
+ Compute verification statistics
+
+ The columns of output are:
+
+ - client id
+ - verification start time
+     - number of objects verified
+ - wall time to verify
+     - average milliseconds to verify per object.
+
+$Id$
+"""
+
+import datetime, sys, re, os
+
+
+def time(line):
+ d = line[:10]
+ t = line[11:19]
+ y, mo, d = map(int, d.split('-'))
+ h, mi, s = map(int, t.split(':'))
+ return datetime.datetime(y, mo, d, h, mi, s)
+
+
+def sub(t1, t2):
+ delta = t2 - t1
+ return delta.days*86400.0+delta.seconds+delta.microseconds/1000000.0
+
+
+
+waitre = re.compile(r'Clients waiting: (\d+)')
+idre = re.compile(r' ZSS:\d+/(\d+.\d+.\d+.\d+:\d+) ')
+def blocked_times(args):
+ f, thresh = args
+
+ t1 = t2 = cid = blocking = waiting = 0
+ last_blocking = False
+
+ thresh = int(thresh)
+
+ for line in xopen(f):
+ line = line.strip()
+
+ if line.endswith('Blocked transaction restarted.'):
+ blocking = False
+ waiting = 0
+ else:
+ s = waitre.search(line)
+ if not s:
+ continue
+ waiting = int(s.group(1))
+ blocking = line.find(
+ 'Transaction blocked waiting for storage') >= 0
+
+ if blocking and waiting == 1:
+ t1 = time(line)
+ t2 = t1
+
+ if not blocking and last_blocking:
+ last_wait = 0
+ t2 = time(line)
+ cid = idre.search(line).group(1)
+
+ if waiting == 0:
+ d = sub(t1, time(line))
+ if d >= thresh:
+ print t1, sub(t1, t2), cid, d
+ t1 = t2 = cid = blocking = waiting = last_wait = max_wait = 0
+
+ last_blocking = blocking
+
+connidre = re.compile(r' zrpc-conn:(\d+.\d+.\d+.\d+:\d+) ')
+def time_calls(f):
+ f, thresh = f
+ if f == '-':
+ f = sys.stdin
+ else:
+ f = xopen(f)
+
+ thresh = float(thresh)
+ t1 = None
+ maxd = 0
+
+ for line in f:
+ line = line.strip()
+
+ if ' calling ' in line:
+ t1 = time(line)
+ elif ' returns ' in line and t1 is not None:
+ d = sub(t1, time(line))
+ if d >= thresh:
+ print t1, d, connidre.search(line).group(1)
+ maxd = max(maxd, d)
+ t1 = None
+
+ print maxd
+
+def xopen(f):
+ if f == '-':
+ return sys.stdin
+ if ' ' in f:
+ return os.popen(f, 'r')
+ return open(f)
+
+def time_tpc(f):
+ f, thresh = f
+ if f == '-':
+ f = sys.stdin
+ else:
+ f = xopen(f)
+
+ thresh = float(thresh)
+ transactions = {}
+
+ for line in f:
+ line = line.strip()
+
+ if ' calling vote(' in line:
+ cid = connidre.search(line).group(1)
+ transactions[cid] = time(line),
+ elif ' vote returns None' in line:
+ cid = connidre.search(line).group(1)
+ transactions[cid] += time(line), 'n'
+ elif ' vote() raised' in line:
+ cid = connidre.search(line).group(1)
+ transactions[cid] += time(line), 'e'
+ elif ' vote returns ' in line:
+ # delayed, skip
+ cid = connidre.search(line).group(1)
+ transactions[cid] += time(line), 'd'
+ elif ' calling tpc_abort(' in line:
+ cid = connidre.search(line).group(1)
+ if cid in transactions:
+ t1, t2, vs = transactions[cid]
+ t = time(line)
+ d = sub(t1, t)
+ if d >= thresh:
+ print 'a', t1, cid, sub(t1, t2), vs, sub(t2, t)
+ del transactions[cid]
+        elif ' calling tpc_finish(' in line:
+            cid = connidre.search(line).group(1)
+            if cid in transactions:
+                transactions[cid] += time(line),
+        elif ' tpc_finish returns ' in line:
+            cid = connidre.search(line).group(1)
+            if cid in transactions:
+ t1, t2, vs, t3 = transactions[cid]
+ t = time(line)
+ d = sub(t1, t)
+ if d >= thresh:
+ print 'c', t1, cid, sub(t1, t2), vs, sub(t2, t3), sub(t3, t)
+ del transactions[cid]
+
+
+newobre = re.compile(r"storea\(.*, '\\x00\\x00\\x00\\x00\\x00")
+def time_trans(f):
+ f, thresh = f
+ if f == '-':
+ f = sys.stdin
+ else:
+ f = xopen(f)
+
+ thresh = float(thresh)
+ transactions = {}
+
+ for line in f:
+ line = line.strip()
+
+ if ' calling tpc_begin(' in line:
+ cid = connidre.search(line).group(1)
+ transactions[cid] = time(line), [0, 0]
+ if ' calling storea(' in line:
+ cid = connidre.search(line).group(1)
+ if cid in transactions:
+ transactions[cid][1][0] += 1
+ if not newobre.search(line):
+ transactions[cid][1][1] += 1
+
+ elif ' calling vote(' in line:
+ cid = connidre.search(line).group(1)
+ if cid in transactions:
+ transactions[cid] += time(line),
+ elif ' vote returns None' in line:
+ cid = connidre.search(line).group(1)
+ if cid in transactions:
+ transactions[cid] += time(line), 'n'
+ elif ' vote() raised' in line:
+ cid = connidre.search(line).group(1)
+ if cid in transactions:
+ transactions[cid] += time(line), 'e'
+ elif ' vote returns ' in line:
+ # delayed, skip
+ cid = connidre.search(line).group(1)
+ if cid in transactions:
+ transactions[cid] += time(line), 'd'
+ elif ' calling tpc_abort(' in line:
+ cid = connidre.search(line).group(1)
+ if cid in transactions:
+ try:
+ t0, (stores, old), t1, t2, vs = transactions[cid]
+ except ValueError:
+ pass
+ else:
+ t = time(line)
+ d = sub(t1, t)
+ if d >= thresh:
+ print t1, cid, "%s/%s" % (stores, old), \
+ sub(t0, t1), sub(t1, t2), vs, \
+ sub(t2, t), 'abort'
+ del transactions[cid]
+        elif ' calling tpc_finish(' in line:
+            cid = connidre.search(line).group(1)
+            if cid in transactions:
+                transactions[cid] += time(line),
+        elif ' tpc_finish returns ' in line:
+            cid = connidre.search(line).group(1)
+            if cid in transactions:
+ t0, (stores, old), t1, t2, vs, t3 = transactions[cid]
+ t = time(line)
+ d = sub(t1, t)
+ if d >= thresh:
+ print t1, cid, "%s/%s" % (stores, old), \
+ sub(t0, t1), sub(t1, t2), vs, \
+ sub(t2, t3), sub(t3, t)
+ del transactions[cid]
+
+def minute(f, slice=16, detail=1, summary=1):
+ f, = f
+
+ if f == '-':
+ f = sys.stdin
+ else:
+ f = xopen(f)
+
+ mlast = r = s = c = a = cl = None
+ rs = []
+ ss = []
+ cs = []
+ as = []
+ ts = []
+ cls = []
+
+ for line in f:
+ line = line.strip()
+ if (line.find('returns') > 0
+ or line.find('storea') > 0
+ or line.find('tpc_abort') > 0
+ ):
+ client = connidre.search(line).group(1)
+ m = line[:slice]
+ if m != mlast:
+ if mlast:
+ if detail:
+ print mlast, len(cl), r, s, c, a, a+c
+ cls.append(len(cl))
+ rs.append(r)
+ ss.append(s)
+ cs.append(c)
+ as.append(a)
+ ts.append(c+a)
+ mlast = m
+ r = s = c = a = 0
+ cl = {}
+ if line.find('zeoLoad') > 0:
+ r += 1
+ cl[client] = 1
+ elif line.find('storea') > 0:
+ s += 1
+ cl[client] = 1
+ elif line.find('tpc_finish') > 0:
+ c += 1
+ cl[client] = 1
+ elif line.find('tpc_abort') > 0:
+ a += 1
+ cl[client] = 1
+
+ if mlast:
+ if detail:
+ print mlast, len(cl), r, s, c, a, a+c
+ cls.append(len(cl))
+ rs.append(r)
+ ss.append(s)
+ cs.append(c)
+ as.append(a)
+ ts.append(c+a)
+
+ if summary:
+ print
+ print 'Summary: \t', '\t'.join(('min', '10%', '25%', 'med',
+ '75%', '90%', 'max', 'mean'))
+ print "n=%6d\t" % len(cls), '-'*62
+ print 'Clients: \t', '\t'.join(map(str,stats(cls)))
+ print 'Reads: \t', '\t'.join(map(str,stats( rs)))
+ print 'Stores: \t', '\t'.join(map(str,stats( ss)))
+ print 'Commits: \t', '\t'.join(map(str,stats( cs)))
+ print 'Aborts: \t', '\t'.join(map(str,stats( as)))
+ print 'Trans: \t', '\t'.join(map(str,stats( ts)))
+
+def stats(s):
+ s.sort()
+ min = s[0]
+ max = s[-1]
+ n = len(s)
+ out = [min]
+ ni = n + 1
+ for p in .1, .25, .5, .75, .90:
+ lp = ni*p
+ l = int(lp)
+ if lp < 1 or lp > n:
+ out.append('-')
+ elif abs(lp-l) < .00001:
+ out.append(s[l-1])
+ else:
+ out.append(int(s[l-1] + (lp - l) * (s[l] - s[l-1])))
+
+ mean = 0.0
+ for v in s:
+ mean += v
+
+ out.extend([max, int(mean/n)])
+
+ return out
+
+def minutes(f):
+ minute(f, 16, detail=0)
+
+def hour(f):
+ minute(f, 13)
+
+def day(f):
+ minute(f, 10)
+
+def hours(f):
+ minute(f, 13, detail=0)
+
+def days(f):
+ minute(f, 10, detail=0)
+
+
+new_connection_idre = re.compile(r"new connection \('(\d+.\d+.\d+.\d+)', (\d+)\):")
+def verify(f):
+ f, = f
+
+ if f == '-':
+ f = sys.stdin
+ else:
+ f = xopen(f)
+
+ t1 = None
+ nv = {}
+ for line in f:
+ if line.find('new connection') > 0:
+ m = new_connection_idre.search(line)
+ cid = "%s:%s" % (m.group(1), m.group(2))
+ nv[cid] = [time(line), 0]
+ elif line.find('calling zeoVerify(') > 0:
+ cid = connidre.search(line).group(1)
+ nv[cid][1] += 1
+ elif line.find('calling endZeoVerify()') > 0:
+ cid = connidre.search(line).group(1)
+ t1, n = nv[cid]
+ if n:
+ d = sub(t1, time(line))
+ print cid, t1, n, d, n and (d*1000.0/n) or '-'
+
+def recovery(f):
+ f, = f
+
+ if f == '-':
+ f = sys.stdin
+ else:
+ f = xopen(f)
+
+ last = ''
+ trans = []
+ n = 0
+ for line in f:
+ n += 1
+ if line.find('RecoveryServer') < 0:
+ continue
+ l = line.find('sending transaction ')
+ if l > 0 and last.find('sending transaction ') > 0:
+ trans.append(line[l+20:].strip())
+ else:
+ if trans:
+ if len(trans) > 1:
+ print " ... %s similar records skipped ..." % (
+ len(trans) - 1)
+ print n, last.strip()
+ trans=[]
+ print n, line.strip()
+ last = line
+
+ if len(trans) > 1:
+ print " ... %s similar records skipped ..." % (
+ len(trans) - 1)
+ print n, last.strip()
+
+
+
+if __name__ == '__main__':
+ globals()[sys.argv[1]](sys.argv[2:])
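
The stats() function above computes percentiles by the (n+1)p rank
convention, interpolating linearly between neighboring order statistics
and printing '-' when the rank falls outside the sample. For example,
for a nine-element sample:

    >>> stats([10, 20, 30, 40, 50, 60, 70, 80, 90])
    [10, 10, 25, 50, 75, 90, 90, 50]

The columns are min, 10%, 25%, median, 75%, 90%, max, and mean,
matching the header printed by minute().
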
=== ZODB3/Tools/timeout.py 1.1 => 1.2 ===
--- /dev/null Mon Sep 15 12:29:51 2003
+++ ZODB3/Tools/timeout.py Mon Sep 15 12:29:19 2003
@@ -0,0 +1,68 @@
+#!python
+
+"""Transaction timeout test script.
+
+This script connects to a storage, begins a transaction, calls store()
+and tpc_vote(), and then sleeps forever. This should trigger the
+transaction timeout feature of the server.
+
+usage: timeout.py address delay [storage-name]
+
+"""
+
+import sys
+import time
+
+from ZODB.Transaction import Transaction
+from ZODB.tests.MinPO import MinPO
+from ZODB.tests.StorageTestBase import zodb_pickle
+from ZEO.ClientStorage import ClientStorage
+
+ZERO = '\0'*8
+
+def main():
+ if len(sys.argv) not in (3, 4):
+ sys.stderr.write("Usage: timeout.py address delay [storage-name]\n" %
+ sys.argv[0])
+ sys.exit(2)
+
+ hostport = sys.argv[1]
+ delay = float(sys.argv[2])
+ if sys.argv[3:]:
+ name = sys.argv[3]
+ else:
+ name = "1"
+
+ if "/" in hostport:
+ address = hostport
+ else:
+ if ":" in hostport:
+ i = hostport.index(":")
+ host, port = hostport[:i], hostport[i+1:]
+ else:
+ host, port = "", hostport
+ port = int(port)
+ address = (host, port)
+
+ print "Connecting to %s..." % repr(address)
+ storage = ClientStorage(address, name)
+ print "Connected. Now starting a transaction..."
+
+ oid = storage.new_oid()
+ version = ""
+ revid = ZERO
+ data = MinPO("timeout.py")
+ pickled_data = zodb_pickle(data)
+ t = Transaction()
+ t.user = "timeout.py"
+ storage.tpc_begin(t)
+ storage.store(oid, revid, pickled_data, version, t)
+ print "Stored. Now voting..."
+ storage.tpc_vote(t)
+
+ print "Voted; now sleeping %s..." % delay
+ time.sleep(delay)
+ print "Done."
+
+if __name__ == "__main__":
+ main()
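
As the address parsing in main() shows, the address argument accepts
three forms: a path containing "/" (a Unix-domain socket), "host:port",
or a bare port on the local host. Some hypothetical invocations:

    python timeout.py localhost:9999 30
    python timeout.py 9999 30 mystorage
    python timeout.py /var/zeo/zeo.sock 30
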
=== ZODB3/Tools/README.txt 1.1 => 1.2 ===
--- /dev/null Mon Sep 15 12:29:51 2003
+++ ZODB3/Tools/README.txt Mon Sep 15 12:29:20 2003
@@ -0,0 +1,118 @@
+This directory contains a collection of utilities for managing ZODB
+databases. Some are more useful than others. If you install ZODB
+using distutils ("python setup.py install"), fsdump.py, fstest.py,
+repozo.py, and zeopack.py will be installed in /usr/local/bin.
+
+Unless otherwise noted, these scripts are invoked with the name of the
+Data.fs file as their only argument. Example: checkbtrees.py data.fs.
+
+
+analyze.py -- A transaction analyzer for FileStorage
+
+Reports on the data in a FileStorage. The report is organized by
+class. It shows total data, as well as separate reports for current
+and historical revisions of objects.
+
+
+checkbtrees.py -- Checks BTrees in a FileStorage for corruption.
+
+Attempts to find all the BTrees contained in a Data.fs and calls their
+_check() methods.
+
+
+fsdump.py -- Summarize FileStorage contents, one line per revision.
+
+Prints a report of FileStorage contents, with one line for each
+transaction and one line for each data record in that transaction.
+Includes time stamps, file positions, and class names.
+
+
+fstest.py -- Simple consistency checker for FileStorage
+
+usage: fstest.py [-v] data.fs
+
+The fstest tool will scan all the data in a FileStorage and report an
+error if it finds any corrupt transaction data. The tool will print a
+message when the first error is detected, then exit.
+
+The tool accepts one or more -v arguments. If a single -v is used, it
+will print a line of text for each transaction record it encounters.
+If two -v arguments are used, it will also print a line of text for
+each object. The objects for a transaction will be printed before the
+transaction itself.
+
+Note: It does not check the consistency of the object pickles. It is
+possible for the damage to occur only in the part of the file that
+stores object pickles. Those errors will go undetected.
+
+
+netspace.py -- Hackish attempt to report on size of objects
+
+usage: netspace.py [-P | -v] data.fs
+
+-P: do a pack first
+-v: print info for all objects, even if a traversal path isn't found
+
+Traverses objects from the database root and attempts to calculate
+the size of each object, including all reachable subobjects.
+
+
+parsezeolog.py -- Parse BLATHER logs from ZEO server.
+
+This script may be obsolete. It has not been tested against the
+current log output of the ZEO server.
+
+Reports on the time and size of transactions committed by a ZEO
+server, by inspecting log messages at BLATHER level.
+
+
+repozo.py -- Incremental backup utility for FileStorage.
+
+Run the script with the -h option to see usage details.
+
+
+timeout.py -- Script to test transaction timeout
+
+usage: timeout.py address delay [storage-name]
+
+This script connects to a storage, begins a transaction, calls store()
+and tpc_vote(), and then sleeps forever. This should trigger the
+transaction timeout feature of the server.
+
+
+zeopack.py -- Script to pack a ZEO server.
+
+The script connects to a server and calls pack() on a specific
+storage. See the script for usage details.
+
+
+zeoreplay.py -- Experimental script to replay transactions from a ZEO log.
+
+Like parsezeolog.py, this may be obsolete because it was written
+against an earlier version of the ZEO server. See the script for
+usage details.
+
+
+zeoup.py
+
+Usage: zeoup.py [options]
+
+The test will connect to a ZEO server, load the root object, and
+attempt to update the zeoup counter in the root. It will report
+success if it updates the counter or if it gets a ConflictError. A
+ConflictError is considered a success, because the client was able to
+start a transaction.
+
+See the script for details about the options.
+
+
+zodbload.py - exercise ZODB under a heavy synthesized Zope-like load
+
+See the module docstring for details. Note that this script requires
+Zope. New in ZODB3 3.1.4.
+
+
+zeoserverlog.py - analyze ZEO server log for performance statistics
+
+See the module docstring for details; there are a large number of
+options. New in ZODB3 3.1.4.
\ No newline at end of file
=== ZODB3/Tools/zeoup.py 1.13 => 1.14 ===
--- ZODB3/Tools/zeoup.py:1.13 Tue Dec 10 13:44:41 2002
+++ ZODB3/Tools/zeoup.py Mon Sep 15 12:29:19 2003
@@ -1,7 +1,7 @@
-#! /usr/bin/env python
+#!python
"""Make sure a ZEO server is running.
-Usage: zeoup.py [options]
+usage: zeoup.py [options]
The test will connect to a ZEO server, load the root object, and attempt to
update the zeoup counter in the root. It will report success if it updates
@@ -11,11 +11,11 @@
Options:
-p port -- port to connect to
-
+
-h host -- host to connect to (default is current host)
-S storage -- storage name (default '1')
-
+
-U path -- Unix-domain socket to connect to
--nowrite -- Do not update the zeoup counter.
=== ZODB3/Tools/zeoreplay.py 1.3 => 1.4 ===
--- ZODB3/Tools/zeoreplay.py:1.3 Wed Dec 18 17:15:03 2002
+++ ZODB3/Tools/zeoreplay.py Mon Sep 15 12:29:19 2003
@@ -1,3 +1,4 @@
+#!python
"""Parse the BLATHER logging generated by ZEO, and optionally replay it.
Usage: zeointervals.py [options]
@@ -186,7 +187,7 @@
meth = getattr(txn, 'tpc_begin', None)
if meth is not None:
meth(when, args, client)
-
+
def storea(self, when, args, client):
txn = self.__curtxn.get(client)
if txn is None:
@@ -221,7 +222,7 @@
print '%s %s %4d %10d %s %s' % (
txn._begintime, txn._finishtime - txn._begintime,
len(txn._objects),
- bytes,
+ bytes,
time.ctime(txn._begintime),
txn._url)
@@ -281,7 +282,7 @@
if replay:
storage = FileStorage(storagefile)
- #storage = BDBFullStorage(storagefile)
+ #storage = BDBFullStorage(storagefile)
#storage = PrimaryStorage('yyz', storage, RS_PORT)
t0 = now()
p = ZEOParser(maxtxns, report, storage)
=== ZODB3/Tools/zeoqueue.py 1.4 => 1.5 ===
--- ZODB3/Tools/zeoqueue.py:1.4 Wed Feb 5 15:45:01 2003
+++ ZODB3/Tools/zeoqueue.py Mon Sep 15 12:29:19 2003
@@ -1,4 +1,4 @@
-#! /usr/bin/env python
+#!python
"""Report on the number of currently waiting clients in the ZEO queue.
Usage: %(PROGRAM)s [options] logfile
=== ZODB3/Tools/zeopack.py 1.8 => 1.9 ===
--- ZODB3/Tools/zeopack.py:1.8 Tue Jan 28 16:20:40 2003
+++ ZODB3/Tools/zeopack.py Mon Sep 15 12:29:19 2003
@@ -1,4 +1,4 @@
-#! /usr/bin/env python
+#!python
"""Connect to a ZEO server and ask it to pack.
Usage: zeopack.py [options]
@@ -6,11 +6,11 @@
Options:
-p port -- port to connect to
-
+
-h host -- host to connect to (default is current host)
-
+
-U path -- Unix-domain socket to connect to
-
+
-S name -- storage name (default is '1')
-d days -- pack objects more than days old
=== ZODB3/Tools/repozo.py 1.5 => 1.6 ===
--- ZODB3/Tools/repozo.py:1.5 Mon Apr 7 17:51:36 2003
+++ ZODB3/Tools/repozo.py Mon Sep 15 12:29:19 2003
@@ -1,64 +1,65 @@
-#!/usr/bin/env python
+#!python
# repozo.py -- incremental and full backups of a Data.fs file.
#
# Originally written by Anthony Baxter
# Significantly modified by Barry Warsaw
-#
-# TODO:
-# allow gzipping of backup files.
-# allow backup files in subdirectories.
"""repozo.py -- incremental and full backups of a Data.fs file.
Usage: %(program)s [options]
Where:
+ Exactly one of -B or -R must be specified:
+
-B / --backup
- backup current ZODB file
+ Backup current ZODB file.
-R / --recover
- restore a ZODB file from a backup
+ Restore a ZODB file from a backup.
-v / --verbose
- Verbose mode
+ Verbose mode.
-h / --help
- Print this text and exit
+ Print this text and exit.
-r dir
--repository=dir
- Repository directory containing the backup files
+ Repository directory containing the backup files. This argument
+ is required.
-Flags for --backup:
+Options for -B/--backup:
-f file
--file=file
- Source Data.fs file
+ Source Data.fs file. This argument is required.
-F / --full
- Force a full backup
+ Force a full backup. By default, an incremental backup is made
+ if possible (e.g., if a pack has occurred since the last
+ incremental backup, a full backup is necessary).
-Q / --quick
Verify via md5 checksum only the last incremental written. This
significantly reduces the disk i/o at the (theoretical) cost of
- inconsistency.
+ inconsistency. This is a probabilistic way of determining whether
+ a full backup is necessary.
-z / --gzip
Compress with gzip the backup files. Uses the default zlib
- compression level.
+ compression level. By default, gzip compression is not used.
-Flags for --recover:
+Options for -R/--recover:
-D str
--date=str
- Recover state as at this date. str is in the format
- yyyy-mm-dd[-hh[-mm]]
-
- -o file
- --output=file
- Write recovered ZODB to given file. If not given, the file will be
+ Recover state as of this date. str is in the format
+ yyyy-mm-dd[-hh[-mm]]
+ By default, current time is used.
+
+ -o filename
+ --output=filename
+ Write recovered ZODB to given file. By default, the file is
written to stdout.
-
-One of --backup or --recover is required.
"""
from __future__ import nested_scopes
@@ -120,14 +121,14 @@
usage(1, msg)
class Options:
- mode = None
- file = None
- repository = None
- full = False
- date = None
- output = None
- quick = False
- gzip = False
+ mode = None # BACKUP or RECOVER
+ file = None # name of input Data.fs file
+ repository = None # name of directory holding backups
+ full = False # True forces full backup
+ date = None # -D argument, if any
+ output = None # where to write recovered data; None = stdout
+ quick = False # -Q flag state
+ gzip = False # -z flag state
options = Options()
@@ -158,6 +159,8 @@
options.output = arg
elif opt in ('-z', '--gzip'):
options.gzip = True
+ else:
+ assert False, (opt, arg)
# Any other arguments are invalid
if args:
@@ -184,20 +187,26 @@
-# Do something with a run of bytes from a file
+# Read bytes (no more than n, or to EOF if n is None) in chunks from the
+# current position in file fp. Pass each chunk as an argument to func().
+# Return the total number of bytes read == the total number of bytes
+# passed in all to func(). Leaves the file position just after the
+# last byte read.
def dofile(func, fp, n=None):
- bytesread = 0
- stop = False
- chunklen = READCHUNK
- while not stop:
- if n is not None and chunklen + bytesread > n:
- chunklen = n - bytesread
- stop = True
- data = fp.read(chunklen)
+ bytesread = 0L
+ while n is None or n > 0:
+ if n is None:
+ todo = READCHUNK
+ else:
+ todo = min(READCHUNK, n)
+ data = fp.read(todo)
if not data:
break
func(data)
- bytesread += len(data)
+ nread = len(data)
+ bytesread += nread
+ if n is not None:
+ n -= nread
return bytesread
@@ -223,9 +232,10 @@
def func(data):
sum.update(data)
ofp.write(data)
- dofile(func, ifp, n)
+ ndone = dofile(func, ifp, n)
ofp.close()
ifp.close()
+ assert ndone == n
return sum.hexdigest()
@@ -296,30 +306,34 @@
log('no files found')
return needed
+# Scan the .dat file corresponding to the last full backup performed.
+# Return
+#
+# filename, startpos, endpos, checksum
+#
+# of the last incremental. If there is no .dat file, or the .dat file
+# is empty, return
+#
+# None, None, None, None
def scandat(repofiles):
- # Scan the .dat file corresponding to the last full backup performed.
- # Return the filename, startpos, endpos, and sum of the last incremental.
- # If all is a list, then append file name and md5sums to the list.
fullfile = repofiles[0]
datfile = os.path.splitext(fullfile)[0] + '.dat'
- # If the .dat file is missing, we have to do a full backup
- fn = startpos = endpos = sum = None
+ fn = startpos = endpos = sum = None # assume .dat file missing or empty
try:
fp = open(datfile)
except IOError, e:
if e.errno <> errno.ENOENT:
raise
else:
- while True:
- line = fp.readline()
- if not line:
- break
- # We only care about the last one
- fn, startpos, endpos, sum = line.split()
+ # We only care about the last one.
+ lines = fp.readlines()
fp.close()
- startpos = long(startpos)
- endpos = long(endpos)
+ if lines:
+ fn, startpos, endpos, sum = lines[-1].split()
+ startpos = long(startpos)
+ endpos = long(endpos)
+
return fn, startpos, endpos, sum
@@ -364,7 +378,7 @@
print >> sys.stderr, 'Cannot overwrite existing file:', dest
sys.exit(2)
log('writing incremental: %s bytes to %s', pos-reposz, dest)
- sum = copyfile(options, dest, reposz, pos)
+ sum = copyfile(options, dest, reposz, pos - reposz)
# The first file in repofiles points to the last full backup. Use this to
# get the .dat file and append the information for this incremental to
# that file.
@@ -398,14 +412,18 @@
return
# Now check the md5 sum of the source file, from the last
# incremental's start and stop positions.
- srcfp = open(options.file)
+ srcfp = open(options.file, 'rb')
srcfp.seek(startpos)
srcsum = checksum(srcfp, endpos-startpos)
+ srcfp.close()
log('last incremental file: %s', fn)
log('last incremental checksum: %s', sum)
log('source checksum range: [%s..%s], sum: %s',
startpos, endpos, srcsum)
if sum == srcsum:
+ if srcsz == endpos:
+ log('No changes, nothing to do')
+ return
log('doing incremental, starting at: %s', endpos)
do_incremental_backup(options, endpos, repofiles)
return
@@ -421,7 +439,7 @@
# Get the md5 checksum of the source file, up to two file positions:
# the entire size of the file, and up to the file position of the last
# incremental backup.
- srcfp = open(options.file)
+ srcfp = open(options.file, 'rb')
srcsum = checksum(srcfp, srcsz)
srcfp.seek(0)
srcsum_backedup = checksum(srcfp, reposz)
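
The rewritten dofile() above is the workhorse behind both checksum()
and copyfile(): it streams at most n bytes from the current file
position through a callback and returns how many bytes it delivered. A
minimal sketch of the same pattern, computing the md5 of a byte range
without holding it in memory (assumes dofile() from repozo.py is in
scope; the path and offsets are illustrative):

    import md5

    def checksum_range(path, start, length):
        # md5 of 'length' bytes beginning at offset 'start'.
        sum = md5.new()
        fp = open(path, 'rb')
        fp.seek(start)
        ndone = dofile(sum.update, fp, length)
        fp.close()
        assert ndone == length
        return sum.hexdigest()

    print checksum_range('Data.fs', 0, 1024)
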
=== ZODB3/Tools/parsezeolog.py 1.4 => 1.5 ===
--- ZODB3/Tools/parsezeolog.py:1.4 Thu Dec 12 16:34:37 2002
+++ ZODB3/Tools/parsezeolog.py Mon Sep 15 12:29:19 2003
@@ -1,3 +1,4 @@
+#!python
"""Parse the BLATHER logging generated by ZEO2.
An example of the log format is:
@@ -52,7 +53,7 @@
fields = ("time", "vote", "done", "user", "path")
fmt = "%-24s %5s %5s %-15s %s"
hdr = fmt % fields
-
+
def report(self):
"""Print a report about the transaction"""
t = time.ctime(self.begin)
@@ -98,7 +99,7 @@
except KeyError:
print "uknown tid", repr(tid)
return None
-
+
def tpc_finish(self, time, args):
t = self.get_txn(args)
if t is None:
=== ZODB3/Tools/netspace.py 1.1 => 1.2 ===
--- ZODB3/Tools/netspace.py:1.1 Fri May 3 16:33:22 2002
+++ ZODB3/Tools/netspace.py Mon Sep 15 12:29:19 2003
@@ -1,3 +1,4 @@
+#!python
"""Report on the net size of objects counting subobjects.
usage: netspace.py [-P | -v] data.fs
@@ -89,7 +90,7 @@
keys = filter(paths.has_key, keys)
fmt = "%8s %5d %8d %s %s.%s"
-
+
for oid in keys:
data, serialno = fs.load(oid, '')
mod, klass = get_pickle_metadata(data)
=== ZODB3/Tools/migrate.py 1.1 => 1.2 ===
--- ZODB3/Tools/migrate.py:1.1 Tue Jan 14 12:22:59 2003
+++ ZODB3/Tools/migrate.py Mon Sep 15 12:29:19 2003
@@ -1,4 +1,4 @@
-#! /usr/bin/env python
+#!python
##############################################################################
#
# Copyright (c) 2001, 2002, 2003 Zope Corporation and Contributors.
=== ZODB3/Tools/fstest.py 1.9 => 1.10 ===
--- ZODB3/Tools/fstest.py:1.9 Tue Apr 22 13:58:24 2003
+++ ZODB3/Tools/fstest.py Mon Sep 15 12:29:19 2003
@@ -4,14 +4,14 @@
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
-#
+#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
-#
+#
##############################################################################
"""Simple consistency checker for FileStorage.
@@ -109,7 +109,7 @@
It also leaves the file pointer set to pos. The path argument is
used for generating error messages.
"""
-
+
h = file.read(TREC_HDR_LEN)
if not h:
return None, None
@@ -131,7 +131,7 @@
raise FormatError("%s truncated possibly because of"
" damaged records at %s" % (path, pos))
if status == Status.checkpoint:
- raise FormatError("%s checkpoint flag was not cleared at %s"
+ raise FormatError("%s checkpoint flag was not cleared at %s"
% (path, pos))
if status not in ' up':
raise FormatError("%s has invalid status '%s' at %s" %
=== ZODB3/Tools/fsrefs.py 1.7 => 1.8 ===
--- ZODB3/Tools/fsrefs.py:1.7 Fri May 23 17:30:31 2003
+++ ZODB3/Tools/fsrefs.py Mon Sep 15 12:29:19 2003
@@ -4,14 +4,14 @@
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
-#
+#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
-#
+#
##############################################################################
"""Check FileStorage for dangling references.
@@ -77,7 +77,7 @@
# that refer to this one, we won't get error reports from
# them. We could fix this by making two passes over the
# storage, but that seems like overkill.
-
+
refs = get_refs(data)
missing = [] # contains 3-tuples of oid, klass-metadata, reason
for info in refs:
=== ZODB3/Tools/checkbtrees.py 1.1 => 1.2 ===
--- ZODB3/Tools/checkbtrees.py:1.1 Thu Jun 20 18:49:50 2002
+++ ZODB3/Tools/checkbtrees.py Mon Sep 15 12:29:20 2003
@@ -1,20 +1,35 @@
-#! /usr/bin/env python
+#!python
"""Check the consistency of BTrees in a Data.fs
usage: checkbtrees.py data.fs
-Try to find all the BTrees in a Data.fs and call their _check() methods.
+Try to find all the BTrees in a Data.fs, call their _check() methods,
+and run them through BTrees.check.check().
"""
from types import IntType
import ZODB
from ZODB.FileStorage import FileStorage
+from BTrees.check import check
+
+# Set of oids we've already visited. Since the object structure is
+# a general graph, this is needed to prevent unbounded paths in the
+# presence of cycles. It's also helpful in eliminating redundant
+# checking when a BTree is pointed to by many objects.
+oids_seen = {}
+
+# Append (obj, path) to L if and only if obj is a persistent object
+# and we haven't seen it before.
+def add_if_new_persistent(L, obj, path):
+ global oids_seen
-def add_if_persistent(L, obj, path):
getattr(obj, '_', None) # unghostify
if hasattr(obj, '_p_oid'):
- L.append((obj, path))
+ oid = obj._p_oid
+ if not oids_seen.has_key(oid):
+ L.append((obj, path))
+ oids_seen[oid] = 1
def get_subobjects(obj):
getattr(obj, '_', None) # unghostify
@@ -25,7 +40,7 @@
attrs = ()
for pair in attrs:
sub.append(pair)
-
+
# what if it is a mapping?
try:
items = obj.items()
@@ -54,7 +69,7 @@
cn = ZODB.DB(fs).open()
rt = cn.root()
todo = []
- add_if_persistent(todo, rt, '')
+ add_if_new_persistent(todo, rt, '')
found = 0
while todo:
@@ -75,6 +90,13 @@
print msg
print "*" * 60
+ try:
+ check(obj)
+ except AssertionError, msg:
+ print "*" * 60
+ print msg
+ print "*" * 60
+
if found % 100 == 0:
cn.cacheMinimize()
@@ -84,7 +106,7 @@
newpath = "%s%s" % (path, k)
else:
newpath = "%s.%s" % (path, k)
- add_if_persistent(todo, v, newpath)
+ add_if_new_persistent(todo, v, newpath)
print "total", len(fs._index), "found", found
=== ZODB3/Tools/analyze.py 1.1 => 1.2 ===
--- ZODB3/Tools/analyze.py:1.1 Mon Aug 26 14:29:58 2002
+++ ZODB3/Tools/analyze.py Mon Sep 15 12:29:20 2003
@@ -1,4 +1,4 @@
-#! /usr/bin/env python
+#!python
# Based on a transaction analyzer by Matt Kromer.
import pickle
@@ -137,4 +137,3 @@
if __name__ == "__main__":
path = sys.argv[1]
report(analyze(path))
-
=== Removed File ZODB3/Tools/space.py ===