author     nobody <nobody@gnome.org>  2006-08-27 00:06:44 +0000
committer  nobody <nobody@gnome.org>  2006-08-27 00:06:44 +0000
commit     dd3ed1e8b75521a8481f5e28515ececf9310aff6 (patch)
tree       5f4fdf2fe02f8da17404b38dabe7c30a47671da2
parent     7ed6c0cec5b91daccda6b5a85ab04f4362dc328e (diff)

This commit was manufactured by cvs2svn to create tag 'DEBIAN-2_0_4_OOD680M2-1'.
-rw-r--r--  buildbot/buildbot-source/ChangeLog  6129
-rw-r--r--  buildbot/buildbot-source/MANIFEST.in  17
-rw-r--r--  buildbot/buildbot-source/NEWS  1621
-rw-r--r--  buildbot/buildbot-source/PKG-INFO  23
-rw-r--r--  buildbot/buildbot-source/README  195
-rw-r--r--  buildbot/buildbot-source/README.w32  95
-rwxr-xr-x  buildbot/buildbot-source/bin/buildbot  4
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/__init__.py  3
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/buildset.py  77
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/base.py  14
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/changes.py  265
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/dnotify.py  103
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/freshcvs.py  148
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/freshcvsmail.py  5
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/mail.py  475
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/maildir.py  115
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/maildirgtk.py  55
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/maildirtwisted.py  76
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/p4poller.py  142
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/changes/pb.py  89
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/clients/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/clients/base.py  111
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/clients/debug.py  163
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/clients/gtkPanes.py  428
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/clients/sendchange.py  39
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/dnotify.py  105
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/interfaces.py  890
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/locks.py  89
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/master.py  1066
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/pbutil.py  147
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/process/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/process/base.py  608
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/process/builder.py  689
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/process/factory.py  177
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/process/maxq.py  46
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/process/process_twisted.py  119
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/process/step.py  2359
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/process/step_twisted.py  754
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/process/step_twisted2.py  164
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/scheduler.py  688
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/scripts/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/scripts/runner.py  749
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/scripts/tryclient.py  580
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/slave/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/slave/bot.py  495
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/slave/commands.py  1822
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/slave/interfaces.py  57
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/slave/registry.py  18
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/slave/trial.py  175
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/sourcestamp.py  85
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/status/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/status/base.py  77
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/status/builder.py  1927
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/status/client.py  573
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/status/getcws.py  133
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/status/html.py  2385
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/status/mail.py  368
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/status/progress.py  308
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/status/tests.py  75
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/status/words.py  614
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/emit.py  10
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/runutils.py  193
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/sleep.py  9
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test__versions.py  16
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_buildreq.py  182
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_changes.py  192
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_config.py  1007
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_control.py  140
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_dependencies.py  170
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_locks.py  165
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_maildir.py  79
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_mailparse.py  248
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_properties.py  152
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_run.py  524
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_runner.py  299
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_scheduler.py  313
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_slavecommand.py  265
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_slaves.py  228
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_status.py  949
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_steps.py  236
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_twisted.py  184
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_util.py  26
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_vc.py  2162
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/test/test_web.py  493
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/twcompat.py  285
-rw-r--r--  buildbot/buildbot-source/build/lib/buildbot/util.py  71
-rwxr-xr-x  buildbot/buildbot-source/build/scripts-2.3/buildbot  4
-rwxr-xr-x  buildbot/buildbot-source/build/scripts-2.4/buildbot  4
-rw-r--r--  buildbot/buildbot-source/buildbot/__init__.py  3
-rw-r--r--  buildbot/buildbot-source/buildbot/__init__.pyc  bin 166 -> 0 bytes
-rw-r--r--  buildbot/buildbot-source/buildbot/buildbot.png  bin 783 -> 0 bytes
-rw-r--r--  buildbot/buildbot-source/buildbot/buildset.py  77
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/base.py  14
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/changes.py  265
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/dnotify.py  103
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/freshcvs.py  148
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/freshcvsmail.py  5
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/mail.py  475
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/maildir.py  115
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/maildirgtk.py  55
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/maildirtwisted.py  76
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/p4poller.py  142
-rw-r--r--  buildbot/buildbot-source/buildbot/changes/pb.py  89
-rw-r--r--  buildbot/buildbot-source/buildbot/clients/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/buildbot/clients/base.py  111
-rw-r--r--  buildbot/buildbot-source/buildbot/clients/debug.glade  669
-rw-r--r--  buildbot/buildbot-source/buildbot/clients/debug.py  163
-rw-r--r--  buildbot/buildbot-source/buildbot/clients/gtkPanes.py  428
-rw-r--r--  buildbot/buildbot-source/buildbot/clients/sendchange.py  39
-rw-r--r--  buildbot/buildbot-source/buildbot/dnotify.py  105
-rw-r--r--  buildbot/buildbot-source/buildbot/interfaces.py  890
-rw-r--r--  buildbot/buildbot-source/buildbot/locks.py  89
-rw-r--r--  buildbot/buildbot-source/buildbot/master.py  1066
-rw-r--r--  buildbot/buildbot-source/buildbot/pbutil.py  147
-rw-r--r--  buildbot/buildbot-source/buildbot/process/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/buildbot/process/base.py  608
-rw-r--r--  buildbot/buildbot-source/buildbot/process/base.py.newbak3aug  596
-rw-r--r--  buildbot/buildbot-source/buildbot/process/builder.py  689
-rw-r--r--  buildbot/buildbot-source/buildbot/process/factory.py  177
-rw-r--r--  buildbot/buildbot-source/buildbot/process/maxq.py  46
-rw-r--r--  buildbot/buildbot-source/buildbot/process/process_twisted.py  119
-rw-r--r--  buildbot/buildbot-source/buildbot/process/step.py  2359
-rw-r--r--  buildbot/buildbot-source/buildbot/process/step.py.bak  1983
-rw-r--r--  buildbot/buildbot-source/buildbot/process/step_twisted.py  754
-rw-r--r--  buildbot/buildbot-source/buildbot/process/step_twisted2.py  164
-rw-r--r--  buildbot/buildbot-source/buildbot/scheduler.py  688
-rw-r--r--  buildbot/buildbot-source/buildbot/scripts/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/buildbot/scripts/runner.py  749
-rw-r--r--  buildbot/buildbot-source/buildbot/scripts/sample.cfg  150
-rw-r--r--  buildbot/buildbot-source/buildbot/scripts/tryclient.py  580
-rw-r--r--  buildbot/buildbot-source/buildbot/slave/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/buildbot/slave/bot.py  495
-rw-r--r--  buildbot/buildbot-source/buildbot/slave/commands.py  1824
-rw-r--r--  buildbot/buildbot-source/buildbot/slave/interfaces.py  57
-rw-r--r--  buildbot/buildbot-source/buildbot/slave/registry.py  18
-rw-r--r--  buildbot/buildbot-source/buildbot/slave/trial.py  175
-rw-r--r--  buildbot/buildbot-source/buildbot/sourcestamp.py  85
-rw-r--r--  buildbot/buildbot-source/buildbot/status/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/buildbot/status/base.py  77
-rw-r--r--  buildbot/buildbot-source/buildbot/status/builder.py  1927
-rw-r--r--  buildbot/buildbot-source/buildbot/status/classic.css  39
-rw-r--r--  buildbot/buildbot-source/buildbot/status/client.py  573
-rw-r--r--  buildbot/buildbot-source/buildbot/status/getcws.py  133
-rw-r--r--  buildbot/buildbot-source/buildbot/status/html.py  2385
-rw-r--r--  buildbot/buildbot-source/buildbot/status/html.py.bakforCWS_View  1744
-rw-r--r--  buildbot/buildbot-source/buildbot/status/mail.py  368
-rw-r--r--  buildbot/buildbot-source/buildbot/status/progress.py  308
-rw-r--r--  buildbot/buildbot-source/buildbot/status/tests.py  75
-rw-r--r--  buildbot/buildbot-source/buildbot/status/words.py  614
-rw-r--r--  buildbot/buildbot-source/buildbot/test/__init__.py  0
-rw-r--r--  buildbot/buildbot-source/buildbot/test/emit.py  10
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/msg1  68
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/msg2  101
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/msg3  97
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/msg4  45
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/msg5  54
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/msg6  70
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/msg7  68
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/msg8  61
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/msg9  18
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/syncmail.1  52
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/syncmail.2  56
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/syncmail.3  39
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/syncmail.4  290
-rw-r--r--  buildbot/buildbot-source/buildbot/test/mail/syncmail.5  70
-rw-r--r--  buildbot/buildbot-source/buildbot/test/runutils.py  193
-rw-r--r--  buildbot/buildbot-source/buildbot/test/sleep.py  9
-rw-r--r--  buildbot/buildbot-source/buildbot/test/subdir/emit.py  10
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test__versions.py  16
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_buildreq.py  182
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_changes.py  192
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_config.py  1007
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_control.py  140
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_dependencies.py  170
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_locks.py  165
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_maildir.py  79
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_mailparse.py  248
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_properties.py  152
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_run.py  524
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_runner.py  299
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_scheduler.py  313
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_slavecommand.py  265
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_slaves.py  228
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_status.py  949
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_steps.py  236
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_twisted.py  184
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_util.py  26
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_vc.py  2162
-rw-r--r--  buildbot/buildbot-source/buildbot/test/test_web.py  493
-rw-r--r--  buildbot/buildbot-source/buildbot/twcompat.py  285
-rw-r--r--  buildbot/buildbot-source/buildbot/util.py  71
-rw-r--r--  buildbot/buildbot-source/contrib/README.txt  37
-rwxr-xr-x  buildbot/buildbot-source/contrib/arch_buildbot.py  73
-rwxr-xr-x  buildbot/buildbot-source/contrib/fakechange.py  76
-rwxr-xr-x  buildbot/buildbot-source/contrib/hg_buildbot.py  57
-rwxr-xr-x  buildbot/buildbot-source/contrib/run_maxq.py  47
-rwxr-xr-x  buildbot/buildbot-source/contrib/svn_buildbot.py  250
-rwxr-xr-x  buildbot/buildbot-source/contrib/svn_watcher.py  88
-rwxr-xr-x  buildbot/buildbot-source/contrib/svnpoller.py  95
-rwxr-xr-x  buildbot/buildbot-source/contrib/viewcvspoll.py  85
-rw-r--r--  buildbot/buildbot-source/contrib/windows/buildbot.bat  2
-rw-r--r--  buildbot/buildbot-source/contrib/windows/buildbot2.bat  98
-rw-r--r--  buildbot/buildbot-source/docs/PyCon-2003/buildbot.html  276
-rw-r--r--  buildbot/buildbot-source/docs/PyCon-2003/overview.png  bin 43338 -> 0 bytes
-rw-r--r--  buildbot/buildbot-source/docs/PyCon-2003/slave.png  bin 44733 -> 0 bytes
-rw-r--r--  buildbot/buildbot-source/docs/PyCon-2003/stylesheet.css  180
-rw-r--r--  buildbot/buildbot-source/docs/PyCon-2003/waterfall.png  bin 4459 -> 0 bytes
-rw-r--r--  buildbot/buildbot-source/docs/buildbot.info  4921
-rw-r--r--  buildbot/buildbot-source/docs/buildbot.texinfo  4825
-rw-r--r--  buildbot/buildbot-source/docs/epyrun  195
-rw-r--r--  buildbot/buildbot-source/docs/examples/glib_master.cfg  55
-rw-r--r--  buildbot/buildbot-source/docs/examples/hello.cfg  102
-rw-r--r--  buildbot/buildbot-source/docs/examples/twisted_master.cfg  267
-rw-r--r--  buildbot/buildbot-source/docs/gen-reference  1
-rw-r--r--  buildbot/buildbot-source/setup.py  65
218 files changed, 0 insertions, 84765 deletions
diff --git a/buildbot/buildbot-source/ChangeLog b/buildbot/buildbot-source/ChangeLog
deleted file mode 100644
index 89f9fb7a9..000000000
--- a/buildbot/buildbot-source/ChangeLog
+++ /dev/null
@@ -1,6129 +0,0 @@
-2006-05-23 Brian Warner <warner@lothar.com>
-
- * buildbot/__init__.py (version): Releasing buildbot-0.7.3
- * docs/buildbot.texinfo: set version to match
- * NEWS: update for 0.7.3
-
- * docs/buildbot.texinfo (Change Sources): mention hg_buildbot.py,
- give a quick mapping from VC system to possible ChangeSources
- (Build Properties): add 'buildername'
-
- * buildbot/process/base.py (Build.setupStatus): oops, set
- 'buildername' and 'buildnumber' properties
- * buildbot/test/test_properties.py (Interpolate.testBuildNumber):
- test them
-
-2006-05-22 Brian Warner <warner@lothar.com>
-
- * docs/buildbot.texinfo (Build Properties): explain the syntax of
- property interpolation better
-
- * README (INSTALLATION): remove old '-v' argument from recommended
- trial command line
-
- * docs/buildbot.texinfo (ShellCommand): add docs for description=
- and descriptionDone= arguments. Thanks to Niklaus Giger for the
- patch. SF#1475494.
-
- * buildbot/slave/commands.py (SVN.parseGotRevision._parse): use
- 'svnversion' instead of grepping the output of 'svn info', much
- simpler and avoids CR/LF problems on windows. Thanks to Olivier
- Bonnet for the suggestion.
-	(SVN.parseGotRevision): oops, older versions of 'svnversion'
- require the WC_PATH argument, so run 'svnversion .' instead.
-
- * buildbot/interfaces.py (IChangeSource): methods in Interfaces
- aren't supposed to have 'self' in their argument list
-
-2006-05-21 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step.py (ShellCommand.start): make
- testInterpolate pass. I was passing the uninterpolated command to
- the RemoteShellCommand constructor
- (ShellCommand._interpolateProperties): oops, handle non-list
- commands (i.e. strings with multiple words separated by spaces in
- them) properly, instead of forgetting about them.
-
- * buildbot/test/test_properties.py (Run.testInterpolate): new test
- to actually try to use build properties in a real build. This test
- fails.
- * buildbot/test/runutils.py (RunMixin.requestBuild): utility methods
- to start and evaluate builds
-
- * buildbot/test/test__versions.py: add a pseudo-test to record
- what version of Twisted/Python/Buildbot are running. This should
- show up at the beginning of _trial_tmp/test.log, and exists to help
- debug other problems.
-
- * buildbot/status/html.py (Waterfall): add 'robots_txt=' argument,
- a filename to be served as 'robots.txt' to discourage web spiders.
- Adapted from a patch by Tobi Vollebregt, thanks!
- * buildbot/test/test_web.py (Waterfall._test_waterfall_5): test it
- (Waterfall.test_waterfall): tweak the way that filenames are put
-	into the config file, to accommodate windows pathnames better.
-
- * docs/buildbot.texinfo (HTML Waterfall): document it
-
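For illustration, a master.cfg fragment using the new robots_txt= argument
could look like the following sketch (the port number and file path here are
assumptions, not values taken from the ChangeLog):

    # master.cfg sketch: serve a local robots.txt from the Waterfall page
    from buildbot.status import html
    c['status'].append(html.Waterfall(http_port=8010,
                                      robots_txt="/home/buildbot/robots.txt"))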
- * buildbot/process/process_twisted.py
- (QuickTwistedBuildFactory.__init__): recent versions of Twisted
- changed the build process. The new setup.py no longer takes the
- 'all' argument.
- (FullTwistedBuildFactory.__init__): same
- (TwistedReactorsBuildFactory.__init__): same
-
- * contrib/hg_buildbot.py: wrote a commit script for mercurial, to
- be placed in the [hooks] section of the central repository (the
- one that everybody pushes changes to).
-
-2006-05-20 Brian Warner <warner@lothar.com>
-
- * buildbot/slave/commands.py (Darcs.doVCFull): when writing the
- .darcs-context file, use binary mode. I think this was causing a
- Darcs failure under windows.
-
-2006-05-19 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/tryclient.py (CVSExtractor.getBaseRevision):
- use a timezone string of +0000 and gmtime, since this timestamp is
- sent to a buildmaster and %z is broken.
-
- * buildbot/test/test_vc.py (CVSHelper.getdate): use no timezone
- string and localtime, since this timestamp will only be consumed
- locally, and %z is broken.
-
- * buildbot/slave/commands.py (CVS.parseGotRevision): use +0000 and
- gmtime, since this timestamp is returned to the buildmaster, and
- %z is broken.
-
-2006-05-18 Brian Warner <warner@lothar.com>
-
- * NEWS: update in preparation for next release
-
- * buildbot/test/test_vc.py (VCS_Helper): factor out all the
- setup-repository and do-we-have-the-vc-tools code into a separate
- "helper" class, which sticks around in a single module-level
- object. This seems more likely to continue to work in the future
- than having it hide in the TestCase and hope that TestCases stick
- around for a long time.
-
- * buildbot/test/test_vc.py (MercurialSupport.vc_create): 'hg
- addremove' has been deprecated in recent versions of mercurial, so
- use 'hg add' instead
-
-2006-05-07 Brian Warner <warner@lothar.com>
-
- * buildbot/scheduler.py (Try_Jobdir.messageReceived): when
- operating under windows, move the file before opening it, since
- you can't rename a file that somebody has open.
-
- * buildbot/process/base.py (Build.setupBuild): if something goes
- wrong while creating a Step, log the name and arguments, since the
- error message when you get the number of arguments wrong is really
- opaque.
-
-2006-05-06 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (Trial.setupEnvironment): more
- bugs in twisted-specific code not covered by my unit tests, this
- time use 'cmd' argument instead of self.cmd
-
- * buildbot/process/process_twisted.py (TwistedBuild.isFileImportant):
-	fix stupid braino: either use startswith or find()==0, not both.
- (TwistedReactorsBuildFactory.__init__): another dumb typo
-
- * buildbot/test/test_slavecommand.py (ShellBase.testInterrupt1):
- mark this test as TODO under windows, since process-killing seems
- dodgy there. We'll come back to this later and try to fix it
- properly.
-
- * buildbot/test/test_vc.py (CVSSupport.getdate): use localtime,
- and don't include a timezone
- (CVSSupport.vc_try_checkout): stop trying to strip the timezone.
- This should avoid the windows-with-verbose-timezone-name problem
- altogether.
- (Patch.testPatch): add a test which runs 'patch' with less
- overhead than the full VCBase.do_patch sequence, to try to isolate
- a windows test failure. This one uses slave.commands.ShellCommand
- and 'patch', but none of the VC code.
-
- * buildbot/slave/commands.py (getCommand): use which() to find the
- executables for 'cvs', 'svn', etc. This ought to help under
- windows.
-
- * buildbot/test/test_vc.py (VCBase.do_getpatch): Delete the
- working directory before starting. If an earlier test failed, the
- leftover directory would mistakenly flunk a later test.
- (ArchCommon.registerRepository): fix some tla-vs-baz problems.
- Make sure that we use the right commandlines if which("tla") picks
- up "tla.exe" (as it does under windows).
- (TlaSupport.do_get): factor out this tla-vs-baz difference
- (TlaSupport.vc_create): more tla-vs-baz differences
-
- * buildbot/test/test_slavecommand.py
- (ShellBase.testShellMissingCommand): stop trying to assert
- anything about the error message: different shells on different
- OSes with different languages makes it hard, and it really isn't
- that interesting of a thing to test anyway.
-
- * buildbot/test/test_vc.py (CVSSupport.capable): skip CVS tests if
- we detect cvs-1.10 (which is the version shipped with OS-X 10.3
- "Panther"), because it has a bug which flunks a couple tests in
- weird ways. I've checked that cvs-1.12.9 (as shipped with debian)
- is ok. OS-X 10.4 "Tiger" ships with cvs-1.11, but I haven't been
- able to test that yet.
-
-2006-04-30 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py (VCBase.runCommand): set $LC_ALL="C" to
- make sure child commands emit messages in english, so our regexps
-	will match. Thanks to Niklaus Giger for identifying the problems.
- (VCBase._do_vctest_export_1): mode="export" is not responsible
- for setting the "got_revision" property, since in many cases it is
- not convenient to determine.
- (SVNSupport.capable): when running 'svn --version' to check for
- ra_local, we want error messages in english
- * buildbot/test/test_slavecommand.py
- (ShellBase.testShellMissingCommand): set $LC_ALL="C" to get bash
- to emit the error message in english
-
- * buildbot/slave/commands.py (SourceBase.setup): stash a copy of
- the environment with $LC_ALL="C" so that Commands which need to
- parse the output of their child processes can obtain it in
- english.
- (SVN.parseGotRevision): call "svn info" afterwards instead of
- watching the output of the "svn update" or "svn checkout".
- (Darcs.parseGotRevision): use $LC_ALL="C" when running the command
- (Arch.parseGotRevision): same
- (Bazaar.parseGotRevision): same
- (Mercurial.parseGotRevision): same
-
- * buildbot/scripts/tryclient.py (SourceStampExtractor.dovc): set
- $LC_ALL="C" when running commands under 'buildbot try', too
-
- * buildbot/test/__init__.py: remove the global os.environ()
- setting, instead we do it just for the tests that run commands and
- need to parse their output.
-
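The underlying idea is generic: copy the environment and pin the locale
before spawning the child command, so its output is parseable in English.
A minimal sketch (not the actual slave code):

    import os
    env = os.environ.copy()
    env['LC_ALL'] = 'C'   # force english messages so the parsing regexps match
    # pass env= to the spawned 'svn info' / 'darcs changes' / etc. command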
- * buildbot/test/test_scheduler.py (Scheduling.testTryJobdir):
- remove the overly-short .timeout on this test, because non-DNotify
- platforms must fall back to polling which happens at 10 second
- intervals, so a 5 second timeout would never succeed.
-
-2006-04-24 Brian Warner <warner@lothar.com>
-
- * docs/buildbot.texinfo (Installing the code): update trial
- invocation, SF#1469116 by Niklaus Giger.
- (Attributes of Changes): updated branch-name examples to be
- a bit more realistic, SF#1475240 by Stephen Davis.
-
- * contrib/windows/buildbot2.bat: utility wrapper for windows
- developers, contributed by Nick Trout (after a year of neglect..
- sorry!). SF#1194231.
-
- * buildbot/test/test_vc.py (*.capable): store the actual VC
- binary's pathname in VCS[vcname], so it can be retrieved later
- (CVSSupport.vc_try_checkout): incorporate Niklaus Giger's patch to
- strip out non-numeric timezone information, specifically the funky
- German string that his system produced that confuses CVS.
- (DarcsSupport.vc_create): use dovc() instead of vc(), this should
- allow Darcs tests to work on windows
- * buildbot/scripts/tryclient.py (SourceStampExtractor): use
- procutils.which() everywhere, to allow tryclient to work under
- windows. Also from Niklaus Giger, SF#1463394.
-
- * buildbot/twcompat.py (which): move the replacement for a missing
- twisted.python.procutils.which from test_vc.py to here, so it can
- be used in other places too (specifically tryclient.py)
-
-2006-04-23 Brian Warner <warner@lothar.com>
-
- * buildbot/status/html.py (StatusResourceBuild.body): replace the
- bare buildbotURL/projectName line with a proper DIV, along with a
- CSS class of "title", from Stefan Seefeld (SF#1461675).
- (WaterfallStatusResource.body0): remove the redundant 'table'
- class from the table
- (WaterfallStatusResource.body): same. Also add class="LastBuild"
- to the top-row TR, and class="Activity" to the second-row TR,
- rather than putting them in the individual TD nodes.
-
- * buildbot/test/test_vc.py (VCBase.checkGotRevision): test
- 'got_revision' build property for all VC systems that implement
- accurate ones: SVN, Darcs, Arch, Bazaar, Mercurial.
-
- * buildbot/slave/commands.py (SourceBase._handleGotRevision): try
- to determine which revision we actually obtained
- (CVS.parseGotRevision): implement this for CVS, which just means
- to grab a timestamp. Not ideal, and it depends upon the buildslave
-	having a clock that is reasonably well synchronized with the server,
- but it's better than nothing.
- (SVN.parseGotRevision): implement it for SVN, which is accurate
- (Darcs.parseGotRevision): same
- (Arch.parseGotRevision): same
- (Bazaar.parseGotRevision): same
- (Mercurial.parseGotRevision): same
-
- * buildbot/process/step.py (LoggedRemoteCommand.remoteUpdate):
- keep a record of all non-stdout/stderr/header/rc status updates,
- for the benefit of RemoteCommands that send other useful things,
- like got_revision
- (Source.commandComplete): put any 'got_revision' status values
- into a build property of the same name
-
-
- * buildbot/process/step_twisted.py (Trial): update to deal with
- new ShellCommand refactoring
-
- * docs/buildbot.texinfo (Build Properties): document new feature
- that allows BuildSteps to get/set Build-wide properties like which
- revision was requested and/or checked out.
-
- * buildbot/interfaces.py (IBuildStatus.getProperty): new method
- * buildbot/status/builder.py (BuildStatus.getProperty): implement
- it. Note that this bumps the persistenceVersion of the saved Build
- object, so add the necessary upgrade-old-version logic to include
- an empty properties dict.
-
- * buildbot/process/base.py (Build.setProperty): implement it
- (Build.getProperty): same
- (Build.startBuild): change build startup to set 'branch',
- 'revision', and 'slavename' properties at the right time
-
- * buildbot/process/step.py (BuildStep.__init__): change setup to
- require 'build' argument in a better way
- (LoggingBuildStep): split ShellCommand into two pieces, for better
- subclassing elsewhere. LoggingBuildStep is a BuildStep which runs
- a single RemoteCommand that sends stdout/stderr status text. It
- also provides the usual commandComplete / createSummary /
- evaluateCommand / getText methods to be overridden...
- (ShellCommand): .. whereas ShellCommand is specifically for
- running RemoteShellCommands. Other shell-like BuildSteps (like
- Source) can inherit from LoggingBuildStep instead of ShellCommand
- (WithProperties): marker class to do build-property interpolation
- (Source): inherit from LoggingBuildStep instead of ShellCommand
- (RemoteDummy): same
-
- * buildbot/test/test_properties.py: test new functionality
-
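A rough sketch of the new build-property interpolation from a master.cfg,
using the 0.7.x module layout (the tarball name and the property chosen here
are just examples):

    from buildbot.process import factory
    from buildbot.process.step import ShellCommand, WithProperties

    f = factory.BuildFactory()
    # interpolate the 'buildnumber' build property into the shell command
    f.addStep(ShellCommand,
              command=["tar", "czf",
                       WithProperties("build-%s.tar.gz", "buildnumber"),
                       "source"])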
-2006-04-21 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py: rename testBranch to
- testCheckoutBranch to keep the tests in about the right
- alphabetical order
-
-2006-04-18 Brian Warner <warner@lothar.com>
-
- * docs/buildbot.texinfo (PBListener): improve cross-references
- between PBListener and 'buildbot statusgui', thanks to John Pye
- for the suggestion.
-
-2006-04-17 Brian Warner <warner@lothar.com>
-
- * buildbot/twcompat.py (maybeWait): handle SkipTest properly when
- running under Twisted-1.3.0, otherwise skipped tests are reported
- as errors.
-
- * all: use isinstance() instead of 'type(x) is foo', suggested by
- Neal Norwitz
-
- * buildbot/process/process_twisted.py (QuickTwistedBuildFactory):
- oops, fix a brain-fade from the other week, when making the
- addStep changes. I changed all the __init__ upcalls to use the
- wrong superclass name.
- (FullTwistedBuildFactory.__init__): same
- (TwistedDebsBuildFactory.__init__): same
- (TwistedReactorsBuildFactory.__init__): same
- (TwistedBuild.isFileImportant): use .startswith for clarity,
- thanks to Neal Norwitz for the suggestions.
-
- * contrib/viewcvspoll.py: script to poll a viewcvs database for
- changes, then deliver them over PB to a remote buildmaster.
-
- * contrib/svnpoller.py: added script by John Pye to poll a remote
- SVN repository (by running 'svn log') from a cronjob, and run
- 'buildbot sendchange' to deliver the changes to a remote
- buildmaster.
- * contrib/svn_watcher.py: added script by Niklaus Giger (a
- modification of svnpoller.py), same purpose, but this one loops
- internally (rather than expecting to run from a cronjob) and works
- under windows.
- * contrib/README.txt: same
-
-2006-04-11 Brian Warner <warner@lothar.com>
-
- * all: fix a number of incorrect names and missing imports, thanks
- to Anthony Baxter for the patch.
- * buildbot/status/html.py (WaterfallStatusResource.statusToHTML):
- remove unused buggy method.
- * buildbot/status/builder.py (BuildStatus.saveYourself): rmtree
- comes from shutil, not "shutils"
- * buildbot/process/step.py (TreeSize.evaluateCommand): fix bad name
- (Arch.checkSlaveVersion): same
- * buildbot/process/step_twisted.py (Trial.commandComplete): same, in
- some disabled code
- * buildbot/process/step_twisted2.py: add some missing imports
- * buildbot/twcompat.py (_deferGenerator): fix cut-and-paste error,
- this code used to live in twisted.internet.defer
-
-2006-04-10 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step.py (Mercurial): add Mercurial support
- * buildbot/slave/commands.py (Mercurial): same
- * buildbot/scripts/tryclient.py (MercurialExtractor): same
- * buildbot/test/test_vc.py (Mercurial): same, checkout over HTTP is
- not yet tested, but 'try' support *is* covered
- * docs/buildbot.texinfo (Mercurial): document it
-
- * buildbot/process/step.py (LoggedRemoteCommand.remoteUpdate): add
- some debugging messages (turned off)
- * buildbot/test/test_vc.py: improve debug messages
-
-2006-04-07 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py (which): define our own which() in case
- we can't import twisted.python.procutils, because procutils doesn't
- exist in Twisted-1.3
-
- * docs/buildbot.texinfo (Interlocks): fix some typos, mention use
- of SlaveLocks for performance tests
-
- * docs/examples/twisted_master.cfg: update to match current usage
-
- * buildbot/changes/p4poller.py (P4Source): add new arguments:
- password, p4 binary, pollinterval, maximum history to check.
- Patch from an anonymous sf.net contributor, SF#1219384.
- * buildbot/process/step.py (P4Sync.__init__): add username,
- password, and client arguments.
- * buildbot/slave/commands.py (P4Sync): same
-
-2006-04-05 Brian Warner <warner@lothar.com>
-
- * buildbot/process/factory.py (BuildFactory.addStep): new method
- to add steps to a BuildFactory. Use it instead of f.steps.append,
- and you can probably avoid using the s() convenience function.
- Patch from Neal Norwitz, sf.net #1412605.
- (other): update all factories to use addStep
- * buildbot/process/process_twisted.py: update all factories to use
- addStep.
-
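A sketch of the difference (the step class and its arguments are placeholders):

    from buildbot.process import factory, step

    f = factory.BuildFactory()
    # instead of: f.steps.append(s(step.ShellCommand, command=["make", "test"]))
    f.addStep(step.ShellCommand, command=["make", "test"])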
-2006-04-03 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py: modified find-the-VC-command logic to
- work under windows too. Adapted from a patch by Niklaus Giger,
- addresses SF#1463399.
-
-	* buildbot/test/__init__.py: set $LANG to 'C', to ensure that
- spawned commands emit parseable results in english and not some
- other language. Patch from Niklaus Giger, SF#1463395.
-
- * README (INSTALLATION): discourage users from running unit tests on
- a "network drive", patch from Niklaus Giger, SF#1463394.
-
-2006-03-22 Brian Warner <warner@lothar.com>
-
- * contrib/svn_buildbot.py: rearrange, add an easy-to-change
- function to turn a repository-relative pathname into a (branch,
- branch-relative-filename) tuple. Change this function to handle
- the branch naming policy used by your Subversion repository.
- Thanks to AllMyData.com for sponsoring this work.
-
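Such a policy function might look like the following sketch, assuming the
common trunk/branches repository layout (the name and return convention are
illustrative, not necessarily what svn_buildbot.py uses):

    def split_file_branches(path):
        # map a repository-relative path to (branch, branch-relative-filename)
        pieces = path.split('/')
        if pieces[0] == 'trunk':
            return (None, '/'.join(pieces[1:]))   # None selects the default branch
        if pieces[0] == 'branches' and len(pieces) > 2:
            return ('/'.join(pieces[:2]), '/'.join(pieces[2:]))
        return None                               # ignore tags and other paths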
-2006-03-16 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/sample.cfg: add python-mode declaration for
- vim. Thanks to John Pye for the patch.
-
- * docs/buildbot.texinfo (Launching the daemons): fix @reboot job
- command line, mention the importance of running 'crontab' as the
- buildmaster/buildslave user. Thanks to John Pye for the catch.
-
-2006-03-13 Brian Warner <warner@lothar.com>
-
- * buildbot/status/words.py (IRC): add an optional password=
- argument, which will be sent to Nickserv in an IDENTIFY message at
- login, to claim the nickname. freenode requires this before the
- bot can sent (or reply to) private messages. Thanks to Clement
-	bot can send (or reply to) private messages. Thanks to Clement
- * docs/buildbot.texinfo (IRC Bot): document it
-
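For example, a master.cfg sketch (host, nick, channel, and password are
placeholders):

    from buildbot.status import words
    c['status'].append(words.IRC(host="irc.freenode.net", nick="bb-bot",
                                 channels=["#myproject"],
                                 password="nickserv-secret"))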
- * buildbot/status/builder.py (LogFile.merge): don't write chunks
- larger than chunkSize. Fixes SF#1349253.
- * buildbot/test/test_status.py (Log.testLargeSummary): test it
- (Log.testConsumer): update to match new internal chunking behavior
-
-2006-03-12 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py: remove the last use of waitForDeferred
-
- * buildbot/test/test_maildir.py (MaildirTest): rename the
- 'timeout' method, as it collides with trial's internals
-
- * buildbot/scripts/runner.py: add 'buildbot restart' command
- (stop): don't sys.exit() out of here, otherwise restart can't work
- * docs/buildbot.texinfo (Shutdown): document it
-
- * buildbot/buildset.py (BuildSet.__init__): clean up docstring
- * buildbot/status/html.py (Waterfall.__init__): same
- * buildbot/process/builder.py (Builder.startBuild): same
- * buildbot/process/base.py (BuildRequest): same
- * buildbot/sourcestamp.py (SourceStamp): same
- * buildbot/scheduler.py (Nightly): same
-
- * buildbot/__init__.py (version): bump to 0.7.2+ while between
- releases
- * docs/buildbot.texinfo: same
-
-2006-02-17 Brian Warner <warner@lothar.com>
-
- * buildbot/__init__.py (version): Releasing buildbot-0.7.2
- * docs/buildbot.texinfo: set version number to match
- * NEWS: update for 0.7.2
-
-2006-02-16 Brian Warner <warner@lothar.com>
-
- * docs/buildbot.texinfo (Build Dependencies): add cindex tag
-
-2006-02-09 Brian Warner <warner@lothar.com>
-
- * docs/buildbot.texinfo (How Different VC Systems Specify Sources):
- add text to explain per-build branch parameters
- * NEWS: mention --umask
-
-2006-02-08 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/runner.py (Maker.makeSlaveTAC): remove unused
- method
- (SlaveOptions.optParameters): add --umask, to make it possible to
- make buildslave-generated files (including build products) be
- world-readable
- (slaveTAC): same
- * buildbot/slave/bot.py (BuildSlave.startService): same
-
-2006-01-23 Brian Warner <warner@lothar.com>
-
- * buildbot/status/builder.py: urllib.quote() all URLs that include
- Builder names, so that builders can include characters like '/'
- and ' ' without completely breaking the resulting HTML. Thanks to
- Kevin Turner for the patch.
- * buildbot/status/html.py: same
- * buildbot/test/test_web.py (GetURL.testBuild): match changes
-
- * NEWS: update in preparation for upcoming release
-
-2006-01-18 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.cfg: update to match the Twisted
- buildbot: remove python2.2, switch to exarkun's buildslaves,
- disable the .deb builder until we figure out how to build twisted
- .debs from SVN, add some ktrace debugging to the OS-X build
- process and remove the qt build, remove threadless builders,
- change freebsd builder to use landonf's buildslave.
-
-2006-01-12 Brian Warner <warner@lothar.com>
-
- * buildbot/master.py (Manhole.__init__): let port= be a strports
- specification string, but handle a regular int for backwards
- compatibility. This allows "tcp:12345:interface=127.0.0.1" to be
- used in master.cfg to limit connections to just the local host.
- (BuildMaster.loadConfig): same for c['slavePortnum']
- * buildbot/scheduler.py (Try_Userpass.__init__): same
- * buildbot/status/client.py (PBListener.__init__): same
- * buildbot/status/html.py (Waterfall.__init__): same, for both
- http_port and distrib_port. Include backwards-compatibility checks
- so distrib_port can be a filename string and still mean unix:/foo
- * docs/buildbot.texinfo (Setting the slaveport): document it
- (Debug options): same
- (HTML Waterfall): same
- (PBListener): same
- (try): same
- * buildbot/test/test_config.py (ConfigTest): test it
-
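A sketch of the strports-style values this allows in master.cfg (the port
numbers are arbitrary examples):

    # restrict the slave port and the HTML status port to the loopback interface
    c['slavePortnum'] = "tcp:9989:interface=127.0.0.1"

    from buildbot.status import html
    c['status'].append(html.Waterfall(http_port="tcp:8010:interface=127.0.0.1"))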
- * buildbot/master.py (BuildMaster.loadConfig): wait for the
- slaveport's disownServiceParent deferred to fire before opening
- the new one. Fixes an annoying bug in the unit tests.
-
-2006-01-03 Brian Warner <warner@lothar.com>
-
- * buildbot/master.py (BuildMaster): remove the .schedulers
- attribute, replacing it with an allSchedulers() method that looks
- for all IService children that implement IScheduler. Having only
- one parent/child relationship means fewer opportunities for bugs.
- (BuildMaster.allSchedulers): new method
- (BuildMaster.loadConfig_Schedulers): update to use allSchedulers,
- also fix ugly bug that caused any config-file reload to
- half-forget about the earlier Schedulers, causing an exception
- when a Change arrived and was handed to a half-connected
- Scheduler. The exception was in scheduler.py line 54ish:
- self.parent.submitBuildSet(bs)
- exceptions.AttributeError: 'NoneType' object has no attribute
- 'submitBuildSet'
- (BuildMaster.addChange): update to use allSchedulers()
-
- * buildbot/scheduler.py (BaseScheduler.__implements__): fix this
- to work properly with twisted-1.3.0, where you must explicitly
- include the __implements__ from parent classes
- (BaseScheduler.__repr__): make it easier to distinguish distinct
- instances
- (BaseUpstreamScheduler.__implements__): same
-
- * buildbot/status/builder.py (Status.getSchedulers): update to
- use allSchedulers()
- * buildbot/test/test_run.py (Run.testMaster): same
- * buildbot/test/test_dependencies.py (Dependencies.findScheduler): same
- * buildbot/test/test_config.py (ConfigTest.testSchedulers): same,
- make sure Scheduler instances are left alone when an identical
- config file is reloaded
- (ConfigElements.testSchedulers): make sure Schedulers are properly
- comparable
-
- * Makefile (TRIALARGS): my local default Twisted version is now
- 2.1.0, update the trial arguments accordingly
-
-2005-12-22 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.cfg: merge changes from pyr: add
- new win32 builders
-
- * buildbot/scheduler.py (BaseScheduler.addChange): include a dummy
- addChange in the parent class, although I suspect this should be
- fixed better in the future.
-
-2005-11-26 Brian Warner <warner@lothar.com>
-
- * buildbot/scheduler.py (AnyBranchScheduler.addChange): don't
- explode when branch==None, thanks to Kevin Turner for the catch
- * buildbot/test/test_scheduler.py (Scheduling.testAnyBranch): test
- it
-
- * buildbot/__init__.py (version): bump to 0.7.1+ while between
- releases
- * docs/buildbot.texinfo: same
-
-2005-11-26 Brian Warner <warner@lothar.com>
-
- * buildbot/__init__.py (version): Releasing buildbot-0.7.1
- * docs/buildbot.texinfo: set version number to match
-
-2005-11-26 Brian Warner <warner@lothar.com>
-
- * NEWS: update for 0.7.1
-
- * buildbot/status/builder.py (BuildStepStatus.unsubscribe): make
- sure that unsubscribe works even if we never sent an ETA update.
- Also, don't explode on duplicate unsubscribe.
- (BuildStepStatus.addLog): make the convenience "return self"-added
- watcher automatically unsubscribe when the Step finishes.
- (BuildStatus.unsubscribe): same handle-duplicate-unsubscribe
- (BuildStatus.stepStarted): same auto-unsubscribe
- (BuilderStatus.buildStarted): same auto-unsubscribe
-
- * buildbot/interfaces.py (IStatusReceiver.buildStarted): document
- auto-unsubscribe
- (IStatusReceiver.stepStarted): same
- (IStatusReceiver.logStarted): same
-
- * buildbot/test/test_run.py (Status): move the Status test..
- * buildbot/test/test_status.py (Subscription): .. to here
-
-2005-11-25 Brian Warner <warner@lothar.com>
-
- * NEWS: more updates
-
- * buildbot/locks.py: fix the problem in which loading a master.cfg
- file that changes some Builders (but not all of them) can result
- in having multiple copies of the same Lock. Now, the real Locks
- are kept in a table inside the BotMaster, and the Builders/Steps
- use "LockIDs", which are still instances of MasterLock and
- SlaveLock. The real Locks are instances of the new RealMasterLock
- and RealSlaveLock classes.
- * buildbot/master.py (BotMaster.getLockByID): new method to
- convert LockIDs into real Locks.
- * buildbot/process/base.py (Build.startBuild): convert LockIDs
- into real Locks before building
- * buildbot/process/step.py (BuildStep.startStep): same
- * buildbot/test/test_locks.py (Locks.testLock1a): add a test which
- exercises the problem
-
-
- * docs/buildbot.texinfo (Scheduler Types): give a few hints about
- what Schedulers are available
-
- * buildbot/scheduler.py (Nightly): add new Scheduler based upon
- work by Dobes Vandermeer and hacked mercilessly by me. This offers
- 'cron'-style build scheduling at certain times of day, week,
- month, or year.
- * buildbot/test/test_scheduler.py (Scheduling.testNightly): test it
-
- * buildbot/scheduler.py (Scheduler): change fileIsImportant
- handling: treat self.fileIsImportant more as an attribute that
- contains a callable than as a method. If the attribute is None,
- don't call it and assume all filenames are important. It is still
- possible to provide a fileIsImportant method in a subclass,
- however.
- (AnyBranchScheduler): handle fileIsImportant=None, previously it
- was broken
- * buildbot/test/test_scheduler.py (Scheduling.testAnyBranch2):
- test using AnyBranchScheduler with fileIsImportant=None
-
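A minimal configuration sketch for the new Nightly scheduler, alongside a
plain Scheduler with fileIsImportant left as None (builder names and times
are assumptions):

    from buildbot.scheduler import Scheduler, Nightly

    c['schedulers'] = [
        # build the 'full' builder every day at 03:00
        Nightly('nightly', ['full'], hour=3, minute=0),
        # build 'quick' 60s after any change; with fileIsImportant=None,
        # every changed file is treated as important
        Scheduler('quick', branch=None, treeStableTimer=60,
                  builderNames=['quick'], fileIsImportant=None),
    ]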
-2005-11-24 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_config.py (StartService): don't claim a fixed
- port number, instead set slavePort=0 on the first pass, figure out
- what port was allocated, then switch to a config file that uses
- the allocated port.
-
- * buildbot/master.py (BuildMaster.loadConfig): close the old
- slaveport before opening the new one, because unit tests might
- replace slavePort=0 with the same allocated portnumber, and if we
- don't wait for the old port to close first, we get a "port already
- in use" error. There is a tiny race condition here, but the only
- threat is from other programs that bind (statically) to the same
- port number we happened to be allocated, and only if those
- programs use SO_REUSEADDR, and only if they get control in between
- reactor turns.
-
- * Makefile (TRIALARGS): update to handle Twisted > 2.1.0
-
- * buildbot/master.py (BuildMaster.loadConfig_Sources): remove all
- deleted ChangeSources before adding any new ones
- * buildbot/changes/freshcvs.py (FreshCVSSourceNewcred): fix
- compare_attrs, to make sure that a config-file reload does not
- unnecessarily replace an unmodified ChangeSource instance
- * buildbot/test/test_config.py (ConfigTest.testSources): update
-
- * buildbot/scheduler.py (AnyBranchScheduler): fix branches=[] to
- mean "don't build anything", and add a warning if it gets used
- because it isn't actually useful.
-
- * contrib/svn_buildbot.py: update example usage to match the port
- number that gets used by the PBChangeSource
- * buildbot/scripts/sample.cfg: add example of PBChangeSource
-
-2005-11-22 Brian Warner <warner@lothar.com>
-
- * NEWS: start collecting items for next release
-
- * buildbot/process/step.py (SVN.computeSourceRevision): assume
- revisions are strings
- (P4Sync.computeSourceRevision): same
-
- * buildbot/status/html.py (StatusResourceBuild.body): add a link
- to the Buildbot's overall status page
- (StatusResourceBuilder.body): same
-
-2005-11-15 Brian Warner <warner@lothar.com>
-
- * buildbot/master.py (BuildMaster.loadConfig): serialize the
- config-file loading, specifically to make sure old StatusTargets
- are finished shutting down before new ones start up (thus
- resolving a bug in which changing the Waterfall object would fail
- because both new and old instances were claiming the same
- listening port). Also load new Schedulers after all the new
- Builders are set up, in case they fire off a new build right away.
- * buildbot/test/test_config.py (StartService): test it
-
- * buildbot/status/mail.py (MailNotifier.buildMessage): oops, add
- the branch name to the mail body
-
- * buildbot/changes/pb.py (PBChangeSource.compare_attrs): add this.
- Without it, a config-file reload fails to update an existing
- PBChangeSource.
- * buildbot/changes/freshcvs.py (FreshCVSSourceNewcred): add
- username/passwd to compare_attrs, for the same reason
- * buildbot/status/html.py (Waterfall): add favicon to
- compare_attrs, same reason
-
-2005-11-05 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/tryclient.py (createJobfile): stringify the
- baserev before stuffing it in the jobfile. This resolves problems
- under SVN (and probably Arch) where revisions are expressed as
- numbers. I'm inclined to use string-based revisions everywhere in
- the future, but this fix should be safe for now. Thanks to Steven
- Walter for the patch.
-
- * buildbot/changes/changes.py (ChangeMaster.saveYourself): use
- binary mode when opening pickle files, to make windows work
- better. Thanks to Dobes Vandermeer for the catch.
- * buildbot/status/builder.py (BuildStatus.saveYourself): same
- (BuilderStatus.getBuildByNumber): same
- (Status.builderAdded): same
- * buildbot/master.py (BuildMaster.loadChanges): same
-
- * buildbot/util.py (Swappable): delete unused leftover code
-
- * buildbot/process/step.py (SVN): when building on a non-default
- branch, add the word "[branch]" to the VC step's description, so
- it is obvious that we're not building the usual stuff. Likewise,
- when we are building a specific revision, add the text "rNNN" to
- indicate what that revision number is. Thanks to Brad Hards and
- Nathaniel Smith for the suggestion.
- (Darcs.startVC): same
- (Arch.startVC): same
- (Bazaar.startVC): same
-
- * buildbot/process/factory.py (GNUAutoconf.__init__): fix a silly
- typo, caught by Mark Dillavou, closes SF#1216636.
-
- * buildbot/test/test_status.py (Log.TODO_testDuplicate): add notes
- about a test to add some day
-
- * docs/examples/twisted_master.cfg: update: bot1 can now handle
- the 'full-2.3' build, and the 'reactors' build is now run under
- python-2.4 because the buildslave no longer has gtk/etc bindings
- for earlier versions.
-
-2005-11-03 Brian Warner <warner@lothar.com>
-
- * buildbot/interfaces.py (IBuilderControl.resubmitBuild): new
- method, takes an IBuildStatus and rebuilds it. It might make more
- sense to add this to IBuildControl instead, but that instance goes
- away completely once the build has finished, and resubmitting
- builds can take place weeks later.
- * buildbot/process/builder.py (BuilderControl.resubmitBuild): same
- * buildbot/status/html.py (StatusResourceBuild): also stash an
- IBuilderControl so we can use resubmitBuild.
- (StatusResourceBuild.body): render "resubmit" button if we can.
- Also add hrefs for each BuildStep
- (StatusResourceBuild.rebuild): add action for "resubmit" button
- (StatusResourceBuilder.getChild): give it an IBuilderControl
-
- * buildbot/status/builder.py (Status.getURLForThing): change the
- URL for BuildSteps to have a "step-" prefix, so the magic URLs
- that live as targets of buttons like "stop" and "rebuild" can't
- collide with them.
- * buildbot/status/builder.py (Status.getURLForThing): same
- * buildbot/status/html.py (StatusResourceBuild.getChild): same
- (StepBox.getBox): same
- * buildbot/test/test_web.py (GetURL): same
- (Logfile): same
-
- * buildbot/process/step.py (SVN.__init__): put svnurl/baseURL
- exclusivity checks after Source.__init__ upcall, so misspelled
- arguments will be reported more usefully
- (Darcs.__init__): same
-
-2005-10-29 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.cfg: don't double-fire the 'quick'
- builder. Move the Try scheduler off to a separate port.
-
-2005-10-27 Brian Warner <warner@lothar.com>
-
- * buildbot/clients/gtkPanes.py
- (TwoRowClient.remote_builderRemoved): disappearing Builders used
- to cause the app to crash, now they don't.
-
- * buildbot/clients/debug.py: display the buildmaster's location
- in the window's title bar
-
-2005-10-26 Brian Warner <warner@lothar.com>
-
- * buildbot/status/mail.py (MailNotifier): urllib.escape the URLs
- in case they have spaces or whatnot. Patch from Dobes Vandermeer.
- * buildbot/test/test_status.py (MyStatus.getURLForThing): fix it
-
- * buildbot/status/html.py (td): put a single non-breaking space
- inside otherwise empty <td> elements, as a workaround for buggy
- browsers which would optimize them away (along with any associated
- styles, like the kind that create the waterfall grid borders).
- Patch from Frerich Raabe.
-
- * buildbot/process/step_twisted.py (Trial): expose the trialMode=
- argv-list as an argument, defaulting to ["-to"], which is
- appropriate for the Trial that comes with Twisted-2.1.0 and
- earlier. The Trial in current Twisted SVN wants
- ["--reporter=bwverbose"] instead. Also expose trialArgs=, which
- defaults to an empty list.
- * buildbot/process/process_twisted.py (TwistedTrial.trialMode):
- match it, now that trialMode= is a list instead of a single string
-
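For instance, a buildmaster tracking current Twisted SVN might pass something
like the following sketch (the testpath and test names are assumptions, and
other Trial arguments are omitted):

    from buildbot.process.step_twisted import Trial
    from buildbot.process.factory import s

    f.steps.append(s(Trial,
                     testpath=".", tests=["buildbot.test"],
                     trialMode=["--reporter=bwverbose"],   # Trial from Twisted SVN
                     trialArgs=[]))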
- * buildbot/__init__.py (version): bump to 0.7.0+ while between
- releases
- * docs/buildbot.texinfo: same
-
-2005-10-24 Brian Warner <warner@lothar.com>
-
- * buildbot/__init__.py (version): Releasing buildbot-0.7.0
- * docs/buildbot.texinfo: set version number to match
-
-2005-10-24 Brian Warner <warner@lothar.com>
-
- * README: update for 0.7.0
- * NEWS: same
- * docs/buildbot.texinfo: move the freshcvs stuff out of the README
-
- * buildbot/clients/debug.glade: add 'branch' box to fake-commit
- * buildbot/clients/debug.py (DebugWidget.do_commit): same. Don't
- send the branch= argument unless the user really provided one, to
- retain compatibility with older buildmasters that don't accept
- that argument.
- * buildbot/master.py (DebugPerspective.perspective_fakeChange):
- same
-
- * docs/buildbot.texinfo: update lots of stuff
-
- * buildbot/scripts/runner.py (sendchange): add a --branch argument
- to the 'buildbot sendchange' command
- * buildbot/clients/sendchange.py (Sender.send): same
- * buildbot/changes/pb.py (ChangePerspective): same
- * buildbot/test/test_changes.py (Sender.testSender): test it
-
- * buildbot/process/step.py (SVN.__init__): change 'base_url' and
- 'default_branch' argument names to 'baseURL' and 'defaultBranch',
- for consistency with other BuildStep arguments that use camelCase.
- Well, at least more of them use camelCase (like flunkOnWarnings)
- than don't.. I wish I'd picked one style and stuck with it
- earlier. Annoying, but it's best done before the release, since
- these arguments didn't exist at all in 0.6.6 .
- (Darcs): same
- * buildbot/test/test_vc.py (SVN.testCheckout): same
- (Darcs.testPatch): same
- * docs/buildbot.texinfo (SVN): document the change
- (Darcs): same, add some build-on-branch docs
- * docs/examples/twisted_master.cfg: match change
-
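A sketch of the renamed arguments in use (the repository URL is a placeholder):

    from buildbot.process.factory import BuildFactory, s
    from buildbot.process.step import SVN

    f = BuildFactory([s(SVN, mode="update",
                        baseURL="http://svn.example.org/repos/myproj/",
                        defaultBranch="trunk")])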
- * buildbot/process/step.py (BuildStep): rename
- slaveVersionNewEnough to slaveVersionIsOlderThan, because that's
- how it is normally used.
- * buildbot/test/test_steps.py (Version.checkCompare): same
-
- * buildbot/process/step.py (CVS.startVC): refuse to build
- update/copy -style builds on a non-default branch with an old
- buildslave (<=0.6.6) that doesn't know how to do it properly. The
- concern is that it will do a VC 'update' in an existing tree when
- it is supposed to be switching branches (and therefore clobbering
- the tree to do a full checkout), thus building the wrong source.
- This used to be a warning, but I think the confusion it is likely
- to cause warrants making it an error.
- (SVN.startVC): same, also make mode=export on old slaves an error
- (Darcs.startVC): same
- (Git.startVC): improve error message for non-Git-enabled slaves
- (Arch.checkSlaveVersion): same. continue to emit a warning when a
- specific revision is built on a slave that doesn't pay attention
- to args['revision'], because for slowly-changing trees it will
- probably do the right thing, and because we have no way to tell
- whether we're asking it to build the most recent version or not.
- * buildbot/interfaces.py (BuildSlaveTooOldError): new exception
-
- * buildbot/scripts/runner.py (SlaveOptions.postOptions): assert
- that 'master' is in host:portnum format, to catch errors sooner
-
-2005-10-23 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (ProcessDocs.createSummary):
- when creating the list of warning messages, include the line
- immediately after each WARNING: line, since that's usually where
- the file and line number wind up.
-
- * docs/examples/twisted_master.cfg: OS-X slave now does QT, add a
- TryScheduler
-
- * NEWS: update
-
-2005-10-22 Brian Warner <warner@lothar.com>
-
- * buildbot/status/html.py (HtmlResource): incorporate valid-HTML
- patch from Brad Hards
- * buildbot/status/classic.css: same
- * buildbot/test/test_web.py (Waterfall): match changes
-
- * buildbot/test/test_steps.py (BuildStep.setUp): set
- nextBuildNumber so the test passes
- * buildbot/test/test_status.py (MyBuilder): same
-
- * buildbot/status/html.py (StatusResourceBuild.body): revision
- might be numeric, so stringify it before html-escapifying it
- (CurrentBox.getBox): add a "waiting" state, and show a countdown
- timer for the upcoming build
- * buildbot/status/classic.css: add background-color attributes for
- offline/waiting/building classes
-
- * buildbot/status/builder.py (BuildStatus): derive from
- styles.Versioned, fix upgrade of .sourceStamp attribute. Also set
- the default (i.e. unknown) .slavename to "???" instead of None,
- since even unknown slavenames need to be printed eventually.
- (BuilderStatus): also derive from styles.Versioned . More
- importantly, determine .nextBuildNumber at creation/unpickling
- time by scanning the directory of saved BuildStatus instances and
- choosing one larger than the highest-numbered one found. This
- should fix the problem where random errors during upgrades cause
- the buildbot to forget about earlier builds. .nextBuildNumber is
- no longer stored in the pickle.
- (Status.builderAdded): if we can't unpickle the BuilderStatus,
- at least log the error. Also call Builder.determineNextBuildNumber
- once the basedir is set.
-
- * buildbot/master.py (BuildMaster.loadChanges): do
- styles.doUpgrade afterwards, in case I decide to make Changes
- derived from styles.Versioned some day and forget to make this
- change later.
-
-
- * buildbot/test/test_runner.py (Options.testForceOptions): skip
- when running under older pythons (<2.3) in which the shlex module
- doesn't have a 'split' function.
-
- * buildbot/process/step.py (ShellCommand.start): make
- errorMessages= be a list of strings to stuff in the log before the
- command actually starts. This makes it easier to flag multiple
- warning messages, e.g. when the Source steps have to deal with an
- old buildslave.
- (CVS.startVC): handle slaves that don't handle multiple branches
- by switching into 'clobber' mode
- (SVN.startVC): same. Also reject branches without base_url
- (Darcs.startVC): same. Also reject revision= in older slaves
- (Arch.checkSlaveVersion): same (just the multiple-branches stuff)
-	(Bazaar.startVC): same, and test for baz separately from arch
-
- * buildbot/slave/commands.py (cvs_ver): document new features
-
- * buildbot/process/step.py (BuildStep.slaveVersion): document it
- (BuildStep.slaveVersionNewEnough): more useful utility method
- * buildbot/test/test_steps.py (Version): start testing it
-
- * buildbot/status/words.py (IrcStatusBot.command_FORCE): note that
- the 'force' command requires python2.3, for the shlex.split method
-
- * docs/examples/twisted_master.cfg: remove old freshcvs stuff,
- since we don't use it anymore. The Twisted buildbot uses a
- PBChangeSource now.
-
-2005-10-21 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py: rework all BuildFactory
- classes to take a 'source' step as an argument, instead of
- building up the SVN instance in the factory.
- * docs/examples/twisted_master.cfg: enable build-on-branch by
- providing a base_url and default_branch
-
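-	A minimal sketch (not part of the tree) of the branch-aware source
-	step described above; the base_url/default_branch argument names
-	follow this entry, while the repository URL is a hypothetical
-	placeholder:
-
-	    from buildbot.process import step
-	    from buildbot.process.factory import s
-
-	    # the branch actually built comes from the SourceStamp; base_url
-	    # plus the branch name form the checkout URL, and default_branch
-	    # is used when no branch is requested. The resulting spec is then
-	    # handed to a BuildFactory as its 'source' step.
-	    source = s(step.SVN,
-	               base_url="svn://svn.example.org/repo/",
-	               default_branch="trunk",
-	               mode="update")
-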
- * buildbot/status/words.py (IrcStatusBot.command_FORCE): add
-	control over --branch and --revision, though they are not always
-	legal to provide
- * buildbot/status/html.py (StatusResourceBuilder.force): same
- (StatusResourceBuild.body): display SourceStamp components
-
- * buildbot/scripts/runner.py (ForceOptions): option parser for the
- IRC 'force' command, so it can be shared with an eventual
- command-line-tool 'buildbot force' mode.
- * buildbot/test/test_runner.py (Options.testForceOptions): test it
-
-2005-10-20 Brian Warner <warner@lothar.com>
-
- * buildbot/status/mail.py (MailNotifier.buildMessage): reformat
-
- * docs/examples/twisted_master.cfg: update to use Schedulers
-
- * buildbot/scripts/sample.cfg: update with Schedulers
-
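-	A minimal sketch of the Scheduler-based configuration mentioned
-	above; the argument names are assumed from the scheduler module
-	described elsewhere in this log, and the builder name is a
-	hypothetical placeholder:
-
-	    from buildbot import scheduler
-
-	    c = BuildmasterConfig = {}   # the usual master.cfg config dictionary
-
-	    # fire the 'quick' builder once the tree has been quiet for a
-	    # minute after a change on the default branch
-	    c['schedulers'] = [scheduler.Scheduler(name="quick", branch=None,
-	                                           treeStableTimer=60,
-	                                           builderNames=["quick"])]
-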
- * buildbot/interfaces.py (IBuilderControl.requestBuildSoon): new
- method specifically for use by HTML "force build" button and the
- IRC "force" command. Raises an immediate error if there are no
- slaves available.
- (IBuilderControl.requestBuild): make this just submit a build, not
- try to check for existing slaves or set up any when-finished
- Deferreds or anything.
- * buildbot/process/builder.py (BuilderControl): same
- * buildbot/status/html.py (StatusResourceBuilder.force): same
- * buildbot/status/words.py (IrcStatusBot.command_FORCE): same
- * buildbot/test/test_slaves.py: same
- * buildbot/test/test_web.py: same
-
-2005-10-19 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.cfg: re-sync with reality: bring
- back python2.2 tests, turn off OS-X threadedselect-reactor tests
-
-2005-10-18 Brian Warner <warner@lothar.com>
-
- * buildbot/status/html.py: provide 'status' argument to most
- StatusResourceFOO objects
- (StatusResourceBuild.body): href-ify the Builder name, add "Steps
- and Logfiles" section to make the Build page into a more-or-less
- comprehensive source of status information about the build
-
- * buildbot/status/mail.py (MailNotifier): include the Build's URL
- * buildbot/status/words.py (IrcStatusBot.buildFinished): same
-
-2005-10-17 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py (TwistedTrial): update Trial
-	arguments to accommodate Twisted >=2.1.0. I will have to figure
- out what to do about other projects: the correct options for
- recent Twisteds will not work for older ones.
-
-2005-10-15 Brian Warner <warner@lothar.com>
-
- * buildbot/status/builder.py (Status.getURLForThing): add method
- to provide a URL for arbitrary IStatusFoo objects. The idea is to
- use this in email/IRC status clients to make them more useful, by
- providing the end user with hints on where to learn more about the
- object being reported on.
- * buildbot/test/test_web.py (GetURL): tests for it
-
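-	A hedged sketch of how a status client might use it; the helper
-	name and its output are illustrative only:
-
-	    def announce(status, build):
-	        # 'status' is the master's IStatus object, 'build' an
-	        # IBuildStatus; print a pointer to the build's page when the
-	        # master knows one
-	        url = status.getURLForThing(build)
-	        if url:
-	            print "build details:", url
-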
-2005-10-14 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_config.py (ConfigTest._testSources_1): oops,
- fix bug resulting from deferredResult changes
-
-2005-10-13 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_changes.py: remove use of deferredResult
- * buildbot/test/test_config.py: same
- * buildbot/test/test_control.py: same
- * buildbot/test/test_status.py: same
- * buildbot/test/test_vc.py: this is the only remaining use, since
- it gets used at module level. This needs to be replaced by some
- sort of class-level run-once routine.
-
- * buildbot/status/words.py (IrcStatusBot.command_WATCH): fix typo
-
- * lots: implement multiple slaves per Builder, which means multiple
- current builds per Builder. Some highlights:
- * buildbot/interfaces.py (IBuilderStatus.getState): return a tuple
- of (state,currentBuilds) instead of (state,currentBuild)
- (IBuilderStatus.getCurrentBuilds): replace getCurrentBuild()
- (IBuildStatus.getSlavename): new method, so you can tell which
- slave got used. This only gets set when the build completes.
- (IBuildRequestStatus.getBuilds): new method
-
- * buildbot/process/builder.py (SlaveBuilder): add a .state
- attribute to track things like ATTACHING and IDLE and BUILDING,
- instead of..
- (Builder): .. the .slaves attribute here, which has been turned
- into a simple list of available slaves. Added a separate
- attaching_slaves list to track ones that are not yet ready for
- builds.
- (Builder.fireTestEvent): put off the test-event callback for a
- reactor turn, to make tests a bit more consistent.
- (Ping): cleaned up the slaveping a bit, now it disconnects if the
- ping fails due to an exception. This needs work, I'm worried that
- a code error could lead to a constantly re-connecting slave.
- Especially since I'm trying to move to a distinct remote_ping
- method, separate from the remote_print that we currently use.
- (BuilderControl.requestBuild): return a convenience Deferred that
- provides an IBuildStatus when the build finishes.
- (BuilderControl.ping): ping all connected slaves, only return True
- if they all respond.
-
- * buildbot/slave/bot.py (BuildSlave.stopService): stop trying to
- reconnect when we shut down.
-
- * buildbot/status/builder.py: implement new methods, convert
- one-build-at-a-time methods to handle multiple builds
- * buildbot/status/*.py: do the same in all default status targets
- * buildbot/status/html.py: report the build's slavename in the
- per-Build page, report all buildslaves on the per-Builder page
-
- * buildbot/test/test_run.py: update/create tests
- * buildbot/test/test_slaves.py: same
- * buildbot/test/test_scheduler.py: remove stale test
-
- * docs/buildbot.texinfo: document the new builder-specification
- 'slavenames' parameter
-
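-	A minimal sketch of a builder specification using the new
-	'slavenames' parameter; the other keys follow the existing
-	dictionary-style builder config, and all names are hypothetical:
-
-	    from buildbot.process import step
-	    from buildbot.process.factory import BuildFactory, s
-
-	    f = BuildFactory([s(step.ShellCommand, command=["make", "test"])])
-	    # c is the master.cfg config dictionary; any slave named in
-	    # 'slavenames' may be handed a build for this builder
-	    c['builders'] = [{'name': 'full',
-	                      'slavenames': ['bot1', 'bot2'],
-	                      'builddir': 'full',
-	                      'factory': f}]
-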
-2005-10-12 Brian Warner <warner@lothar.com>
-
- * buildbot/buildset.py (BuildSet): fix bug where BuildSet did not
- report failure correctly, causing Dependent builds to run when
- they shouldn't have.
- * buildbot/status/builder.py (BuildSetStatus): same
- * buildbot/test/test_buildreq.py (Set.testBuildSet): verify it
- (Set.testSuccess): test the both-pass case too
- * buildbot/test/test_dependencies.py (Dependencies.testRun_Fail):
- fix this test: it was ending too early, masking the failure before
- (Logger): specialized StatusReceiver to make sure the dependent
- builds aren't even started, much less completed.
-
-2005-10-07 Brian Warner <warner@lothar.com>
-
- * buildbot/slave/bot.py (SlaveBuilder.activity): survive
- bot.SlaveBuilder being disowned in the middle of a build
-
- * buildbot/status/base.py (StatusReceiverMultiService): oops, make
- this inherit from StatusReceiver. Also upcall in __init__. This
-	fixes the embarrassing crash when the new buildSetSubmitted method
- is invoked and Waterfall/etc don't implement their own.
- * buildbot/test/test_run.py: add a TODO note about a test to catch
- just this sort of thing.
-
- * buildbot/process/builder.py (Builder.attached): remove the
- already-attached warning, this situation is normal. Add some
- comments explaining it.
-
-2005-10-02 Brian Warner <warner@lothar.com>
-
- * buildbot/changes/maildir.py (Maildir.start): Tolerate
- OverflowError when setting up dnotify, because some 64-bit systems
- have problems with signed-vs-unsigned constants and trip up on the
- DN_MULTISHOT flag. Patch from Brad Hards.
-
-2005-09-06 Fred Drake <fdrake@users.sourceforge.net>
-
- * buildbot/process/step.py (BuildStep, ShellCommand): Add
- progressMetrics, description, descriptionDone to the 'parms' list,
-	and make use of the 'parms' list from the implementation class
-	instead of only BuildStep's to initialize the parameters. This
-	allows buildbot.process.factory.s() to initialize all the parms,
-	not just those defined directly by BuildStep.
-
-2005-09-03 Brian Warner <warner@lothar.com>
-
- * NEWS: start adding items for the next release
-
- * docs/examples/twisted_master.cfg: (sync with reality) turn off
- python2.2 tests, change 'Quick' builder to only use python2.3
-
-2005-09-02 Fred Drake <fdrake@users.sourceforge.net>
-
- * buildbot/status/html.py (StatusResourceBuilder.body): only show
- the "Ping Builder" button if the build control is available; the
- user sees an exception otherwise
-
- * docs/buildbot.texinfo (PBChangeSource): fix a typo
-
-2005-09-01 Brian Warner <warner@lothar.com>
-
- * buildbot/interfaces.py (IBuilderStatus.getState): update
- signature, point out that 'build' can be None
-	(IBuildStatus.getETA): point out that the ETA can be None
-
- * buildbot/status/html.py (CurrentBox.getBox): tolerate build/ETA
- being None
- * buildbot/status/words.py (IrcStatusBot.emit_status): same
-
-2005-08-31 Brian Warner <warner@lothar.com>
-
- * buildbot/status/base.py (StatusReceiver.builderChangedState):
- update to match correct signature: removed 'eta' argument
- * buildbot/status/mail.py (MailNotifier.builderChangedState): same
-
-2005-08-30 Brian Warner <warner@lothar.com>
-
- * buildbot/status/builder.py (LogFile): remove the assertion that
- blows up when you try to overwrite an existing logfile, instead
- just emit a warning. This case gets hit when the buildmaster is
- killed and doesn't get a chance to write out the serialized
- BuilderStatus object, so the .nextBuildNumber attribute gets out
- of date.
-
- * buildbot/scripts/runner.py (sendchange): add --revision_file to
- the 'buildbot sendchange' arguments, for the Darcs context file
- * docs/buildbot.texinfo (sendchange): document it
-
- * buildbot/status/html.py: add pending/upcoming builds to CurrentBox
- * buildbot/interfaces.py (IScheduler.getPendingBuildTimes): new method
- (IStatus.getSchedulers): new method
- * buildbot/status/builder.py (BuilderStatus): track pendingBuilds
- (Status.getSchedulers): implement
- * buildbot/process/builder.py (Builder): maintain
- BuilderStatus.pendingBuilds
- * buildbot/scheduler.py (Scheduler.getPendingBuildTimes): new method
- (TryBase.addChange): Try schedulers should ignore Changes
-
- * buildbot/scripts/tryclient.py (getTopdir): implement getTopdir
- for 'try' on CVS/SVN
- * buildbot/test/test_runner.py (Try.testGetTopdir): test case
-
- * buildbot/scripts/tryclient.py (Try): make jobdir-style 'try'
- report status properly.
- (Try.createJob): implement unique buildset IDs
-
- * buildbot/status/client.py (StatusClientPerspective): add a
- perspective_getBuildSets method for the benefit of jobdir-style
- 'try'.
- * docs/buildbot.texinfo (try): more docs
- * buildbot/test/test_scheduler.py (Scheduling.testGetBuildSets):
- new test case
-
-2005-08-18 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/tryclient.py (Try): make 'try' status reporting
- actually work. It's functional but still kind of clunky. Also, it
- only works with the pb-style.. needs to be made to work with the
- jobdir-style too.
-
- * buildbot/status/client.py (RemoteBuildSet): new class
- (RemoteBuildRequest): same
- (RemoteBuild.remote_waitUntilFinished): return the RemoteBuild
- object, not the internal BuildStatus object.
- (RemoteBuild.remote_subscribe): new method to subscribe to builds
- outside of the usual buildStarted() return value.
- (BuildSubscriber): support class for RemoteBuild.remote_subscribe
-
- * buildbot/scheduler.py (Try_Jobdir): convey buildsetID properly
- (Try_Userpass_Perspective.perspective_try): return a remotely
- usable BuildSetStatus object
-
- * buildbot/interfaces.py (IBuildStatus): remove obsolete
- isStarted()/waitUntilStarted()
-
-2005-08-16 Brian Warner <warner@lothar.com>
-
- * buildbot/status/builder.py: implement IBuildSetStatus and
- IBuildRequestStatus, wire them into place.
- * buildbot/buildset.py: same. Add ID, move wait-until-finished
- methods into the BuildSetStatus object.
- * buildbot/interfaces.py: same
- (IStatus.getBuildSets): new method to get pending BuildSets
- (IStatusReceiver.buildsetSubmitted): new method which hears about
- new BuildSets
- * buildbot/master.py (BuildMaster.submitBuildSet): same
- * buildbot/process/base.py (BuildRequest): same, replace
- waitUntilStarted with subscribe/unsubscribe
- * buildbot/process/builder.py (BuilderControl.forceBuild): use
- subscribe instead of waitUntilStarted
- * buildbot/status/base.py (StatusReceiver.buildsetSubmitted): stub
- for new method
- * buildbot/status/client.py (StatusClientPerspective.builderRemoved):
- same
- * buildbot/test/test_buildreq.py: update for new code
- * buildbot/test/test_control.py (Force.testRequest): same
-
-
- * buildbot/slave/commands.py (Darcs.doVCFull): fix get-revision
- for Darcs to not use the tempfile module, so it works under
- python-2.2 too. We really didn't need the full cleverness of that
- module, since the slave has exclusive control of its own builddir.
-
-2005-08-15 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/tryclient.py (CVSExtractor): implement 'try'
- for CVS trees. It doesn't work for non-trunk branches,
- unfortunately.
- * buildbot/test/test_vc.py (CVS.testTry): test it, but skip the
- branch test
-
- * Makefile: make it easier to test against python2.2
-
- * buildbot/test/test_vc.py (VCBase.tearDown): provide for
- tearDown2, so things like Arch can unregister archives as they're
- shutting down. The previous subclass-override-tearDown technique
- resulted in a nested maybeWait() and test failures under
- Twisted-1.3.0
-
- * buildbot/scripts/tryclient.py (getSourceStamp): extract branches
- where we can (Arch), add a branch= argument to set the branch used
- when we can't
- (BazExtractor): extract the branch too
- (TlaExtractor): same
- * buildbot/scripts/runner.py (TryOptions): add --branch
- * docs/buildbot.texinfo (try): document --branch/try_branch
-
- * buildbot/slave/commands.py (Darcs): implement get-revision for
- Darcs, so that 'try' will work. This requires the tempfile module
- from python-2.3 .
-
- * buildbot/test/test_vc.py: rewrite tests, getting better coverage
- of revisions, branches, and 'try' in the process.
-
-2005-08-11 Brian Warner <warner@lothar.com>
-
- * buildbot/master.py (DebugPerspective.perspective_pokeIRC): fix
- this, it got broken at some point in the last few releases
- * buildbot/status/words.py (IrcBuildRequest): reply was broken
- (IrcStatusBot.emit_status): handle new IBuilderStatus.getState,
- specifically the removal of ETA information from the tuple
-
- * buildbot/locks.py: use %d for id() instead of %x, avoid a silly
- warning message
-
- * docs/buildbot.texinfo (try): document both --builder and
- 'try_builders' in .buildbot/options
- * buildbot/scripts/runner.py (TryOptions): add --builder,
- accumulate the values into opts['builders']
- * buildbot/scripts/tryclient.py (Try.__init__): set builders
- * buildbot/test/test_runner.py (Try): add some quick tests to make
- sure 'buildbot try --options' and .buildbot/options get parsed
- * buildbot/test/test_scheduler.py (Scheduling.testTryUserpass):
- use --builder control
-
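-	A sketch of the corresponding ~/.buildbot/options entries; the file
-	is plain Python, the option names follow the entries above, and the
-	values are hypothetical:
-
-	    # used when 'buildbot try' is run without --builder / --branch
-	    try_builders = ["full-linux", "full-win32"]
-	    try_branch = "trunk"
-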
- * docs/buildbot.texinfo (try): add --port argument to PB style
-
- * buildbot/scripts/tryclient.py (SourceStampExtractor): return an
- actual SourceStamp. Still need to extract a branch name, somehow.
- (Try): finish implementing the try client side, still need a UI
- for specifying which builders to use
-	(Try.getopt): factor out options/config-file reading
- * buildbot/test/test_scheduler.py (Scheduling.testTryUserpass):
- test it
- * buildbot/test/test_vc.py: match SourceStampExtractor change
-
- * buildbot/scripts/runner.py (Options.opt_verbose): --verbose
- causes the twisted log to be sent to stderr
-
- * buildbot/scheduler.py (Try_Userpass): implement the PB style
-
-2005-08-10 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/runner.py: Add 'buildbot try' command, jobdir
- style is 90% done, still missing status reporting or waiting for
- the buildsets to finish, and it is completely untested.
-
- * buildbot/trybuild.py: delete file, move contents to ..
- * buildbot/scripts/tryclient.py (getSourceStamp): .. here
- * buildbot/test/test_vc.py: match the move
-
- * buildbot/scheduler.py (Try_Jobdir): implement the jobdir style
- of the TryScheduler, no buildsetID or status-tracking support yet
- * buildbot/test/test_scheduler.py (Scheduling.testTryJobdir): test it
-
- * buildbot/changes/maildir.py (Maildir.setBasedir): make it
- possible to set the basedir after __init__ time, so it is easier
- to use as a Service-child of the BuildMaster instance
-
- * buildbot/changes/maildirtwisted.py (MaildirService): make a form
- that delivers messages to its Service parent instead of requiring
- a subclass to be useful. This turns out to be much easier to build
- unit tests around.
-
- * buildbot/scripts/tryclient.py (createJob): utility code to
- create jobfiles, will eventually be used by 'buildbot try'
-
-2005-08-08 Brian Warner <warner@lothar.com>
-
- * docs/buildbot.texinfo (try): add docs on the
- as-yet-unimplemented Try scheduler
-
- * buildbot/test/test_buildreq.py: move Scheduling tests out to ..
- * buildbot/test/test_scheduler.py: .. here
- (Scheduling.testTryJobdir): add placeholder test for 'try'
-
- * buildbot/test/test_status.py (Log.testMerge3): update to match new
- addEntry merging (>=chunkSize) behavior
- (Log.testConsumer): update to handle new callLater(0) behavior
-
- * buildbot/test/test_web.py: rearrange tests a bit, add test for
- both the MAX_LENGTH bugfix and the resumeProducing hang.
-
- * buildbot/status/builder.py (LogFileProducer.resumeProducing):
- put off the actual resumeProducing for a moment with
- reactor.callLater(0). This works around a twisted-1.3.0 bug which
- causes large logfiles to hang midway through.
-
- * buildbot/process/step.py (BuildStep.addCompleteLog): break the
- logfile up into chunks, both to avoid NetstringReceiver.MAX_LENGTH
- and to improve memory usage when streaming the file out to a web
- browser.
- * buildbot/status/builder.py (LogFile.addEntry): change > to >= to
- make this work cleanly
-
-2005-08-03 Brian Warner <warner@lothar.com>
-
- * buildbot/trybuild.py: new file for 'try' utilities
- (getSourceStamp): run in a tree, find out the baserev+patch
- * buildbot/test/test_vc.py (VCBase.do_getpatch): test it,
- implemented for SVN and Darcs, still working on Arch. I don't know
- how to make CVS work yet.
-
- * docs/buildbot.texinfo: document the 'buildbot' command-line
- tool, including the not-yet-implemented 'try' feature, and the
- in-flux .buildbot/ options directory.
-
-2005-07-20 Brian Warner <warner@lothar.com>
-
- * buildbot/locks.py: added temporary id() numbers to Lock
- descriptions, to track down a not-really-sharing-the-Lock bug
-
- * buildbot/test/runutils.py: must import errno, cut-and-paste bug
-
- * buildbot/test/test_slavecommand.py (ShellBase.failUnlessIn):
- needed for python2.2 compatibility
- * buildbot/test/test_vc.py: python2.2 compatibility: generators
- are from the __future__
-
-2005-07-19 Brian Warner <warner@lothar.com>
-
- * buildbot/master.py (BuildMaster.loadConfig): give a better error
- message when schedulers use unknown builders
-
- * buildbot/process/builder.py (Builder.compareToSetup): make sure
- SlaveLock('name') and MasterLock('name') are distinct
-
- * buildbot/master.py (BuildMaster.loadConfig): oops, sanity-check
- c['schedulers'] in such a way that we can actually accept
- Dependent instances
- * buildbot/test/test_config.py: check it
-
- * buildbot/scheduler.py (Dependent.listBuilderNames): oops, add
- utility method to *all* the Schedulers
- (Periodic.listBuilderNames): same
-
- * docs/buildbot.texinfo (Interlocks): update chapter to match
- reality
-
- * buildbot/master.py (BuildMaster.loadConfig): Add sanity checks
- to make sure that c['sources'], c['schedulers'], and c['status']
- are all lists of the appropriate objects, and that the Schedulers
- all point to real Builders
- * buildbot/interfaces.py (IScheduler, IUpstreamScheduler): add
- 'listBuilderNames' utility method to support this
- * buildbot/scheduler.py: implement the utility method
- * buildbot/test/test_config.py (ConfigTest.testSchedulers): test it
-
- * docs/buildbot.texinfo: add some @cindex entries
-
- * buildbot/test/test_vc.py (Arch.createRepository): set the tla ID
- if it wasn't already set: most tla commands will fail unless one
- has been set.
- (Arch.createRepository): and disable bazaar's revision cache, since
-	it causes test failures (the multiple repositories we create all
- interfere with each other through the cache)
-
- * buildbot/test/test_web.py (WebTest): remove use of deferredResult,
- bring it properly up to date with twisted-2.0 test guidelines
-
- * buildbot/master.py (BuildMaster): remove references to old
- 'interlock' module, this caused a bunch of post-merge test
- failures
- * buildbot/test/test_config.py: same
- * buildbot/process/base.py (Build): same
-
- * buildbot/test/test_slaves.py: stubs for new test case
-
- * buildbot/scheduler.py: add test-case-name tag
- * buildbot/test/test_buildreq.py: same
-
- * buildbot/slave/bot.py (SlaveBuilder.__init__): remove some
- unnecessary init code
- (Bot.remote_setBuilderList): match it
-
- * docs/buildbot.texinfo (@settitle): don't claim version 1.0
-
- * buildbot/changes/mail.py (parseSyncmail): update comment
-
- * buildbot/test/test_slavecommand.py: disable Shell tests on
-	platforms that don't support IReactorProcess
-
- * buildbot/status/builder.py (LogFile): remove the 't' mode from
- all places where we open logfiles. It causes OS-X to open the file
-	in some weird mode that prevents us from mixing reads and
- writes to the same filehandle, which we depend upon to implement
- _generateChunks properly. This change doesn't appear to break
- win32, on which "b" and "t" are treated differently but a missing
- flag seems to be interpreted as "t".
-
-2005-07-18 Brian Warner <warner@lothar.com>
-
- * buildbot/slave/commands.py (ShellCommand): overhaul
- error-handling code, to try and make timeout/interrupt work
- properly, and make win32 happier
- * buildbot/test/test_slavecommand.py: clean up, stop using
- reactor.iterate, add tests for timeout and interrupt
- * buildbot/test/sleep.py: utility for a new timeout test
-
- * buildbot/twcompat.py: copy over twisted 1.3/2.0 compatibility
- code from the local-usebranches branch
-
-2005-07-17 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py
- (TwistedReactorsBuildFactory): change the treeStableTimer to 5
- minutes, to match the other twisted BuildFactories, and don't
- excuse failures in c/qt/win32 reactors any more.
-
- * docs/examples/twisted_master.cfg: turn off the 'threadless' and
- 'freebsd' builders, since the buildslaves have been unavailable
- for quite a while
-
-2005-07-13 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py (VCBase.do_branch): test the new
- build-on-branch feature
-
- * buildbot/process/step.py (Darcs.__init__): add base_url and
- default_branch arguments, just like SVN
- (Arch.__init__): note that the version= argument is really the
- default branch name
-
- * buildbot/slave/commands.py (SourceBase): keep track of the
- repository+branch that was used for the last checkout in
- SRCDIR/.buildbot-sourcedata . If the contents of this file do not
- match, we clobber the directory and perform a fresh checkout
- rather than trying to do an in-place update. This should protect
- us against trying to get to branch B by doing an update in a tree
- obtained from branch A.
- (CVS.setup): add CVS-specific sourcedata: root, module, and branch
- (SVN.setup): same, just the svnurl
- (Darcs.setup): same, just the repourl
- (Arch.setup): same, arch coordinates (url), version, and
- buildconfig. Also pull the buildconfig from the args dictionary,
- which we weren't doing before, so the build-config was effectively
- disabled.
- (Arch.sourcedirIsUpdateable): don't try to update when we're
- moving to a specific revision: arch can't go backwards, so it is
- safer to just clobber the tree and checkout a new one at the
- desired revision.
- (Bazaar.setup): same sourcedata as Arch
-
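-	A simplified sketch (not the slave's actual code) of the sourcedata
-	check described above:
-
-	    import os
-
-	    def sourcedata_matches(srcdir, sourcedata):
-	        # 'sourcedata' is a string describing repository+branch (for
-	        # SVN, just the svnurl); update in place only if the previous
-	        # checkout recorded exactly the same data, otherwise clobber
-	        marker = os.path.join(srcdir, ".buildbot-sourcedata")
-	        if not os.path.isfile(marker):
-	            return False
-	        return open(marker).read() == sourcedata
-
-	    def record_sourcedata(srcdir, sourcedata):
-	        open(os.path.join(srcdir, ".buildbot-sourcedata"), "w").write(sourcedata)
-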
- * buildbot/test/test_dependencies.py (Dependencies.testRun_Fail):
- use maybeWait, to work with twisted-1.3.0 and twcompat
- (Dependencies.testRun_Pass): same
-
- * buildbot/test/test_vc.py: rearrange, cleanup
-
- * buildbot/twcompat.py: add defer.waitForDeferred and
- utils.getProcessOutputAndValue, so test_vc.py (which uses them)
- can work under twisted-1.3.0 .
-
- * buildbot/test/test_vc.py: rewrite. The sample repositories are
- now created at setUp time. This increases the runtime of the test
- suite considerably (from 91 seconds to 151), but it removes the
- need for an offline tarball, which should solve a problem I've
- seen where the test host has a different version of svn than the
- tarball build host. The new code also validates that mode=update
- really picks up recent commits. This approach will also make it
- easier to test out branches, because the code which creates the VC
- branches is next to the code which uses them. It will also make it
- possible to test some change-notification hooks, by actually
- performing a VC commit and watching to see the ChangeSource get
- notified.
-
-2005-07-12 Brian Warner <warner@lothar.com>
-
- * docs/buildbot.texinfo (SVN): add branches example
- * docs/Makefile (buildbot.ps): add target for postscript manual
-
- * buildbot/test/test_dependencies.py: s/test_interlocks/test_locks/
- * buildbot/test/test_locks.py: same
-
- * buildbot/process/step.py (Darcs): comment about default branches
-
- * buildbot/master.py (BuildMaster.loadConfig): don't look for
- c['interlocks'] in the config file, complain if it is present.
- Scan all locks in c['builders'] to make sure the Locks they use
- are uniquely named.
- * buildbot/test/test_config.py: remove old c['interlocks'] test,
- add some tests to check for non-uniquely-named Locks
- * buildbot/test/test_vc.py (Patch.doPatch): fix factory.steps,
- since the unique-Lock validation code requires it now
-
- * buildbot/locks.py: fix test-case-name
-
- * buildbot/interlock.py: remove old file
-
-2005-07-11 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_interlock.py: rename to..
- * buildbot/test/test_locks.py: .. something shorter
-
- * buildbot/slave/bot.py (BuildSlave.stopService): newer Twisted
- versions (after 2.0.1) changed internet.TCPClient to shut down the
- connection in stopService. Change the code to handle this
- gracefully.
-
- * buildbot/process/base.py (Build): handle whole-Build locks
- * buildbot/process/builder.py (Builder.compareToSetup): same
- * buildbot/test/test_interlock.py: make tests work
-
- * buildbot/process/step.py (BuildStep.startStep): complain if a
- Step tries to claim a lock that's owned by its own Build
- (BuildStep.releaseLocks): typo
-
- * buildbot/locks.py (MasterLock): use ComparableMixin so config
- file reloads don't replace unchanged Builders
- (SlaveLock): same
- * buildbot/test/test_config.py (ConfigTest.testInterlocks):
- rewrite to cover new Locks instead of old c['interlocks']
- * buildbot/test/runutils.py (RunMixin.connectSlaves): remember
- slave2 too
-
-
- * buildbot/test/test_dependencies.py (Dependencies.setUp): always
- start the master and connect the buildslave
-
- * buildbot/process/step.py (FailingDummy.done): finish with a
- FAILURE status rather than raising an exception
-
- * buildbot/process/base.py (BuildRequest.mergeReasons): don't try to
- stringify a BuildRequest.reason that is None
-
- * buildbot/scheduler.py (BaseUpstreamScheduler.buildSetFinished):
- minor fix
- * buildbot/status/builder.py (BuildSetStatus): implement enough to
- allow scheduler.Dependent to work
- * buildbot/buildset.py (BuildSet): set .reason and .results
-
- * buildbot/test/test_interlock.py (Locks.setUp): connect both
- slaves, to make the test stop hanging. It still fails, of course,
- because I haven't even started to implement Locks.
-
- * buildbot/test/runutils.py (RunMixin.connectSlaves): new utility
-
- * docs/buildbot.texinfo (Build-Dependencies): redesign the feature
- * buildbot/interfaces.py (IUpstreamScheduler): new Interface
- * buildbot/scheduler.py (BaseScheduler): factor out common stuff
- (Dependent): new class for downstream build dependencies
- * buildbot/test/test_dependencies.py: tests (still failing)
-
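-	A minimal sketch of wiring a Dependent scheduler into master.cfg;
-	'quick' is assumed to be an already-configured upstream Scheduler,
-	and the names are hypothetical:
-
-	    from buildbot import scheduler
-
-	    # 'quick' is the upstream Scheduler and c is the master.cfg
-	    # config dictionary (both assumed here); 'full' only fires after
-	    # quick's BuildSet finishes successfully
-	    full = scheduler.Dependent(name="full", upstream=quick,
-	                               builderNames=["full-linux"])
-	    c['schedulers'].append(full)
-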
- * buildbot/buildset.py (BuildSet.waitUntilSuccess): minor notes
-
-2005-07-07 Brian Warner <warner@lothar.com>
-
- * buildbot/test/runutils.py (RunMixin): factored this class out..
- * buildbot/test/test_run.py: .. from here
- * buildbot/test/test_interlock.py: removed old c['interlock'] tests,
- added new buildbot.locks tests (which all hang right now)
- * buildbot/locks.py (SlaveLock, MasterLock): implement Locks
- * buildbot/process/step.py: claim/release per-BuildStep locks
-
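-	A hedged master.cfg fragment showing how the new Locks might be
-	used; attaching them to steps via a locks= argument is an
-	assumption here, and the commands are placeholders:
-
-	    from buildbot import locks
-	    from buildbot.process import step
-	    from buildbot.process.factory import s
-
-	    compile_lock = locks.MasterLock("compile")  # one holder per master
-	    test_lock = locks.SlaveLock("test")         # one holder per slave
-
-	    steps = [
-	        s(step.Compile, command=["make", "all"], locks=[compile_lock]),
-	        s(step.ShellCommand, command=["make", "test"], locks=[test_lock]),
-	    ]
-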
- * docs/Makefile: add 'buildbot.html' target
-
- * buildbot/process/step.py (CVS.__init__): allow branch=None to be
- interpreted as "HEAD", so that all VC steps can accept branch=None
- and have it mean the "default branch".
-
- * docs/buildbot.texinfo: add Schedulers, Dependencies, and Locks
-
-2005-07-07 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.cfg: update to match current usage
-
- * docs/buildbot.texinfo (System Architecture): comment out the
- image, it doesn't exist yet and just screws up the HTML manual.
-
-2005-07-05 Brian Warner <warner@lothar.com>
-
- * debian/.cvsignore: oops, missed one. Removing leftover file.
-
-2005-06-17 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py (VCSupport.__init__): svn --version
- changed its output in 1.2.0, don't mistakenly think that the
- subversion we find isn't capable of supporting our tests.
-
- * debian/*: remove the debian/ directory and its contents, to make
- life easier for the proper Debian maintainer
- * MANIFEST.in: same
- * Makefile (release): same
-
-2005-06-07 Brian Warner <warner@lothar.com>
-
- * everything: create a distinct SourceStamp class to replace the
- ungainly 4-tuple, let it handle merging instead of BuildRequest.
- Changed the signature of Source.startVC to include the revision
- information (instead of passing it through self.args). Implement
- branches for SVN (now only Darcs/Git is missing support). Add more
- Scheduler tests.
-
-2005-06-06 Brian Warner <warner@lothar.com>
-
- * everything: rearrange build scheduling. Create a new Scheduler
-	object (configured in c['schedulers']) which submits BuildSets to a
- set of Builders. Builders can now use multiple slaves. Builds can
- be run on alternate branches, either requested manually or driven
- by changes. This changed some of the Status classes. Interlocks
- are out of service until they've been properly split into Locks
- and Dependencies. treeStableTimer, isFileImportant, and
- periodicBuild have all been moved from the Builder to the
- Scheduler.
- (BuilderStatus.currentBigState): removed the 'waiting' and
- 'interlocked' states, removed the 'ETA' argument.
-
-2005-05-24 Brian Warner <warner@lothar.com>
-
- * buildbot/pbutil.py (ReconnectingPBClientFactory): Twisted-1.3
- erroneously abandons the connection (in clientConnectionFailed)
- for non-UserErrors, which means that if we lose the connection due
- to a network problem or a timeout, we'll never try to reconnect.
- Fix this by not upcalling to the buggy parent method. Note:
- twisted-2.0 fixes this, but the function only has 3 lines so it
-	makes more sense to copy it than to try and detect the bugginess
- of the parent class. Fixes SF#1207588.
-
- * buildbot/changes/changes.py (Change.branch): doh! Add a
-	class-level attribute to accommodate old Change instances that were
- pickled before 0.6.5 (where .branch was added for new Changes).
- This fixes the exception that occurs when you try to look at an
- old Change (through asHTML).
-
- * buildbot/__init__.py (version): bump to 0.6.6+ while between
- releases
-
-2005-05-23 Brian Warner <warner@lothar.com>
-
- * buildbot/__init__.py (version): release 0.6.6
-
-2005-05-23 Brian Warner <warner@lothar.com>
-
- * NEWS: update for 0.6.6 release
- * debian/changelog: same
-
- * buildbot/scripts/runner.py (start): put the basedir in sys.path
- before starting: this was done by twistd back when we spawned it,
- now that we're importing the pieces and running them in the
- current process, we have to do it ourselves. This allows
- master.cfg to import files from the same directory without
- explicitly manipulating PYTHONPATH. Thanks to Thomas Vander
- Stichele for the catch.
- (Options.opt_version): Add a --version command (actually, just make
- the existing --version command emit Buildbot's version too)
-
- * buildbot/status/builder.py (HTMLLogFile.upgrade): oops! second
- fix to make this behave like other LogFiles, this time to handle
- existing LogFiles on disk. (add the missing .upgrade method)
- * buildbot/test/test_status.py (Log.testHTMLUpgrade): test it
-
-2005-05-21 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_runner.py (Create.testMaster): match the
- rawstring change in runner.py:masterTAC
-
- * buildbot/test/test_config.py (ConfigTest.testIRC): skip unless
- TwistedWords is installed
- * buildbot/test/test_status.py: same, with TwistedMail
-
- * buildbot/master.py: remove old IRC/Waterfall imports (used by
- some old, deprecated, and removed config keys). This should enable
- you to use the base buildbot functionality with Twisted-2.0.0 when
- you don't also have TwistedWeb and TwistedWords installed
-
-2005-05-20 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/runner.py (run): call sendchange(), not
- do_sendchange(): thus 'buildbot sendchange' was broken in 0.6.5
- (run): call stop("HUP"), not "-HUP", 'buildbot stop' was broken.
- (stop): don't wait for process to die when sending SIGHUP
- (masterTAC): use a rawstring for basedir=, otherwise '\' in the
- directory name gets interpreted, which you don't want
- (slaveTAC): same
-
- * buildbot/__init__.py (version): bump to 0.6.5+ while between
- releases
-
-2005-05-18 Brian Warner <warner@lothar.com>
-
- * buildbot/__init__.py (version): Releasing buildbot-0.6.5
-
-2005-05-18 Brian Warner <warner@lothar.com>
-
- * README: update for 0.6.5
- * debian/changelog: same
-
- * buildbot/changes/changes.py: rename tag= to branch=, since
- that's how we're using it, and my design for the upcoming "build a
- specific branch" feature wants it. also, tag= was too CVS-centric
- * buildbot/changes/mail.py (parseSyncmail): same
- * buildbot/process/base.py (Build.isBranchImportant): same
- * buildbot/test/test_mailparse.py (Test3.testMsgS4): same
- * docs/buildbot.texinfo (Attributes of Changes): same
-
- * NEWS: update tag=, update for upcoming release
-
-2005-05-17 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/runner.py (stop): actually poll once per
- second, instead of re-killing the poor daemon once per second.
- Sleep briefly (0.1s) before the first poll, since there's a good
- chance we can avoid waiting the full second if the daemon shuts
- down quickly. Also remove the sys.exit() at the end.
- (start): remove the unneighborly sys.exit()
-
- * Makefile: improve permission-setting to not kick Arch so badly
-
- * buildbot/scripts/runner.py (SlaveOptions.optParameters): set a
- default --keepalive=600, since it doesn't hurt very much, and it's
- a hassle to discover that you need it.
- * buildbot/test/test_runner.py (Create.testSlave): test it
-
- * buildbot/status/words.py (IrcStatusBot.buildFinished): Teach the
- IRC bot about EXCEPTION
-
- * buildbot/status/client.py (PBListener): upcall more correctly
-
- * buildbot/process/base.py (Build.allStepsDone): if a step caused
- an exception mark the overall build with EXCEPTION, not SUCCESS
-
- * buildbot/scripts/runner.py (makefile_sample): remove the leading
- newline
- * buildbot/status/mail.py (MailNotifier): oops, forgot to upcall
- * Makefile: update some release-related stuff
-
- * buildbot/slave/commands.py (ShellCommand.kill): if somehow this
- gets called when there isn't actually an active process, just end
- the Command instead of blowing up. I don't know how it gets into
- this state, but the twisted win32 buildslave will sometimes hang,
- and when it shakes its head and comes back, it thinks it's still
- running a Command. The next build causes this command to be
- interrupted, but the lack of self.process.pid breaks the interrupt
- attempt.
-
- * NEWS: document changes since the last release
-
- * buildbot/scripts/runner.py (start): change 'buildbot start' to
- look for Makefile.buildbot instead of a bare Makefile . The
- 'buildbot start' does not install this file, so you have to
- manually copy it if you want to customize startup behavior.
- (createMaster): change 'buildbot master' command to create
- Makefile.sample instead of Makefile, to create master.cfg.sample
- instead of master.cfg (requiring you to copy it before the
- buildmaster can be started). Both sample files are kept up to
- date, i.e. they are overwritten if they have been changed. The
- 'buildbot.tac' file is *not* overwritten, but if the new contents
- don't match the old, a 'buildbot.tac.new' file is created and the
- user is warned. This seems to be a much more sane way to handle
- startup files. Also, don't sys.exit(0) when done, so we can run
- unit tests against it.
- (createSlave): same. Don't overwrite the sample info/ files.
- * buildbot/scripts/sample.mk: remove. the contents were pulled
- into runner.py, since they need to match the behavior of start()
- * setup.py: same
- * MANIFEST.in: same
-
- * docs/buildbot.texinfo (Launching the daemons): document it
- * buildbot/test/test_runner.py (Create): test it
-
- * buildbot/test/test_vc.py (SetupMixin.failUnlessIn): Add a
- version that can handle string-in-string tests, because otherwise
-	python-2.2 fails the tests. It'd be tremendous if Trial's own
-	assertion accepted two strings under 2.2 too.
-
- * everything: fixed all deprecation warnings when running against
- Twisted-2.0 . (at least all the ones in buildbot code, there are a
- few that come from Twisted itself). This involved putting most of
- the Twisted-version specific code in the new buildbot.twcompat
- module, and creating some abstract base classes in
- buildbot.changes.base and buildbot.status.base (which might be
- useful anyway). __implements__ is a nuisance and requires an ugly
- 'if' clause everywhere.
-
- * buildbot/test/test_status.py (Mail.testMail): add a 0.1 second
- delay before finishing the test: it seems that smtp.sendmail
- doesn't hang up on the server, so we must wait a moment so it can
- hang up on us. This removes the trial warning about an unclean
- reactor.
-
-2005-05-16 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step.py (Source): add 'retry' argument. It is a
- tuple of (delay, repeats).
- * buildbot/test/test_vc.py (Retry): test it
- * docs/buildbot.texinfo (Source Checkout): document it
- * buildbot/slave/commands.py (SourceBase): add 'retry' parameter.
- (SourceBase.maybeDoVCRetry): If 'retry' is set, failures in
- doVCFull() are handled by re-trying the checkout (after a delay)
- some number of times.
- (ShellCommand._startCommand): make header lines easier to read
-
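-	A minimal sketch of a source step using the new argument; the
-	repository URL is a hypothetical placeholder:
-
-	    from buildbot.process import step
-	    from buildbot.process.factory import s
-
-	    # on checkout failure, wait 10 seconds and retry, up to 2 times;
-	    # the resulting spec goes into a BuildFactory as usual
-	    source = s(step.SVN,
-	               svnurl="svn://svn.example.org/repo/trunk",
-	               mode="clobber",
-	               retry=(10, 2))
-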
- * buildbot/test/test_web.py (WebTest.tearDown): factor out master
- shutdown
- (WebTest.test_logfile): make sure master gets shut down, silences
- some "unclean reactor" test errors
-
- * buildbot/test/test_changes.py (Sender.tearDown): spin the
- reactor once after shutdown, something in certain versions of
- Twisted trigger a test failure. 1.3.0 is ok, 2.0.0 fails, 2.0.1pre
- fails, svn-trunk is ok.
-
- * buildbot/test/test_slavecommand.py (Shell.testShellZ): add a
- second win32 error message
-
- * buildbot/test/test_run.py (Status.testSlave): be smarter about
- validating the ETA, so the tests don't fail on slow systems
-
-2005-05-15 Brian Warner <warner@lothar.com>
-
- * buildbot/status/builder.py (HTMLLogFile): make this behave like
- the new LogFile class, so upgrading works properly
- (LogFileProducer.resumeProducing): survive resumeProducing after
- we've exhausted the chunkGenerator
-
- * buildbot/test/test_web.py (WebTest.test_logfile): validate HTML
- logs too
- * buildbot/test/test_status.py (Log.testAdd): validate hasContents
- (Log.testUpgrade): same
-
- * docs/buildbot.texinfo (Maintenance): describe how to delete old
- Builds and logs with a cron job.
-
- * buildbot/status/builder.py (LogFile): revamp LogFiles. Got rid
- of the old non-offline LogFile, added code to upgrade these to
- new-style contents-live-on-disk instances at load time (in a way
- that doesn't invalidate the old Build pickles, so upgrading to
- 0.6.5 is not a one-way operation). Got rid of everything related
- to 'stub' builds.
- (LogFile.__init__): create LogFiles with the parent step status,
- the log's name, and a builder-relative filename where it can keep
- the contents on disk.
- (LogFile.hasContents): new method, clients are advised to call it
- before getText or getChunks and friends. If it returns False, the
- log's contents have been deleted and getText() will raise an
- error.
- (LogFile.getChunks): made it a generator
- (LogFile.subscribeConsumer): new method, takes a Twisted-style
- Consumer (except one that takes chunks instead of strings). This
- enables streaming of very large logfiles without storing the whole
- thing in memory.
- (BuildStatus.generateLogfileName): create names like
- 12-log-compile-output, with a _0 suffix if required to be unique
- (BuildStatus.upgradeLogfiles): transform any old-style (from 0.6.4
- or earlier) logfiles into new-style ones
- (BuilderStatus): remove everything related to 'stub' builds. There
- is now only one build cache, and we don't strip logs from old
- builds anymore.
- (BuilderStatus.getBuildByNumber): check self.currentBuild too,
- since we no longer fight to keep it in the cache
-
- * buildbot/status/html.py (TextLog.render_GET): use a
- ChunkConsumer to stream the log entries efficiently.
- (ChunkConsumer): wrapper which consumes chunks and writes
- formatted HTML.
-
- * buildbot/test/test_twisted.py (Parse.testParse): use a
- LogFile-like object instead of a real one
-
- * buildbot/test/test_status.py (MyLog): handle new LogFile code
- (Log.testMerge3): validate more merge behavior
- (Log.testChunks): validate LogFile.getChunks
- (Log.testUpgrade): validate old-style LogFile upgrading
- (Log.testSubscribe): validate LogFile.subscribe
- (Log.testConsumer): validate LogFile.subscribeConsumer
-
- * buildbot/interfaces.py (IStatusLogStub): remove
- (IStatusLog.subscribeConsumer): new method
- (IStatusLog.hasContents): new method
- (IStatusLogConsumer): describes things passed to subscribeConsumer
-
- * buildbot/status/html.py (StepBox.getBox): Don't offer an href to
- the log contents if it does not have any contents.
- (StatusResourceBuildStep.body): same
- (StatusResourceBuildStep.getChild): give a 404 for empty logs
-
-2005-05-14 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_web.py (WebTest.test_logfile): add 5-second
- timeouts to try and make the windows metabuildslave not hang
-
-2005-05-13 Mike Taylor <bear@code-bear.com>
-
- * buildbot/slave/commands.py (rmdirRecursive): added a check
- to ensure the path passed into rmdirRecursive actually exists.
-	On win32 a non-existent path would generate an exception.
-
-2005-05-13 Brian Warner <warner@lothar.com>
-
- * buildbot/slave/commands.py (rmdirRecursive): replacement for
- shutil.rmtree which behaves correctly on windows in the face of
- files that you have to chmod before deleting. Thanks to Bear at
- the OSAF for the routine.
- (SourceBase.doClobber): use rmdirRecursive
-
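-	A simplified sketch of the approach (the real routine handles more
-	win32 corner cases):
-
-	    import os, stat
-
-	    def rmdir_recursive(path):
-	        # make entries writable before deleting, since shutil.rmtree
-	        # trips over read-only files on windows
-	        if not os.path.exists(path):
-	            return
-	        for name in os.listdir(path):
-	            full = os.path.join(path, name)
-	            if os.path.isdir(full) and not os.path.islink(full):
-	                rmdir_recursive(full)
-	            else:
-	                os.chmod(full, stat.S_IWRITE | stat.S_IREAD)
-	                os.remove(full)
-	        os.rmdir(path)
-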
-2005-05-12 Brian Warner <warner@lothar.com>
-
- * buildbot/status/builder.py (OfflineLogFile.getChunks): have this
- method generate chunks instead of returning a big list. This
- allows the same method to be used for both old LogFile and new
- OfflineLogFile.
- (OfflineLogFile.getText): use the generator
- (OfflineLogFile.subscribe): same
- * buildbot/status/html.py (TextLog.resumeProducing): same
- * buildbot/interfaces.py (IStatusLog.getChunks): document it
-
- * buildbot/test/test_web.py (WebTest.test_logfile): Add a test to
- point out that OfflineLogFile does not currently work with
- html.Waterfall . Fixing this is high-priority.
-
- * buildbot/scripts/runner.py (start): add --logfile=twistd.log, since
- apparently windows defaults to using stdout
-
- * buildbot/test/test_slavecommand.py (Shell.testShellZ): log a
- better message on failure so I can figure out the win32 problem
-
- * buildbot/slave/commands.py (ShellCommand._startCommand): update
- log messages to include more useful copies of the command being
- run, the argv array, and the child command's environment.
- (Git.doVCFull): update cg-close usage, patch from Brandon Philips.
-
-2005-05-11 Brian Warner <warner@lothar.com>
-
- * setup.py: oops, install debug.glade so 'buildbot debugclient'
- will actually work
- * Makefile: update the deb-snapshot version
-
- * docs/buildbot.texinfo: move all .xhtml docs into a new
- .texinfo-format document, adding a lot of material in the process.
- This is starting to look like a real user's manual. Removed all
- the Lore-related files: *.xhtml, *.css, template.tpl .
- * docs/Makefile: simple makefile to run 'makeinfo'
- * buildbot/scripts/sample.cfg: rearrange slightly
- * MANIFEST.in: include .info and .textinfo, don't include *.xhtml
-
-2005-05-10 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/runner.py (start): Twisted-1.3.0 used a
- different name for the internal twistw module, handle it.
-
- * MANIFEST.in: we deleted plugins.tml, so stop shipping it
- * setup.py: .. and stop trying to install it
-
- * buildbot/process/step.py (Git): added support for 'cogito' (aka
- 'git'), the new linux kernel VC system (http://kernel.org/git/).
- Thanks to Brandon Philips for the patch.
- * buildbot/slave/commands.py (Git): same
-
-2005-05-06 Brian Warner <warner@lothar.com>
-
- * buildbot/status/builder.py (OfflineLogFile): replace the default
- LogFile with a form that appends its new contents to a disk file
- as they arrive. The complete log data is never kept in RAM. This
- is the first step towards handling very large (100MB+) logfiles
- without choking quite so badly. (The other half is
- producer/consumer on the HTML pages).
- (BuildStepStatus.addLog): use OfflineLogFile by default
- (BuildStatus.getLogfileName): helper code to give the
- OfflineLogFile a filename to work with
-
- * buildbot/test/test_status.py (Results.testAddResults): update
- tests to handle new asserts
- * buildbot/test/test_vc.py (Patch.doPatch): same
- * buildbot/test/test_steps.py (BuildStep.setUp): same
-
-2005-05-05 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/runner.py (start): if there is no Makefile,
- launch the app by importing twistd's internals and calling run(),
- rather than spawning a new twistd process. This stands a much
- better chance of working under windows.
- (stop): kill the process with os.kill instead of spawning
- /bin/kill, again to reduce the number of external programs which
- windows might not have in the PATH. Also wait up to 5 seconds for
- the process to go away, allowing things like 'buildbot stop;
- buildbot start' to be reliable in the face of slow shutdowns.
-
- * buildbot/master.py (Dispatcher.__getstate__): remove old
- .tap-related methods
- (BuildMaster.__getstate__): same
- (makeService): same
- * buildbot/slave/bot.py (makeService): same
- (Options.longdesc): same
- * buildbot/scripts/runner.py: copy over some old mktap option text
-
- * buildbot/scripts/runner.py (masterTAC): stop using mktap.
- 'buildbot master' now creates a buildbot.tac file, so there is no
- longer a create-instance/save/reload sequence. mktap is dead, long
- live twistd -y.
- * buildbot/scripts/sample.mk: use twistd -y, not -f
- * buildbot/test/test_config.py: remove mktap-based test
- * buildbot/bb_tap.py, buildbot/plugins.tml: delete old files
- * README: don't reference mktap
-
- * docs/source.xhtml: document some of the attributes that Changes
- might have
-
- * docs/steps.xhtml (Bazaar): document the Bazaar checkout step
-
- * general: merge in Change(tag=) patch from Thomas Vander Stichele.
- [org.apestaart@thomas--buildbot/buildbot--cvstag--0-dev--patch-2]
- * buildbot/changes/changes.py (Change)
- * buildbot/changes/mail.py (parseSyncmail)
- * buildbot/test/test_mailparse.py (Test3.getNoPrefix)
- (Test3.testMsgS5)
- * buildbot/process/base.py (Build.isTagImportant)
- (Build.addChange)
-
-
-2005-05-04 Brian Warner <warner@lothar.com>
-
- * buildbot/clients/sendchange.py (Sender.send): tear down the PB
- connection after sending the change, so that unit tests don't
- complain about sockets being left around
-
- * buildbot/status/html.py (WaterfallStatusResource.body): fix
- exception in phase=0 rendering
- * buildbot/test/test_web.py (WebTest.test_waterfall): test it
-
- * buildbot/changes/dnotify.py (DNotify.__init__): remove debug msg
-
- * buildbot/master.py (BuildMaster.loadConfig): finally remove
- deprecated config keys: webPortnum, webPathname, irc, manholePort,
- and configuring builders with tuples.
- * buildbot/test/test_config.py: stop testing compatibility with
- deprecated config keys
- * buildbot/test/test_run.py: same
-
-2005-05-03 Brian Warner <warner@lothar.com>
-
- * contrib/arch_buildbot.py: survive if there are no logfiles
- (username): just use a string, os.getlogin isn't reliable
-
- * buildbot/scripts/runner.py (sendchange): oops, fix the command
- so 'buildbot sendchange' actually works. The earlier test only
- covered the internal (non-reactor-running) form.
-
- * contrib/arch_buildbot.py: utility that can run as an Arch hook
- script to notify the buildmaster about changes
-
- * buildbot/scripts/runner.py (sendchange): new command to send a
- change to a buildbot.changes.pb.PBChangeSource receiver.
- * buildbot/test/test_changes.py (Sender): test it
-
- * buildbot/master.py (BuildMaster.startService): mark .readConfig
- after any reading of the config file, not just when we do it in
- startService. This makes some tests a bit cleaner.
-
- * buildbot/changes/pb.py: add some log messages
-
- * buildbot/process/base.py (Build.startBuild): fix a bug that
- caused an exception when the build terminated in the very first
- step.
- (Build.stepDone): let steps return a status of EXCEPTION. This
- terminates the build right away, and sets the build's overall
- status to EXCEPTION too.
- * buildbot/process/step.py (BuildStep.failed): return a status of
- EXCEPTION when that is what has happened.
-
- * buildbot/process/step.py (Arch.computeSourceRevision): finally
- implement this, allowing Arch-based projects to get precise
- checkouts instead of always using the latest code
- (Bazaar): create variant of Arch to let folks use baz instead of
- tla. Requires a new buildslave too.
- * buildbot/slave/commands.py (Arch): add 'revision' argument
- (Bazaar): create variant of Arch that uses baz instead of tla.
- Remove the code that extracts the archive name from the
- register-archive output, since baz doesn't provide it, and require
- the user provide both the archive name and its location.
- * buildbot/test/test_vc.py (VC.testBazaar): added tests
-
-2005-05-02 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/sample.cfg: improve docs for c['buildbotURL'],
- thanks to Nick Trout.
-
- * buildbot/scripts/runner.py (Maker.makefile): chmod before edit,
- deals better with source Makefile coming from a read-only CVS
- checkout. Thanks to Nick Trout for the catch.
-
- * buildbot/__init__.py (version): bump to 0.6.4+ while between
- releases
-
-2005-04-28 Brian Warner <warner@lothar.com>
-
- * buildbot/__init__.py (version): Releasing buildbot-0.6.4
-
- * debian/changelog: update for 0.6.4
-
-2005-04-28 Brian Warner <warner@lothar.com>
-
- * README.w32: add a checklist of steps for getting buildbot
- running on windows.
- * MANIFEST.in: include it in the tarball
-
- * NEWS: update
-
- * buildbot/master.py (BuildMaster.upgradeToVersion3): deal with
- broken .tap files from 0.6.3 by getting rid of .services,
- .namedServices, and .change_svc at load time.
-
-2005-04-27 Brian Warner <warner@lothar.com>
-
- * NEWS: update in preparation for new release
-
- * buildbot/test/test_config.py (Save.testSave): don't pull in
- twisted.scripts.twistd, we don't need it and it isn't for windows
- anyway.
-
- * buildbot/changes/changes.py (ChangeMaster.saveYourself):
-	accommodate win32, which can't do an atomic rename
-
-2005-04-27 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_run.py (Disconnect.testBuild2): crank up some
- timeouts to help the slow metabuildbot not flunk them so much
- (Disconnect.testBuild3): same
- (Disconnect.testBuild4): same
- (Disconnect.testInterrupt): same
-
- * buildbot/master.py (BuildMaster.loadChanges): fix change_svc
- setup, it was completely broken for new buildmasters (those which
-	did not have a 'change.pck' already saved). Thanks to Paul Warren
- for catching this (embarrassing!) bug.
- (Dispatcher.__getstate__): don't save our registered avatar
- factories, since they'll be re-populated when the config file is
- re-read.
- (BuildMaster.__init__): add a dummy ChangeMaster, used only by
- tests (since the real mktap-generated BuildMaster doesn't save
- this attribute).
- (BuildMaster.__getstate__): don't save any service children,
- they'll all be re-populated when the config file is re-read.
- * buildbot/test/test_config.py (Save.testSave): test for this
-
-2005-04-26 Brian Warner <warner@lothar.com>
-
- * buildbot/buildbot.png: use a new, smaller (16x16) icon image,
- rendered with Blender.. looks a bit nicer.
- * buildbot/docs/images/icon.blend: add the Blender file for it
-
- * buildbot/slave/commands.py (ShellCommand._startCommand): prepend
- 'cmd.exe' (or rather os.environ['COMSPEC']) to the argv list when
- running under windows. This appears to be the best way to allow
- BuildSteps to do something normal like 'trial -v buildbot.test' or
- 'make foo' and still expect it to work. The idea is to make the
- BuildSteps look as much like what a developer would type when
- compiling or testing the tree by hand. This approach probably has
- problems when there are spaces in the arguments, so if you've got
- windows buildslaves, you'll need to pay close attention to your
- commands.
-
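-	A sketch of the idea, not the slave's exact code; the '/c' switch
-	(run the command and exit) is an assumption, not taken from this
-	entry:
-
-	    import os, sys
-
-	    def wrap_for_win32(argv):
-	        # hand the whole command to the shell on windows, so steps
-	        # like ['make', 'foo'] behave as they would when typed
-	        if sys.platform == "win32":
-	            comspec = os.environ.get("COMSPEC", "cmd.exe")
-	            return [comspec, "/c"] + list(argv)
-	        return list(argv)
-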
- * buildbot/status/html.py (WaterfallStatusResource.body): add the
- timezone to the timestamp column.
- * buildbot/test/test_web.py (WebTest.test_waterfall): test it
-
- * buildbot/scripts/runner.py (loadOptions): do something sane for
- windows, I think. We use %APPDATA%/buildbot instead of
- ~/.buildbot, but we still search everywhere from the current
- directory up to the root for a .buildbot/ subdir. The "is it under
- $HOME" security test was replaced with "is it owned by the current
- user", which is only performed under posix.
- * buildbot/test/test_runner.py (Options.testFindOptions): update
- tests to match. The "is it owned by the current user" check is
- untested. The test has been re-enabled for windows.
-
- * buildbot/test/test_slavecommand.py (Shell.checkOutput): replace
- any "\n" in the expected output with the platform-specific line
- separator. Make this separator "\r\n" on PTYs under unix, they
- seem to do that and I don't know why
-
- * buildbot/test/test_runner.py (Options.optionsFile): disable on
- windows for now, I don't know what ~/.buildbot/ should mean there.
-
- * buildbot/test/test_run.py (BuilderNames.testGetBuilderNames):
- win32 compatibility, don't use "/tmp"
- (Basedir.testChangeBuilddir): remove more unixisms
-
-2005-04-26 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_control.py (Force.rmtree): python2.2
- compatibility, apparently its shutil.rmtree ignore_errors=
- argument is ignored.
- * buildbot/test/test_run.py (Run.rmtree): same
- (RunMixin.setUp): same
-
- * buildbot/test/test_runner.py (make): python2.2 has os.sep but
- not os.path.sep
-
- * buildbot/test/test_twisted.py (Parse.failUnlessIn): 2.2 has no
- 'substring in string' operator, must use string.find(substr)!=-1
- * buildbot/test/test_vc.py (Patch.failUnlessIn): same
- * buildbot/test/test_web.py (WebTest.failUnlessIn): same
-
- * buildbot/scripts/runner.py (loadOptions): add code to search for
- ~/.buildbot/, a directory with things like 'options', containing
- defaults for various 'buildbot' subcommands. .buildbot/ can be in
- the current directory, your $HOME directory, or anywhere
-         in between, as long as you're somewhere inside your home directory.
- (debugclient): look in ~/.buildbot/options for master and passwd
- (statuslog): look in ~/.buildbot/options for 'masterstatus'
- * buildbot/test/test_runner.py (Options.testFindOptions): test it
-
- * buildbot/status/client.py (makeRemote): new approach to making
- IRemote(None) be None, which works under Twisted-2.0
- * buildbot/test/test_status.py (Client.testAdaptation): test it
-
- * buildbot/status/builder.py (Status.builderAdded): when loading a
- pickled BuilderStatus in from disk, set its name after loading.
- The config file might have changed its name (but not its
- directory) while it wasn't looking.
-
- * buildbot/process/builder.py (Builder.attached): always return a
- Deferred, even if the builder was already attached
- * buildbot/test/test_run.py (Basedir.testChangeBuilddir): test it
-
-2005-04-25 Brian Warner <warner@lothar.com>
-
- * buildbot/status/words.py (IrcStatusBot.buildFinished): fix a
- category-related exception when announcing a build has finished
-
- * buildbot/status/html.py (StatusResourceChanges.body): oops, don't
- reference no-longer-existent changemaster.sources
- * buildbot/test/test_web.py (WebTest.test_waterfall): test for it
-
- * buildbot/__init__.py (version): bump to 0.6.3+ while between
- releases
-
-2005-04-25 Brian Warner <warner@lothar.com>
-
- * buildbot/__init__.py (version): Releasing buildbot-0.6.3
-
- * debian/changelog: update for 0.6.3
-
-2005-04-25 Brian Warner <warner@lothar.com>
-
- * MANIFEST.in: make sure debug.glade is in the tarball
-
- * README (REQUIREMENTS): list necessary Twisted-2.0 packages
-
- * NEWS: update for the imminent 0.6.3 release
-
- * buildbot/status/html.py (HtmlResource.content): make the
- stylesheet <link> always point at "buildbot.css".
- (StatusResource.getChild): map "buildbot.css" to a static.File
- containing whatever css= argument was provided to Waterfall()
- (Waterfall): provide the "classic" css as the default.
- * docs/waterfall.classic.css: move default CSS from here ..
- * buildbot/status/classic.css: .. to here
-
- * MANIFEST.in: make sure classic.css is included in the tarball
- * setup.py: and that it is installed too, under buildbot/status/
-
- * buildbot/master.py (BuildMaster): oops, set .change_svc=None at
- the module level, because buildbot.tap files from 0.6.2 don't have
- it in their attribute dictionary.
-
- * buildbot/slave/bot.py (Bot.startService): make sure the basedir
- really exists at startup, might save some confusion somewhere.
-
-2005-04-24 Thomas Vander Stichele <thomas at apestaart dot org>
-
- * docs/waterfall.classic.css:
- add a stylesheet that's almost the same as the "classic"
- buildbot style
-
- * buildbot/status/builder.py:
- add EXCEPTION as a result - this is a problem for the bot
- maintainer, not a build problem for the changers
- * buildbot/process/step.py:
- use EXCEPTION instead of FAILURE for exceptions
- * buildbot/status/html.py:
- add build_get_class to get a class out of a build/buildstep
- finish naming the classes
- split out sourceNames to changeNames and builderNames so we
- can style them separately
- * docs/config.xhtml:
- finish documenting classes as they are right now
-
- * buildbot/status/html.py:
- name the classes as we agreed on IRC
- * docs/config.xhtml:
- and document them
-
- * buildbot/status/html.py:
- same for cssclass->class_
-
- * buildbot/status/html.py:
- as decided on IRC, use class_ for the "class" attribute to not
- conflict with the class keyword, and clean up the messy **{} stuff.
-
- * buildbot/status/mail.py:
- put back "builders" argument, and fix docstring, because the
- code *ignores* builders listed in this argument
-
- * buildbot/process/builder.py:
- remove FIXME notes - category is now indeed a cvar of BuilderStatus
-
- * docs/config.xhtml:
- describe the category argument for builders
-
- * buildbot/status/builder.py:
- Fix a silly bug due to merging
-
- * buildbot/process/builder.py:
- remove category from the process Builder ...
- * buildbot/status/builder.py:
- ... and add it to BuilderStatus instead.
- Set category on unpickled builder statuses, they might not have it.
- * buildbot/master.py:
- include category when doing builderAdded
- * buildbot/status/mail.py:
- return None instead of self for builders we are not interested in.
- * buildbot/test/test_run.py:
- fix a bug due to only doing deferredResult on "dummy" waiting
- * buildbot/test/test_status.py:
- add checks for the Mail IStatusReceiver returning None or self
-
- * buildbot/status/html.py:
- fix testsuite by prefixing page title with BuildBot
-
- * buildbot/status/builder.py:
- have .category in builder status ...
- * buildbot/process/builder.py:
- ... and set it from Builder
- * buildbot/status/html.py:
- make .css a class variable
- * buildbot/test/test_status.py:
- write more tests to cover our categories stuff ...
- * buildbot/status/mail.py:
- ... and fix the bug that this uncovered
-
- * buildbot/changes/mail.py:
- * buildbot/changes/pb.py:
- * buildbot/master.py:
- * buildbot/process/base.py:
- * buildbot/process/factory.py:
- * buildbot/process/interlock.py:
- * buildbot/process/step.py:
- * buildbot/process/step_twisted.py:
- * buildbot/slave/commands.py:
- * buildbot/status/builder.py:
- * buildbot/status/client.py:
- * buildbot/status/html.py:
- * buildbot/status/mail.py:
- * buildbot/status/progress.py:
- * buildbot/test/test_changes.py:
- * buildbot/test/test_config.py:
- * buildbot/test/test_control.py:
- * buildbot/test/test_interlock.py:
- * buildbot/test/test_maildir.py:
- * buildbot/test/test_mailparse.py:
- * buildbot/test/test_run.py:
- * buildbot/test/test_slavecommand.py:
- * buildbot/test/test_status.py:
- * buildbot/test/test_steps.py:
- * buildbot/test/test_twisted.py:
- * buildbot/test/test_util.py:
- * buildbot/test/test_vc.py:
- * buildbot/test/test_web.py:
- * buildbot/util.py:
- add test-case-name at the top of a whole set of files
-
- * buildbot/status/builder.py:
- keep order of addition when getting builder names
- * buildbot/status/words.py:
- * buildbot/test/test_run.py:
- add test for getBuilderNames
-
- * buildbot/process/base.py:
- * buildbot/process/step.py:
- * buildbot/status/builder.py:
- * buildbot/status/html.py:
- make buildbot css-able
- replace the color code for purple with purple, don't understand
- why it wasn't purple to start with
-
- * buildbot/status/words.py:
- ok, so it doesn't look like BuilderStatus.remote is still valid.
- Use what waterfall uses instead.
-
- * buildbot/interfaces.py:
- * buildbot/status/builder.py:
- * buildbot/status/html.py:
- * buildbot/status/mail.py:
- * buildbot/status/words.py:
- * buildbot/test/test_run.py:
- use categories everywhere and make it be a list. More sensible
- for the future. Also make words actually respect this in
- buildFinished.
-
- * buildbot/interfaces.py:
- add category argument to getBuilderNames
- * buildbot/process/builder.py:
- * buildbot/status/builder.py:
- * buildbot/status/html.py:
- * buildbot/status/mail.py:
- * buildbot/status/words.py:
- * buildbot/test/test_run.py:
- move from specifying builders by name to specifying the category
-
- * buildbot/status/html.py:
- * buildbot/status/words.py:
- add "builders=" to __init__ of status clients so they can
- limit themselves to the given list of builders to report on
-
- * buildbot/status/html.py: set the title to the product name
-
-2005-04-23 Thomas Vander Stichele <thomas at apestaart dot org>
-
- * buildbot/interfaces.py:
- * buildbot/status/builder.py:
- more documentation. Hm, not sure if ChangeLog entries make sense
- here...
-
-2005-04-23 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py (SetupMixin.do_vc): increase timeouts
-
- * buildbot/test/test_slavecommand.py (Shell): increase timeouts
-
- * buildbot/scripts/runner.py: make 'statuslog' and 'statusgui' be
- the sub-commands that log buildmaster status to stdout and to a
- GUI window, respectively.
-
- * buildbot/clients/gtkPanes.py: overhaul. basic two-row
- functionality is working again, but all the step-status and ETA
- stuff is missing. Commented out a lot of code pending more
- overhaul work.
-
- * buildbot/status/client.py: make sure that IRemote(None) is None
-
- * buildbot/changes/changes.py: import defer, oops
- (ChangeMaster): remove the .sources list, rely upon the fact that
- MultiServices can be treated as sequences of their children. This
- cleans up the add/remove ChangeSource routines a lot, as we keep
- exactly one list of the current sources instead of three.
-
- * buildbot/master.py (BuildMaster.__init__): remove .sources, set
- up an empty ChangeMaster at init time.
- (BuildMaster.loadChanges): if there are changes to be had from
- disk, replace self.change_svc with the new ones. If not, keep
- using the empty ChangeMaster set up in __init__.
- (BuildMaster.loadConfig_Sources): use list(self.change_svc)
- instead of a separate list, makes the code a bit cleaner.
- * buildbot/test/test_config.py (ConfigTest.testSimple): match it
- (ConfigTest.testSources): same, also wait for loadConfig to finish.
- Extend the test to make sure we can get rid of the sources when
- we're done.
-
-2005-04-22 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/runner.py (Maker.mkinfo): create the info/admin
- and info/host files when making the slave directory
-
- * buildbot/test/test_run.py (RunMixin.shutdownSlave): remove the
- whendone= argument, just return the Deferred and let the caller do
- what they want with it.
- (Disconnect.testBuild1): wait for shutdownSlave
- (Basedir.testChangeBuilddir): new test to make sure changes to the
- builddir actually get propagated to the slave
-
- * buildbot/slave/bot.py (SlaveBuilder.setBuilddir): use an
- explicit method, rather than passing the builddir in __init__ .
- Make sure to update self.basedir too, this was broken before.
- (Bot.remote_setBuilderList): use b.setBuilddir for both new
- builders and for ones that have just had their builddir changed.
- (BotFactory): add a class-level .perspective attribute, so
- BuildSlave.waitUntilDisconnected won't get upset when the
- connection hasn't yet been established
- (BuildSlave.__init__): keep track of the bot.Bot instance, so
- tests can reach through it to inspect the SlaveBuilders
-
- * buildbot/process/base.py (Build.buildException): explain the
- log.err with a log.msg
- * buildbot/process/builder.py (Builder.startBuild): same
- (Builder._startBuildFailed): improve error message
-
- * buildbot/pbutil.py (RBCP.failedToGetPerspective): if the failure
- occurred because we lost the brand-new connection, retry instead
- of giving up. If not, it's probably an authorization failure, and
- it makes sense to stop trying. Make sure we log.msg the reason
- that we're log.err'ing the failure, otherwise test failures are
- really hard to figure out.
-
- * buildbot/master.py: change loadConfig() to return a Deferred
- that doesn't fire until the change has been fully implemented.
- This means any connected slaves have been updated with the new
- builddir. This change makes it easier to test the code which
- actually implements this builddir-updating.
- (BotPerspective.addBuilder): return Deferred
- (BotPerspective.removeBuilder): same
- (BotPerspective.attached): same
- (BotPerspective._attached): same. finish with remote_print before
- starting the getSlaveInfo, instead of doing them in parallel
- (BotPerspective.list_done): same
- (BotMaster.removeSlave): same. Fix the typo that meant we weren't
- actually calling slave.disconnect()
- (BotMaster.addBuilder): same
- (BotMaster.removeBuilder): same
- (BuildMaster.loadConfig): same
- (BuildMaster.loadConfig_Slaves): same
- (BuildMaster.loadConfig_Sources): same
- (BuildMaster.loadConfig_Builders): same
- (BuildMaster.loadConfig_status): same
-
- * buildbot/changes/changes.py (ChangeMaster.removeSource): return
- a Deferred that fires when the source is finally removed
-
- * buildbot/slave/commands.py (SourceBase.doClobber): when removing
- the previous tree on win32, where we have to do it synchronously,
- make sure we return a Deferred anyway.
- (SourceBase.doCopy): same
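-
-         (The shape of that pattern, sketched with a hypothetical function
-         rather than the real doClobber/doCopy code:)
-
-             import shutil
-             from twisted.internet import defer
-
-             def remove_tree(path, synchronous):
-                 if synchronous:
-                     # win32 case from the entry above: do the removal inline
-                     # but still hand back a (fired) Deferred so callers can
-                     # chain on it either way
-                     shutil.rmtree(path, ignore_errors=True)
-                     return defer.succeed(0)
-                 # the asynchronous branch would spawn 'rm -rf' and return a
-                 # Deferred for its exit status; omitted in this sketch
-                 return defer.succeed(0)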
-
- * buildbot/scripts/runner.py (statusgui): use the text client for
- now, while I rewrite the Gtk one
- * buildbot/clients/base.py: strip out old code, leaving just the
- basic print-message-on-event functionality. I also remove the
- ReconnectingPBClientFactory, but it does at least quit when it
- loses the connection instead of going silent
-
-2005-04-21 Brian Warner <warner@lothar.com>
-
- * Makefile: minor tweaks
-
- * NEWS: point out deprecation warnings, new features for
- /usr/bin/buildbot
-
- * buildbot/master.py (BuildMaster.loadConfig): emit
- DeprecationWarnings for Builders defined with tuples. Rearrange
-         code to facilitate removal of deprecated configuration keys in the
- next release.
-
- * buildbot/scripts/runner.py (createMaster,createSlave): rewrite
- 'buildbot' command to put a little Makefile in the target that
- helps you re-create the buildbot.tap file, start or stop the
- master/slave, and reconfigure (i.e. SIGHUP) the master. Also chmod
- all the files 0600, since they contain passwords.
- (start): if there is a Makefile, and /usr/bin/make exists, use
- 'make start' in preference to a raw twistd command. This lets
- slave admins put things like PYTHONPATH variables in their
- Makefiles and have them still work when the slave is started with
- 'buildbot start ~/slave/foo'. The test is a bit clunky, it would
- be nice to first try the 'make' command and only fall back to
- twistd if it fails. TODO: the Makefile's "start" command does not
- add the --reactor=win32 argument when running under windows.
- (Options.debugclient, Options.statusgui): add sub-commands to launch
- the debug client (formerly in contrib/debugclient.py) and the
- Gtk status application (currently broken)
- * buildbot/clients/debug.py: move from contrib/debugclient.py
- * buildbot/clients/debug.glade: same
-
- * buildbot/test/test_trial.py: remove it. This requires some
- functionality out of Twisted that isn't there yet, and until then
- having it around just confuses things.
-
- * buildbot/test/test_slavecommand.py (Shell): test both with and
- without PTYs, and make sure that command output is properly
- interleaved in the with-PTY case. I think the without-PTY test
- should pass on windows, where we never use PTYs anyway.
-
-2005-04-20 Brian Warner <warner@lothar.com>
-
- * README (REQUIREMENTS): mention Twisted-2.0.0 compatibility
-
- * MANIFEST.in: add epyrun, gen-reference, buildbot.png
-
- * NEWS: start creating entries for the next release
-
- * buildbot/slave/commands.py (ShellCommand.__init__): use os.pathsep
-
- * buildbot/test/test_web.py (WebTest.test_webPortnum): add timeout
- (WebTest.test_webPathname): same
- (WebTest.test_webPathname_port): same
- (WebTest.test_waterfall): use the default favicon rather than
- rooting around the filesystem for it. Open the expected-icon file
- in binary mode, to make win32 tests happier (thanks to Nick Trout
- for the catch)
- * buildbot/status/html.py (buildbot_icon): win32 portability
-
- * buildbot/test/test_slavecommand.py (SlaveCommandTestCase.testShellZ):
- win32-compatibility fixes from Nick Trout, the "file not found" message
- is different under windows
- (FakeSlaveBuilder.__init__): clean up setup a bit
- * buildbot/test/test_vc.py (VCSupport.__init__): win32: use os.pathsep
-
-2005-04-19 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py (SetupMixin.setUpClass): fix the
- skip-if-repositories-are-unavailable test to not kill the trial
- that comes with Twisted-1.3.0
-
- * setup.py: install buildbot.png icon file when installing code
-
- * buildbot/slave/commands.py (ShellCommand._startCommand): log the
- environment used by the command, at least on the child side.
-
- * buildbot/status/html.py (TextLog.pauseProducing): add a note,
- this method needs to be added and implemented because it gets
- called under heavy load. I don't quite understand the
- producer/consumer API enough to write it.
- (StatusResource.getChild): add a resource for /favicon.ico
- (Waterfall.__init__): add favicon= argument
- * buildbot/test/test_web.py (WebTest.test_waterfall): test it
- (WebTest.test_webPortnum): stop using deprecated 'webPortnum'
- (WebTest.test_webPathname): same
- (WebTest.test_webPathname_port): same
- * docs/config.xhtml: mention favicon=
- * buildbot/buildbot.png: add a default icon, dorky as it is
-
-2005-04-18 Thomas Vander Stichele <thomas at apestaart dot org>
-
- * buildbot/master.py:
- * buildbot/process/base.py:
- * buildbot/process/builder.py:
- * buildbot/process/interlock.py:
- * buildbot/status/builder.py:
- * buildbot/status/html.py:
- * buildbot/status/mail.py:
- * buildbot/status/words.py:
- new documentation while digging through the code
-
-2005-04-17 Brian Warner <warner@lothar.com>
-
- * general: try to fix file modes on all .py files: a+r, a-x,
- but let buildbot/clients/*.py be +x since they're tools
-
- * docs/epyrun (addMod): when an import fails, say why
-
- * Makefile: Add a 'docs' target, hack on the PYTHONPATH stuff
-
-2005-04-17 Thomas Vander Stichele <thomas at apestaart dot org>
-
- * buildbot/process/base.py:
- * buildbot/process/builder.py:
- * buildbot/status/builder.py:
- new documentation while digging through the code
-
-2005-04-17 Thomas Vander Stichele <thomas at apestaart dot org>
-
- * buildbot/changes/changes.py:
- * buildbot/changes/p4poller.py:
- * buildbot/interfaces.py:
- * buildbot/process/base.py:
- * buildbot/process/builder.py:
- * buildbot/process/step.py:
- * buildbot/process/step_twisted.py:
- * buildbot/slave/bot.py:
- * buildbot/slave/commands.py:
- * buildbot/status/builder.py:
- fix all docstrings to make epydoc happy. In the process of fixing
- some, I also moved pieces of docs, and removed some deprecated
- documentation
-
-2005-04-17 Thomas Vander Stichele <thomas at apestaart dot org>
-
- * buildbot/process/builder.py:
- * buildbot/process/interlock.py:
- * buildbot/process/process_twisted.py:
- * buildbot/process/step.py:
- BuildProcess -> Build, as it looks like that's what happened
- * buildbot/process/base.py:
- * buildbot/process/factory.py:
- update epydoc stuff
-
-2005-04-17 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py (QuickTwistedBuildFactory):
-         update compile command to accommodate the Twisted split: now
- instead of './setup.py build_ext -i', you do './setup.py all
- build_ext -i', to run build_ext over all sub-projects.
- (FullTwistedBuildFactory): same
- (TwistedReactorsBuildFactory): same
-
- * buildbot/status/html.py (TextLog.finished): null out self.req
- when we're done, otherwise the reference cycle of TextLog to .req
- to .notifications to a Deferred to TextLog.stop keeps them from
- being collected, and consumes a huge (610MB on pyramid at last
- check) amount of memory.
-
-2005-04-11 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py (VCSupport.__init__): use abspath() to
- normalize the VC-repository location.. makes SVN happier with
- certain test environments.
-
- * buildbot/process/step.py (RemoteShellCommand.__init__): let each
-         RemoteShellCommand get its own .env dictionary, so that code in
- start() doesn't mutate the original. I think this should fix the
- step_twisted.Trial problem where multiple identical components
- kept getting added to PYTHONPATH= over and over again.
-
- * general: merge org.apestaart@thomas/buildbot--doc--0--patch-3,
- adding epydoc-format docstrings to many classes. Thanks to Thomas
- Vander Stichele for the patches.
- * docs/epyrun, docs/gen-reference: add epydoc-generating tools
- * buildbot/status/mail.py, buildbot/process/step_twisted.py: same
- * buildbot/slave/bot.py, commands.py, registry.py: same
-
-2005-04-05 Brian Warner <warner@lothar.com>
-
- * buildbot/slave/commands.py (SourceBase.doCopy): use cp -p to
- preserve timestamps, helps incremental builds of large trees.
- Patch from Rene Rivera.
-
- * buildbot/slave/bot.py (SlaveBuilder.commandComplete): oops, log
- 'failure' and not the non-existent 'why'. Thanks to Rene Rivera
- for the catch.
-
-2005-04-03 Brian Warner <warner@lothar.com>
-
- * buildbot/master.py (BuildMaster.loadConfig): only call exec()
- with one dict, apparently exec has some scoping bugs when used
- with both global/local dicts. Thanks to Nathaniel Smith for the
- catch.
-
-2005-04-02 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (countFailedTests): the new
- trial in Twisted-2.0 emits a slightly different status line than
- old trial ("PASSED.." instead of "OK.."). Handle it so we don't
- mistakenly think the test count is unparseable.
- (Trial.start): note that for some reason each build causes another
- copy of self.testpath to be prepended to PYTHONPATH. This needs to
- be fixed but I'm not sure quite where the problem is.
-
-2005-04-01 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_run.py (Run.testMaster): change some uses of
- deferredResult to avoid hangs/warnings under twisted-2.0
- (RunMixin.tearDown): same
- (RunMixin.shutdownSlave): same
- (Disconnect.testIdle1): same
- (Disconnect.testBuild2): same: wait one second after the build
- finishes for test to really be done.. this should be cleaned up to
- avoid wasting that second. Builder.detach uses a callLater(0),
- either that should be done in-line (something else needed that
- behavior), or it should return a Deferred that fires when the
- builder is really offline.
- (Disconnect.testBuild3): same
- (Disconnect.testDisappear): same
-
- * buildbot/test/test_web.py: rearrange server-setup and teardown
- code to remove unclean-reactor warnings from twisted-2.0
-
- * buildbot/test/test_vc.py: rearrange probe-for-VC-program routine
- so the tests don't hang under twisted-2.0
-
-2005-03-31 Brian Warner <warner@lothar.com>
-
- * buildbot/slave/bot.py (Bot.remote_setBuilderList): fix typo that
- caused a warning each time the master changed our set of builders
-
- * buildbot/status/builder.py (BuildStatus.saveYourself): under
- w32, don't unlink the file unless it already exists. Thanks to
- Baptiste Lepilleur for the catch.
- (BuilderStatus.saveYourself): same
-
-2005-02-01 Brian Warner <warner@lothar.com>
-
- * buildbot/status/html.py (TextLog.getChild): use a /text child
- URL, such as http://foo.com/svn-hello/builds/1/test/0/text instead
- of http://foo.com/svn-hello/builds/1/test/0 , to retrieve the
- logfile as text/plain (no markup, no headers). This replaces the
- previous scheme (which used an ?text=1 argument), and gets us back
- to a relative link (which works better when the buildbot lives
- behind another web server, such as Apache configured as a reverse
- proxy). Thanks to Gerald Combs for spotting the problem.
-
- * buildbot/__init__.py (version): bump to 0.6.2+ while between
- releases
-
-2004-12-13 Brian Warner <warner@lothar.com>
-
- * buildbot/__init__.py (version): Releasing buildbot-0.6.2
-
- * debian/changelog: update for 0.6.2
- * NEWS: finalize for 0.6.2
-
-2004-12-11 Brian Warner <warner@lothar.com>
-
- * NEWS: bring it up to date
-
- * buildbot/slave/bot.py (BotFactory): revamp keepalive/lost-master
- detection code. Require some sign of life from the buildmaster
- every BotFactory.keepaliveInterval seconds. Provoke this
- indication at BotFactory.keepaliveTimeout seconds before the
- deadline by sending a keepalive request. We don't actually care if
- that request is answered in a timely fashion, what we care about
- is that .activity() is called before the deadline. .activity() is
- triggered by any PB message from the master (including an ack to
- one of the slave's status-update messages). With this new scheme,
- large status messages over slow pipes are OK, as long as any given
- message can be sent (and thus acked) within .keepaliveTimeout
- seconds (which defaults to 30).
- (SlaveBuilder.remote_startCommand): record activity
- (SlaveBuilder.ackUpdate): same
- (SlaveBuilder.ackComplete): same
- (BotFactory.gotPerspective): same
- * buildbot/test/test_run.py (Disconnect.testSlaveTimeout): test it
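-
-         (The timing logic reads roughly like this sketch; hypothetical
-         class, not the real BotFactory:)
-
-             import time
-
-             class KeepaliveTracker:
-                 keepaliveInterval = 10 * 60   # deadline for signs of life
-                 keepaliveTimeout = 30         # margin for the provoked ack
-
-                 def __init__(self):
-                     self.last_activity = time.time()
-
-                 def activity(self):
-                     # called for any PB message from the master, including
-                     # acks to the slave's own status updates
-                     self.last_activity = time.time()
-
-                 def check(self, now=None):
-                     now = now or time.time()
-                     deadline = self.last_activity + self.keepaliveInterval
-                     if now >= deadline:
-                         return "lost-master"     # give up and reconnect
-                     if now >= deadline - self.keepaliveTimeout:
-                         return "send-keepalive"  # provoke an ack in time
-                     return "ok"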
-
-2004-12-09 Brian Warner <warner@lothar.com>
-
- * buildbot/status/html.py (StatusResourceBuilder.getChild): remove
- debug message
-
- * buildbot/process/step_twisted.py (Trial._commandComplete):
- update self.cmd when we start the 'cat test.log' transfer. Without
- this, we cannot interrupt the correct RemoteCommand when we lose
- the connection.
-
- * buildbot/process/step.py (RemoteCommand.interrupt): don't bother
- trying to tell the slave to stop the command if we're already
- inactive, or if we no longer have a .remote
-
- * buildbot/process/builder.py (Builder._detached): don't let an
- exception in currentBuild.stopBuild() prevent the builder from
- being marked offline
-
-2004-12-07 Brian Warner <warner@lothar.com>
-
- * buildbot/status/words.py (IrcStatusBot.getBuilder): catch the
- KeyError that happens when you ask for a non-existent Builder, and
- translate it into a UsageError.
-
- * buildbot/test/test_run.py (Disconnect.testBuild4): validate that
- losing the slave in the middle of a remote step is handled too
-
- * buildbot/process/step.py (ShellCommand.interrupt): 'reason' can
- be a Failure, so be sure to stringify it before using it as the
- contents of the 'interrupt' logfile
- (RemoteCommand.interrupt): use stringified 'why' in
- remote_interruptCommand too, just in case
-
-2004-12-06 Brian Warner <warner@lothar.com>
-
- * buildbot/slave/commands.py (Arch.doVCUpdate): use 'tla replay'
- instead of 'tla update', which is more efficient in case we've
- missed a couple of patches since the last update.
-
- * debian/changelog: update for previous (0.6.1) release. Obviously
- this needs to be handled better.
-
-2004-12-05 Brian Warner <warner@lothar.com>
-
- * NEWS: update for stuff since last release
-
- * buildbot/master.py (DebugPerspective.attached): return 'self', to
- match the maybeDeferred change in Dispatcher.requestAvatar
- * buildbot/changes/pb.py (ChangePerspective.attached): same
- * buildbot/status/client.py (StatusClientPerspective.attached): same
- * buildbot/process/builder.py (Builder._attached3): same
- * buildbot/pbutil.py (NewCredPerspective.attached): same
-
- * buildbot/status/html.py (WaterfallStatusResource.phase2): Add
- the date to the top-most box, if it is not the same as today's
- date.
-
- * docs/slave.xhtml: provide a buildslave setup checklist
-
- * docs/source.xhtml (Arch): correct terminology
-
-2004-12-04 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_slavecommand.py: use sys.executable instead
- of hard-coding 'python' for child commands, might help portability
-
- * docs/examples/twisted_master.cfg: update to current usage
-
- * buildbot/status/words.py (IrcStatusBot.command_STOP): add a
- 'stop build' command to the IRC bot
-
- * buildbot/master.py (Dispatcher.requestAvatar): remove debug
- message that broke PBChangeSource
-
- * buildbot/slave/bot.py: clean up shutdown/lose-master code
- (SlaveBuilder): make some attributes class-level, remove the old
- "update queue" which existed to support resuming a build after the
- master connection was lost. Try to reimplement that feature later.
- (SlaveBuilder.stopCommand): clear self.command when the
- SlaveCommand finishes, so that we don't try to kill a leftover one
- at shutdown time.
- (SlaveBuilder.commandComplete): same, merge with commandFailed and
- .finishCommand
-
- * buildbot/slave/commands.py (SourceBase): set self.command for
- all VC commands, so they can be interrupted.
-
-2004-12-03 Brian Warner <warner@lothar.com>
-
- * buildbot/master.py: clean up slave-handling code, to handle
- slave-disconnect and multiple-connect better
- (BotPerspective): make these long-lasting, exactly one per bot
- listed in the config file.
- (BotPerspective.attached): if a slave connects while an existing
- one appears to still be connected, disconnect the old one first.
- (BotPerspective.disconnect): new method to forcibly disconnect a
- buildslave. Use some hacks to empty the transmit buffer quickly to
- avoid the long (20-min?) TCP timeout that could occur if the old
- slave has dropped off the net.
- (BotMaster): Keep persistent BotPerspectives in .slaves, let them
- own their own SlaveStatus objects. Remove .attached/.detached, add
- .addSlave/.removeSlave, treat slaves like Builders (config file
- parsing sends deltas to the BotMaster). Inform the slave
- instances, i.e. the BotPerspective, about addBuilder and
- removeBuilder.
- (BotMaster.getPerspective): turns into a single dict lookup
- (Dispatcher.requestAvatar): allow .attached to return a Deferred,
- which gives BotPerspective.attached a chance to disconnect the old
- slave first.
- (BuildMaster.loadConfig): add code (disabled) to validate that all
- builders use known slaves (listed in c['bots']). The check won't
- work with tuple-specified builders, which are deprecated but not
- yet invalid, so the check is disabled for now.
- (BuildMaster.loadConfig_Slaves): move slave-config into a separate
- routine, do the add/changed/removed dance with them like we do
- with builders.
- (BuildMaster.loadConfig_Sources): move source-config into a
- separate routine too
-
- * buildbot/status/builder.py (Status.getSlave): get the
- SlaveStatus object from the BotPerspective, not the BotMaster.
-
- * buildbot/test/test_run.py: bunch of new tests for losing the
- buildslave at various points in the build, handling a slave that
- connects multiple times, and making sure we can interrupt a
- running build
-
- * buildbot/slave/bot.py (BuildSlave): make it possible to use
- something other than 'Bot' for the Bot object, to make certain
- test cases easier to write.
- (BuildSlave.waitUntilDisconnected): utility method for testing
-
-2004-11-30 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_run.py (RunMixin): refactor, remove debug msg
-
- * buildbot/interfaces.py (IBuilderControl.ping): add timeout=
- argument, return a Deferred that always fires with True or False.
- I don't use an errback to indicate 'ping failed' so that callers
- are free to ignore the deferred without causing spurious errors in
- the logs.
- * buildbot/process/builder.py (BuilderControl.ping): implement it
-
- * buildbot/test/test_run.py (Status.testDisappear): test ping
- (Status.disappearSlave): fix it
-
-2004-11-30 Brian Warner <warner@lothar.com>
-
- * buildbot/interfaces.py (IBuildControl): add .stopBuild
- (IBuilderControl): add .getBuild(num), only works for the current
- build, of course, although it might be interesting to offer
- something for builds in the .waiting or .interlocked state.
-
- * buildbot/process/base.py (Build): have .stopBuild just do the
- interrupt, then let the build die by itself.
- (BuildControl): add .stopBuild, and add a point-event named
- 'interrupt' just after the build so status viewers can tell that
- someone killed it.
- (BuilderControl): add .getBuild
-
- * buildbot/process/step.py (Dummy): use haltOnFailure so it really
- stops when you kill it, good for testing
- (ShellCommand.interrupt): add a logfile named 'interrupt' which
- contains the 'reason' text.
-
- * buildbot/status/html.py: Add Stop Build button, if the build can
- still be stopped. Send a Redirect (to the top page) one second
- later, hopefully long enough for the interrupt to have an effect.
- Move make_row() up to top-level to share it between Stop Build and
- Force Build.
-
- * buildbot/slave/commands.py: only kill the child process once
-
- * buildbot/test/test_run.py: add testInterrupt
-
-2004-11-29 Brian Warner <warner@lothar.com>
-
- * buildbot/process/base.py: Refactor command interruption. The
- Build is now responsible for noticing that the slave has gone
- away: Build.lostRemote() interrupts the current step and makes
- sure that no further ones will be started.
-
- * buildbot/process/builder.py: When the initial remote_startBuild
- message fails, log it: this usually indicates that the slave has
- gone away, but we don't really start paying attention until they
- fail to respond to the first step's command.
-
- * buildbot/process/step.py (RemoteCommand): Does *not* watch for
- slave disconnect. Now sports a new interrupt() method. Error
- handling was simplified a lot by chaining deferreds, so
- remoteFailed/remoteComplete were merged into a single
- remoteComplete method (which can now get a Failure object).
- Likewise failed/finished were merged into just _finished.
- (BuildStep): Add interrupt(why) method, and if why is a
- ConnectionLost Failure then the step is failed with some useful
- error text.
-
- * buildbot/slave/bot.py: stop the current command when the remote
- Step reference is lost, and when the slave is shut down.
- (Bot): make it a MultiService, so it can have children. Use
- stopService to tell when the slave is shutting down.
- (SlaveBuilder): make it a Service, and a child of the Bot. Add
- remote_interruptCommand (which asks the current SlaveCommand to
- stop but allows it to keep emitting status messages), and
- stopCommand (which tells it to shut up and die).
-
- * buildbot/slave/commands.py: make commands interruptible
- (ShellCommand.kill): factor out os.kill logic
- (Command): factor out setup()
- (Command.sendStatus): don't send status if .running is false, this
- happens when the command has been halted.
- (Command.interrupt): new method, used to tell the command to die
- (SlaveShellCommand): implement .interrupt
- (DummyCommand): implement .interrupt
- (SourceBase, etc): factor out setup(), don't continue substeps if
- .interrupted is set
-
- * buildbot/status/builder.py: fix all waitUntilFinished() methods
- so they can be called after finishing
-
- * buildbot/test/test_run.py: new tests for disconnect behavior,
- refactor slave-shutdown routines, add different kinds of
- slave-shutdown
-
-2004-11-27 Brian Warner <warner@lothar.com>
-
- * buildbot/status/words.py (IrcStatusBot.convertTime): utility
- method to express ETA time like "2m45s" instead of "165 seconds"
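-
-         (For instance, as a sketch rather than the real convertTime:)
-
-             def convert_time(seconds):
-                 # "165 seconds" -> "2m45s"
-                 minutes, secs = divmod(int(seconds), 60)
-                 if minutes:
-                     return "%dm%02ds" % (minutes, secs)
-                 return "%ds" % secs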
-
-2004-11-24 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py (VC.testArch): unregister the test
- archive after the test completes, to avoid cluttering the user's
- 'tla archives' listing with a bogus entry. Arch doesn't happen to
- provide any way to override the use of ~/.arch-params/, so there
- isn't a convenient way to avoid touching the setup of the user who
- runs the test.
- (VC_HTTP.testArchHTTP): same
-
-2004-11-23 Brian Warner <warner@lothar.com>
-
- * buildbot/status/html.py (TextLog): split render() up into
- render_HEAD and render_GET. Use a Producer when sending log
- chunks, to reduce memory requirements and avoid sending huge
- non-Banana-able strings over web.distrib connections. Requires
- peeking under the covers of IStatusLog.
- (TextLog.resumeProducing): fix the "as text" link, handle client
- disconnects that occur while we're still sending old chunks.
-
- * buildbot/status/builder.py (HTMLLogFile.waitUntilFinished): oops,
- use defer.succeed, not the non-existent defer.success
- (LogFile.waitUntilFinished): same
- (LogFile.subscribe): don't add watchers to a finished logfile
-
- * buildbot/__init__.py (version): bump to 0.6.1+ while between
- releases
-
-2004-11-23 Brian Warner <warner@lothar.com>
-
- * buildbot/__init__.py (version): Releasing buildbot-0.6.1
-
-2004-11-23 Brian Warner <warner@lothar.com>
-
- * NEWS: update for the 0.6.1 release
- * MANIFEST.in: add new files
-
- * README (INSTALLATION): explain how to enable the extra VC tests
-
- * buildbot/status/builder.py (LogFile): add .runEntries at the class
-         level too, so old pickled builds can be displayed OK
-
-2004-11-22 Brian Warner <warner@lothar.com>
-
- * NEWS: summarize updates since last release
-
- * README (SLAVE): fix usage of 'buildbot slave' command. Thanks to
- Yoz Grahame. Closes SF#1050138.
-
- * docs/changes.xhtml (FreshCVSSourceNewcred): fix typo. Closes
- SF#1042563.
-
- * buildbot/process/step_twisted.py (Trial): update docs a bit
-
- * docs/factories.xhtml: fix Trial factory docs to match reality.
- Closes: SF#1049758.
-
- * buildbot/process/factory.py (Trial.__init__): add args for
- randomly= and recurse=, making them available to instantiators
- instead of only to subclassers. Closes: SF#1049759.
-
-2004-11-15 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py (QuickTwistedBuildFactory):
- try to teach the Quick factory to use multiple versions of python
-
-2004-11-12 Brian Warner <warner@lothar.com>
-
- * buildbot/status/builder.py (BuilderStatus.saveYourself): use a
- safer w32-compatible approach, and only use it on windows
- (BuildStatus.saveYourself): same
-
-2004-11-11 Brian Warner <warner@lothar.com>
-
- * buildbot/status/builder.py (LogFile.addEntry): smarter way to do
- it: one string merge per chunk. There are now separate .entries
- and .runEntries lists: when enumerating over all chunks, make sure
- to look at both.
- * buildbot/test/test_status.py (Log): more tests
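-
-         (A sketch of the chunk-merging idea; hypothetical class, not the
-         real LogFile, using the 10kb threshold mentioned below:)
-
-             class ChunkedLog:
-                 chunkSize = 10 * 1000
-
-                 def __init__(self):
-                     self.entries = []     # already-merged chunks
-                     self.runEntries = []  # current run of small writes
-                     self.runLength = 0
-
-                 def addEntry(self, text):
-                     self.runEntries.append(text)
-                     self.runLength += len(text)
-                     if self.runLength >= self.chunkSize:
-                         self.entries.append("".join(self.runEntries))
-                         self.runEntries = []
-                         self.runLength = 0
-
-                 def getText(self):
-                     # readers must look at both lists
-                     return "".join(self.entries) + "".join(self.runEntries)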
-
- * buildbot/status/builder.py (LogFile.addEntry): Merge string
- chunks together, up to 10kb per chunk. This ought to cut down on
- the CPU-burning overhead of large log files. Thanks to Alexander
- Staubo for spotting the problem.
- * buildbot/test/test_status.py (Log): tests for same
-
-2004-11-10 Brian Warner <warner@lothar.com>
-
- * buildbot/status/mail.py (MailNotifier.buildMessage): add a Date
- header to outbound mail
- * buildbot/test/test_status.py (Mail.testBuild1): test for same
-
-2004-11-08 Brian Warner <warner@lothar.com>
-
- * buildbot/status/builder.py (BuilderStatus.saveYourself): w32
- can't do os.rename() onto an existing file, so catch the exception
- and unlink the target file first. This introduces a slight window
- where the existing file could be lost, but the main failure case
- (disk full) should still be handled safely.
- (BuildStatus.saveYourself): same
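-
-         (A sketch of the save dance, combined with the write-to-.tmp-then-
-         rename step mentioned further down; hypothetical function, not the
-         real saveYourself:)
-
-             import os, pickle
-
-             def save_pickle(obj, filename):
-                 # write to a temp file first so an error mid-pickle cannot
-                 # corrupt the existing copy, then move it into place
-                 tmp = filename + ".tmp"
-                 f = open(tmp, "wb")
-                 pickle.dump(obj, f, 2)
-                 f.close()
-                 try:
-                     os.rename(tmp, filename)
-                 except OSError:
-                     # win32 refuses to rename onto an existing file: unlink
-                     # the target first, accepting the brief window in which
-                     # neither file exists
-                     if os.path.exists(filename):
-                         os.unlink(filename)
-                     os.rename(tmp, filename)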
-
- * buildbot/changes/pb.py (ChangePerspective): use a configurable
- separator character instead of os.sep, because the filenames being
- split here are coming from the VC system, which can have a
- different pathname convention than the local host. This should
- help a buildmaster running on windows that uses a CVS repository
- which runs under unix.
- * buildbot/changes/mail.py (MaildirSource): same, for all parsers
-
- * buildbot/process/step_twisted.py (Trial.createSummary): survive
- when there are no test failures to be parsed
-
- * buildbot/scripts/runner.py (createMaster): use shutil.copy()
- instead of the unix-specific os.system("cp"), thanks to Elliot
- Murphy for this and the other buildbot-vs-windows catches.
- * buildbot/test/test_maildir.py (MaildirTest.deliverMail): same
-
- * contrib/windows/buildbot.bat: prefix a '@', apparently to not
- echo the command as it is run
-
- * setup.py: install sample.mk too, not just sample.cfg
- (scripts): install contrib/windows/buildbot.bat on windows
-
-2004-11-07 Brian Warner <warner@lothar.com>
-
- * buildbot/process/builder.py (Builder._detached): clear the
- self.currentBuild reference, otherwise the next build will be
- skipped because we think the Builder is already in use.
-
- * docs/examples/twisted_master.cfg: update to match current usage
- on the Twisted buildbot
-
-2004-10-29 Brian Warner <warner@lothar.com>
-
- * buildbot/status/mail.py (MailNotifier): fix typo in docs
-
-2004-10-28 Brian Warner <warner@lothar.com>
-
- * buildbot/slave/commands.py (SourceBase): refactor subclasses to
- have separate doVCUpdate/doVCFull methods. Catch an update failure
- and respond by clobbering the source directory and re-trying. This
- will handle local changes (like replacing a file with a directory)
- that will cause CVS and SVN updates to fail.
- * buildbot/test/test_vc.py (SetupMixin.do_vc): test the same
-
- * buildbot/process/step.py (LoggedRemoteCommand.__repr__): avoid a
- python-2.4 warning
-
-2004-10-19 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (Trial.createSummary): bugfixes
-
- * buildbot/status/html.py (StatusResourceTestResults): display any
- TestResults that the Build might have
- (StatusResourceTestResult): and the logs for each TestResult
- (StatusResourceBuild): add link from the per-build page
-
-2004-10-15 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (Trial.createSummary): parse
- the 'problems' portion of stdout, add TestResults to our build
- * buildbot/test/test_twisted.py (Parse.testParse): test it
-
- * buildbot/interfaces.py (IBuildStatus.getTestResults): new method
- to retrieve a dict of accumulated test results
- (ITestResult): define what a single test result can do
- * buildbot/status/builder.py (TestResult): implement ITestResult
- (BuildStatus.getTestResults): retrieve dict of TestResults
- (BuildStatus.addTestResult): add TestResults
- * buildbot/test/test_status.py (Results.testAddResults): test it
-
-2004-10-14 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_maildir.py (MaildirTest): use shutil.rmtree
- instead of os.system("rm -rf") for win32 portability
-
- * buildbot/test/test_slavecommand.py (SlaveCommandTestCase): use
- SignalMixin instead of starting/stopping the reactor, which is
- likely to cause problems with other tests
-
- * buildbot/slave/commands.py (SourceBase.doCopy): remove leftover
- self.copyComplete() call. Yoz Grahame makes the catch.
-
- * contrib/windows/buildbot.bat: helper script to deal with path
- issues. Thanks to Yoz Grahame.
-
- * buildbot/master.py (BuildMaster.startService): don't register a
- SIGHUP handler if the signal module has no SIGHUP attribute.
- Apparently win32 does this.
-
- * buildbot/scripts/runner.py (start): add --reactor=win32 on win32
-
- * buildbot/test/test_web.py (WebTest.test_webPathname): skip the
- test if the reactor can't offer UNIX sockets
-
- * buildbot/status/html.py (StatusResourceBuild.body): fix syntax
- error introduced in the last commit. We really need that
- metabuildbot :).
-
-2004-10-12 Brian Warner <warner@lothar.com>
-
- * buildbot/changes/mail.py (MaildirSource.describe): fix exception
- when describing a maildir source. Thanks to Stephen Davis.
-
- * buildbot/status/words.py (IrcStatusBot.command_WATCH): round off
- ETA seconds
-
- * buildbot/scripts/runner.py (createMaster): install Makefile too
- (start): add --no_save to 'start' command
- * buildbot/scripts/sample.mk: simple convenience Makefile with
- start/stop/reload targets
-
- * buildbot/__init__.py (version): bump to 0.6.0+ while between
- releases
-
-2004-09-30 Brian Warner <warner@lothar.com>
-
- * setup.py: Releasing buildbot-0.6.0
-
-2004-09-30 Brian Warner <warner@lothar.com>
-
- * MANIFEST.in: add debian/*, sample.cfg, more docs files. Remove
- test_trial.py from the source tarball until support is complete.
-
- * NEWS: update for 0.6.0 release
- * buildbot/__init__.py (version): same
- * README: same
-
- * buildbot/status/words.py (IrcStatusBot.command_SOURCE): add
- 'source' command to tell users where to get the Buildbot source
-
- * docs/examples/*.cfg: update to modern standards
-
- * NEWS: update for release
-
- * buildbot/scripts/runner.py (createMaster): remove the
- -shutdown.tap stuff now that it isn't necessary
- (createSlave): same
- (start): launch buildbot.tap, not buildbot-shutdown.tap
-
-
- * buildbot/status/mail.py (Domain): shorten class name
- (MailNotifier): if lookup= is a string, pass it to Domain()
- * buildbot/test/test_status.py (Mail.testBuild1): new class name
- (Mail.testBuild2): test the string-to-Domain shortcut
- (Mail.testMail): fix test
-
-
- * buildbot/scripts/sample.cfg: improve the build-the-buildbot
- example config file
-
- * buildbot/status/builder.py (BuildStatus.__setstate__): re-set
- more attributes on load
-         (BuilderStatus.stubBuildCacheSize): bump to 30; this was too low
-         to accommodate the whole waterfall page at once, and the thrashing
- results in a lot of unnecessary loads
- (BuildStatus.saveYourself): use binary pickles, not fluffy text
- (BuilderStatus.saveYourself): same
- (BuilderStatus.eventGenerator): stop generating on the first missing
- build. We assume that saved builds are deleted oldest-first.
- (BuildStepStatus.__getstate__): .progress might not exist
-
- * buildbot/changes/changes.py (ChangeMaster): make it
- serializable, in $masterdir/changes.pck
- (ChangeMaster.stopService): save on shutdown
- * buildbot/master.py (BuildMaster.loadChanges): load at startup
- * buildbot/test/test_config.py: load Changes before config file
-
-
- * buildbot/slave/commands.py (ShellCommand.doTimeout): put the
- "Oh my god, you killed the command" header on a separate line
-
- * buildbot/status/builder.py (BuilderStatus.getStubBuildByNumber):
- skip over corrupted build pickles
- (BuilderStatus.getFullBuildByNumber): same
- (BuilderStatus.eventGenerator): skip over unavailable builds
- (BuildStatus.saveYourself): save builds to a .tmp file first, then
- do an atomic rename. This prevents a corrupted pickle when some
- internal serialization error occurs.
- (BuilderStatus.saveYourself): same
-
- * buildbot/slave/commands.py (SlaveShellCommand): oops, restore
- the timeout for shell commands, it got lost somehow
-
- * buildbot/status/builder.py (BuilderStatus.eventGenerator): if we
- run out of build steps, return the rest of the builder events
-
- * buildbot/interfaces.py (IBuilderControl.ping): add method
-
- * buildbot/process/builder.py (BuilderControl.ping): move
- slave-ping to BuilderControl, and fix the failure case in the
- process (Event.finish() is the verb, Event.finished is the noun).
-
- * buildbot/status/html.py (StatusResourceBuilder.ping): ping
- through the BuilderControl instead of the BuilderStatus
- (EventBox): add adapter for builder.Event, allowing builder events to
- be displayed in the waterfall display
-
- * buildbot/master.py (BotMaster.stopService): add a 'master
- shutdown' event to the builder's log
- (BuildMaster.startService): and a 'master started' on startup
-
- * buildbot/status/builder.py (BuilderStatus.eventGenerator): merge
- builder events into the BuildStep event stream
- (Status.builderAdded): add a 'builder created' event
-
-
- * buildbot/status/words.py (IrcStatusBot.command_WATCH): new
- command to announce the completion of a running build
- (IrcStatusBot.command_FORCE): announce when the build finishes
-
- * buildbot/status/builder.py (BuilderStatus.addFullBuildToCache):
- don't evict unfinished builds from the cache: they must stay in
- the full-cache until their logfiles have stopped changing. Make
- sure the eviction loop terminates if an unfinished build was hit.
- (HTMLLogFile.getTextWithHeaders): return HTML as if it were text.
- This lets exceptions be dumped in an email status message. Really
- we need LogFiles which contain both text and HTML, instead of two
- separate classes.
- (BuildStatus.__getstate__): handle self.finished=False
- (Status.builderAdded): if the pickle is corrupted, abandon the
- history and create a new BuilderStatus object.
-
- * buildbot/process/base.py (Build.stopBuild): tolerate lack of a
-         self.progress attribute, which helps one test that doesn't fully set
- up the Build object.
-
- * buildbot/interfaces.py (IStatusLogStub): split out some of the
- IStatusLog methods into an Interface that is implemented by "stub"
- logs, for which all the actual text chunks are on disk (in the
- pickled Build instance). To show the log contents, you must first
- adapt the stub log to a full IStatusLog object.
-
- * buildbot/status/builder.py (LogFileStub): create separate stub
- log objects, which can be upgraded to a real one if necessary.
- (LogFile): make them persistable, and let them stubify themselves
- (HTMLLogFile): same
- (BuildStepStatus): same
- (BuildStatus): same
- (BuildStatus.saveYourself): save the whole build out to disk
- (BuilderStatus): make it persistable
- (BuilderStatus.saveYourself): save the builder to disk
- (BuilderStatus.addFullBuildToCache): implement two caches which
- hold Build objects: a small one which holds full Builds, and a
- larger one which holds "stubbed" Builds (ones with their LogFiles
- turned into LogFileStubs). This reduces memory usage by the
- buildmaster by not keeping more than a few (default is 2) whole
- build logs in RAM all the time.
- (BuilderStatus.getBuild): rewrite to pull from disk (through the
- cache)
- (BuilderStatus.eventGenerator): rewrite since .builds went away
- (BuilderStatus.buildStarted): remove the .builds array. Add the
- build to the "full" cache when it starts.
- (BuilderStatus._buildFinished): save the build to disk when it
- finishes
- (Status): give it a basedir (same as the BuildMaster's basedir)
- where the builder pickles can be saved
- (Status.builderAdded): create the BuilderStatus ourselves, by
- loading a pickle from disk (or creating a new instance if there
- was none on disk). Return the BuilderStatus so the master can glue
- it into the new Builder object.
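-
-         (The two-cache arrangement might be pictured like this sketch;
-         hypothetical and much simplified, and note that the real code also
-         declines to evict unfinished builds:)
-
-             class BuildCache:
-                 def __init__(self, full_size=2, stub_size=30):
-                     self.full_size = full_size
-                     self.stub_size = stub_size
-                     self.full = []    # (number, build), oldest first
-                     self.stubs = []   # (number, stubbed build), oldest first
-
-                 def add(self, number, build):
-                     # new builds sit in the small "full" cache; when that
-                     # overflows, the oldest one is stubified and demoted to
-                     # the larger stub cache, which is itself bounded
-                     self.full.append((number, build))
-                     while len(self.full) > self.full_size:
-                         num, old = self.full.pop(0)
-                         self.stubs.append((num, self.stubify(old)))
-                     while len(self.stubs) > self.stub_size:
-                         self.stubs.pop(0)
-
-                 def stubify(self, build):
-                     # placeholder: the real code swaps LogFile instances for
-                     # LogFileStubs and relies on the pickle on disk
-                     return build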
-
- * buildbot/master.py (BotMaster.stopService): on shutdown, tell
- all BuilderStatuses to save themselves out to disk. This is in
- lieu of saving anything important in the main Application pickle
- (the -shutdown.tap file).
- (BuildMaster.__init__): give Status() a basedir for its files
- (BuildMaster.loadConfig_Builders): do status.builderAdded first,
- to get the BuilderStatus, then give it to the Builder (instead of
- doing it the other way around). It's ok if the status announces
- the new Builder before it's really ready, as the outside world can
- only see the BuilderStatus object anyway (and it is ready before
- builderAdded returns). Use the builder's "builddir" (which
- normally specifies where the slave will run the builder) as the
- master's basedir (for saving serialized builds).
-
- * buildbot/status/html.py (StatusResourceBuildStep.getChild):
- coerce the logfile to IStatusLog before trying to get the text
- chunks out of it. This will pull the full (non-stubified) Build in
- from disk if necessary.
- (TextLog): fix the adapter registration
-
- * buildbot/test/test_control.py (Force.setUp): create the basedir
- * buildbot/test/test_web.py: same
- * buildbot/test/test_vc.py (SetupMixin.setUp): same
- * buildbot/test/test_status.py (Mail.makeBuild): match new setup
- * buildbot/test/test_run.py (Run.testMaster): same
- (Status.setUp): same
-
-2004-09-29 Fred L. Drake, Jr. <fdrake@acm.org>
-
- * buildbot/status/html.py (Waterfall.__init__): store actual
- allowForce flag passed in rather than using True for everyone;
- make sure setting it to False doesn't cause a NameError
- (Waterfall.setup).
- (StatusResourceBuilder.__init__) add the builder name to the page
- title.
- (StatusResourceBuilder.body) move HTML generation for a name/value
- row into a helper method (StatusResourceBuilder.make_row); only
- generate the "Force Build" form if allowForce was True and the
- slave is connected. Use class attributes in the generated HTML to
- spread a little CSS-joy.
-
-2004-09-28 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (Trial.createSummary): fix
- warning-scanner to not ignore things like
- 'ComponentsDeprecationWarning' and 'exceptions.RuntimeWarning'
-
- * buildbot/status/html.py (StatusResource.control): add some
- class-level values for .control in an attempt to make upgrading
- smoother
-
- * buildbot/util.py (ComparableMixin): survive missing attributes,
- such as when a class is modified and we're comparing old instances
- against new ones
-
- * buildbot/status/words.py (IrcStatusBot.privmsg): clean up
- failure handling, remove a redundant try/except block. Don't
- return the full traceback to the IRC channel.
- (IrcStatusBot.command_FORCE): catch new exceptions, return useful
- error messages. Get ETA properly.
-
- * buildbot/status/html.py (StatusResourceBuild.body): html.escape
-         the reason, since (at least) IRC messages will have <> in them.
- (StatusResourceBuilder.__init__): take an IBuilderControl
- (StatusResourceBuilder.force): use the IBuilderControl we get in
- the constructor instead of trying to make our own. Catch the
- new exceptions and ignore them for now (until we make an
- intermediate web page where we could show the error message)
- (StatusResource): create with an IControl, use it to give an
- IBuilderControl to all children
- (Waterfall): take an allowForce= option, pass an IControl object
- to StatusResource if it is True
-
- * buildbot/test/test_web.py (ConfiguredMaster): handle IControl
-
- * buildbot/master.py (BotPerspective.perspective_forceBuild):
- catch new exceptions and return string forms
-
- * buildbot/interfaces.py: add NoSlaveError, BuilderInUseError
- * buildbot/process/builder.py (Builder.forceBuild): raise them
- * buildbot/test/test_control.py (Force.testNoSlave): new test
- (Force.testBuilderInUse): same
-
-
- * buildbot/status/words.py (IrcStatusBot): enable build-forcing
-
- * buildbot/test/test_run.py: use IControl
- * buildbot/test/test_vc.py: same
-
- * buildbot/status/html.py (StatusResourceBuilder.force): rewrite
- to use IControl. Still offline.
- * buildbot/status/words.py (IrcStatusBot.command_FORCE): same
-
- * buildbot/process/builder.py (Builder.doPeriodicBuild): set
- who=None so periodic builds don't send out status mail
- (Builder.forceBuild): include reason in the log message
- (BuilderControl.forceBuild): rename 'name' to 'who'
-
- * buildbot/master.py (BotPerspective.perspective_forceBuild): add
- 'who' parameter, but make it None by default so builds forced by
- slave admins don't cause status mail to be sent to anybody
- (BotMaster.forceBuild): same. this method is deprecated.
- (DebugPerspective.perspective_forceBuild): same, use IControl.
- (DebugPerspective.perspective_fakeChange): use IControl..
- (Dispatcher.requestAvatar): .. so don't set .changemaster
-
- * buildbot/interfaces.py (IBuilderControl.forceBuild): rename 'who'
- parameter to avoid confusion with the name of the builder
-
-
- * buildbot/status/mail.py: refine comment about needing 2.3
-
- * buildbot/status/html.py: move all imports to the top
-
- * buildbot/test/test_control.py: test new interfaces
- * buildbot/test/test_run.py (Status): handle new interfaces
- * buildbot/test/test_vc.py (SetupMixin.doBuild): same
-
- * buildbot/process/base.py (BuildControl): implement IBuildControl
- and its lonely getStatus() method
-
- * buildbot/process/builder.py (BuilderControl): implement
- IBuilderControl, obtained by adapting the Builder instance
- (Builder.startBuild): return a BuilderControl instead of a
- Deferred. The caller can use bc.getStatus().waitUntilFinished() to
- accomplish the same thing.
-
- * buildbot/master.py: move all import statements to the top
- (Control): implement IControl, obtained by adapting the
- BuildMaster instance.
-
- * buildbot/interfaces.py: add IControl, IBuilderControl, and
- IBuildControl. These are used to force builds. Eventually they
- will provide ways to reconfigure the Builders, pause or abandon a
- Build, and perhaps control the BuildMaster itself.
-
-2004-09-26 Brian Warner <warner@lothar.com>
-
- * buildbot/util.py (ComparableMixin): survive twisted>1.3.0 which
- ends up comparing us against something without a .__class__
-
-2004-09-24 Brian Warner <warner@lothar.com>
-
- * buildbot/scripts/runner.py: rearrange option parsing a lot, to get
- usage text right.
-
- * Makefile: add 'deb-snapshot' target, to create a timestamped
- .deb package
-
- * debian/rules (binary-indep): skip CVS/ files in dh_installexamples
-
-2004-09-23 Brian Warner <warner@lothar.com>
-
- * buildbot/__init__.py (version): move version string here
- * setup.py: get version string from buildbot.version
- * buildbot/status/html.py (WaterfallStatusResource.body): add
- buildbot version to the page footer
- * buildbot/status/words.py (IrcStatusBot.command_VERSION): provide
- version when asked
-
- * buildbot/master.py (BotMaster.getPerspective): detect duplicate
- slaves, let the second know where the first one is coming from
- (BuildMaster.__init__): turn on .unsafeTracebacks so the slave can
- see our exceptions. It would be nice if there were a way to just
- send them the exception type and value, not the full traceback.
-
-
- * buildbot/status/mail.py (MailNotifier): add a new argument
- sendToInterestedUsers=, which can be set to False to disable the
- usual send-to-blamelist behavior.
- (top): handle python-2.2 which has no email.MIMEMultipart
- (MailNotifier.buildMessage): don't send logs without MIMEMultipart
- (MailNotifier.disownServiceParent): unsubscribe on removal
-
- * buildbot/test/test_status.py (Mail.testBuild2): test it
-
-
- * buildbot/status/progress.py (Expectations.wavg): tolerate
- current=None, which happens when steps start failing badly
- * buildbot/test/test_status.py (Progress.testWavg): test for it
-
- * buildbot/process/step.py (SVN.startVC): when the (old) slave
- doesn't understand args['revision'], emit a warning instead of
- bailing completely. Updating to -rHEAD is probably close enough.
-
- * buildbot/process/step_twisted.py (Trial.start): fix sanity-check
-
- * buildbot/test/test_status.py: at least import bb.status.client
- even if we don't have any test coverage for it yet
-
- * contrib/svn_buildbot.py: don't require python2.3
- (main): wait, do require it (for sets.py), but explain how to
- make it work under python2.2
-
-2004-09-23 Brian Warner <warner@lothar.com>
-
- * contrib/svn_buildbot.py: include the revision number in the Change
-
- * buildbot/changes/freshcvs.py (FreshCVSSourceNewcred): use when=,
-	set from util.now(), since FreshCVS is a realtime service
-
- * buildbot/status/event.py: delete dead code
- * buildbot/process/step.py: don't import dead Event class
- * buildbot/process/step_twisted.py: same
- * buildbot/status/builder.py: same
- * buildbot/status/client.py: same
-
- * buildbot/test/test_process.py: kill buggy out-of-date disabled test
-
- * buildbot/changes/changes.py (Change): set .when from an __init__
- argument (which defaults to now()), rather than having
- ChangeMaster.addChange set it later.
- (ChangeMaster.addChange): same
-
- * buildbot/changes/mail.py (parseFreshCVSMail): pass in when=
- (parseSyncmail): same. Just use util.now() for now.
- (parseBonsaiMail): parse the timestamp field for when=
-
-	* buildbot/test/test_vc.py (SourceStamp.addChange): pass in when=
- instead of setting .when after the fact
-
-2004-09-22 slyphon
-
- * buildbot/slave/trial.py: new SlaveCommand to machine-parse test
- results when the target project uses retrial. Still under
- development.
- * buildbot/test/test_trial.py: same
-
-2004-09-21 Brian Warner <warner@lothar.com>
-
- * buildbot/status/mail.py (MailNotifier.__init__): include
- success/warnings/failure in the Subject line
- (MailNotifier.buildMessage): add the buildbot's URL to the body,
- use step.logname for the addLogs=True attachment filenames
- * buildbot/test/test_status.py (Mail): test Subject lines
- (Mail.testLogs): test attachment filenames
-
- * buildbot/master.py (DebugPerspective.perspective_fakeChange):
- accept a 'who' argument from the debug tool
- * contrib/debugclient.py (DebugWidget.do_commit): send 'who'
- * contrib/debug.glade: add text box to set 'who'
-
- * buildbot/interfaces.py (IBuildStatus.getBuilder): replace
- .getBuilderName with .getBuilder().getName(), more flexible
- (IStatusLog.getName): logs have short names, but you can prefix
- them with log.getStep().getName() to make them more useful
- * buildbot/status/builder.py: same
- * buildbot/status/client.py: same
- * buildbot/status/html.py: same
- * buildbot/test/test_run.py (Status.testSlave): same
- * buildbot/process/step.py: tweak logfile names
-
- * buildbot/status/mail.py (MailNotifier): add lookup, change
- argument to extraRecipients. The notifier is now aimed at sending
- mail to the people involved in a particular build, with additional
- constant recipients as a secondary function.
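-
-	A minimal master.cfg sketch of the behavior described above. Only
-	the lookup=, extraRecipients=, sendToInterestedUsers= and addLogs=
-	names come from these entries; the getAddress() method name, the
-	addresses, and any other constructor details are illustrative
-	assumptions and the real signature may require more arguments:
-
-	    from buildbot.status import mail
-
-	    class ExampleLookup:
-	        # toy IEmailLookup: map a committer name to a mail address
-	        def getAddress(self, user):
-	            return "%s@example.org" % user
-
-	    c['status'].append(mail.MailNotifier(
-	        lookup=ExampleLookup(),                  # resolve blamelist members
-	        extraRecipients=["builds@example.org"],  # constant secondary recipients
-	        sendToInterestedUsers=True,              # False disables blamelist mail
-	        addLogs=False))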
-
- * buildbot/test/test_status.py: add coverage for IEmailLookup,
- including slow-lookup and failing-lookup. Make sure the blamelist
- members are included.
-
- * buildbot/interfaces.py: new interfaces IEmailSender+IEmailLookup
- (IBuildStatus.getResponsibleUsers): rename from getBlamelist
- (IBuildStatus.getInterestedUsers): new method
- * buildbot/status/builder.py (BuildStatus.getResponsibleUsers): same
- * buildbot/status/client.py (remote_getResponsibleUsers): same
- * buildbot/status/html.py (StatusResourceBuild.body): same
- * buildbot/test/test_run.py (Status.testSlave): same
-
-2004-09-20 Brian Warner <warner@lothar.com>
-
- * docs/users.xhtml: update concepts
-
- * Makefile: add a convenience makefile, for things like 'make
- test'. It is not included in the source tarball.
-
-2004-09-16 Brian Warner <warner@lothar.com>
-
- * NEWS: mention /usr/bin/buildbot, debian/*
-
- * debian/*: add preliminary debian packaging. Many thanks to
- Kirill Lapshin (and Kevin Turner) for the hard work. I've mangled
- it considerably since it left their hands, I am responsible for
- all breakage that's resulted.
-
- * bin/buildbot: create a top-level 'buildbot' command, to be
- installed in /usr/bin/buildbot . For now it's just a simple
- frontend to mktap/twistd/kill, but eventually it will be the entry
- point to the 'try' command and also a status client. It is also
- intended to support the upcoming debian-packaging init.d scripts.
- * buildbot/scripts/runner.py: the real work is done here
- * buildbot/scripts/__init__.py: need this too
- * buildbot/scripts/sample.cfg: this is installed in new
- buildmaster directories
- * setup.py: install new stuff
-
-2004-09-15 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py: skip SVN tests if svn can't handle the
- 'file:' schema (the version shipped with OS-X was built without the
- ra_local plugin).
- (SetupMixin.tearDown): stop the goofy twisted.web timer which
- updates the log-timestamp, to make sure it isn't still running after
- the test finishes
-
- * docs/config.xhtml: Add projectName, projectURL, buildbotURL
- values to the config file.
- * docs/examples/hello.cfg: add examples
- * buildbot/interfaces.py (IStatus.getBuildbotURL): define accessors
- * buildbot/status/builder.py (Status.getProjectURL): implement them
- * buildbot/master.py (BuildMaster.loadConfig): set them from config
- * buildbot/test/test_config.py (ConfigTest.testSimple): test them
- * buildbot/status/html.py (WaterfallStatusResource): display them
-
-
- * buildbot/test/test_vc.py (FakeBuilder.name): add attribute so
- certain error cases don't suffer a secondary exception.
- (top): Skip tests if the corresponding VC tool is not installed.
-
- * buildbot/process/factory.py (Trial): introduce separate
- 'buildpython' and 'trialpython' lists, since trialpython=[] is
- what you want to invoke /usr/bin/python, whereas ./setup.py is
- less likely to be executable. Add env= parameter to pass options
-	to test cases (which is how I usually write tests; I don't know if
- anyone else does it this way).
-
- * buildbot/process/step_twisted.py (Trial): handle python=None.
- Require 'testpath' be a string, not a list. Fix tests= typo.
- (Trial.start): sanity-check any PYTHONPATH value for stringness.
-
- * buildbot/process/step.py (RemoteCommand._remoteFailed): goofy
- way to deal with the possibility of removing the disconnect notify
- twice.
- (CVS): add a 'login' parameter to give a password to 'cvs login',
- commonly used with pserver methods (where pw="" or pw="guest")
-
- * buildbot/slave/commands.py (SourceBase): move common args
- extraction and setup() to __init__, so everything is ready by the
- time setup() is called
- (CVS.start): call 'cvs login' if a password was supplied
- (ShellCommand): special-case PYTHONPATH: prepend the master's
- value to any existing slave-local value.
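-
-	The PYTHONPATH special-casing amounts to something like the
-	following (a hypothetical helper for illustration, not the actual
-	ShellCommand code):
-
-	    import os
-
-	    def merge_pythonpath(master_value, slave_env):
-	        # prepend the master-supplied PYTHONPATH to any slave-local value
-	        env = slave_env.copy()
-	        existing = env.get("PYTHONPATH")
-	        if existing:
-	            env["PYTHONPATH"] = master_value + os.pathsep + existing
-	        else:
-	            env["PYTHONPATH"] = master_value
-	        return env
-
-	    # merge_pythonpath("/build/lib", {"PYTHONPATH": "/usr/lib/site"})
-	    #  -> {"PYTHONPATH": "/build/lib:/usr/lib/site"}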
-
- * buildbot/process/builder.py (Builder.updateBigStatus): if we
- don't have a remote, mark the builder as Offline. This whole
- function should probably go away and be replaced by individual
- deltas.
- (Builder.buildFinished): return the results to the build-finished
- deferred callback, helps with testing
-
-2004-09-14 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py: put all the repositories needed to run
- the complete tests into a single small (1.3MB) tarball, so I can
- make that tarball available on the buildbot web site. Test HTTP
- access (for Arch and Darcs) by spawning a temporary web server
- while the test runs.
-
- * docs/users.xhtml: new document, describe Buildbot's limited
- understanding of different human users
-
- * buildbot/test/test_vc.py: rearrange test cases a bit
-
- * buildbot/process/step_twisted.py (Trial): handle testpath=
- * buildbot/process/factory.py (Trial): update to use step.Trial
-
- * buildbot/slave/commands.py (ShellCommandPP): fix fatal typo
-
- * buildbot/status/builder.py (BuildStatus.getText): add text2 to
- the overall build text (which gives you 'failed 2 tests' rather
- than just 'failed')
- (BuildStepStatus.text2): default to [], not None
-
- * buildbot/process/step_twisted.py (Trial.commandComplete): text2
- must be a list
-
-2004-09-12 Brian Warner <warner@lothar.com>
-
- * buildbot/master.py (BotPerspective._commandsUnavailable): don't
- log the whole exception if it's just an AttributeError (old slave)
-
- * buildbot/process/step.py (ShellCommand.__init__): stash .workdir
- so (e.g.) sub-commands can be run in the right directory.
- (ShellCommand.start): accept an optional errorMessage= argument
- to make life easier for SVN.start
- (SVN.startVC): put the "can't do mode=export" warning in the LogFile
- headers
- (ShellCommand.start): move ['dir'] compatibility hack..
- (RemoteShellCommand.start): .. to here so everyone can use it
-
- * buildbot/process/step_twisted.py (Trial): use .workdir
-
- * buildbot/process/step_twisted.py (BuildDebs.getText): fix the
- text displayed when debuild fails completely
- (Trial): snarf _trial_temp/test.log from the slave and display it
-
-2004-09-11 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (ProcessDocs.getText): typo
-
- * buildbot/process/process_twisted.py (TwistedTrial.tests): oops,
- set to 'twisted', so --recurse can find twisted/web/test/*, etc
-
- * buildbot/process/step.py (ShellCommand): call .createSummary
- before .evaluateCommand instead of the other way around. This
- makes it slightly easier to count warnings and then use that to
- set results=WARNINGS
- * buildbot/process/step_twisted.py: cosmetic, swap the methods
-
- * buildbot/process/base.py (Build.buildFinished): update status
- before doing progress. It's embarrassing for the build to be stuck
-	in the "building" state when an exception occurs elsewhere..
-
- * buildbot/status/progress.py (Expectations.expectedBuildTime):
- python2.2 doesn't have 'sum'
-
- * buildbot/status/builder.py (Status.getBuilderNames): return a copy,
- to prevent clients from accidentally sorting it
-
- * buildbot/master.py (Manhole): add username/password
- (BuildMaster.loadConfig): use c['manhole']=Manhole() rather than
- c['manholePort'], deprecate old usage
- * docs/config.xhtml: document c['manhole']
- * docs/examples/hello.cfg: show example of using a Manhole
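-
-	A master.cfg fragment for the new key. Only the c['manhole'] key
-	and the fact that Manhole now takes a username/password come from
-	this entry; the argument order shown is a guess:
-
-	    from buildbot.master import Manhole
-	    c['manhole'] = Manhole(9999, "admin", "sekrit")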
-
-
- * buildbot/test/test_steps.py (FakeBuilder.getSlaveCommandVersion):
- pretend the slave is up to date
-
- * buildbot/status/builder.py (BuildStepStatus.stepFinished): 'log',
- the module, overlaps with 'log', the local variable
-
- * buildbot/status/html.py: oops, 2.2 needs __future__ for generators
-
- * buildbot/process/builder.py (Builder.getSlaveCommandVersion):
- new method to let Steps find out the version of their
- corresponding SlaveCommand.
- * buildbot/process/step.py (BuildStep.slaveVersion): utility method
- (ShellCommand.start): add 'dir' argument for <=0.5.0 slaves
- (CVS.startVC): backwards compatibility for <=0.5.0 slaves
- (SVN.startVC): same
- (Darcs.startVC): detect old slaves (missing the 'darcs' command)
- (Arch.startVC): same
- (P4Sync.startVC): same
-
- * buildbot/process/step.py (LoggedRemoteCommand.start): return the
- Deferred so we can catch errors in remote_startCommand
- (RemoteShellCommand.start): same
-
- * docs/examples/twisted_master.cfg: update sample config file
-
- * buildbot/slave/commands.py (ShellCommandPP): write to stdin
- after connectionMade() is called, not before. Close stdin at that
- point too.
-
- * buildbot/process/process_twisted.py: update to use Trial, clean
- up argument passing (move to argv arrays instead of string
- commands)
-
- * buildbot/process/step_twisted.py (Trial): new step to replace
- RunUnitTests, usable by any trial-using project (not just
- Twisted). Arguments have changed, see the docstring for details.
-
- * buildbot/process/base.py (Build.startBuild): this now returns a
- Deferred. Exceptions that occur during setupBuild are now
- caught better and lead to fewer build_status weirdnesses, like
- finishing a build that was never started.
- (Build.buildFinished): fire the Deferred instead of calling
- builder.buildFinished directly. The callback argument is this
- Build, everything else can be extracted from it, including the
- new build.results attribute.
- * buildbot/process/builder.py (Builder.startBuild): same
- (Builder.buildFinished): same, extract results from build
-
- * buildbot/process/step.py (ShellCommands): remove dead code
-
-2004-09-08 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_vc.py (VC.doPatch): verify that a new build
- doesn't try to use the leftover patched workdir
- (SourceStamp): test source-stamp computation for CVS and SVN
-
- * buildbot/slave/commands.py (SourceBase.doPatch): mark the
- patched workdir ('touch .buildbot-patched') so we don't try to
- update it later
- (SourceBase.start): add ['revision'] for all Source steps
- (CVS): change args: use ['branch'] for -r, remove ['files']
- (CVS.buildVC): fix revision/branch stuff
- (SVN): add revision stuff
-
- * buildbot/process/step.py (BuildStep.__init__): reject unknown
- kwargs (except 'workdir') to avoid silent spelling errors
- (ShellCommand.__init__): same
- (Source): new base class for CVS/SVN/etc. Factor out everything
- common, add revision computation (perform the checkout with a -D
- DATE or -r REVISION that gets exactly the sources described by the
- last Change), overridable with step.alwaysUseLatest. Add patch
- handling (build.getSourceStamp can trigger the use of a base
- revision and a patch).
- (CVS, SVN, Darcs, Arch, P4Sync): refactor, remove leftover arguments
- * docs/steps.xhtml: update docs
- * docs/source.xhtml: mention .checkoutDelay
- * docs/examples/hello.cfg: show use of checkoutDelay, alwaysUseLatest
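-
-	The revision computation is roughly: check the tree out as of the
-	newest Change (plus an optional checkoutDelay), unless
-	alwaysUseLatest is set. A rough sketch of that rule, using
-	hypothetical names rather than the actual Source code:
-
-	    def compute_checkout_time(changes, checkout_delay=0,
-	                              always_use_latest=False):
-	        # returning None means "just check out the latest sources"
-	        if always_use_latest or not changes:
-	            return None
-	        return max([c.when for c in changes]) + checkout_delay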
-
- * buildbot/process/base.py (Build.setSourceStamp): add a
- .sourceStamp attribute to each Build. If set, this indicates that
- the build should be done with something other than the most
- recent source tree. This will be used to implement "try" builds.
- (Build.allChanges): new support method
- (Build.lastChangeTime): remove, functionality moved to Source steps
- (Build.setupBuild): copy the Step args before adding ['workdir'],
- to avoid modifying the BuildFactory (and thus triggering spurious
- config changes)
-
-
- * buildbot/status/html.py: rename s/commits/changes/
- (StatusResourceChanges): same
- (CommitBox.getBox): same, update URL
- (WaterfallStatusResource): same
- (StatusResource.getChild): same
-
- * contrib/debugclient.py (DebugWidget.do_commit): send .revision
- * contrib/debug.glade: add optional 'revision' to the fakeChange
-
- * buildbot/changes/changes.py (html_tmpl): display .revision
- (ChangeMaster.addChange): note .revision in log
- * buildbot/changes/pb.py (ChangePerspective.perspective_addChange):
- accept a ['revision'] attribute
-
- * buildbot/process/factory.py (BuildFactory): use ComparableMixin
-
- * buildbot/master.py (BotMaster.getPerspective): update the
- .connected flag in SlaveStatus when it connects
- (BotMaster.detach): and when it disconnects
- (DebugPerspective.perspective_fakeChange): take a 'revision' attr
- (BuildMaster.loadConfig_Builders): walk old list correctly
-
- * buildbot/test/test_config.py: fix prefix= usage
-
-2004-09-06 Brian Warner <warner@lothar.com>
-
- * NEWS: mention P4
-
- * buildbot/changes/p4poller.py (P4Source): New ChangeSource to
- poll a P4 depot looking for recent changes. Thanks to Dave
- Peticolas for the contribution. Probably needs some testing after
- I mangled it.
-
- * buildbot/process/step.py (P4Sync): simple P4 source-updater,
- requires manual client setup for each buildslave. Rather
- experimental. Thanks again to Dave Peticolas.
- * buildbot/slave/commands.py (P4Sync): slave-side source-updater
-
- * buildbot/changes/changes.py (Change): add a .revision attribute,
- which will eventually be used to generate source-stamp values.
-
- * buildbot/process/step.py (RemoteCommand.start): use
- notifyOnDisconnect to notice when we lose the slave, then treat it
- like an exception. This allows LogFiles to be closed and the build
- to be wrapped up normally. Be sure to remove the disconnect
- notification when the step completes so we don't accumulate a
- bazillion such notifications which will fire weeks later (when the
- slave finally disconnects normally). Fixes SF#915807, thanks to
- spiv (Andrew Bennetts) for the report.
- (LoggedRemoteCommand): move __init__ code to RemoteCommand, since it
- really isn't Logged- specific
- (LoggedRemoteCommand.remoteFailed): Add an extra newline to the
- header, since it's almost always going to be appended to an
- incomplete line
- * buildbot/test/test_steps.py (BuildStep.testShellCommand1):
- update test to handle use of notifyOnDisconnect
-
- * buildbot/status/builder.py (BuilderStatus.currentlyOffline):
- don't clear .ETA and .currentBuild when going offline, let the
- current build clean up after itself
-
- * buildbot/process/builder.py (Builder.detached): wait a moment
- before doing things like stopping the current build, because the
-	current step will probably notice the disconnect and clean up the
- build by itself
- * buildbot/test/test_run.py (Status.tearDown): update test to
- handle asynchronous build-detachment
-
- * buildbot/process/base.py (Build.stopBuild): minor shuffles
-
- * buildbot/status/html.py (WaterfallStatusResource.buildGrid):
- hush a debug message
-
-2004-09-05 Brian Warner <warner@lothar.com>
-
- * buildbot/changes/maildir.py (Maildir.start): catch an IOError
- when the dnotify fcntl() fails and fall back to polling. Linux 2.2
- kernels do this: the fcntl module has the F_NOTIFY constant, but
- the kernel itself doesn't support the operation. Thanks to Olly
- Betts for spotting the problem.
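-
-	The failure mode and fallback look roughly like this (an
-	illustration of the pattern, not the actual Maildir.start code):
-
-	    import fcntl
-
-	    def watch_or_poll(dirfd, start_polling):
-	        # F_NOTIFY may exist in the fcntl module while the running
-	        # kernel (e.g. Linux 2.2) refuses the operation
-	        try:
-	            fcntl.fcntl(dirfd, fcntl.F_NOTIFY,
-	                        fcntl.DN_CREATE | fcntl.DN_RENAME |
-	                        fcntl.DN_MULTISHOT)
-	        except (IOError, AttributeError):
-	            start_polling()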
-
- * buildbot/process/step.py (Darcs): new source-checkout command
- (Arch): new source-checkout command
- (todo_P4): fix constructor syntax, still just a placeholder
- * buildbot/test/test_vc.py (VC.testDarcs): test it
- (VC.testDarcsHTTP): same, via localhost HTTP
- (VC.testArch): same
- (VC.testArchHTTP): same
- * NEWS: mention new features
-
- * buildbot/slave/commands.py (ShellCommand): add .keepStdout,
- which tells the step to stash stdout text locally (in .stdout).
- Slave-side Commands can use this to make decisions based upon the
-	output of the ShellCommand (not just the exit code).
- (Darcs): New source-checkout command
- (Arch): New source-checkout command, uses .keepStdout in one place
- where it needs to discover the archive's default name.
-
- * docs/steps.xhtml: Document options taken by Darcs and Arch.
- * docs/source.xhtml: add brief descriptions of Darcs and Arch
- * docs/examples/hello.cfg: add examples of Darcs and Arch checkout
-
- * buildbot/process/step.py (ShellCommand.describe): add an
- alternate .descriptionDone attribute which provides descriptive
- text when the step is complete. .description can be ["compiling"],
- for use while the step is running, then .descriptionDone can be
- ["compile"], used alone when the step succeeds or with "failed" when
- it does not. Updated other steps to use the new text.
- * buildbot/process/step_twisted.py: same
- * buildbot/test/test_run.py: update tests to match
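-
-	For example, a step that wants the new two-phase text might look
-	like this (an illustrative subclass; only description= and
-	descriptionDone= are taken from this entry, the other class
-	attributes are assumptions):
-
-	    from buildbot.process.step import ShellCommand
-
-	    class ExampleCompile(ShellCommand):
-	        name = "compile"
-	        command = ["make", "all"]
-	        description = ["compiling"]    # shown while the step runs
-	        descriptionDone = ["compile"]  # shown once it has finished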
-
-2004-08-30 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step.py (ShellCommand.createSummary): fix docs
- (CVS.__init__): send 'patch' argument to slave
- (CVS.start): don't create the LoggedRemoteCommand until start(),
- so we can catch a .patch added after __init__
- (SVN.__init__): add 'patch' to SVN too
- (SVN.start): same
-
- * buildbot/slave/commands.py (ShellCommand): add a 'stdin'
- argument, to let commands push data into the process' stdin pipe.
- Move usePTY to a per-instance attribute, and clear it if 'stdin'
- is in use, since closing a PTY doesn't really affect the process
- in the right way (in particular, I couldn't run /usr/bin/patch
- under a pty).
- (SourceBase.doPatch): handle 'patch' argument
-
- * buildbot/test/test_vc.py (VC.doPatch): test 'patch' argument for
- both CVS and SVN
-
- * buildbot/slave/commands.py (cvs_ver): fix version-parsing goo
- * buildbot/slave/bot.py (Bot.remote_getCommands): send command
- versions to master
- * buildbot/master.py (BotPerspective.got_commands): get command
- versions from slave, give to each builder
- * buildbot/process/builder.py (Builder.attached): stash slave
- command versions in .remoteCommands
-
- * docs/steps.xhtml: bring docs in-line with reality
-
- * buildbot/process/step.py (CVS.__init__): more brutal
- compatibility code removal
- (SVN.__init__): same
-
- * buildbot/slave/commands.py (SlaveShellCommand): update docs
- (SlaveShellCommand.start): require ['workdir'] argument, remove
- the ['dir'] fallback (compatibility will come later)
- (SourceBase): update docs
- (SourceBase.start): remove ['directory'] fallback
- (CVS): update docs
- (SVN): update docs
- * buildbot/test/test_config.py (ConfigTest.testBuilders): update test
- * buildbot/test/test_steps.py (BuildStep.testShellCommand1): same
- * buildbot/test/test_slavecommand.py (SlaveCommandTestCase): same
-
- * buildbot/process/step.py (RemoteShellCommand.__init__): add
- want_stdout/want_stderr. remove old 'dir' keyword (to simplify the
- code.. I will figure out 0.5.0-compatibility hooks later)
-
-2004-08-30 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py: rewrite in terms of new
- BuildFactory base class. It got significantly shorter. Yay
- negative code days.
-
- * buildbot/process/step_twisted.py (HLint.start): fix to make it
- work with the new "self.build isn't nailed down until we call
- step.start()" scheme: specifically, __init__ is called before the
- build has decided on which Changes are going in, so we don't scan
- build.allFiles() for .xhtml files until start()
- (HLint.commandComplete): use getText(), not getStdout()
- (RunUnitTests.start): same: don't use .build until start()
- (RunUnitTests.describe): oops, don't report (None) when using
- the default reactor
- (RunUnitTests.commandComplete): use getText()
- (RunUnitTests.createSummary): same
- (BuildDebs.commandComplete): same
-
- * buildbot/process/step.py (RemoteShellCommand.__init__): don't
- set args['command'] until start(), since our BuildStep is allowed
-	to change its mind up until that point
- (TreeSize.commandComplete): use getText(), not getStdout()
-
- * docs/examples/twisted_master.cfg: update to current standards
-
- * docs/factories.xhtml: update
- * buildbot/process/factory.py: implement all the common factories
- described in the docs. The Trial factory doesn't work yet, and
- I've probably broken all the process_twisted.py factories in the
- process. There are compatibility classes left in for things like
- the old BasicBuildFactory, but subclasses of them are unlikely to
- work.
- * docs/examples/glib_master.cfg: use new BuildFactories
- * docs/examples/hello.cfg: same
-
- * buildbot/test/test_config.py (ConfigTest.testBuilders): remove
- explicit 'workdir' args
-
- * buildbot/process/base.py (BuildFactory): move factories to ..
- * buildbot/process/factory.py (BuildFactory): .. here
- * buildbot/process/process_twisted.py: handle move
- * buildbot/test/test_config.py: same
- * buildbot/test/test_run.py: same
- * buildbot/test/test_steps.py: same
- * buildbot/test/test_vc.py: same
- * docs/factories.xhtml: same
-
- * NEWS: mention config changes that require updating master.cfg
-
- * buildbot/process/base.py (Build.setupBuild): add a 'workdir'
- argument to all steps that weren't given one already, pointing at
- the "build/" directory.
-
- * docs/examples/hello.cfg: remove explicit 'workdir' args
-
-	* docs/factories.xhtml: document standard BuildFactory classes,
-	including a bunch which have not yet been written
-
-2004-08-29 Brian Warner <warner@lothar.com>
-
- * buildbot/interfaces.py (IBuildStepStatus.getResults): move
- result constants (SUCCESS, WARNINGS, FAILURE, SKIPPED) to
- buildbot.status.builder so they aren't quite so internal
- * buildbot/process/base.py, buildbot/process/builder.py: same
- * buildbot/process/maxq.py, buildbot/process/step.py: same
- * buildbot/process/step_twisted.py, buildbot/status/builder.py: same
- * buildbot/status/mail.py, buildbot/test/test_run.py: same
- * buildbot/test/test_status.py, buildbot/test/test_vc.py: same
-
- * buildbot/status/html.py (StatusResourceBuildStep): oops, update
- to handle new getLogs()-returns-list behavior
- (StatusResourceBuildStep.getChild): same
- (StepBox.getBox): same
- (WaterfallStatusResource.phase0): same
-
- * docs/source.xhtml: document how Buildbot uses version-control
- systems (output side: how we get source trees)
- * docs/changes.xhtml: rename from sources.xhtml, documents VC
- systems (input side: how we learn about Changes)
-
- * buildbot/master.py (Manhole): use ComparableMixin
- * buildbot/changes/freshcvs.py (FreshCVSSourceNewcred): same
- * buildbot/changes/mail.py (MaildirSource): same
- * buildbot/status/client.py (PBListener): same
- * buildbot/status/html.py (Waterfall): same
- * buildbot/status/words.py (IRC): same
-
- * NEWS: start describing new features
-
- * buildbot/status/mail.py (MailNotifier): finish implementation.
- The message body is still a bit sparse.
- * buildbot/test/test_status.py (Mail): test it
-
- * buildbot/util.py (ComparableMixin): class to provide the __cmp__
- and __hash__ methods I wind up adding everywhere. Specifically
- intended to support the buildbot config-file update scheme where
- we compare, say, the old list of IStatusTargets against the new
- one and don't touch something which shows up on both lists.
- * buildbot/test/test_util.py (Compare): test case for it
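-
-	The idea, in miniature (a sketch of the concept, not the actual
-	buildbot/util.py implementation; written in the __cmp__ style of
-	this Python 2 era):
-
-	    class ComparableMixin:
-	        # subclasses list the attributes that define their identity
-	        compare_attrs = []
-
-	        def __cmp__(self, other):
-	            if not isinstance(other, self.__class__):
-	                return cmp(self.__class__, other.__class__)
-	            ours = [getattr(self, a, None) for a in self.compare_attrs]
-	            theirs = [getattr(other, a, None) for a in self.compare_attrs]
-	            return cmp(ours, theirs)
-
-	        def __hash__(self):
-	            vals = [str(getattr(self, a, None)) for a in self.compare_attrs]
-	            return hash((self.__class__.__name__,) + tuple(vals))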
-
- * buildbot/interfaces.py (IBuildStatus): change .getLogs() to
- return a list instead of a dict
- (IBuildStepStatus.getLogs): same. The idea is that steps create
- logs with vaguely unique names (although their uniqueness is not
- guaranteed). Thus a compilation step should create its sole
- logfile with the name 'compile', and contribute it to the
- BuildStatus. If a step has two logfiles, try to create them with
- different names (like 'test.log' and 'test.summary'), and only
- contribute the important ones to the overall BuildStatus.
- * buildbot/status/builder.py (Event.getLogs): same
- (BuildStepStatus): fix default .text and .results
- (BuildStepStatus.addLog): switch to list-like .getLogs()
- (BuildStepStatus.stepFinished): same
- (BuildStatus.text): fix default .text
- (BuildStatus.getLogs): temporary hack to return all logs (from all
- child BuildStepStatus objects). Needs to be fixed to only report
- the significant ones (as contributed by the steps themselves)
- * buildbot/test/test_run.py: handle list-like .getLogs()
- * buildbot/test/test_steps.py (BuildStep.testShellCommand1): same
-
-2004-08-28 Brian Warner <warner@lothar.com>
-
- * buildbot/process/builder.py (Builder.attached): serialize the
- attachment process, so the attach-watcher isn't called until the
- slave is really available. Add detached watchers too, which makes
- testing easier.
-
- * buildbot/test/test_vc.py: test VC modes (clobber/update/etc)
-
- * buildbot/test/test_swap.py: remove dead code
-
- * buildbot/slave/commands.py (ShellCommandPP): add debug messages
- (ShellCommand.start): treat errors in _startCommand/spawnProcess
- sort of as if the command being run exited with a -1. There may
- still be some holes in this scheme.
- (CVSCommand): add 'revision' tag to the VC commands, make sure the
- -r option appears before the module list
- * buildbot/process/step.py (CVS): add 'revision' argument
-
- * buildbot/slave/bot.py (SlaveBuilder._ackFailed): catch failures
- when sending updates or stepComplete messages to the master, since
- we don't currently care whether they arrive or not. When we revamp
- the master/slave protocol to really resume interrupted builds,
- this will need revisiting.
- (lostRemote): remove spurious print
-
- * buildbot/master.py (BotPerspective.attached): serialize the
- new-builder interrogation process, to make testing easier
- (BotMaster.waitUntilBuilderDetached): convenience function
-
- * buildbot/status/builder.py (BuilderStatus): prune old builds
- (BuildStatus.pruneSteps): .. and steps
- (BuildStepStatus.pruneLogs): .. and logs
- (BuilderStatus.getBuild): handle missing builds
- * buildbot/status/html.py (StatusResourceBuild.body): display build
- status in the per-build page
- (BuildBox.getBox): color finished builds in the per-build box
-
-2004-08-27 Brian Warner <warner@lothar.com>
-
- * buildbot/status/mail.py (MailNotifier): new notification class,
- not yet finished
-
- * buildbot/slave/commands.py (SourceBase): refactor SVN and CVS into
- variants of a common base class which handles all the mode= logic
-
- * buildbot/interfaces.py (IBuildStatus.getPreviousBuild): add
- convenience method
- * buildbot/status/builder.py (BuildStatus.getPreviousBuild): same
-
-2004-08-26 Brian Warner <warner@lothar.com>
-
-	* buildbot/test/test_slavecommand.py: accommodate new slavecommand
- interfaces
-
- * buildbot/test/test_run.py: update to new Logfile interface, new
- buildbot.slave modules
- * buildbot/test/test_steps.py: same, remove Swappable, add timeouts
-
- * MANIFEST.in: new sample config file
- * docs/examples/hello.cfg: same
-
- * buildbot/process/step_twisted.py: remove dead import
-
- * buildbot/process/step.py (RemoteCommand.run): catch errors
- during .start
- (RemoteCommand.remote_update): ignore updates that arrive after
- we've shut down
- (RemoteCommand.remote_complete): ignore duplicate complete msgs
- (RemoteCommand._remoteComplete): cleanup failure handling, reduce
- the responsibilities of the subclass's methods
- (BuildStep.failed): catch errors during failure processing
- (BuildStep.addHTMLLog): provide all-HTML logfiles (from Failures)
- (CVS): move to a mode= argument (described in docstring), rather
- than the ungainly clobber=/export=/copydir= combination.
- (SVN): add mode= functionality to SVN too
- (todo_Darcs, todo_Arch, todo_P4): placeholders for future work
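-
-	A configuration fragment showing the new mode= argument. Only
-	mode= and its values come from these entries; the surrounding
-	step-specification syntax and the other argument names are
-	assumptions:
-
-	    from buildbot.process.step import CVS
-
-	    checkout = (CVS, {"cvsroot": ":pserver:anon@cvs.example.org:/cvsroot",
-	                      "cvsmodule": "hello",
-	                      "mode": "update"})  # other modes include "clobber"
-	                                          # and "export"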
-
- * buildbot/process/base.py (Build.startNextStep): catch errors
- during s.startStep()
-
- * buildbot/clients/base.py: update to new PB client interface.
- gtkPanes is still broken
-
- * buildbot/bot.py, buildbot/slavecommand.py: move to..
- * buildbot/slave/bot.py, buildbot/slave/commands.py: .. new directory
- * setup.py: add buildbot.slave module
- * buildbot/bb_tap.py: handle move
- * buildbot/slave/registry.py: place to register commands, w/versions
- * buildbot/slave/bot.py: major simplifications
- (SlaveBuilder.remote_startCommand): use registry for slave commands,
- instead of a fixed table. Eventually this will make the slave more
- extensible. Use 'start' method on the command, not .startCommand.
- Fix unsafeTracebacks handling (I think).
- * buildbot/slave/commands.py: major cleanup. ShellCommand is now a
- helper class with a .start method that returns a Deferred.
- SlaveShellCommand is the form reached by the buildmaster. Commands
- which use multiple ShellCommands can just chain them as Deferreds,
- with some helper methods in Command (_abandonOnFailure and
- _checkAbandoned) to bail on rc!=0.
- (CVSCommand): prefer new mode= argument
- (SVNFetch): add mode= argument
-
- * buildbot/master.py (DebugPerspective.perspective_forceBuild):
- put a useful reason string on the build
-
- * buildbot/status/builder.py (LogFile): do LogFile right: move the
- core functionality into an IStatusLog object
- (BuildStatus.sendETAUpdate): don't send empty build-eta messages
- * buildbot/status/html.py (TextLog): HTML-rendering goes here
- (StatusResourceBuild.body): use proper accessor methods
- * buildbot/status/client.py (RemoteLog): PB-access goes here
- (StatusClientPerspective.perspective_subscribe): add "full" mode,
- which delivers log contents too
- (PBListener.__cmp__): make PBListeners comparable, thus removeable
- * buildbot/status/event.py: remove old Logfile completely
-
- * buildbot/interfaces.py (IStatusLog.subscribe): make the
- subscription interface for IStatusLog subscriptions just like all
-	the other status subscriptions
- (IStatusReceiver.logChunk): method called on subscribers
-
-2004-08-24 Brian Warner <warner@lothar.com>
-
- * buildbot/process/builder.py (Builder._pong): oops, ping response
- includes a result (the implicit None returned by remote_print).
- Accept it so the _pong method handles the response correctly.
-
-2004-08-06 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_config.py: update IRC, PBListener tests
-
- * buildbot/status/client.py (StatusClientPerspective): total
- rewrite to match new IStatus interfaces. New subscription scheme.
- There are still a few optimizations to make (sending down extra
- information with event messages so the client doesn't have to do a
- round trip). The logfile-retrieval code is probably still broken.
- Moved the PB service into its own port, you can no longer share a
- TCP socket between a PBListener and, say, the slaveport (this
- should be fixed eventually).
- * buildbot/clients/base.py (Client): revamp to match. still needs
- a lot of work, but basic event reporting works fine. gtkPanes is
- completely broken.
-
- * buildbot/status/words.py (IRC): move to c['status']. Each IRC
- instance talks to a single irc server. Threw out all the old
- multi-server handling code. Still need to add back in
- builder-control (i.e. "force build")
-
- * buildbot/status/html.py (StatusResourceBuildStep.body): add some
- more random text to the as-yet-unreachable per-step page
-
- * buildbot/status/builder.py (BuildStepStatus.sendETAUpdate):
- rename to stepETAUpdate
- (BuildStatus.subscribe): add build-wide ETA updates
- (BuilderStatus.getState): remove more cruft
- (BuilderStatus.getCurrentBuild): remove more cruft
- (BuilderStatus.buildStarted): really handle tuple-subscription
- * buildbot/test/test_run.py (Status.testSlave): handle the
- stepETAUpdate rename
-
- * buildbot/master.py (BuildMaster): don't add a default
- StatusClientService. Don't add a default IrcStatusFactory. Both
- are now added through c['status'] in the config file. c['irc'] is
- accepted for backwards compatibility, the only quirk is you cannot
- use c['irc'] to specify IRC servers on ports other than 6667.
-
- * buildbot/interfaces.py (IBuildStatus.getCurrentStep): add method
- (IStatusReceiver.buildStarted): allow update-interval on subscribe
- (IStatusReceiver.buildETAUpdate): send build-wide ETA updates
- (IStatusReceiver.stepETAUpdate): rename since it's step-specific
-
-
- * buildbot/master.py (BuildMaster.startService): SIGHUP now causes
- the buildmaster to re-read its config file
-
-
- * buildbot/test/test_web.py (test_webPortnum): need a new hack to
- find out the port our server is running on
- (WebTest.test_webPathname_port): same
-
- * buildbot/test/test_config.py (testWebPortnum): test it
- (testWebPathname): ditto
-
- * docs/config.xhtml: document new c['status'] configuration option
-
- * buildbot/status/html.py (Waterfall): new top-level class which
- can be added to c['status']. This creates the Site as well as the
- necessary TCPServer/UNIXServer. It goes through the BuildMaster,
- reachable as .parent, for everything.
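-
-	A master.cfg fragment using the new c['status'] list. The
-	Waterfall and IRC class names come from these entries; every
-	constructor argument shown here is an illustrative guess:
-
-	    from buildbot.status import html, words
-
-	    c['status'] = [
-	        html.Waterfall(http_port=8010),
-	        words.IRC(host="irc.example.org", nick="bb",
-	                  channels=["#example"]),
-	    ]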
-
- * buildbot/master.py (Manhole): make it a normal service Child
- (BuildMaster.loadConfig_status): c['status'] replaces webPortnum and
- webPathname. It will eventually replace c['irc'] and the implicit
- PB listener as well. c['webPortnum'] and c['webPathname'] are left
- in as (deprecated) backward compatibility hooks for now.
-
-
- * buildbot/process/builder.py (Builder.buildFinished): don't
-	inform our builder_status about a finished build, as it finds out
- through its child BuildStatus object
-
- * buildbot/status/html.py: extensive revamp. Use adapters to make
- Boxes out of BuildStepStatus and friends. Acknowledge that Steps
- have both starting and finishing times and adjust the waterfall
- display accordingly, using spacers if necessary. Use SlaveStatus
- to get buildslave info.
- (StatusResourceBuildStep): new just-one-step resource, used to get
- logfiles. No actual href to it yet.
-
- * buildbot/status/event.py (Logfile.doSwap): disable Swappable for
- the time being, until I get the file-naming scheme right
-
- * buildbot/status/builder.py (Event): clean started/finished names
- (BuildStatus.isFinished): .finished is not None is the right test
- (BuildStatus.buildStarted): track started/finished times ourselves
- (BuilderStatus.getSlave): provide access to SlaveStatus object
- (BuilderStatus.getLastFinishedBuild): all builds are now in
-	.builds, even the currently-running one. Accommodate this change.
- (BuilderStatus.eventGenerator): new per-builder event generator.
- Returns BuildStepStatus and BuildStatus objects, since they can
- both be adapted as necessary.
- (BuilderStatus.addEvent): clean up started/finished attributes
- (BuilderStatus.startBuild,finishBuild): remove dead code
- (SlaveStatus): new object to provide ISlaveStatus
-
- * buildbot/process/step.py (ShellCommand.getColor): actually
- return the color instead of setting it ourselves
- (CVS.__init__): pull .timeout and .workdir options out of
- **kwargs, since BuildStep will ignore them. Without this neither
- will be sent to the slave correctly.
- (SVN.__init__): same
-
- * buildbot/process/builder.py (Builder): move flags to class-level
- attributes
- (Builder.attached): remove .remoteInfo, let the BotPerspective and
- SlaveStatus handle that
-
- * buildbot/process/base.py (Build.firstEvent): remove dead code
- (Build.stopBuild): bugfix
-
- * buildbot/changes/pb.py (PBChangeSource.describe): add method
-
- * buildbot/changes/changes.py (Change): add IStatusEvent methods
- (ChangeMaster.eventGenerator): yield Changes, since there are now
- Adapters to turn them into HTML boxes
-
- * buildbot/master.py (BotMaster): track SlaveStatus from BotMaster
- (BotPerspective.attached): feed a SlaveStatus object
- (BuildMaster.loadConfig): add a manhole port (debug over telnet)
- (BuildMaster.loadConfig_Builders): give BuilderStatus a parent
-
- * buildbot/interfaces.py: API additions
- (ISlaveStatus): place to get slave status
-
-2004-08-04 Brian Warner <warner@lothar.com>
-
- * buildbot/slavecommand.py (DummyCommand.finished): send rc=0 when
- the delay finishes, so the step is marked as SUCCESS
-
- * buildbot/test/test_run.py (Status.testSlave): cover more of
- IBuildStatus and IBuildStepStatus
-
- * buildbot/status/progress.py (StepProgress): move some flags to
- class-level attributes
- (StepProgress.remaining): if there are no other progress metrics
- to go by, fall back to elapsed time
- (StepProgress.setExpectations): take a dict of metrics instead of
- a list
- (BuildProgress.setExpectationsFrom): pull expectations from the
- Expectations, instead of having it push them to the BuildProgress
- (Expectations): move some flags to class-level attributes
- (Expectations.__init__): copy per-step times from the
- BuildProgress too
- (Expectations.expectedBuildTime): new method for per-build ETA
-
- * buildbot/status/event.py (Logfile): move some flags to
- class-level attributes
- (Logfile.logProgressTo): better method name, let step set the
- progress axis name (instead of always being "output")
-
- * buildbot/status/builder.py (BuildStepStatus.getTimes): track the
- times directly, rather than depending upon the (possibly missing)
- .progress object. Use 'None' to indicate "not started/finished
- yet"
- (BuildStepStatus.getExpectations): oops, return the full list of
- expectations
- (BuilderStatus._buildFinished): append finished builds to .builds
-
- * buildbot/process/step.py (BuildStep): add separate .useProgress
- flag, since empty .progressMetrics[] still implies that time is a
- useful predictor
- (CVS): set up the cmd in __init__, instead of waiting for start()
-
- * buildbot/process/base.py (Build.startBuild): disable the 'when'
- calculation, this will eventually turn into a proper sourceStamp
- (Build.setupBuild): tell the Progress to load from the Expectations,
- instead of having the Expectations stuff things into the Progress
- (Build.buildException): add a build-level errback to make sure the
- build's Deferred fires even in case of exceptions
-
- * buildbot/master.py (BotMaster.forceBuild): convey the reason into
- the forced build
- * buildbot/process/builder.py (Builder.forceBuild): convey the
- reason instead of creating a fake Change
-
- * docs/examples/twisted_master.cfg: update to match reality
-
- * buildbot/test/test_config.py, buildbot/test/test_process.py:
- * buildbot/test/test_run.py, buildbot/test/test_steps.py:
- fix or remove broken/breaking tests
-
- * buildbot/status/event.py (Logfile.__len__): remove evil method
-
- * buildbot/status/builder.py (BuildStepStatus.stepStarted): tolerate
- missing .build, for test convenience
-
- * buildbot/process/step_twisted.py: import fixes
-
- * buildbot/process/step.py (BuildStep.failed): exception is FAILURE
-
- * buildbot/master.py (BuildMaster.loadConfig_Builders): leftover
- .statusbag reference
-
- * buildbot/bot.py (BuildSlave.stopService): tear down the TCP
- connection at shutdown, and stop it from reconnecting
-
- * buildbot/test/test_run.py (Run.testSlave): use a RemoteDummy to
- chase down remote-execution bugs
-
- * buildbot/process/step.py: more fixes, remove
- BuildStep.setStatus()
- * buildbot/status/builder.py: move setStatus() functionality into
- BuildStatus.addStep
- * buildbot/status/event.py: minor fixes
-
-2004-08-03 Brian Warner <warner@lothar.com>
-
- * buildbot/process/base.py, buildbot/process/builder.py
- * buildbot/process/step.py, buildbot/status/builder.py
- * buildbot/status/event.py, buildbot/test/test_run.py:
- fix status delivery, get a basic test case working
- * buildbot/master.py: finish implementing basic status delivery,
- temporarily disable HTML/IRC/PB status sources
-
- * buildbot/bot.py (Bot.remote_setBuilderList): remove debug noise
-
- * buildbot/status/progress.py (BuildProgress): remove dead code
-
- * buildbot/interfaces.py
- * buildbot/process/base.py, buildbot/process/builder.py
- * buildbot/process/step.py, buildbot/process/step_twisted.py
-	* buildbot/status/builder.py: Complete overhaul of all the
- status-delivery code, unifying all types of status clients (HTML,
- IRC, PB). See interfaces.IBuildStatus for an idea of what it will
- look like. This commit is a checkpointing of the work-in-progress:
- the input side is mostly done (Builders/Builds sending status
- to the BuilderStatus/BuildStatus objects), but the output side has
- not yet been started (HTML resources querying BuilderStatus
- objects). Things are probably very broken right now and may remain
- so for several weeks, I apologize for the disruption.
-
- * buildbot/status/event.py: add a setHTML method to use pre-rendered
- HTML as the log's contents. Currently used for exception tracebacks.
- * buildbot/status/progress.py: minor spelling changes
-
-2004-08-02 Brian Warner <warner@lothar.com>
-
- * docs/config.xhtml: XHTML fixes, makes raw .xhtml files viewable
- in mozilla. Also added stylesheets copied from Twisted's docs.
- Remember that these files are meant to be run through Lore first.
- Thanks to Philipp Frauenfelder for the fixes.
- * docs/factories.xhtml, docs/sources.xhtml, docs/steps.xhtml: same
- * docs/stylesheet-unprocessed.css, docs/stylesheet.css: same
- * docs/template.tpl: added a Lore template
-
-2004-07-29 Brian Warner <warner@lothar.com>
-
- * buildbot/interfaces.py: revamp status delivery. This is the
- preview: these are the Interfaces that will be provided by new
- Builder code, and to which the current HTML/IRC/PB status
- displayers will be adapted.
-
- * buildbot/slavecommand.py (ShellCommand.start): look for .usePTY
- on the SlaveBuilder, not the Bot.
- * buildbot/bot.py (Bot.remote_setBuilderList): copy Bot.usePTY to
- SlaveBuilder.usePTY
- * buildbot/test/test_slavecommand.py (FakeSlaveBuilder.usePTY):
- set .usePTY on the FakeSlaveBuilder
-
-2004-07-25 Brian Warner <warner@lothar.com>
-
- * buildbot/changes/freshcvs.py: add some debug log messages
- (FreshCVSConnectionFactory.gotPerspective): pre-emptively fix the
- disabled 'setFilter' syntax
- (FreshCVSSourceNewcred.__init__): warn about prefix= values that
- don't end with a slash
-
- * buildbot/process/base.py (Builder._pong_failed): add TODO note
-
- * setup.py: bump to 0.5.0+ while between releases
-
-2004-07-23 Brian Warner <warner@lothar.com>
-
- * setup.py (version): Releasing buildbot-0.5.0
-
-2004-07-23 Brian Warner <warner@lothar.com>
-
- * README: update for 0.5.0 release
-
- * NEWS: update for 0.5.0 release
-
-2004-07-22 Brian Warner <warner@lothar.com>
-
- * buildbot/slavecommand.py (ShellCommand): make usePTY a
- mktap-time configuration flag (--usepty=1, --usepty=0)
- * buildbot/bot.py: same
-
- * buildbot/master.py (BotPerspective.got_dirs): don't complain about
- an 'info' directory being unwanted
-
- * buildbot/changes/freshcvs.py (FreshCVSSource): flip the
- newcred/oldcred switch. Newcred (for CVSToys-1.0.10 and later) is now
-	the default. To communicate with an oldcred daemon (CVSToys-1.0.9
- and earlier), use a FreshCVSSourceOldcred instead.
- (test): simple test routine: connect to server, print changes
-
- * buildbot/changes/changes.py (Change.getTime): make it possible
- to print un-timestamped changes
-
- * buildbot/master.py (makeApp): delete ancient dead code
- (BuildMaster.loadTheConfigFile): make "master.cfg" name configurable
- * buildbot/test/test_config.py (testFindConfigFile): test it
-
- * docs/examples/twisted_master.cfg (b22w32): use iocp reactor
- instead of win32 one
-
-
- * buildbot/master.py (BuildMaster.loadConfig_Builders): config file
- now takes a dictionary instead of a tuple. See docs/config.xhtml for
- details.
-
- * buildbot/process/base.py (Builder.__init__): change constructor
- to accept a dictionary of config data, rather than discrete
- name/slave/builddir/factory arguments
-
- * docs/examples/twisted_master.cfg: update to new syntax
- * docs/examples/glib_master.cfg: same
- * buildbot/test/test_config.py (ConfigTest.testBuilders): some
- rough tests of the new syntax
-
-
- * buildbot/master.py (BuildMaster.loadConfig): allow webPathname
- to be an int, which means "run a web.distrib sub-server on a TCP
- port". This lets you publish the buildbot status page to a remote
- twisted.web server (using distrib.ResourceSubscription). Also
- rename the local attributes used to hold these web things so
- they're more in touch with reality.
- * buildbot/test/test_web.py: test webPortnum and webPathname
- * docs/config.xhtml: document this new use of webPathname
-
- * docs/config.xhtml: new document, slightly ahead of reality
-
- * buildbot/changes/freshcvs.py (FreshCVSSourceNewcred.notify): fix
- 'prefix' handling: treat it as a simple string to check with
- .startswith, instead of treating it as a directory. This allows
- sub-directories to be used. If you use prefix=, you should give it
- a string that starts just below the CVSROOT and ends with a slash.
- This prefix will be stripped from all filenames, and filenames
- which do not start with it will be ignored.
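-
-	The prefix handling described here amounts to a simple startswith
-	filter (a hypothetical helper, for illustration only):
-
-	    def filter_and_strip(files, prefix):
-	        # keep only files under prefix (which should end with a slash)
-	        # and strip the prefix from the names that survive
-	        kept = []
-	        for f in files:
-	            if f.startswith(prefix):
-	                kept.append(f[len(prefix):])
-	        return kept
-
-	    # filter_and_strip(["Twisted/setup.py", "Sandbox/x.py"], "Twisted/")
-	    #  -> ["setup.py"]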
-
-2004-07-20 Cory Dodt <corydodt@twistedmatrix.com>
-
- * contrib/svn_buildbot.py: Add --include (synonym for --filter)
- and --exclude (inverse of --include). SVN post-commit hooks
- now have total control over which changes get sent to buildbot and which
- do not.
-
-2004-07-10 Brian Warner <warner@lothar.com>
-
- * buildbot/test/test_twisted.py (Case1.testCountFailedTests): fix
- test case to match new API
-
- * buildbot/status/event.py (Logfile.getEntries): fix silly bug
- which crashed HTML display when self.entries=[] (needed to
- distinguish between [], which means "no entries yet", and None,
- which means "the entries have been swapped out to disk, go fetch
- them").
-
-2004-07-04 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (countFailedTests): Count
- skips, expectedFailures, and unexpectedSuccesses. Start scanning
- 10kb from the end because any import errors are wedged there and
- they would make us think the test log was unparseable.
- (RunUnitTests.finishStatus): add skip/todo counts to the event box
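-
-	The scan-from-near-the-end trick looks roughly like this (a rough
-	sketch of the approach with a simplified summary-line regexp, not
-	the real countFailedTests):
-
-	    import re
-
-	    def count_failures(logfile, tail=10*1024):
-	        # only look at the last ~10kb, where trial's summary line lives
-	        f = open(logfile)
-	        f.seek(0, 2)
-	        f.seek(max(0, f.tell() - tail))
-	        text = f.read()
-	        f.close()
-	        m = re.search(r"FAILED \((.*)\)", text)
-	        if not m:
-	            return 0
-	        counts = dict(re.findall(r"(\w+)=(\d+)", m.group(1)))
-	        return int(counts.get("failures", 0)) + int(counts.get("errors", 0))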
-
-2004-06-26 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (RemovePYCs): turn the
- delete-*.pyc command into an actual BuildStep, so we can label it
- nicely
- * buildbot/process/process_twisted.py (QuickTwistedBuildFactory):
- (FullTwistedBuildFactory): same
-
-2004-06-25 Cory Dodt <corydodt@twistedmatrix.com>
-
- * contrib/fakechange.py: Add an errback when sending the fake
- change, so we know it didn't work.
-
-2004-06-25 Christopher Armstrong <radix@twistedmatrix.com>
-
- * buildbot/process/step_twisted.py: Delete *.pyc files before
- calling trial, so it doesn't catch any old .pyc files whose .py
- files have been moved or deleted.
-
- * buildbot/process/step_twisted.py (RunUnitTests): 1) Add a new
- parameter, 'recurse', that passes -R to trial. 2) have 'runAll'
- imply 'recurse'. 3) Make the default 'allTests' be ["twisted"]
- instead of ["twisted.test"], so that the end result is "trial -R
- twisted".
-
- * contrib/svn_buildbot.py: Add a --filter parameter that accepts a
- regular expression to match filenames that should be ignored when
- changed. Also add a --revision parameter that specifies the
- revision to examine, which is useful for debugging.
-
-2004-06-25 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (trialTextSummarizer): create a
- summary of warnings (like DeprecationWarnings), next to the
- "summary" file
-
-2004-05-13 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.cfg: enable the win32 builder, as
- we now have a w32 build slave courtesy of Mike Taylor.
-
- * buildbot/process/base.py (Build.checkInterlocks): OMG this was
- so broken. Fixed a race condition that tripped up interlocked
- builds and caused the status to be stuck at "Interlocked" forever.
- The twisted buildbot's one interlocked build just so happened to
- never hit this case until recently (the feeding builds both pass
- before the interlocked build is attempted.. usually it has to wait
- a while).
- (Builder._pong_failed): fix method signature
-
- * setup.py: bump to 0.4.3+ while between releases
-
-2004-04-30 Brian Warner <warner@lothar.com>
-
- * setup.py (version): Releasing buildbot-0.4.3
-
-2004-04-30 Brian Warner <warner@lothar.com>
-
- * MANIFEST.in: add the doc fragments in docs/*.xhtml
-
- * README: update for 0.4.3 release
-
- * NEWS: update for 0.4.3 release
-
- * buildbot/master.py (BuildMaster.__getstate__): make sure
- Versioned.__getstate__ is invoked, for upgrade from 0.4.2
-
- * buildbot/process/step_twisted.py (RunUnitTests.trial): add
- .trial as a class attribute, for upgrade from 0.4.2
-
- * buildbot/changes/changes.py (Change.links): add .links for
- upgrade from 0.4.2
-
- * buildbot/status/event.py (Logfile.__getstate__): get rid of both
- .textWatchers and .htmlWatchers at save time, since they are both
- volatile, should allow smooth 0.4.2 upgrade
-
- * buildbot/process/step.py (CVS.finishStatus): catch failed
- CVS/SVN commands so we can make the status box red
-
-2004-04-29 Brian Warner <warner@lothar.com>
-
- * buildbot/changes/freshcvs.py
- (FreshCVSConnectionFactory.gotPerspective): add (commented-out)
- code to do setFilter(), which tells the freshcvs daemon to not
- send us stuff that we're not interested in. I will uncomment it
- when a new version of CVSToys is available in which setFilter()
- actually works, and I get a chance to test it better.
-
- * docs/examples/twisted_master.cfg: start using a PBChangeSource
-
- * buildbot/master.py (Dispatcher): use a registration scheme
- instead of hardwired service names
- (BuildMaster): keep track of the Dispatcher to support
- registration
-
- * buildbot/changes/changes.py (ChangeMaster): create a distinct
- PBChangeSource class instead of having it be an undocumented
- internal feature of the ChangeMaster. Split out the code into a
- new file.
- * buildbot/changes/pb.py (PBChangeSource): same
- * buildbot/test/test_changes.py: a few tests for PBChangeSource
-
- * docs/{factories|sources|steps}.xhtml: document some pieces
-
- * docs/examples/twisted_master.cfg: use SVN instead of CVS, stop
- using FCMaildirSource
- (f23osx): update OS-X builder to use python2.3, since the slave
- was updated to Panther (10.3.3)
-
-2004-03-21 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py: factor out doCheckout, change
- to use SVN instead of CVS
-
- * buildbot/process/base.py (BasicBuildFactory): refactor to make
- an SVN subclass easier
- (BasicSVN): subclass which uses Subversion instead of CVS
-
-2004-03-15 Christopher Armstrong <radix@twistedmatrix.com>
-
- * buildbot/slavecommand.py (ShellCommand.start): use COMSPEC instead
- of /bin/sh on win32
- (CVSCommand.cvsComplete): don't assume chdir worked on win32
-
-2004-02-25 Brian Warner <warner@lothar.com>
-
- * buildbot/slavecommand.py (ShellCommand): ['commands'] argument
- is now either a list (which is passed to spawnProcess directly) or
- a string (which gets passed to /bin/sh -c). This removes the useSH
- flag and the ArgslistCommand class. Also send status header at the
- start and end of each command, instead of having the master-side
- code do that.
- (CVSCommand): fix the doUpdate command, it failed to do the 'cp
- -r'. Update to use list-based arguments.
- (SVNFetch): use list-based arguments, use ['dir'] argument to
- simplify code.
- * buildbot/test/test_steps.py (Commands): match changes
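-
-	The list-versus-string dispatch in the ShellCommand entry above
-	boils down to something like this (a hypothetical helper; the real
-	code lives in slavecommand.py):
-
-	    from twisted.internet import reactor
-
-	    def start_command(proto, command):
-	        if isinstance(command, (list, tuple)):
-	            argv = list(command)               # handed to spawnProcess as-is
-	        else:
-	            argv = ["/bin/sh", "-c", command]  # strings go through the shell
-	        reactor.spawnProcess(proto, argv[0], argv)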
-
- * buildbot/process/step.py (InternalShellCommand.words): handle
- command lists
- (SVN): inherit from CVS, cleanup
-
- * buildbot/status/event.py (Logfile.content): render in HTML, with
- stderr in red and headers (like the name of the command we're
- about to run) in blue. Add link to a second URL (url + "?text=1")
- to get just stdout/stderr in text/plain without markup. There is
-	still a problem with .entries=None causing a crash; it seems to occur
- when the logfile is read before it is finished.
-
- * buildbot/bot.py (BotFactory.doKeepalive): add a 30-second
- timeout to the keepalives, and use it to explicitly do a
- loseConnection instead of waiting for TCP to notice the loss. This
- ought to clear up the silent-lossage problem.
- (unsafeTracebacks): pass exception tracebacks back to the master,
- makes it much easier to debug problems
-
-2004-02-23 Brian Warner <warner@lothar.com>
-
- * buildbot/slavecommand.py (ShellCommand): add useSH flag to pass
- the whole command to /bin/sh instead of execve [Johan Dahlin]
-	(CVSCommand): drop '-r BRANCH' if BRANCH==None instead of using
- '-r HEAD' [Johan Dahlin]
- (CVSCommand.start2): fix cvsdir calculation [Johan Dahlin]
-
- * buildbot/changes/changes.py (Change): add links= argument, add
-	asHTML method [Johan Dahlin]. Modified to make it a bit more
-	XHTMLish. Still not sure how best to use links=.
-
- * buildbot/status/html.py (StatusResourceCommits.getChild): use
- Change.asHTML to display the change, not asText
-
- * buildbot/status/html.py (StatusResourceBuilder): web button to
- ping slave
-
- * buildbot/test/test_run.py: test to actually start a buildmaster
- and poke at it
-
- * MANIFEST.in: bring back accidentally-dropped test helper files
-
- * buildbot/test/test_config.py (ConfigTest.testSources): skip tests
- that require cvstoys if it is not installed
-
- * buildbot/process/step_twisted.py (RunUnitTests): allow other
- values of "bin/trial" [Dave Peticolas]
- (RunUnitTests.finishStatus): say "no tests run" instead of "0
- tests passed" when we didn't happen to run any tests
-
- * buildbot/process/step.py (Compile): use haltOnFailure instead of
- flunkOnFailure [Johan Dahlin]
-
- * buildbot/process/base.py (ConfigurableBuild.setSteps): allow
- multiple instances of the same Step class by suffixing "_2", etc,
- to the name until it is unique. This name needs to be unique
- because it is used as a key in the dictionary that tracks build
- progress.
- * buildbot/test/test_steps.py (Steps.testMultipleStepInstances):
- add test for it
-
- * buildbot/process/base.py (Builder.ping): add "ping slave" command
-
-2004-01-14 Brian Warner <warner@lothar.com>
-
- * buildbot/status/words.py (IrcStatusBot): when we leave or get
- kicked from a channel, log it
-
- * buildbot/master.py (Dispatcher): add "poke IRC" command to say
- something over whatever IRC channels the buildmaster is currently
- connected to. Added to try and track down a problem in which the
- master thinks it is still connected but the IRCd doesn't see it. I
- used a styles.Versioned this time, so hopefully users won't have
- to rebuild their .tap files this time.
- * contrib/debug.glade: add a "Poke IRC" button
- * contrib/debugclient.py: same
-
- * setup.py: bump to 0.4.2+ while between releases
-
-2004-01-08 Brian Warner <warner@lothar.com>
-
- * setup.py (version): Releasing buildbot-0.4.2
-
-2004-01-08 Brian Warner <warner@lothar.com>
-
- * NEWS: update for 0.4.2 release
-
- * README: document how to run the tests, now that they all pass
-
- * buildbot/changes/maildir.py (Maildir.poll): minor comment
-
- * buildbot/process/step.py (CVS): add a global_options= argument,
- which lets you set CVS global options for the command like "-r"
- for read-only checkout, or "-R" to avoid writing in the
- repository.
- * buildbot/slavecommand.py (CVSCommand): same
-
- * buildbot/status/event.py (Logfile): add a .doSwap switch to make
- testing easier (it is turned off when testing, to avoid the
- leftover timer)
-
- * buildbot/process/step.py (InternalBuildStep): shuffle code a bit
- to make it easier to test: break generateStepID() out to a
- separate function, only update statusbag if it exists.
- (ShellCommands): create useful text for dict-based commands too.
-
- * test/*, buildbot/test/*: move unit tests under the buildbot/
- directory
- * setup.py (packages): install buildbot.test too
-
- * buildbot/test/test_slavecommand.py: fix it, tests pass now
- * buildbot/test/test_steps.py: fix it, tests pass now
-
-2004-01-06 Brian Warner <warner@lothar.com>
-
- * buildbot/changes/mail.py (parseFreshCVSMail): looks like new
- freshcvs mail uses a slightly different syntax for new
- directories. Update parser to handle either.
- * test/test_mailparse.py (Test1.testMsg9): test for same
-
-2003-12-21 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py (TwistedDebsBuildFactory): set
- 'warnOnWarnings' so that lintian errors mark the build orange
-
-2003-12-17 Brian Warner <warner@lothar.com>
-
- * buildbot/changes/mail.py (parseBonsaiMail): parser for commit
- messages emitted by Bonsai, contributed by Stephen Davis.
-
- * test/*: moved all tests to use trial instead of unittest. Some
- still fail (test_steps, test_slavecommand, and test_process).
-
- * setup.py (version): bump to 0.4.1+ while between releases
-
-2003-12-09 Brian Warner <warner@lothar.com>
-
- * setup.py (version): Releasing buildbot-0.4.1
-
-2003-12-09 Brian Warner <warner@lothar.com>
-
- * NEWS: update for 0.4.1 release
-
- * docs/examples/twisted_master.cfg: add netbsd builder, shuffle
- freebsd builder code a little bit
-
- * buildbot/changes/freshcvs.py (FreshCVSSourceNewcred.__cmp__):
- don't try to compare attributes of different classes
- * buildbot/changes/mail.py (MaildirSource.__cmp__): same
- (MaildirSource.messageReceived): fix Change delivery
-
- * buildbot/master.py (BuildMaster.loadConfig): insert 'basedir'
- into the config file's namespace before loading it, like the
- documentation claims it does
- * docs/examples/twisted_master.cfg: remove explicit 'basedir'
- (useFreshCVS): switch to using a maildir until Twisted's freshcvs
- daemon comes back online
-
-2003-12-08 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.cfg: provide an explicit 'basedir'
- so the example will work with online=0 as well
-
- * buildbot/changes/mail.py (FCMaildirSource, SyncmailMaildirSource):
- fix the __implements__ line
-
- * buildbot/changes/maildirtwisted.py (MaildirTwisted): make this
- class a twisted.application.service.Service, use startService to
- get it moving.
-
- * buildbot/changes/dnotify.py (DNotify): use os.open to get the
- directory fd instead of simple open(). I'm sure this used to work,
- but the current version of python refuses to open directories with
- open().
-
-2003-12-05 Brian Warner <warner@lothar.com>
-
- * setup.py (version): bump to 0.4.0+ while between releases
-
-2003-12-05 Brian Warner <warner@lothar.com>
-
- * setup.py (version): Releasing buildbot-0.4.0
-
-2003-12-05 Brian Warner <warner@lothar.com>
-
- * docs/examples/glib_master.cfg: replace old sample scripts with
- new-style config files
- * MANIFEST.in: include .cfg files in distribution tarball
-
- * buildbot/changes/freshcvs.py (FreshCVSListener.remote_goodbye):
- implement a dummy method to avoid the exception that occurs when
- freshcvs sends this to us.
-
- * buildbot/pbutil.py (ReconnectingPBClientFactory.stopFactory):
- removed the method, as it broke reconnection. Apparently
- stopFactory is called each time the connection attempt fails. Must
- rethink this.
- (ReconnectingPBClientFactory.__getstate__): squash the _callID
- attribute before serialization, since without stopFactory the
- reconnect timer may still be active and it isn't serializable.
-
- * test/test_mailparse.py (ParseTest): test with 'self' argument
-
- * buildbot/changes/mail.py (parseFreshCVSMail): add (silly) 'self'
- argument, as these "functions" are invoked like methods from class
- attributes and therefore always get an instance as the first
- argument.
-
- * buildbot/changes/maildir.py (Maildir.start): fix error in error
- message: thanks to Stephen Davis for the catch
-
-2003-12-04 Brian Warner <warner@lothar.com>
-
- * buildbot/pbutil.py: complete rewrite using PBClientFactory and
- twisted's standard ReconnectingClientFactory. Handles both oldcred
- and newcred connections. Also has a bug-workaround for
- ReconnectingClientFactory serializing its connector when it
- shouldn't.
-
- * buildbot/bot.py (BotFactory): rewrite connection layer with new
- pbutil. Replace makeApp stuff with proper newcred/mktap
- makeService(). Don't serialize Ephemerals on shutdown.
-
- * buildbot/changes/changes.py (ChangeMaster): make it a
- MultiService and add the sources as children, to get startService
- and stopService for free. This also gets rid of the .running flag.
-
- * buildbot/changes/freshcvs.py (FreshCVSSource): rewrite to use
- new pbutil, turn into a TCPClient at the same time (to get
- startService for free). Two variants exist: FreshCVSSourceOldcred
- and FreshCVSSourceNewcred (CVSToys doesn't actually support newcred
- yet, but when it does, we'll be ready).
- (FreshCVSSource.notify): handle paths which are empty after the
- prefix is stripped. This only happens when the top-level (prefix)
- directory is added, at the very beginning of a Repository's life.
-
- * buildbot/clients/base.py: use new pbutil, clean up startup code.
- Now the only reconnecting code is in the factory where it belongs.
- (Builder.unsubscribe): unregister the disconnect callback when we
- delete the builder on command from the master (i.e. when the
- buildmaster is reconfigured and that builder goes away). This
- fixes a multiple-delete exception when the status client is shut
- down afterwards.
- * buildbot/clients/gtkPanes.py (GtkClient): cleanup, match the
- base Client.
-
- * buildbot/status/words.py (IrcStatusBot): add some more sillyness
- (IrcStatusBot.getBuilderStatus): fix minor exception in error message
-
-2003-10-20 Christopher Armstrong <radix@twistedmatrix.com>
-
- * contrib/run_maxq.py: Accept a testdir as an argument rather than
- a list of globs (ugh). The testdir will be searched for files
- named *.tests and run the tests in the order specified in each of
- those files. This allows for "dependencies" between tests to be
- codified.
-
- * buildbot/process/maxq.py (MaxQ.__init__): Accept a testdir
- argument to pass to run_maxq.py, instead of a glob.
-
-2003-10-17 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (HLint.start): ignore .xhtml
- files that live in the sandbox
-
-2003-10-15 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (ProcessDocs.finished): fix
- spelling error in "docs" count-warnings output
- (HLint.start): stupid thinko meant .xhtml files were ignored
-
- * docs/examples/twisted_master.cfg (reactors): disable cReactor
- tests now that cReactor is banished to the sandbox
-
-2003-10-10 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (ProcessDocs, HLint): new Twisted
- scheme: now .xhtml are sources and .html are generated
-
-2003-10-08 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (RunUnitTests.__init__): oops,
- we were ignoring the 'randomly' parameter.
-
-2003-10-01 Brian Warner <warner@lothar.com>
-
- * buildbot/slavecommand.py (ShellCommand.start): set usePTY=1 on
- posix, to kill sub-children of aborted slavecommands.
-
- * buildbot/status/builder.py: rename Builder to BuilderStatus.
- Clean up initialization: lastBuildStatus remains None until the
- first build has been completed.
-
- * buildbot/status/html.py (WaterfallStatusResource.body): handle
- None as a lastBuildStatus
- * buildbot/clients/gtkPanes.py: same
-
- * buildbot/status/client.py (StatusClientService): keep
- BuilderStatus objects in self.statusbags . These objects now live
- here in the StatusClientService and are referenced by the Builder
- object, rather than the other way around.
- * buildbot/status/words.py (IrcStatusBot.getBuilderStatus): same
- * buildbot/process/base.py (Builder): same
- * test/test_config.py (ConfigTest.testBuilders): same
-
- * buildbot/master.py (BuildMaster.loadConfig_Builders): when modifying
- an existing builder, leave the statusbag alone. This will preserve the
- event history.
-
- * buildbot/pbutil.py (ReconnectingPB.connect): add initial newcred
- hook. This will probably go away in favor of a class in upcoming
- Twisted versions.
-
- * buildbot/changes/freshcvs.py (FreshCVSSource.start): Remove old
- serviceName from newcred FreshCVSNotifiee setup
-
-2003-09-29 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py: switch to new reactor
- abbreviations
- * docs/examples/twisted_master.cfg: same
-
- * README (REQUIREMENTS): mention twisted-1.0.8a3 requirement
-
- * buildbot/status/words.py (IrcStatusBot.getBuilder): use the
- botmaster reference instead of the oldapp service lookup
-
- * buildbot/master.py (BuildMaster.__init__): give the
- StatusClientService a reference to the botmaster to make it easier to
- force builds
-
-2003-09-24 Christopher Armstrong <radix@twistedmatrix.com>
-
- * buildbot/status/html.py (Box.td): escape hreffy things so you
- can have spaces in things like builder names
- (StatusResourceBuilder.body)
- (WaterfallStatusResource.body)
- (WaterfallStatusResource.body0): same
-
-2003-09-25 Brian Warner <warner@lothar.com>
-
- * buildbot/master.py (BuildMaster.loadConfig_Builders): don't
- rearrange the builder list when adding or removing builders: keep
- them in the order the user requested.
- * test/test_config.py (ConfigTest.testBuilders): verify it
-
- * contrib/debug.glade: give the debug window a name
-
- * buildbot/process/base.py (Builder.buildTimerFired): builders can
- now wait on multiple interlocks. Fix code relating to that.
- (Builder.checkInterlocks): same
- * buildbot/status/builder.py (Builder.currentlyInterlocked): same
-
- * buildbot/master.py (BuildMaster.loadConfig): move from
- deprecated pb.BrokerFactory to new pb.PBServerFactory
- * test/test_config.py (ConfigTest.testWebPathname): same
-
- * docs/examples/twisted_master.cfg: fix interlock declaration
-
- * buildbot/master.py (BotMaster.addInterlock): move code to attach
- Interlocks to their Builders into interlock.py .
- (BuildMaster.loadConfig_Interlocks): fix interlock handling
-
- * test/test_config.py (ConfigTest.testInterlocks): validate
- interlock handling
-
- * buildbot/process/base.py (Builder.__init__): better comments
- * buildbot/process/interlock.py (Interlock.__repr__): same
- (Interlock.deactivate): add .active flag, move the code that
- attaches/detaches builders into the Interlock
-
-2003-09-24 Christopher Armstrong <radix@twistedmatrix.com>
-
- * buildbot/process/maxq.py (MaxQ): support for running a set of MaxQ
- tests using the new run_maxq.py script, and reporting failures by
- parsing its output.
-
- * contrib/run_maxq.py: Hacky little script for running a set of maxq
- tests, reporting their success or failure in a buildbot-friendly
- manner.
-
-2003-09-24 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.cfg: example of a new-style config
- file. This lives in the buildmaster base directory as
- "master.cfg".
-
- * contrib/debugclient.py (DebugWidget.do_rebuild): add 'reload'
- button to make the master re-read its config file
-
- * buildbot/master.py (BuildMaster.loadConfig): new code to load
- buildmaster configuration from a file. This file can be re-read
- later, and the buildmaster will update itself to match the new
- desired configuration. Also use new Twisted Application class.
- * test/Makefile, test/test_config.py: unit tests for same
-
- * buildbot/changes/freshcvs.py (FreshCVSSource.__cmp__): make
- FreshCVSSources comparable, to support reload.
- * buildbot/changes/mail.py (MaildirSource.__cmp__): same
-
- * buildbot/process/base.py (Builder): make them comparable, make
- Interlocks easier to attach, to support reload. Handle
- re-attachment of remote slaves.
- * buildbot/process/interlock.py (Interlock): same
-
- * buildbot/bot.py, bb_tap.py, changes/changes.py: move to
- Twisted's new Application class. Requires Twisted >= 1.0.8 .
- buildmaster taps are now constructed with mktap.
- * buildbot/status/client.py (StatusClientService): same
-
- * buildbot/status/words.py: move to new Services, add support to
- connect to multiple networks, add reload support, allow nickname
- to be configured on a per-network basis
-
-2003-09-20 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.py (twisted_app): use python2.3 for
- the freebsd builder, now that the machine has been upgraded and no
- longer has python2.2
-
- * setup.py (version): bump to 0.3.5+ while between releases
-
-2003-09-19 Brian Warner <warner@lothar.com>
-
- * setup.py (version): Releasing buildbot-0.3.5
-
-2003-09-19 Brian Warner <warner@lothar.com>
-
- * NEWS: add post-0.3.4 notes
-
- * README (REQUIREMENTS): note twisted-1.0.7 requirement
-
- * MANIFEST.in: add contrib/*
-
- * docs/examples/twisted_master.py (twisted_app): all build slaves must
- use a remote root now: cvs.twistedmatrix.com
-
- * buildbot/changes/freshcvs.py (FreshCVSNotifiee.connect): update
- to newcred
- (FreshCVSNotifieeOldcred): but retain a class that uses oldcred for
- compatibility with old servers
- (FreshCVSSource.start): and provide a way to use it
- (FreshCVSNotifiee.disconnect): handle unconnected notifiee
-
- * docs/examples/twisted_master.py (twisted_app): update to new
- makeApp interface.
- (twisted_app): listen on new ~buildbot socket
- (twisted_app): Twisted CVS has moved to cvs.twistedmatrix.com
-
- * buildbot/process/process_twisted.py: Use 'copydir' on CVS steps
- to reduce cvs bandwidth (update instead of full checkout)
-
-2003-09-11 Brian Warner <warner@lothar.com>
-
- * contrib/fakechange.py: demo how to connect to the changemaster
- port. You can use this technique to submit changes to the
- buildmaster from source control systems that offer a hook to run a
- script when changes are committed.
-
- * contrib/debugclient.py: tool to connect to the debug port. You
- can use it to force builds, submit fake changes, and wiggle the
- builder state
-
- * buildbot/master.py: the Big NewCred Reorganization. Use a single
- 'Dispatcher' realm to handle all the different kinds of
- connections and Perspectives: buildslaves, the changemaster port,
- the debug port, and the status client port. NewCredPerspectives
- now have .attached/.detached methods called with the remote 'mind'
- reference, much like old perspectives did. All the pb.Services
- turned into ordinary app.ApplicationServices .
- (DebugService): went away, DebugPerspectives are now created
- directly by the Dispatcher.
- (makeApp): changed interface a little bit
-
- * buildbot/changes/changes.py: newcred
- * buildbot/status/client.py: newcred
-
- * buildbot/clients/base.py: newcred client side changes
- * buildbot/bot.py: ditto
-
- * docs/examples/glib_master.py: handle new makeApp() interface
- * docs/examples/twisted_master.py: ditto
-
- * buildbot/pbutil.py (NewCredPerspective): add a helper class to
- base newcred Perspectives on. This should go away once Twisted
- itself provides something sensible.
-
-
-2003-09-11 Christopher Armstrong <radix@twistedmatrix.com>
-
- * contrib/svn_buildbot.py: A program that you can call from your
- SVNREPO/hooks/post-commit file that will notify a BuildBot master
- when a change in an SVN repository has happened. See the top of
- the file for some minimal usage info.
-
-2003-09-10 Christopher Armstrong <radix@twistedmatrix.com>
-
- * buildbot/slavecommand.py (ArglistCommand): Add new
- ArglistCommand that takes an argument list rather than a string as
- a parameter. Using a str.split() for argv is very bad.
-
- * buildbot/slavecommand.py (SVNFetch): Now has the ability to
- update to a particular revision rather than always checking out
- (still not very smart about it, there may be cases where the
- checkout becomes inconsistent).
-
-2003-09-10 Christopher Armstrong <radix@twistedmatrix.com>
-
- * buildbot/{bot.py,slavecommand.py,process/step.py}: Rudimentary
- SVN fetch support. It can checkout (not update!) a specified
- revision from a specified repository to a specified directory.
-
- * buildbot/status/progress.py (Expectations.update): Fix an
- obvious bug (apparently created by the change described in the
- previous ChangeLog message) by moving a check to *after* the
- variable it checks is defined.
-
-
-2003-09-08 Brian Warner <warner@lothar.com>
-
- * buildbot/status/progress.py (Expectations.update): hack to catch
- an exception TTimo sees: sometimes the update() method seems to
- get called before the step has actually finished, so the .stopTime
- is not set, so no totalTime() is available and we average None
- with the previous value. Catch this and just don't update the
- metrics, and emit a log message.
-
-2003-08-24 Brian Warner <warner@lothar.com>
-
- * buildbot/process/base.py (BasicBuildFactory): accept 'cvsCopy'
- parameter to set copydir='original' in CVS commands.
-
- * buildbot/process/step.py (CVS): accept 'copydir' parameter.
-
- * buildbot/slavecommand.py (CVSCommand): add 'copydir' parameter,
- which tells the command to maintain a separate original-source CVS
- workspace. For each build, this workspace will be updated, then
- the tree copied into a new workdir. This reduces CVS bandwidth
- (from a full checkout to a mere update) while doubling the local
- disk usage (to keep two copies of the tree).
-
-2003-08-21 Brian Warner <warner@lothar.com>
-
- * buildbot/status/event.py (Logfile.addEntry): if the master web
- server dies while we're serving a page, request.write raises
- pb.DeadReferenceError . Catch this and treat it like a
- notifyFinish event by dropping the request.
-
-2003-08-18 Brian Warner <warner@lothar.com>
-
- * buildbot/status/words.py (IrcStatusBot.command_FORCE): complain
- (instead of blowing up) if a force-build command is given without
- a reason field
-
- * buildbot/changes/changes.py (ChangeMaster.getChangeNumbered):
- don't blow up if there aren't yet any Changes in the list
-
-2003-08-02 Brian Warner <warner@lothar.com>
-
- * buildbot/bot.py (updateApplication): don't set the .tap name,
- since we shouldn't assume we own the whole .tap file
-
- * buildbot/bb_tap.py (updateApplication): clean up code, detect
- 'mktap buildbot' (without a subcommand) better
-
-2003-07-29 Brian Warner <warner@lothar.com>
-
- * buildbot/status/words.py
- (IrcStatusFactory.clientConnectionLost): when we lose the
- connection to the IRC server, schedule a reconnection attempt.
-
- * buildbot/slavecommand.py (CVSCommand.doClobber): on non-posix,
- use shutil.rmtree instead of forking off an "rm -rf" command.
- rmtree may take a while and will block until it finishes, so we
- use "rm -rf" if available.
-
- * docs/examples/twisted_master.py: turn off kqreactor, it hangs
- freebsd buildslave badly
-
- * setup.py (version): bump to 0.3.4+ while between releases
-
-2003-07-28 Brian Warner <warner@lothar.com>
-
- * setup.py (version): Releasing buildbot-0.3.4
-
-2003-07-28 Brian Warner <warner@lothar.com>
-
- * NEWS: update in preparation for release
-
- * buildbot/slavecommand.py (ShellCommand.doTimeout): use
- process.signalProcess instead of os.kill, to improve w32
- portability
-
- * docs/examples/twisted_master.py (twisted_app): turn off
- win32eventreactor: the tests hang the buildslave badly
-
- * buildbot/process/base.py (Build.buildFinished): update ETA even on
- failed builds, since usually the failures are consistent
-
- * buildbot/process/process_twisted.py (TwistedReactorsBuildFactory):
- add compileOpts/compileOpts2 to reactors build
-
- * docs/examples/twisted_master.py (twisted_app): add "-c mingw32"
- (twisted_app): use both default and win32eventreactor on w32 build.
- Use both default and kqreactor on freebsd build.
-
- * buildbot/process/process_twisted.py (FullTwistedBuildFactory):
- add compileOpts2, which is put after the build_ext argument. w32
- needs "-c mingw32" here.
-
- * buildbot/status/html.py (StatusResourceBuilder.getChild): don't
- touch .acqpath, it goes away in recent Twisted releases
-
- * docs/examples/twisted_master.py (twisted_app): use "python" for
- the w32 buildslave, not "python2.2"
-
- * buildbot/bot.py (Bot.remote_getSlaveInfo): only look in info/ if
- the directory exists.. should hush an exception under w32
-
- * buildbot/slavecommand.py (ShellCommandPP.processEnded): use
- ProcessTerminated -provided values for signal and exitCode rather
- than parsing the unix status code directly. This should remove one
- more roadblock for a w32-hosted buildslave.
-
- * test/test_mailparse.py: add test cases for Syncmail parser
-
- * buildbot/changes/freshcvsmail.py: remove leftover code, leave a
- temporary compatibility import. Note! Start importing
- FCMaildirSource from changes.mail instead of changes.freshcvsmail
-
- * buildbot/changes/mail.py (parseSyncmail): finish Syncmail parser
-
-2003-07-27 Brian Warner <warner@lothar.com>
-
- * NEWS: started adding new features
-
- * buildbot/changes/mail.py: start work on Syncmail parser, move
- mail sources into their own file
-
- * buildbot/changes/freshcvs.py (FreshCVSNotifiee): mark the class
- as implementing IChangeSource
- * buildbot/changes/freshcvsmail.py (FCMaildirSource): ditto
-
- * buildbot/interfaces.py: define the IChangeSource interface
-
-2003-07-26 Brian Warner <warner@lothar.com>
-
- * buildbot/master.py (makeApp): docstring (thanks to Kevin Turner)
-
-2003-06-25 Brian Warner <warner@lothar.com>
-
- * buildbot/status/words.py (IrcStatusBot.emit_last): round off
- seconds display
-
-2003-06-17 Brian Warner <warner@lothar.com>
-
- * buildbot/status/words.py: clean up method usage to avoid error
- in silly IRC command
- (IrcStatusBot.emit_status): round off seconds display
-
- * buildbot/process/base.py (Build): delete the timer when saving
- to the .tap file, and restore it (if it should still be running)
- upon restore. This should fix the "next build in -34 seconds"
- messages that result when the master is restarted while builds are
- sitting in the .waiting slot. If the time for the build has
- already passed, start it very soon (in 1 second).
-
- * buildbot/status/words.py: more silly commands
-
- * README (REQUIREMENTS): add URLs to all required software
-
- * buildbot/status/words.py ('last'): mention results of, and time
- since last build
-
-2003-05-28 Brian Warner <warner@lothar.com>
-
- * buildbot/status/words.py: add 'last' command
- (IrcStatusBot.emit_status): add current-small text to 'status' output
-
- * docs/examples/twisted_master.py (twisted_app): turn on IRC bot
- (twisted_app): remove spaces from OS-X builder name
-
- * buildbot/master.py (makeApp): add knob to turn on IRC bot
- * buildbot/status/words.py: IRC bot should actually be useful now
-
-2003-05-23 Brian Warner <warner@lothar.com>
-
- * buildbot/bot.py (Bot.remote_getSlaveInfo): add routines to get
- "slave information" from $(slavedir)/info/* . These files are
- maintained by the slave administrator, and describe the
- machine/environment that is hosting the slave. Information from
- them is put into the "Builder" HTML page. Still need to establish
- a set of well-known filenames and meanings for this data: at the
- moment, *all* info/* files are sent to the master, but only
- 'admin' and 'host' are used on that end.
- * buildbot/status/html.py (StatusResourceBuilder.body): ditto
- * buildbot/process/base.py (Builder.setRemoteInfo): ditto
- * buildbot/master.py (BotPerspective.got_info): ditto
-
-2003-05-22 Brian Warner <warner@lothar.com>
-
- * setup.py (version): bump version to 0.3.3+ while between releases
-
-2003-05-21 Brian Warner <warner@lothar.com>
-
- * setup.py: Releasing buildbot-0.3.3
-
-2003-05-21 Brian Warner <warner@lothar.com>
-
- * NEWS: 0.3.3 news items
-
- * README: describe --keepalive and life behind a NAT box
-
- * buildbot/bot.py (Bot.connected): implement application-level
- keepalives to deal with NAT timeouts, turn them on with
- --keepalive option or when SO_KEEPALIVE doesn't work.
-
- * buildbot/master.py (BotPerspective): accept keepalives silently
-
- * buildbot/process/base.py (Build.buildException): CopiedFailures
- don't carry as much information as local ones, so don't try to
- create a big HTMLized version of them.
-
- * buildbot/process/step.py (InternalShellCommand.stepFailed): close
- log file when step fails due to an exception, such as when the slave
- becomes unreachable
-
- * buildbot/process/step_twisted.py (RunUnitTests): use trial's new
- --testmodule argument instead of grepping for test-case-name tags
- ourselves. Remove FindUnitTests code.
- * buildbot/slavecommand.py, buildbot/bot.py: remove old code
-
- * MANIFEST.in: Add docs/examples, files under test/ . Oops!
-
-2003-05-16 Brian Warner <warner@lothar.com>
-
- * buildbot/process/base.py (BasicBuildFactory): add 'configureEnv'
- argument to allow things like CFLAGS=-O0 to be passed without relying
- upon /bin/sh processing on the slave.
-
- * buildbot/process/step.py (InternalShellCommand.start): send
- 'env' dict to slave
- * buildbot/slavecommand.py (ShellCommand.start): create argv with
- 'split' instead of letting /bin/sh do it. This should also remove
- the need for /bin/sh on the buildslave, making it more likely to
- work with win32.
-
- * buildbot/status/html.py: html-escape text in blamelist.
- Add "force build" button to the Builder page.
-
- * buildbot/process/step_twisted.py (countFailedTests): look at
- last 1000 characters for status line, as import errors can put it
- before the -200 point.
-
-2003-05-15 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.py: use clobber=0 for remote builds
-
- * buildbot/process/process_twisted.py (FullTwistedBuildFactory):
- make 'clobber' a parameter, so it is possible to have builds which
- do full tests but do a cvs update instead of hammering the CVS
- server with a full checkout each build
-
- * buildbot/process/step.py (InternalShellCommand): bump default
- timeout to 20 minutes
-
- * buildbot/bot.py (Bot.debug_forceBuild): utility method to ask
- the master to trigger a build. Run it via manhole.
-
- * buildbot/master.py (BotPerspective.perspective_forceBuild):
- allow slaves to trigger any build that they host, to make life
- easier for slave admins who are testing out new build processes
-
- * buildbot/process/process_twisted.py (TwistedReactorsBuildFactory):
- don't flunk cReactor or qtreactor on failure, since they fail a lot
- these days. Do warnOnFailure instead.
-
- * buildbot/process/base.py: change Builder.buildable from a list
- into a single slot. When we don't have a slave, new builds (once
- they make it past the timeout) are now merged into an existing
- buildable one instead of being queued. With this change, a slave
- which has been away for a while doesn't get pounded with all the
- builds it missed, but instead just does a single build.
-
-2003-05-07 Brian Warner <warner@lothar.com>
-
- * setup.py (version): bump version to 0.3.2+ while between releases
-
-2003-05-07 Brian Warner <warner@lothar.com>
-
- * setup.py: Releasing buildbot-0.3.2
-
-2003-05-07 Brian Warner <warner@lothar.com>
-
- * setup.py: fix major packaging error: include subdirectories!
-
- * NEWS: add changes since last release
-
- * README (REQUIREMENTS): update twisted/python dependencies
-
- * buildbot/status/builder.py (Builder.startBuild): change
- BuildProcess API: now they should call startBuild/finishBuild
- instead of pushing firstEvent / setLastBuildStatus. Moving towards
- keeping a list of builds in the statusbag, to support other kinds of
- status delivery.
- (Builder.addClient): send current-activity-small to new clients
- * buildbot/process/base.py (Build.startBuild, .buildFinished): use
- new API
-
- * buildbot/status/client.py: drop RemoteReferences at shutdown
-
- * buildbot/status/event.py (Event.stoppedObserving): oops, add it
-
- * buildbot/status/progress.py (BuildProgress.remote_subscribe):
- more debug messages for remote status client
-
- * buildbot/process/step.py (InternalBuildStep.stepComplete)
- (.stepFailed): only fire the Deferred once, even if both
- stepComplete and stepFailed are called. I think this can happen if
- an exception occurs at a weird time.
-
- * buildbot/status/words.py: work-in-progress: IRC status delivery
-
-2003-05-05 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.py (twisted_app): hush internal
- python2.3 distutils deprecation warnings
- * buildbot/process/process_twisted.py (FullTwistedBuildFactory):
- add compileOpts= argument which inserts extra args before the
- "setup.py build_ext" command. This can be used to give -Wignore
- warnings, to hush some internal python-2.3 deprecation messages.
-
- * buildbot/process/step_twisted.py (RunUnitTests): parameterize
- the ['twisted.test'] default test case to make it easier to change
- in subclasses
-
- * buildbot/clients/base.py: switch to pb.Cacheable-style Events
- * buildbot/clients/gtkPanes.py: ditto
-
- * buildbot/process/step_twisted.py (RunUnitTests): use randomly=
- arg to collapse RunUnitTestsRandomly into RunUnitTests
- * buildbot/process/process_twisted.py (FullTwistedBuildFactory):
- use RunUnitTests(randomly=1) instead of RunUnitTestsRandomly
-
- * buildbot/status/html.py (StatusResource): shuffle Resources
- around to fix a bug: both 'http://foo:8080' and 'http://foo:8080/'
- would serve the waterfall display, but the internal links were
- only valid on the trailing-slash version. The correct behavior is
- for the non-slashed one to serve a Redirect to the slashed one.
- This only shows up when the buildbot page is hanging off another
- server, like a Twisted-Web distributed server.
-
- * buildbot/status/event.py (Event, RemoteEvent): make Events
- pb.Cacheable, with RemoteEvent as the cached version. This removes
- a lot of explicit send-an-update code.
- * buildbot/status/builder.py (Builder): remove send-update code
- * buildbot/status/client.py (ClientBuilder): remove send-update
- code, and log errors that occur during callRemote (mostly to catch
- InsecureJelly exceptions)
-
- * buildbot/process/process_twisted.py (QuickTwistedBuildFactory):
- run Lore with the same python used in the rest of the build
-
- * buildbot/process/step_twisted2.py (RunUnitTestsJelly): moved
-
- * buildbot/process/step_twisted.py (HLint): accept 'python'
- argument. Catch rc!=0 and mark the step as failed. This marks the
- build orange ("has warnings").
- (RunUnitTestsJelly): move out to step_twisted2.py
-
- * buildbot/util.py (ignoreStaleRefs): add utility function
-
- * buildbot/master.py (DebugPerspective.perspective_setCurrentState):
- don't fake ETA object, it's too hard to get right
-
-2003-05-02 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.py (twisted_app): add FreeBSD builder
-
-2003-05-01 Brian Warner <warner@lothar.com>
-
- * buildbot/status/html.py (StatusResource.body): oops, I was
- missing a <tr>, causing the waterfall page to be misrendered in
- everything except Galeon.
-
-2003-04-29 Brian Warner <warner@lothar.com>
-
- * docs/examples/twisted_master.py: make debuild use python-2.2
- explicitly, now that Twisted stopped supporting 2.1
-
- * buildbot/process/step_twisted.py (BuildDebs.finishStatus): oops,
- handle tuple results too. I keep forgetting this, which suggests
- it needs to be rethought.
-
- * setup.py (setup): bump version to 0.3.1+ while between releases
-
-2003-04-29 Brian Warner <warner@lothar.com>
-
- * setup.py: Releasing buildbot-0.3.1
-
-2003-04-29 Brian Warner <warner@lothar.com>
-
- * README (SUPPORT): add plea to send questions to the mailing list
-
- * NEWS, MANIFEST.in: add description of recent changes
-
- * docs/examples/twisted_master.py: add the code used to create the
- Twisted buildmaster, with passwords and such removed out to a
- separate file.
-
- * buildbot/changes/changes.py, freshcvs.py, freshcvsmail.py: split
- out cvstoys-using bits from generic changes.py, to allow non-cvstoys
- buildmasters to not require CVSToys be installed.
- * README, docs/examples/glib_master: update to match the change
-
- * buildbot/clients/base.py, buildbot/bot.py,
- buildbot/changes/changes.py, buildbot/pbutil.py: copy
- ReconnectingPB from CVSToys distribution to remove CVSToys
- dependency for build slaves and status clients. Buildmasters which
- use FreshCVSSources still require cvstoys be installed, of course.
-
-2003-04-25 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py (FullTwistedBuildFactory): add
- runTestsRandomly arg to turn on trial -z
-
- * buildbot/process/step_twisted.py (TwistedJellyTestResults):
- experimental code to use trial's machine-parseable output to get
- more detailed test results. Still has some major issues.
- (RunUnitTestsRandomly): subclass to add "-z 0" option, runs tests
- in random sequence
-
- * buildbot/status/builder.py (Builder.setCurrentBuild):
- anticipating moving build history into statusbag, not used yet
-
- * buildbot/status/tests.py: code to centralize test results,
- doesn't work quite yet
-
- * buildbot/status/event.py (Event): use hasattr("setName") instead
- of isinstance for now.. need better long-term solution
-
- * buildbot/status/html.py: Remove old imports
-
-2003-04-24 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py (TwistedBuild.isFileImportant):
- ignore changes under doc/fun/ and sandbox/
-
- * buildbot/process/step_twisted.py: update pushEvent and friends.
-
- * buildbot/status/html.py (Box.td): replace event.buildername with
- event.parent.getSwappableName(). Needs more thought.
-
- * buildbot/status/builder.py (Builder): Replace pushEvent and
- getLastEvent with {set|update|addFileTo|finish}CurrentActivity.
- Tell events they are being pruned with event.delete().
-
- * buildbot/process/base.py (Build): Remove Builder status-handling
- methods. s/pushEvent/setCurrentActivity/.
-
- * buildbot/process/step.py (BuildStep): clean up status delivery.
- Use builder.statusbag methods instead of intermediate builder
- methods. s/updateLastEvent/updateCurrentActivity/.
- s/finalizeLastEvent/finishCurrentActivity/. Use
- addFileToCurrentActivity for summaryFunction.
-
- * buildbot/status/event.py (Logfile): put data in a Swappable when
- .finish is called.
- (Event): add more setter methods. Remove .buildername, use .parent
- and getSwappableName instead (needs more thought).
-
- * buildbot/util.py (Swappable):
- * test/test_swap.py: don't bother setting filename at __init__
- time, do it later. Change setFilename args to take parent first,
- since it provides the most significant part of the filename.
-
-2003-04-23 Brian Warner <warner@lothar.com>
-
- * buildbot/status/event.py (Logfile.addEntry): append to previous
- entry, if possible
-
- * buildbot/process/step.py (BuildStep.finalizeLastEvent):
- anticipating Swappable
- (InternalShellCommand.remoteUpdate): split out various log-adding
- methods so subclasses can snarf stdout separately
-
- * buildbot/process/base.py (Builder.finalizeLastEvent): more code
- in anticipation of Swappable build logs
- (Builder.testsFinished): anticipating TestResults, still disabled
-
- * buildbot/status/builder.py (Builder.pruneEvents): only keep the
- last 100 events
-
- * buildbot/status/event.py (Logfile): add (disabled) support for
- Swappable, not ready for use yet
-
- * buildbot/util.py (Swappable): object which is swapped out to
- disk after some period of no use.
- * test/test_swap.py: test buildbot.utils.Swappable
-
-2003-04-14 Brian Warner <warner@lothar.com>
-
- * buildbot/process/base.py (Builder.doPeriodicBuild): add simple
- periodic-build timer. Set the .periodicBuildTime on a builder
- instance to some number of seconds to activate it.
-
- * buildbot/master.py (BotMaster.forceBuild): change forceBuild API
-
- * buildbot/process/step.py (ShellCommand.finishStatus): use log.msg in
- a way that survives result tuples
-
-2003-04-12 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step.py (ShellCommand.finishStatusSummary):
- return a dict instead of a tuple: allow summarizers to provide
- multiple summaries if they want
- * buildbot/process/step_twisted.py (trialTextSummarizer): return dict
- (debuildSummarizer): summarize lintian warnings/errors
-
-2003-04-10 Brian Warner <warner@lothar.com>
-
- * README (REQUIREMENTS): slave requires twisted-1.0.4a2
-
-2003-04-09 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (trialTextSummarizer): Don't create
- empty summaries: happens when the tests fail so hard they don't emit
- a parseable summary line.
-
- * buildbot/process/step.py (ShellCommand.finishStatusSummary):
- Allow summaryFunction to return None to indicate no summary should
- be added.
-
- * buildbot/status/event.py (Logfile.removeHtmlWatcher): avoid
- writing to stale HTTP requests: notice when they disconnect and
- remove the request from the list. Also add CacheToFile from
- moshez, will be used later.
-
-2003-04-08 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (ProcessDocs.finished): warnings
- should be an int, not a list of strings
-
- * buildbot/changes/changes.py (FreshCVSSource.stop): don't disconnect
- if we weren't actually connected
-
- * buildbot/process/step_twisted.py (trialTextSummarizer): function
- to show the tail end of the trial text output
-
- * buildbot/process/step.py (ShellCommand.finishStatusSummary): add
- hook to summarize the results of a ShellCommand
-
-2003-04-07 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (RunUnitTests): consolidate all
- twisted test suite code into a single class.
- * buildbot/process/process_twisted.py: same
-
-2003-04-04 Brian Warner <warner@lothar.com>
-
- * setup.py, MANIFEST.in: hack to make sure plugins.tml gets installed
-
- * README (SLAVE): document use of mktap to create slave .tap file
- (REQUIREMENTS): describe dependencies
-
- * buildbot/bb_tap.py, buildbot/plugins.tml:
- * buildbot/bot.py (updateApplication): Add mktap support for creating
- buildslave .tap files
-
-2003-03-28 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step.py (InternalShellCommand.finished): handle
- new tuple result values (fix embarrassing bug that appeared during
- PyCon demo)
-
-2003-03-27 Brian Warner <warner@lothar.com>
-
- * docs/examples/glib_master.py, README: add sample buildmaster.tap
- -making program
-
-2003-03-25 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step.py (CVS, ShellCommand): add reason for failure
- to overall build status
- * buildbot/clients/base.py (Builder): improve event printing
- * buildbot/process/base.py (BasicBuildFactory): use specific steps
- instead of generic ShellCommand
- (Build): Add .stopBuild, use it when slave is detached
-
- * buildbot/process/step.py (Configure,Test): give the steps their
- own names and status strings
-
- * buildbot/status/html.py (StatusResource): add "show" argument,
- lets you limit the set of Builders being displayed.
-
-2003-03-20 Brian Warner <warner@lothar.com>
-
- * buildbot/process/basic.py: removed
-
-2003-03-19 Brian Warner <warner@lothar.com>
-
- * buildbot/process/process_twisted.py (FullTwistedBuildFactory):
- turn off process-docs by default
-
- * buildbot/process/base.py (Builder.getBuildNumbered): Don't blow up
- when displaying build information without anything in allBuilds[]
-
- * buildbot/bot.py (makeApp): really take password from sys.argv
-
-2003-03-18 Brian Warner <warner@lothar.com>
-
- * buildbot/bot.py (buildApp): take password from sys.argv
-
- * README: replace with more useful text
-
- * setup.py: add a real one
- * MANIFEST.in, .cvsignore: more distutils packaging stuff
-
- * docs/PyCon-2003/: added sources for PyCon paper.
-
- * buildbot/process/base.py, step.py: revamp. BuildProcess is gone,
- now Build objects control the process and Builder only handles
- slave stuff and distribution of changes/status. A new BuildFactory
- class creates Build objects on demand.
-
- Created ConfigurableBuild which takes a list of steps to run. This
- makes it a lot easier to set up a new kind of build and moves us
- closer to being able to configure a build from a web page.
-
- * buildbot/process/step_twisted.py, process_twisted.py: move to
- new model. A lot of code went away.
-
- * buildbot/status/progress.py (BuildProgress.newProgress): Don't
- send lots of empty progress messages to the client.
-
- * buildbot/master.py (makeApp): enforce builder-name uniqueness
-
-2003-02-20 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py (BuildDebs): count lintian hits
-
- * buildbot/slavecommand.py (ShellCommand): back to usePTY=0. The
- Twisted bug that prevented non-pty processes from working just got
- fixed, and the bug that leaks ptys is still being investigated.
-
- * buildbot/process/step.py (CVS): send timeout arg to slave
-
- * buildbot/clients/gtkPanes.py: add connection-status row, handle
- builders coming and going
- * buildbot/clients/base.py: clean up protocol, move to ReconnectingPB
- from CVSToys, handle lost-buildmaster
-
- * buildbot/status/client.py (StatusClientService.removeBuilder):
- Clean up status client protocol: send builders (with references)
- as they are created, rather than sending a list and requiring the
- client to figure out which ones are new.
- * buildbot/master.py (BotMaster.forceBuild): Log debugclient
- attempts to force a build on an unknown builder
-
-2003-02-19 Brian Warner <warner@lothar.com>
-
- * buildbot/slavecommand.py (CVSCommand): add timeout to sub-commands
- * buildbot/slavecommand.py (ShellCommand.start): stop using PTYs until
- Twisted stops leaking them.
- * buildbot/clients/gtkPanes.py (CompactBuilder): forget ETA when the
- builder goes to an idle state.
-
- * buildbot/slavecommand.py (ShellCommand.start): bring back PTYs until
- I figure out why CVS commands hang without them, and/or I fix the
- hung-command timeout
-
-2003-02-16 Brian Warner <warner@lothar.com>
-
- * buildbot/process/step_twisted.py: bin/hlint went away, replace
- with 'bin/lore --output lint'. Use 'bin/trial -o' to remove
- ansi-color markup. Remove GenerateLore step. Count hlint warnings in
- GenerateDocs now that they are prefixed with WARNING:.
-
- * buildbot/status/html.py (StatusResource.body): Fix Builder link,
- use manual href target instead of request.childLink
-
- * buildbot/clients/gtkPanes.py: Fix progress countdown: update the
- display every second, but update the ETA every 5 seconds (or
- whenever) as remote_progress messages arrive.
-
-
-2003-02-12 Brian Warner <warner@lothar.com>
-
- * *: import current sources from home CVS repository
-
-
-# Local Variables:
-# add-log-time-format: add-log-iso8601-time-string
-# End:
diff --git a/buildbot/buildbot-source/MANIFEST.in b/buildbot/buildbot-source/MANIFEST.in
deleted file mode 100644
index 11e9abecf..000000000
--- a/buildbot/buildbot-source/MANIFEST.in
+++ /dev/null
@@ -1,17 +0,0 @@
-
-include ChangeLog MANIFEST.in README README.w32 NEWS
-include docs/PyCon-2003/buildbot.html docs/PyCon-2003/stylesheet.css
-include docs/PyCon-2003/overview.png docs/PyCon-2003/slave.png
-include docs/PyCon-2003/waterfall.png
-include docs/examples/*.cfg
-include docs/buildbot.texinfo docs/buildbot.info
-include docs/epyrun docs/gen-reference
-include buildbot/test/mail/* buildbot/test/subdir/*
-include buildbot/scripts/sample.cfg
-include buildbot/status/classic.css
-include buildbot/clients/debug.glade
-include buildbot/buildbot.png
-
-exclude buildbot/test/test_trial.py
-
-include contrib/* contrib/windows/*
diff --git a/buildbot/buildbot-source/NEWS b/buildbot/buildbot-source/NEWS
deleted file mode 100644
index cea8653d9..000000000
--- a/buildbot/buildbot-source/NEWS
+++ /dev/null
@@ -1,1621 +0,0 @@
-User visible changes in Buildbot.
-
-* Release 0.7.3 (23 May 2006)
-
-** compatibility
-
-This release is compatible with Twisted-1.3.0, but the next one will not be.
-Please upgrade to at least Twisted-2.0.x soon, as the next buildbot release
-will require it.
-
-** new features
-
-*** Mercurial support
-
-Support for Mercurial version control system (http://selenic.com/mercurial)
-has been added. This adds a buildbot.process.step.Mercurial BuildStep. A
-suitable hook script to deliver changes to the buildmaster is still missing.
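-
-A rough sketch of what a build factory using the new step might look like
-(the repository URL is made up, and the repourl=/mode= argument names are
-assumptions patterned after the other Source steps, not taken from this
-release's documentation):
-
-  from buildbot.process import step, factory
-
-  f = factory.BuildFactory()
-  # check out the tree with the new Mercurial step, then build it
-  f.addStep(step.Mercurial, repourl="http://hg.example.org/myproject",
-            mode="copy")
-  f.addStep(step.Compile, command=["make", "all"])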
-
-*** 'buildbot restart' command
-
-The 'buildbot restart BASEDIR' command will perform a 'buildbot stop' and
-'buildbot start', and will attempt to wait for the buildbot process to shut
-down in between. This is useful when you need to upgrade the code on your
-buildmaster or buildslave and want to take it down for a minimum amount of
-time.
-
-*** build properties
-
-Each build now has a set of named "Build Properties", which can be set by
-steps and interpolated into ShellCommands. The 'revision' and 'got_revision'
-properties are the most interesting ones available at this point, and can be
-used e.g. to get the VC revision number into the filename of a generated
-tarball. See the user's manual section entitled "Build Properties" for more
-details.
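-
-As an illustration only (the tarball name is invented, and the snippet
-assumes the WithProperties interpolation helper described in that manual
-section, plus a BuildFactory 'f' defined earlier in master.cfg), a step
-that names a tarball after the checked-out revision might look like:
-
-  from buildbot.process.step import ShellCommand, WithProperties
-
-  # %s is replaced with the build's 'got_revision' property at run time
-  f.addStep(ShellCommand,
-            command=["tar", "czf",
-                     WithProperties("myproject-%s.tar.gz", "got_revision"),
-                     "build"])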
-
-** minor features
-
-*** IRC now takes password= argument
-
-Useful for letting your bot claim a persistent identity.
-
-*** svn_buildbot.py is easier to modify to understand branches
-*** BuildFactory has a new .addStep method
-*** p4poller has new arguments
-*** new contrib scripts: viewcvspoll, svnpoller, svn_watcher
-
-These poll an external VC repository to watch for changes, as opposed to
-adding a hook script to the repository that pushes changes into the
-buildmaster. This means higher latency but may be easier to configure,
-especially if you do not have authority on the repository host.
-
-*** VC build property 'got_revision'
-
-The 'got_revision' property reports what revision a VC step actually
-acquired, which may be useful to know when building from HEAD.
-
-*** improved CSS in Waterfall
-
-The Waterfall display has a few new class= tags, which may make it easier to
-write custom CSS to make it look prettier.
-
-*** robots_txt= argument in Waterfall
-
-You can now pass a filename to the robots_txt= argument, which will be served
-as the "robots.txt" file. This can be used to discourage search engine
-spiders from crawling through the numerous build-status pages.
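-
-For instance (the port number and file location are arbitrary; only the
-robots_txt= argument itself comes from this release):
-
-  from buildbot.status import html
-
-  c['status'] = [html.Waterfall(http_port=8010,
-                                robots_txt="/home/buildbot/robots.txt")]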
-
-** bugfixes
-
-*** tests more likely to pass on non-English systems
-
-The unit test suite now sets $LANG='C' to make subcommands emit error
-messages in English instead of whatever native language is in use on the
-host. This improves the chances that the unit tests will pass on such
-systems. This affects certain VC-related subcommands too.
-
-test_vc was assuming that the system time was expressed with a numeric
-timezone, which is not always the case, especially under windows. This
-probably works better now than it did before. This only affects the CVS
-tests.
-
-'buildbot try' (for CVS) now uses UTC instead of the local timezone. The
-'got_revision' property is also expressed in UTC. Both should help deal with
-buggy versions of CVS that don't parse numeric timezones properly.
-
-
-* Release 0.7.2 (17 Feb 2006)
-
-** new features
-
-*** all TCP port numbers in config file now accept a strports string
-
-Sometimes it is useful to restrict certain TCP ports that the buildmaster
-listens on to use specific network interfaces. In particular, if the
-buildmaster and SVN repository live on the same machine, you may want to
-restrict the PBChangeSource to only listen on the loopback interface,
-ensuring that no external entities can inject Changes into the buildbot.
-Likewise, if you are using something like Apache's reverse-proxy feature to
-provide access to the buildmaster's HTML status page, you might want to hide
-the real Waterfall port by having it only bind to the loopback interface.
-
-To accomplish this, use a string like "tcp:12345:interface=127.0.0.1" instead
-of a number like 12345. These strings are called "strports specification
-strings", and are documented in twisted's twisted.application.strports module
-(you can probably type 'pydoc twisted.application.strports' to see this
-documentation). Pretty much everywhere the buildbot takes a port number will
-now accept a strports spec, and any bare numbers are translated into TCP port
-numbers (listening on all network interfaces) for compatibility.
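-
-For example (a minimal master.cfg fragment; the port numbers are arbitrary),
-restricting both the slave port and the Waterfall to the loopback interface
-could look like:
-
-  from buildbot.status import html
-
-  # both values would previously have been bare integers like 9989 / 8010
-  c['slavePortnum'] = "tcp:9989:interface=127.0.0.1"
-  c['status'] = [html.Waterfall(http_port="tcp:8010:interface=127.0.0.1")]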
-
-*** buildslave --umask control
-
-Twisted's daemonization utility (/usr/bin/twistd) automatically sets the
-umask to 077, which means that all files generated by both the buildmaster
-and the buildslave will only be readable by the account under which the
-respective daemon is running. This makes it unnecessarily difficult to share
-build products (e.g. by symlinking ~/public_html/current_docs/ to a directory
-within the slave's build directory where each build puts the results of a
-"make docs" step).
-
-The 'buildbot slave <PARAMS>' command now accepts a --umask argument, which
-can be used to override the umask set by twistd. If you create the buildslave
-with '--umask=022', then all build products will be world-readable, making it
-easier for other processes (run under other accounts) to access them.
-
-** bug fixes
-
-The 0.7.1 release had a bug whereby reloading the config file could break all
-configured Schedulers, causing them to raise an exception when new changes
-arrived but not actually schedule a new build. This has been fixed.
-
-Fixed a bug which caused the AnyBranchScheduler to explode when branch==None.
-Thanks to Kevin Turner for the catch. I also think I fixed a bug whereby the
-TryScheduler would explode when it was given a Change (which it is supposed
-to simply ignore).
-
-The Waterfall display now does more quoting of names (including Builder
-names, BuildStep names, etc), so it is more likely that these names can
-contain unusual characters like spaces, quotes, and slashes. There may still
-be some problems with these kinds of names, however.. please report any bugs
-to the mailing list.
-
-
-* Release 0.7.1 (26 Nov 2005)
-
-** new features
-
-*** scheduler.Nightly
-
-Dobes Vandermeer contributed a cron-style 'Nightly' scheduler. Unlike the
-more-primitive Periodic class (which only lets you specify the duration
-between build attempts), Nightly lets you schedule builds for specific times
-of day, week, month, or year. The interface is very much like the crontab(5)
-file. See the buildbot.scheduler.Nightly docstring for complete details.
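-
-A short sketch (the scheduler and builder names are invented; the docstring
-remains the authoritative reference for the argument list): requesting a
-build of the 'full-linux' builder every day at 03:00 could look like:
-
-  from buildbot.scheduler import Nightly
-
-  c['schedulers'] = [
-      Nightly(name='nightly', builderNames=['full-linux'], hour=3, minute=0),
-  ]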
-
-** minor new features
-
-*** step.Trial can work with Trial from Twisted >2.1.0
-
-The 'Trial' step now accepts the trialMode= argument, which should be a list
-of strings to be added to trial's argv array. This defaults to ["-to"], which
-is appropriate for the Trial that ships in Twisted-2.1.0 and earlier, and
-tells Trial to emit non-colorized verbose output. To use this step with
-trials from later versions of Twisted, this should be changed to
-["--reporter=bwverbose"].
-
-In addition, you can now set other Trial command-line parameters through the
-trialArgs= argument. This is a list of strings, and defaults to an empty list.
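-
-For instance (an illustrative step specification only: the test module name
-is made up, the import location of the Trial step may differ by release, and
-the s()/BuildFactory helpers are the usual factory idiom of this era):
-
-  from buildbot.process import step_twisted
-  from buildbot.process.factory import BuildFactory, s
-
-  f = BuildFactory([
-      # run trial from a post-2.1.0 Twisted with verbose, non-colorized output
-      s(step_twisted.Trial,
-        trialMode=["--reporter=bwverbose"],
-        trialArgs=[],
-        tests=["myproject.test"]),
-  ])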
-
-*** Added a 'resubmit this build' button to the web page
-
-*** Make the VC-checkout step's description more useful
-
-Added the word "[branch]" to the VC step's description (used in the Step's
-box on the Waterfall page, among others) when we're checking out a
-non-default branch. Also add "rNNN" where appropriate to indicate which
-revision is being checked out. Thanks to Brad Hards and Nathaniel Smith for
-the suggestion.
-
-** bugs fixed
-
-Several patches from Dobes Vandermeer: Escape the URLs in email, in case they
-have spaces and such. Fill otherwise-empty <td> elements, as a workaround for
-buggy browsers that might optimize them away. Also use binary mode when
-opening status pickle files, to make windows work better. The
-AnyBranchScheduler now works even when you don't provide a fileIsImportant=
-argument.
-
-Stringify the base revision before stuffing it into a 'try' jobfile, helping
-SVN and Arch implement 'try' builds better. Thanks to Steven Walter for the
-patch.
-
-Fix the compare_attrs list in PBChangeSource, FreshCVSSource, and Waterfall.
-Before this, certain changes to these objects in the master.cfg file were
-ignored, such that you would have to stop and re-start the buildmaster to
-make them take effect.
-
-The config file is now loaded serially, shutting down old (or replaced)
-Status/ChangeSource plugins before starting new ones. This fixes a bug in
-which changing an aspect of, say, the Waterfall display would cause an
-exception as both old and new instances fight over the same TCP port. This
-should also fix a bug whereby new Periodic Schedulers could fire a build
-before the Builders have finished being added.
-
-There was a bug in the way Locks were handled when the config file was
-reloaded: changing one Builder (but not the others) and reloading master.cfg
-would result in multiple instances of the same Lock object, so the Locks
-would fail to prevent simultaneous execution of Builds or Steps. This has
-been fixed.
-
-** other changes
-
-For a long time, certain StatusReceiver methods (like buildStarted and
-stepStarted) have been able to return another StatusReceiver instance
-(usually 'self') to indicate that they wish to subscribe to events within the
-new object. For example, if the buildStarted() method returns 'self', the
-status receiver will also receive events for the new build, like
-stepStarted() and buildETAUpdate(). Returning a 'self' from buildStarted() is
-equivalent to calling build.subscribe(self).
-
-Starting with buildbot-0.7.1, this auto-subscribe convenience will also
-register to automatically unsubscribe the target when the build or step has
-finished, just as if build.unsubscribe(self) had been called. Also, the
-unsubscribe() method has been changed to not explode if the same receiver is
-unsubscribed multiple times. (note that it will still explode if the same
-receiver is *subscribed* multiple times, so please continue to refrain from
-doing that).
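-
-A sketch of the convention being described (the method names follow the
-IStatusReceiver interface; the class itself is illustrative, and a real
-status target would subclass the appropriate base class from
-buildbot.status):
-
-  class MyWatcher:
-      def buildStarted(self, builderName, build):
-          # returning self is equivalent to build.subscribe(self); from
-          # 0.7.1 on, the matching unsubscribe happens automatically when
-          # the build finishes.
-          return self
-      def stepStarted(self, build, step):
-          return None   # don't subscribe to events within this step
-      def buildFinished(self, builderName, build, results):
-          pass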
-
-
-* Release 0.7.0 (24 Oct 2005)
-
-** new features
-
-*** new c['schedulers'] config-file element (REQUIRED)
-
-The code which decides exactly *when* a build is performed has been massively
-refactored, enabling much more flexible build scheduling. YOU MUST UPDATE
-your master.cfg files to match: in general this will merely require you to
-add an appropriate c['schedulers'] entry. Any old ".treeStableTime" settings
-on the BuildFactory instances will now be ignored. The user's manual has
-complete details with examples of how the new Scheduler classes work.
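-
-A minimal example of the new entry (the builder name and timings are
-invented):
-
-  from buildbot.scheduler import Scheduler
-
-  c['schedulers'] = [
-      Scheduler(name="all", branch=None, treeStableTimer=2*60,
-                builderNames=["full-linux"]),
-  ]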
-
-*** c['interlocks'] removed, Locks and Dependencies now separate items
-
-The c['interlocks'] config element has been removed, and its functionality
-replaced with two separate objects. Locks are used to tell the buildmaster
-that certain Steps or Builds should not run at the same time as other Steps
-or Builds (useful for test suites that require exclusive access to some
-external resource: of course the real fix is to fix the tests, because
-otherwise your developers will be suffering from the same limitations). The
-Lock object is created in the config file and then referenced by a Step
-specification tuple or by the 'locks' key of the Builder specification
-dictionary. Locks come in two flavors: MasterLocks are buildmaster-wide,
-while SlaveLocks are specific to a single buildslave.
-
-When you want to have one Build run or not run depending upon whether some
-other set of Builds have passed or failed, you use a special kind of
-Scheduler defined in the scheduler.Dependent class. This scheduler watches an
-upstream Scheduler for builds of a given source version to complete, and only
-fires off its own Builders when all of the upstream's Builders have built
-that version successfully.
-
-Both features are fully documented in the user's manual.
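-
-A rough sketch of both pieces follows; the builder dictionary, the 'quick'
-upstream Scheduler (as in the sketch above), and the factory 'f' are
-placeholders:
-
- from buildbot import locks, scheduler
- db_lock = locks.MasterLock("database")
- b1 = {'name': "full-linux", 'slavename': "bot1",
-       'builddir': "full-linux", 'factory': f,
-       'locks': [db_lock]}
- nightly = scheduler.Dependent(name="nightly",
-                               upstream=quick,
-                               builderNames=["package-deb"])
- c['schedulers'].append(nightly)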
-
-*** 'buildbot try'
-
-The 'try' feature has finally been added. There is some configuration
-involved, both in the buildmaster config and on the developer's side, but
-once in place this allows the developer to type 'buildbot try' in their
-locally-modified tree and to be given a report of what would happen if their
-changes were to be committed. This works by computing a (base revision,
-patch) tuple that describes the developer's tree, sending that to the
-buildmaster, then running a build with that source on a given set of
-Builders. The 'buildbot try' tool then emits status messages until the builds
-have finished.
-
-'try' exists to allow developers to run cross-platform tests on their code
-before committing it, reducing the chances they will inconvenience other
-developers by breaking the build. The UI is still clunky, but expect it to
-change and improve over the next few releases.
-
-Instructions for developers who want to use 'try' (and the configuration
-changes necessary to enable its use) are in the user's manual.
-
-*** Build-On-Branch
-
-When suitably configured, the buildbot can be used to build trees from a
-variety of related branches. You can set up Schedulers to build a tree using
-whichever branch was last changed, or users can request builds of specific
-branches through IRC, the web page, or (eventually) the CLI 'buildbot force'
-subcommand.
-
-The IRC 'force' command now takes --branch and --revision arguments (not that
-they always make sense). Likewise the HTML 'force build' button now has an
-input field for branch and revision. Your build's source-checkout step must
-be suitably configured to support this: for SVN it involves giving both a
-base URL and a default branch. Other VC systems are configured differently.
-The ChangeSource must also provide branch information: the 'buildbot
-sendchange' command now takes a --branch argument to help hook script writers
-accomplish this.
-
-*** Multiple slaves per Builder
-
-You can now attach multiple buildslaves to each Builder. This can provide
-redundancy or primitive load-balancing among many machines equally capable of
-running the build. To use this, define a key in the Builder specification
-dictionary named 'slavenames' with a list of buildslave names (instead of the
-usual 'slavename' that contains just a single slavename).
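-
-For instance (a sketch: the slave and builder names and the factory 'f' are
-hypothetical):
-
- b1 = {'name': "full-linux",
-       'slavenames': ["bot1", "bot2"],  # any of these slaves may run the build
-       'builddir': "full-linux",
-       'factory': f}
- c['builders'] = [b1]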
-
-*** minor new features
-
-The IRC and email status-reporting facilities now provide more specific URLs
-for particular builds, in addition to the generic buildmaster home page. The
-HTML per-build page now has more information.
-
-The Twisted-specific test classes have been modified to match the argument
-syntax preferred by Trial as of Twisted-2.1.0 and newer. The generic trial
-steps are still suitable for the Trial that comes with older versions of
-Twisted, but may produce deprecation warnings or errors when used with the
-latest Trial.
-
-** bugs fixed
-
-DNotify, used by the maildir-watching ChangeSources, had problems on some
-64-bit systems relating to signed-vs-unsigned constants and the DN_MULTISHOT
-flag. A workaround was provided by Brad Hards.
-
-The web status page should now be valid XHTML, thanks to a patch by Brad
-Hards. The charset parameter is specified to be UTF-8, so VC comments,
-builder names, etc, should probably all be in UTF-8 to be displayed properly.
-
-** creeping version dependencies
-
-The IRC 'force build' command now requires python2.3 (for the shlex.split
-function).
-
-
-* Release 0.6.6 (23 May 2005)
-
-** bugs fixed
-
-The 'sendchange', 'stop', and 'sighup' subcommands were broken, simple bugs
-that were not caught by the test suite. Sorry.
-
-The 'buildbot master' command now uses "raw" strings to create .tac files
-that will still function under windows (since we must put directory names
-that contain backslashes into that file).
-
-The keep-on-disk behavior added in 0.6.5 included the ability to upgrade old
-in-pickle LogFile instances. This upgrade function was not added to the
-HTMLLogFile class, so an exception would be raised when attempting to load or
-display any build with one of these logs (which are normally used only for
-showing build exceptions). This has been fixed.
-
-Several unnecessary imports were removed, so the Buildbot should function
-normally with just Twisted-2.0.0's "Core" module installed. (of course you
-will need TwistedWeb, TwistedWords, and/or TwistedMail if you use status
-targets that require them). The test suite should skip all tests that cannot
-be run because of missing Twisted modules.
-
-The master/slave's basedir is now prepended to sys.path before starting the
-daemon. This used to happen implicitly (as a result of twistd's setup
-preamble), but 0.6.5 internalized the invocation of twistd and did not copy
-this behavior. This change restores the ability to access "private.py"-style
-modules in the basedir from the master.cfg file with a simple "import
-private" statement. Thanks to Thomas Vander Stichele for the catch.
-
-
-* Release 0.6.5 (18 May 2005)
-
-** deprecated config keys removed
-
-The 'webPortnum', 'webPathname', 'irc', and 'manholePort' config-file keys,
-which were deprecated in the previous release, have now been removed. In
-addition, Builders must now always be configured with dictionaries: the
-support for configuring them with tuples has been removed.
-
-** master/slave creation and startup changed
-
-The buildbot no longer uses .tap files to store serialized representations of
-the buildmaster/buildslave applications. Instead, this release now uses .tac
-files, which are human-readable scripts that create new instances (rather
-than .tap files, which were pickles of pre-created instances). 'mktap
-buildbot' is gone.
-
-You will need to update your buildbot directories to handle this. The
-procedure is the same as creating a new buildmaster or buildslave: use
-'buildbot master BASEDIR' or 'buildbot slave BASEDIR ARGS..'. This will
-create a 'buildbot.tac' file in the target directory. The 'buildbot start
-BASEDIR' will use twistd to start the application.
-
-The 'buildbot start' command now looks for a Makefile.buildbot, and if it
-finds one (and /usr/bin/make exists), it will use it to start the application
-instead of calling twistd directly. This allows you to customize startup,
-perhaps by adding environment variables. The setup commands create a sample
-file in Makefile.sample, but you must copy this to Makefile.buildbot to
-actually use it. The previous release looked for a bare 'Makefile', and also
-installed a 'Makefile', so you were always using the customized approach,
-even if you didn't ask for it. That old Makefile launched the .tap file, so
-changing names was also necessary to make sure that the new 'buildbot start'
-doesn't try to run the old .tap file.
-
-'buildbot stop' now uses os.kill instead of spawning an external process,
-making it more likely to work under windows. It waits up to 5 seconds for the
-daemon to go away, so you can now do 'buildbot stop BASEDIR; buildbot start
-BASEDIR' with less risk of launching the new daemon before the old one has
-fully shut down. Likewise, 'buildbot start' imports twistd's internals
-directly instead of spawning an external copy, so it should work better under
-windows.
-
-** new documentation
-
-All of the old Lore-based documents were converted into a new Texinfo-format
-manual, and considerable new text was added to describe the installation
-process. The docs are not yet complete, but they're slowly shaping up to form
-a proper user's manual.
-
-** new features
-
-Arch checkouts can now use precise revision stamps instead of always using
-the latest revision. A separate Source step for using Bazaar (an alternative
-Arch client) instead of 'tla' was added. A Source step for Cogito (the new
-linux kernel VC system) was contributed by Brandon Philips. All Source steps
-now accept a retry= argument to indicate that failing VC checkouts should be
-retried a few times (SF#1200395); note that this requires an updated
-buildslave.
-
-The 'buildbot sendchange' command was added, to be used in VC hook scripts to
-send changes to a pb.PBChangeSource . contrib/arch_buildbot.py was added to
-use this tool; it should be installed using the 'Arch meta hook' scheme.
-
-Changes can now accept a branch= parameter, and Builders have an
-isBranchImportant() test that acts like isFileImportant(). Thanks to Thomas
-Vander Stichele. Note: I renamed his tag= to branch=, in anticipation of an
-upcoming feature to build specific branches. "tag" seemed too CVS-centric.
-
-LogFiles have been rewritten to stream the incoming data directly to disk
-rather than keeping a copy in memory all the time (SF#1200392). This
-drastically reduces the buildmaster's memory requirements and makes 100MB+
-log files feasible. The log files are stored next to the serialized Builds,
-in files like BASEDIR/builder-dir/12-log-compile-output, so you'll want a
-cron job to delete old ones just like you do with old Builds. Old-style
-Builds from 0.6.4 and earlier are converted when they are first read, so the
-first load of the Waterfall display after updating to this release may take
-quite some time.
-
-** build process updates
-
-BuildSteps can now return a status of EXCEPTION, which terminates the build
-right away. This allows exceptions to be caught right away, but still make
-sure the build stops quickly.
-
-** bug fixes
-
-Some more windows incompatibilities were fixed. The test suite now has two
-failing tests remaining, both of which appear to be Twisted issues that
-should not affect normal operation.
-
-The test suite no longer raises any deprecation warnings when run against
-twisted-2.0 (except for the ones which come from Twisted itself).
-
-
-* Release 0.6.4 (28 Apr 2005)
-
-** major bugs fixed
-
-The 'buildbot' tool in 0.6.3, when used to create a new buildmaster, failed
-unless it found a 'changes.pck' file. As this file is created by a running
-buildmaster, this made 0.6.3 completely unusable for first-time
-installations. This has been fixed.
-
-** minor bugs fixed
-
-The IRC bot had a bug wherein asking it to watch a certain builder (the "I'll
-give a shout when the build finishes" message) would cause an exception, so
-it would not, in fact, shout. The HTML page had an exception in the "change
-sources" page (reached by following the "Changes" link at the top of the
-column that shows the names of committers). Re-loading the config file while
-builders were already attached would result in a benign error message. The
-server side of the PBListener status client had an exception when providing
-information about a non-existent Build (e.g., when the client asks for the
-Build that is currently running, and the server says "None").
-
-These bugs have all been fixed.
-
-The unit tests now pass under python2.2; they were failing before because of
-some 2.3isms that crept in. More unit tests which failed under windows now
-pass; only one (test_webPathname_port) is still failing.
-
-** 'buildbot' tool looks for a .buildbot/options file
-
-The 'statusgui' and the 'debugclient' subcommands can both look for a
-.buildbot/ directory, and an 'options' file therein, to extract default
-values for the location of the buildmaster. This directory is searched in the
-current directory, its parent, etc, all the way up to the filesystem root
-(assuming you own the directories in question). It also looks in ~/.buildbot/
-for this file. This feature allows you to put a .buildbot at the top of your
-working tree, telling any 'buildbot' invocations you perform therein how to
-get to the buildmaster associated with that tree's project.
-
-Windows users get something similar, using %APPDATA%/buildbot instead of
-~/.buildbot .
-
-** windows ShellCommands are launched with 'cmd.exe'
-
-The buildslave has been modified to run all list-based ShellCommands by
-prepending [os.environ['COMSPEC'], '/c'] to the argv list before execution.
-This should allow the buildslave's PATH to be searched for commands,
-improving the chances that it can run the same 'trial -o foo' commands as a
-unix buildslave. The potential downside is that spaces in argv elements might
-be re-parsed, or quotes might be re-interpreted. The consensus on the mailing
-list was that this is a useful thing to do, but please report any problems
-you encounter with it.
-
-** minor features
-
-The Waterfall display now shows the buildbot's home timezone at the top of
-the timestamp column. The default favicon.ico is now much nicer-looking (it
-is generated with Blender.. the icon.blend file is available in CVS in
-docs/images/ should you care to play with it).
-
-
-
-* Release 0.6.3 (25 Apr 2005)
-
-** 'buildbot' tool gets more uses
-
-The 'buildbot' executable has acquired three new subcommands. 'buildbot
-debugclient' brings up the small remote-control panel that connects to a
-buildmaster (via the slave port and the c['debugPassword']). This tool,
-formerly in contrib/debugclient.py, lets you reload the config file, force
-builds, and simulate inbound commit messages. It requires gtk2, glade, and
-the python bindings for both to be installed.
-
-'buildbot statusgui' brings up a live status client, formerly available by
-running buildbot/clients/gtkPanes.py as a program. This connects to the PB
-status port that you create with:
-
- c['status'].append(client.PBListener(portnum))
-
-and shows two boxes per Builder, one for the last build, one for current
-activity. These boxes are updated in realtime. The effect is primitive, but
-is intended as an example of what's possible with the PB status interface.
-
-'buildbot statuslog' provides a text-based running log of buildmaster events.
-
-Note: command names are subject to change. These should get much more useful
-over time.
-
-** web page has a favicon
-
-When constructing the html.Waterfall instance, you can provide the filename
-of an image that will be provided when the "favicon.ico" resource is
-requested. Many web browsers display this as an icon next to the URL or
-bookmark. A goofy little default icon is included.
-
-** web page has CSS
-
-Thanks to Thomas Vander Stichele, the Waterfall page is now themable through
-CSS. The default CSS is located in buildbot/status/classic.css, and creates a
-page that is mostly identical to the old, non-CSS based table.
-
-You can specify a different CSS file to use by passing it as the css=
-argument to html.Waterfall(). See the docstring for Waterfall for some more
-details.
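-
-For example (the stylesheet filename is a placeholder):
-
- from buildbot.status import html
- c['status'].append(html.Waterfall(http_port=8010, css="mybuildbot.css"))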
-
-** builder "categories"
-
-Thomas has added code which places each Builder in an optional "category".
-The various status targets (Waterfall, IRC, MailNotifier) can accept a list
-of categories, and they will ignore any activity in builders outside this
-list. This makes it easy to create some Builders which are "experimental" or
-otherwise not yet ready for the world to see, or indicate that certain
-builders should not harass developers when their tests fail, perhaps because
-the build slaves for them are not yet fully functional.
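-
-A sketch of how this fits together; the 'category' builder key and the
-categories= argument are assumptions about the exact spelling, and the
-names used here are made up:
-
- b_exp = {'name': "experimental-gtk", 'slavename': "bot1",
-          'builddir': "exp-gtk", 'factory': f,
-          'category': "experimental"}
- c['status'].append(html.Waterfall(http_port=8010,
-                                   categories=["stable"]))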
-
-** Deprecated features
-
-*** defining Builders with tuples is deprecated
-
-For a long time, the preferred way to define builders in the config file has
-been with a dictionary. The less-flexible old style of a 4-item tuple (name,
-slavename, builddir, factory) is now officially deprecated (i.e., it will
-emit a warning if you use it), and will be removed in the next release.
-Dictionaries are more flexible: additional keys like periodicBuildTime are
-simply unavailable to tuple-defined builders.
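-
-For example, a builder that used to be defined as a tuple would now be
-written as a dictionary (the names and the factory 'f' are hypothetical):
-
- # old, deprecated tuple form:
- #   ("full-linux", "bot1", "full-linux", f)
- # preferred dictionary form:
- c['builders'] = [{'name': "full-linux", 'slavename': "bot1",
-                   'builddir': "full-linux", 'factory': f}]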
-
-Note: it is a good idea to watch the logfile (usually in twistd.log) when you
-first start the buildmaster, or whenever you reload the config file. Any
-warnings or errors in the config file will be found there.
-
-*** c['webPortnum'], c['webPathname'], c['irc'] are deprecated
-
-All status reporters should be defined in the c['status'] array, using
-buildbot.status.html.Waterfall or buildbot.status.words.IRC . These have been
-deprecated for a while, but this is fair warning that these keys will be
-removed in the next release.
-
-*** c['manholePort'] is deprecated
-
-Again, this has been deprecated for a while, in favor of:
-
- c['manhole'] = master.Manhole(port, username, password)
-
-The preferred syntax will eventually let us use other, better kinds of debug
-shells, such as the experimental curses-based ones in the Twisted sandbox
-(which would offer command-line editing and history).
-
-** bug fixes
-
-The waterfall page has been improved a bit. A circular-reference bug in the
-web page's TextLog class was fixed, which caused a major memory leak in a
-long-running buildmaster with large logfiles that are viewed frequently.
-Modifying the config file in a way which only changed a builder's base
-directory now works correctly. The 'buildbot' command tries to create
-slightly more useful master/slave directories, adding a Makefile entry to
-re-create the .tap file, and removing global-read permissions from the files
-that may contain buildslave passwords.
-
-** twisted-2.0.0 compatibility
-
-Both buildmaster and buildslave should run properly under Twisted-2.0 . There
-are still some warnings about deprecated functions, some of which could be
-fixed, but there are others that would require removing compatibility with
-Twisted-1.3, and I don't expect to do that until 2.0 has been out and stable
-for at least several months. The unit tests should pass under 2.0, whereas
-the previous buildbot release had tests which could hang when run against the
-new "trial" framework in 2.0.
-
-The Twisted-specific steps (including Trial) have been updated to match 2.0
-functionality.
-
-** win32 compatibility
-
-Thanks to Nick Trout, more compatibility fixes have been incorporated,
-improving the chances that the unit tests will pass on windows systems. There
-are still some problems, and a step-by-step "running buildslaves on windows"
-document would be greatly appreciated.
-
-** API docs
-
-Thanks to Thomas Vander Stichele, most of the docstrings have been converted
-to epydoc format. There is a utility in docs/gen-reference to turn these into
-a tree of cross-referenced HTML pages. Eventually these docs will be
-auto-generated and somehow published on the buildbot web page.
-
-
-
-* Release 0.6.2 (13 Dec 2004)
-
-** new features
-
-It is now possible to interrupt a running build. Both the web page and the
-IRC bot feature 'stop build' commands, which can be used to interrupt the
-current BuildStep and accelerate the termination of the overall Build. The
-status reporting for these still leaves something to be desired (an
-'interrupt' event is pushed into the column, and the reason for the interrupt
-is added to a pseudo-logfile for the step that was stopped, but if you only
-look at the top-level status it appears that the build failed on its own).
-
-Builds are also halted if the connection to the buildslave is lost. On the
-slave side, any active commands are halted if the connection to the
-buildmaster is lost.
-
-** minor new features
-
-The IRC log bot now reports ETA times in a MMSS format like "2m45s" instead
-of the clunky "165 seconds".
-
-** bug fixes
-
-*** Slave Disconnect
-
-Slave disconnects should be handled better now: the current build should be
-abandoned properly. Earlier versions could get into weird states where the
-build failed to finish, clogging the builder forever (or at least until the
-buildmaster was restarted).
-
-In addition, there are weird network conditions which could cause a
-buildslave to attempt to connect twice to the same buildmaster. This can
-happen when the slave is sending large logfiles over a slow link, while using
-short keepalive timeouts. The buildmaster has been fixed to allow the second
-connection attempt to take precedence over the first, so that the older
-connection is jettisoned to make way for the newer one.
-
-In addition, the buildslave has been fixed to be less twitchy about timeouts.
-There are now two parameters: keepaliveInterval (which is controlled by the
-mktap 'keepalive' argument), and keepaliveTimeout (which requires editing the
-.py source to change from the default of 30 seconds). The slave expects to
-see *something* from the master at least once every keepaliveInterval
-seconds, and will try to provoke a response (by sending a keepalive request)
-'keepaliveTimeout' seconds before the end of this interval just in case there
-was no regular traffic. Any kind of traffic will qualify, including
-acknowledgements of normal build-status updates.
-
-The net result is that, as long as any given PB message can be sent over the
-wire in less than 'keepaliveTimeout' seconds, the slave should not mistakenly
-disconnect because of a timeout. There will be traffic on the wire at least
-every 'keepaliveInterval' seconds, which is what you want to pay attention to
-if you're trying to keep an intervening NAT box from dropping what it thinks
-is an abandoned connection. A quiet loss of connection will be detected
-within 'keepaliveInterval' seconds.
-
-*** Large Logfiles
-
-The web page rendering code has been fixed to deliver large logfiles in
-pieces, using a producer/consumer apparatus. This avoids the large spike in
-memory consumption when the log file body was linearized into a single string
-and then buffered in the socket's application-side transmit buffer. This
-should also avoid the 640k single-string limit for web.distrib servers that
-could be hit by large (>640k) logfiles.
-
-
-
-* Release 0.6.1 (23 Nov 2004)
-
-** win32 improvements/bugfixes
-
-Several changes have gone in to improve portability to non-unix systems. It
-should be possible to run a build slave under windows without major issues
-(although step-by-step documentation is still greatly desired: check the
-mailing list for suggestions from current win32 users).
-
-*** PBChangeSource: use configurable directory separator, not os.sep
-
-The PBChangeSource, which listens on a TCP socket for change notices
-delivered from tools like contrib/svn_buildbot.py, was splitting source
-filenames with os.sep . This is inappropriate, because those file names are
-coming from the VC repository, not the local filesystem, and the repository
-host may be running a different OS (with a different separator convention)
-than the buildmaster host. In particular, a win32 buildmaster using a CVS
-repository running on a unix box would be confused.
-
-PBChangeSource now takes a sep= argument to indicate the separator character
-to use.
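-
-For example (a sketch; the other constructor arguments are left at their
-defaults):
-
- from buildbot.changes.pb import PBChangeSource
- c['sources'] = [PBChangeSource(sep="/")]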
-
-*** build saving should work better
-
-windows cannot do the atomic os.rename() trick that unix can, so under win32
-the buildmaster falls back to save/delete-old/rename, which carries a slight
-risk of losing a saved build log (if the system were to crash between the
-delete-old and the rename).
-
-** new features
-
-*** test-result tracking
-
-Work has begun on fine-grained test-result handling. The eventual goal is to
-be able to track individual tests over time, and create problem reports when
-a test starts failing (which then are resolved when the test starts passing
-again). The first step towards this is an ITestResult interface, and code in
-the TrialTestParser to create such results for all non-passing tests (the
-ones for which Trial emits exception tracebacks).
-
-These test results are currently displayed in a tree-like display in a page
-accessible from each Build's page (follow the numbered link in the yellow
-box at the start of each build to get there).
-
-This interface is still in flux, as it really wants to be able to accommodate
-things like compiler warnings and tests that are skipped because of missing
-libraries or unsupported architectures.
-
-** bug fixes
-
-*** VC updates should survive temporary failures
-
-Some VC systems (CVS and SVN in particular) get upset when files are turned
-into directories or vice versa, or when repository items are moved without
-the knowledge of the VC system. The usual symptom is that a 'cvs update'
-fails where a fresh checkout succeeds.
-
-To avoid having to manually intervene, the build slaves' VC commands have
-been refactored to respond to update failures by deleting the tree and
-attempting a full checkout. This may cause some unnecessary effort when,
-e.g., the CVS server falls off the net, but in the normal case it will only
-come into play when one of these can't-cope situations arises.
-
-*** forget about an existing build when the slave detaches
-
-If the slave was lost during a build, the master did not clear the
-.currentBuild reference, making that builder unavailable for later builds.
-This has been fixed, so that losing a slave should be handled better. This
-area still needs some work; I think it's still possible to get both the
-slave and the master wedged by breaking the connection at just the right
-time. Eventually I want to be able to resume interrupted builds (especially
-when the interruption is the result of a network failure and not because the
-slave or the master actually died).
-
-*** large logfiles now consume less memory
-
-Build logs are stored as lists of (type,text) chunks, so that
-stdout/stderr/headers can be displayed differently (if they were
-distinguishable when they were generated: stdout and stderr are merged when
-usePTY=1). For multi-megabyte logfiles, a large list with many short strings
-could incur a large overhead. The new behavior is to merge same-type string
-chunks together as they are received, aiming for a chunk size of about 10kb,
-which should bring the overhead down to a more reasonable level.
-
-There remains an issue with actually delivering large logfiles over, say,
-the HTML interface. The string chunks must be merged together into a single
-string before delivery, which causes a spike in the memory usage when the
-logfile is viewed. This can also break twisted.web.distrib -type servers,
-where the underlying PB protocol imposes a 640k limit on the size of
-strings. This will be fixed (with a proper Producer/Consumer scheme) in the
-next release.
-
-
-* Release 0.6.0 (30 Sep 2004)
-
-** new features
-
-*** /usr/bin/buildbot control tool
-
-There is now an executable named 'buildbot'. For now, this just provides a
-convenient front-end to mktap/twistd/kill, but eventually it will provide
-access to other client functionality (like the 'try' builds, and a status
-client). Assuming you put your buildbots in /var/lib/buildbot/master/FOO,
-you can do 'buildbot create-master /var/lib/buildbot/master/FOO' and it will
-create the .tap file and set up a sample master.cfg for you. Later,
-'buildbot start /var/lib/buildbot/master/FOO' will start the daemon.
-
-
-*** build status now saved in external files, -shutdown.tap unnecessary
-
-The status rewrite included a change to save all build status in a set of
-external files. These files, one per build, are put in a subdirectory of the
-master's basedir (named according to the 'builddir' parameter of the Builder
-configuration dictionary). This helps keep the buildmaster's memory
-consumption small: the (potentially large) build logs are kept on disk
-instead of in RAM. There is a small cache (2 builds per builder) kept in
-memory, but everything else lives on disk.
-
-The big change is that the buildmaster now keeps *all* status in these
-files. It is no longer necessary to preserve the buildbot-shutdown.tap file
-to run a persistent buildmaster. The buildmaster may be launched with
-'twistd -f buildbot.tap' each time; in fact the '-n' option can be added to
-prevent twistd from automatically creating the -shutdown.tap file.
-
-There is still one lingering bug with this change: the Expectations object
-for each builder (which records how long the various steps took, to provide
-an ETA value for the next time) is not yet saved. The result is that the
-first build after a restart will not provide an ETA value.
-
-0.6.0 keeps status in a single file per build, as opposed to 0.5.0 which
-kept status in many subdirectories (one layer for builds, another for steps,
-and a third for logs). 0.6.0 will detect and delete these subdirectories as
-it overwrites them.
-
-The saved builds are optional. To prevent disk usage from growing without
-bounds, you may want to set up a cron job to run 'find' and delete any which
-are too old. The status displays will happily survive without those saved
-build objects.
-
-The set of recorded Changes is kept in a similar file named 'changes.pck'.
-
-
-*** source checkout now uses timestamp/revision
-
-Source checkouts are now performed with an appropriate -D TIMESTAMP (for
-CVS) or -r REVISION (for SVN) marker to obtain the exact sources that were
-specified by the most recent Change going into the current Build. This
-avoids a race condition in which a change might be committed after the build
-has started but before the source checkout has completed, resulting in a
-mismatched set of source files. Such changes are now ignored.
-
-This works by keeping track of repository-wide revision/transaction numbers
-(for version control systems that offer them, like SVN). The checkout or
-update is performed with the highest such revision number. For CVS (which
-does not have them), the timestamp of each commit message is used, and a -D
-argument is created to place the checkout squarely in the middle of the "tree
-stable timer"'s window.
-
-This also provides the infrastructure for the upcoming 'try' feature. All
-source-checkout commands can now obtain a base revision marker and a patch
-from the Build, allowing certain builds to be performed on something other
-than the most recent sources.
-
-See source.xhtml and steps.xhtml for details.
-
-
-*** Darcs and Arch support added
-
-There are now build steps which retrieve a source tree from Darcs and Arch
-repositories. See steps.xhtml for details.
-
-Preliminary P4 support has been added, thanks to code from Dave Peticolas.
-You must manually set up each build slave with an appropriate P4CLIENT: all
-buildbot does is run 'p4 sync' at the appropriate times.
-
-
-*** Status reporting rewritten
-
-Status reporting was completely revamped. The config file now accepts a
-BuildmasterConfig['status'] entry, with a list of objects that perform status
-delivery. The old config file entries which controlled the web status port
-and the IRC bot have been deprecated in favor of adding instances to
-['status']. The following status-delivery classes have been implemented, all
-in the 'buildbot.status' package:
-
- client.PBListener(port, username, passwd)
- html.Waterfall(http_port, distrib_port)
- mail.MailNotifier(fromaddr, mode, extraRecipients..)
- words.IRC(host, nick, channels)
-
-See the individual docstrings for details about how to use each one. You can
-create new status-delivery objects by following the interfaces found in the
-buildbot.interfaces module.
-
-
-*** BuildFactory configuration process changed
-
-The basic BuildFactory class is now defined in buildbot.process.factory
-rather than buildbot.process.base, so you will have to update your config
-files. factory.BuildFactory is the base class, which accepts a list of Steps
-to run. See docs/factories.xhtml for details.
-
-There are now easier-to-use BuildFactory classes for projects which use GNU
-Autoconf, perl's MakeMaker (CPAN), python's distutils (but no unit tests),
-and Twisted's Trial. Each one takes a separate 'source' Step to obtain the
-source tree, and then fills in the rest of the Steps for you.
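-
-A sketch of the new style, using the s() step-specification helper from
-buildbot.process.factory (the SVN URL is made up; check docs/factories.xhtml
-for the exact spelling of the convenience factories' arguments):
-
- from buildbot.process import step, factory
- s = factory.s
- f = factory.BuildFactory([
-     s(step.SVN, svnurl="http://svn.example.org/Trunk/"),
-     s(step.Compile),
-     s(step.Test),
- ])
- # or one of the convenience factories:
- f2 = factory.GNUAutoconf(source=s(step.SVN,
-                                   svnurl="http://svn.example.org/Trunk/"))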
-
-
-*** CVS/SVN VC steps unified, simplified
-
-The confusing collection of arguments for the CVS step ('clobber=',
-'copydir=', and 'export=') have been removed in favor of a single 'mode'
-argument. This argument describes how you want to use the sources: whether
-you want to update and compile everything in the same tree (mode='update'),
-or do a fresh checkout and full build each time (mode='clobber'), or
-something in between.
-
-The SVN (Subversion) step has been unified and accepts the same mode=
-parameter as CVS. New version control steps will obey the same interface.
-
-Most of the old configuration arguments have been removed. You will need to
-update your configuration files to use the new arguments. See
-docs/steps.xhtml for a description of all the new parameters.
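-
-For example, a CVS checkout step under the new scheme might be specified
-like this (the repository details are made up, and the cvsroot/cvsmodule
-keyword names follow the CVS step's usual spelling):
-
- from buildbot.process import step, factory
- s = factory.s
- checkout = s(step.CVS,
-              cvsroot=":pserver:anonymous@cvs.example.org:/cvsroot/foo",
-              cvsmodule="foo",
-              mode="clobber")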
-
-
-*** Preliminary Debian packaging added
-
-Thanks to the contributions of Kirill Lapshin, we can now produce .deb
-installer packages. These are still experimental, but they include init.d
-startup/shutdown scripts, which use the new /usr/bin/buildbot to invoke
-twistd. Create your buildmasters in /var/lib/buildbot/master/FOO, and your
-slaves in /var/lib/buildbot/slave/BAR, then put FOO and BAR in the
-appropriate places in /etc/default/buildbot . After that, the buildmasters
-and slaves will be started at every boot.
-
-Pre-built .debs are not yet distributed. Use 'debuild -uc -us' from the
-source directory to create them.
-
-
-** minor features
-
-
-*** Source Stamps
-
-Each build now has a "source stamp" which describes what sources it used. The
-idea is that the sources for this particular build can be completely
-regenerated from the stamp. The stamp is a tuple of (revision, patch), where
-the revision depends on the VC system being used (for CVS it is either a
-revision tag like "BUILDBOT-0_5_0" or a datestamp like "2004/07/23", for
-Subversion it is a revision number like 11455). This must be combined with
-information from the Builder that is constant across all builds (something to
-point at the repository, and possibly a branch indicator for CVS and other VC
-systems that don't fold this into the repository string).
-
-The patch is an optional unified diff file, ready to be applied by running
-'patch -p0 <PATCH' from inside the workdir. This provides support for the
-'try' feature that will eventually allow developers to run buildbot tests on
-their code before checking it in.
-
-
-*** SIGHUP causes the buildmaster's configuration file to be re-read
-
-*** IRC bot now has 'watch' command
-
-You can now tell the buildbot's IRC bot to 'watch <buildername>' on a builder
-which is currently performing a build. When that build is finished, the
-buildbot will make an announcement (including the results of the build).
-
-The IRC 'force build' command will also announce when the resulting build has
-completed.
-
-
-*** the 'force build' option on HTML and IRC status targets can be disabled
-
-The html.Waterfall display and the words.IRC bot may be constructed with an
-allowForce=False argument, which removes the ability to force a build through
-these interfaces. Future versions will be able to restrict this build-forcing
-capability to authenticated users. The per-builder HTML page no longer
-displays the 'Force Build' buttons if it does not have this ability. Thanks
-to Fred Drake for code and design suggestions.
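-
-For example (the host, nick, and channel are placeholders):
-
- from buildbot.status import html, words
- c['status'].append(html.Waterfall(http_port=8010, allowForce=False))
- c['status'].append(words.IRC("irc.example.org", "bb", ["#myproject"],
-                              allowForce=False))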
-
-
-*** master now takes 'projectName' and 'projectURL' settings
-
-These strings allow the buildbot to describe what project it is working for.
-At the moment they are only displayed on the Waterfall page, but in the next
-release they will be retrievable from the IRC bot as well.
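-
-For example (the values are placeholders):
-
- c['projectName'] = "My Project"
- c['projectURL'] = "http://www.example.org/"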
-
-
-*** survive recent (SVN) Twisted versions
-
-The buildbot should run correctly (albeit with plenty of noisy deprecation
-warnings) under the upcoming Twisted-2.0 release.
-
-
-*** work-in-progress realtime Trial results acquisition
-
-Jonathan Simms (<slyphon>) has been working on 'retrial', a rewrite of
-Twisted's unit test framework that will most likely be available in
-Twisted-2.0 . Although it is not yet complete, the buildbot will be able to
-use retrial in such a way that build status is reported on a per-test basis,
-in real time. This will be the beginning of fine-grained test tracking and
-Problem management, described in docs/users.xhtml .
-
-
-* Release 0.5.0 (22 Jul 2004)
-
-** new features
-
-*** web.distrib servers via TCP
-
-The 'webPathname' config option, which specifies a UNIX socket on which to
-publish the waterfall HTML page (for use by 'mktap web -u' or equivalent),
-now accepts a numeric port number. This publishes the same thing via TCP,
-allowing the parent web server to live on a separate machine.
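-
-For example (a sketch):
-
- c['webPathname'] = 8010   # serve via TCP instead of a UNIX socket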
-
-This config option could be named better, but it will go away altogether in
-a few releases, when status delivery is unified. It will be replaced with a
-WebStatusTarget object, and the config file will simply contain a list of
-various kinds of status targets.
-
-*** 'master.cfg' filename is configurable
-
-The buildmaster can use a config file named something other than
-"master.cfg". Use the --config=foo.cfg option to mktap to control this.
-
-*** FreshCVSSource now uses newcred (CVSToys >= 1.0.10)
-
-The FreshCVSSource class now defaults to speaking to freshcvs daemons from
-modern CVSToys releases. If you need to use the buildbot with a daemon from
-CVSToys-1.0.9 or earlier, use FreshCVSSourceOldcred instead. Note that the
-new form only requires host/port/username/passwd: the "serviceName"
-parameter is no longer meaningful.
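-
-A sketch of the new form, with arguments in the host/port/username/passwd
-order described above (all values are placeholders):
-
- from buildbot.changes.freshcvs import FreshCVSSource
- c['sources'] = [FreshCVSSource("cvs.example.org", 4519,
-                                "username", "password")]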
-
-*** Builders are now configured with a dictionary, not a tuple
-
-The preferred way to set up a Builder in master.cfg is to provide a
-dictionary with various keys, rather than a (non-extensible) 4-tuple. See
-docs/config.xhtml for details. The old tuple-way is still supported for now;
-it will probably be deprecated in the next release and removed altogether in
-the following one.
-
-*** .periodicBuildTime is now exposed to the config file
-
-To set a builder to run at periodic intervals, simply add a
-'periodicBuildTime' key to its master.cfg dictionary. Again, see
-docs/config.xhtml for details.
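-
-For example (a sketch; the names and the factory 'f' are hypothetical, and
-the value is assumed to be in seconds):
-
- b1 = {'name': "nightly-linux", 'slavename': "bot1",
-       'builddir': "nightly-linux", 'factory': f,
-       'periodicBuildTime': 24*60*60}
- c['builders'] = [b1]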
-
-*** svn_buildbot.py adds --include, --exclude
-
-The commit trigger script now gives you more control over which files are
-sent to the buildmaster and which are not.
-
-*** usePTY is controllable at slave mktap time
-
-The buildslaves usually run their child processes in a pty, which creates a
-process group for all the children, which makes it much easier to kill them
-all at once (i.e. if a test hangs). However this causes problems on some
-systems. Rather than hacking slavecommand.py to disable the use of these
-ptys, you can now create the slave's .tap file with --usepty=0 at mktap
-time.
-
-** Twisted changes
-
-A summary of warnings (e.g. DeprecationWarnings) is provided as part of the
-test-case summarizer. The summarizer also counts Skips, expectedFailures,
-and unexpectedSuccesses, displaying the counts on the test step's event box.
-
-The RunUnitTests step now uses "trial -R twisted" instead of "trial
-twisted.test", which is a bit cleaner. All .pyc files are deleted before
-starting trial, to avoid getting tripped up by deleted .py files.
-
-** documentation
-
-docs/config.xhtml now describes the syntax and allowed contents of the
-'master.cfg' configuration file.
-
-** bugfixes
-
-Interlocks had a race condition that could cause the lock to get stuck
-forever.
-
-FreshCVSSource has a prefix= argument that was moderately broken (it used to
-only work if the prefix was a single directory component). It now works with
-subdirectories.
-
-The buildmaster used to complain when it saw the "info" directory in a
-slave's workspace. This directory is used to publish information about the
-slave host and its administrator, and is not a leftover build directory as
-the complaint suggested. This complaint has been silenced.
-
-
-* Release 0.4.3 (30 Apr 2004)
-
-** PBChangeSource made explicit
-
-In 0.4.2 and before, an internal interface was available which allowed
-special clients to inject changes into the Buildmaster. This interface is
-used by the contrib/svn_buildbot.py script. The interface has been extracted
-into a proper PBChangeSource object, which should be created in the
-master.cfg file just like the other kinds of ChangeSources. See
-docs/sources.xhtml for details.
-
-If you were implicitly using this change source (for example, if you use
-Subversion and the svn_buildbot.py script), you *must* add this source to
-your master.cfg file, or changes will not be delivered and no builds will be
-triggered.
-
-The PBChangeSource accepts the same "prefix" argument as all other
-ChangeSources. For a SVN repository that follows the recommended practice of
-using "trunk/" for the trunk revisions, you probably want to construct the
-source like this:
-
- source = PBChangeSource(prefix="trunk")
-
-to make sure that the Builders are given sensible (trunk-relative)
-filenames for each changed source file.
-
-** Twisted changes
-
-*** step_twisted.RunUnitTests can change "bin/trial"
-
-The twisted RunUnitTests step was enhanced to let you run something other
-than "bin/trial", making it easier to use a buildbot on projects which use
-Twisted but aren't actually Twisted itself.
-
-*** Twisted now uses Subversion
-
-Now that Twisted has moved from CVS to SVN, the Twisted build processes have
-been modified to perform source checkouts from the Subversion repository.
-
-** minor feature additions
-
-*** display Changes with HTML
-
-Changes are displayed with a bit more pizazz, and a links= argument was
-added to allow things like ViewCVS links to be added to the display
-(although it is not yet clear how this argument should be used: the
-interface remains subject to change until it has been documented).
-
-*** display ShellCommand logs with HTML
-
-Headers are in blue, stderr is in red (unless usePTY=1 in which case stderr
-and stdout are indistinguishable). A link is provided which returns the same
-contents as plain text (by appending "?text=1" to the URL).
-
-*** buildslaves send real tracebacks upon error
-
-The .unsafeTracebacks option has been turned on for the buildslaves,
-allowing them to send a full stack trace when an exception occurs, which is
-logged in the buildmaster's twistd.log file. This makes it much easier to
-determine what went wrong on the slave side.
-
-*** BasicBuildFactory refactored
-
-The BasicBuildFactory class was refactored to make it easier to create
-derivative classes, in particular the BasicSVN variant.
-
-*** "ping buildslave" web button added
-
-There is now a button on the "builder information" page that lets a web user
-initiate a ping of the corresponding build slave (right next to the button
-that lets them force a build). This was added to help track down a problem
-with the slave keepalives.
-
-** bugs fixed:
-
-You can now have multiple BuildSteps with the same name (the names are used
-as hash keys in the data structure that helps determine ETA values for each
-step; the new code creates unique key names if necessary to avoid
-collisions). This means that, for example, you do not have to create a
-BuildStep subclass just to have two Compile steps in the same process.
-
-If CVSToys is not installed, the tests that depend upon it are skipped.
-
-Some tests in 0.4.2 failed because of a missing set of test files; they are
-now included in the tarball properly.
-
-Slave keepalives should work better now in the face of silent connection
-loss (such as when an intervening NAT box times out the association), the
-connection should be reestablished in minutes instead of hours.
-
-Shell commands on the slave are invoked with an argument list instead of the
-ugly and error-prone split-on-spaces approach. If the ShellCommand is given
-a string (instead of a list), it will fall back to splitting on spaces.
-Shell commands should work on win32 now (using COMSPEC instead of /bin/sh).
-
-Buildslaves under w32 should theoretically work now, and one was running for
-the Twisted buildbot for a while until the machine had to be returned.
-
-The "header" lines in ShellCommand logs (which include the first line, that
-displays the command being run, and the last, which shows its exit status)
-are now generated by the buildslave side instead of the local (buildmaster)
-side. This can provide better error handling and is generally cleaner.
-However, if you have an old buildslave (running 0.4.2 or earlier) and a new
-buildmaster, then neither end will generate these header lines.
-
-CVSCommand was improved; in certain situations 0.4.2 would perform
-unnecessary checkouts (when an update would have sufficed). Thanks to Johan
-Dahlin for the patches. The status output was fixed as well, so that
-failures in CVS and SVN commands (such as not being able to find the 'svn'
-executable) make the step status box red.
-
-Subversion support was refactored to make it behave more like CVS. This is a
-work in progress and will be improved in the next release.
-
-
-* Release 0.4.2 (08 Jan 2004)
-
-** test suite updated
-
-The test suite has been completely moved over to Twisted's "Trial"
-framework, and all tests now pass. To run the test suite (consisting of 64
-tests, probably covering about 30% of BuildBot's logic), do this:
-
- PYTHONPATH=. trial -v buildbot.test
-
-** Mail parsers updated
-
-Several bugs in the mail-parsing code were fixed, allowing a buildmaster to
-be triggered by mail sent out by a CVS repository. (The Twisted Buildbot is
-now using this to trigger builds, as their CVS server machine is having some
-difficulties with FreshCVS). The FreshCVS mail format for directory
-additions appears to have changed recently: the new parser should handle
-both old and new-style messages.
-
-A parser for Bonsai commit messages (buildbot.changes.mail.parseBonsaiMail)
-was contributed by Stephen Davis. Thanks Stephen!
-
-** CVS "global options" now available
-
-The CVS build step can now accept a list of "global options" to give to the
-cvs command. These go before the "update"/"checkout" word, and are described
-fully by "cvs --help-options". Two useful ones might be "-r", which causes
-checked-out files to be read-only, and "-R", which assumes the repository is
-read-only (perhaps by not attempting to write to lock files).
-
-
-* Release 0.4.1 (09 Dec 2003)
-
-** MaildirSources fixed
-
-Several bugs in the MaildirSources made them unusable. These have been fixed (for
-real this time). The Twisted buildbot is using an FCMaildirSource while they
-fix some FreshCVS daemon problems, which provided the encouragement for
-getting these bugs fixed.
-
-In addition, the use of DNotify (only available under linux) was somehow
-broken, possibly by changes in some recent version of Python. It appears to
-be working again now (against both python-2.3.3c1 and python-2.2.1).
-
-** master.cfg can use 'basedir' variable
-
-As documented in the sample configuration file (but not actually implemented
-until now), a variable named 'basedir' is inserted into the namespace used
-by master.cfg . This can be used with something like:
-
- os.path.join(basedir, "maildir")
-
-to obtain a master-basedir-relative location.
-
-
-* Release 0.4.0 (05 Dec 2003)
-
-** newapp
-
-I've moved the codebase to Twisted's new 'application' framework, which
-drastically cleans up service startup/shutdown just like newcred did for
-authorization. This is mostly an internal change, but the interface to
-IChangeSources was modified, so in the off chance that someone has written a
-custom change source, it may have to be updated to the new scheme.
-
-The most user-visible consequence of this change is that now both
-buildmasters and buildslaves are generated with the standard Twisted 'mktap'
-utility. Basic documentation is in the README file.
-
-Both buildmaster and buildslave .tap files need to be re-generated to run
-under the new code. I have not figured out the styles.Versioned upgrade path
-well enough to avoid this yet. Sorry.
-
-This also means that both buildslaves and the buildmaster require
-Twisted-1.1.0 or later.
-
-** reloadable master.cfg
-
-Most aspects of a buildmaster are now controlled by a configuration file
-which can be re-read at runtime without losing build history. This feature
-makes the buildmaster *much* easier to maintain.
-
-In the previous release, you would create the buildmaster by writing a
-program to define the Builders and ChangeSources and such, then run it to
-create the .tap file. In the new release, you use 'mktap' to create the .tap
-file, and the only parameter you give it is the base directory to use. Each
-time the buildmaster starts, it will look for a file named 'master.cfg' in
-that directory and parse it as a python script. That script must define a
-dictionary named 'BuildmasterConfig' with various keys to define the
-builders, the known slaves, what port to use for the web server, what IRC
-channels to connect to, etc.
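-
-A minimal sketch of such a file is shown below. The key names are the ones
-used by early releases, but treat them as assumptions and consult
-docs/examples/twisted_master.cfg for the authoritative list; 'f' stands for
-a build factory defined earlier in the file, and the credentials and port
-numbers are placeholders:
-
- BuildmasterConfig = c = {}
- c['bots'] = [("bot1", "bot1passwd")]    # buildslave names and passwords
- c['sources'] = []                       # ChangeSource instances go here
- c['builders'] = [("full-linux", "bot1", "full-linux", f)]
- c['slavePortnum'] = 9989
- c['webPortnum'] = 8010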
-
-This config file can be re-read at runtime, and the buildmaster will compute
-the differences and add/remove services as necessary. The re-reading is
-currently triggered through the debug port (contrib/debugclient.py is the
-debug port client), but future releases will add the ability to trigger the
-reconfiguration by IRC command, web page button, and probably a local UNIX
-socket (with a helper script to trigger a rebuild locally).
-
-docs/examples/twisted_master.cfg contains a sample configuration file, which
-also lists all the keys that can be set.
-
-There may be some bugs lurking, such as re-configuring the buildmaster while
-a build is running. It needs more testing.
-
-** MaxQ support
-
-Radix contributed some support scripts to run MaxQ test scripts. MaxQ
-(http://maxq.tigris.org/) is a web testing tool that allows you to record
-HTTP sessions and play them back.
-
-** Builders can now wait on multiple Interlocks
-
-The "Interlock" code has been enhanced to allow multiple builders to wait on
-each one. This was done to support the new config-file syntax for specifying
-Interlocks (in which each interlock is a tuple of A and [B], where A is the
-builder the Interlock depends upon, and [B] is a list of builders that
-depend upon the Interlock).
-
-"Interlock" is misnamed. In the next release it will be changed to
-"Dependency", because that's what it really expresses. A new class (probably
-called Interlock) will be created to express the notion that two builders
-should not run at the same time, useful when multiple builders are run on
-the same machine and thrashing results when several CPU- or disk- intensive
-compiles are done simultaneously.
-
-** FreshCVSSource can now handle newcred-enabled FreshCVS daemons
-
-There are now two FreshCVSSource classes: FreshCVSSourceNewcred talks to
-newcred daemons, and FreshCVSSourceOldcred talks to oldcred ones. Mind you,
-FreshCVS doesn't yet do newcred, but when it does, we'll be ready.
-
-'FreshCVSSource' maps to the oldcred form for now. That will probably change
-when the current release of CVSToys supports newcred by default.
-
-** usePTY=1 on posix buildslaves
-
-When a buildslave is running under POSIX (i.e. pretty much everything except
-windows), child processes are created with a pty instead of separate
-stdin/stdout/stderr pipes. This makes it more likely that a hanging build
-(when killed off by the timeout code) will have all its sub-children cleaned
-up. Non-pty children would tend to leave subprocesses running because the
-buildslave was only able to kill off the top-level process (typically
-'make').
-
-Windows doesn't have any concept of ptys, so non-posix systems do not try to
-enable them.
-
-** mail parsers should actually work now
-
-The email parsing functions (FCMaildirSource and SyncmailMaildirSource) were
-broken because of my confused understanding of how python class methods
-work. These sources should be functional now.
-
-** more irc bot sillyness
-
-The IRC bot can now perform half of the famous AYBABTO scene.
-
-
-* Release 0.3.5 (19 Sep 2003)
-
-** newcred
-
-Buildbot has moved to "newcred", a new authorization framework provided by
-Twisted, which is a good bit cleaner and easier to work with than the
-"oldcred" scheme in older versions. This causes both buildmaster and
-buildslaves to depend upon Twisted 1.0.7 or later. The interface to
-'makeApp' has changed somewhat (the multiple kinds of remote connections all
-use the same TCP port now).
-
-Old buildslaves will get "_PortalWrapper instance has no attribute
-'remote_username'" errors when they try to connect. They must be upgraded.
-
-The FreshCVSSource uses PB to connect to the CVSToys server. This has been
-upgraded to use newcred too. If you get errors (TODO: what do they look
-like?) in the log when the buildmaster tries to connect, you need to upgrade
-your FreshCVS service or use the 'useOldcred' argument when creating your
-FreshCVSSource. This is a temporary hack to allow the buildmaster to talk to
-oldcred CVSToys servers. Using it will trigger deprecation warnings. It will
-go away eventually.
-
-In conjunction with this change, makeApp() now accepts a password which can
-be applied to the debug service.
-
-** new features
-
-*** "copydir" for CVS checkouts
-
-The CVS build step can now accept a "copydir" parameter, which should be a
-directory name like "source" or "orig". If provided, the CVS checkout is
-done once into this directory, then copied into the actual working directory
-for compilation etc. Later updates are done in place in the copydir, then
-the workdir is replaced with a copy.
-
-This reduces CVS bandwidth (update instead of full checkout) at the expense
-of twice the disk space (two copies of the tree).
-
-*** Subversion (SVN) support
-
-Radix (Christopher Armstrong) contributed early support for building
-Subversion-based trees. The new 'SVN' buildstep behaves roughly like the
-'CVS' buildstep, and the contrib/svn_buildbot.py script can be used as a
-checkin trigger to feed changes to a running buildmaster.
-
-** notable bugfixes
-
-*** .tap file generation
-
-We no longer set the .tap filename, because the buildmaster/buildslave
-service might be added to an existing .tap file and we shouldn't presume to
-own the whole thing. You may want to manually rename the "buildbot.tap" file
-to something more meaningful (like "buildslave-bot1.tap").
-
-*** IRC reconnect
-
-If the IRC server goes away (it was restarted, or the network connection was
-lost), the buildmaster will now schedule a reconnect attempt.
-
-*** w32 buildslave fixes
-
-An "rm -rf" was turned into shutil.rmtree on non-posix systems.
-
-
-* Release 0.3.4 (28 Jul 2003)
-
-** IRC client
-
-The buildmaster can now join a set of IRC channels and respond to simple
-queries about builder status.
-
-** slave information
-
-The build slaves can now report information from a set of info/* files in
-the slave base directory to the buildmaster. This will be used by the slave
-administrator to announce details about the system hosting the slave,
-contact information, etc. For now, info/admin should contain the name/email
-of the person who is responsible for the buildslave, and info/host should
-describe the system hosting the build slave (OS version, CPU speed, memory,
-etc). The contents of these files are made available through the waterfall
-display.
-
-** change notification email parsers
-
-A parser for Syncmail (syncmail.sourceforge.net) was added. SourceForge
-provides examples of setting up syncmail to deliver CVS commit messages to
-mailing lists, so hopefully this will make it easier for sourceforge-hosted
-projects to set up a buildbot.
-
-email processors were moved into buildbot.changes.mail . FCMaildirSource was
-moved, and the compatibility location (buildbot.changes.freshcvsmail) will
-go away in the next release.
-
-** w32 buildslave ought to work
-
-Some non-portable code was changed to make it more likely that the
-buildslave will run under windows. The Twisted buildbot now has a
-(more-or-less) working w32 buildslave.
-
-
-* Release 0.3.3 (21 May 2003):
-
-** packaging changes
-
-*** include doc/examples in the release. Oops again.
-
-** network changes
-
-*** add keepalives to deal with NAT boxes
-
-Some NAT boxes drop port mappings if the TCP connection looks idle for too
-long (maybe 30 minutes?). Add application-level keepalives (dummy commands
-sent from slave to master every 10 minutes) to appease the NAT box and keep
-our connection alive. Enable this with --keepalive in the slave mktap
-command line. Check the README for more details.
-
-** UI changes
-
-*** allow slaves to trigger any build that they host
-
-Added an internal function that lets a slave ask the buildmaster to start
-one of the builds it hosts. For now this must be triggered with a debugger
-or manhole on the slave side; a better UI will be added later.
-
-*** allow web page viewers to trigger any build
-
-Added a button to the per-build page (linked by the build names on the third
-row of the waterfall page) to allow viewers to manually trigger builds.
-There is a field for them to indicate who they are and why they are
-triggering the build. It is possible to abuse this, but for now the benefits
-outweigh the damage that could be done (worst case, someone can make your
-machine run builds continuously).
-
-** generic buildprocess changes
-
-*** don't queue multiple builds for offline slaves
-
-If a slave is not online when a build is ready to run, that build is queued
-so the slave will run it when it next connects. However, the buildmaster
-used to queue every such build, so the poor slave machine would be subject
-to tens or hundreds of builds in a row when it finally came back online.
-The buildmaster has been changed to merge these multiple builds into a
-single one.
-
-*** bump ShellCommand default timeout to 20 minutes
-
-Used for testing out the win32 twisted builder. I will probably revert this
-in the next release.
-
-*** split args in ShellCommand ourselves instead of using /bin/sh
-
-This should remove the need for /bin/sh on the slave side, improving the
-chances that the buildslave can run on win32.
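-
-For example, a command given as the string "make BUILD=quick check" is now
-split by buildbot itself into ["make", "BUILD=quick", "check"] and executed
-directly, instead of being handed to "/bin/sh -c".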
-
-*** add configureEnv argument to Configure step, pass env dict to slave
-
-Allows build processes to do things like 'CFLAGS=-O0 ./configure' without
-using /bin/sh to set the environment variable.
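-
-A hypothetical fragment, in the same illustrative factory style as the
-sketches above:
-
-  s(step.Configure, configureEnv={"CFLAGS": "-O0"})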
-
-** Twisted buildprocess changes
-
-*** warn instead of flunk the build when cReactor or qtreactor tests fail
-
-These two always fail. For now, downgrade those failures to a warning
-(orange box instead of red).
-
-*** don't use 'clobber' on remote builds
-
-Builds that run on remote machines (freebsd, OS-X) now use 'cvs update'
-instead of clobbering their trees and doing a fresh checkout. The multiple
-simultaneous CVS checkouts were causing a strain on Glyph's upstream
-bandwidth.
-
-*** use trial --testmodule instead of our own test-case-name grepper
-
-The Twisted coding/testing convention has developers put 'test-case-name'
-tags (emacs local variables, actually) in source files to indicate which
-test cases should be run to exercise that code. Twisted's unit-test
-framework just acquired an argument to look for these tags itself. Use that
-instead of the extra FindUnitTestsForFiles build step we were doing before.
-Removes a good bit of code from buildbot and into Twisted where it really
-belongs.
-
-
-* Release 0.3.2 (07 May 2003):
-
-** packaging changes
-
-*** fix major packaging bug: none of the buildbot/* subdirectories were
-included in the 0.3.1 release. Sorry, I'm still figuring out distutils
-here..
-
-** internal changes
-
-*** use pb.Cacheable to update Events in remote status client. much cleaner.
-
-*** start to clean up BuildProcess->status.builder interface
-
-** bug fixes
-
-*** waterfall display was missing a <tr>, causing it to be misrendered in most
-browsers (except the one I was testing it with, of course)
-
-*** URL without trailing slash (when served in a twisted-web distributed
-server, with a url like "http://twistedmatrix.com/~warner.twistd") should do
-redirect to URL-with-trailing-slash, otherwise internal hrefs are broken.
-
-*** remote status clients: forget RemoteReferences at shutdown, removes
-warnings about "persisting Ephemerals"
-
-** Twisted buildprocess updates:
-
-*** match build process as of twisted-1.0.5
-**** use python2.2 everywhere now that twisted rejects python2.1
-**** look for test-result constants in multiple places
-*** move experimental 'trial --jelly' code to separate module
-*** add FreeBSD builder
-*** catch rc!=0 in HLint step
-*** remove RunUnitTestsRandomly, use randomly=1 parameter instead
-*** parameterize ['twisted.test'] default test case to make subclassing easier
-*** ignore internal distutils warnings in python2.3 builder
-
-
-* Release 0.3.1 (29 Apr 2003):
-
-** First release.
-
-** Features implemented:
-
- change notification from FreshCVS server or parsed maildir contents
-
- timed builds
-
- basic builds, configure/compile/test
-
- some Twisted-specific build steps: docs, unit tests, debuild
-
- status reporting via web page
-
-** Features still experimental/unpolished
-
- status reporting via PB client
diff --git a/buildbot/buildbot-source/PKG-INFO b/buildbot/buildbot-source/PKG-INFO
deleted file mode 100644
index 00f85d29f..000000000
--- a/buildbot/buildbot-source/PKG-INFO
+++ /dev/null
@@ -1,23 +0,0 @@
-Metadata-Version: 1.0
-Name: buildbot
-Version: 0.7.3
-Summary: BuildBot build automation system
-Home-page: http://buildbot.sourceforge.net/
-Author: Brian Warner
-Author-email: warner-buildbot@lothar.com
-License: GNU GPL
-Description:
- The BuildBot is a system to automate the compile/test cycle required by
- most software projects to validate code changes. By automatically
- rebuilding and testing the tree each time something has changed, build
- problems are pinpointed quickly, before other developers are
- inconvenienced by the failure. The guilty developer can be identified
- and harassed without human intervention. By running the builds on a
- variety of platforms, developers who do not have the facilities to test
- their changes everywhere before checkin will at least know shortly
- afterwards whether they have broken the build or not. Warning counts,
- lint checks, image size, compile time, and other build parameters can
- be tracked over time, are more visible, and are therefore easier to
- improve.
-
-Platform: UNKNOWN
diff --git a/buildbot/buildbot-source/README b/buildbot/buildbot-source/README
deleted file mode 100644
index 42c6e398e..000000000
--- a/buildbot/buildbot-source/README
+++ /dev/null
@@ -1,195 +0,0 @@
-
-BuildBot: build/test automation
- http://buildbot.sourceforge.net/
- Brian Warner <warner-buildbot @ lothar . com>
-
-
-Abstract:
-
-The BuildBot is a system to automate the compile/test cycle required by most
-software projects to validate code changes. By automatically rebuilding and
-testing the tree each time something has changed, build problems are
-pinpointed quickly, before other developers are inconvenienced by the
-failure. The guilty developer can be identified and harassed without human
-intervention. By running the builds on a variety of platforms, developers
-who do not have the facilities to test their changes everywhere before
-checkin will at least know shortly afterwards whether they have broken the
-build or not. Warning counts, lint checks, image size, compile time, and
-other build parameters can be tracked over time, are more visible, and
-are therefore easier to improve.
-
-The overall goal is to reduce tree breakage and provide a platform to run
-tests or code-quality checks that are too annoying or pedantic for any human
-to waste their time with. Developers get immediate (and potentially public)
-feedback about their changes, encouraging them to be more careful about
-testing before checkin.
-
-
-Features:
-
- * run builds on a variety of slave platforms
- * arbitrary build process: handles projects using C, Python, whatever
- * minimal host requirements: python and Twisted
- * slaves can be behind a firewall if they can still do checkout
- * status delivery through web page, email, IRC, other protocols
- * track builds in progress, provide estimated completion time
- * flexible configuration by subclassing generic build process classes
- * debug tools to force a new build, submit fake Changes, query slave status
- * released under the GPL
-
-
-DOCUMENTATION:
-
-The PyCon paper has a good description of the overall architecture. It is
-available in HTML form in docs/PyCon-2003/buildbot.html, or on the web page.
-
-docs/buildbot.info contains the beginnings of the User's Manual, and the
-Installation chapter is the best guide to use for setup instructions. The
-.texinfo source can also be turned into printed documentation.
-
-REQUIREMENTS:
-
- Python: http://www.python.org
-
- Buildbot requires python-2.2 or later, and is primarily developed against
- python-2.3. The buildmaster uses generators, a feature which is not
- available in python-2.1, and both master and slave require a version of
- Twisted which only works with python-2.2 or later. Certain features (like
- the inclusion of build logs in status emails) require python-2.2.2 or
- later, while the IRC 'force' command requires python-2.3 .
-
- Twisted: http://twistedmatrix.com
-
- Both the buildmaster and the buildslaves require Twisted-1.3.0 or later.
- It has been mainly developed against Twisted-2.0.1, but has been tested
-  against Twisted-2.1.0 (the most recent at this time), and might even work
- on versions as old as Twisted-1.1.0, but as always the most recent version
- is recommended.
-
- When using the split subpackages of Twisted-2.x.x, you'll need at least
- "Twisted" (the core package), and you'll also want TwistedMail,
- TwistedWeb, and TwistedWords (for sending email, serving a web status
- page, and delivering build status via IRC, respectively).
-
- CVSToys: http://purl.net/net/CVSToys
-
- If your buildmaster uses FreshCVSSource to receive change notification
- from a cvstoys daemon, it will require CVSToys be installed (tested with
-  CVSToys-1.0.10). If it doesn't use that source (i.e. if you only use
- a mail-parsing change source, or the SVN notification script), you will
- not need CVSToys.
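-
-  As a hypothetical illustration (the host, port, and credentials are
-  invented placeholders), such a change source is constructed roughly like:
-
-    from buildbot.changes.freshcvs import FreshCVSSource
-    source = FreshCVSSource("cvs.example.org", 4519, "user", "password",
-                            prefix="myproj/")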
-
-INSTALLATION:
-
-Please read the User's Manual in docs/buildbot.info (or in HTML form on the
-buildbot web site) for complete instructions. This file only contains a brief
-summary.
-
- RUNNING THE UNIT TESTS
-
-If you would like to run the unit test suite, use a command like this:
-
- PYTHONPATH=. trial buildbot.test
-
-This should run up to 175 tests, depending upon what VC tools you have
-installed. On my desktop machine it takes about four minutes to complete.
-Nothing should fail, though a few may be skipped. If any of the tests fail, you
-should stop and investigate the cause before continuing the installation
-process, as it will probably be easier to track down the bug early.
-
-Neither CVS nor SVN supports file-based repositories on a network filesystem
-(a network drive, in Windows parlance), so it is recommended that you run
-all unit tests on a local hard disk.
-
- INSTALLING THE LIBRARIES:
-
-The first step is to install the python libraries. This package uses the
-standard 'distutils' module, so installing them is usually a matter of
-doing something like:
-
- python ./setup.py install
-
-To test this, shift to a different directory (like /tmp), and run:
-
- pydoc buildbot
-
-If it shows you a brief description of the package and its contents, the
-install went ok. If it says "no Python documentation found for 'buildbot'",
-then something went wrong.
-
-
- SETTING UP A BUILD SLAVE:
-
-If you want to run a build slave, you need to obtain the following pieces of
-information from the administrator of the buildmaster you intend to connect
-to:
-
- your buildslave's name
- the password assigned to your buildslave
- the hostname and port number of the buildmaster, i.e. example.com:8007
-
-You also need to pick a working directory for the buildslave. All commands
-will be run inside this directory.
-
-Now run the 'buildbot' command as follows:
-
- buildbot slave WORKDIR MASTERHOST:PORT SLAVENAME PASSWORD
-
-This will create a file called "buildbot.tac", which bundles up all the state
-needed by the build slave application. Twisted has a tool called "twistd"
-which knows how to load these saved applications and start running them.
-twistd takes care of logging and daemonization (running the program in the
-background). /usr/bin/buildbot is a front end which runs twistd for you.
-
-Once you've set up the directory with the .tac file, you start it running
-like this:
-
- buildbot start WORKDIR
-
-This will start the build slave in the background and finish, so you don't
-need to put it in the background yourself with "&". The process ID of the
-background task is written to a file called "twistd.pid", and all output from
-the program is written to a log file named "twistd.log". Look in twistd.log
-to make sure the buildslave has started.
-
-To shut down the build slave, use:
-
- buildbot stop WORKDIR
-
-
- RUNNING BEHIND A NAT BOX:
-
-Some network environments will not properly maintain a TCP connection that
-appears to be idle. NAT boxes which do some form of connection tracking may
-drop the port mapping if it looks like the TCP session has been idle for too
-long. The buildslave attempts to turn on TCP "keepalives" (supported by
-Twisted 1.0.6 and later), and if these cannot be activated, it uses
-application level keepalives (which send a dummy message to the build master
-on a periodic basis). The TCP keepalive is typically sent at intervals of
-about 2 hours, and is configurable through the kernel. The application-level
-keepalive defaults to running once every 10 minutes.
-
-To manually turn on application-level keepalives, or to set them to use some
-other interval, add "--keepalive NNN" to the 'buildbot slave' command line.
-NNN is the number of seconds between keepalives. Use as large a value as your
-NAT box allows to reduce the amount of unnecessary traffic on the wire. 600
-seconds (10 minutes) is a reasonable value.
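-
-For example (a hypothetical command line; the exact option placement may
-differ):
-
-  buildbot slave --keepalive 600 WORKDIR example.com:8007 SLAVENAME PASSWORD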
-
-
- SETTING UP A BUILD MASTER:
-
-Please read the user's manual for instructions. The short form is that you
-use 'buildbot master MASTERDIR' to create the base directory, then you edit
-the 'master.cfg' file to configure the buildmaster. Once this is ready, you
-use 'buildbot start MASTERDIR' to launch it.
-
-A sample configuration file will be created for you in MASTERDIR/master.cfg.
-There are more examples in docs/examples/, and plenty of documentation in the
-user's manual. Everything is controlled by the config file.
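-
-As a minimal, hypothetical sketch only (names, passwords, and the builder
-wiring are placeholders; the user's manual and the generated sample file are
-authoritative), a master.cfg is a Python file that fills in a dictionary:
-
-  from buildbot.scheduler import Scheduler
-  from buildbot.process import step, factory
-  from buildbot.process.factory import s
-
-  c = BuildmasterConfig = {}        # the buildmaster reads this dictionary
-  c['bots'] = [("bot1", "bot1passwd")]
-  c['sources'] = []
-  c['schedulers'] = [Scheduler("all", None, 60, ["quick"])]
-  f = factory.BuildFactory([s(step.ShellCommand, command=["make", "check"])])
-  c['builders'] = [{'name': 'quick', 'slavename': 'bot1',
-                    'builddir': 'quick', 'factory': f}]
-  c['slavePortnum'] = 9989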
-
-
-SUPPORT:
-
- Please send questions, bugs, patches, etc, to the buildbot-devel mailing
- list reachable through http://buildbot.sourceforge.net/, so that everyone
- can see them.
diff --git a/buildbot/buildbot-source/README.w32 b/buildbot/buildbot-source/README.w32
deleted file mode 100644
index de54c97f3..000000000
--- a/buildbot/buildbot-source/README.w32
+++ /dev/null
@@ -1,95 +0,0 @@
-Several users have reported success in running a buildslave under Windows.
-The following list of steps might help you accomplish the same. They are a
-list of what I did as a unix guy struggling to make a winXP box run the
-buildbot unit tests. When I was done, most of the unit tests passed.
-
-If you discover things that are missing or incorrect, please send your
-corrections to the buildbot-devel mailing list (archives and subscription
-information are available at http://buildbot.sourceforge.net).
-
-Many thanks to Mike "Bear" Taylor for developing this list.
-
-
-0. Check to make sure your PATHEXT environment variable has ";.PY" in
-it -- if not, set your global environment to include it.
-
- Control Panels / System / Advanced / Environment Variables / System variables
-
-1. Install python -- 2.4 -- http://python.org
- * run win32 installer - no special options needed so far
-
-2. install zope interface package -- 3.0.1final --
-http://www.zope.org/Products/ZopeInterface
- * run win32 installer - it should auto-detect your python 2.4
- installation
-
-3. python for windows extensions -- build 203 --
-http://pywin32.sourceforge.net/
- * run win32 installer - it should auto-detect your python 2.4
- installation
-
- the installer complains about a missing DLL. Download mfc71.dll from the
- site mentioned in the warning
- (http://starship.python.net/crew/mhammond/win32/) and move it into
- c:\Python24\DLLs
-
-4. at this point, to preserve my own sanity, I grabbed cygwin.com's setup.exe
- and started it. It behaves a lot like dselect. I installed bash and other
- tools (but *not* python). I added C:\cygwin\bin to PATH, allowing me to
- use tar, md5sum, cvs, all the usual stuff. I also installed emacs, going
- from the notes at http://www.gnu.org/software/emacs/windows/ntemacs.html .
- Their FAQ at http://www.gnu.org/software/emacs/windows/faq3.html#install
- has a note on how to swap CapsLock and Control.
-
- I also modified PATH (in the same place as PATHEXT) to include C:\Python24
- and C:\Python24\Scripts . This will allow 'python' and (eventually) 'trial'
- to work in a regular command shell.
-
-5. twisted -- 2.0 -- http://twistedmatrix.com/projects/core/
- * unpack tarball and run
- python setup.py install
- Note: if you want to test your setup - run:
- python c:\python24\Scripts\trial.py -o -R twisted
- (the -o will format the output for console and the "-R twisted" will
- recursively run all unit tests)
-
- I had to edit Twisted (core)'s setup.py, to make detectExtensions() return
- an empty list before running builder._compile_helper(). Apparently the test
- it uses to detect if the (optional) C modules can be compiled causes the
- install process to simply quit without actually installing anything.
-
- I installed several packages: core, Lore, Mail, Web, and Words. They all got
- copied to C:\Python24\Lib\site-packages\
-
- At this point
-
- trial --version
-
- works, so 'trial -o -R twisted' will run the Twisted test suite. Note that
- this is not necessarily setting PYTHONPATH, so it may be running the test
- suite that was installed, not the one in the current directory.
-
-6. I used CVS to grab a copy of the latest Buildbot sources. To run the
- tests, you must first add the buildbot directory to PYTHONPATH. Windows
- does not appear to have a Bourne-shell-style syntax to set a variable just
- for a single command, so you have to set it once and remember it will
- affect all commands for the lifetime of that shell session.
-
- set PYTHONPATH=.
- trial -o -r win32 buildbot.test
-
- To run against both buildbot-CVS and, say, Twisted-SVN, do:
-
- set PYTHONPATH=.;C:\path to\Twisted-SVN
-
-
-All commands are done using the normal cmd.exe command shell. As of
-buildbot-0.6.4, only one unit test fails (test_webPathname_port) when you run
-under the 'win32' reactor. (If you run under the default reactor, many of the
-child-process-spawning commands fail, but test_webPathname_port passes. Go
-figure.)
-
-Actually setting up a buildslave is not yet covered by this document. Patches
-gladly accepted.
-
- -Brian
diff --git a/buildbot/buildbot-source/bin/buildbot b/buildbot/buildbot-source/bin/buildbot
deleted file mode 100755
index cf3628dd5..000000000
--- a/buildbot/buildbot-source/bin/buildbot
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/python
-
-from buildbot.scripts import runner
-runner.run()
diff --git a/buildbot/buildbot-source/build/lib/buildbot/__init__.py b/buildbot/buildbot-source/build/lib/buildbot/__init__.py
deleted file mode 100644
index ed1ce3fd3..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#! /usr/bin/python
-
-version = "0.7.3"
diff --git a/buildbot/buildbot-source/build/lib/buildbot/buildset.py b/buildbot/buildbot-source/build/lib/buildbot/buildset.py
deleted file mode 100644
index 0e163738d..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/buildset.py
+++ /dev/null
@@ -1,77 +0,0 @@
-
-from twisted.internet import defer
-
-from buildbot.process import base
-from buildbot.status import builder
-
-
-class BuildSet:
- """I represent a set of potential Builds, all of the same source tree,
- across a specified list of Builders. I can represent a build of a
- specific version of the source tree (named by source.branch and
- source.revision), or a build of a certain set of Changes
- (source.changes=list)."""
-
- def __init__(self, builderNames, source, reason=None, bsid=None):
- """
- @param source: a L{buildbot.sourcestamp.SourceStamp}
- """
- self.builderNames = builderNames
- self.source = source
- self.reason = reason
- self.stillHopeful = True
- self.status = bss = builder.BuildSetStatus(source, reason,
- builderNames, bsid)
-
- def waitUntilSuccess(self):
- return self.status.waitUntilSuccess()
- def waitUntilFinished(self):
- return self.status.waitUntilFinished()
-
- def start(self, builders):
- """This is called by the BuildMaster to actually create and submit
- the BuildRequests."""
- self.requests = []
- reqs = []
-
- # create the requests
- for b in builders:
- req = base.BuildRequest(self.reason, self.source, b.name)
- reqs.append((b, req))
- self.requests.append(req)
- d = req.waitUntilFinished()
- d.addCallback(self.requestFinished, req)
-
- # tell our status about them
- req_statuses = [req.status for req in self.requests]
- self.status.setBuildRequestStatuses(req_statuses)
-
- # now submit them
- for b,req in reqs:
- b.submitBuildRequest(req)
-
- def requestFinished(self, buildstatus, req):
- # TODO: this is where individual build status results are aggregated
- # into a BuildSet-wide status. Consider making a rule that says one
- # WARNINGS results in the overall status being WARNINGS too. The
- # current rule is that any FAILURE means FAILURE, otherwise you get
- # SUCCESS.
- self.requests.remove(req)
- results = buildstatus.getResults()
- if results == builder.FAILURE:
- self.status.setResults(results)
- if self.stillHopeful:
- # oh, cruel reality cuts deep. no joy for you. This is the
- # first failure. This flunks the overall BuildSet, so we can
- # notify success watchers that they aren't going to be happy.
- self.stillHopeful = False
- self.status.giveUpHope()
- self.status.notifySuccessWatchers()
- if not self.requests:
- # that was the last build, so we can notify finished watchers. If
- # we haven't failed by now, we can claim success.
- if self.stillHopeful:
- self.status.setResults(builder.SUCCESS)
- self.status.notifySuccessWatchers()
- self.status.notifyFinishedWatchers()
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/__init__.py b/buildbot/buildbot-source/build/lib/buildbot/changes/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/base.py b/buildbot/buildbot-source/build/lib/buildbot/changes/base.py
deleted file mode 100644
index 2b0a331f2..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/base.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#! /usr/bin/python
-
-from twisted.application import service
-from twisted.python import components
-
-from buildbot.twcompat import implements
-from buildbot.interfaces import IChangeSource
-from buildbot import util
-
-class ChangeSource(service.Service, util.ComparableMixin):
- if implements:
- implements(IChangeSource)
- else:
- __implements__ = IChangeSource, service.Service.__implements__
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/changes.py b/buildbot/buildbot-source/build/lib/buildbot/changes/changes.py
deleted file mode 100644
index 9ca9112f0..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/changes.py
+++ /dev/null
@@ -1,265 +0,0 @@
-#! /usr/bin/python
-
-from __future__ import generators
-import string, sys, os, os.path, time, types
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-from twisted.python import log, components
-from twisted.internet import defer
-from twisted.spread import pb
-from twisted.application import service
-from twisted.cred import portal
-from twisted.web import html
-
-from buildbot import interfaces, util
-from buildbot.twcompat import implements, providedBy
-
-html_tmpl = """
-<p>Changed by: <b>%(who)s</b><br />
-Changed at: <b>%(at)s</b><br />
-%(branch)s
-%(revision)s
-<br />
-
-Changed files:
-%(files)s
-
-Comments:
-%(comments)s
-</p>
-"""
-
-class Change:
- """I represent a single change to the source tree. This may involve
- several files, but they are all changed by the same person, and there is
- a change comment for the group as a whole.
-
- If the version control system supports sequential repository- (or
- branch-) wide change numbers (like SVN, P4, and Arch), then revision=
- should be set to that number. The highest such number will be used at
- checkout time to get the correct set of files.
-
- If it does not (like CVS), when= should be set to the timestamp (seconds
- since epoch, as returned by time.time()) when the change was made. when=
- will be filled in for you (to the current time) if you omit it, which is
- suitable for ChangeSources which have no way of getting more accurate
- timestamps.
-
- Changes should be submitted to ChangeMaster.addChange() in
- chronologically increasing order. Out-of-order changes will probably
- cause the html.Waterfall display to be corrupted."""
-
- if implements:
- implements(interfaces.IStatusEvent)
- else:
- __implements__ = interfaces.IStatusEvent,
-
- number = None
-
- links = []
- branch = None
- revision = None # used to create a source-stamp
-
- def __init__(self, who, files, comments, isdir=0, links=[],
- revision=None, when=None, branch=None):
- self.who = who
- self.files = files
- self.comments = comments
- self.isdir = isdir
- self.links = links
- self.revision = revision
- if when is None:
- when = util.now()
- self.when = when
- self.branch = branch
-
- def asText(self):
- data = ""
- data += self.getFileContents()
- data += "At: %s\n" % self.getTime()
- data += "Changed By: %s\n" % self.who
- data += "Comments: %s\n\n" % self.comments
- return data
-
- def asHTML(self):
- links = []
- for file in self.files:
- link = filter(lambda s: s.find(file) != -1, self.links)
- if len(link) == 1:
- # could get confused
- links.append('<a href="%s"><b>%s</b></a>' % (link[0], file))
- else:
- links.append('<b>%s</b>' % file)
- revision = ""
- if self.revision:
- revision = "Revision: <b>%s</b><br />\n" % self.revision
- branch = ""
- if self.branch:
- branch = "Branch: <b>%s</b><br />\n" % self.branch
-
- kwargs = { 'who' : html.escape(self.who),
- 'at' : self.getTime(),
- 'files' : html.UL(links) + '\n',
- 'revision': revision,
- 'branch' : branch,
- 'comments': html.PRE(self.comments) }
- return html_tmpl % kwargs
-
- def getTime(self):
- if not self.when:
- return "?"
- return time.strftime("%a %d %b %Y %H:%M:%S",
- time.localtime(self.when))
-
- def getTimes(self):
- return (self.when, None)
-
- def getText(self):
- return [html.escape(self.who)]
- def getColor(self):
- return "white"
- def getLogs(self):
- return {}
-
- def getFileContents(self):
- data = ""
- if len(self.files) == 1:
- if self.isdir:
- data += "Directory: %s\n" % self.files[0]
- else:
- data += "File: %s\n" % self.files[0]
- else:
- data += "Files:\n"
- for f in self.files:
- data += " %s\n" % f
- return data
-
-class ChangeMaster(service.MultiService):
-
- """This is the master-side service which receives file change
- notifications from CVS. It keeps a log of these changes, enough to
- provide for the HTML waterfall display, and to tell
- temporarily-disconnected bots what they missed while they were
- offline.
-
- Change notifications come from two different kinds of sources. The first
- is a PB service (servicename='changemaster', perspectivename='change'),
- which provides a remote method called 'addChange', which should be
- called with a dict that has keys 'filename' and 'comments'.
-
- The second is a list of objects derived from the ChangeSource class.
- These are added with .addSource(), which also sets the .changemaster
- attribute in the source to point at the ChangeMaster. When the
- application begins, these will be started with .start() . At shutdown
- time, they will be terminated with .stop() . They must be persistable.
- They are expected to call self.changemaster.addChange() with Change
- objects.
-
- There are several different variants of the second type of source:
-
- - L{buildbot.changes.mail.MaildirSource} watches a maildir for CVS
- commit mail. It uses DNotify if available, or polls every 10
- seconds if not. It parses incoming mail to determine what files
- were changed.
-
- - L{buildbot.changes.freshcvs.FreshCVSSource} makes a PB
- connection to the CVSToys 'freshcvs' daemon and relays any
- changes it announces.
-
- """
-
- debug = False
- # todo: use Maildir class to watch for changes arriving by mail
-
- def __init__(self):
- service.MultiService.__init__(self)
- self.changes = []
- # self.basedir must be filled in by the parent
- self.nextNumber = 1
-
- def addSource(self, source):
- assert providedBy(source, interfaces.IChangeSource)
- assert providedBy(source, service.IService)
- if self.debug:
- print "ChangeMaster.addSource", source
- source.setServiceParent(self)
-
- def removeSource(self, source):
- assert source in self
- if self.debug:
- print "ChangeMaster.removeSource", source, source.parent
- d = defer.maybeDeferred(source.disownServiceParent)
- return d
-
- def addChange(self, change):
- """Deliver a file change event. The event should be a Change object.
- This method will timestamp the object as it is received."""
- log.msg("adding change, who %s, %d files, rev=%s, branch=%s, "
- "comments %s" % (change.who, len(change.files),
- change.revision, change.branch,
- change.comments))
- change.number = self.nextNumber
- self.nextNumber += 1
- self.changes.append(change)
- self.parent.addChange(change)
- # TODO: call pruneChanges after a while
-
- def pruneChanges(self):
- self.changes = self.changes[-100:] # or something
-
- def eventGenerator(self):
- for i in range(len(self.changes)-1, -1, -1):
- c = self.changes[i]
- yield c
-
- def getChangeNumbered(self, num):
- if not self.changes:
- return None
- first = self.changes[0].number
- if first + len(self.changes)-1 != self.changes[-1].number:
- log.msg(self,
- "lost a change somewhere: [0] is %d, [%d] is %d" % \
- (self.changes[0].number,
- len(self.changes) - 1,
- self.changes[-1].number))
- for c in self.changes:
- log.msg("c[%d]: " % c.number, c)
- return None
- offset = num - first
- log.msg(self, "offset", offset)
- return self.changes[offset]
-
- def __getstate__(self):
- d = service.MultiService.__getstate__(self)
- del d['parent']
- del d['services'] # lose all children
- del d['namedServices']
- return d
-
- def __setstate__(self, d):
- self.__dict__ = d
- # self.basedir must be set by the parent
- self.services = [] # they'll be repopulated by readConfig
- self.namedServices = {}
-
-
- def saveYourself(self):
- filename = os.path.join(self.basedir, "changes.pck")
- tmpfilename = filename + ".tmp"
- try:
- pickle.dump(self, open(tmpfilename, "wb"))
- if sys.platform == 'win32':
- # windows cannot rename a file on top of an existing one
- if os.path.exists(filename):
- os.unlink(filename)
- os.rename(tmpfilename, filename)
- except Exception, e:
- log.msg("unable to save changes")
- log.err()
-
- def stopService(self):
- self.saveYourself()
- return service.MultiService.stopService(self)
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/dnotify.py b/buildbot/buildbot-source/build/lib/buildbot/changes/dnotify.py
deleted file mode 100644
index ac566a8eb..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/dnotify.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#! /usr/bin/python
-
-import fcntl, signal, os
-
-class DNotify_Handler:
- def __init__(self):
- self.watchers = {}
- self.installed = 0
- def install(self):
- if self.installed:
- return
- signal.signal(signal.SIGIO, self.fire)
- self.installed = 1
- def uninstall(self):
- if not self.installed:
- return
- signal.signal(signal.SIGIO, signal.SIG_DFL)
- self.installed = 0
- def add(self, watcher):
- self.watchers[watcher.fd] = watcher
- self.install()
- def remove(self, watcher):
- if self.watchers.has_key(watcher.fd):
- del(self.watchers[watcher.fd])
- if not self.watchers:
- self.uninstall()
- def fire(self, signum, frame):
- # this is the signal handler
- # without siginfo_t, we must fire them all
- for watcher in self.watchers.values():
- watcher.callback()
-
-class DNotify:
- DN_ACCESS = fcntl.DN_ACCESS # a file in the directory was read
- DN_MODIFY = fcntl.DN_MODIFY # a file was modified (write,truncate)
- DN_CREATE = fcntl.DN_CREATE # a file was created
- DN_DELETE = fcntl.DN_DELETE # a file was unlinked
- DN_RENAME = fcntl.DN_RENAME # a file was renamed
- DN_ATTRIB = fcntl.DN_ATTRIB # a file had attributes changed (chmod,chown)
-
- handler = [None]
-
- def __init__(self, dirname, callback=None,
- flags=[DN_MODIFY,DN_CREATE,DN_DELETE,DN_RENAME]):
-
- """This object watches a directory for changes. The .callback
- attribute should be set to a function to be run every time something
- happens to it. Be aware that it will be called more times than you
- expect."""
-
- if callback:
- self.callback = callback
- else:
- self.callback = self.fire
- self.dirname = dirname
- self.flags = reduce(lambda x, y: x | y, flags) | fcntl.DN_MULTISHOT
- self.fd = os.open(dirname, os.O_RDONLY)
- # ideally we would move the notification to something like SIGRTMIN,
- # (to free up SIGIO) and use sigaction to have the signal handler
- # receive a structure with the fd number. But python doesn't offer
- # either.
- if not self.handler[0]:
- self.handler[0] = DNotify_Handler()
- self.handler[0].add(self)
- fcntl.fcntl(self.fd, fcntl.F_NOTIFY, self.flags)
- def remove(self):
- self.handler[0].remove(self)
- os.close(self.fd)
- def fire(self):
- print self.dirname, "changed!"
-
-def test_dnotify1():
- d = DNotify(".")
- import time
- while 1:
- signal.pause()
-
-def test_dnotify2():
- # create ./foo/, create/delete files in ./ and ./foo/ while this is
- # running. Notice how both notifiers are fired when anything changes;
- # this is an unfortunate side-effect of the lack of extended sigaction
- # support in Python.
- count = [0]
- d1 = DNotify(".")
- def fire1(count=count, d1=d1):
- print "./ changed!", count[0]
- count[0] += 1
- if count[0] > 5:
- d1.remove()
- del(d1)
- # change the callback, since we can't define it until after we have the
- # dnotify object. Hmm, unless we give the dnotify to the callback.
- d1.callback = fire1
- def fire2(): print "foo/ changed!"
- d2 = DNotify("foo", fire2)
- import time
- while 1:
- signal.pause()
-
-
-if __name__ == '__main__':
- test_dnotify2()
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/freshcvs.py b/buildbot/buildbot-source/build/lib/buildbot/changes/freshcvs.py
deleted file mode 100644
index e88d351ba..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/freshcvs.py
+++ /dev/null
@@ -1,148 +0,0 @@
-#! /usr/bin/python
-
-import os.path
-
-from twisted.cred import credentials
-from twisted.spread import pb
-from twisted.application.internet import TCPClient
-from twisted.python import log
-
-import cvstoys.common # to make sure VersionedPatch gets registered
-
-from buildbot.twcompat import implements
-from buildbot.interfaces import IChangeSource
-from buildbot.pbutil import ReconnectingPBClientFactory
-from buildbot.changes.changes import Change
-from buildbot import util
-
-class FreshCVSListener(pb.Referenceable):
- def remote_notify(self, root, files, message, user):
- try:
- self.source.notify(root, files, message, user)
- except Exception, e:
- print "notify failed"
- log.err()
-
- def remote_goodbye(self, message):
- pass
-
-class FreshCVSConnectionFactory(ReconnectingPBClientFactory):
-
- def gotPerspective(self, perspective):
- log.msg("connected to FreshCVS daemon")
- ReconnectingPBClientFactory.gotPerspective(self, perspective)
- self.source.connected = True
- # TODO: freshcvs-1.0.10 doesn't handle setFilter correctly, it will
- # be fixed in the upcoming 1.0.11 . I haven't been able to test it
- # to make sure the failure mode is survivable, so I'll just leave
- # this out for now.
- return
- if self.source.prefix is not None:
- pathfilter = "^%s" % self.source.prefix
- d = perspective.callRemote("setFilter",
- None, pathfilter, None)
- # ignore failures, setFilter didn't work in 1.0.10 and this is
- # just an optimization anyway
- d.addErrback(lambda f: None)
-
- def clientConnectionLost(self, connector, reason):
- ReconnectingPBClientFactory.clientConnectionLost(self, connector,
- reason)
- self.source.connected = False
-
-class FreshCVSSourceNewcred(TCPClient, util.ComparableMixin):
- """This source will connect to a FreshCVS server associated with one or
- more CVS repositories. Each time a change is committed to a repository,
- the server will send us a message describing the change. This message is
- used to build a Change object, which is then submitted to the
- ChangeMaster.
-
- This class handles freshcvs daemons which use newcred. CVSToys-1.0.9
- does not, later versions might.
- """
-
- if implements:
- implements(IChangeSource)
- else:
- __implements__ = IChangeSource, TCPClient.__implements__
- compare_attrs = ["host", "port", "username", "password", "prefix"]
-
- changemaster = None # filled in when we're added
- connected = False
-
- def __init__(self, host, port, user, passwd, prefix=None):
- self.host = host
- self.port = port
- self.username = user
- self.password = passwd
- if prefix is not None and not prefix.endswith("/"):
- log.msg("WARNING: prefix '%s' should probably end with a slash" \
- % prefix)
- self.prefix = prefix
- self.listener = l = FreshCVSListener()
- l.source = self
- self.factory = f = FreshCVSConnectionFactory()
- f.source = self
- self.creds = credentials.UsernamePassword(user, passwd)
- f.startLogin(self.creds, client=l)
- TCPClient.__init__(self, host, port, f)
-
- def __repr__(self):
- return "<FreshCVSSource where=%s, prefix=%s>" % \
- ((self.host, self.port), self.prefix)
-
- def describe(self):
- online = ""
- if not self.connected:
- online = " [OFFLINE]"
- return "freshcvs %s:%s%s" % (self.host, self.port, online)
-
- def notify(self, root, files, message, user):
- pathnames = []
- isdir = 0
- for f in files:
- if not isinstance(f, (cvstoys.common.VersionedPatch,
- cvstoys.common.Directory)):
- continue
- pathname, filename = f.pathname, f.filename
- #r1, r2 = getattr(f, 'r1', None), getattr(f, 'r2', None)
- if isinstance(f, cvstoys.common.Directory):
- isdir = 1
- path = os.path.join(pathname, filename)
- log.msg("FreshCVS notify '%s'" % path)
- if self.prefix:
- if path.startswith(self.prefix):
- path = path[len(self.prefix):]
- else:
- continue
- pathnames.append(path)
- if pathnames:
- # now() is close enough: FreshCVS *is* realtime, after all
- when=util.now()
- c = Change(user, pathnames, message, isdir, when=when)
- self.parent.addChange(c)
-
-class FreshCVSSourceOldcred(FreshCVSSourceNewcred):
- """This is for older freshcvs daemons (from CVSToys-1.0.9 and earlier).
- """
-
- def __init__(self, host, port, user, passwd,
- serviceName="cvstoys.notify", prefix=None):
- self.host = host
- self.port = port
- self.prefix = prefix
- self.listener = l = FreshCVSListener()
- l.source = self
- self.factory = f = FreshCVSConnectionFactory()
- f.source = self
- f.startGettingPerspective(user, passwd, serviceName, client=l)
- TCPClient.__init__(self, host, port, f)
-
- def __repr__(self):
- return "<FreshCVSSourceOldcred where=%s, prefix=%s>" % \
- ((self.host, self.port), self.prefix)
-
-# this is suitable for CVSToys-1.0.10 and later. If you run CVSToys-1.0.9 or
-# earlier, use FreshCVSSourceOldcred instead.
-FreshCVSSource = FreshCVSSourceNewcred
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/freshcvsmail.py b/buildbot/buildbot-source/build/lib/buildbot/changes/freshcvsmail.py
deleted file mode 100644
index e897f4990..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/freshcvsmail.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#! /usr/bin/python
-
-# leftover import for compatibility
-
-from buildbot.changes.mail import FCMaildirSource
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/mail.py b/buildbot/buildbot-source/build/lib/buildbot/changes/mail.py
deleted file mode 100644
index b5237e9a9..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/mail.py
+++ /dev/null
@@ -1,475 +0,0 @@
-# -*- test-case-name: buildbot.test.test_mailparse -*-
-
-"""
-Parse various kinds of 'CVS notify' email.
-"""
-import os, os.path, re
-from rfc822 import Message
-
-from buildbot import util
-from buildbot.twcompat import implements
-from buildbot.changes import base, changes, maildirtwisted
-
-
-def parseOOAllCVSmail(self, fd, prefix=None, sep="/"):
- """Parse messages sent by the 'allcvs' program
- """
- # pretty much the same as freshcvs mail, not surprising since CVS is the
- # one creating most of the text
-
- m = Message(fd)
- # The mail is sent from the person doing the checkin. Assume that the
- # local username is enough to identify them (this assumes a one-server
- # cvs-over-rsh environment rather than the server-dirs-shared-over-NFS
- # model)
- name, addr = m.getaddr("from")
- if not addr:
-        return None # no From means this message isn't from allcvs
- at = addr.find("@")
- if at == -1:
- who = addr # might still be useful
- else:
- who = addr[:at]
-
- # we take the time of receipt as the time of checkin. Not correct (it
- # depends upon the email latency), but it avoids the out-of-order-changes
- # issue. Also syncmail doesn't give us anything better to work with,
- # unless you count pulling the v1-vs-v2 timestamp out of the diffs, which
- # would be ugly. TODO: Pulling the 'Date:' header from the mail is a
- # possibility, and email.Utils.parsedate_tz may be useful. It should be
- # configurable, however, because there are a lot of broken clocks out
- # there.
- when = util.now()
- subject = m.getheader("subject")
- # syncmail puts the repository-relative directory in the subject:
- # mprefix + "%(dir)s %(file)s,%(oldversion)s,%(newversion)s", where
- # 'mprefix' is something that could be added by a mailing list
- # manager.
- # this is the only reasonable way to determine the directory name
- space = subject.find(" ")
- if space != -1:
- directory = subject[:space]
- else:
- directory = subject
- files = []
- comments = ""
- isdir = 0
- branch = None
- lines = m.fp.readlines()
-
- while lines:
- line = lines.pop(0)
- #if line == "\n":
- # break
- #if line == "Log:\n":
- # lines.insert(0, line)
- # break
- line = line.lstrip()
- line = line.rstrip()
-
- if line.startswith('Tag:'):
- branch = line.split(' ')[-1].rstrip()
- branch = branch.replace("cws_src680_","")
- break
- else:
- continue
-
- #thesefiles = line.split(" ")
- #for f in thesefiles:
- # f = sep.join([directory, f])
- # if prefix:
- # bits = f.split(sep)
- # if bits[0] == prefix:
- # f = sep.join(bits[1:])
- # else:
- # break
-
- # files.append(f)
-
- while lines:
- line = lines.pop(0)
- if (line == "Modified:\n" or
- line == "Added:\n" or
- line == "Removed:\n"):
- break
-
- while lines:
- line = lines.pop(0)
- if line == "\n":
- break
- if line == "Log:\n":
- lines.insert(0, line)
- break
- line = line.lstrip()
- line = line.rstrip()
-
- thesefiles = line.split(" ")
- for f in thesefiles:
- f = sep.join([directory, f])
- if prefix:
- bits = f.split(sep)
- if bits[0] == prefix:
- f = sep.join(bits[1:])
- else:
- break
- files.append(f)
-
-
- #if not files:
- # return None
-
- if not branch:
- return None
-
- while lines:
- line = lines.pop(0)
- if line == "Log:\n":
- break
-
- while lines:
- line = lines.pop(0)
- #if line.find("Directory: ") == 0:
- # break
- #if re.search(r"^--- NEW FILE", line):
- # break
- #if re.search(r" DELETED ---$", line):
- # break
- comments += line
- comments = comments.rstrip() + "\n"
- change = changes.Change(who, files, comments, isdir, when=when,
- branch=branch)
- return change
-
-
-
-def parseFreshCVSMail(self, fd, prefix=None, sep="/"):
- """Parse mail sent by FreshCVS"""
- # this uses rfc822.Message so it can run under python2.1 . In the future
- # it will be updated to use python2.2's "email" module.
-
- m = Message(fd)
- # FreshCVS sets From: to "user CVS <user>", but the <> part may be
- # modified by the MTA (to include a local domain)
- name, addr = m.getaddr("from")
- if not name:
- return None # no From means this message isn't from FreshCVS
- cvs = name.find(" CVS")
- if cvs == -1:
- return None # this message isn't from FreshCVS
- who = name[:cvs]
-
- # we take the time of receipt as the time of checkin. Not correct, but it
- # avoids the out-of-order-changes issue. See the comment in parseSyncmail
- # about using the 'Date:' header
- when = util.now()
-
- files = []
- comments = ""
- isdir = 0
- lines = m.fp.readlines()
- while lines:
- line = lines.pop(0)
- if line == "Modified files:\n":
- break
- while lines:
- line = lines.pop(0)
- if line == "\n":
- break
- line = line.rstrip("\n")
- linebits = line.split(None, 1)
- file = linebits[0]
- if prefix:
- # insist that the file start with the prefix: FreshCVS sends
- # changes we don't care about too
- bits = file.split(sep)
- if bits[0] == prefix:
- file = sep.join(bits[1:])
- else:
- break
- if len(linebits) == 1:
- isdir = 1
- elif linebits[1] == "0 0":
- isdir = 1
- files.append(file)
- while lines:
- line = lines.pop(0)
- if line == "Log message:\n":
- break
- # message is terminated by "ViewCVS links:" or "Index:..." (patch)
- while lines:
- line = lines.pop(0)
- if line == "ViewCVS links:\n":
- break
- if line.find("Index: ") == 0:
- break
- comments += line
- comments = comments.rstrip() + "\n"
-
- if not files:
- return None
-
- change = changes.Change(who, files, comments, isdir, when=when)
-
- return change
-
-def parseSyncmail(self, fd, prefix=None, sep="/"):
- """Parse messages sent by the 'syncmail' program, as suggested by the
- sourceforge.net CVS Admin documentation. Syncmail is maintained at
- syncmail.sf.net .
- """
- # pretty much the same as freshcvs mail, not surprising since CVS is the
- # one creating most of the text
-
- m = Message(fd)
- # The mail is sent from the person doing the checkin. Assume that the
- # local username is enough to identify them (this assumes a one-server
- # cvs-over-rsh environment rather than the server-dirs-shared-over-NFS
- # model)
- name, addr = m.getaddr("from")
- if not addr:
-        return None # no From means this message isn't from syncmail
- at = addr.find("@")
- if at == -1:
- who = addr # might still be useful
- else:
- who = addr[:at]
-
- # we take the time of receipt as the time of checkin. Not correct (it
- # depends upon the email latency), but it avoids the out-of-order-changes
- # issue. Also syncmail doesn't give us anything better to work with,
- # unless you count pulling the v1-vs-v2 timestamp out of the diffs, which
- # would be ugly. TODO: Pulling the 'Date:' header from the mail is a
- # possibility, and email.Utils.parsedate_tz may be useful. It should be
- # configurable, however, because there are a lot of broken clocks out
- # there.
- when = util.now()
-
- subject = m.getheader("subject")
- # syncmail puts the repository-relative directory in the subject:
- # mprefix + "%(dir)s %(file)s,%(oldversion)s,%(newversion)s", where
- # 'mprefix' is something that could be added by a mailing list
- # manager.
- # this is the only reasonable way to determine the directory name
- space = subject.find(" ")
- if space != -1:
- directory = subject[:space]
- else:
- directory = subject
-
- files = []
- comments = ""
- isdir = 0
- branch = None
-
- lines = m.fp.readlines()
- #while lines:
- # line = lines.pop(0)
-
- # if (line == "Modified:\n" or
- # line == "Added:\n" or
- # line == "Removed:\n"):
- # break
-
- while lines:
- line = lines.pop(0)
- #if line == "\n":
- # break
- #if line == "Log:\n":
- # lines.insert(0, line)
- # break
- line = line.lstrip()
- line = line.rstrip()
- # note: syncmail will send one email per directory involved in a
- # commit, with multiple files if they were in the same directory.
- # Unlike freshCVS, it makes no attempt to collect all related
- # commits into a single message.
-
- # note: syncmail will report a Tag underneath the ... Files: line
- # e.g.: Tag: BRANCH-DEVEL
-
- if line.startswith('Tag:'):
- branch = line.split(' ')[-1].rstrip()
- branch = branch.replace("cws_src680_","")
- continue
-
- # note: it doesn't actually make sense to use portable functions
- # like os.path.join and os.sep, because these filenames all use
- # separator conventions established by the remote CVS server (which
- # is probably running on unix), not the local buildmaster system.
- thesefiles = line.split(" ")
- for f in thesefiles:
- f = sep.join([directory, f])
- if prefix:
- # insist that the file start with the prefix: we may get
- # changes we don't care about too
- bits = f.split(sep)
- if bits[0] == prefix:
- f = sep.join(bits[1:])
- else:
- break
- # TODO: figure out how new directories are described, set .isdir
- files.append(f)
-
- #if not files:
- # return None
-
- if not branch:
- return None
-
- while lines:
- line = lines.pop(0)
- if line == "Log:\n":
- break
- # message is terminated by "Index:..." (patch) or "--- NEW FILE.."
- # or "--- filename DELETED ---". Sigh.
- while lines:
- line = lines.pop(0)
- if line.find("Index: ") == 0:
- break
- if re.search(r"^--- NEW FILE", line):
- break
- if re.search(r" DELETED ---$", line):
- break
- comments += line
- comments = comments.rstrip() + "\n"
-
- change = changes.Change(who, files, comments, isdir, when=when,
- branch=branch)
-
- return change
-
-# Bonsai mail parser by Stephen Davis.
-#
-# This handles changes for CVS repositories that are watched by Bonsai
-# (http://www.mozilla.org/bonsai.html)
-
-# A Bonsai-formatted email message looks like:
-#
-# C|1071099907|stephend|/cvs|Sources/Scripts/buildbot|bonsai.py|1.2|||18|7
-# A|1071099907|stephend|/cvs|Sources/Scripts/buildbot|master.cfg|1.1|||18|7
-# R|1071099907|stephend|/cvs|Sources/Scripts/buildbot|BuildMaster.py|||
-# LOGCOMMENT
-# Updated bonsai parser and switched master config to buildbot-0.4.1 style.
-#
-# :ENDLOGCOMMENT
-#
-# In the first example line, stephend is the user, /cvs the repository,
-# buildbot the directory, bonsai.py the file, 1.2 the revision, no sticky
-# and branch, 18 lines added and 7 removed. All of these fields might not be
-# present (during "removes" for example).
-#
-# There may be multiple "control" lines or even none (imports, directory
-# additions) but there is one email per directory. We only care about actual
-# changes since it is presumed directory additions don't actually affect the
-# build. At least one file should need to change (the makefile, say) to
-# actually make a new directory part of the build process. That's my story
-# and I'm sticking to it.
-
-def parseBonsaiMail(self, fd, prefix=None):
- """Parse mail sent by the Bonsai cvs loginfo script."""
-
- msg = Message(fd)
-
- # we don't care who the email came from b/c the cvs user is in the msg
- # text
-
- who = "unknown"
- timestamp = None
- files = []
- lines = msg.fp.readlines()
-
- # read the control lines (what/who/where/file/etc.)
- while lines:
- line = lines.pop(0)
- if line == "LOGCOMMENT\n":
- break;
- line = line.rstrip("\n")
-
- # we'd like to do the following but it won't work if the number of
- # items doesn't match so...
- # what, timestamp, user, repo, module, file = line.split( '|' )
- items = line.split('|')
- if len(items) < 6:
- # not a valid line, assume this isn't a bonsai message
- return None
-
- try:
- # just grab the bottom-most timestamp, they're probably all the
- # same. TODO: I'm assuming this is relative to the epoch, but
- # this needs testing.
- timestamp = int(items[1])
- except ValueError:
- pass
-
- user = items[2]
- if user:
- who = user
-
- module = items[4]
- file = items[5]
- if module and file:
- path = "%s/%s" % (module, file)
- files.append(path)
-
- # if no files changed, return nothing
- if not files:
- return None
-
- # read the comments
- comments = ""
- while lines:
- line = lines.pop(0)
- if line == ":ENDLOGCOMMENT\n":
- break
- comments += line
- comments = comments.rstrip() + "\n"
-
- # return buildbot Change object
- return changes.Change(who, files, comments, when=timestamp)
-
-
-class MaildirSource(maildirtwisted.MaildirTwisted, base.ChangeSource):
- """This source will watch a maildir that is subscribed to a FreshCVS
- change-announcement mailing list.
- """
- # we need our own implements() here, at least for twisted-1.3, because
- # the double-inheritance of Service shadows __implements__ from
- # ChangeSource.
- if not implements:
- __implements__ = base.ChangeSource.__implements__
-
- compare_attrs = ["basedir", "newdir", "pollinterval", "parser"]
- parser = None
- name = None
-
- def __init__(self, maildir, prefix=None, sep="/"):
- maildirtwisted.MaildirTwisted.__init__(self, maildir)
- self.prefix = prefix
- self.sep = sep
-
- def describe(self):
- return "%s mailing list in maildir %s" % (self.name, self.basedir)
-
- def messageReceived(self, filename):
- path = os.path.join(self.basedir, "new", filename)
- change = self.parser(open(path, "r"), self.prefix, self.sep)
- if change:
- self.parent.addChange(change)
- os.rename(os.path.join(self.basedir, "new", filename),
- os.path.join(self.basedir, "cur", filename))
-
-class FCMaildirSource(MaildirSource):
- parser = parseFreshCVSMail
- name = "FreshCVS"
-
-class OOMaildirSource(MaildirSource):
- parser = parseOOAllCVSmail
- name = "AllCVS"
-
-class SyncmailMaildirSource(MaildirSource):
- parser = parseSyncmail
- name = "Syncmail"
-
-class BonsaiMaildirSource(MaildirSource):
- parser = parseBonsaiMail
- name = "Bonsai"
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/maildir.py b/buildbot/buildbot-source/build/lib/buildbot/changes/maildir.py
deleted file mode 100644
index 83ff5ae14..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/maildir.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#! /usr/bin/python
-
-# This is a class which watches a maildir for new messages. It uses the
-# linux dirwatcher API (if available) to look for new files. The
-# .messageReceived method is invoked with the filename of the new message,
-# relative to the 'new' directory of the maildir.
-
-# this is an abstract base class. It must be subclassed by something to
-# provide a delay function (which polls in the case that DNotify isn't
-# available) and a way to safely schedule code to run after a signal handler
-# has fired. See maildirgtk.py and maildirtwisted.py for forms that use the
-# event loops provided by Gtk+ and Twisted.
-
-try:
- from dnotify import DNotify
- have_dnotify = 1
-except:
- have_dnotify = 0
-import os, os.path
-
-class Maildir:
- """This is a class which watches a maildir for new messages. Once
- started, it will run its .messageReceived method when a message is
- available.
- """
- def __init__(self, basedir=None):
- """Create the Maildir watcher. BASEDIR is the maildir directory (the
- one which contains new/ and tmp/)
- """
- self.basedir = basedir
- self.files = []
- self.pollinterval = 10 # only used if we don't have DNotify
- self.running = 0
- self.dnotify = None
-
- def setBasedir(self, basedir):
- self.basedir = basedir
-
- def start(self):
- """You must run start to receive any messages."""
- assert self.basedir
- self.newdir = os.path.join(self.basedir, "new")
- if self.running:
- return
- self.running = 1
- if not os.path.isdir(self.basedir) or not os.path.isdir(self.newdir):
- raise "invalid maildir '%s'" % self.basedir
- # we must hold an fd open on the directory, so we can get notified
- # when it changes.
- global have_dnotify
- if have_dnotify:
- try:
- self.dnotify = DNotify(self.newdir, self.dnotify_callback,
- [DNotify.DN_CREATE])
- except (IOError, OverflowError):
- # IOError is probably linux<2.4.19, which doesn't support
- # dnotify. OverflowError will occur on some 64-bit machines
- # because of a python bug
- print "DNotify failed, falling back to polling"
- have_dnotify = 0
-
- self.poll()
-
- def startTimeout(self):
-        raise NotImplementedError
- def stopTimeout(self):
-        raise NotImplementedError
- def dnotify_callback(self):
- print "callback"
- self.poll()
-        raise NotImplementedError
-
- def stop(self):
- if self.dnotify:
- self.dnotify.remove()
- self.dnotify = None
- else:
- self.stopTimeout()
- self.running = 0
-
- def poll(self):
- assert self.basedir
- # see what's new
- for f in self.files:
- if not os.path.isfile(os.path.join(self.newdir, f)):
- self.files.remove(f)
- newfiles = []
- for f in os.listdir(self.newdir):
- if not f in self.files:
- newfiles.append(f)
- self.files.extend(newfiles)
- # TODO: sort by ctime, then filename, since safecat uses a rather
- # fine-grained timestamp in the filename
- for n in newfiles:
- # TODO: consider catching exceptions in messageReceived
- self.messageReceived(n)
- if not have_dnotify:
- self.startTimeout()
-
- def messageReceived(self, filename):
- """Called when a new file is noticed. Override it in subclasses.
- Will receive path relative to maildir/new."""
- print filename
-
-
-def test1():
- m = Maildir("ddir")
- m.start()
- import signal
- while 1:
- signal.pause()
-
-if __name__ == '__main__':
- test1()
-
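The Maildir class above is abstract: the delay hooks must come from a subclass such as the Gtk and Twisted variants below. As a rough sketch (not part of the original sources, assuming dnotify is unavailable, and using threading.Timer purely for illustration), such a subclass might look like:

import threading
from buildbot.changes.maildir import Maildir

class ThreadedMaildir(Maildir):
    # hypothetical subclass, for illustration only
    def __init__(self, basedir=None):
        Maildir.__init__(self, basedir)
        self.timer = None
    def startTimeout(self):
        # schedule the next poll after pollinterval seconds
        self.timer = threading.Timer(self.pollinterval, self.poll)
        self.timer.start()
    def stopTimeout(self):
        if self.timer:
            self.timer.cancel()
            self.timer = None
    def messageReceived(self, filename):
        print("new message in %s/new/%s" % (self.basedir, filename))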
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/maildirgtk.py b/buildbot/buildbot-source/build/lib/buildbot/changes/maildirgtk.py
deleted file mode 100644
index 4bc03c4c5..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/maildirgtk.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#! /usr/bin/python
-
-# This is a class which watches a maildir for new messages. It uses the
-# linux dirwatcher API (if available) to look for new files. The
-# .messageReceived method is invoked with the filename of the new message,
-# relative to the top of the maildir (so it will look like "new/blahblah").
-
-# This form uses the Gtk event loop to handle polling and signal safety
-
-if __name__ == '__main__':
- import pygtk
- pygtk.require("2.0")
-
-import gtk
-from maildir import Maildir
-
-class MaildirGtk(Maildir):
- def __init__(self, basedir):
- Maildir.__init__(self, basedir)
- self.idler = None
- def startTimeout(self):
- self.timeout = gtk.timeout_add(self.pollinterval*1000, self.doTimeout)
- def doTimeout(self):
- self.poll()
- return gtk.TRUE # keep going
- def stopTimeout(self):
- if self.timeout:
- gtk.timeout_remove(self.timeout)
- self.timeout = None
- def dnotify_callback(self):
- # make it safe
- self.idler = gtk.idle_add(self.idlePoll)
- def idlePoll(self):
- gtk.idle_remove(self.idler)
- self.idler = None
- self.poll()
- return gtk.FALSE
-
-def test1():
- class MaildirTest(MaildirGtk):
- def messageReceived(self, filename):
- print "changed:", filename
- m = MaildirTest("ddir")
- print "watching ddir/new/"
- m.start()
- #gtk.main()
- # to allow the python-side signal handler to run, we must surface from
- # gtk (which blocks on the C-side) every once in a while.
- while 1:
- gtk.mainiteration() # this will block until there is something to do
- m.stop()
- print "done"
-
-if __name__ == '__main__':
- test1()
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/maildirtwisted.py b/buildbot/buildbot-source/build/lib/buildbot/changes/maildirtwisted.py
deleted file mode 100644
index ec1bb98b9..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/maildirtwisted.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#! /usr/bin/python
-
-# This is a class which watches a maildir for new messages. It uses the
-# linux dirwatcher API (if available) to look for new files. The
-# .messageReceived method is invoked with the filename of the new message,
-# relative to the top of the maildir (so it will look like "new/blahblah").
-
-# This version is implemented as a Twisted Python "Service". It uses the
-# twisted Reactor to handle polling and signal safety.
-
-from twisted.application import service
-from twisted.internet import reactor
-from maildir import Maildir
-
-class MaildirTwisted(Maildir, service.Service):
- timeout = None
-
- def startService(self):
- self.start()
- service.Service.startService(self)
- def stopService(self):
- self.stop()
- service.Service.stopService(self)
-
- def startTimeout(self):
- self.timeout = reactor.callLater(self.pollinterval, self.poll)
- def stopTimeout(self):
- if self.timeout:
- self.timeout.cancel()
- self.timeout = None
-
- def dnotify_callback(self):
- # make it safe
- #reactor.callFromThread(self.poll)
- reactor.callLater(1, self.poll)
- # give it a moment. I found that qmail had problems when the message
- # was removed from the maildir instantly. It shouldn't, that's what
- # maildirs are made for. I wasn't able to eyeball any reason for the
- # problem, and safecat didn't behave the same way, but qmail reports
- # "Temporary_error_on_maildir_delivery" (qmail-local.c:165,
- # maildir_child() process exited with rc not in 0,2,3,4). Not sure why,
- # would have to hack qmail to investigate further, easier to just
- # wait a second before yanking the message out of new/ .
-
-## def messageReceived(self, filename):
-## if self.callback:
-## self.callback(filename)
-
-class MaildirService(MaildirTwisted):
- """I watch a maildir for new messages. I should be placed as the service
- child of some MultiService instance. When running, I use the linux
- dirwatcher API (if available) or poll for new files in the 'new'
- subdirectory of my maildir path. When I discover a new message, I invoke
- my parent's .messageReceived() method with the short filename of the new
- message, so the full name of the new file can be obtained with
- os.path.join(maildir, 'new', filename). I will not move or delete the
- file on my own: the parent should do this in messageReceived().
- """
- def messageReceived(self, filename):
- self.parent.messageReceived(filename)
-
-
-def test1():
- class MaildirTest(MaildirTwisted):
- def messageReceived(self, filename):
- print "changed:", filename
- m = MaildirTest(basedir="ddir")
- print "watching ddir/new/"
- m.startService()
- reactor.run()
- print "done"
-
-if __name__ == '__main__':
- test1()
-
-
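A minimal, hypothetical parent service of the kind the MaildirService docstring describes might look like this sketch (names are illustrative only):

import os
from twisted.application import service
from buildbot.changes.maildirtwisted import MaildirService

class MailSink(service.MultiService):
    # hypothetical parent; MaildirService calls our messageReceived()
    # with the short filename of each new message
    def __init__(self, basedir):
        service.MultiService.__init__(self)
        self.basedir = basedir
        MaildirService(basedir).setServiceParent(self)
    def messageReceived(self, filename):
        path = os.path.join(self.basedir, "new", filename)
        print("mail arrived: %s" % path)
        # moving or deleting the file is left to us, per the docstring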
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/p4poller.py b/buildbot/buildbot-source/build/lib/buildbot/changes/p4poller.py
deleted file mode 100644
index d14e57c49..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/p4poller.py
+++ /dev/null
@@ -1,142 +0,0 @@
-#! /usr/bin/python
-
-# Many thanks to Dave Peticolas for contributing this module
-
-from twisted.internet import defer
-from twisted.internet.utils import getProcessOutput
-from twisted.internet.task import LoopingCall
-
-from buildbot import util
-from buildbot.changes import base, changes
-
-class P4Source(base.ChangeSource, util.ComparableMixin):
- """This source will poll a perforce repository for changes and submit
- them to the change master."""
-
- compare_attrs = ["p4port", "p4user", "p4passwd", "p4client", "p4base",
- "p4bin", "pollinterval", "histmax"]
-
- parent = None # filled in when we're added
- last_change = None
- loop = None
- volatile = ['loop']
-
- def __init__(self, p4port, p4user, p4passwd=None, p4client=None,
- p4base='//...', p4bin='p4',
- pollinterval=60 * 10, histmax=100):
- """
- @type p4port: string
- @param p4port: p4 port definition (host:portno)
- @type p4user: string
- @param p4user: p4 user
- @type p4passwd: string
- @param p4passwd: p4 passwd
- @type p4client: string
- @param p4client: name of p4 client to poll
- @type p4base: string
- @param p4base: p4 file specification to limit a poll to
- (i.e., //...)
- @type p4bin: string
- @param p4bin: path to p4 binary, defaults to just 'p4'
- @type pollinterval: int
- @param pollinterval: interval in seconds between polls
- @type histmax: int
- @param histmax: maximum number of changes to look back through
- """
-
- self.p4port = p4port
- self.p4user = p4user
- self.p4passwd = p4passwd
- self.p4client = p4client
- self.p4base = p4base
- self.p4bin = p4bin
- self.pollinterval = pollinterval
- self.histmax = histmax
-
- def startService(self):
- self.loop = LoopingCall(self.checkp4)
- self.loop.start(self.pollinterval)
- base.ChangeSource.startService(self)
-
- def stopService(self):
- self.loop.stop()
- return base.ChangeSource.stopService(self)
-
- def describe(self):
- return "p4source %s-%s %s" % (self.p4port, self.p4client, self.p4base)
-
- def checkp4(self):
- d = self._get_changes()
- d.addCallback(self._process_changes)
- d.addCallback(self._handle_changes)
-
- def _get_changes(self):
- args = []
- if self.p4port:
- args.extend(['-p', self.p4port])
- if self.p4user:
- args.extend(['-u', self.p4user])
- if self.p4passwd:
- args.extend(['-P', self.p4passwd])
- if self.p4client:
- args.extend(['-c', self.p4client])
- args.extend(['changes', '-m', str(self.histmax), self.p4base])
- env = {}
- return getProcessOutput(self.p4bin, args, env)
-
- def _process_changes(self, result):
- last_change = self.last_change
- changelists = []
- for line in result.split('\n'):
- line = line.strip()
- if not line: continue
- _, num, _, date, _, user, _ = line.split(' ', 6)
- if last_change is None:
- self.last_change = num
- return []
- if last_change == num: break
- change = {'num' : num, 'date' : date, 'user' : user.split('@')[0]}
- changelists.append(change)
- changelists.reverse() # oldest first
- ds = [self._get_change(c) for c in changelists]
- return defer.DeferredList(ds)
-
- def _get_change(self, change):
- args = []
- if self.p4port:
- args.extend(['-p', self.p4port])
- if self.p4user:
- args.extend(['-u', self.p4user])
- if self.p4passwd:
- args.extend(['-P', self.p4passwd])
- if self.p4client:
- args.extend(['-c', self.p4client])
- args.extend(['describe', '-s', change['num']])
- env = {}
- d = getProcessOutput(self.p4bin, args, env)
- d.addCallback(self._process_change, change)
- return d
-
- def _process_change(self, result, change):
- lines = result.split('\n')
- comments = ''
- while not lines[0].startswith('Affected files'):
- comments += lines.pop(0) + '\n'
- change['comments'] = comments
- lines.pop(0) # affected files
- files = []
- while lines:
- line = lines.pop(0).strip()
- if not line: continue
- files.append(line.split(' ')[1])
- change['files'] = files
- return change
-
- def _handle_changes(self, result):
- for success, change in result:
- if not success: continue
- c = changes.Change(change['user'], change['files'],
- change['comments'],
- revision=change['num'])
- self.parent.addChange(c)
- self.last_change = change['num']
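For illustration only, a hypothetical P4Source configuration using the constructor arguments documented above (host, user, and depot path are placeholders) might be:

from buildbot.changes.p4poller import P4Source

source = P4Source(p4port="p4.example.com:1666",   # host:portno
                  p4user="buildbot",
                  p4base="//depot/trunk/...",     # limit polling to this spec
                  pollinterval=5 * 60,            # poll every five minutes
                  histmax=50)                     # look back at most 50 changes
# Once attached to the buildmaster's change master, startService() begins
# the LoopingCall that runs checkp4() every pollinterval seconds.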
diff --git a/buildbot/buildbot-source/build/lib/buildbot/changes/pb.py b/buildbot/buildbot-source/build/lib/buildbot/changes/pb.py
deleted file mode 100644
index 105f1efdf..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/changes/pb.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# -*- test-case-name: buildbot.test.test_changes -*-
-
-import os, os.path
-
-from twisted.application import service
-from twisted.python import log
-
-from buildbot.pbutil import NewCredPerspective
-from buildbot.changes import base, changes
-
-class ChangePerspective(NewCredPerspective):
-
- def __init__(self, changemaster, prefix, sep="/"):
- self.changemaster = changemaster
- self.prefix = prefix
- # this is the separator as used by the VC system, not the local host.
- # If for some reason you're running your CVS repository under
- # windows, you'll need to use a PBChangeSource(sep="\\")
- self.sep = sep
-
- def attached(self, mind):
- return self
- def detached(self, mind):
- pass
-
- def perspective_addChange(self, changedict):
- log.msg("perspective_addChange called")
- pathnames = []
- for path in changedict['files']:
- if self.prefix:
- bits = path.split(self.sep)
- if bits[0] == self.prefix:
- if bits[1:]:
- path = self.sep.join(bits[1:])
- else:
- path = ''
- else:
- break
- pathnames.append(path)
-
- if pathnames:
- change = changes.Change(changedict['who'],
- pathnames,
- changedict['comments'],
- branch=changedict.get('branch'),
- revision=changedict.get('revision'),
- )
- self.changemaster.addChange(change)
-
-class PBChangeSource(base.ChangeSource):
- compare_attrs = ["user", "passwd", "port", "prefix", "sep"]
-
- def __init__(self, user="change", passwd="changepw", port=None,
- prefix=None, sep="/"):
- # TODO: current limitations
- assert user == "change"
- assert passwd == "changepw"
- assert port == None
- self.user = user
- self.passwd = passwd
- self.port = port
- self.prefix = prefix
- self.sep = sep
-
- def describe(self):
- # TODO: when the dispatcher is fixed, report the specific port
- #d = "PB listener on port %d" % self.port
- d = "PBChangeSource listener on all-purpose slaveport"
- if self.prefix is not None:
- d += " (prefix '%s')" % self.prefix
- return d
-
- def startService(self):
- base.ChangeSource.startService(self)
- # our parent is the ChangeMaster object
- # find the master's Dispatch object and register our username
- # TODO: the passwd should be registered here too
- master = self.parent.parent
- master.dispatcher.register(self.user, self)
-
- def stopService(self):
- base.ChangeSource.stopService(self)
- # unregister our username
- master = self.parent.parent
- master.dispatcher.unregister(self.user)
-
- def getPerspective(self):
- return ChangePerspective(self.parent, self.prefix, self.sep)
-
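A short illustrative use of the prefix/sep handling described above (the prefix value is a placeholder):

from buildbot.changes.pb import PBChangeSource

# Drop a leading "trunk/" component from every pathname sent by the
# remote client. sep is the separator used by the VC system, not the
# local host.
source = PBChangeSource(prefix="trunk", sep="/")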
diff --git a/buildbot/buildbot-source/build/lib/buildbot/clients/__init__.py b/buildbot/buildbot-source/build/lib/buildbot/clients/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/clients/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/build/lib/buildbot/clients/base.py b/buildbot/buildbot-source/build/lib/buildbot/clients/base.py
deleted file mode 100644
index c5d12a322..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/clients/base.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#! /usr/bin/python
-
-import sys, re
-
-from twisted.spread import pb
-from twisted.cred import credentials
-from twisted.internet import reactor
-
-class StatusClient(pb.Referenceable):
- """To use this, call my .connected method with a RemoteReference to the
- buildmaster's StatusClientPerspective object.
- """
-
- def __init__(self, events):
- self.builders = {}
- self.events = events
-
- def connected(self, remote):
- print "connected"
- self.remote = remote
- remote.callRemote("subscribe", self.events, 5, self)
-
- def remote_builderAdded(self, buildername, builder):
- print "builderAdded", buildername
-
- def remote_builderRemoved(self, buildername):
- print "builderRemoved", buildername
-
- def remote_builderChangedState(self, buildername, state, eta):
- print "builderChangedState", buildername, state, eta
-
- def remote_buildStarted(self, buildername, build):
- print "buildStarted", buildername
-
- def remote_buildFinished(self, buildername, build, results):
- print "buildFinished", results
-
- def remote_buildETAUpdate(self, buildername, build, eta):
- print "ETA", buildername, eta
-
- def remote_stepStarted(self, buildername, build, stepname, step):
- print "stepStarted", buildername, stepname
-
- def remote_stepFinished(self, buildername, build, stepname, step, results):
- print "stepFinished", buildername, stepname, results
-
- def remote_stepETAUpdate(self, buildername, build, stepname, step,
- eta, expectations):
- print "stepETA", buildername, stepname, eta
-
- def remote_logStarted(self, buildername, build, stepname, step,
- logname, log):
- print "logStarted", buildername, stepname
-
- def remote_logFinished(self, buildername, build, stepname, step,
- logname, log):
- print "logFinished", buildername, stepname
-
- def remote_logChunk(self, buildername, build, stepname, step, logname, log,
- channel, text):
- ChunkTypes = ["STDOUT", "STDERR", "HEADER"]
- print "logChunk[%s]: %s" % (ChunkTypes[channel], text)
-
-class TextClient:
- def __init__(self, master, events="steps"):
- """
- @type events: string, one of builders, builds, steps, logs, full
- @param events: specify what level of detail should be reported.
- - 'builders': only announce new/removed Builders
- - 'builds': also announce builderChangedState, buildStarted, and
- buildFinished
- - 'steps': also announce buildETAUpdate, stepStarted, stepFinished
- - 'logs': also announce stepETAUpdate, logStarted, logFinished
- - 'full': also announce log contents
- """
- self.master = master
- self.listener = StatusClient(events)
-
- def run(self):
- """Start the TextClient."""
- self.startConnecting()
- reactor.run()
-
- def startConnecting(self):
- try:
- host, port = re.search(r'(.+):(\d+)', self.master).groups()
- port = int(port)
- except:
- print "unparseable master location '%s'" % self.master
- print " expecting something more like localhost:8007"
- raise
- cf = pb.PBClientFactory()
- creds = credentials.UsernamePassword("statusClient", "clientpw")
- d = cf.login(creds)
- reactor.connectTCP(host, port, cf)
- d.addCallback(self.connected)
- return d
- def connected(self, ref):
- ref.notifyOnDisconnect(self.disconnected)
- self.listener.connected(ref)
-
- def disconnected(self, ref):
- print "lost connection"
- reactor.stop()
-
-if __name__ == '__main__':
- master = "localhost:8007"
- if len(sys.argv) > 1:
- master = sys.argv[1]
- c = TextClient(master)
- c.run()
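An illustrative invocation of the client above, where 'steps' is one of the detail levels listed in the TextClient docstring (the address is a placeholder):

from buildbot.clients.base import TextClient

TextClient("localhost:8007", events="steps").run()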
diff --git a/buildbot/buildbot-source/build/lib/buildbot/clients/debug.py b/buildbot/buildbot-source/build/lib/buildbot/clients/debug.py
deleted file mode 100644
index 5e0fa6e4b..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/clients/debug.py
+++ /dev/null
@@ -1,163 +0,0 @@
-#! /usr/bin/python
-
-from twisted.internet import gtk2reactor
-gtk2reactor.install()
-from twisted.internet import reactor
-from twisted.python import util
-from twisted.spread import pb
-from twisted.cred import credentials
-import gtk, gtk.glade, gnome.ui
-import os, sys, re
-
-class DebugWidget:
- def __init__(self, master="localhost:8007", passwd="debugpw"):
- self.connected = 0
- try:
- host, port = re.search(r'(.+):(\d+)', master).groups()
- except:
- print "unparseable master location '%s'" % master
- print " expecting something more like localhost:8007"
- raise
- self.host = host
- self.port = int(port)
- self.passwd = passwd
- self.remote = None
- xml = self.xml = gtk.glade.XML(util.sibpath(__file__, "debug.glade"))
- g = xml.get_widget
- self.buildname = g('buildname')
- self.filename = g('filename')
- self.connectbutton = g('connectbutton')
- self.connectlabel = g('connectlabel')
- g('window1').connect('destroy', lambda win: gtk.mainquit())
- # put the master info in the window's titlebar
- g('window1').set_title("Buildbot Debug Tool: %s" % master)
- c = xml.signal_connect
- c('do_connect', self.do_connect)
- c('do_reload', self.do_reload)
- c('do_rebuild', self.do_rebuild)
- c('do_poke_irc', self.do_poke_irc)
- c('do_build', self.do_build)
- c('do_commit', self.do_commit)
- c('on_usebranch_toggled', self.usebranch_toggled)
- self.usebranch_toggled(g('usebranch'))
- c('on_userevision_toggled', self.userevision_toggled)
- self.userevision_toggled(g('userevision'))
- c('do_current_offline', self.do_current, "offline")
- c('do_current_idle', self.do_current, "idle")
- c('do_current_waiting', self.do_current, "waiting")
- c('do_current_building', self.do_current, "building")
-
- def do_connect(self, widget):
- if self.connected:
- self.connectlabel.set_text("Disconnecting...")
- if self.remote:
- self.remote.broker.transport.loseConnection()
- else:
- self.connectlabel.set_text("Connecting...")
- f = pb.PBClientFactory()
- creds = credentials.UsernamePassword("debug", self.passwd)
- d = f.login(creds)
- reactor.connectTCP(self.host, int(self.port), f)
- d.addCallbacks(self.connect_complete, self.connect_failed)
- def connect_complete(self, ref):
- self.connectbutton.set_label("Disconnect")
- self.connectlabel.set_text("Connected")
- self.connected = 1
- self.remote = ref
- self.remote.callRemote("print", "hello cleveland")
- self.remote.notifyOnDisconnect(self.disconnected)
- def connect_failed(self, why):
- self.connectlabel.set_text("Failed")
- print why
- def disconnected(self, ref):
- self.connectbutton.set_label("Connect")
- self.connectlabel.set_text("Disconnected")
- self.connected = 0
- self.remote = None
-
- def do_reload(self, widget):
- if not self.remote:
- return
- d = self.remote.callRemote("reload")
- d.addErrback(self.err)
- def do_rebuild(self, widget):
- print "Not yet implemented"
- return
- def do_poke_irc(self, widget):
- if not self.remote:
- return
- d = self.remote.callRemote("pokeIRC")
- d.addErrback(self.err)
-
- def do_build(self, widget):
- if not self.remote:
- return
- name = self.buildname.get_text()
- d = self.remote.callRemote("forceBuild", name)
- d.addErrback(self.err)
-
- def usebranch_toggled(self, widget):
- rev = self.xml.get_widget('branch')
- if widget.get_active():
- rev.set_sensitive(True)
- else:
- rev.set_sensitive(False)
-
- def userevision_toggled(self, widget):
- rev = self.xml.get_widget('revision')
- if widget.get_active():
- rev.set_sensitive(True)
- else:
- rev.set_sensitive(False)
-
- def do_commit(self, widget):
- if not self.remote:
- return
- filename = self.filename.get_text()
- who = self.xml.get_widget("who").get_text()
-
- branch = None
- if self.xml.get_widget("usebranch").get_active():
- branch = self.xml.get_widget('branch').get_text()
- if branch == '':
- branch = None
-
- revision = None
- if self.xml.get_widget("userevision").get_active():
- revision = self.xml.get_widget('revision').get_text()
- try:
- revision = int(revision)
- except ValueError:
- pass
- if revision == '':
- revision = None
-
- kwargs = { 'revision': revision, 'who': who }
- if branch:
- kwargs['branch'] = branch
- d = self.remote.callRemote("fakeChange", filename, **kwargs)
- d.addErrback(self.err)
-
- def do_current(self, widget, state):
- if not self.remote:
- return
- name = self.buildname.get_text()
- d = self.remote.callRemote("setCurrentState", name, state)
- d.addErrback(self.err)
- def err(self, failure):
- print "received error"
- failure.printTraceback()
-
-
- def run(self):
- reactor.run()
-
-if __name__ == '__main__':
- master = "localhost:8007"
- if len(sys.argv) > 1:
- master = sys.argv[1]
- passwd = "debugpw"
- if len(sys.argv) > 2:
- passwd = sys.argv[2]
- d = DebugWidget(master, passwd)
- d.run()
diff --git a/buildbot/buildbot-source/build/lib/buildbot/clients/gtkPanes.py b/buildbot/buildbot-source/build/lib/buildbot/clients/gtkPanes.py
deleted file mode 100644
index b82ac509c..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/clients/gtkPanes.py
+++ /dev/null
@@ -1,428 +0,0 @@
-#! /usr/bin/python
-
-from twisted.internet import gtk2reactor
-gtk2reactor.install()
-
-from twisted.internet import reactor
-
-import sys, time
-
-import pygtk
-pygtk.require("2.0")
-import gtk
-assert(gtk.Window) # in gtk1 it's gtk.GtkWindow
-
-from twisted.spread import pb
-
-#from buildbot.clients.base import Builder, Client
-from buildbot.clients.base import TextClient
-#from buildbot.util import now
-
-'''
-class Pane:
- def __init__(self):
- pass
-
-class OneRow(Pane):
- """This is a one-row status bar. It has one square per Builder, and that
- square is either red, yellow, or green. """
-
- def __init__(self):
- Pane.__init__(self)
- self.widget = gtk.VBox(gtk.FALSE, 2)
- self.nameBox = gtk.HBox(gtk.TRUE)
- self.statusBox = gtk.HBox(gtk.TRUE)
- self.widget.add(self.nameBox)
- self.widget.add(self.statusBox)
- self.widget.show_all()
- self.builders = []
-
- def getWidget(self):
- return self.widget
- def addBuilder(self, builder):
- print "OneRow.addBuilder"
- # todo: ordering. Should follow the order in which they were added
- # to the original BotMaster
- self.builders.append(builder)
- # add the name to the left column, and a label (with background) to
- # the right
- name = gtk.Label(builder.name)
- status = gtk.Label('??')
- status.set_size_request(64,64)
- box = gtk.EventBox()
- box.add(status)
- name.show()
- box.show_all()
- self.nameBox.add(name)
- self.statusBox.add(box)
- builder.haveSomeWidgets([name, status, box])
-
-class R2Builder(Builder):
- def start(self):
- self.nameSquare.set_text(self.name)
- self.statusSquare.set_text("???")
- self.subscribe()
- def haveSomeWidgets(self, widgets):
- self.nameSquare, self.statusSquare, self.statusBox = widgets
-
- def remote_newLastBuildStatus(self, event):
- color = None
- if event:
- text = "\n".join(event.text)
- color = event.color
- else:
- text = "none"
- self.statusSquare.set_text(text)
- if color:
- print "color", color
- self.statusBox.modify_bg(gtk.STATE_NORMAL,
- gtk.gdk.color_parse(color))
-
- def remote_currentlyOffline(self):
- self.statusSquare.set_text("offline")
- def remote_currentlyIdle(self):
- self.statusSquare.set_text("idle")
- def remote_currentlyWaiting(self, seconds):
- self.statusSquare.set_text("waiting")
- def remote_currentlyInterlocked(self):
- self.statusSquare.set_text("interlocked")
- def remote_currentlyBuilding(self, eta):
- self.statusSquare.set_text("building")
-
-
-class CompactRow(Pane):
- def __init__(self):
- Pane.__init__(self)
- self.widget = gtk.VBox(gtk.FALSE, 3)
- self.nameBox = gtk.HBox(gtk.TRUE, 2)
- self.lastBuildBox = gtk.HBox(gtk.TRUE, 2)
- self.statusBox = gtk.HBox(gtk.TRUE, 2)
- self.widget.add(self.nameBox)
- self.widget.add(self.lastBuildBox)
- self.widget.add(self.statusBox)
- self.widget.show_all()
- self.builders = []
-
- def getWidget(self):
- return self.widget
-
- def addBuilder(self, builder):
- self.builders.append(builder)
-
- name = gtk.Label(builder.name)
- name.show()
- self.nameBox.add(name)
-
- last = gtk.Label('??')
- last.set_size_request(64,64)
- lastbox = gtk.EventBox()
- lastbox.add(last)
- lastbox.show_all()
- self.lastBuildBox.add(lastbox)
-
- status = gtk.Label('??')
- status.set_size_request(64,64)
- statusbox = gtk.EventBox()
- statusbox.add(status)
- statusbox.show_all()
- self.statusBox.add(statusbox)
-
- builder.haveSomeWidgets([name, last, lastbox, status, statusbox])
-
- def removeBuilder(self, name, builder):
- self.nameBox.remove(builder.nameSquare)
- self.lastBuildBox.remove(builder.lastBuildBox)
- self.statusBox.remove(builder.statusBox)
- self.builders.remove(builder)
-
-class CompactBuilder(Builder):
- def setup(self):
- self.timer = None
- self.text = []
- self.eta = None
- def start(self):
- self.nameSquare.set_text(self.name)
- self.statusSquare.set_text("???")
- self.subscribe()
- def haveSomeWidgets(self, widgets):
- (self.nameSquare,
- self.lastBuildSquare, self.lastBuildBox,
- self.statusSquare, self.statusBox) = widgets
-
- def remote_currentlyOffline(self):
- self.eta = None
- self.stopTimer()
- self.statusSquare.set_text("offline")
- self.statusBox.modify_bg(gtk.STATE_NORMAL,
- gtk.gdk.color_parse("red"))
- def remote_currentlyIdle(self):
- self.eta = None
- self.stopTimer()
- self.statusSquare.set_text("idle")
- def remote_currentlyWaiting(self, seconds):
- self.nextBuild = now() + seconds
- self.startTimer(self.updateWaiting)
- def remote_currentlyInterlocked(self):
- self.stopTimer()
- self.statusSquare.set_text("interlocked")
- def startTimer(self, func):
- # the func must clear self.timer and return gtk.FALSE when the event
- # has arrived
- self.stopTimer()
- self.timer = gtk.timeout_add(1000, func)
- func()
- def stopTimer(self):
- if self.timer:
- gtk.timeout_remove(self.timer)
- self.timer = None
- def updateWaiting(self):
- when = self.nextBuild
- if now() < when:
- next = time.strftime("%H:%M:%S", time.localtime(when))
- secs = "[%d seconds]" % (when - now())
- self.statusSquare.set_text("waiting\n%s\n%s" % (next, secs))
- return gtk.TRUE # restart timer
- else:
- # done
- self.statusSquare.set_text("waiting\n[RSN]")
- self.timer = None
- return gtk.FALSE
-
- def remote_currentlyBuilding(self, eta):
- self.stopTimer()
- self.statusSquare.set_text("building")
- if eta:
- d = eta.callRemote("subscribe", self, 5)
-
- def remote_newLastBuildStatus(self, event):
- color = None
- if event:
- text = "\n".join(event.text)
- color = event.color
- else:
- text = "none"
- if not color: color = "gray"
- self.lastBuildSquare.set_text(text)
- self.lastBuildBox.modify_bg(gtk.STATE_NORMAL,
- gtk.gdk.color_parse(color))
-
- def remote_newEvent(self, event):
- assert(event.__class__ == GtkUpdatingEvent)
- self.current = event
- event.builder = self
- self.text = event.text
- if not self.text: self.text = ["idle"]
- self.eta = None
- self.stopTimer()
- self.updateText()
- color = event.color
- if not color: color = "gray"
- self.statusBox.modify_bg(gtk.STATE_NORMAL,
- gtk.gdk.color_parse(color))
-
- def updateCurrent(self):
- text = self.current.text
- if text:
- self.text = text
- self.updateText()
- color = self.current.color
- if color:
- self.statusBox.modify_bg(gtk.STATE_NORMAL,
- gtk.gdk.color_parse(color))
- def updateText(self):
- etatext = []
- if self.eta:
- etatext = [time.strftime("%H:%M:%S", time.localtime(self.eta))]
- if now() > self.eta:
- etatext += ["RSN"]
- else:
- seconds = self.eta - now()
- etatext += ["[%d secs]" % seconds]
- text = "\n".join(self.text + etatext)
- self.statusSquare.set_text(text)
- def updateTextTimer(self):
- self.updateText()
- return gtk.TRUE # restart timer
-
- def remote_progress(self, seconds):
- if seconds == None:
- self.eta = None
- else:
- self.eta = now() + seconds
- self.startTimer(self.updateTextTimer)
- self.updateText()
- def remote_finished(self, eta):
- self.eta = None
- self.stopTimer()
- self.updateText()
- eta.callRemote("unsubscribe", self)
-'''
-
-class TwoRowBuilder:
- def __init__(self, ref):
- self.lastbox = lastbox = gtk.EventBox()
- self.lastlabel = lastlabel = gtk.Label("?")
- lastbox.add(lastlabel)
- lastbox.set_size_request(64,64)
-
- self.currentbox = currentbox = gtk.EventBox()
- self.currentlabel = currentlabel = gtk.Label("?")
- currentbox.add(currentlabel)
- currentbox.set_size_request(64,64)
-
- self.ref = ref
-
- def setColor(self, box, color):
- box.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
-
- def getLastBuild(self):
- d = self.ref.callRemote("getLastFinishedBuild")
- d.addCallback(self.gotLastBuild)
- def gotLastBuild(self, build):
- if build:
- build.callRemote("getText").addCallback(self.gotLastText)
- build.callRemote("getColor").addCallback(self.gotLastColor)
-
- def gotLastText(self, text):
- self.lastlabel.set_text("\n".join(text))
- def gotLastColor(self, color):
- self.setColor(self.lastbox, color)
-
- def getState(self):
- self.ref.callRemote("getState").addCallback(self.gotState)
- def gotState(self, res):
- state, ETA, builds = res
- # state is one of: offline, idle, waiting, interlocked, building
- # TODO: ETA is going away, you have to look inside the builds to get
- # that value
- currentmap = {"offline": "red",
- "idle": "white",
- "waiting": "yellow",
- "interlocked": "yellow",
- "building": "yellow",}
- text = state
- self.setColor(self.currentbox, currentmap[state])
- if ETA is not None:
- text += "\nETA=%s secs" % ETA
- self.currentlabel.set_text(text)
-
- def buildStarted(self, build):
- pass
- def buildFinished(self, build, results):
- self.gotLastBuild(build)
-
-
-class TwoRowClient(pb.Referenceable):
- def __init__(self, window):
- self.window = window
- self.buildernames = []
- self.builders = {}
-
- def connected(self, ref):
- print "connected"
- self.ref = ref
- self.pane = gtk.VBox(False, 2)
- self.table = gtk.Table(1+2, 1)
- self.pane.add(self.table)
- self.window.vb.add(self.pane)
- self.pane.show_all()
- ref.callRemote("subscribe", "builds", 5, self)
-
- def removeTable(self):
- for child in self.table.get_children():
- self.table.remove(child)
- self.pane.remove(self.table)
-
- def makeTable(self):
- columns = len(self.builders)
- self.table = gtk.Table(2, columns)
- self.pane.add(self.table)
- for i in range(len(self.buildernames)):
- name = self.buildernames[i]
- b = self.builders[name]
- self.table.attach(gtk.Label(name), i, i+1, 0, 1)
- self.table.attach(b.lastbox, i, i+1, 1, 2,
- xpadding=1, ypadding=1)
- self.table.attach(b.currentbox, i, i+1, 2, 3,
- xpadding=1, ypadding=1)
- self.table.show_all()
-
- def rebuildTable(self):
- self.removeTable()
- self.makeTable()
-
- def remote_builderAdded(self, buildername, builder):
- print "builderAdded", buildername
- assert buildername not in self.buildernames
- self.buildernames.append(buildername)
-
- b = TwoRowBuilder(builder)
- self.builders[buildername] = b
- self.rebuildTable()
- b.getLastBuild()
- b.getState()
-
- def remote_builderRemoved(self, buildername):
- del self.builders[buildername]
- self.buildernames.remove(buildername)
- self.rebuildTable()
-
- def remote_builderChangedState(self, name, state, eta):
- self.builders[name].gotState((state, eta, None))
- def remote_buildStarted(self, name, build):
- self.builders[name].buildStarted(build)
- def remote_buildFinished(self, name, build, results):
- self.builders[name].buildFinished(build, results)
-
-
-class GtkClient(TextClient):
- ClientClass = TwoRowClient
-
- def __init__(self, master):
- self.master = master
-
- w = gtk.Window()
- self.w = w
- #w.set_size_request(64,64)
- w.connect('destroy', lambda win: gtk.main_quit())
- self.vb = gtk.VBox(False, 2)
- self.status = gtk.Label("unconnected")
- self.vb.add(self.status)
- self.listener = self.ClientClass(self)
- w.add(self.vb)
- w.show_all()
-
- def connected(self, ref):
- self.status.set_text("connected")
- TextClient.connected(self, ref)
-
-"""
- def addBuilder(self, name, builder):
- Client.addBuilder(self, name, builder)
- self.pane.addBuilder(builder)
- def removeBuilder(self, name):
- self.pane.removeBuilder(name, self.builders[name])
- Client.removeBuilder(self, name)
-
- def startConnecting(self, master):
- self.master = master
- Client.startConnecting(self, master)
- self.status.set_text("connecting to %s.." % master)
- def connected(self, remote):
- Client.connected(self, remote)
- self.status.set_text(self.master)
- remote.notifyOnDisconnect(self.disconnected)
- def disconnected(self, remote):
- self.status.set_text("disconnected, will retry")
-"""
-
-def main():
- master = "localhost:8007"
- if len(sys.argv) > 1:
- master = sys.argv[1]
- c = GtkClient(master)
- c.run()
-
-if __name__ == '__main__':
- main()
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/clients/sendchange.py b/buildbot/buildbot-source/build/lib/buildbot/clients/sendchange.py
deleted file mode 100644
index 3887505e5..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/clients/sendchange.py
+++ /dev/null
@@ -1,39 +0,0 @@
-
-from twisted.spread import pb
-from twisted.cred import credentials
-from twisted.internet import reactor
-from twisted.python import log
-
-class Sender:
- def __init__(self, master, user):
- self.user = user
- self.host, self.port = master.split(":")
- self.port = int(self.port)
-
- def send(self, branch, revision, comments, files):
- change = {'who': self.user, 'files': files, 'comments': comments,
- 'branch': branch, 'revision': revision}
-
- f = pb.PBClientFactory()
- d = f.login(credentials.UsernamePassword("change", "changepw"))
- reactor.connectTCP(self.host, self.port, f)
- d.addCallback(self.addChange, change)
- return d
-
- def addChange(self, remote, change):
- d = remote.callRemote('addChange', change)
- d.addCallback(lambda res: remote.broker.transport.loseConnection())
- return d
-
- def printSuccess(self, res):
- print "change sent successfully"
- def printFailure(self, why):
- print "change NOT sent"
- print why
-
- def stop(self, res):
- reactor.stop()
- return res
-
- def run(self):
- reactor.run()
diff --git a/buildbot/buildbot-source/build/lib/buildbot/dnotify.py b/buildbot/buildbot-source/build/lib/buildbot/dnotify.py
deleted file mode 100644
index d4c5eda34..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/dnotify.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#! /usr/bin/python
-
-# spiv wants this
-
-import fcntl, signal
-
-class DNotify_Handler:
- def __init__(self):
- self.watchers = {}
- self.installed = 0
- def install(self):
- if self.installed:
- return
- signal.signal(signal.SIGIO, self.fire)
- self.installed = 1
- def uninstall(self):
- if not self.installed:
- return
- signal.signal(signal.SIGIO, signal.SIG_DFL)
- self.installed = 0
- def add(self, watcher):
- self.watchers[watcher.fd.fileno()] = watcher
- self.install()
- def remove(self, watcher):
- if self.watchers.has_key(watcher.fd.fileno()):
- del(self.watchers[watcher.fd.fileno()])
- if not self.watchers:
- self.uninstall()
- def fire(self, signum, frame):
- # this is the signal handler
- # without siginfo_t, we must fire them all
- for watcher in self.watchers.values():
- watcher.callback()
-
-class DNotify:
- DN_ACCESS = fcntl.DN_ACCESS # a file in the directory was read
- DN_MODIFY = fcntl.DN_MODIFY # a file was modified (write,truncate)
- DN_CREATE = fcntl.DN_CREATE # a file was created
- DN_DELETE = fcntl.DN_DELETE # a file was unlinked
- DN_RENAME = fcntl.DN_RENAME # a file was renamed
- DN_ATTRIB = fcntl.DN_ATTRIB # a file had attributes changed (chmod,chown)
-
- handler = [None]
-
- def __init__(self, dirname, callback=None,
- flags=[DN_MODIFY,DN_CREATE,DN_DELETE,DN_RENAME]):
-
- """This object watches a directory for changes. The .callback
- attribute should be set to a function to be run every time something
- happens to it. Be aware that it will be called more times than you
- expect."""
-
- if callback:
- self.callback = callback
- else:
- self.callback = self.fire
- self.dirname = dirname
- self.flags = reduce(lambda x, y: x | y, flags) | fcntl.DN_MULTISHOT
- self.fd = open(dirname, "r")
- # ideally we would move the notification to something like SIGRTMIN,
- # (to free up SIGIO) and use sigaction to have the signal handler
- # receive a structure with the fd number. But python doesn't offer
- # either.
- if not self.handler[0]:
- self.handler[0] = DNotify_Handler()
- self.handler[0].add(self)
- fcntl.fcntl(self.fd, fcntl.F_NOTIFY, self.flags)
- def remove(self):
- self.handler[0].remove(self)
- self.fd.close()
- def fire(self):
- print self.dirname, "changed!"
-
-def test_dnotify1():
- d = DNotify(".")
- import time
- while 1:
- signal.pause()
-
-def test_dnotify2():
- # create ./foo/, create/delete files in ./ and ./foo/ while this is
- # running. Notice how both notifiers are fired when anything changes;
- # this is an unfortunate side-effect of the lack of extended sigaction
- # support in Python.
- count = [0]
- d1 = DNotify(".")
- def fire1(count=count, d1=d1):
- print "./ changed!", count[0]
- count[0] += 1
- if count[0] > 5:
- d1.remove()
- del(d1)
- # change the callback, since we can't define it until after we have the
- # dnotify object. Hmm, unless we give the dnotify to the callback.
- d1.callback = fire1
- def fire2(): print "foo/ changed!"
- d2 = DNotify("foo", fire2)
- import time
- while 1:
- signal.pause()
-
-
-if __name__ == '__main__':
- test_dnotify2()
-
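A minimal hypothetical use of the DNotify wrapper above (Linux with dnotify support assumed):

import signal
from buildbot.dnotify import DNotify

def on_change():
    print("directory changed")

watcher = DNotify(".", on_change, [DNotify.DN_CREATE, DNotify.DN_DELETE])
try:
    while True:
        signal.pause()   # the SIGIO handler installed by DNotify fires on_change
finally:
    watcher.remove()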
diff --git a/buildbot/buildbot-source/build/lib/buildbot/interfaces.py b/buildbot/buildbot-source/build/lib/buildbot/interfaces.py
deleted file mode 100644
index e3986317b..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/interfaces.py
+++ /dev/null
@@ -1,890 +0,0 @@
-#! /usr/bin/python
-
-"""Interface documentation.
-
-Define the interfaces that are implemented by various buildbot classes.
-"""
-
-from twisted.python.components import Interface
-
-# exceptions that can be raised while trying to start a build
-class NoSlaveError(Exception):
- pass
-class BuilderInUseError(Exception):
- pass
-class BuildSlaveTooOldError(Exception):
- pass
-
-class IChangeSource(Interface):
- """Object which feeds Change objects to the changemaster. When files or
- directories are changed and the version control system provides some
- kind of notification, this object should turn it into a Change object
- and pass it through::
-
- self.changemaster.addChange(change)
- """
-
- def start():
- """Called when the buildmaster starts. Can be used to establish
- connections to VC daemons or begin polling."""
-
- def stop():
- """Called when the buildmaster shuts down. Connections should be
- terminated, polling timers should be canceled."""
-
- def describe():
- """Should return a string which briefly describes this source. This
- string will be displayed in an HTML status page."""
-
-class IScheduler(Interface):
- """I watch for Changes in the source tree and decide when to trigger
- Builds. I create BuildSet objects and submit them to the BuildMaster. I
- am a service, and the BuildMaster is always my parent."""
-
- def addChange(change):
- """A Change has just been dispatched by one of the ChangeSources.
- Each Scheduler will receive this Change. I may decide to start a
- build as a result, or I might choose to ignore it."""
-
- def listBuilderNames():
- """Return a list of strings indicating the Builders that this
- Scheduler might feed."""
-
- def getPendingBuildTimes():
- """Return a list of timestamps for any builds that are waiting in the
- tree-stable-timer queue. This is only relevant for Change-based
- schedulers, all others can just return an empty list."""
- # TODO: it might be nice to make this into getPendingBuildSets, which
- # would let someone subscribe to the buildset being finished.
- # However, the Scheduler doesn't actually create the buildset until
- # it gets submitted, so doing this would require some major rework.
-
-class IUpstreamScheduler(Interface):
- """This marks an IScheduler as being eligible for use as the 'upstream='
- argument to a buildbot.scheduler.Dependent instance."""
-
- def subscribeToSuccessfulBuilds(target):
- """Request that the target callbable be invoked after every
- successful buildset. The target will be called with a single
- argument: the SourceStamp used by the successful builds."""
-
- def listBuilderNames():
- """Return a list of strings indicating the Builders that this
- Scheduler might feed."""
-
-class ISourceStamp(Interface):
- pass
-
-class IEmailSender(Interface):
- """I know how to send email, and can be used by other parts of the
- Buildbot to contact developers."""
- pass
-
-class IEmailLookup(Interface):
- def getAddress(user):
- """Turn a User-name string into a valid email address. Either return
- a string (with an @ in it), None (to indicate that the user cannot
- be reached by email), or a Deferred which will fire with the same."""
-
-class IStatus(Interface):
- """I am an object, obtainable from the buildmaster, which can provide
- status information."""
-
- def getProjectName():
- """Return the name of the project that this Buildbot is working
- for."""
- def getProjectURL():
- """Return the URL of this Buildbot's project."""
- def getBuildbotURL():
- """Return the URL of the top-most Buildbot status page, or None if
- this Buildbot does not provide a web status page."""
- def getURLFor(thing):
- """Return the URL of a page which provides information on 'thing',
- which should be an object that implements one of the status
- interfaces defined in L{buildbot.interfaces}. Returns None if no
- suitable page is available (or if no Waterfall is running)."""
-
- def getSchedulers():
- """Return a list of ISchedulerStatus objects for all
- currently-registered Schedulers."""
-
- def getBuilderNames(categories=None):
- """Return a list of the names of all current Builders."""
- def getBuilder(name):
- """Return the IBuilderStatus object for a given named Builder."""
- def getSlave(name):
- """Return the ISlaveStatus object for a given named buildslave."""
-
- def getBuildSets():
- """Return a list of active (non-finished) IBuildSetStatus objects."""
-
- def subscribe(receiver):
- """Register an IStatusReceiver to receive new status events. The
- receiver will immediately be sent a set of 'builderAdded' messages
- for all current builders. It will receive further 'builderAdded' and
- 'builderRemoved' messages as the config file is reloaded and builders
- come and go. It will also receive 'buildsetSubmitted' messages for
- all outstanding BuildSets (and each new BuildSet that gets
- submitted). No additional messages will be sent unless the receiver
- asks for them by calling .subscribe on the IBuilderStatus objects
- which accompany the addedBuilder message."""
-
- def unsubscribe(receiver):
- """Unregister an IStatusReceiver. No further status messgaes will be
- delivered."""
-
-class IBuildSetStatus(Interface):
- """I represent a set of Builds, each run on a separate Builder but all
- using the same source tree."""
-
- def getSourceStamp():
- pass
- def getReason():
- pass
- def getID():
- """Return the BuildSet's ID string, if any. The 'try' feature uses a
- random string as a BuildSetID to relate submitted jobs with the
- resulting BuildSet."""
- def getResponsibleUsers():
- pass # not implemented
- def getInterestedUsers():
- pass # not implemented
- def getBuilderNames():
- """Return a list of the names of all Builders on which this set will
- do builds."""
- def getBuildRequests():
- """Return a list of IBuildRequestStatus objects that represent my
- component Builds. This list might correspond to the Builders named by
- getBuilderNames(), but if builder categories are used, or 'Builder
- Aliases' are implemented, then they may not."""
- def isFinished():
- pass
- def waitUntilSuccess():
- """Return a Deferred that fires (with this IBuildSetStatus object)
- when the outcome of the BuildSet is known, i.e., upon the first
- failure, or after all builds complete successfully."""
- def waitUntilFinished():
- """Return a Deferred that fires (with this IBuildSetStatus object)
- when all builds have finished."""
- def getResults():
- pass
-
-class IBuildRequestStatus(Interface):
- """I represent a request to build a particular set of source code on a
- particular Builder. These requests may be merged by the time they are
- finally turned into a Build."""
-
- def getSourceStamp():
- pass
- def getBuilderName():
- pass
- def getBuilds():
- """Return a list of IBuildStatus objects for each Build that has been
- started in an attempt to satisfy this BuildRequest."""
-
- def subscribe(observer):
- """Register a callable that will be invoked (with a single
- IBuildStatus object) for each Build that is created to satisfy this
- request. There may be multiple Builds created in an attempt to handle
- the request: they may be interrupted by the user or abandoned due to
- a lost slave. The last Build (the one which actually gets to run to
- completion) is said to 'satisfy' the BuildRequest. The observer will
- be called once for each of these Builds, both old and new."""
- def unsubscribe(observer):
- """Unregister the callable that was registered with subscribe()."""
-
-
-class ISlaveStatus(Interface):
- def getName():
- """Return the name of the build slave."""
-
- def getAdmin():
- """Return a string with the slave admin's contact data."""
-
- def getHost():
- """Return a string with the slave host info."""
-
- def isConnected():
- """Return True if the slave is currently online, False if not."""
-
-class ISchedulerStatus(Interface):
- def getName():
- """Return the name of this Scheduler (a string)."""
-
- def getPendingBuildsets():
- """Return an IBuildSet for all BuildSets that are pending. These
- BuildSets are waiting for their tree-stable-timers to expire."""
- # TODO: this is not implemented anywhere
-
-
-class IBuilderStatus(Interface):
- def getName():
- """Return the name of this Builder (a string)."""
-
- def getState():
- # TODO: this isn't nearly as meaningful as it used to be
- """Return a tuple (state, builds) for this Builder. 'state' is the
- so-called 'big-status', indicating overall status (as opposed to
- which step is currently running). It is a string, one of 'offline',
- 'idle', or 'building'. 'builds' is a list of IBuildStatus objects
- (possibly empty) representing the currently active builds."""
-
- def getSlaves():
- """Return a list of ISlaveStatus objects for the buildslaves that are
- used by this builder."""
-
- def getPendingBuilds():
- """Return an IBuildRequestStatus object for all upcoming builds
- (those which are ready to go but which are waiting for a buildslave
- to be available."""
-
- def getCurrentBuilds():
- """Return a list containing an IBuildStatus object for each build
- currently in progress."""
- # again, we could probably provide an object for 'waiting' and
- # 'interlocked' too, but things like the Change list might still be
- # subject to change
-
- def getLastFinishedBuild():
- """Return the IBuildStatus object representing the last finished
- build, which may be None if the builder has not yet finished any
- builds."""
-
- def getBuild(number):
- """Return an IBuildStatus object for a historical build. Each build
- is numbered (starting at 0 when the Builder is first added),
- getBuild(n) will retrieve the Nth such build. getBuild(-n) will
- retrieve a recent build, with -1 being the most recent build
- started. If the Builder is idle, this will be the same as
- getLastFinishedBuild(). If the Builder is active, it will be an
- unfinished build. This method will return None if the build is no
- longer available. Older builds are likely to have less information
- stored: Logs are the first to go, then Steps."""
-
- def getEvent(number):
- """Return an IStatusEvent object for a recent Event. Builders
- connecting and disconnecting are events, as are ping attempts.
- getEvent(-1) will return the most recent event. Events are numbered,
- but it probably doesn't make sense to ever do getEvent(+n)."""
-
- def subscribe(receiver):
- """Register an IStatusReceiver to receive new status events. The
- receiver will be given builderChangedState, buildStarted, and
- buildFinished messages."""
-
- def unsubscribe(receiver):
- """Unregister an IStatusReceiver. No further status messgaes will be
- delivered."""
-
-class IBuildStatus(Interface):
- """I represent the status of a single Build/BuildRequest. It could be
- in-progress or finished."""
-
- def getBuilder():
- """
- Return the BuilderStatus that owns this build.
-
- @rtype: implementor of L{IBuilderStatus}
- """
-
- def isFinished():
- """Return a boolean. True means the build has finished, False means
- it is still running."""
-
- def waitUntilFinished():
- """Return a Deferred that will fire when the build finishes. If the
- build has already finished, this deferred will fire right away. The
- callback is given this IBuildStatus instance as an argument."""
-
- def getProperty(propname):
- """Return the value of the build property with the given name."""
-
- def getReason():
- """Return a string that indicates why the build was run. 'changes',
- 'forced', and 'periodic' are the most likely values. 'try' will be
- added in the future."""
-
- def getSourceStamp():
- """Return a tuple of (branch, revision, patch) which can be used to
- re-create the source tree that this build used. 'branch' is a string
- with a VC-specific meaning, or None to indicate that the checkout
- step used its default branch. 'revision' is a string, the sort you
- would pass to 'cvs co -r REVISION'. 'patch' is either None, or a
- (level, diff) tuple which represents a patch that should be applied
- with 'patch -pLEVEL < DIFF' from the directory created by the
- checkout operation.
-
- This method will return None if the source information is no longer
- available."""
- # TODO: it should be possible to expire the patch but still remember
- # that the build was r123+something.
-
- # TODO: change this to return the actual SourceStamp instance, and
- # remove getChanges()
-
- def getChanges():
- """Return a list of Change objects which represent which source
- changes went into the build."""
-
- def getResponsibleUsers():
- """Return a list of Users who are to blame for the changes that went
- into this build. If anything breaks (at least anything that wasn't
- already broken), blame them. Specifically, this is the set of users
- who were responsible for the Changes that went into this build. Each
- User is a string, corresponding to their name as known by the VC
- repository."""
-
- def getInterestedUsers():
- """Return a list of Users who will want to know about the results of
- this build. This is a superset of getResponsibleUsers(): it adds
- people who are interested in this build but who did not actually
- make the Changes that went into it (build sheriffs, code-domain
- owners)."""
-
- def getNumber():
- """Within each builder, each Build has a number. Return it."""
-
- def getPreviousBuild():
- """Convenience method. Returns None if the previous build is
- unavailable."""
-
- def getSteps():
- """Return a list of IBuildStepStatus objects. For invariant builds
- (those which always use the same set of Steps), this should always
- return the complete list, however some of the steps may not have
- started yet (step.getTimes()[0] will be None). For variant builds,
- this may not be complete (asking again later may give you more of
- them)."""
-
- def getTimes():
- """Returns a tuple of (start, end). 'start' and 'end' are the times
- (seconds since the epoch) when the Build started and finished. If
- the build is still running, 'end' will be None."""
-
- # while the build is running, the following methods make sense.
- # Afterwards they return None
-
- def getETA():
- """Returns the number of seconds from now in which the build is
- expected to finish, or None if we can't make a guess. This guess will
- be refined over time."""
-
- def getCurrentStep():
- """Return an IBuildStepStatus object representing the currently
- active step."""
-
- # Once you know the build has finished, the following methods are legal.
- # Before the build has finished, they all return None.
-
- def getSlavename():
- """Return the name of the buildslave which handled this build."""
-
- def getText():
- """Returns a list of strings to describe the build. These are
- intended to be displayed in a narrow column. If more space is
- available, the caller should join them together with spaces before
- presenting them to the user."""
-
- def getColor():
- """Returns a single string with the color that should be used to
- display the build. 'green', 'orange', or 'red' are the most likely
- ones."""
-
- def getResults():
- """Return a constant describing the results of the build: one of the
- constants in buildbot.status.builder: SUCCESS, WARNINGS, or
- FAILURE."""
-
- def getLogs():
- """Return a list of logs that describe the build as a whole. Some
- steps will contribute their logs, while others are less important
- and will only be accessible through the IBuildStepStatus objects.
- Each log is an object which implements the IStatusLog interface."""
-
- def getTestResults():
- """Return a dictionary that maps test-name tuples to ITestResult
- objects. This may return an empty or partially-filled dictionary
- until the build has completed."""
-
- # subscription interface
-
- def subscribe(receiver, updateInterval=None):
- """Register an IStatusReceiver to receive new status events. The
- receiver will be given stepStarted and stepFinished messages. If
- 'updateInterval' is non-None, buildETAUpdate messages will be sent
- every 'updateInterval' seconds."""
-
- def unsubscribe(receiver):
- """Unregister an IStatusReceiver. No further status messgaes will be
- delivered."""
-
-class ITestResult(Interface):
- """I describe the results of a single unit test."""
-
- def getName():
- """Returns a tuple of strings which make up the test name. Tests may
- be arranged in a hierarchy, so looking for common prefixes may be
- useful."""
-
- def getResults():
- """Returns a constant describing the results of the test: SUCCESS,
- WARNINGS, FAILURE."""
-
- def getText():
- """Returns a list of short strings which describe the results of the
- test in slightly more detail. Suggested components include
- 'failure', 'error', 'passed', 'timeout'."""
-
- def getLogs():
- # in flux, it may be possible to provide more structured information
- # like python Failure instances
- """Returns a dictionary of test logs. The keys are strings like
- 'stdout', 'log', 'exceptions'. The values are strings."""
-
-
-class IBuildStepStatus(Interface):
- """I hold status for a single BuildStep."""
-
- def getName():
- """Returns a short string with the name of this step. This string
- may have spaces in it."""
-
- def getBuild():
- """Returns the IBuildStatus object which contains this step."""
-
- def getTimes():
- """Returns a tuple of (start, end). 'start' and 'end' are the times
- (seconds since the epoch) when the Step started and finished. If the
- step has not yet started, 'start' will be None. If the step is still
- running, 'end' will be None."""
-
- def getExpectations():
- """Returns a list of tuples (name, current, target). Each tuple
- describes a single axis along which the step's progress can be
- measured. 'name' is a string which describes the axis itself, like
- 'filesCompiled' or 'tests run' or 'bytes of output'. 'current' is a
- number with the progress made so far, while 'target' is the value
- that we expect (based upon past experience) to get to when the build
- is finished.
-
- 'current' will change over time until the step is finished. It is
- 'None' until the step starts. When the build is finished, 'current'
- may or may not equal 'target' (which is merely the expectation based
- upon previous builds)."""
-
- def getLogs():
- """Returns a list of IStatusLog objects. If the step has not yet
- finished, this list may be incomplete (asking again later may give
- you more of them)."""
-
-
- def isFinished():
- """Return a boolean. True means the step has finished, False means it
- is still running."""
-
- def waitUntilFinished():
- """Return a Deferred that will fire when the step finishes. If the
- step has already finished, this deferred will fire right away. The
- callback is given this IBuildStepStatus instance as an argument."""
-
- # while the step is running, the following methods make sense.
- # Afterwards they return None
-
- def getETA():
- """Returns the number of seconds from now in which the step is
- expected to finish, or None if we can't make a guess. This guess will
- be refined over time."""
-
- # Once you know the step has finished, the following methods are legal.
- # Before this step has finished, they all return None.
-
- def getText():
- """Returns a list of strings which describe the step. These are
- intended to be displayed in a narrow column. If more space is
- available, the caller should join them together with spaces before
- presenting them to the user."""
-
- def getColor():
- """Returns a single string with the color that should be used to
- display this step. 'green', 'orange', 'red' and 'yellow' are the
- most likely ones."""
-
- def getResults():
- """Return a tuple describing the results of the step: (result,
- strings). 'result' is one of the constants in
- buildbot.status.builder: SUCCESS, WARNINGS, FAILURE, or SKIPPED.
- 'strings' is an optional list of strings that the step wants to
- append to the overall build's results. These strings are usually
- more terse than the ones returned by getText(): in particular,
- successful Steps do not usually contribute any text to the overall
- build."""
-
- # subscription interface
-
- def subscribe(receiver, updateInterval=10):
- """Register an IStatusReceiver to receive new status events. The
- receiver will be given logStarted and logFinished messages. It will
- also be given an ETAUpdate message every 'updateInterval' seconds."""
-
- def unsubscribe(receiver):
- """Unregister an IStatusReceiver. No further status messgaes will be
- delivered."""
-
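
The read-only step API above lends itself to a small helper. The following is a minimal sketch (not part of the original file): it assumes 'step_status' is any object implementing IBuildStepStatus, and that the SUCCESS/WARNINGS/FAILURE/SKIPPED constants live in buildbot.status.builder as the docstrings state.

from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, SKIPPED

def describe_step(step_status):
    """Return a one-line, human-readable summary of an IBuildStepStatus."""
    name = step_status.getName()
    if not step_status.isFinished():
        eta = step_status.getETA()            # seconds from now, or None
        if eta is not None:
            return "%s: running, about %d seconds left" % (name, eta)
        return "%s: running" % name
    start, end = step_status.getTimes()       # seconds since the epoch
    result, strings = step_status.getResults()
    labels = {SUCCESS: "success", WARNINGS: "warnings",
              FAILURE: "failure", SKIPPED: "skipped"}
    return "%s: %s in %d seconds (%s)" % (
        name, labels.get(result, "unknown"), end - start,
        " ".join(step_status.getText()))
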
-class IStatusEvent(Interface):
- """I represent a Builder Event, something non-Build related that can
- happen to a Builder."""
-
- def getTimes():
- """Returns a tuple of (start, end) like IBuildStepStatus, but end==0
- indicates that this is a 'point event', which has no duration.
- SlaveConnect/Disconnect are point events. Ping is not: it starts
- when requested and ends when the response (positive or negative) is
- returned"""
-
- def getText():
- """Returns a list of strings which describe the event. These are
- intended to be displayed in a narrow column. If more space is
- available, the caller should join them together with spaces before
- presenting them to the user."""
-
- def getColor():
- """Returns a single string with the color that should be used to
- display this event. 'red' and 'yellow' are the most likely ones."""
-
-class IStatusLog(Interface):
- """I represent a single Log, which is a growing list of text items that
- contains some kind of output for a single BuildStep. I might be finished,
- in which case this list has stopped growing.
-
- Each Log has a name, usually something boring like 'log' or 'output'.
- These names are not guaranteed to be unique, however they are usually
- chosen to be useful within the scope of a single step (i.e. the Compile
- step might produce both 'log' and 'warnings'). The name may also have
- spaces. If you want something more globally meaningful, at least within a
- given Build, try::
-
- '%s.%s' % (log.getStep().getName(), log.getName())
-
- The Log can be presented as plain text, or it can be accessed as a list
- of items, each of which has a channel indicator (header, stdout, stderr)
- and a text chunk. An HTML display might represent the interleaved
- channels with different styles, while a straight download-the-text
- interface would just want to retrieve a big string.
-
- The 'header' channel is used by ShellCommands to prepend a note about
- which command is about to be run ('running command FOO in directory
- DIR'), and append another note giving the exit code of the process.
-
- Logs can be streaming: if the Log has not yet finished, you can
- subscribe to receive new chunks as they are added.
-
- A ShellCommand will have a Log associated with it that gathers stdout
- and stderr. Logs may also be created by parsing command output or
- through other synthetic means (grepping for all the warnings in a
- compile log, or listing all the test cases that are going to be run).
- Such synthetic Logs are usually finished as soon as they are created."""
-
-
- def getName():
- """Returns a short string with the name of this log, probably 'log'.
- """
-
- def getStep():
- """Returns the IBuildStepStatus which owns this log."""
- # TODO: can there be non-Step logs?
-
- def isFinished():
- """Return a boolean. True means the log has finished and is closed,
- False means it is still open and new chunks may be added to it."""
-
- def waitUntilFinished():
- """Return a Deferred that will fire when the log is closed. If the
- log has already finished, this deferred will fire right away. The
- callback is given this IStatusLog instance as an argument."""
-
- def subscribe(receiver, catchup):
- """Register an IStatusReceiver to receive chunks (with logChunk) as
- data is added to the Log. If you use this, you will also want to use
- waitUntilFinished to find out when the listener can be retired.
- Subscribing to a closed Log is a no-op.
-
- If 'catchup' is True, the receiver will immediately be sent a series
- of logChunk messages to bring it up to date with the partially-filled
- log. This allows a status client to join a Log already in progress
- without missing any data. If the Log has already finished, it is too
- late to catch up: just do getText() instead.
-
- If the Log is very large, the receiver will be called many times with
- a lot of data. There is no way to throttle this data. If the receiver
- is planning on sending the data on to somewhere else, over a narrow
- connection, you can get a throttleable subscription by using
- C{subscribeConsumer} instead."""
-
- def unsubscribe(receiver):
- """Remove a receiver previously registered with subscribe(). Attempts
- to remove a receiver which was not previously registered is a no-op.
- """
-
- def subscribeConsumer(consumer):
- """Register an L{IStatusLogConsumer} to receive all chunks of the
- logfile, including all the old entries and any that will arrive in
- the future. The consumer will first have its C{registerProducer}
- method invoked with a reference to an object that can be told
- C{pauseProducing}, C{resumeProducing}, and C{stopProducing}. Then the
- consumer's C{writeChunk} method will be called repeatedly with each
- (channel, text) tuple in the log, starting with the very first. The
- consumer will be notified with C{finish} when the log has been
- exhausted (which can only happen when the log is finished). Note that
- a small amount of data could be written via C{writeChunk} even after
- C{pauseProducing} has been called.
-
- To unsubscribe the consumer, use C{producer.stopProducing}."""
-
- # once the log has finished, the following methods make sense. They can
- # be called earlier, but they will only return the contents of the log up
- # to the point at which they were called. You will lose items that are
- # added later. Use C{subscribe} or C{subscribeConsumer} to avoid missing
- # anything.
-
- def hasContents():
- """Returns True if the LogFile still has contents available. Returns
- False for logs that have been pruned. Clients should test this before
- offering to show the contents of any log."""
-
- def getText():
- """Return one big string with the contents of the Log. This merges
- all non-header chunks together."""
-
- def getTextWithHeaders():
- """Return one big string with the contents of the Log. This merges
- all chunks (including headers) together."""
-
- def getChunks():
- """Generate a list of (channel, text) tuples. 'channel' is a number,
- 0 for stdout, 1 for stderr, 2 for header. (note that stderr is merged
- into stdout if PTYs are in use)."""
-
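
As a sketch of how a finished Log might be read back through this interface (assuming 'log' is any IStatusLog implementation; the channel numbering follows getChunks() above):

import sys

def show_log(log):
    """Write a finished IStatusLog to stdout, labelling each chunk."""
    # the per-build unique name suggested in the class docstring above
    full_name = "%s.%s" % (log.getStep().getName(), log.getName())
    if not log.hasContents():
        sys.stdout.write("%s: contents have been pruned\n" % full_name)
        return
    channels = {0: "stdout", 1: "stderr", 2: "header"}
    for channel, text in log.getChunks():
        sys.stdout.write("%s [%s] %s" % (full_name, channels[channel], text))
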
-class IStatusLogConsumer(Interface):
- """I am an object which can be passed to IStatusLog.subscribeConsumer().
- I represent a target for writing the contents of an IStatusLog. This
- differs from a regular IStatusReceiver in that it can pause the producer.
- This makes it more suitable for use in streaming data over network
- sockets, such as an HTTP request. Note that the consumer can only pause
- the producer until it has caught up with all the old data. After that
- point, C{pauseProducing} is ignored and all new output from the log is
- sent directly to the consumer."""
-
- def registerProducer(producer, streaming):
- """A producer is being hooked up to this consumer. The consumer only
- has to handle a single producer. It should send .pauseProducing and
- .resumeProducing messages to the producer when it wants to stop or
- resume the flow of data. 'streaming' will be set to True because the
- producer is always a PushProducer.
- """
-
- def unregisterProducer():
- """The previously-registered producer has been removed. No further
- pauseProducing or resumeProducing calls should be made. The consumer
- should delete its reference to the Producer so it can be released."""
-
- def writeChunk(chunk):
- """A chunk (i.e. a tuple of (channel, text)) is being written to the
- consumer."""
-
- def finish():
- """The log has finished sending chunks to the consumer."""
-
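
A trivial consumer might look like the sketch below (illustrative only; the interface declaration machinery is omitted, and 'outfile' is any file-like object):

class LogPrinter:
    """Minimal IStatusLogConsumer: writes every chunk to a file-like object."""

    def __init__(self, outfile):
        self.outfile = outfile
        self.producer = None

    def registerProducer(self, producer, streaming):
        # 'streaming' is always True; keep the producer around in case we
        # ever need pauseProducing/resumeProducing for flow control
        self.producer = producer

    def unregisterProducer(self):
        self.producer = None

    def writeChunk(self, chunk):
        channel, text = chunk
        self.outfile.write(text)

    def finish(self):
        # the log has been exhausted; nothing more will arrive
        self.producer = None

# usage, assuming 'log' implements IStatusLog:
#   import sys
#   log.subscribeConsumer(LogPrinter(sys.stdout))
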
-class IStatusReceiver(Interface):
- """I am an object which can receive build status updates. I may be
- subscribed to an IStatus, an IBuilderStatus, or an IBuildStatus."""
-
- def buildsetSubmitted(buildset):
- """A new BuildSet has been submitted to the buildmaster.
-
- @type buildset: implementor of L{IBuildSetStatus}
- """
-
- def builderAdded(builderName, builder):
- """
- A new Builder has just been added. This method may return an
- IStatusReceiver (probably 'self') which will be subscribed to receive
- builderChangedState and buildStarted/Finished events.
-
- @type builderName: string
- @type builder: L{buildbot.status.builder.BuilderStatus}
- @rtype: implementor of L{IStatusReceiver}
- """
-
- def builderChangedState(builderName, state):
- """Builder 'builderName' has changed state. The possible values for
- 'state' are 'offline', 'idle', and 'building'."""
-
- def buildStarted(builderName, build):
- """Builder 'builderName' has just started a build. The build is an
- object which implements IBuildStatus, and can be queried for more
- information.
-
- This method may return an IStatusReceiver (it could even return
- 'self'). If it does so, stepStarted and stepFinished methods will be
- invoked on the object for the steps of this one build. This is a
- convenient way to subscribe to all build steps without missing any.
- This receiver will automatically be unsubscribed when the build
- finishes.
-
- It can also return a tuple of (IStatusReceiver, interval), in which
- case buildETAUpdate messages are sent every 'interval' seconds, in
- addition to the stepStarted and stepFinished messages."""
-
- def buildETAUpdate(build, ETA):
- """This is a periodic update on the progress this Build has made
- towards completion."""
-
- def stepStarted(build, step):
- """A step has just started. 'step' is the IBuildStepStatus which
- represents the step: it can be queried for more information.
-
- This method may return an IStatusReceiver (it could even return
- 'self'). If it does so, logStarted and logFinished methods will be
- invoked on the object for logs created by this one step. This
- receiver will be automatically unsubscribed when the step finishes.
-
- Alternatively, the method may return a tuple of an IStatusReceiver
- and an integer named 'updateInterval'. In addition to
- logStarted/logFinished messages, it will also receive stepETAUpdate
- messages about every updateInterval seconds."""
-
- def stepETAUpdate(build, step, ETA, expectations):
- """This is a periodic update on the progress this Step has made
- towards completion. It gets an ETA (in seconds from the present) of
- when the step ought to be complete, and a list of expectation tuples
- (as returned by IBuildStepStatus.getExpectations) with more detailed
- information."""
-
- def logStarted(build, step, log):
- """A new Log has been started, probably because a step has just
- started running a shell command. 'log' is the IStatusLog object
- which can be queried for more information.
-
- This method may return an IStatusReceiver (such as 'self'), in which
- case the target's logChunk method will be invoked as text is added to
- the logfile. This receiver will automatically be unsubscribed when the
- log finishes."""
-
- def logChunk(build, step, log, channel, text):
- """Some text has been added to this log. 'channel' is 0, 1, or 2, as
- defined in IStatusLog.getChunks."""
-
- def logFinished(build, step, log):
- """A Log has been closed."""
-
- def stepFinished(build, step, results):
- """A step has just finished. 'results' is the result tuple described
- in IBuildStepStatus.getResults."""
-
- def buildFinished(builderName, build, results):
- """
- A build has just finished. 'results' is the result tuple described
- in L{IBuildStatus.getResults}.
-
- @type builderName: string
- @type build: L{buildbot.status.builder.BuildStatus}
- @type results: tuple
- """
-
- def builderRemoved(builderName):
- """The Builder has been removed."""
-
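
The cascading subscription described above (builderAdded -> buildStarted -> stepStarted, each subscribing one level deeper by returning 'self') can be illustrated with a deliberately incomplete receiver. This is a sketch only: it shows a subset of the interface, and how it gets registered with the status object is assumed rather than shown.

from twisted.python import log

class ConsoleStatusReceiver:
    """Logs builder, build and step transitions via twisted.python.log."""

    def builderAdded(self, builderName, builder):
        return self      # also receive builderChangedState/buildStarted

    def builderChangedState(self, builderName, state):
        log.msg("builder %s is now %s" % (builderName, state))

    def buildStarted(self, builderName, build):
        log.msg("build started on %s" % builderName)
        return self      # also receive stepStarted/stepFinished

    def stepStarted(self, build, step):
        log.msg("step %s started" % step.getName())

    def stepFinished(self, build, step, results):
        log.msg("step %s finished" % step.getName())

    def buildFinished(self, builderName, build, results):
        log.msg("build on %s finished: %s" % (builderName, results))

    def builderRemoved(self, builderName):
        log.msg("builder %s removed" % builderName)
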
-class IControl(Interface):
- def addChange(change):
- """Add a change to all builders. Each Builder will decide for
- themselves whether the change is interesting or not, and may initiate
- a build as a result."""
-
- def submitBuildSet(buildset):
- """Submit a BuildSet object, which will eventually be run on all of
- the builders listed therein."""
-
- def getBuilder(name):
- """Retrieve the IBuilderControl object for the given Builder."""
-
-class IBuilderControl(Interface):
- def forceBuild(who, reason):
- """DEPRECATED, please use L{requestBuild} instead.
-
- Start a build of the latest sources. If 'who' is not None, it is a
- string with the name of the user who is responsible for starting the
- build: they will be added to the 'interested users' list (so they may
- be notified via email or another Status object when it finishes).
- 'reason' is a string describing why this user requested the build.
-
- The results of forced builds are always sent to the Interested Users,
- even if the Status object would normally only send results upon
- failures.
-
- forceBuild() may raise L{NoSlaveError} or L{BuilderInUseError} if it
- cannot start the build.
-
- forceBuild() returns a Deferred which fires with an L{IBuildControl}
- object that can be used to further control the new build, or from
- which an L{IBuildStatus} object can be obtained."""
-
- def requestBuild(request):
- """Queue a L{buildbot.process.base.BuildRequest} object for later
- building."""
-
- def requestBuildSoon(request):
- """Submit a BuildRequest like requestBuild, but raise a
- L{buildbot.interfaces.NoSlaveError} if no slaves are currently
- available, so it cannot be used to queue a BuildRequest in the hopes
- that a slave will eventually connect. This method is appropriate for
- use by things like the web-page 'Force Build' button."""
-
- def resubmitBuild(buildStatus, reason="<rebuild, no reason given>"):
- """Rebuild something we've already built before. This submits a
- BuildRequest to our Builder using the same SourceStamp as the earlier
- build. This has no effect (but may eventually raise an exception) if
- this Build has not yet finished."""
-
- def getPendingBuilds():
- """Return a list of L{IBuildRequestControl} objects for this Builder.
- Each one corresponds to a pending build that has not yet started (due
- to a scarcity of build slaves). These upcoming builds can be canceled
- through the control object."""
-
- def getBuild(number):
- """Attempt to return an IBuildControl object for the given build.
- Returns None if no such object is available. This will only work for
- the build that is currently in progress: once the build finishes,
- there is nothing to control anymore."""
-
- def ping(timeout=30):
- """Attempt to contact the slave and see if it is still alive. This
- returns a Deferred which fires with either True (the slave is still
- alive) or False (the slave did not respond). As a side effect, adds
- an event to this builder's column in the waterfall display
- containing the results of the ping."""
- # TODO: this ought to live in ISlaveControl, maybe with disconnect()
- # or something. However the event that is emitted is most useful in
- # the Builder column, so it kinda fits here too.
-
-class IBuildRequestControl(Interface):
- def subscribe(observer):
- """Register a callable that will be invoked (with a single
- IBuildControl object) for each Build that is created to satisfy this
- request. There may be multiple Builds created in an attempt to handle
- the request: they may be interrupted by the user or abandoned due to
- a lost slave. The last Build (the one which actually gets to run to
- completion) is said to 'satisfy' the BuildRequest. The observer will
- be called once for each of these Builds, both old and new."""
- def unsubscribe(observer):
- """Unregister the callable that was registered with subscribe()."""
- def cancel():
- """Remove the build from the pending queue. Has no effect if the
- build has already been started."""
-
-class IBuildControl(Interface):
- def getStatus():
- """Return an IBuildStatus object for the Build that I control."""
- def stopBuild(reason="<no reason given>"):
- """Halt the build. This has no effect if the build has already
- finished."""
diff --git a/buildbot/buildbot-source/build/lib/buildbot/locks.py b/buildbot/buildbot-source/build/lib/buildbot/locks.py
deleted file mode 100644
index a5ae40b93..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/locks.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# -*- test-case-name: buildbot.test.test_locks -*-
-
-from twisted.python import log
-from twisted.internet import reactor, defer
-from buildbot import util
-
-class BaseLock:
- owner = None
- description = "<BaseLock>"
-
- def __init__(self, name):
- self.name = name
- self.waiting = []
-
- def __repr__(self):
- return self.description
-
- def isAvailable(self):
- log.msg("%s isAvailable: self.owner=%s" % (self, self.owner))
- return not self.owner
-
- def claim(self, owner):
- log.msg("%s claim(%s)" % (self, owner))
- assert owner is not None
- self.owner = owner
- log.msg(" %s is claimed" % (self,))
-
- def release(self, owner):
- log.msg("%s release(%s)" % (self, owner))
- assert owner is self.owner
- self.owner = None
- reactor.callLater(0, self.nowAvailable)
-
- def waitUntilAvailable(self, owner):
- log.msg("%s waitUntilAvailable(%s)" % (self, owner))
- assert self.owner, "You aren't supposed to call this on a free Lock"
- d = defer.Deferred()
- self.waiting.append((d, owner))
- return d
-
- def nowAvailable(self):
- log.msg("%s nowAvailable" % self)
- assert not self.owner
- if not self.waiting:
- return
- d,owner = self.waiting.pop(0)
- d.callback(self)
-
-class RealMasterLock(BaseLock):
- def __init__(self, name):
- BaseLock.__init__(self, name)
- self.description = "<MasterLock(%s)>" % (name,)
-
- def getLock(self, slave):
- return self
-
-class RealSlaveLock(BaseLock):
- def __init__(self, name):
- BaseLock.__init__(self, name)
- self.description = "<SlaveLock(%s)>" % (name,)
- self.locks = {}
-
- def getLock(self, slavebuilder):
- slavename = slavebuilder.slave.slavename
- if not self.locks.has_key(slavename):
- lock = self.locks[slavename] = BaseLock(self.name)
- lock.description = "<SlaveLock(%s)[%s] %d>" % (self.name,
- slavename,
- id(lock))
- self.locks[slavename] = lock
- return self.locks[slavename]
-
-
-# master.cfg should only reference the following MasterLock and SlaveLock
-# classes. They are identifiers that will be turned into real Locks later,
-# via the BotMaster.getLockByID method.
-
-class MasterLock(util.ComparableMixin):
- compare_attrs = ['name']
- lockClass = RealMasterLock
- def __init__(self, name):
- self.name = name
-
-class SlaveLock(util.ComparableMixin):
- compare_attrs = ['name']
- lockClass = RealSlaveLock
- def __init__(self, name):
- self.name = name
-
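To illustrate the comment above, a small hypothetical sketch of how master.cfg and the master would use these identifiers (the 'botmaster' call is an assumption based on getLockByID in master.py below):

from buildbot.locks import MasterLock, SlaveLock

# master.cfg declares identifiers only; each carries just a name
upload_lock = MasterLock("upload")      # one real lock for the whole master
compile_lock = SlaveLock("compile")     # one real lock per connected slave

# ComparableMixin: identifiers with the same class and name compare equal,
# which is why getLockByID can cache real Locks keyed on (class, name)
assert MasterLock("upload") == upload_lock

# master-side code later converts an identifier into a real Lock:
#   real = botmaster.getLockByID(upload_lock)   # -> RealMasterLock
#   real.claim(owner) ... real.release(owner)   # as defined by BaseLock above
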
diff --git a/buildbot/buildbot-source/build/lib/buildbot/master.py b/buildbot/buildbot-source/build/lib/buildbot/master.py
deleted file mode 100644
index 784807bd9..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/master.py
+++ /dev/null
@@ -1,1066 +0,0 @@
-# -*- test-case-name: buildbot.test.test_run -*-
-
-from __future__ import generators
-import string, sys, os, time, warnings
-try:
- import signal
-except ImportError:
- signal = None
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-from twisted.python import log, usage, components
-from twisted.internet import defer, reactor
-from twisted.spread import pb
-from twisted.cred import portal, checkers
-from twisted.application import service, strports
-from twisted.persisted import styles
-from twisted.manhole import telnet
-
-# sibling imports
-from buildbot import util
-from buildbot.twcompat import implements
-from buildbot.util import now
-from buildbot.pbutil import NewCredPerspective
-from buildbot.process.builder import Builder, IDLE
-from buildbot.status.builder import BuilderStatus, SlaveStatus, Status
-from buildbot.changes.changes import Change, ChangeMaster
-from buildbot import interfaces
-
-########################################
-
-
-
-
-class BotPerspective(NewCredPerspective):
- """This is the master-side representative for a remote buildbot slave.
- There is exactly one for each slave described in the config file (the
- c['bots'] list). When buildbots connect in (.attach), they get a
- reference to this instance. The BotMaster object is stashed as the
- .service attribute."""
-
- slave_commands = None
-
- def __init__(self, name):
- self.slavename = name
- self.slave_status = SlaveStatus(name)
- self.builders = [] # list of b.p.builder.Builder instances
- self.slave = None # a RemoteReference to the Bot, when connected
-
- def addBuilder(self, builder):
- """Called to add a builder after the slave has connected.
-
- @return: a Deferred that indicates when an attached slave has
- accepted the new builder."""
-
- self.builders.append(builder)
- if self.slave:
- return self.sendBuilderList()
- return defer.succeed(None)
-
- def removeBuilder(self, builder):
- """Tell the slave that the given builder has been removed, allowing
- it to discard the associated L{buildbot.slave.bot.SlaveBuilder}
- object.
-
- @return: a Deferred that fires when the slave has finished removing
- the SlaveBuilder
- """
- self.builders.remove(builder)
- if self.slave:
- builder.detached(self)
- return self.sendBuilderList()
- return defer.succeed(None)
-
- def __repr__(self):
- return "<BotPerspective '%s', builders: %s>" % \
- (self.slavename,
- string.join(map(lambda b: b.name, self.builders), ','))
-
- def attached(self, mind):
- """This is called when the slave connects.
-
- @return: a Deferred that fires with a suitable pb.IPerspective to
- give to the slave (i.e. 'self')"""
-
- if self.slave:
- # uh-oh, we've got a duplicate slave. The most likely
- # explanation is that the slave is behind a slow link, thinks we
- # went away, and has attempted to reconnect, so we've got two
- # "connections" from the same slave, but the previous one is
- # stale. Give the new one precedence.
- log.msg("duplicate slave %s replacing old one" % self.slavename)
-
- # just in case we've got two identically-configured slaves,
- # report the IP addresses of both so someone can resolve the
- # squabble
- tport = self.slave.broker.transport
- log.msg("old slave was connected from", tport.getPeer())
- log.msg("new slave is from", mind.broker.transport.getPeer())
- d = self.disconnect()
- d.addCallback(lambda res: self._attached(mind))
- return d
-
- return self._attached(mind)
-
- def disconnect(self):
- if not self.slave:
- return defer.succeed(None)
- log.msg("disconnecting old slave %s now" % self.slavename)
-
- # all kinds of teardown will happen as a result of
- # loseConnection(), but it happens after a reactor iteration or
- # two. Hook the actual disconnect so we can know when it is safe
- # to connect the new slave. We have to wait one additional
- # iteration (with callLater(0)) to make sure the *other*
- # notifyOnDisconnect handlers have had a chance to run.
- d = defer.Deferred()
-
- self.slave.notifyOnDisconnect(lambda res: # TODO: d=d ?
- reactor.callLater(0, d.callback, None))
- tport = self.slave.broker.transport
- # this is the polite way to request that a socket be closed
- tport.loseConnection()
- try:
- # but really we don't want to wait for the transmit queue to
- # drain. The remote end is unlikely to ACK the data, so we'd
- # probably have to wait for a (20-minute) TCP timeout.
- #tport._closeSocket()
- # however, doing _closeSocket (whether before or after
- # loseConnection) somehow prevents the notifyOnDisconnect
- # handlers from being run. Bummer.
- tport.offset = 0
- tport.dataBuffer = ""
- pass
- except:
- # however, these hacks are pretty internal, so don't blow up if
- # they fail or are unavailable
- log.msg("failed to accelerate the shutdown process")
- pass
- log.msg("waiting for slave to finish disconnecting")
-
- # When this Deferred fires, we'll be ready to accept the new slave
- return d
-
- def _attached(self, mind):
- """We go through a sequence of calls, gathering information, then
- tell our Builders that they have a slave to work with.
-
- @return: a Deferred that fires (with 'self') when our Builders are
- prepared to deal with the slave.
- """
- self.slave = mind
- d = self.slave.callRemote("print", "attached")
- d.addErrback(lambda why: 0)
- self.slave_status.connected = True
- log.msg("bot attached")
-
- # TODO: there is a window here (while we're retrieving slaveinfo)
- # during which a disconnect or a duplicate-slave will be confusing
- d.addCallback(lambda res: self.slave.callRemote("getSlaveInfo"))
- d.addCallbacks(self.got_info, self.infoUnavailable)
- d.addCallback(self._attached2)
- d.addCallback(lambda res: self)
- return d
-
- def got_info(self, info):
- log.msg("Got slaveinfo from '%s'" % self.slavename)
- # TODO: info{} might have other keys
- self.slave_status.admin = info.get("admin")
- self.slave_status.host = info.get("host")
-
- def infoUnavailable(self, why):
- # maybe an old slave, doesn't implement remote_getSlaveInfo
- log.msg("BotPerspective.infoUnavailable")
- log.err(why)
-
- def _attached2(self, res):
- d = self.slave.callRemote("getCommands")
- d.addCallback(self.got_commands)
- d.addErrback(self._commandsUnavailable)
- d.addCallback(self._attached3)
- return d
-
- def got_commands(self, commands):
- self.slave_commands = commands
-
- def _commandsUnavailable(self, why):
- # probably an old slave
- log.msg("BotPerspective._commandsUnavailable")
- if why.check(AttributeError):
- return
- log.err(why)
-
- def _attached3(self, res):
- d = self.slave.callRemote("getDirs")
- d.addCallback(self.got_dirs)
- d.addErrback(self._dirsFailed)
- d.addCallback(self._attached4)
- return d
-
- def got_dirs(self, dirs):
- wanted = map(lambda b: b.builddir, self.builders)
- unwanted = []
- for d in dirs:
- if d not in wanted and d != "info":
- unwanted.append(d)
- if unwanted:
- log.msg("slave %s has leftover directories (%s): " % \
- (self.slavename, string.join(unwanted, ',')) + \
- "you can delete them now")
-
- def _dirsFailed(self, why):
- log.msg("BotPerspective._dirsFailed")
- log.err(why)
-
- def _attached4(self, res):
- return self.sendBuilderList()
-
- def sendBuilderList(self):
- # now make sure their list of Builders matches ours
- blist = []
- for b in self.builders:
- blist.append((b.name, b.builddir))
- d = self.slave.callRemote("setBuilderList", blist)
- d.addCallback(self.list_done)
- d.addErrback(self._listFailed)
- return d
-
- def list_done(self, blist):
- # this could come back at weird times. be prepared to handle oddness
- dl = []
- for name, remote in blist.items():
- for b in self.builders:
- if b.name == name:
- # if we sent the builders list because of a config
- # change, the Builder might already be attached.
- # Builder.attached will ignore us if this happens.
- d = b.attached(self, remote, self.slave_commands)
- dl.append(d)
- continue
- return defer.DeferredList(dl)
-
- def _listFailed(self, why):
- log.msg("BotPerspective._listFailed")
- log.err(why)
- # TODO: hang up on them, without setBuilderList we can't use them
-
- def perspective_forceBuild(self, name, who=None):
- # slave admins are allowed to force any of their own builds
- for b in self.builders:
- if name == b.name:
- try:
- b.forceBuild(who, "slave requested build")
- return "ok, starting build"
- except interfaces.BuilderInUseError:
- return "sorry, builder was in use"
- except interfaces.NoSlaveError:
- return "sorry, there is no slave to run the build"
- else:
- log.msg("slave requested build for unknown builder '%s'" % name)
- return "sorry, invalid builder name"
-
- def perspective_keepalive(self):
- pass
-
- def detached(self, mind):
- self.slave = None
- self.slave_status.connected = False
- for b in self.builders:
- b.detached(self)
- log.msg("Botmaster.detached(%s)" % self.slavename)
-
-
-class BotMaster(service.Service):
-
- """This is the master-side service which manages remote buildbot slaves.
- It provides them with BotPerspectives, and distributes file change
- notification messages to them.
- """
-
- debug = 0
-
- def __init__(self):
- self.builders = {}
- self.builderNames = []
- # builders maps Builder names to instances of bb.p.builder.Builder,
- # which is the master-side object that defines and controls a build.
- # They are added by calling botmaster.addBuilder() from the startup
- # code.
-
- # self.slaves contains a ready BotPerspective instance for each
- # potential buildslave, i.e. all the ones listed in the config file.
- # If the slave is connected, self.slaves[slavename].slave will
- # contain a RemoteReference to their Bot instance. If it is not
- # connected, that attribute will hold None.
- self.slaves = {} # maps slavename to BotPerspective
- self.statusClientService = None
- self.watchers = {}
-
- # self.locks holds the real Lock instances
- self.locks = {}
-
- # these four are convenience functions for testing
-
- def waitUntilBuilderAttached(self, name):
- b = self.builders[name]
- #if b.slaves:
- # return defer.succeed(None)
- d = defer.Deferred()
- b.watchers['attach'].append(d)
- return d
-
- def waitUntilBuilderDetached(self, name):
- b = self.builders.get(name)
- if not b or not b.slaves:
- return defer.succeed(None)
- d = defer.Deferred()
- b.watchers['detach'].append(d)
- return d
-
- def waitUntilBuilderFullyDetached(self, name):
- b = self.builders.get(name)
- # TODO: this looks too deeply inside the Builder object
- if not b or not b.slaves:
- return defer.succeed(None)
- d = defer.Deferred()
- b.watchers['detach_all'].append(d)
- return d
-
- def waitUntilBuilderIdle(self, name):
- b = self.builders[name]
- # TODO: this looks way too deeply inside the Builder object
- for sb in b.slaves:
- if sb.state != IDLE:
- d = defer.Deferred()
- b.watchers['idle'].append(d)
- return d
- return defer.succeed(None)
-
-
- def addSlave(self, slavename):
- slave = BotPerspective(slavename)
- self.slaves[slavename] = slave
-
- def removeSlave(self, slavename):
- d = self.slaves[slavename].disconnect()
- del self.slaves[slavename]
- return d
-
- def getBuildernames(self):
- return self.builderNames
-
- def addBuilder(self, builder):
- """This is called by the setup code to define what builds should be
- performed. Each Builder object has a build slave that should host
- that build: the builds cannot be done until the right slave
- connects.
-
- @return: a Deferred that fires when an attached slave has accepted
- the new builder.
- """
-
- if self.debug: print "addBuilder", builder
- log.msg("Botmaster.addBuilder(%s)" % builder.name)
-
- if builder.name in self.builderNames:
- raise KeyError("muliply defined builder '%s'" % builder.name)
- for slavename in builder.slavenames:
- if not self.slaves.has_key(slavename):
- raise KeyError("builder %s uses undefined slave %s" % \
- (builder.name, slavename))
-
- self.builders[builder.name] = builder
- self.builderNames.append(builder.name)
- builder.setBotmaster(self)
-
- dl = [self.slaves[slavename].addBuilder(builder)
- for slavename in builder.slavenames]
- return defer.DeferredList(dl)
-
- def removeBuilder(self, builder):
- """Stop using a Builder.
- This removes the Builder from the list of active Builders.
-
- @return: a Deferred that fires when an attached slave has finished
- removing the SlaveBuilder
- """
- if self.debug: print "removeBuilder", builder
- log.msg("Botmaster.removeBuilder(%s)" % builder.name)
- b = self.builders[builder.name]
- del self.builders[builder.name]
- self.builderNames.remove(builder.name)
- for slavename in builder.slavenames:
- slave = self.slaves.get(slavename)
- if slave:
- return slave.removeBuilder(builder)
- return defer.succeed(None)
-
- def getPerspective(self, slavename):
- return self.slaves[slavename]
-
- def shutdownSlaves(self):
- # TODO: make this into a bot method rather than a builder method
- for b in self.slaves.values():
- b.shutdownSlave()
-
- def stopService(self):
- for b in self.builders.values():
- b.builder_status.addPointEvent(["master", "shutdown"])
- b.builder_status.saveYourself()
- return service.Service.stopService(self)
-
- def getLockByID(self, lockid):
- """Convert a Lock identifier into an actual Lock instance.
- @param lockid: a locks.MasterLock or locks.SlaveLock instance
- @return: a locks.RealMasterLock or locks.RealSlaveLock instance
- """
- k = (lockid.__class__, lockid.name)
- if not k in self.locks:
- self.locks[k] = lockid.lockClass(lockid.name)
- return self.locks[k]
-
-########################################
-
-class Manhole(service.MultiService, util.ComparableMixin):
- compare_attrs = ["port", "username", "password"]
-
- def __init__(self, port, username, password):
- service.MultiService.__init__(self)
- if type(port) is int:
- port = "tcp:%d" % port
- self.port = port
- self.username = username
- self.password = password
- self.f = f = telnet.ShellFactory()
- f.username = username
- f.password = password
- s = strports.service(port, f)
- s.setServiceParent(self)
-
- def startService(self):
- log.msg("Manhole listening on port %s" % self.port)
- service.MultiService.startService(self)
- master = self.parent
- self.f.namespace['master'] = master
- self.f.namespace['status'] = master.getStatus()
-
-class DebugPerspective(NewCredPerspective):
- def attached(self, mind):
- return self
- def detached(self, mind):
- pass
-
- def perspective_forceBuild(self, buildername, who=None):
- c = interfaces.IControl(self.master)
- bc = c.getBuilder(buildername)
- bc.forceBuild(who, "debug tool 'Force Build' button pushed")
-
- def perspective_fakeChange(self, file, revision=None, who="fakeUser",
- branch=None):
- change = Change(who, [file], "some fake comments\n",
- branch=branch, revision=revision)
- c = interfaces.IControl(self.master)
- c.addChange(change)
-
- def perspective_setCurrentState(self, buildername, state):
- builder = self.botmaster.builders.get(buildername)
- if not builder: return
- if state == "offline":
- builder.statusbag.currentlyOffline()
- if state == "idle":
- builder.statusbag.currentlyIdle()
- if state == "waiting":
- builder.statusbag.currentlyWaiting(now()+10)
- if state == "building":
- builder.statusbag.currentlyBuilding(None)
- def perspective_reload(self):
- print "doing reload of the config file"
- self.master.loadTheConfigFile()
- def perspective_pokeIRC(self):
- print "saying something on IRC"
- from buildbot.status import words
- for s in self.master:
- if isinstance(s, words.IRC):
- bot = s.f
- for channel in bot.channels:
- print " channel", channel
- bot.p.msg(channel, "Ow, quit it")
-
- def perspective_print(self, msg):
- print "debug", msg
-
-class Dispatcher(styles.Versioned):
- if implements:
- implements(portal.IRealm)
- else:
- __implements__ = portal.IRealm,
- persistenceVersion = 2
-
- def __init__(self):
- self.names = {}
-
- def upgradeToVersion1(self):
- self.master = self.botmaster.parent
- def upgradeToVersion2(self):
- self.names = {}
-
- def register(self, name, afactory):
- self.names[name] = afactory
- def unregister(self, name):
- del self.names[name]
-
- def requestAvatar(self, avatarID, mind, interface):
- assert interface == pb.IPerspective
- afactory = self.names.get(avatarID)
- if afactory:
- p = afactory.getPerspective()
- elif avatarID == "debug":
- p = DebugPerspective()
- p.master = self.master
- p.botmaster = self.botmaster
- elif avatarID == "statusClient":
- p = self.statusClientService.getPerspective()
- else:
- # it must be one of the buildslaves: no other names will make it
- # past the checker
- p = self.botmaster.getPerspective(avatarID)
-
- if not p:
- raise ValueError("no perspective for '%s'" % avatarID)
-
- d = defer.maybeDeferred(p.attached, mind)
- d.addCallback(self._avatarAttached, mind)
- return d
-
- def _avatarAttached(self, p, mind):
- return (pb.IPerspective, p, lambda p=p,mind=mind: p.detached(mind))
-
-########################################
-
-# service hierarchy:
-# BuildMaster
-# BotMaster
-# ChangeMaster
-# all IChangeSource objects
-# StatusClientService
-# TCPClient(self.ircFactory)
-# TCPServer(self.slaveFactory) -> dispatcher.requestAvatar
-# TCPServer(self.site)
-# UNIXServer(ResourcePublisher(self.site))
-
-
-class BuildMaster(service.MultiService, styles.Versioned):
- debug = 0
- persistenceVersion = 3
- manhole = None
- debugPassword = None
- projectName = "(unspecified)"
- projectURL = None
- buildbotURL = None
- change_svc = None
-
- def __init__(self, basedir, configFileName="master.cfg"):
- service.MultiService.__init__(self)
- self.setName("buildmaster")
- self.basedir = basedir
- self.configFileName = configFileName
-
- # the dispatcher is the realm in which all inbound connections are
- # looked up: slave builders, change notifications, status clients, and
- # the debug port
- dispatcher = Dispatcher()
- dispatcher.master = self
- self.dispatcher = dispatcher
- self.checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
- # the checker starts with no user/passwd pairs: they are added later
- p = portal.Portal(dispatcher)
- p.registerChecker(self.checker)
- self.slaveFactory = pb.PBServerFactory(p)
- self.slaveFactory.unsafeTracebacks = True # let them see exceptions
-
- self.slavePortnum = None
- self.slavePort = None
-
- self.botmaster = BotMaster()
- self.botmaster.setName("botmaster")
- self.botmaster.setServiceParent(self)
- dispatcher.botmaster = self.botmaster
-
- self.status = Status(self.botmaster, self.basedir)
-
- self.statusTargets = []
-
- self.bots = []
- # this ChangeMaster is a dummy, only used by tests. In the real
- # buildmaster, where the BuildMaster instance is activated
- # (startService is called) by twistd, this attribute is overwritten.
- self.useChanges(ChangeMaster())
-
- self.readConfig = False
-
- def upgradeToVersion1(self):
- self.dispatcher = self.slaveFactory.root.portal.realm
-
- def upgradeToVersion2(self): # post-0.4.3
- self.webServer = self.webTCPPort
- del self.webTCPPort
- self.webDistribServer = self.webUNIXPort
- del self.webUNIXPort
- self.configFileName = "master.cfg"
-
- def upgradeToVersion3(self):
- # post 0.6.3, solely to deal with the 0.6.3 breakage. Starting with
- # 0.6.5 I intend to do away with .tap files altogether
- self.services = []
- self.namedServices = {}
- del self.change_svc
-
- def startService(self):
- service.MultiService.startService(self)
- self.loadChanges() # must be done before loading the config file
- if not self.readConfig:
- # TODO: consider catching exceptions during this call to
- # loadTheConfigFile and bailing (reactor.stop) if it fails,
- # since without a config file we can't do anything except reload
- # the config file, and it would be nice for the user to discover
- # this quickly.
- self.loadTheConfigFile()
- if signal and hasattr(signal, "SIGHUP"):
- signal.signal(signal.SIGHUP, self._handleSIGHUP)
- for b in self.botmaster.builders.values():
- b.builder_status.addPointEvent(["master", "started"])
- b.builder_status.saveYourself()
-
- def useChanges(self, changes):
- if self.change_svc:
- # TODO: can return a Deferred
- self.change_svc.disownServiceParent()
- self.change_svc = changes
- self.change_svc.basedir = self.basedir
- self.change_svc.setName("changemaster")
- self.dispatcher.changemaster = self.change_svc
- self.change_svc.setServiceParent(self)
-
- def loadChanges(self):
- filename = os.path.join(self.basedir, "changes.pck")
- try:
- changes = pickle.load(open(filename, "rb"))
- styles.doUpgrade()
- except IOError:
- log.msg("changes.pck missing, using new one")
- changes = ChangeMaster()
- except EOFError:
- log.msg("corrupted changes.pck, using new one")
- changes = ChangeMaster()
- self.useChanges(changes)
-
- def _handleSIGHUP(self, *args):
- reactor.callLater(0, self.loadTheConfigFile)
-
- def getStatus(self):
- """
- @rtype: L{buildbot.status.builder.Status}
- """
- return self.status
-
- def loadTheConfigFile(self, configFile=None):
- if not configFile:
- configFile = os.path.join(self.basedir, self.configFileName)
-
- log.msg("loading configuration from %s" % configFile)
- configFile = os.path.expanduser(configFile)
-
- try:
- f = open(configFile, "r")
- except IOError, e:
- log.msg("unable to open config file '%s'" % configFile)
- log.msg("leaving old configuration in place")
- log.err(e)
- return
-
- try:
- self.loadConfig(f)
- except:
- log.msg("error during loadConfig")
- log.err()
- f.close()
-
- def loadConfig(self, f):
- """Internal function to load a specific configuration file. Any
- errors in the file will be signalled by raising an exception.
-
- @return: a Deferred that will fire (with None) when the configuration
- changes have been completed. This may involve a round-trip to each
- buildslave that was involved."""
-
- localDict = {'basedir': os.path.expanduser(self.basedir)}
- try:
- exec f in localDict
- except:
- log.msg("error while parsing config file")
- raise
-
- try:
- config = localDict['BuildmasterConfig']
- except KeyError:
- log.err("missing config dictionary")
- log.err("config file must define BuildmasterConfig")
- raise
-
- known_keys = "bots sources schedulers builders slavePortnum " + \
- "debugPassword manhole " + \
- "status projectName projectURL buildbotURL"
- known_keys = known_keys.split()
- for k in config.keys():
- if k not in known_keys:
- log.msg("unknown key '%s' defined in config dictionary" % k)
-
- try:
- # required
- bots = config['bots']
- sources = config['sources']
- schedulers = config['schedulers']
- builders = config['builders']
- slavePortnum = config['slavePortnum']
-
- # optional
- debugPassword = config.get('debugPassword')
- manhole = config.get('manhole')
- status = config.get('status', [])
- projectName = config.get('projectName')
- projectURL = config.get('projectURL')
- buildbotURL = config.get('buildbotURL')
-
- except KeyError, e:
- log.msg("config dictionary is missing a required parameter")
- log.msg("leaving old configuration in place")
- raise
-
- # do some validation first
- for name, passwd in bots:
- if name in ("debug", "change", "status"):
- raise KeyError, "reserved name '%s' used for a bot" % name
- if config.has_key('interlocks'):
- raise KeyError("c['interlocks'] is no longer accepted")
-
- assert isinstance(sources, (list, tuple))
- for s in sources:
- assert interfaces.IChangeSource(s, None)
- # this assertion catches c['schedulers'] = Scheduler(), since
- # Schedulers are service.MultiServices and thus iterable.
- assert isinstance(schedulers, (list, tuple))
- for s in schedulers:
- assert interfaces.IScheduler(s, None)
- assert isinstance(status, (list, tuple))
- for s in status:
- assert interfaces.IStatusReceiver(s, None)
-
- slavenames = [name for name,pw in bots]
- buildernames = []
- dirnames = []
- for b in builders:
- if type(b) is tuple:
- raise ValueError("builder %s must be defined with a dict, "
- "not a tuple" % b[0])
- if b.has_key('slavename') and b['slavename'] not in slavenames:
- raise ValueError("builder %s uses undefined slave %s" \
- % (b['name'], b['slavename']))
- for n in b.get('slavenames', []):
- if n not in slavenames:
- raise ValueError("builder %s uses undefined slave %s" \
- % (b['name'], n))
- if b['name'] in buildernames:
- raise ValueError("duplicate builder name %s"
- % b['name'])
- buildernames.append(b['name'])
- if b['builddir'] in dirnames:
- raise ValueError("builder %s reuses builddir %s"
- % (b['name'], b['builddir']))
- dirnames.append(b['builddir'])
-
- for s in schedulers:
- for b in s.listBuilderNames():
- assert b in buildernames, \
- "%s uses unknown builder %s" % (s, b)
-
- # assert that all locks used by the Builds and their Steps are
- # uniquely named.
- locks = {}
- for b in builders:
- for l in b.get('locks', []):
- if locks.has_key(l.name):
- if locks[l.name] is not l:
- raise ValueError("Two different locks (%s and %s) "
- "share the name %s"
- % (l, locks[l.name], l.name))
- else:
- locks[l.name] = l
- # TODO: this will break with any BuildFactory that doesn't use a
- # .steps list, but I think the verification step is more
- # important.
- for s in b['factory'].steps:
- for l in s[1].get('locks', []):
- if locks.has_key(l.name):
- if locks[l.name] is not l:
- raise ValueError("Two different locks (%s and %s)"
- " share the name %s"
- % (l, locks[l.name], l.name))
- else:
- locks[l.name] = l
-
- # slavePortnum is supposed to be a strports specification
- if type(slavePortnum) is int:
- slavePortnum = "tcp:%d" % slavePortnum
-
- # now we're committed to implementing the new configuration, so do
- # it atomically
- # TODO: actually, this is spread across a couple of Deferreds, so it
- # really isn't atomic.
-
- d = defer.succeed(None)
-
- self.projectName = projectName
- self.projectURL = projectURL
- self.buildbotURL = buildbotURL
-
- # self.bots: Disconnect any that were attached and removed from the
- # list. Update self.checker with the new list of passwords,
- # including debug/change/status.
- d.addCallback(lambda res: self.loadConfig_Slaves(bots))
-
- # self.debugPassword
- if debugPassword:
- self.checker.addUser("debug", debugPassword)
- self.debugPassword = debugPassword
-
- # self.manhole
- if manhole != self.manhole:
- # changing
- if self.manhole:
- # disownServiceParent may return a Deferred
- d.addCallback(lambda res: self.manhole.disownServiceParent())
- self.manhole = None
- if manhole:
- self.manhole = manhole
- manhole.setServiceParent(self)
-
- # add/remove self.botmaster.builders to match builders. The
- # botmaster will handle startup/shutdown issues.
- d.addCallback(lambda res: self.loadConfig_Builders(builders))
-
- d.addCallback(lambda res: self.loadConfig_status(status))
-
- # Schedulers are added after Builders in case they start right away
- d.addCallback(lambda res: self.loadConfig_Schedulers(schedulers))
- # and Sources go after Schedulers for the same reason
- d.addCallback(lambda res: self.loadConfig_Sources(sources))
-
- # self.slavePort
- if self.slavePortnum != slavePortnum:
- if self.slavePort:
- def closeSlavePort(res):
- d1 = self.slavePort.disownServiceParent()
- self.slavePort = None
- return d1
- d.addCallback(closeSlavePort)
- if slavePortnum is not None:
- def openSlavePort(res):
- self.slavePort = strports.service(slavePortnum,
- self.slaveFactory)
- self.slavePort.setServiceParent(self)
- d.addCallback(openSlavePort)
- log.msg("BuildMaster listening on port %s" % slavePortnum)
- self.slavePortnum = slavePortnum
-
- log.msg("configuration update started")
- d.addCallback(lambda res: log.msg("configuration update complete"))
- self.readConfig = True # TODO: consider not setting this until the
- # Deferred fires.
- return d
-
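
For reference, a minimal, hypothetical master.cfg that satisfies the validation in loadConfig() above. Every name, password and command here is made up, sources and schedulers are left empty for brevity, and the BuildFactory/ShellCommand step-tuple format is an assumption about this era's API.

from buildbot.process import factory, step

f = factory.BuildFactory([
    (step.ShellCommand, {'command': ['make', 'all']}),
])

BuildmasterConfig = {
    # required keys
    'bots': [('slave1', 'slavepassword')],   # (name, passwd) pairs
    'sources': [],                           # IChangeSource instances
    'schedulers': [],                        # IScheduler instances
    'builders': [
        {'name': 'quick',
         'slavename': 'slave1',
         'builddir': 'quick',
         'factory': f},
    ],
    'slavePortnum': 'tcp:9989',              # strports spec (a plain int also works)

    # optional keys recognised above
    'projectName': 'example',
    'buildbotURL': 'http://buildbot.example.org/',
}
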
- def loadConfig_Slaves(self, bots):
- # set up the Checker with the names and passwords of all valid bots
- self.checker.users = {} # violates abstraction, oh well
- for user, passwd in bots:
- self.checker.addUser(user, passwd)
- self.checker.addUser("change", "changepw")
-
- # identify new/old bots
- old = self.bots; oldnames = [name for name,pw in old]
- new = bots; newnames = [name for name,pw in new]
- # removeSlave will hang up on the old bot
- dl = [self.botmaster.removeSlave(name)
- for name in oldnames if name not in newnames]
- [self.botmaster.addSlave(name)
- for name in newnames if name not in oldnames]
-
- # all done
- self.bots = bots
- return defer.DeferredList(dl, fireOnOneErrback=1, consumeErrors=0)
-
- def loadConfig_Sources(self, sources):
- log.msg("loadConfig_Sources, change_svc is", self.change_svc,
- self.change_svc.parent)
- # shut down any that were removed, start any that were added
- deleted_sources = [s for s in self.change_svc if s not in sources]
- added_sources = [s for s in sources if s not in self.change_svc]
- dl = [self.change_svc.removeSource(s) for s in deleted_sources]
- def addNewOnes(res):
- [self.change_svc.addSource(s) for s in added_sources]
- d = defer.DeferredList(dl, fireOnOneErrback=1, consumeErrors=0)
- d.addCallback(addNewOnes)
- return d
-
- def allSchedulers(self):
- # TODO: when twisted-1.3 compatibility is dropped, switch to the
- # providedBy form, because it's faster (no actual adapter lookup)
- return [child for child in self
- #if interfaces.IScheduler.providedBy(child)]
- if interfaces.IScheduler(child, None)]
-
-
- def loadConfig_Schedulers(self, newschedulers):
- oldschedulers = self.allSchedulers()
- removed = [s for s in oldschedulers if s not in newschedulers]
- added = [s for s in newschedulers if s not in oldschedulers]
- dl = [defer.maybeDeferred(s.disownServiceParent) for s in removed]
- def addNewOnes(res):
- for s in added:
- s.setServiceParent(self)
- d = defer.DeferredList(dl, fireOnOneErrback=1)
- d.addCallback(addNewOnes)
- return d
-
- def loadConfig_Builders(self, newBuilders):
- dl = []
- old = self.botmaster.getBuildernames()
- newNames = []
- newList = {}
- for data in newBuilders:
- name = data['name']
- newList[name] = data
- newNames.append(name)
-
- # identify all that were removed
- for old in self.botmaster.builders.values()[:]:
- if old.name not in newList.keys():
- log.msg("removing old builder %s" % old.name)
- d = self.botmaster.removeBuilder(old)
- dl.append(d)
- # announce the change
- self.status.builderRemoved(old.name)
-
- # everything in newList is either unchanged, changed, or new
- for newName, data in newList.items():
- old = self.botmaster.builders.get(newName)
- name = data['name']
- basedir = data['builddir'] # used on both master and slave
- #name, slave, builddir, factory = data
- if not old: # new
- # category added after 0.6.2
- category = data.get('category', None)
- log.msg("adding new builder %s for category %s" %
- (name, category))
- statusbag = self.status.builderAdded(name, basedir, category)
- builder = Builder(data, statusbag)
- d = self.botmaster.addBuilder(builder)
- dl.append(d)
- else:
- diffs = old.compareToSetup(data)
- if not diffs: # unchanged: leave it alone
- log.msg("builder %s is unchanged" % name)
- pass
- else:
- # changed: remove and re-add. Don't touch the statusbag
- # object: the clients won't see a remove/add cycle
- log.msg("updating builder %s: %s" % (name,
- "\n".join(diffs)))
- # TODO: if the basedir was changed, we probably need to
- # make a new statusbag
- # TODO: if a slave is connected and we're re-using the
- # same slave, try to avoid a disconnect/reconnect cycle.
- statusbag = old.builder_status
- statusbag.saveYourself() # seems like a good idea
- d = self.botmaster.removeBuilder(old)
- dl.append(d)
- builder = Builder(data, statusbag)
- # point out that the builder was updated
- statusbag.addPointEvent(["config", "updated"])
- d = self.botmaster.addBuilder(builder)
- dl.append(d)
- # now that everything is up-to-date, make sure the names are in the
- # desired order
- self.botmaster.builderNames = newNames
- return defer.DeferredList(dl, fireOnOneErrback=1, consumeErrors=0)
-
- def loadConfig_status(self, status):
- dl = []
-
- # remove old ones
- for s in self.statusTargets[:]:
- if not s in status:
- log.msg("removing IStatusReceiver", s)
- d = defer.maybeDeferred(s.disownServiceParent)
- dl.append(d)
- self.statusTargets.remove(s)
- # after those are finished going away, add new ones
- def addNewOnes(res):
- for s in status:
- if not s in self.statusTargets:
- log.msg("adding IStatusReceiver", s)
- s.setServiceParent(self)
- self.statusTargets.append(s)
- d = defer.DeferredList(dl, fireOnOneErrback=1)
- d.addCallback(addNewOnes)
- return d
-
-
- def addChange(self, change):
- for s in self.allSchedulers():
- s.addChange(change)
-
- def submitBuildSet(self, bs):
- # determine the set of Builders to use
- builders = []
- for name in bs.builderNames:
- b = self.botmaster.builders.get(name)
- if b:
- if b not in builders:
- builders.append(b)
- continue
- # TODO: add aliases like 'all'
- raise KeyError("no such builder named '%s'" % name)
-
- # now tell the BuildSet to create BuildRequests for all those
- # Builders and submit them
- bs.start(builders)
- self.status.buildsetSubmitted(bs.status)
-
-
-class Control:
- if implements:
- implements(interfaces.IControl)
- else:
- __implements__ = interfaces.IControl,
-
- def __init__(self, master):
- self.master = master
-
- def addChange(self, change):
- self.master.change_svc.addChange(change)
-
- def submitBuildSet(self, bs):
- self.master.submitBuildSet(bs)
-
- def getBuilder(self, name):
- b = self.master.botmaster.builders[name]
- return interfaces.IBuilderControl(b)
-
-components.registerAdapter(Control, BuildMaster, interfaces.IControl)
-
-# so anybody who can get a handle on the BuildMaster can force a build with:
-# IControl(master).getBuilder("full-2.3").forceBuild("me", "boredom")
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/pbutil.py b/buildbot/buildbot-source/build/lib/buildbot/pbutil.py
deleted file mode 100644
index bc85a016d..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/pbutil.py
+++ /dev/null
@@ -1,147 +0,0 @@
-
-"""Base classes handy for use with PB clients.
-"""
-
-from twisted.spread import pb
-
-from twisted.spread.pb import PBClientFactory
-from twisted.internet import protocol
-from twisted.python import log
-
-class NewCredPerspective(pb.Avatar):
- def attached(self, mind):
- return self
- def detached(self, mind):
- pass
-
-class ReconnectingPBClientFactory(PBClientFactory,
- protocol.ReconnectingClientFactory):
- """Reconnecting client factory for PB brokers.
-
- Like PBClientFactory, but if the connection fails or is lost, the factory
- will attempt to reconnect.
-
- Instead of using f.getRootObject (which gives a Deferred that can only
- be fired once), override the gotRootObject method.
-
- Instead of using the newcred f.login (which is also one-shot), call
- f.startLogin() with the credentials and client, and override the
- gotPerspective method.
-
- Instead of using the oldcred f.getPerspective (also one-shot), call
- f.startGettingPerspective() with the same arguments, and override
- gotPerspective.
-
- gotRootObject and gotPerspective will be called each time the object is
- received (once per successful connection attempt). You will probably want
- to use obj.notifyOnDisconnect to find out when the connection is lost.
-
- If an authorization error occurs, failedToGetPerspective() will be
- invoked.
-
- To use me, subclass, then hand an instance to a connector (like
- TCPClient).
- """
-
- def __init__(self):
- PBClientFactory.__init__(self)
- self._doingLogin = False
- self._doingGetPerspective = False
-
- def clientConnectionFailed(self, connector, reason):
- PBClientFactory.clientConnectionFailed(self, connector, reason)
- # Twisted-1.3 erroneously abandons the connection on non-UserErrors.
- # To avoid this bug, don't upcall, and implement the correct version
- # of the method here.
- if self.continueTrying:
- self.connector = connector
- self.retry()
-
- def clientConnectionLost(self, connector, reason):
- PBClientFactory.clientConnectionLost(self, connector, reason,
- reconnecting=True)
- RCF = protocol.ReconnectingClientFactory
- RCF.clientConnectionLost(self, connector, reason)
-
- def clientConnectionMade(self, broker):
- self.resetDelay()
- PBClientFactory.clientConnectionMade(self, broker)
- if self._doingLogin:
- self.doLogin(self._root)
- if self._doingGetPerspective:
- self.doGetPerspective(self._root)
- self.gotRootObject(self._root)
-
- def __getstate__(self):
- # this should get folded into ReconnectingClientFactory
- d = self.__dict__.copy()
- d['connector'] = None
- d['_callID'] = None
- return d
-
- # oldcred methods
-
- def getPerspective(self, *args):
- raise RuntimeError, "getPerspective is one-shot: use startGettingPerspective instead"
-
- def startGettingPerspective(self, username, password, serviceName,
- perspectiveName=None, client=None):
- self._doingGetPerspective = True
- if perspectiveName == None:
- perspectiveName = username
- self._oldcredArgs = (username, password, serviceName,
- perspectiveName, client)
-
- def doGetPerspective(self, root):
- # oldcred getPerspective()
- (username, password,
- serviceName, perspectiveName, client) = self._oldcredArgs
- d = self._cbAuthIdentity(root, username, password)
- d.addCallback(self._cbGetPerspective,
- serviceName, perspectiveName, client)
- d.addCallbacks(self.gotPerspective, self.failedToGetPerspective)
-
-
- # newcred methods
-
- def login(self, *args):
- raise RuntimeError, "login is one-shot: use startLogin instead"
-
- def startLogin(self, credentials, client=None):
- self._credentials = credentials
- self._client = client
- self._doingLogin = True
-
- def doLogin(self, root):
- # newcred login()
- d = self._cbSendUsername(root, self._credentials.username,
- self._credentials.password, self._client)
- d.addCallbacks(self.gotPerspective, self.failedToGetPerspective)
-
-
- # methods to override
-
- def gotPerspective(self, perspective):
- """The remote avatar or perspective (obtained each time this factory
- connects) is now available."""
- pass
-
- def gotRootObject(self, root):
- """The remote root object (obtained each time this factory connects)
- is now available. This method will be called each time the connection
- is established and the object reference is retrieved."""
- pass
-
- def failedToGetPerspective(self, why):
- """The login process failed, most likely because of an authorization
- failure (bad password), but it is also possible that we lost the new
- connection before we managed to send our credentials.
- """
- log.msg("ReconnectingPBClientFactory.failedToGetPerspective")
- if why.check(pb.PBConnectionLost):
- log.msg("we lost the brand-new connection")
- # retrying might help here, let clientConnectionLost decide
- return
- # probably authorization
- self.stopTrying() # logging in harder won't help
- log.err(why)
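
The docstring of ReconnectingPBClientFactory above describes its intended use: subclass it, call startLogin() with newcred credentials, and override gotPerspective(). A minimal sketch of that pattern (the host, port, and the remote "print" call are illustrative assumptions, not taken from this file):

    from twisted.cred import credentials
    from twisted.internet import reactor
    from buildbot.pbutil import ReconnectingPBClientFactory

    class ExampleClientFactory(ReconnectingPBClientFactory):
        def gotPerspective(self, perspective):
            # called on every successful (re)connection
            perspective.callRemote("print", "hello")  # assumes the avatar accepts 'print'

    f = ExampleClientFactory()
    f.startLogin(credentials.UsernamePassword("user", "passwd"))
    reactor.connectTCP("localhost", 9989, f)  # hypothetical master host/port
    reactor.run()
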
diff --git a/buildbot/buildbot-source/build/lib/buildbot/process/__init__.py b/buildbot/buildbot-source/build/lib/buildbot/process/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/process/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/build/lib/buildbot/process/base.py b/buildbot/buildbot-source/build/lib/buildbot/process/base.py
deleted file mode 100644
index 82412564d..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/process/base.py
+++ /dev/null
@@ -1,608 +0,0 @@
-# -*- test-case-name: buildbot.test.test_step -*-
-
-import types, time
-from StringIO import StringIO
-
-from twisted.python import log, components
-from twisted.python.failure import Failure
-from twisted.internet import reactor, defer, error
-from twisted.spread import pb
-
-from buildbot import interfaces
-from buildbot.twcompat import implements
-from buildbot.util import now
-from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, EXCEPTION
-from buildbot.status.builder import Results, BuildRequestStatus
-from buildbot.status.progress import BuildProgress
-
-class BuildRequest:
- """I represent a request to a specific Builder to run a single build.
-
- I have a SourceStamp which specifies what sources I will build. This may
- specify a specific revision of the source tree (so source.branch,
- source.revision, and source.patch are used). The .patch attribute is
- either None or a tuple of (patchlevel, diff), consisting of a number to
- use in 'patch -pN', and a unified-format context diff.
-
- Alternatively, the SourceStamp may specify a set of Changes to be built,
- contained in source.changes. In this case, I may be mergeable with other
- BuildRequests on the same branch.
-
- I may be part of a BuildSet, in which case I will report status results
- to it.
-
- I am paired with a BuildRequestStatus object, to which I feed status
- information.
-
- @type source: a L{buildbot.sourcestamp.SourceStamp} instance.
- @ivar source: the source code that this BuildRequest uses
-
- @type reason: string
- @ivar reason: the reason this Build is being requested. Schedulers
- provide this, but for forced builds the user requesting the
- build will provide a string.
-
- @ivar status: the IBuildStatus object which tracks our status
-
- @ivar submittedAt: a timestamp (seconds since epoch) when this request
- was submitted to the Builder. This is used by the CVS
- step to compute a checkout timestamp.
- """
-
- source = None
- builder = None
- startCount = 0 # how many times we have tried to start this build
-
- if implements:
- implements(interfaces.IBuildRequestControl)
- else:
- __implements__ = interfaces.IBuildRequestControl,
-
- def __init__(self, reason, source, builderName=None, username=None, config=None, installsetcheck=None):
- # TODO: remove the =None on builderName, it is there so I don't have
- # to change a lot of tests that create BuildRequest objects
- assert interfaces.ISourceStamp(source, None)
- self.username = username
- self.config = config
- self.installsetcheck = installsetcheck
- self.reason = reason
- self.source = source
- self.start_watchers = []
- self.finish_watchers = []
- self.status = BuildRequestStatus(source, builderName)
-
- def canBeMergedWith(self, other):
- return self.source.canBeMergedWith(other.source)
-
- def mergeWith(self, others):
- return self.source.mergeWith([o.source for o in others])
-
- def mergeReasons(self, others):
- """Return a reason for the merged build request."""
- reasons = []
- for req in [self] + others:
- if req.reason and req.reason not in reasons:
- reasons.append(req.reason)
- return ", ".join(reasons)
-
- def mergeConfig(self, others):
- """Return a config for the merged build request."""
- configs = []
- for con in [self] + others:
- if con.config and con.config not in configs:
- configs.append(con.config)
- return ", ".join(configs)
-
- def mergeInstallSet(self, others):
- """Return a installsetcheck for the merged build request."""
- installsetchecks = []
- for isc in [self] + others:
- if isc.installsetcheck and isc.installsetcheck not in installsetchecks:
- installsetchecks.append(isc.installsetcheck)
- return ", ".join(installsetchecks)
-
- def mergeUsername(self, others):
- """Return a username for the merged build request."""
- usernames = []
- for isc in [self] + others:
- if isc.username and isc.username not in usernames:
- usernames.append(isc.username)
- return ", ".join(usernames)
-
- def waitUntilFinished(self):
- """Get a Deferred that will fire (with a
- L{buildbot.interfaces.IBuildStatus} instance) when the build
- finishes."""
- d = defer.Deferred()
- self.finish_watchers.append(d)
- return d
-
- # these are called by the Builder
-
- def requestSubmitted(self, builder):
- # the request has been placed on the queue
- self.builder = builder
-
- def buildStarted(self, build, buildstatus):
- """This is called by the Builder when a Build has been started in the
- hopes of satisfying this BuildRequest. It may be called multiple
- times, since interrupted builds and lost buildslaves may force
- multiple Builds to be run until the fate of the BuildRequest is known
- for certain."""
- for o in self.start_watchers[:]:
- # these observers get the IBuildControl
- o(build)
- # while these get the IBuildStatus
- self.status.buildStarted(buildstatus)
-
- def finished(self, buildstatus):
- """This is called by the Builder when the BuildRequest has been
- retired. This happens when its Build has either succeeded (yay!) or
- failed (boo!). TODO: If it is halted due to an exception (oops!), or
- some other retryable error, C{finished} will not be called yet."""
-
- for w in self.finish_watchers:
- w.callback(buildstatus)
- self.finish_watchers = []
-
- # IBuildRequestControl
-
- def subscribe(self, observer):
- self.start_watchers.append(observer)
- def unsubscribe(self, observer):
- self.start_watchers.remove(observer)
-
- def cancel(self):
- """Cancel this request. This can only be successful if the Build has
- not yet been started.
-
- @return: a boolean indicating if the cancel was successful."""
- if self.builder:
- return self.builder.cancelBuildRequest(self)
- return False
-
-
-class Build:
- """I represent a single build by a single bot. Specialized Builders can
- use subclasses of Build to hold status information unique to those build
- processes.
-
- I control B{how} the build proceeds. The actual build is broken up into a
- series of steps, saved in the .buildSteps[] array as a list of
- L{buildbot.process.step.BuildStep} objects. Each step is a single remote
- command, possibly a shell command.
-
- During the build, I put status information into my C{BuildStatus}
- gatherer.
-
- After the build, I go away.
-
- I can be used by a factory by setting buildClass on
- L{buildbot.process.factory.BuildFactory}
-
- @ivar request: the L{BuildRequest} that triggered me
- @ivar build_status: the L{buildbot.status.builder.BuildStatus} that
- collects our status
- """
-
- if implements:
- implements(interfaces.IBuildControl)
- else:
- __implements__ = interfaces.IBuildControl,
-
- workdir = "build"
- build_status = None
- reason = "changes"
- finished = False
- results = None
- config = None
- installsetcheck = None
- username = None
-
- def __init__(self, requests):
- self.requests = requests
- for req in self.requests:
- req.startCount += 1
- self.locks = []
- # build a source stamp
- self.source = requests[0].mergeWith(requests[1:])
- self.reason = requests[0].mergeReasons(requests[1:])
- self.config = requests[0].mergeConfig(requests[1:])
- self.installsetcheck = requests[0].mergeInstallSet(requests[1:])
- self.username = requests[0].mergeUsername(requests[1:])
- #self.abandoned = False
-
- self.progress = None
- self.currentStep = None
- self.slaveEnvironment = {}
-
- def setBuilder(self, builder):
- """
- Set the given builder as our builder.
-
- @type builder: L{buildbot.process.builder.Builder}
- """
- self.builder = builder
-
- def setLocks(self, locks):
- self.locks = locks
-
- def getSourceStamp(self):
- return self.source
-
- def setProperty(self, propname, value):
- """Set a property on this build. This may only be called after the
- build has started, so that it has a BuildStatus object where the
- properties can live."""
- self.build_status.setProperty(propname, value)
-
- def getProperty(self, propname):
- return self.build_status.properties[propname]
-
-
- def allChanges(self):
- return self.source.changes
-
- def allFiles(self):
- # return a list of all source files that were changed
- files = []
- havedirs = 0
- for c in self.allChanges():
- for f in c.files:
- files.append(f)
- if c.isdir:
- havedirs = 1
- return files
-
- def __repr__(self):
- return "<Build %s>" % (self.builder.name,)
-
- def __getstate__(self):
- d = self.__dict__.copy()
- if d.has_key('remote'):
- del d['remote']
- return d
-
- def blamelist(self):
- blamelist = []
- for c in self.allChanges():
- if c.who not in blamelist:
- blamelist.append(c.who)
- blamelist.sort()
- return blamelist
-
- def changesText(self):
- changetext = ""
- for c in self.allChanges():
- changetext += "-" * 60 + "\n\n" + c.asText() + "\n"
- # consider sorting these by number
- return changetext
-
- def setSteps(self, steps):
- """Set a list of StepFactories, which are generally just class
- objects which derive from step.BuildStep . These are used to create
- the Steps themselves when the Build starts (as opposed to when it is
- first created). By creating the steps later, their __init__ method
- will have access to things like build.allFiles() ."""
- self.stepFactories = steps # tuples of (factory, kwargs)
- for s in steps:
- pass
-
-
-
-
- useProgress = True
-
- def getSlaveCommandVersion(self, command, oldversion=None):
- return self.slavebuilder.getSlaveCommandVersion(command, oldversion)
-
- def setupStatus(self, build_status):
- self.build_status = build_status
- self.setProperty("buildername", self.builder.name)
- self.setProperty("buildnumber", self.build_status.number)
- self.setProperty("branch", self.source.branch)
- self.setProperty("revision", self.source.revision)
- self.setProperty("config", self.config)
- self.setProperty("installsetcheck", self.installsetcheck)
- self.setProperty("username", self.username)
-
- def setupSlaveBuilder(self, slavebuilder):
- self.slavebuilder = slavebuilder
- self.slavename = slavebuilder.slave.slavename
- self.setProperty("slavename", self.slavename)
-
- def startBuild(self, build_status, expectations, slavebuilder):
- """This method sets up the build, then starts it by invoking the
- first Step. It returns a Deferred which will fire when the build
- finishes. This Deferred is guaranteed to never errback."""
-
- # we are taking responsibility for watching the connection to the
- # remote. This responsibility was held by the Builder until our
- # startBuild was called, and will not return to them until we fire
- # the Deferred returned by this method.
-
- log.msg("%s.startBuild" % self)
- self.setupStatus(build_status)
- # now that we have a build_status, we can set properties
- self.setupSlaveBuilder(slavebuilder)
-
- # convert all locks into their real forms
- self.locks = [self.builder.botmaster.getLockByID(l)
- for l in self.locks]
- # then narrow SlaveLocks down to the right slave
- self.locks = [l.getLock(self.slavebuilder) for l in self.locks]
- self.remote = slavebuilder.remote
- self.remote.notifyOnDisconnect(self.lostRemote)
- d = self.deferred = defer.Deferred()
-
- try:
- self.setupBuild(expectations) # create .steps
- except:
- # the build hasn't started yet, so log the exception as a point
- # event instead of flunking the build. TODO: associate this
- # failure with the build instead. this involves doing
- # self.build_status.buildStarted() from within the exception
- # handler
- log.msg("Build.setupBuild failed")
- log.err(Failure())
- self.builder.builder_status.addPointEvent(["setupBuild",
- "exception"],
- color="purple")
- self.finished = True
- self.results = FAILURE
- self.deferred = None
- d.callback(self)
- return d
-
- self.build_status.buildStarted(self)
- self.acquireLocks().addCallback(self._startBuild_2)
- return d
-
- def acquireLocks(self, res=None):
- log.msg("acquireLocks(step %s, locks %s)" % (self, self.locks))
- if not self.locks:
- return defer.succeed(None)
- for lock in self.locks:
- if not lock.isAvailable():
- log.msg("Build %s waiting for lock %s" % (self, lock))
- d = lock.waitUntilAvailable(self)
- d.addCallback(self.acquireLocks)
- return d
- # all locks are available, claim them all
- for lock in self.locks:
- lock.claim(self)
- return defer.succeed(None)
-
- def _startBuild_2(self, res):
- self.startNextStep()
-
- def setupBuild(self, expectations):
- # create the actual BuildSteps. If there are any name collisions, we
- # add a count to the loser until it is unique.
- self.steps = []
- self.stepStatuses = {}
- stepnames = []
- sps = []
-
- for factory, args in self.stepFactories:
- args = args.copy()
- if not args.has_key("workdir"):
- args['workdir'] = self.workdir
- try:
- step = factory(build=self, **args)
- except:
- log.msg("error while creating step, factory=%s, args=%s"
- % (factory, args))
- raise
- name = step.name
- count = 1
- while name in stepnames and count < 100:
- count += 1
- name = step.name + "_%d" % count
- if name in stepnames:
- raise RuntimeError("duplicate step '%s'" % step.name)
- if name != "Install_Set" or (self.installsetcheck and name == "Install_Set") :
- #continue
- step.name = name
- stepnames.append(name)
- self.steps.append(step)
-
- # tell the BuildStatus about the step. This will create a
- # BuildStepStatus and bind it to the Step.
- self.build_status.addStep(step)
-
- sp = None
- if self.useProgress:
- # XXX: maybe bail if step.progressMetrics is empty? or skip
- # progress for that one step (i.e. "it is fast"), or have a
- # separate "variable" flag that makes us bail on progress
- # tracking
- sp = step.setupProgress()
- if sp:
- sps.append(sp)
-
- # Create a buildbot.status.progress.BuildProgress object. This is
- # called once at startup to figure out how to build the long-term
- # Expectations object, and again at the start of each build to get a
- # fresh BuildProgress object to track progress for that individual
- # build. TODO: revisit at-startup call
-
- if self.useProgress:
- self.progress = BuildProgress(sps)
- if self.progress and expectations:
- self.progress.setExpectationsFrom(expectations)
-
- # we are now ready to set up our BuildStatus.
- self.build_status.setSourceStamp(self.source)
- self.build_status.setUsername(self.username)
- self.build_status.setReason(self.reason)
- self.build_status.setBlamelist(self.blamelist())
- self.build_status.setProgress(self.progress)
-
- self.results = [] # list of FAILURE, SUCCESS, WARNINGS, SKIPPED
- self.result = SUCCESS # overall result, may downgrade after each step
- self.text = [] # list of text string lists (text2)
-
- def getNextStep(self):
- """This method is called to obtain the next BuildStep for this build.
- When it returns None (or raises a StopIteration exception), the build
- is complete."""
- if not self.steps:
- return None
- return self.steps.pop(0)
-
- def startNextStep(self):
- try:
- s = self.getNextStep()
- except StopIteration:
- s = None
- if not s:
- return self.allStepsDone()
- self.currentStep = s
- d = defer.maybeDeferred(s.startStep, self.remote)
- d.addCallback(self._stepDone, s)
- d.addErrback(self.buildException)
-
- def _stepDone(self, results, step):
- self.currentStep = None
- if self.finished:
- return # build was interrupted, don't keep building
- terminate = self.stepDone(results, step) # interpret/merge results
- if terminate:
- return self.allStepsDone()
- self.startNextStep()
-
- def stepDone(self, result, step):
- """This method is called when the BuildStep completes. It is passed a
- status object from the BuildStep and is responsible for merging the
- Step's results into those of the overall Build."""
-
- terminate = False
- text = None
- if type(result) == types.TupleType:
- result, text = result
- assert type(result) == type(SUCCESS)
- log.msg(" step '%s' complete: %s" % (step.name, Results[result]))
- self.results.append(result)
- if text:
- self.text.extend(text)
- if not self.remote:
- terminate = True
- if result == FAILURE:
- if step.warnOnFailure:
- if self.result != FAILURE:
- self.result = WARNINGS
- if step.flunkOnFailure:
- self.result = FAILURE
- if step.haltOnFailure:
- self.result = FAILURE
- terminate = True
- elif result == WARNINGS:
- if step.warnOnWarnings:
- if self.result != FAILURE:
- self.result = WARNINGS
- if step.flunkOnWarnings:
- self.result = FAILURE
- elif result == EXCEPTION:
- self.result = EXCEPTION
- terminate = True
- return terminate
-
- def lostRemote(self, remote=None):
- # the slave went away. There are several possible reasons for this,
- # and they aren't necessarily fatal. For now, kill the build, but
- # TODO: see if we can resume the build when it reconnects.
- log.msg("%s.lostRemote" % self)
- self.remote = None
- if self.currentStep:
- # this should cause the step to finish.
- log.msg(" stopping currentStep", self.currentStep)
- self.currentStep.interrupt(Failure(error.ConnectionLost()))
-
- def stopBuild(self, reason="<no reason given>"):
- # the idea here is to let the user cancel a build because, e.g.,
- # they realized they committed a bug and they don't want to waste
- # the time building something that they know will fail. Another
- # reason might be to abandon a stuck build. We want to mark the
- # build as failed quickly rather than waiting for the slave's
- # timeout to kill it on its own.
-
- log.msg(" %s: stopping build: %s" % (self, reason))
- if self.finished:
- return
- # TODO: include 'reason' in this point event
- self.builder.builder_status.addPointEvent(['interrupt'])
- self.currentStep.interrupt(reason)
- if 0:
- # TODO: maybe let its deferred do buildFinished
- if self.currentStep and self.currentStep.progress:
- # XXX: really .fail or something
- self.currentStep.progress.finish()
- text = ["stopped", reason]
- self.buildFinished(text, "red", FAILURE)
-
- def allStepsDone(self):
- if self.result == FAILURE:
- color = "red"
- text = ["failed"]
- elif self.result == WARNINGS:
- color = "orange"
- text = ["warnings"]
- elif self.result == EXCEPTION:
- color = "purple"
- text = ["exception"]
- else:
- color = "green"
- text = ["build", "successful"]
- text.extend(self.text)
- return self.buildFinished(text, color, self.result)
-
- def buildException(self, why):
- log.msg("%s.buildException" % self)
- log.err(why)
- self.buildFinished(["build", "exception"], "purple", FAILURE)
-
- def buildFinished(self, text, color, results):
- """This method must be called when the last Step has completed. It
- marks the Build as complete and returns the Builder to the 'idle'
- state.
-
- It takes three arguments which describe the overall build status:
- text, color, results. 'results' is one of SUCCESS, WARNINGS, or
- FAILURE.
-
- If 'results' is SUCCESS or WARNINGS, we will permit any dependent
- builds to start. If it is 'FAILURE', those builds will be
- abandoned."""
-
- self.finished = True
- if self.remote:
- self.remote.dontNotifyOnDisconnect(self.lostRemote)
- self.results = results
-
- log.msg(" %s: build finished" % self)
- self.build_status.setSlavename(self.slavename)
- self.build_status.setText(text)
- self.build_status.setColor(color)
- self.build_status.setResults(results)
- self.build_status.buildFinished()
- if self.progress:
- # XXX: also test a 'timing consistent' flag?
- log.msg(" setting expectations for next time")
- self.builder.setExpectations(self.progress)
- reactor.callLater(0, self.releaseLocks)
- self.deferred.callback(self)
- self.deferred = None
-
- def releaseLocks(self):
- log.msg("releaseLocks(%s): %s" % (self, self.locks))
- for lock in self.locks:
- lock.release(self)
-
- # IBuildControl
-
- def getStatus(self):
- return self.build_status
-
- # stopBuild is defined earlier
-
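
For context, a BuildRequest from this deleted base.py is normally created by a Scheduler, or by code that holds an IControl handle on the BuildMaster (see the forceBuild comment earlier in this commit). A rough sketch of submitting one by hand, assuming a running BuildMaster instance and a builder named "full" (both hypothetical here):

    from twisted.python import log
    from buildbot import interfaces, sourcestamp
    from buildbot.process.base import BuildRequest

    def force_full_build(master, reason="forced: trying a fix"):
        # build the current HEAD of the default branch
        req = BuildRequest(reason, sourcestamp.SourceStamp(), builderName="full")
        d = req.waitUntilFinished()  # Deferred fires with an IBuildStatus
        d.addCallback(lambda bs: log.msg("build finished: %s" % (bs.getResults(),)))
        interfaces.IControl(master).getBuilder("full").requestBuild(req)
        return d
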
diff --git a/buildbot/buildbot-source/build/lib/buildbot/process/builder.py b/buildbot/buildbot-source/build/lib/buildbot/process/builder.py
deleted file mode 100644
index 59f3c3cd2..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/process/builder.py
+++ /dev/null
@@ -1,689 +0,0 @@
-#! /usr/bin/python
-
-import warnings
-
-from twisted.python import log, components, failure
-from twisted.spread import pb
-from twisted.internet import reactor, defer
-
-from buildbot import interfaces, sourcestamp
-from buildbot.twcompat import implements
-from buildbot.status.progress import Expectations
-from buildbot.status import builder
-from buildbot.util import now
-from buildbot.process import base
-
-(ATTACHING, # slave attached, still checking hostinfo/etc
- IDLE, # idle, available for use
- PINGING, # build about to start, making sure it is still alive
- BUILDING, # build is running
- ) = range(4)
-
-class SlaveBuilder(pb.Referenceable):
- """I am the master-side representative for one of the
- L{buildbot.slave.bot.SlaveBuilder} objects that lives in a remote
- buildbot. When a remote builder connects, I query it for command versions
- and then make it available to any Builds that are ready to run. """
-
- state = ATTACHING
- remote = None
- build = None
-
- def __init__(self, builder):
- self.builder = builder
- self.ping_watchers = []
-
- def getSlaveCommandVersion(self, command, oldversion=None):
- if self.remoteCommands is None:
- # the slave is 0.5.0 or earlier
- return oldversion
- return self.remoteCommands.get(command)
-
- def attached(self, slave, remote, commands):
- self.slave = slave
- self.remote = remote
- self.remoteCommands = commands # maps command name to version
- log.msg("Buildslave %s attached to %s" % (slave.slavename,
- self.builder.name))
- d = self.remote.callRemote("setMaster", self)
- d.addErrback(self._attachFailure, "Builder.setMaster")
- d.addCallback(self._attached2)
- return d
-
- def _attached2(self, res):
- d = self.remote.callRemote("print", "attached")
- d.addErrback(self._attachFailure, "Builder.print 'attached'")
- d.addCallback(self._attached3)
- return d
-
- def _attached3(self, res):
- # now we say they're really attached
- return self
-
- def _attachFailure(self, why, where):
- assert isinstance(where, str)
- log.msg(where)
- log.err(why)
- return why
-
- def detached(self):
- log.msg("Buildslave %s detached from %s" % (self.slave.slavename,
- self.builder.name))
- self.slave = None
- self.remote = None
- self.remoteCommands = None
-
- def startBuild(self, build):
- self.build = build
-
- def finishBuild(self):
- self.build = None
-
-
- def ping(self, timeout, status=None):
- """Ping the slave to make sure it is still there. Returns a Deferred
- that fires with True if it is.
-
- @param status: if you point this at a BuilderStatus, a 'pinging'
- event will be pushed.
- """
-
- newping = not self.ping_watchers
- d = defer.Deferred()
- self.ping_watchers.append(d)
- if newping:
- if status:
- event = status.addEvent(["pinging"], "yellow")
- d2 = defer.Deferred()
- d2.addCallback(self._pong_status, event)
- self.ping_watchers.insert(0, d2)
- # I think it will make the tests run smoother if the status
- # is updated before the ping completes
- Ping().ping(self.remote, timeout).addCallback(self._pong)
-
- return d
-
- def _pong(self, res):
- watchers, self.ping_watchers = self.ping_watchers, []
- for d in watchers:
- d.callback(res)
-
- def _pong_status(self, res, event):
- if res:
- event.text = ["ping", "success"]
- event.color = "green"
- else:
- event.text = ["ping", "failed"]
- event.color = "red"
- event.finish()
-
-class Ping:
- running = False
- timer = None
-
- def ping(self, remote, timeout):
- assert not self.running
- self.running = True
- log.msg("sending ping")
- self.d = defer.Deferred()
- # TODO: add a distinct 'ping' command on the slave.. using 'print'
- # for this purpose is kind of silly.
- remote.callRemote("print", "ping").addCallbacks(self._pong,
- self._ping_failed,
- errbackArgs=(remote,))
-
- # We use either our own timeout or the (long) TCP timeout to detect
- # silently-missing slaves. This might happen because of a NAT
- # timeout or a routing loop. If the slave just shuts down (and we
- # somehow missed the FIN), we should get a "connection refused"
- # message.
- self.timer = reactor.callLater(timeout, self._ping_timeout, remote)
- return self.d
-
- def _ping_timeout(self, remote):
- log.msg("ping timeout")
- # force the BotPerspective to disconnect, since this indicates that
- # the bot is unreachable.
- del self.timer
- remote.broker.transport.loseConnection()
- # the forcibly-lost connection will now cause the ping to fail
-
- def _stopTimer(self):
- if not self.running:
- return
- self.running = False
-
- if self.timer:
- self.timer.cancel()
- del self.timer
-
- def _pong(self, res):
- log.msg("ping finished: success")
- self._stopTimer()
- self.d.callback(True)
-
- def _ping_failed(self, res, remote):
- log.msg("ping finished: failure")
- self._stopTimer()
- # the slave has some sort of internal error, disconnect them. If we
- # don't, we'll requeue a build and ping them again right away,
- # creating a nasty loop.
- remote.broker.transport.loseConnection()
- # TODO: except, if they actually did manage to get this far, they'll
- # probably reconnect right away, and we'll do this game again. Maybe
- # it would be better to leave them in the PINGING state.
- self.d.callback(False)
-
-
-class Builder(pb.Referenceable):
- """I manage all Builds of a given type.
-
- Each Builder is created by an entry in the config file (the c['builders']
- list), with a number of parameters.
-
- One of these parameters is the L{buildbot.process.factory.BuildFactory}
- object that is associated with this Builder. The factory is responsible
- for creating new L{Build<buildbot.process.base.Build>} objects. Each
- Build object defines when and how the build is performed, so a new
- Factory or Builder should be defined to control this behavior.
-
- The Builder holds on to a number of L{base.BuildRequest} objects in a
- list named C{.buildable}. Incoming BuildRequest objects will be added to
- this list, or (if possible) merged into an existing request. When a slave
- becomes available, I will use my C{BuildFactory} to turn the request into
- a new C{Build} object. The C{BuildRequest} is forgotten, the C{Build}
- goes into C{.building} while it runs. Once the build finishes, I will
- discard it.
-
- I maintain a list of available SlaveBuilders, one for each connected
- slave that the C{slavenames} parameter says we can use. Some of these
- will be idle, some of them will be busy running builds for me. If there
- are multiple slaves, I can run multiple builds at once.
-
- I also manage forced builds, progress expectation (ETA) management, and
- some status delivery chores.
-
- I am persisted in C{BASEDIR/BUILDERNAME/builder}, so I can remember how
- long a build usually takes to run (in my C{expectations} attribute). This
- pickle also includes the L{buildbot.status.builder.BuilderStatus} object,
- which remembers the set of historic builds.
-
- @type buildable: list of L{buildbot.process.base.BuildRequest}
- @ivar buildable: BuildRequests that are ready to build, but which are
- waiting for a buildslave to be available.
-
- @type building: list of L{buildbot.process.base.Build}
- @ivar building: Builds that are actively running
-
- """
-
- expectations = None # this is created the first time we get a good build
- START_BUILD_TIMEOUT = 10
-
- def __init__(self, setup, builder_status):
- """
- @type setup: dict
- @param setup: builder setup data, as stored in
- BuildmasterConfig['builders']. Contains name,
- slavename(s), builddir, factory, locks.
- @type builder_status: L{buildbot.status.builder.BuilderStatus}
- """
- self.name = setup['name']
- self.slavenames = []
- if setup.has_key('slavename'):
- self.slavenames.append(setup['slavename'])
- if setup.has_key('slavenames'):
- self.slavenames.extend(setup['slavenames'])
- self.builddir = setup['builddir']
- self.buildFactory = setup['factory']
- self.locks = setup.get("locks", [])
- if setup.has_key('periodicBuildTime'):
- raise ValueError("periodicBuildTime can no longer be defined as"
- " part of the Builder: use scheduler.Periodic"
- " instead")
-
- # build/wannabuild slots: Build objects move along this sequence
- self.buildable = []
- self.building = []
-
- # buildslaves which have connected but which are not yet available.
- # These are always in the ATTACHING state.
- self.attaching_slaves = []
-
- # buildslaves at our disposal. Each SlaveBuilder instance has a
- # .state that is IDLE, PINGING, or BUILDING. "PINGING" is used when a
- # Build is about to start, to make sure that they're still alive.
- self.slaves = []
-
- self.builder_status = builder_status
- self.builder_status.setSlavenames(self.slavenames)
-
- # for testing, to help synchronize tests
- self.watchers = {'attach': [], 'detach': [], 'detach_all': [],
- 'idle': []}
-
- def setBotmaster(self, botmaster):
- self.botmaster = botmaster
-
- def compareToSetup(self, setup):
- diffs = []
- setup_slavenames = []
- if setup.has_key('slavename'):
- setup_slavenames.append(setup['slavename'])
- setup_slavenames.extend(setup.get('slavenames', []))
- if setup_slavenames != self.slavenames:
- diffs.append('slavenames changed from %s to %s' \
- % (self.slavenames, setup_slavenames))
- if setup['builddir'] != self.builddir:
- diffs.append('builddir changed from %s to %s' \
- % (self.builddir, setup['builddir']))
- if setup['factory'] != self.buildFactory: # compare objects
- diffs.append('factory changed')
- oldlocks = [(lock.__class__, lock.name)
- for lock in setup.get('locks',[])]
- newlocks = [(lock.__class__, lock.name)
- for lock in self.locks]
- if oldlocks != newlocks:
- diffs.append('locks changed from %s to %s' % (oldlocks, newlocks))
- return diffs
-
- def __repr__(self):
- return "<Builder '%s'>" % self.name
-
-
- def submitBuildRequest(self, req):
- req.submittedAt = now()
- self.buildable.append(req)
- req.requestSubmitted(self)
- self.builder_status.addBuildRequest(req.status)
- self.maybeStartBuild()
-
- def cancelBuildRequest(self, req):
- if req in self.buildable:
- self.buildable.remove(req)
- self.builder_status.removeBuildRequest(req.status)
- return True
- return False
-
- def __getstate__(self):
- d = self.__dict__.copy()
- # TODO: note that d['buildable'] can contain Deferreds
- del d['building'] # TODO: move these back to .buildable?
- del d['slaves']
- return d
-
- def __setstate__(self, d):
- self.__dict__ = d
- self.building = []
- self.slaves = []
-
- def fireTestEvent(self, name, with=None):
- if with is None:
- with = self
- watchers = self.watchers[name]
- self.watchers[name] = []
- for w in watchers:
- reactor.callLater(0, w.callback, with)
-
- def attached(self, slave, remote, commands):
- """This is invoked by the BotPerspective when the self.slavename bot
- registers their builder.
-
- @type slave: L{buildbot.master.BotPerspective}
- @param slave: the BotPerspective that represents the buildslave as a
- whole
- @type remote: L{twisted.spread.pb.RemoteReference}
- @param remote: a reference to the L{buildbot.slave.bot.SlaveBuilder}
- @type commands: dict: string -> string, or None
- @param commands: provides the slave's version of each RemoteCommand
-
- @rtype: L{twisted.internet.defer.Deferred}
- @return: a Deferred that fires (with 'self') when the slave-side
- builder is fully attached and ready to accept commands.
- """
- for s in self.attaching_slaves + self.slaves:
- if s.slave == slave:
- # already attached to them. This is fairly common, since
- # attached() gets called each time we receive the builder
- # list from the slave, and we ask for it each time we add or
- # remove a builder. So if the slave is hosting builders
- # A,B,C, and the config file changes A, we'll remove A and
- # re-add it, triggering two builder-list requests, getting
- # two redundant calls to attached() for B, and another two
- # for C.
- #
- # Therefore, when we see that we're already attached, we can
- # just ignore it. TODO: build a diagram of the state
- # transitions here, I'm concerned about sb.attached() failing
- # and leaving sb.state stuck at 'ATTACHING', and about
- # the detached() message arriving while there's some
- # transition pending such that the response to the transition
- # re-vivifies sb
- return defer.succeed(self)
-
- sb = SlaveBuilder(self)
- self.attaching_slaves.append(sb)
- d = sb.attached(slave, remote, commands)
- d.addCallback(self._attached)
- d.addErrback(self._not_attached, slave)
- return d
-
- def _attached(self, sb):
- # TODO: make this .addSlaveEvent(slave.slavename, ['connect']) ?
- self.builder_status.addPointEvent(['connect', sb.slave.slavename])
- sb.state = IDLE
- self.attaching_slaves.remove(sb)
- self.slaves.append(sb)
- self.maybeStartBuild()
-
- self.fireTestEvent('attach')
- return self
-
- def _not_attached(self, why, slave):
- # already log.err'ed by SlaveBuilder._attachFailure
- # TODO: make this .addSlaveEvent?
- # TODO: remove from self.slaves (except that detached() should get
- # run first, right?)
- self.builder_status.addPointEvent(['failed', 'connect',
- slave.slave.slavename])
- # TODO: add an HTMLLogFile of the exception
- self.fireTestEvent('attach', why)
-
- def detached(self, slave):
- """This is called when the connection to the bot is lost."""
- log.msg("%s.detached" % self, slave.slavename)
- for sb in self.attaching_slaves + self.slaves:
- if sb.slave == slave:
- break
- else:
- log.msg("WEIRD: Builder.detached(%s) (%s)"
- " not in attaching_slaves(%s)"
- " or slaves(%s)" % (slave, slave.slavename,
- self.attaching_slaves,
- self.slaves))
- return
- if sb.state == BUILDING:
- # the Build's .lostRemote method (invoked by a notifyOnDisconnect
- # handler) will cause the Build to be stopped, probably right
- # after the notifyOnDisconnect that invoked us finishes running.
-
- # TODO: should failover to a new Build
- #self.retryBuild(sb.build)
- pass
-
- if sb in self.attaching_slaves:
- self.attaching_slaves.remove(sb)
- if sb in self.slaves:
- self.slaves.remove(sb)
-
- # TODO: make this .addSlaveEvent?
- self.builder_status.addPointEvent(['disconnect', slave.slavename])
- sb.detached() # inform the SlaveBuilder that their slave went away
- self.updateBigStatus()
- self.fireTestEvent('detach')
- if not self.slaves:
- self.fireTestEvent('detach_all')
-
- def updateBigStatus(self):
- if not self.slaves:
- self.builder_status.setBigState("offline")
- elif self.building:
- self.builder_status.setBigState("building")
- else:
- self.builder_status.setBigState("idle")
- self.fireTestEvent('idle')
-
- def maybeStartBuild(self):
- log.msg("maybeStartBuild: %s %s" % (self.buildable, self.slaves))
- if not self.buildable:
- self.updateBigStatus()
- return # nothing to do
- # find the first idle slave
- for sb in self.slaves:
- if sb.state == IDLE:
- break
- else:
- log.msg("%s: want to start build, but we don't have a remote"
- % self)
- self.updateBigStatus()
- return
-
- # there is something to build, and there is a slave on which to build
- # it. Grab the oldest request, see if we can merge it with anything
- # else.
- req = self.buildable.pop(0)
- self.builder_status.removeBuildRequest(req.status)
- mergers = []
- for br in self.buildable[:]:
- if req.canBeMergedWith(br):
- self.buildable.remove(br)
- self.builder_status.removeBuildRequest(br.status)
- mergers.append(br)
- requests = [req] + mergers
-
- # Create a new build from our build factory and set ourself as the
- # builder.
- build = self.buildFactory.newBuild(requests)
- build.setBuilder(self)
- build.setLocks(self.locks)
-
- # start it
- self.startBuild(build, sb)
-
- def startBuild(self, build, sb):
- """Start a build on the given slave.
- @param build: the L{base.Build} to start
- @param sb: the L{SlaveBuilder} which will host this build
-
- @return: a Deferred which fires with a
- L{buildbot.interfaces.IBuildControl} that can be used to stop the
- Build, or to access a L{buildbot.interfaces.IBuildStatus} which will
- watch the Build as it runs. """
-
- self.building.append(build)
-
- # claim the slave. TODO: consider moving changes to sb.state inside
- # SlaveBuilder.. that would be cleaner.
- sb.state = PINGING
- sb.startBuild(build)
-
- self.updateBigStatus()
-
- log.msg("starting build %s.. pinging the slave" % build)
- # ping the slave to make sure they're still there. If they've fallen
- # off the map (due to a NAT timeout or something), this will fail in
- # a couple of minutes, depending upon the TCP timeout. TODO: consider
- # making this time out faster, or at least characterize the likely
- # duration.
- d = sb.ping(self.START_BUILD_TIMEOUT)
- d.addCallback(self._startBuild_1, build, sb)
- return d
-
- def _startBuild_1(self, res, build, sb):
- if not res:
- return self._startBuildFailed("slave ping failed", build, sb)
- # The buildslave is ready to go.
- sb.state = BUILDING
- d = sb.remote.callRemote("startBuild")
- d.addCallbacks(self._startBuild_2, self._startBuildFailed,
- callbackArgs=(build,sb), errbackArgs=(build,sb))
- return d
-
- def _startBuild_2(self, res, build, sb):
- # create the BuildStatus object that goes with the Build
- bs = self.builder_status.newBuild()
-
- # start the build. This will first set up the steps, then tell the
- # BuildStatus that it has started, which will announce it to the
- # world (through our BuilderStatus object, which is its parent).
- # Finally it will start the actual build process.
- d = build.startBuild(bs, self.expectations, sb)
- d.addCallback(self.buildFinished, sb)
- d.addErrback(log.err) # this shouldn't happen. if it does, the slave
- # will be wedged
- for req in build.requests:
- req.buildStarted(build, bs)
- return build # this is the IBuildControl
-
- def _startBuildFailed(self, why, build, sb):
- # put the build back on the buildable list
- log.msg("I tried to tell the slave that the build %s started, but "
- "remote_startBuild failed: %s" % (build, why))
- # release the slave
- sb.finishBuild()
- sb.state = IDLE
-
- log.msg("re-queueing the BuildRequest")
- self.building.remove(build)
- for req in build.requests:
- self.buildable.insert(0, req) # they get first priority
- self.builder_status.addBuildRequest(req.status)
-
- # other notifyOnDisconnect calls will mark the slave as disconnected.
- # Re-try after they have fired, maybe there's another slave
- # available. TODO: I don't like these un-synchronizable callLaters..
- # a better solution is to mark the SlaveBuilder as disconnected
- # ourselves, but we'll need to make sure that they can tolerate
- # multiple disconnects first.
- reactor.callLater(0, self.maybeStartBuild)
-
- def buildFinished(self, build, sb):
- """This is called when the Build has finished (either success or
- failure). Any exceptions during the build are reported with
- results=FAILURE, not with an errback."""
-
- # release the slave
- sb.finishBuild()
- sb.state = IDLE
- # otherwise the slave probably got removed in detach()
-
- self.building.remove(build)
- for req in build.requests:
- req.finished(build.build_status)
- self.maybeStartBuild()
-
- def setExpectations(self, progress):
- """Mark the build as successful and update expectations for the next
- build. Only call this when the build did not fail in any way that
- would invalidate the time expectations generated by it. (if the
- compile failed and thus terminated early, we can't use the last
- build to predict how long the next one will take).
- """
- if self.expectations:
- self.expectations.update(progress)
- else:
- # the first time we get a good build, create our Expectations
- # based upon its results
- self.expectations = Expectations(progress)
- log.msg("new expectations: %s seconds" % \
- self.expectations.expectedBuildTime())
-
- def shutdownSlave(self):
- if self.remote:
- self.remote.callRemote("shutdown")
-
-
-class BuilderControl(components.Adapter):
- if implements:
- implements(interfaces.IBuilderControl)
- else:
- __implements__ = interfaces.IBuilderControl,
-
- def forceBuild(self, who, reason):
- """This is a shortcut for building the current HEAD.
-
- (false: You get back a BuildRequest, just as if you'd asked politely.
- To get control of the resulting build, you'll need to use
- req.subscribe() .)
-
- (true: You get back a Deferred that fires with an IBuildControl)
-
- This shortcut peeks into the Builder and raises an exception if there
- is no slave available, to make backwards-compatibility a little
- easier.
- """
-
- warnings.warn("Please use BuilderControl.requestBuildSoon instead",
- category=DeprecationWarning, stacklevel=1)
-
- # see if there is an idle slave, so we can emit an appropriate error
- # message
- for sb in self.original.slaves:
- if sb.state == IDLE:
- break
- else:
- if self.original.building:
- raise interfaces.BuilderInUseError("All slaves are in use")
- raise interfaces.NoSlaveError("There are no slaves connected")
-
- req = base.BuildRequest(reason, sourcestamp.SourceStamp())
- self.requestBuild(req)
- # this is a hack that fires the Deferred for the first build and
- # ignores any others
- class Watcher:
- def __init__(self, req):
- self.req = req
- def wait(self):
- self.d = d = defer.Deferred()
- req.subscribe(self.started)
- return d
- def started(self, bs):
- if self.d:
- self.req.unsubscribe(self.started)
- self.d.callback(bs)
- self.d = None
- w = Watcher(req)
- return w.wait()
-
- def requestBuild(self, req):
- """Submit a BuildRequest to this Builder."""
- self.original.submitBuildRequest(req)
-
- def requestBuildSoon(self, req):
- """Submit a BuildRequest like requestBuild, but raise a
- L{buildbot.interfaces.NoSlaveError} if no slaves are currently
- available, so it cannot be used to queue a BuildRequest in the hopes
- that a slave will eventually connect. This method is appropriate for
- use by things like the web-page 'Force Build' button."""
- if not self.original.slaves:
- raise interfaces.NoSlaveError
- self.requestBuild(req)
-
- def resubmitBuild(self, bs, reason="<rebuild, no reason given>"):
- if not bs.isFinished():
- return
- branch, revision, patch = bs.getSourceStamp()
- changes = bs.getChanges()
- ss = sourcestamp.SourceStamp(branch, revision, patch, changes)
- req = base.BuildRequest(reason, ss, self.original.name)
- self.requestBuild(req)
-
- def getPendingBuilds(self):
- # return IBuildRequestControl objects
- raise NotImplementedError
-
- def getBuild(self, number):
- for b in self.original.building:
- if b.build_status.number == number:
- return b
- return None
-
- def ping(self, timeout=30):
- if not self.original.slaves:
- self.original.builder_status.addPointEvent(["ping", "no slave"],
- "red")
- return defer.succeed(False) # interfaces.NoSlaveError
- dl = []
- for s in self.original.slaves:
- dl.append(s.ping(timeout, self.original.builder_status))
- d = defer.DeferredList(dl)
- d.addCallback(self._gatherPingResults)
- return d
-
- def _gatherPingResults(self, res):
- for ignored,success in res:
- if not success:
- return False
- return True
-
-components.registerAdapter(BuilderControl, Builder, interfaces.IBuilderControl)
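
A small sketch of driving the BuilderControl adapter registered above, for example to ping a builder's slaves; the builder name and the master handle are assumptions for illustration, not part of this file:

    from twisted.python import log
    from buildbot import interfaces

    def check_builder(master, name="full"):
        control = interfaces.IControl(master).getBuilder(name)
        d = control.ping(timeout=30)  # fires True only if every slave answered
        d.addCallback(lambda alive: log.msg("%s slaves reachable: %s" % (name, alive)))
        return d
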
diff --git a/buildbot/buildbot-source/build/lib/buildbot/process/factory.py b/buildbot/buildbot-source/build/lib/buildbot/process/factory.py
deleted file mode 100644
index 295aee9ec..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/process/factory.py
+++ /dev/null
@@ -1,177 +0,0 @@
-# -*- test-case-name: buildbot.test.test_step -*-
-
-from buildbot import util
-from buildbot.process.base import Build
-from buildbot.process import step
-
-# deprecated, use BuildFactory.addStep
-def s(steptype, **kwargs):
- # convenience function for master.cfg files, to create step
- # specification tuples
- return (steptype, kwargs)
-
-class BuildFactory(util.ComparableMixin):
- """
- @cvar buildClass: class to use when creating builds
- @type buildClass: L{buildbot.process.base.Build}
- """
- buildClass = Build
- useProgress = 1
- compare_attrs = ['buildClass', 'steps', 'useProgress']
-
- def __init__(self, steps=None):
- if steps is None:
- steps = []
- self.steps = steps
-
- def newBuild(self, request):
- """Create a new Build instance.
- @param request: a L{base.BuildRequest} describing what is to be built
- """
- b = self.buildClass(request)
- b.useProgress = self.useProgress
- b.setSteps(self.steps)
- return b
-
- def addStep(self, steptype, **kwargs):
- self.steps.append((steptype, kwargs))
-
-
-# BuildFactory subclasses for common build tools
-
-class GNUAutoconf(BuildFactory):
- def __init__(self, source, configure="./configure",
- configureEnv={},
- configureFlags=[],
- compile=["make", "all"],
- test=["make", "check"]):
- assert isinstance(source, tuple)
- assert issubclass(source[0], step.BuildStep)
- BuildFactory.__init__(self, [source])
- if configure is not None:
- # we either need to wind up with a string (which will be
- # space-split), or with a list of strings (which will not). The
- # list of strings is the preferred form.
- if type(configure) is str:
- if configureFlags:
- assert not " " in configure # please use list instead
- command = [configure] + configureFlags
- else:
- command = configure
- else:
- assert isinstance(configure, (list, tuple))
- command = configure + configureFlags
- self.addStep(step.Configure, command=command, env=configureEnv)
- if compile is not None:
- self.addStep(step.Compile, command=compile)
- if test is not None:
- self.addStep(step.Test, command=test)
-
-class CPAN(BuildFactory):
- def __init__(self, source, perl="perl"):
- assert isinstance(source, tuple)
- assert issubclass(source[0], step.BuildStep)
- BuildFactory.__init__(self, [source])
- self.addStep(step.Configure, command=[perl, "Makefile.PL"])
- self.addStep(step.Compile, command=["make"])
- self.addStep(step.Test, command=["make", "test"])
-
-class Distutils(BuildFactory):
- def __init__(self, source, python="python", test=None):
- assert isinstance(source, tuple)
- assert issubclass(source[0], step.BuildStep)
- BuildFactory.__init__(self, [source])
- self.addStep(step.Compile, command=[python, "./setup.py", "build"])
- if test is not None:
- self.addStep(step.Test, command=test)
-
-class Trial(BuildFactory):
- """Build a python module that uses distutils and trial. Set 'tests' to
- the module in which the tests can be found, or set useTestCaseNames=True
- to always have trial figure out which tests to run (based upon which
- files have been changed).
-
- See docs/factories.xhtml for usage samples. Not all of the Trial
- BuildStep options are available here, only the most commonly used ones.
- To get complete access, you will need to create a custom
- BuildFactory."""
-
- trial = "trial"
- randomly = False
- recurse = False
-
- def __init__(self, source,
- buildpython=["python"], trialpython=[], trial=None,
- testpath=".", randomly=None, recurse=None,
- tests=None, useTestCaseNames=False, env=None):
- BuildFactory.__init__(self, [source])
- assert isinstance(source, tuple)
- assert issubclass(source[0], step.BuildStep)
- assert tests or useTestCaseNames, "must use one or the other"
- if trial is not None:
- self.trial = trial
- if randomly is not None:
- self.randomly = randomly
- if recurse is not None:
- self.recurse = recurse
-
- from buildbot.process import step_twisted
- buildcommand = buildpython + ["./setup.py", "build"]
- self.addStep(step.Compile, command=buildcommand, env=env)
- self.addStep(step_twisted.Trial,
- python=trialpython, trial=self.trial,
- testpath=testpath,
- tests=tests, testChanges=useTestCaseNames,
- randomly=self.randomly,
- recurse=self.recurse,
- env=env,
- )
-
-
-# compatibility classes, will go away. Note that these only offer
-# compatibility at the constructor level: if you have subclassed these
-# factories, your subclasses are unlikely to still work correctly.
-
-ConfigurableBuildFactory = BuildFactory
-
-class BasicBuildFactory(GNUAutoconf):
- # really a "GNU Autoconf-created tarball -in-CVS tree" builder
-
- def __init__(self, cvsroot, cvsmodule,
- configure=None, configureEnv={},
- compile="make all",
- test="make check", cvsCopy=False):
- mode = "clobber"
- if cvsCopy:
- mode = "copy"
- source = s(step.CVS, cvsroot=cvsroot, cvsmodule=cvsmodule, mode=mode)
- GNUAutoconf.__init__(self, source,
- configure=configure, configureEnv=configureEnv,
- compile=compile,
- test=test)
-
-class QuickBuildFactory(BasicBuildFactory):
- useProgress = False
-
- def __init__(self, cvsroot, cvsmodule,
- configure=None, configureEnv={},
- compile="make all",
- test="make check", cvsCopy=False):
- mode = "update"
- source = s(step.CVS, cvsroot=cvsroot, cvsmodule=cvsmodule, mode=mode)
- GNUAutoconf.__init__(self, source,
- configure=configure, configureEnv=configureEnv,
- compile=compile,
- test=test)
-
-class BasicSVN(GNUAutoconf):
-
- def __init__(self, svnurl,
- configure=None, configureEnv={},
- compile="make all",
- test="make check"):
- source = s(step.SVN, svnurl=svnurl, mode="update")
- GNUAutoconf.__init__(self, source,
- configure=configure, configureEnv=configureEnv,
- compile=compile,
- test=test)
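
These factory classes are meant to be instantiated from a master.cfg. A hedged example of wiring a GNUAutoconf factory into a builder definition (the CVS location, builder name, and slave name are placeholders, and c is the BuildmasterConfig dictionary conventionally used in master.cfg):

    from buildbot.process import step, factory

    source = factory.s(step.CVS,
                       cvsroot=":pserver:anon@cvs.example.org:/cvsroot",
                       cvsmodule="project", mode="copy")
    f = factory.GNUAutoconf(source,
                            configureFlags=["--disable-docs"],
                            compile=["make", "all"],
                            test=["make", "check"])

    c['builders'] = [{
        'name': 'full',
        'slavename': 'bot1',
        'builddir': 'full',
        'factory': f,
    }]
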
diff --git a/buildbot/buildbot-source/build/lib/buildbot/process/maxq.py b/buildbot/buildbot-source/build/lib/buildbot/process/maxq.py
deleted file mode 100644
index 9ea0ddd30..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/process/maxq.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from buildbot.process import step
-from buildbot.status import event, builder
-
-class MaxQ(step.ShellCommand):
- flunkOnFailure = True
- name = "maxq"
-
- def __init__(self, testdir=None, **kwargs):
- if not testdir:
- raise TypeError("please pass testdir")
- command = 'run_maxq.py %s' % (testdir,)
- step.ShellCommand.__init__(self, command=command, **kwargs)
-
- def startStatus(self):
- evt = event.Event("yellow", ['running', 'maxq', 'tests'],
- files={'log': self.log})
- self.setCurrentActivity(evt)
-
-
- def finished(self, rc):
- self.failures = 0
- if rc:
- self.failures = 1
- output = self.log.getAll()
- self.failures += output.count('\nTEST FAILURE:')
-
- result = (builder.SUCCESS, ['maxq'])
-
- if self.failures:
- result = (builder.FAILURE,
- [str(self.failures), 'maxq', 'failures'])
-
- return self.stepComplete(result)
-
- def finishStatus(self, result):
- if self.failures:
- color = "red"
- text = ["maxq", "failed"]
- else:
- color = "green"
- text = ['maxq', 'tests']
- self.updateCurrentActivity(color=color, text=text)
- self.finishStatusSummary()
- self.finishCurrentActivity()
-
-
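
The MaxQ step above is an ordinary ShellCommand subclass, so (as a sketch, with a made-up repository URL and test directory) it would be attached to a build factory like any other step:

    from buildbot.process.factory import BuildFactory
    from buildbot.process.maxq import MaxQ
    from buildbot.process import step

    f = BuildFactory()
    f.addStep(step.SVN, svnurl="http://svn.example.org/trunk", mode="update")
    f.addStep(MaxQ, testdir="acceptance_tests")  # runs 'run_maxq.py acceptance_tests'
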
diff --git a/buildbot/buildbot-source/build/lib/buildbot/process/process_twisted.py b/buildbot/buildbot-source/build/lib/buildbot/process/process_twisted.py
deleted file mode 100644
index 34052679f..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/process/process_twisted.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#! /usr/bin/python
-
-# Build classes specific to the Twisted codebase
-
-from buildbot.process.base import Build
-from buildbot.process.factory import BuildFactory
-from buildbot.process import step
-from buildbot.process.step_twisted import HLint, ProcessDocs, BuildDebs, \
- Trial, RemovePYCs
-
-class TwistedBuild(Build):
- workdir = "Twisted" # twisted's bin/trial expects to live in here
- def isFileImportant(self, filename):
- if filename.startswith("doc/fun/"):
- return 0
- if filename.startswith("sandbox/"):
- return 0
- return 1
-
-class TwistedTrial(Trial):
- tests = "twisted"
- # the Trial in Twisted >=2.1.0 has --recurse on by default, and -to
- # turned into --reporter=bwverbose .
- recurse = False
- trialMode = ["--reporter=bwverbose"]
- testpath = None
- trial = "./bin/trial"
-
-class TwistedBaseFactory(BuildFactory):
- buildClass = TwistedBuild
- # bin/trial expects its parent directory to be named "Twisted": it uses
- # this to add the local tree to PYTHONPATH during tests
- workdir = "Twisted"
-
- def __init__(self, source):
- BuildFactory.__init__(self, [source])
-
-class QuickTwistedBuildFactory(TwistedBaseFactory):
- treeStableTimer = 30
- useProgress = 0
-
- def __init__(self, source, python="python"):
- TwistedBaseFactory.__init__(self, source)
- if type(python) is str:
- python = [python]
- self.addStep(HLint, python=python[0])
- self.addStep(RemovePYCs)
- for p in python:
- cmd = [p, "setup.py", "build_ext", "-i"]
- self.addStep(step.Compile, command=cmd, flunkOnFailure=True)
- self.addStep(TwistedTrial, python=p, testChanges=True)
-
-class FullTwistedBuildFactory(TwistedBaseFactory):
- treeStableTimer = 5*60
-
- def __init__(self, source, python="python",
- processDocs=False, runTestsRandomly=False,
- compileOpts=[], compileOpts2=[]):
- TwistedBaseFactory.__init__(self, source)
- if processDocs:
- self.addStep(ProcessDocs)
-
- if type(python) == str:
- python = [python]
- assert isinstance(compileOpts, list)
- assert isinstance(compileOpts2, list)
- cmd = (python + compileOpts + ["setup.py", "build_ext"]
- + compileOpts2 + ["-i"])
-
- self.addStep(step.Compile, command=cmd, flunkOnFailure=True)
- self.addStep(RemovePYCs)
- self.addStep(TwistedTrial, python=python, randomly=runTestsRandomly)
-
-class TwistedDebsBuildFactory(TwistedBaseFactory):
- treeStableTimer = 10*60
-
- def __init__(self, source, python="python"):
- TwistedBaseFactory.__init__(self, source)
- self.addStep(ProcessDocs, haltOnFailure=True)
- self.addStep(BuildDebs, warnOnWarnings=True)
-
-class TwistedReactorsBuildFactory(TwistedBaseFactory):
- treeStableTimer = 5*60
-
- def __init__(self, source,
- python="python", compileOpts=[], compileOpts2=[],
- reactors=None):
- TwistedBaseFactory.__init__(self, source)
-
- if type(python) == str:
- python = [python]
- assert isinstance(compileOpts, list)
- assert isinstance(compileOpts2, list)
- cmd = (python + compileOpts + ["setup.py", "build_ext"]
- + compileOpts2 + ["-i"])
-
- self.addStep(step.Compile, command=cmd, warnOnFailure=True)
-
- if reactors == None:
- reactors = [
- 'gtk2',
- 'gtk',
- #'kqueue',
- 'poll',
- 'c',
- 'qt',
- #'win32',
- ]
- for reactor in reactors:
- flunkOnFailure = 1
- warnOnFailure = 0
- #if reactor in ['c', 'qt', 'win32']:
- # # these are buggy, so tolerate failures for now
- # flunkOnFailure = 0
- # warnOnFailure = 1
- self.addStep(RemovePYCs) # TODO: why?
- self.addStep(TwistedTrial, name=reactor, python=python,
- reactor=reactor, flunkOnFailure=flunkOnFailure,
- warnOnFailure=warnOnFailure)
diff --git a/buildbot/buildbot-source/build/lib/buildbot/process/step.py b/buildbot/buildbot-source/build/lib/buildbot/process/step.py
deleted file mode 100644
index c723ab8c5..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/process/step.py
+++ /dev/null
@@ -1,2359 +0,0 @@
-# -*- test-case-name: buildbot.test.test_steps -*-
-
-import time, random, types, re, warnings, os
-from email.Utils import formatdate
-
-from twisted.internet import reactor, defer, error
-from twisted.spread import pb
-from twisted.python import log
-from twisted.python.failure import Failure
-from twisted.web.util import formatFailure
-
-from buildbot.interfaces import BuildSlaveTooOldError
-from buildbot.util import now
-from buildbot.status import progress, builder
-from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, SKIPPED, \
- EXCEPTION
-
-"""
-BuildStep and RemoteCommand classes for master-side representation of the
-build process
-"""
-
-class RemoteCommand(pb.Referenceable):
- """
- I represent a single command to be run on the slave. I handle the details
- of reliably gathering status updates from the slave (acknowledging each),
- and (eventually, in a future release) recovering from interrupted builds.
- This is the master-side object that is known to the slave-side
-    L{buildbot.slave.bot.SlaveBuilder}, to which status updates are sent.
-
- My command should be started by calling .run(), which returns a
- Deferred that will fire when the command has finished, or will
- errback if an exception is raised.
-
- Typically __init__ or run() will set up self.remote_command to be a
- string which corresponds to one of the SlaveCommands registered in
- the buildslave, and self.args to a dictionary of arguments that will
- be passed to the SlaveCommand instance.
-
- start, remoteUpdate, and remoteComplete are available to be overridden
-
- @type commandCounter: list of one int
- @cvar commandCounter: provides a unique value for each
- RemoteCommand executed across all slaves
- @type active: boolean
- @cvar active: whether the command is currently running
- """
- commandCounter = [0] # we use a list as a poor man's singleton
- active = False
-
- def __init__(self, remote_command, args):
- """
- @type remote_command: string
- @param remote_command: remote command to start. This will be
- passed to
- L{buildbot.slave.bot.SlaveBuilder.remote_startCommand}
- and needs to have been registered
- slave-side by
- L{buildbot.slave.registry.registerSlaveCommand}
- @type args: dict
- @param args: arguments to send to the remote command
- """
-
- self.remote_command = remote_command
- self.args = args
-
- def __getstate__(self):
- dict = self.__dict__.copy()
- # Remove the remote ref: if necessary (only for resumed builds), it
- # will be reattached at resume time
- if dict.has_key("remote"):
- del dict["remote"]
- return dict
-
- def run(self, step, remote):
- self.active = True
- self.step = step
- self.remote = remote
- c = self.commandCounter[0]
- self.commandCounter[0] += 1
- #self.commandID = "%d %d" % (c, random.randint(0, 1000000))
- self.commandID = "%d" % c
- log.msg("%s: RemoteCommand.run [%s]" % (self, self.commandID))
- self.deferred = defer.Deferred()
-
- d = defer.maybeDeferred(self.start)
-
- # _finished is called with an error for unknown commands, errors
- # that occur while the command is starting (including OSErrors in
- # exec()), StaleBroker (when the connection was lost before we
- # started), and pb.PBConnectionLost (when the slave isn't responding
- # over this connection, perhaps it had a power failure, or NAT
- # weirdness). If this happens, self.deferred is fired right away.
- d.addErrback(self._finished)
-
- # Connections which are lost while the command is running are caught
- # when our parent Step calls our .lostRemote() method.
- return self.deferred
-
- def start(self):
- """
- Tell the slave to start executing the remote command.
-
- @rtype: L{twisted.internet.defer.Deferred}
- @returns: a deferred that will fire when the remote command is
- done (with None as the result)
- """
- # This method only initiates the remote command.
- # We will receive remote_update messages as the command runs.
- # We will get a single remote_complete when it finishes.
- # We should fire self.deferred when the command is done.
- d = self.remote.callRemote("startCommand", self, self.commandID,
- self.remote_command, self.args)
- return d
-
- def interrupt(self, why):
- # TODO: consider separating this into interrupt() and stop(), where
- # stop() unconditionally calls _finished, but interrupt() merely
- # asks politely for the command to stop soon.
-
- log.msg("RemoteCommand.interrupt", self, why)
- if not self.active:
- log.msg(" but this RemoteCommand is already inactive")
- return
- if not self.remote:
- log.msg(" but our .remote went away")
- return
- if isinstance(why, Failure) and why.check(error.ConnectionLost):
- log.msg("RemoteCommand.disconnect: lost slave")
- self.remote = None
- self._finished(why)
- return
-
- # tell the remote command to halt. Returns a Deferred that will fire
- # when the interrupt command has been delivered.
-
- d = defer.maybeDeferred(self.remote.callRemote, "interruptCommand",
- self.commandID, str(why))
- # the slave may not have remote_interruptCommand
- d.addErrback(self._interruptFailed)
- return d
-
- def _interruptFailed(self, why):
- log.msg("RemoteCommand._interruptFailed", self)
- # TODO: forcibly stop the Command now, since we can't stop it
- # cleanly
- return None
-
- def remote_update(self, updates):
- """
- I am called by the slave's L{buildbot.slave.bot.SlaveBuilder} so
- I can receive updates from the running remote command.
-
- @type updates: list of [object, int]
- @param updates: list of updates from the remote command
- """
- max_updatenum = 0
- for (update, num) in updates:
- #log.msg("update[%d]:" % num)
- try:
- if self.active: # ignore late updates
- self.remoteUpdate(update)
- except:
- # log failure, terminate build, let slave retire the update
- self._finished(Failure())
- # TODO: what if multiple updates arrive? should
- # skip the rest but ack them all
- if num > max_updatenum:
- max_updatenum = num
- return max_updatenum
-
- def remoteUpdate(self, update):
- raise NotImplementedError("You must implement this in a subclass")
-
- def remote_complete(self, failure=None):
- """
- Called by the slave's L{buildbot.slave.bot.SlaveBuilder} to
- notify me the remote command has finished.
-
- @type failure: L{twisted.python.failure.Failure} or None
-
- @rtype: None
- """
- # call the real remoteComplete a moment later, but first return an
- # acknowledgement so the slave can retire the completion message.
- if self.active:
- reactor.callLater(0, self._finished, failure)
- return None
-
- def _finished(self, failure=None):
- self.active = False
- # call .remoteComplete. If it raises an exception, or returns the
- # Failure that we gave it, our self.deferred will be errbacked. If
-        # it does not (either it ate the Failure or the step finished
- # normally and it didn't raise a new exception), self.deferred will
- # be callbacked.
- d = defer.maybeDeferred(self.remoteComplete, failure)
- # arrange for the callback to get this RemoteCommand instance
- # instead of just None
- d.addCallback(lambda r: self)
- # this fires the original deferred we returned from .run(),
- # with self as the result, or a failure
- d.addBoth(self.deferred.callback)
-
- def remoteComplete(self, maybeFailure):
- """Subclasses can override this.
-
- This is called when the RemoteCommand has finished. 'maybeFailure'
- will be None if the command completed normally, or a Failure
- instance in one of the following situations:
-
- - the slave was lost before the command was started
- - the slave didn't respond to the startCommand message
- - the slave raised an exception while starting the command
- (bad command name, bad args, OSError from missing executable)
- - the slave raised an exception while finishing the command
- (they send back a remote_complete message with a Failure payload)
-
- and also (for now):
- - slave disconnected while the command was running
-
- This method should do cleanup, like closing log files. It should
- normally return the 'failure' argument, so that any exceptions will
- be propagated to the Step. If it wants to consume them, return None
- instead."""
-
- return maybeFailure
-
-class LoggedRemoteCommand(RemoteCommand):
- """
- I am a L{RemoteCommand} which expects the slave to send back
- stdout/stderr/rc updates. I gather these updates into a
- L{buildbot.status.builder.LogFile} named C{self.log}. You can give me a
- LogFile to use by calling useLog(), or I will create my own when the
- command is started. Unless you tell me otherwise, I will close the log
- when the command is complete.
- """
-
- log = None
- closeWhenFinished = False
- rc = None
- debug = False
-
- def __repr__(self):
- return "<RemoteCommand '%s' at %d>" % (self.remote_command, id(self))
-
- def useLog(self, loog, closeWhenFinished=False):
- self.log = loog
- self.closeWhenFinished = closeWhenFinished
-
- def start(self):
- if self.log is None:
- # orphan LogFile, cannot be subscribed to
- self.log = builder.LogFile(None)
- self.closeWhenFinished = True
- self.updates = {}
- log.msg("LoggedRemoteCommand.start", self.log)
- return RemoteCommand.start(self)
-
- def addStdout(self, data):
- self.log.addStdout(data)
- def addStderr(self, data):
- self.log.addStderr(data)
- def addHeader(self, data):
- self.log.addHeader(data)
- def remoteUpdate(self, update):
- if self.debug:
- for k,v in update.items():
- log.msg("Update[%s]: %s" % (k,v))
- if update.has_key('stdout'):
- self.addStdout(update['stdout'])
- if update.has_key('stderr'):
- self.addStderr(update['stderr'])
- if update.has_key('header'):
- self.addHeader(update['header'])
- if update.has_key('rc'):
- rc = self.rc = update['rc']
- log.msg("%s rc=%s" % (self, rc))
- self.addHeader("program finished with exit code %d\n" % rc)
- for k in update:
- if k not in ('stdout', 'stderr', 'header', 'rc'):
- if k not in self.updates:
- self.updates[k] = []
- self.updates[k].append(update[k])
-
- def remoteComplete(self, maybeFailure):
- if self.closeWhenFinished:
- if maybeFailure:
- self.addHeader("\nremoteFailed: %s" % maybeFailure)
- else:
- log.msg("closing log")
- self.log.finish()
- return maybeFailure
-
-class RemoteShellCommand(LoggedRemoteCommand):
- """This class helps you run a shell command on the build slave. It will
- accumulate all the command's output into a Log. When the command is
- finished, it will fire a Deferred. You can then check the results of the
- command and parse the output however you like."""
-
- def __init__(self, workdir, command, env=None,
- want_stdout=1, want_stderr=1,
- timeout=20*60, **kwargs):
- """
- @type workdir: string
- @param workdir: directory where the command ought to run,
- relative to the Builder's home directory. Defaults to
- '.': the same as the Builder's homedir. This should
- probably be '.' for the initial 'cvs checkout'
- command (which creates a workdir), and the Build-wide
- workdir for all subsequent commands (including
- compiles and 'cvs update').
-
- @type command: list of strings (or string)
- @param command: the shell command to run, like 'make all' or
- 'cvs update'. This should be a list or tuple
- which can be used directly as the argv array.
- For backwards compatibility, if this is a
- string, the text will be given to '/bin/sh -c
- %s'.
-
- @type env: dict of string->string
- @param env: environment variables to add or change for the
- slave. Each command gets a separate
- environment; all inherit the slave's initial
- one. TODO: make it possible to delete some or
- all of the slave's environment.
-
- @type want_stdout: bool
- @param want_stdout: defaults to True. Set to False if stdout should
- be thrown away. Do this to avoid storing or
- sending large amounts of useless data.
-
- @type want_stderr: bool
- @param want_stderr: False if stderr should be thrown away
-
- @type timeout: int
- @param timeout: tell the remote that if the command fails to
- produce any output for this number of seconds,
- the command is hung and should be killed. Use
- None to disable the timeout.
- """
- self.command = command # stash .command, set it later
- if env is not None:
- # avoid mutating the original master.cfg dictionary. Each
- # ShellCommand gets its own copy, any start() methods won't be
- # able to modify the original.
- env = env.copy()
- args = {'workdir': workdir,
- 'env': env,
- 'want_stdout': want_stdout,
- 'want_stderr': want_stderr,
- 'timeout': timeout,
- }
- LoggedRemoteCommand.__init__(self, "shell", args)
-
- def start(self):
- self.args['command'] = self.command
- if self.remote_command == "shell":
- # non-ShellCommand slavecommands are responsible for doing this
- # fixup themselves
- if self.step.slaveVersion("shell", "old") == "old":
- self.args['dir'] = self.args['workdir']
- what = "command '%s' in dir '%s'" % (self.args['command'],
- self.args['workdir'])
- log.msg(what)
- return LoggedRemoteCommand.start(self)
-
- def __repr__(self):
- return "<RemoteShellCommand '%s'>" % self.command
-
-
-class RemoteTCSHCommand(LoggedRemoteCommand):
- """This class helps you run a shell command on the build slave. It will
- accumulate all the command's output into a Log. When the command is
- finished, it will fire a Deferred. You can then check the results of the
- command and parse the output however you like."""
-
- def __init__(self, workdir, command, env=None,
- want_stdout=1, want_stderr=1,
- timeout=240*60, **kwargs):
- """
- @type workdir: string
- @param workdir: directory where the command ought to run,
- relative to the Builder's home directory. Defaults to
- '.': the same as the Builder's homedir. This should
- probably be '.' for the initial 'cvs checkout'
- command (which creates a workdir), and the Build-wide
- workdir for all subsequent commands (including
- compiles and 'cvs update').
-
- @type command: list of strings (or string)
- @param command: the shell command to run, like 'make all' or
- 'cvs update'. This should be a list or tuple
- which can be used directly as the argv array.
- For backwards compatibility, if this is a
- string, the text will be given to '/bin/sh -c
- %s'.
-
- @type env: dict of string->string
- @param env: environment variables to add or change for the
- slave. Each command gets a separate
- environment; all inherit the slave's initial
- one. TODO: make it possible to delete some or
- all of the slave's environment.
-
- @type want_stdout: bool
- @param want_stdout: defaults to True. Set to False if stdout should
- be thrown away. Do this to avoid storing or
- sending large amounts of useless data.
-
- @type want_stderr: bool
- @param want_stderr: False if stderr should be thrown away
-
- @type timeout: int
- @param timeout: tell the remote that if the command fails to
- produce any output for this number of seconds,
- the command is hung and should be killed. Use
- None to disable the timeout.
- """
- self.command = command # stash .command, set it later
- if env is not None:
- # avoid mutating the original master.cfg dictionary. Each
- # ShellCommand gets its own copy, any start() methods won't be
- # able to modify the original.
- env = env.copy()
- args = {'workdir': workdir,
- 'env': env,
- 'want_stdout': want_stdout,
- 'want_stderr': want_stderr,
- 'timeout': timeout,
- }
- LoggedRemoteCommand.__init__(self, "tcsh", args)
-
- def start(self):
- self.args['command'] = self.command
- if self.remote_command == "tcsh":
- # non-ShellCommand slavecommands are responsible for doing this
- # fixup themselves
- if self.step.slaveVersion("tcsh", "old") == "old":
- self.args['dir'] = self.args['workdir']
- what = "command '%s' in dir '%s'" % (self.args['command'],
- self.args['workdir'])
- log.msg(what)
- return LoggedRemoteCommand.start(self)
-
- def __repr__(self):
- return "<RemoteShellCommand '%s'>" % self.command
-
-
-class BuildStep:
- """
- I represent a single step of the build process. This step may involve
- zero or more commands to be run in the build slave, as well as arbitrary
- processing on the master side. Regardless of how many slave commands are
- run, the BuildStep will result in a single status value.
-
- The step is started by calling startStep(), which returns a Deferred that
- fires when the step finishes. See C{startStep} for a description of the
- results provided by that Deferred.
-
- __init__ and start are good methods to override. Don't forget to upcall
- BuildStep.__init__ or bad things will happen.
-
- To launch a RemoteCommand, pass it to .runCommand and wait on the
- Deferred it returns.
-
- Each BuildStep generates status as it runs. This status data is fed to
- the L{buildbot.status.builder.BuildStepStatus} listener that sits in
- C{self.step_status}. It can also feed progress data (like how much text
- is output by a shell command) to the
- L{buildbot.status.progress.StepProgress} object that lives in
- C{self.progress}, by calling C{progress.setProgress(metric, value)} as it
- runs.
-
- @type build: L{buildbot.process.base.Build}
- @ivar build: the parent Build which is executing this step
-
- @type progress: L{buildbot.status.progress.StepProgress}
- @ivar progress: tracks ETA for the step
-
- @type step_status: L{buildbot.status.builder.BuildStepStatus}
- @ivar step_status: collects output status
- """
-
- # these parameters are used by the parent Build object to decide how to
- # interpret our results. haltOnFailure will affect the build process
- # immediately, the others will be taken into consideration when
- # determining the overall build status.
- #
- haltOnFailure = False
- flunkOnWarnings = False
- flunkOnFailure = False
- warnOnWarnings = False
- warnOnFailure = False
-
- # 'parms' holds a list of all the parameters we care about, to allow
- # users to instantiate a subclass of BuildStep with a mixture of
- # arguments, some of which are for us, some of which are for the subclass
- # (or a delegate of the subclass, like how ShellCommand delivers many
- # arguments to the RemoteShellCommand that it creates). Such delegating
- # subclasses will use this list to figure out which arguments are meant
- # for us and which should be given to someone else.
- parms = ['build', 'name', 'locks',
- 'haltOnFailure',
- 'flunkOnWarnings',
- 'flunkOnFailure',
- 'warnOnWarnings',
- 'warnOnFailure',
- 'progressMetrics',
- ]
-
- name = "generic"
- locks = []
- progressMetrics = [] # 'time' is implicit
- useProgress = True # set to False if step is really unpredictable
- build = None
- step_status = None
- progress = None
-
- def __init__(self, build, **kwargs):
- self.build = build
- for p in self.__class__.parms:
- if kwargs.has_key(p):
- setattr(self, p, kwargs[p])
- del kwargs[p]
- # we want to encourage all steps to get a workdir, so tolerate its
- # presence here. It really only matters for non-ShellCommand steps
- # like Dummy
- if kwargs.has_key('workdir'):
- del kwargs['workdir']
- if kwargs:
- why = "%s.__init__ got unexpected keyword argument(s) %s" \
- % (self, kwargs.keys())
- raise TypeError(why)
-
- def setupProgress(self):
- if self.useProgress:
- sp = progress.StepProgress(self.name, self.progressMetrics)
- self.progress = sp
- self.step_status.setProgress(sp)
- return sp
- return None
-
- def getProperty(self, propname):
- return self.build.getProperty(propname)
-
- def setProperty(self, propname, value):
- self.build.setProperty(propname, value)
-
- def startStep(self, remote):
- """Begin the step. This returns a Deferred that will fire when the
- step finishes.
-
- This deferred fires with a tuple of (result, [extra text]), although
- older steps used to return just the 'result' value, so the receiving
- L{base.Build} needs to be prepared to handle that too. C{result} is
- one of the SUCCESS/WARNINGS/FAILURE/SKIPPED constants from
- L{buildbot.status.builder}, and the extra text is a list of short
- strings which should be appended to the Build's text results. This
- text allows a test-case step which fails to append B{17 tests} to the
- Build's status, in addition to marking the build as failing.
-
- The deferred will errback if the step encounters an exception,
- including an exception on the slave side (or if the slave goes away
- altogether). Failures in shell commands (rc!=0) will B{not} cause an
-        errback; in general the BuildStep will evaluate the results and
- decide whether to treat it as a WARNING or FAILURE.
-
- @type remote: L{twisted.spread.pb.RemoteReference}
- @param remote: a reference to the slave's
- L{buildbot.slave.bot.SlaveBuilder} instance where any
- RemoteCommands may be run
- """
-
- self.remote = remote
- self.deferred = defer.Deferred()
- # convert all locks into their real form
- self.locks = [self.build.builder.botmaster.getLockByID(l)
- for l in self.locks]
- # then narrow SlaveLocks down to the slave that this build is being
- # run on
- self.locks = [l.getLock(self.build.slavebuilder) for l in self.locks]
- for l in self.locks:
- if l in self.build.locks:
- log.msg("Hey, lock %s is claimed by both a Step (%s) and the"
- " parent Build (%s)" % (l, self, self.build))
- raise RuntimeError("lock claimed by both Step and Build")
- d = self.acquireLocks()
- d.addCallback(self._startStep_2)
- return self.deferred
-
- def acquireLocks(self, res=None):
- log.msg("acquireLocks(step %s, locks %s)" % (self, self.locks))
- if not self.locks:
- return defer.succeed(None)
- for lock in self.locks:
- if not lock.isAvailable():
- log.msg("step %s waiting for lock %s" % (self, lock))
- d = lock.waitUntilAvailable(self)
- d.addCallback(self.acquireLocks)
- return d
- # all locks are available, claim them all
- for lock in self.locks:
- lock.claim(self)
- return defer.succeed(None)
-
- def _startStep_2(self, res):
- if self.progress:
- self.progress.start()
- self.step_status.stepStarted()
- try:
- skip = self.start()
- if skip == SKIPPED:
- reactor.callLater(0, self.releaseLocks)
- reactor.callLater(0, self.deferred.callback, SKIPPED)
- except:
- log.msg("BuildStep.startStep exception in .start")
- self.failed(Failure())
-
- def start(self):
- """Begin the step. Override this method and add code to do local
- processing, fire off remote commands, etc.
-
- To spawn a command in the buildslave, create a RemoteCommand instance
- and run it with self.runCommand::
-
- c = RemoteCommandFoo(args)
- d = self.runCommand(c)
- d.addCallback(self.fooDone).addErrback(self.failed)
-
- As the step runs, it should send status information to the
- BuildStepStatus::
-
- self.step_status.setColor('red')
- self.step_status.setText(['compile', 'failed'])
- self.step_status.setText2(['4', 'warnings'])
-
- To add a LogFile, use self.addLog. Make sure it gets closed when it
- finishes. When giving a Logfile to a RemoteShellCommand, just ask it
- to close the log when the command completes::
-
- log = self.addLog('output')
- cmd = RemoteShellCommand(args)
- cmd.useLog(log, closeWhenFinished=True)
-
- You can also create complete Logfiles with generated text in a single
- step::
-
- self.addCompleteLog('warnings', text)
-
- When the step is done, it should call self.finished(result). 'result'
- will be provided to the L{buildbot.process.base.Build}, and should be
- one of the constants defined above: SUCCESS, WARNINGS, FAILURE, or
- SKIPPED.
-
- If the step encounters an exception, it should call self.failed(why).
- 'why' should be a Failure object. This automatically fails the whole
- build with an exception. It is a good idea to add self.failed as an
- errback to any Deferreds you might obtain.
-
- If the step decides it does not need to be run, start() can return
- the constant SKIPPED. This fires the callback immediately: it is not
- necessary to call .finished yourself. This can also indicate to the
- status-reporting mechanism that this step should not be displayed."""
-
- raise NotImplementedError("your subclass must implement this method")
-
- def interrupt(self, reason):
- """Halt the command, either because the user has decided to cancel
- the build ('reason' is a string), or because the slave has
- disconnected ('reason' is a ConnectionLost Failure). Any further
- local processing should be skipped, and the Step completed with an
- error status. The results text should say something useful like
- ['step', 'interrupted'] or ['remote', 'lost']"""
- pass
-
- def releaseLocks(self):
- log.msg("releaseLocks(%s): %s" % (self, self.locks))
- for lock in self.locks:
- lock.release(self)
-
- def finished(self, results):
- if self.progress:
- self.progress.finish()
- self.step_status.stepFinished(results)
- self.releaseLocks()
- self.deferred.callback(results)
-
- def failed(self, why):
- # if isinstance(why, pb.CopiedFailure): # a remote exception might
-        # only have a short traceback, so formatFailure is not as useful as
- # you'd like (no .frames, so no traceback is displayed)
- log.msg("BuildStep.failed, traceback follows")
- log.err(why)
- try:
- if self.progress:
- self.progress.finish()
- self.addHTMLLog("err.html", formatFailure(why))
- self.addCompleteLog("err.text", why.getTraceback())
- # could use why.getDetailedTraceback() for more information
- self.step_status.setColor("purple")
- self.step_status.setText([self.name, "exception"])
- self.step_status.setText2([self.name])
- self.step_status.stepFinished(EXCEPTION)
- except:
- log.msg("exception during failure processing")
- log.err()
- # the progress stuff may still be whacked (the StepStatus may
- # think that it is still running), but the build overall will now
- # finish
- try:
- self.releaseLocks()
- except:
- log.msg("exception while releasing locks")
- log.err()
-
- log.msg("BuildStep.failed now firing callback")
- self.deferred.callback(EXCEPTION)
-
- # utility methods that BuildSteps may find useful
-
- def slaveVersion(self, command, oldversion=None):
- """Return the version number of the given slave command. For the
- commands defined in buildbot.slave.commands, this is the value of
- 'cvs_ver' at the top of that file. Non-existent commands will return
- a value of None. Buildslaves running buildbot-0.5.0 or earlier did
- not respond to the version query: commands on those slaves will
-        return the value given as C{oldversion}, so you can distinguish between old
- buildslaves and missing commands.
-
- If you know that <=0.5.0 buildslaves have the command you want (CVS
- and SVN existed back then, but none of the other VC systems), then it
- makes sense to call this with oldversion='old'. If the command you
- want is newer than that, just leave oldversion= unspecified, and the
- command will return None for a buildslave that does not implement the
- command.
- """
- return self.build.getSlaveCommandVersion(command, oldversion)
-
- def slaveVersionIsOlderThan(self, command, minversion):
- sv = self.build.getSlaveCommandVersion(command, None)
- if sv is None:
- return True
- # the version we get back is a string form of the CVS version number
- # of the slave's buildbot/slave/commands.py, something like 1.39 .
- # This might change in the future (I might move away from CVS), but
- # if so I'll keep updating that string with suitably-comparable
- # values.
- if sv.split(".") < minversion.split("."):
- return True
- return False
-
- def addLog(self, name):
- loog = self.step_status.addLog(name)
- return loog
-
- def addCompleteLog(self, name, text):
- log.msg("addCompleteLog(%s)" % name)
- loog = self.step_status.addLog(name)
- size = loog.chunkSize
- for start in range(0, len(text), size):
- loog.addStdout(text[start:start+size])
- loog.finish()
-
- def addHTMLLog(self, name, html):
- log.msg("addHTMLLog(%s)" % name)
- self.step_status.addHTMLLog(name, html)
-
- def runCommand(self, c):
- d = c.run(self, self.remote)
- return d
-
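-# A minimal sketch of the pattern described in BuildStep.start() above: a
-# master-side-only step that runs no RemoteCommand at all. The class name,
-# step name, and log contents are illustrative only, not part of the
-# published step set.
-class ReportBranch(BuildStep):
-    name = "report-branch"
-
-    def start(self):
-        # nothing to run on the slave: inspect the build, log, and finish
-        branch = self.build.getSourceStamp().branch or "HEAD"
-        self.addCompleteLog("branch", "building branch: %s\n" % branch)
-        self.finished(SUCCESS)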
-
-
-class LoggingBuildStep(BuildStep):
- # This is an abstract base class, suitable for inheritance by all
- # BuildSteps that invoke RemoteCommands which emit stdout/stderr messages
-
- progressMetrics = ['output']
-
- def describe(self, done=False):
- raise NotImplementedError("implement this in a subclass")
-
- def startCommand(self, cmd, errorMessages=[]):
- """
- @param cmd: a suitable RemoteCommand which will be launched, with
- all output being put into a LogFile named 'log'
- """
- self.cmd = cmd # so we can interrupt it
- self.step_status.setColor("yellow")
- self.step_status.setText(self.describe(False))
- loog = self.addLog("log")
- for em in errorMessages:
- loog.addHeader(em)
- log.msg("ShellCommand.start using log", loog)
- log.msg(" for cmd", cmd)
- cmd.useLog(loog, True)
- loog.logProgressTo(self.progress, "output")
- d = self.runCommand(cmd)
- d.addCallbacks(self._commandComplete, self.checkDisconnect)
- d.addErrback(self.failed)
-
- def interrupt(self, reason):
- # TODO: consider adding an INTERRUPTED or STOPPED status to use
- # instead of FAILURE, might make the text a bit more clear.
- # 'reason' can be a Failure, or text
- self.addCompleteLog('interrupt', str(reason))
- d = self.cmd.interrupt(reason)
- return d
-
- def checkDisconnect(self, f):
- f.trap(error.ConnectionLost)
- self.step_status.setColor("red")
- self.step_status.setText(self.describe(True) +
- ["failed", "slave", "lost"])
- self.step_status.setText2(["failed", "slave", "lost"])
- return self.finished(FAILURE)
-
- def _commandComplete(self, cmd):
- self.commandComplete(cmd)
- self.createSummary(cmd.log)
- results = self.evaluateCommand(cmd)
- self.setStatus(cmd, results)
- return self.finished(results)
-
- # to refine the status output, override one or more of the following
- # methods. Change as little as possible: start with the first ones on
- # this list and only proceed further if you have to
- #
- # createSummary: add additional Logfiles with summarized results
- # evaluateCommand: decides whether the step was successful or not
- #
- # getText: create the final per-step text strings
-    # getText2: create the strings added to the overall build status
-    #
-    # maybeGetText2: only adds getText2() when the step affects build status
- #
- # setStatus: handles all status updating
-
- # commandComplete is available for general-purpose post-completion work.
- # It is a good place to do one-time parsing of logfiles, counting
- # warnings and errors. It should probably stash such counts in places
- # like self.warnings so they can be picked up later by your getText
- # method.
-
- # TODO: most of this stuff should really be on BuildStep rather than
- # ShellCommand. That involves putting the status-setup stuff in
- # .finished, which would make it hard to turn off.
-
- def commandComplete(self, cmd):
- """This is a general-purpose hook method for subclasses. It will be
- called after the remote command has finished, but before any of the
- other hook functions are called."""
- pass
-
-
- def createSummary(self, log):
- """To create summary logs, do something like this:
- warnings = grep('^Warning:', log.getText())
- self.addCompleteLog('warnings', warnings)
- """
- file = open('process_log','w')
- file.write(log.getText())
- file.close()
- command = "grep warning: process_log"
- warnings = os.popen(command).read()
- errors = os.popen("grep error: process_log").read()
- tail = os.popen("tail -50 process_log").read()
- if warnings != "" :
- self.addCompleteLog('warnings',warnings)
- if errors != "":
- self.addCompleteLog('errors',errors)
- self.addCompleteLog('tail',tail)
-
-
-
- def evaluateCommand(self, cmd):
- """Decide whether the command was SUCCESS, WARNINGS, or FAILURE.
- Override this to, say, declare WARNINGS if there is any stderr
- activity, or to say that rc!=0 is not actually an error."""
-
- if cmd.rc != 0:
- return FAILURE
- # if cmd.log.getStderr(): return WARNINGS
- return SUCCESS
-
- def getText(self, cmd, results):
- if results == SUCCESS:
- return self.describe(True)
- elif results == WARNINGS:
- return self.describe(True) + ["warnings"]
- else:
- return self.describe(True) + ["failed"]
-
- def getText2(self, cmd, results):
- """We have decided to add a short note about ourselves to the overall
- build description, probably because something went wrong. Return a
- short list of short strings. If your subclass counts test failures or
- warnings of some sort, this is a good place to announce the count."""
- # return ["%d warnings" % warningcount]
- # return ["%d tests" % len(failedTests)]
- return [self.name]
-
- def maybeGetText2(self, cmd, results):
- if results == SUCCESS:
- # successful steps do not add anything to the build's text
- pass
- elif results == WARNINGS:
- if (self.flunkOnWarnings or self.warnOnWarnings):
- # we're affecting the overall build, so tell them why
- return self.getText2(cmd, results)
- else:
- if (self.haltOnFailure or self.flunkOnFailure
- or self.warnOnFailure):
- # we're affecting the overall build, so tell them why
- return self.getText2(cmd, results)
- return []
-
- def getColor(self, cmd, results):
- assert results in (SUCCESS, WARNINGS, FAILURE)
- if results == SUCCESS:
- return "green"
- elif results == WARNINGS:
- return "orange"
- else:
- return "red"
-
- def setStatus(self, cmd, results):
- # this is good enough for most steps, but it can be overridden to
- # get more control over the displayed text
- self.step_status.setColor(self.getColor(cmd, results))
- self.step_status.setText(self.getText(cmd, results))
- self.step_status.setText2(self.maybeGetText2(cmd, results))
-
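-# A small sketch of the evaluateCommand() variation suggested in its
-# docstring above: a zero exit code with stderr output is reported as
-# WARNINGS instead of SUCCESS. The class name is illustrative; a complete
-# step would also provide describe() and a start() that calls
-# startCommand().
-class WarnOnStderrStep(LoggingBuildStep):
-    warnOnWarnings = True
-
-    def evaluateCommand(self, cmd):
-        if cmd.rc != 0:
-            return FAILURE
-        if cmd.log.getStderr():
-            return WARNINGS
-        return SUCCESS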
-
-# -*- test-case-name: buildbot.test.test_properties -*-
-
-class _BuildPropertyDictionary:
- def __init__(self, build):
- self.build = build
- def __getitem__(self, name):
- p = self.build.getProperty(name)
- if p is None:
- p = ""
- return p
-
-class WithProperties:
- """This is a marker class, used in ShellCommand's command= argument to
- indicate that we want to interpolate a build property.
- """
-
- def __init__(self, fmtstring, *args):
- self.fmtstring = fmtstring
- self.args = args
-
- def render(self, build):
- if self.args:
- strings = []
- for name in self.args:
- p = build.getProperty(name)
- if p is None:
- p = ""
- strings.append(p)
- s = self.fmtstring % tuple(strings)
- else:
- s = self.fmtstring % _BuildPropertyDictionary(build)
- return s
-
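-# Two ways to use WithProperties inside a ShellCommand's command= list (the
-# file names are illustrative; 'got_revision' is the property recorded by
-# the Source step further below):
-#
-#   WithProperties("rev-%s.tar.gz", "got_revision")      # positional args
-#   WithProperties("REVISION=%(got_revision)s")          # dictionary lookup
-#
-# Each instance is rendered against the Build just before the command is
-# sent to the slave (see _interpolateProperties below).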
-
-class TCSHShellCommand(LoggingBuildStep):
- """I run a single shell command on the buildslave. I return FAILURE if
- the exit code of that command is non-zero, SUCCESS otherwise. To change
- this behavior, override my .evaluateCommand method.
-
- I create a single Log named 'log' which contains the output of the
- command. To create additional summary Logs, override my .createSummary
- method.
-
- The shell command I run (a list of argv strings) can be provided in
- several ways:
- - a class-level .command attribute
- - a command= parameter to my constructor (overrides .command)
- - set explicitly with my .setCommand() method (overrides both)
-
- @ivar command: a list of argv strings (or WithProperties instances).
- This will be used by start() to create a
- RemoteShellCommand instance.
-
- """
-
- name = "shell"
- description = None # set this to a list of short strings to override
- descriptionDone = None # alternate description when the step is complete
- command = None # set this to a command, or set in kwargs
-
- def __init__(self, workdir,
- description=None, descriptionDone=None,
- command=None,
- **kwargs):
- # most of our arguments get passed through to the RemoteShellCommand
- # that we create, but first strip out the ones that we pass to
- # BuildStep (like haltOnFailure and friends), and a couple that we
- # consume ourselves.
- self.workdir = workdir # required by RemoteShellCommand
- if description:
- self.description = description
- if descriptionDone:
- self.descriptionDone = descriptionDone
- if command:
- self.command = command
-
- # pull out the ones that BuildStep wants, then upcall
- buildstep_kwargs = {}
- for k in kwargs.keys()[:]:
- if k in self.__class__.parms:
- buildstep_kwargs[k] = kwargs[k]
- del kwargs[k]
- LoggingBuildStep.__init__(self, **buildstep_kwargs)
-
- # everything left over goes to the RemoteShellCommand
- kwargs['workdir'] = workdir # including a copy of 'workdir'
- self.remote_kwargs = kwargs
-
-
- def setCommand(self, command):
- self.command = command
-
- def describe(self, done=False):
- """Return a list of short strings to describe this step, for the
- status display. This uses the first few words of the shell command.
- You can replace this by setting .description in your subclass, or by
- overriding this method to describe the step better.
-
- @type done: boolean
- @param done: whether the command is complete or not, to improve the
- way the command is described. C{done=False} is used
- while the command is still running, so a single
- imperfect-tense verb is appropriate ('compiling',
- 'testing', ...) C{done=True} is used when the command
- has finished, and the default getText() method adds some
- text, so a simple noun is appropriate ('compile',
- 'tests' ...)
- """
-
- if done and self.descriptionDone is not None:
- return self.descriptionDone
- if self.description is not None:
- return self.description
-
- words = self.command
- # TODO: handle WithProperties here
- if isinstance(words, types.StringTypes):
- words = words.split()
- if len(words) < 1:
- return ["???"]
- if len(words) == 1:
- return ["'%s'" % words[0]]
- if len(words) == 2:
- return ["'%s" % words[0], "%s'" % words[1]]
- return ["'%s" % words[0], "%s" % words[1], "...'"]
-
- def _interpolateProperties(self, command):
- # interpolate any build properties into our command
- if not isinstance(command, (list, tuple)):
- return command
- command_argv = []
- for argv in command:
- if isinstance(argv, WithProperties):
- command_argv.append(argv.render(self.build))
- else:
- command_argv.append(argv)
- return command_argv
-
- def setupEnvironment(self, cmd):
- # merge in anything from Build.slaveEnvironment . Earlier steps
- # (perhaps ones which compile libraries or sub-projects that need to
- # be referenced by later steps) can add keys to
- # self.build.slaveEnvironment to affect later steps.
- slaveEnv = self.build.slaveEnvironment
- if slaveEnv:
- if cmd.args['env'] is None:
- cmd.args['env'] = {}
- cmd.args['env'].update(slaveEnv)
- # note that each RemoteShellCommand gets its own copy of the
- # dictionary, so we shouldn't be affecting anyone but ourselves.
-
- def start(self):
- command = self._interpolateProperties(self.command)
- # create the actual RemoteShellCommand instance now
- kwargs = self.remote_kwargs
- kwargs['command'] = command
- cmd = RemoteTCSHCommand(**kwargs)
- self.setupEnvironment(cmd)
- self.startCommand(cmd)
-
-
-
-class ShellCommand(LoggingBuildStep):
- """I run a single shell command on the buildslave. I return FAILURE if
- the exit code of that command is non-zero, SUCCESS otherwise. To change
- this behavior, override my .evaluateCommand method.
-
- I create a single Log named 'log' which contains the output of the
- command. To create additional summary Logs, override my .createSummary
- method.
-
- The shell command I run (a list of argv strings) can be provided in
- several ways:
- - a class-level .command attribute
- - a command= parameter to my constructor (overrides .command)
- - set explicitly with my .setCommand() method (overrides both)
-
- @ivar command: a list of argv strings (or WithProperties instances).
- This will be used by start() to create a
- RemoteShellCommand instance.
-
- """
-
- name = "shell"
- description = None # set this to a list of short strings to override
- descriptionDone = None # alternate description when the step is complete
- command = None # set this to a command, or set in kwargs
-
- def __init__(self, workdir,
- description=None, descriptionDone=None,
- command=None,
- **kwargs):
- # most of our arguments get passed through to the RemoteShellCommand
- # that we create, but first strip out the ones that we pass to
- # BuildStep (like haltOnFailure and friends), and a couple that we
- # consume ourselves.
- self.workdir = workdir # required by RemoteShellCommand
- if description:
- self.description = description
- if descriptionDone:
- self.descriptionDone = descriptionDone
- if command:
- self.command = command
-
- # pull out the ones that BuildStep wants, then upcall
- buildstep_kwargs = {}
- for k in kwargs.keys()[:]:
- if k in self.__class__.parms:
- buildstep_kwargs[k] = kwargs[k]
- del kwargs[k]
- LoggingBuildStep.__init__(self, **buildstep_kwargs)
-
- # everything left over goes to the RemoteShellCommand
- kwargs['workdir'] = workdir # including a copy of 'workdir'
- self.remote_kwargs = kwargs
-
-
- def setCommand(self, command):
- self.command = command
-
- def describe(self, done=False):
- """Return a list of short strings to describe this step, for the
- status display. This uses the first few words of the shell command.
- You can replace this by setting .description in your subclass, or by
- overriding this method to describe the step better.
-
- @type done: boolean
- @param done: whether the command is complete or not, to improve the
- way the command is described. C{done=False} is used
- while the command is still running, so a single
- imperfect-tense verb is appropriate ('compiling',
- 'testing', ...) C{done=True} is used when the command
- has finished, and the default getText() method adds some
- text, so a simple noun is appropriate ('compile',
- 'tests' ...)
- """
-
- if done and self.descriptionDone is not None:
- return self.descriptionDone
- if self.description is not None:
- return self.description
-
- words = self.command
- # TODO: handle WithProperties here
- if isinstance(words, types.StringTypes):
- words = words.split()
- if len(words) < 1:
- return ["???"]
- if len(words) == 1:
- return ["'%s'" % words[0]]
- if len(words) == 2:
- return ["'%s" % words[0], "%s'" % words[1]]
- return ["'%s" % words[0], "%s" % words[1], "...'"]
-
- def _interpolateProperties(self, command):
- # interpolate any build properties into our command
- if not isinstance(command, (list, tuple)):
- return command
- command_argv = []
- for argv in command:
- if isinstance(argv, WithProperties):
- command_argv.append(argv.render(self.build))
- else:
- command_argv.append(argv)
- return command_argv
-
- def setupEnvironment(self, cmd):
- # merge in anything from Build.slaveEnvironment . Earlier steps
- # (perhaps ones which compile libraries or sub-projects that need to
- # be referenced by later steps) can add keys to
- # self.build.slaveEnvironment to affect later steps.
- slaveEnv = self.build.slaveEnvironment
- if slaveEnv:
- if cmd.args['env'] is None:
- cmd.args['env'] = {}
- cmd.args['env'].update(slaveEnv)
- # note that each RemoteShellCommand gets its own copy of the
- # dictionary, so we shouldn't be affecting anyone but ourselves.
-
- def start(self):
- command = self._interpolateProperties(self.command)
- # create the actual RemoteShellCommand instance now
- kwargs = self.remote_kwargs
- kwargs['command'] = command
- cmd = RemoteShellCommand(**kwargs)
- self.setupEnvironment(cmd)
- self.startCommand(cmd)
-
-
-
-
-class TreeSize(ShellCommand):
- name = "treesize"
- command = ["du", "-s", "."]
- kb = None
-
- def commandComplete(self, cmd):
- out = cmd.log.getText()
- m = re.search(r'^(\d+)', out)
- if m:
- self.kb = int(m.group(1))
-
- def evaluateCommand(self, cmd):
- if cmd.rc != 0:
- return FAILURE
- if self.kb is None:
- return WARNINGS # not sure how 'du' could fail, but whatever
- return SUCCESS
-
- def getText(self, cmd, results):
- if self.kb is not None:
- return ["treesize", "%d kb" % self.kb]
- return ["treesize", "unknown"]
-
-
-class Source(LoggingBuildStep):
- """This is a base class to generate a source tree in the buildslave.
- Each version control system has a specialized subclass, and is expected
- to override __init__ and implement computeSourceRevision() and
- startVC(). The class as a whole builds up the self.args dictionary, then
- starts a LoggedRemoteCommand with those arguments.
- """
-
- # if the checkout fails, there's no point in doing anything else
- haltOnFailure = True
- notReally = False
-
- branch = None # the default branch, should be set in __init__
-
- def __init__(self, workdir, mode='update', alwaysUseLatest=False,
- timeout=20*60, retry=None, **kwargs):
- """
- @type workdir: string
- @param workdir: local directory (relative to the Builder's root)
- where the tree should be placed
-
- @type mode: string
- @param mode: the kind of VC operation that is desired:
- - 'update': specifies that the checkout/update should be
- performed directly into the workdir. Each build is performed
- in the same directory, allowing for incremental builds. This
- minimizes disk space, bandwidth, and CPU time. However, it
- may encounter problems if the build process does not handle
- dependencies properly (if you must sometimes do a 'clean
- build' to make sure everything gets compiled), or if source
- files are deleted but generated files can influence test
- behavior (e.g. python's .pyc files), or when source
- directories are deleted but generated files prevent CVS from
- removing them.
-
- - 'copy': specifies that the source-controlled workspace
- should be maintained in a separate directory (called the
- 'copydir'), using checkout or update as necessary. For each
- build, a new workdir is created with a copy of the source
- tree (rm -rf workdir; cp -r copydir workdir). This doubles
- the disk space required, but keeps the bandwidth low
- (update instead of a full checkout). A full 'clean' build
- is performed each time. This avoids any generated-file
- build problems, but is still occasionally vulnerable to
- problems such as a CVS repository being manually rearranged
- (causing CVS errors on update) which are not an issue with
- a full checkout.
-
- - 'clobber': specifies that the working directory should be
- deleted each time, necessitating a full checkout for each
-            build. This ensures a clean build from a complete checkout,
- avoiding any of the problems described above, but is
- bandwidth intensive, as the whole source tree must be
- pulled down for each build.
-
- - 'export': is like 'clobber', except that e.g. the 'cvs
- export' command is used to create the working directory.
- This command removes all VC metadata files (the
- CVS/.svn/{arch} directories) from the tree, which is
- sometimes useful for creating source tarballs (to avoid
- including the metadata in the tar file). Not all VC systems
- support export.
-
- @type alwaysUseLatest: boolean
- @param alwaysUseLatest: whether to always update to the most
- recent available sources for this build.
-
- Normally the Source step asks its Build for a list of all
- Changes that are supposed to go into the build, then computes a
- 'source stamp' (revision number or timestamp) that will cause
- exactly that set of changes to be present in the checked out
- tree. This is turned into, e.g., 'cvs update -D timestamp', or
- 'svn update -r revnum'. If alwaysUseLatest=True, bypass this
- computation and always update to the latest available sources
- for each build.
-
- The source stamp helps avoid a race condition in which someone
- commits a change after the master has decided to start a build
- but before the slave finishes checking out the sources. At best
- this results in a build which contains more changes than the
- buildmaster thinks it has (possibly resulting in the wrong
- person taking the blame for any problems that result), at worst
-            it can result in an incoherent set of sources (splitting a
- non-atomic commit) which may not build at all.
-
- @type retry: tuple of ints (delay, repeats) (or None)
- @param retry: if provided, VC update failures are re-attempted up
- to REPEATS times, with DELAY seconds between each
- attempt. Some users have slaves with poor connectivity
- to their VC repository, and they say that up to 80% of
- their build failures are due to transient network
- failures that could be handled by simply retrying a
- couple times.
-
- """
-
- LoggingBuildStep.__init__(self, **kwargs)
-
- assert mode in ("update", "copy", "clobber", "export")
- if retry:
- delay, repeats = retry
- assert isinstance(repeats, int)
- assert repeats > 0
- self.args = {'mode': mode,
- 'workdir': workdir,
- 'timeout': timeout,
- 'retry': retry,
- 'patch': None, # set during .start
- }
- self.alwaysUseLatest = alwaysUseLatest
-
- # Compute defaults for descriptions:
- description = ["updating"]
- descriptionDone = ["update"]
- if mode == "clobber":
- description = ["checkout"]
-            # "checking out" takes too much space in the status display
- descriptionDone = ["checkout"]
- elif mode == "export":
- description = ["exporting"]
- descriptionDone = ["export"]
- self.description = description
- self.descriptionDone = descriptionDone
-
- def describe(self, done=False):
- if done:
- return self.descriptionDone
- return self.description
-
- def computeSourceRevision(self, changes):
- """Each subclass must implement this method to do something more
- precise than -rHEAD every time. For version control systems that use
- repository-wide change numbers (SVN, P4), this can simply take the
- maximum such number from all the changes involved in this build. For
- systems that do not (CVS), it needs to create a timestamp based upon
- the latest Change, the Build's treeStableTimer, and an optional
- self.checkoutDelay value."""
- return None
-
- def start(self):
- if self.notReally:
- log.msg("faking %s checkout/update" % self.name)
- self.step_status.setColor("green")
- self.step_status.setText(["fake", self.name, "successful"])
- self.addCompleteLog("log",
- "Faked %s checkout/update 'successful'\n" \
- % self.name)
- return SKIPPED
-
- # what source stamp would this build like to use?
- s = self.build.getSourceStamp()
- # if branch is None, then use the Step's "default" branch
- branch = s.branch or self.branch
- # if revision is None, use the latest sources (-rHEAD)
- revision = s.revision
- if not revision and not self.alwaysUseLatest:
- revision = self.computeSourceRevision(s.changes)
- # if patch is None, then do not patch the tree after checkout
-
- # 'patch' is None or a tuple of (patchlevel, diff)
- patch = s.patch
-
- self.startVC(branch, revision, patch)
-
- def commandComplete(self, cmd):
- got_revision = None
- if cmd.updates.has_key("got_revision"):
- got_revision = cmd.updates["got_revision"][-1]
- self.setProperty("got_revision", got_revision)
-
-
-
-class CVS(Source):
- """I do CVS checkout/update operations.
-
- Note: if you are doing anonymous/pserver CVS operations, you will need
- to manually do a 'cvs login' on each buildslave before the slave has any
-    hope of success. XXX: fix this: take a cvs password as an argument and
- figure out how to do a 'cvs login' on each build
- """
-
- name = "cvs"
-
- #progressMetrics = ['output']
- #
- # additional things to track: update gives one stderr line per directory
- # (starting with 'cvs server: Updating ') (and is fairly stable if files
- # is empty), export gives one line per directory (starting with 'cvs
- # export: Updating ') and another line per file (starting with U). Would
- # be nice to track these, requires grepping LogFile data for lines,
- # parsing each line. Might be handy to have a hook in LogFile that gets
- # called with each complete line.
-
- def __init__(self, cvsroot, cvsmodule, slavedir, filename="buildbotget.pl",
- global_options=[], branch=None, checkoutDelay=None,
- login=None,
- clobber=0, export=0, copydir=None,
- **kwargs):
-
- """
- @type cvsroot: string
- @param cvsroot: CVS Repository from which the source tree should
- be obtained. '/home/warner/Repository' for local
- or NFS-reachable repositories,
- ':pserver:anon@foo.com:/cvs' for anonymous CVS,
- 'user@host.com:/cvs' for non-anonymous CVS or
- CVS over ssh. Lots of possibilities, check the
- CVS documentation for more.
-
- @type cvsmodule: string
- @param cvsmodule: subdirectory of CVS repository that should be
- retrieved
-
- @type login: string or None
- @param login: if not None, a string which will be provided as a
- password to the 'cvs login' command, used when a
- :pserver: method is used to access the repository.
- This login is only needed once, but must be run
- each time (just before the CVS operation) because
- there is no way for the buildslave to tell whether
- it was previously performed or not.
-
- @type branch: string
- @param branch: the default branch name, will be used in a '-r'
- argument to specify which branch of the source tree
- should be used for this checkout. Defaults to None,
- which means to use 'HEAD'.
-
- @type checkoutDelay: int or None
- @param checkoutDelay: if not None, the number of seconds to put
- between the last known Change and the
- timestamp given to the -D argument. This
- defaults to exactly half of the parent
- Build's .treeStableTimer, but it could be
- set to something else if your CVS change
- notification has particularly weird
- latency characteristics.
-
- @type global_options: list of strings
- @param global_options: these arguments are inserted in the cvs
- command line, before the
- 'checkout'/'update' command word. See
- 'cvs --help-options' for a list of what
- may be accepted here. ['-r'] will make
- the checked out files read only. ['-r',
- '-R'] will also assume the repository is
- read-only (I assume this means it won't
-                                use locks to ensure atomic access to the
- ,v files)."""
-
- self.checkoutDelay = checkoutDelay
- self.branch = branch
- self.workdir = kwargs['workdir']
- self.slavedir = slavedir
- self.filename = filename
-
- if not kwargs.has_key('mode') and (clobber or export or copydir):
- # deal with old configs
- warnings.warn("Please use mode=, not clobber/export/copydir",
- DeprecationWarning)
- if export:
- kwargs['mode'] = "export"
- elif clobber:
- kwargs['mode'] = "clobber"
- elif copydir:
- kwargs['mode'] = "copy"
- else:
- kwargs['mode'] = "update"
-
- Source.__init__(self, **kwargs)
-
- self.args.update({'cvsroot': cvsroot,
- 'cvsmodule': cvsmodule,
- 'filename':filename,
- 'slavedir':slavedir,
- 'global_options': global_options,
- 'login': login,
- })
-
- def computeSourceRevision(self, changes):
- if not changes:
- return None
- lastChange = max([c.when for c in changes])
- if self.checkoutDelay is not None:
- when = lastChange + self.checkoutDelay
- else:
- lastSubmit = max([r.submittedAt for r in self.build.requests])
- when = (lastChange + lastSubmit) / 2
- return formatdate(when)
-
- def startVC(self, branch, revision, patch):
- #if self.slaveVersionIsOlderThan("cvs", "1.39"):
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- # if (branch != self.branch
- # and self.args['mode'] in ("update", "copy")):
- # m = ("This buildslave (%s) does not know about multiple "
- # "branches, and using mode=%s would probably build the "
- # "wrong tree. "
- # "Refusing to build. Please upgrade the buildslave to "
- # "buildbot-0.7.0 or newer." % (self.build.slavename,
- # self.args['mode']))
- # log.msg(m)
- # raise BuildSlaveTooOldError(m)
-
- if branch is None:
- branch = "HEAD"
- self.args['branch'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- if self.args['branch'] == "HEAD" and self.args['revision']:
- # special case. 'cvs update -r HEAD -D today' gives no files
- # TODO: figure out why, see if it applies to -r BRANCH
- self.args['branch'] = None
-
- # deal with old slaves
- warnings = []
- slavever = self.slaveVersion("cvs", "old")
-
- if slavever == "old":
- # 0.5.0
- if self.args['mode'] == "export":
- self.args['export'] = 1
- elif self.args['mode'] == "clobber":
- self.args['clobber'] = 1
- elif self.args['mode'] == "copy":
- self.args['copydir'] = "source"
- self.args['tag'] = self.args['branch']
- assert not self.args['patch'] # 0.5.0 slave can't do patch
-
- #cmd = LoggedRemoteCommand("cvs", self.args)
- self.args['command'] = "./" + self.args['filename'] + " " + self.args['branch'] + " " + self.args['workdir'] + " " + self.args['slavedir'] + " "+"up"
- cmd = LoggedRemoteCommand("shell", self.args)
- self.startCommand(cmd, warnings)
-
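-# A sketch of how the CVS step above is typically wired into a build
-# factory, using the addStep() style seen elsewhere in this tree. The
-# repository, module, and directory names are placeholders, and the
-# BuildFactory import path is assumed rather than taken from this file.
-def _example_cvs_factory():
-    from buildbot.process.factory import BuildFactory
-    f = BuildFactory()
-    f.addStep(CVS, cvsroot=":pserver:anon@cvs.example.org:/cvs",
-              cvsmodule="project", slavedir="/home/buildslave",
-              workdir="build", mode="update", branch="HEAD")
-    f.addStep(ShellCommand, workdir="build", command=["make", "all"])
-    return f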
-
-class SVN(Source):
- """I perform Subversion checkout/update operations."""
-
- name = 'svn'
-
- def __init__(self, svnurl=None, baseURL=None, defaultBranch=None,
- directory=None, **kwargs):
- """
- @type svnurl: string
- @param svnurl: the URL which points to the Subversion server,
- combining the access method (HTTP, ssh, local file),
- the repository host/port, the repository path, the
- sub-tree within the repository, and the branch to
- check out. Using C{svnurl} does not enable builds of
- alternate branches: use C{baseURL} to enable this.
- Use exactly one of C{svnurl} and C{baseURL}.
-
- @param baseURL: if branches are enabled, this is the base URL to
- which a branch name will be appended. It should
- probably end in a slash. Use exactly one of
- C{svnurl} and C{baseURL}.
-
- @param defaultBranch: if branches are enabled, this is the branch
- to use if the Build does not specify one
- explicitly. It will simply be appended
- to C{baseURL} and the result handed to
- the SVN command.
- """
-
- if not kwargs.has_key('workdir') and directory is not None:
- # deal with old configs
- warnings.warn("Please use workdir=, not directory=",
- DeprecationWarning)
- kwargs['workdir'] = directory
-
- self.svnurl = svnurl
- self.baseURL = baseURL
- self.branch = defaultBranch
-
- Source.__init__(self, **kwargs)
-
- if not svnurl and not baseURL:
- raise ValueError("you must use exactly one of svnurl and baseURL")
-
-
- def computeSourceRevision(self, changes):
- if not changes:
- return None
- lastChange = max([int(c.revision) for c in changes])
- return lastChange
-
- def startVC(self, branch, revision, patch):
-
- # handle old slaves
- warnings = []
- slavever = self.slaveVersion("svn", "old")
- if not slavever:
- m = "slave does not have the 'svn' command"
- raise BuildSlaveTooOldError(m)
-
- if self.slaveVersionIsOlderThan("svn", "1.39"):
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- if (branch != self.branch
- and self.args['mode'] in ("update", "copy")):
- m = ("This buildslave (%s) does not know about multiple "
- "branches, and using mode=%s would probably build the "
- "wrong tree. "
- "Refusing to build. Please upgrade the buildslave to "
- "buildbot-0.7.0 or newer." % (self.build.slavename,
- self.args['mode']))
- raise BuildSlaveTooOldError(m)
-
- if slavever == "old":
- # 0.5.0 compatibility
- if self.args['mode'] in ("clobber", "copy"):
- # TODO: use some shell commands to make up for the
- # deficiency, by blowing away the old directory first (thus
- # forcing a full checkout)
- warnings.append("WARNING: this slave can only do SVN updates"
- ", not mode=%s\n" % self.args['mode'])
- log.msg("WARNING: this slave only does mode=update")
- if self.args['mode'] == "export":
- raise BuildSlaveTooOldError("old slave does not have "
- "mode=export")
- self.args['directory'] = self.args['workdir']
- if revision is not None:
- # 0.5.0 can only do HEAD. We have no way of knowing whether
- # the requested revision is HEAD or not, and for
- # slowly-changing trees this will probably do the right
- # thing, so let it pass with a warning
- m = ("WARNING: old slave can only update to HEAD, not "
- "revision=%s" % revision)
- log.msg(m)
- warnings.append(m + "\n")
- revision = "HEAD" # interprets this key differently
- if patch:
- raise BuildSlaveTooOldError("old slave can't do patch")
-
- if self.svnurl:
- assert not branch # we need baseURL= to use branches
- self.args['svnurl'] = self.svnurl
- else:
- self.args['svnurl'] = self.baseURL + branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- if revision is not None:
- revstuff.append("r%s" % revision)
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("svn", self.args)
- self.startCommand(cmd, warnings)
-
-
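# Illustrative sketch, not part of the deleted file: the two documented ways of
# configuring this SVN step (URLs are placeholders). Exactly one of svnurl= and
# baseURL= may be given, as enforced in __init__ above.
svn_fixed = SVN(svnurl="http://svn.example.org/repos/trunk", mode="update")
svn_branchy = SVN(baseURL="http://svn.example.org/repos/",
                  defaultBranch="trunk", mode="copy")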
-class Darcs(Source):
- """Check out a source tree from a Darcs repository at 'repourl'.
-
- To the best of my knowledge, Darcs has no concept of file modes. This
- means the eXecute-bit will be cleared on all source files. As a result,
- you may need to invoke configuration scripts with something like:
-
- C{s(step.Configure, command=['/bin/sh', './configure'])}
- """
-
- name = "darcs"
-
- def __init__(self, repourl=None, baseURL=None, defaultBranch=None,
- **kwargs):
- """
- @type repourl: string
- @param repourl: the URL which points at the Darcs repository. This
- is used as the default branch. Using C{repourl} does
- not enable builds of alternate branches: use
- C{baseURL} to enable this. Use either C{repourl} or
- C{baseURL}, not both.
-
- @param baseURL: if branches are enabled, this is the base URL to
- which a branch name will be appended. It should
- probably end in a slash. Use exactly one of
- C{repourl} and C{baseURL}.
-
- @param defaultBranch: if branches are enabled, this is the branch
- to use if the Build does not specify one
- explicitly. It will simply be appended to
- C{baseURL} and the result handed to the
- 'darcs pull' command.
- """
- self.repourl = repourl
- self.baseURL = baseURL
- self.branch = defaultBranch
- Source.__init__(self, **kwargs)
- assert kwargs['mode'] != "export", \
- "Darcs does not have an 'export' mode"
- if (not repourl and not baseURL) or (repourl and baseURL):
- raise ValueError("you must provide exactly one of repourl and"
- " baseURL")
-
- def startVC(self, branch, revision, patch):
- slavever = self.slaveVersion("darcs")
- if not slavever:
- m = "slave is too old, does not know about darcs"
- raise BuildSlaveTooOldError(m)
-
- if self.slaveVersionIsOlderThan("darcs", "1.39"):
- if revision:
- # TODO: revisit this once we implement computeSourceRevision
- m = "0.6.6 slaves can't handle args['revision']"
- raise BuildSlaveTooOldError(m)
-
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- if (branch != self.branch
- and self.args['mode'] in ("update", "copy")):
- m = ("This buildslave (%s) does not know about multiple "
- "branches, and using mode=%s would probably build the "
- "wrong tree. "
- "Refusing to build. Please upgrade the buildslave to "
- "buildbot-0.7.0 or newer." % (self.build.slavename,
- self.args['mode']))
- raise BuildSlaveTooOldError(m)
-
- if self.repourl:
- assert not branch # we need baseURL= to use branches
- self.args['repourl'] = self.repourl
- else:
- self.args['repourl'] = self.baseURL + branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("darcs", self.args)
- self.startCommand(cmd)
-
-
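# Illustrative sketch, not part of the deleted file: a Darcs step followed by
# the configure workaround suggested in its docstring, using the s()
# step-specification helper style shown there (URL is a placeholder).
s(step.Darcs, repourl="http://darcs.example.org/myproject", mode="update")
# Darcs drops the execute bit, so run configure through a shell explicitly:
s(step.Configure, command=['/bin/sh', './configure'])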
-class Git(Source):
- """Check out a source tree from a git repository 'repourl'."""
-
- name = "git"
-
- def __init__(self, repourl, **kwargs):
- """
- @type repourl: string
- @param repourl: the URL which points at the git repository
- """
- self.branch = None # TODO
- Source.__init__(self, **kwargs)
- self.args['repourl'] = repourl
-
- def startVC(self, branch, revision, patch):
- self.args['branch'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
- slavever = self.slaveVersion("git")
- if not slavever:
- raise BuildSlaveTooOldError("slave is too old, does not know "
- "about git")
- cmd = LoggedRemoteCommand("git", self.args)
- self.startCommand(cmd)
-
-
-class Arch(Source):
- """Check out a source tree from an Arch repository named 'archive'
- available at 'url'. 'version' specifies which version number (development
- line) will be used for the checkout: this is mostly equivalent to a
-    branch name. This class uses the 'tla' tool to do the checkout; to use
-    'baz', see L{Bazaar} instead.
- """
-
- name = "arch"
- # TODO: slaves >0.6.6 will accept args['build-config'], so use it
-
- def __init__(self, url, version, archive=None, **kwargs):
- """
- @type url: string
- @param url: the Arch coordinates of the repository. This is
- typically an http:// URL, but could also be the absolute
- pathname of a local directory instead.
-
- @type version: string
- @param version: the category--branch--version to check out. This is
- the default branch. If a build specifies a different
- branch, it will be used instead of this.
-
- @type archive: string
- @param archive: The archive name. If provided, it must match the one
- that comes from the repository. If not, the
- repository's default will be used.
- """
- self.branch = version
- Source.__init__(self, **kwargs)
- self.args.update({'url': url,
- 'archive': archive,
- })
-
- def computeSourceRevision(self, changes):
- # in Arch, fully-qualified revision numbers look like:
- # arch@buildbot.sourceforge.net--2004/buildbot--dev--0--patch-104
- # For any given builder, all of this is fixed except the patch-104.
- # The Change might have any part of the fully-qualified string, so we
- # just look for the last part. We return the "patch-NN" string.
- if not changes:
- return None
- lastChange = None
- for c in changes:
- if not c.revision:
- continue
- if c.revision.endswith("--base-0"):
- rev = 0
- else:
- i = c.revision.rindex("patch")
- rev = int(c.revision[i+len("patch-"):])
- lastChange = max(lastChange, rev)
- if lastChange is None:
- return None
- if lastChange == 0:
- return "base-0"
- return "patch-%d" % lastChange
-
- def checkSlaveVersion(self, cmd, branch):
- warnings = []
- slavever = self.slaveVersion(cmd)
- if not slavever:
- m = "slave is too old, does not know about %s" % cmd
- raise BuildSlaveTooOldError(m)
-
- # slave 1.28 and later understand 'revision'
- if self.slaveVersionIsOlderThan(cmd, "1.28"):
- if not self.alwaysUseLatest:
- # we don't know whether our requested revision is the latest
- # or not. If the tree does not change very quickly, this will
- # probably build the right thing, so emit a warning rather
- # than refuse to build at all
- m = "WARNING, buildslave is too old to use a revision"
- log.msg(m)
- warnings.append(m + "\n")
-
- if self.slaveVersionIsOlderThan(cmd, "1.39"):
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- if (branch != self.branch
- and self.args['mode'] in ("update", "copy")):
- m = ("This buildslave (%s) does not know about multiple "
- "branches, and using mode=%s would probably build the "
- "wrong tree. "
- "Refusing to build. Please upgrade the buildslave to "
- "buildbot-0.7.0 or newer." % (self.build.slavename,
- self.args['mode']))
- log.msg(m)
- raise BuildSlaveTooOldError(m)
-
- return warnings
-
- def startVC(self, branch, revision, patch):
- self.args['version'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
- warnings = self.checkSlaveVersion("arch", branch)
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- if revision is not None:
- revstuff.append("patch%s" % revision)
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("arch", self.args)
- self.startCommand(cmd, warnings)
-
-
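# Illustrative sketch, not part of the deleted file: an Arch step matching the
# docstring above (coordinates are placeholders). 'version' acts as the default
# branch, and computeSourceRevision() reports revisions as 'base-0' or
# 'patch-NN', e.g. a Change ending in '--patch-104' yields 'patch-104'.
arch_step = Arch(url="http://arch.example.org/archives/myproject",
                 version="myproject--dev--0",
                 archive="myarchive@example.org--2004")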
-class Bazaar(Arch):
- """Bazaar is an alternative client for Arch repositories. baz is mostly
- compatible with tla, but archive registration is slightly different."""
-
- # TODO: slaves >0.6.6 will accept args['build-config'], so use it
-
- def __init__(self, url, version, archive, **kwargs):
- """
- @type url: string
- @param url: the Arch coordinates of the repository. This is
- typically an http:// URL, but could also be the absolute
- pathname of a local directory instead.
-
- @type version: string
- @param version: the category--branch--version to check out
-
- @type archive: string
- @param archive: The archive name (required). This must always match
- the one that comes from the repository, otherwise the
- buildslave will attempt to get sources from the wrong
- archive.
- """
- self.branch = version
- Source.__init__(self, **kwargs)
- self.args.update({'url': url,
- 'archive': archive,
- })
-
- def startVC(self, branch, revision, patch):
- self.args['version'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
- warnings = self.checkSlaveVersion("bazaar", branch)
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- if revision is not None:
- revstuff.append("patch%s" % revision)
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("bazaar", self.args)
- self.startCommand(cmd, warnings)
-
-class Mercurial(Source):
- """Check out a source tree from a mercurial repository 'repourl'."""
-
- name = "hg"
-
- def __init__(self, repourl=None, baseURL=None, defaultBranch=None,
- **kwargs):
- """
- @type repourl: string
- @param repourl: the URL which points at the Mercurial repository.
- This is used as the default branch. Using C{repourl}
- does not enable builds of alternate branches: use
- C{baseURL} to enable this. Use either C{repourl} or
- C{baseURL}, not both.
-
- @param baseURL: if branches are enabled, this is the base URL to
- which a branch name will be appended. It should
- probably end in a slash. Use exactly one of
- C{repourl} and C{baseURL}.
-
- @param defaultBranch: if branches are enabled, this is the branch
- to use if the Build does not specify one
- explicitly. It will simply be appended to
- C{baseURL} and the result handed to the
- 'hg clone' command.
- """
- self.repourl = repourl
- self.baseURL = baseURL
- self.branch = defaultBranch
- Source.__init__(self, **kwargs)
- if (not repourl and not baseURL) or (repourl and baseURL):
- raise ValueError("you must provide exactly one of repourl and"
- " baseURL")
-
- def startVC(self, branch, revision, patch):
- slavever = self.slaveVersion("hg")
- if not slavever:
- raise BuildSlaveTooOldError("slave is too old, does not know "
- "about hg")
-
- if self.repourl:
- assert not branch # we need baseURL= to use branches
- self.args['repourl'] = self.repourl
- else:
- self.args['repourl'] = self.baseURL + branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("hg", self.args)
- self.startCommand(cmd)
-
-
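# Illustrative sketch, not part of the deleted file: the Mercurial step follows
# the same repourl/baseURL pattern as SVN and Darcs (URLs are placeholders).
hg_fixed = Mercurial(repourl="http://hg.example.org/myproject", mode="update")
hg_branchy = Mercurial(baseURL="http://hg.example.org/",
                       defaultBranch="myproject", mode="clobber")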
-class todo_P4(Source):
- name = "p4"
-
- # to create the working directory for the first time:
- # need to create a View. The 'Root' parameter will have to be filled
- # in by the buildslave with the abspath of the basedir. Then the
- # setup process involves 'p4 client' to set up the view. After
- # that, 'p4 sync' does all the necessary updating.
- # P4PORT=P4PORT P4CLIENT=name p4 client
-
- def __init__(self, p4port, view, **kwargs):
- Source.__init__(self, **kwargs)
- self.args.update({'p4port': p4port,
- 'view': view,
- })
-
- def startVC(self, branch, revision, patch):
- cmd = LoggedRemoteCommand("p4", self.args)
- self.startCommand(cmd)
-
-class P4Sync(Source):
- """This is a partial solution for using a P4 source repository. You are
- required to manually set up each build slave with a useful P4
- environment, which means setting various per-slave environment variables,
- and creating a P4 client specification which maps the right files into
- the slave's working directory. Once you have done that, this step merely
- performs a 'p4 sync' to update that workspace with the newest files.
-
- Each slave needs the following environment:
-
- - PATH: the 'p4' binary must be on the slave's PATH
- - P4USER: each slave needs a distinct user account
- - P4CLIENT: each slave needs a distinct client specification
-
- You should use 'p4 client' (?) to set up a client view spec which maps
- the desired files into $SLAVEBASE/$BUILDERBASE/source .
- """
-
- name = "p4sync"
-
- def __init__(self, p4port, p4user, p4passwd, p4client, **kwargs):
- assert kwargs['mode'] == "copy", "P4Sync can only be used in mode=copy"
- self.branch = None
- Source.__init__(self, **kwargs)
- self.args['p4port'] = p4port
- self.args['p4user'] = p4user
- self.args['p4passwd'] = p4passwd
- self.args['p4client'] = p4client
-
- def computeSourceRevision(self, changes):
- if not changes:
- return None
- lastChange = max([int(c.revision) for c in changes])
- return lastChange
-
- def startVC(self, branch, revision, patch):
- slavever = self.slaveVersion("p4sync")
- assert slavever, "slave is too old, does not know about p4"
- cmd = LoggedRemoteCommand("p4sync", self.args)
- self.startCommand(cmd)
-
-
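# Illustrative sketch, not part of the deleted file: the per-slave environment
# and master-side step that the P4Sync docstring describes (all values are
# placeholders). mode='copy' is required by the assertion in __init__.
#
#   assumed shell setup on each buildslave:
#     export P4USER=buildslave1
#     export P4CLIENT=buildslave1-view   # client spec mapping files into .../source
p4_step = P4Sync(p4port="perforce.example.org:1666",
                 p4user="buildslave1", p4passwd="secret",
                 p4client="buildslave1-view", mode="copy")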
-class Dummy(BuildStep):
- """I am a dummy no-op step, which runs entirely on the master, and simply
- waits 5 seconds before finishing with SUCCESS
- """
-
- haltOnFailure = True
- name = "dummy"
-
- def __init__(self, timeout=5, **kwargs):
- """
- @type timeout: int
- @param timeout: the number of seconds to delay before completing
- """
- BuildStep.__init__(self, **kwargs)
- self.timeout = timeout
- self.timer = None
-
- def start(self):
- self.step_status.setColor("yellow")
- self.step_status.setText(["delay", "%s secs" % self.timeout])
- self.timer = reactor.callLater(self.timeout, self.done)
-
- def interrupt(self, reason):
- if self.timer:
- self.timer.cancel()
- self.timer = None
- self.step_status.setColor("red")
- self.step_status.setText(["delay", "interrupted"])
- self.finished(FAILURE)
-
- def done(self):
- self.step_status.setColor("green")
- self.finished(SUCCESS)
-
-class FailingDummy(Dummy):
- """I am a dummy no-op step that 'runs' master-side and finishes (with a
- FAILURE status) after 5 seconds."""
-
- name = "failing dummy"
-
- def start(self):
- self.step_status.setColor("yellow")
- self.step_status.setText(["boom", "%s secs" % self.timeout])
- self.timer = reactor.callLater(self.timeout, self.done)
-
- def done(self):
- self.step_status.setColor("red")
- self.finished(FAILURE)
-
-class RemoteDummy(LoggingBuildStep):
- """I am a dummy no-op step that runs on the remote side and
- simply waits 5 seconds before completing with success.
- See L{buildbot.slave.commands.DummyCommand}
- """
-
- haltOnFailure = True
- name = "remote dummy"
-
- def __init__(self, timeout=5, **kwargs):
- """
- @type timeout: int
- @param timeout: the number of seconds to delay
- """
- LoggingBuildStep.__init__(self, **kwargs)
- self.timeout = timeout
- self.description = ["remote", "delay", "%s secs" % timeout]
-
- def describe(self, done=False):
- return self.description
-
- def start(self):
- args = {'timeout': self.timeout}
- cmd = LoggedRemoteCommand("dummy", args)
- self.startCommand(cmd)
-
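# Illustrative sketch, not part of the deleted file: the dummy steps are mainly
# useful for exercising the master/slave machinery itself, e.g. in a test
# builder's step list (using the s() specification helper seen above).
s(step.Dummy, timeout=5)        # waits 5 seconds on the master, then SUCCESS
s(step.FailingDummy, timeout=5) # same delay, but finishes with FAILURE
s(step.RemoteDummy, timeout=5)  # the delay happens on the buildslave instead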
-class Configure(ShellCommand):
-
- name = "configure"
- haltOnFailure = 1
- description = ["configuring"]
- descriptionDone = ["configure"]
- command = ["./configure"]
-
-class OOConfigure(ShellCommand):
-
- name = "configure"
- haltOnFailure = 1
- description = ["configuring"]
- descriptionDone = ["configure"]
- command = ["./configure"]
- config = None
-
- def __init__(self, config, **kwargs):
- self.config = config
- ShellCommand.__init__(self, **kwargs)
-
- def start(self):
- command = self._interpolateProperties(self.command)
- config = self.build.config + " " + self.config
- # create the actual RemoteShellCommand instance now
- kwargs = self.remote_kwargs
- kwargs['command'] = command + " " + config
- cmd = RemoteShellCommand(**kwargs)
- self.setupEnvironment(cmd)
- self.startCommand(cmd)
-
-
-class OOBootstrap(TCSHShellCommand):
-
- name = "bootstrap"
- haltOnFailure = 1
-    description = ["bootstrapping"]
- descriptionDone = ["bootstrap"]
- command = ["./bootstrap"]
-
-class OOEnvSet(TCSHShellCommand):
-
- name = "source"
- haltOnFailure = 1
- description = ["environment_setting"]
- descriptionDone = ["environment_set"]
- command = ["source"]
-
-class OORehash(TCSHShellCommand):
-
- name = "rehash"
- haltOnFailure = 1
- description = ["rehashing"]
- descriptionDone = ["rehash"]
- command = ["rehash"]
-
-
-
-class OOCompile(ShellCommand):
-
- name = "compile"
- haltOnFailure = 1
- description = ["compiling"]
- descriptionDone = ["compile"]
- command = ["dmake"]
-
- OFFprogressMetrics = ['output']
- # things to track: number of files compiled, number of directories
- # traversed (assuming 'make' is being used)
-
- #def createSummary(self, cmd):
- # command = "grep warning: " + log.getText()
- # self.addCompleteLog('warnings',os.popen(command).read())
- def createSummary(self, log):
- # TODO: grep for the characteristic GCC warning/error lines and
- # assemble them into a pair of buffers
- try:
- logFileName = self.step_status.logs[0].getFilename()
- print '%s' %logFileName
-
- command = "./create_logs.pl " + logFileName
- result = os.popen(command).read()
-
- summary_log_file_name = logFileName + "_brief.html"
- summary_log_file = open(summary_log_file_name)
- self.addHTMLLog('summary log', summary_log_file.read())
-
- command = "grep warning: "+ logFileName
- warnings = os.popen(command).read()
-
- command = "grep error: "+ logFileName
- errors = os.popen(command).read()
-
- command = "tail -50 "+logFileName
- tail = os.popen(command).read()
-
- if warnings != "" :
- self.addCompleteLog('warnings',warnings)
-
- if errors != "":
- self.addCompleteLog('errors',errors)
-
- if tail != "":
- self.addCompleteLog('tail',tail)
-
- except:
- #log.msg("Exception: Cannot open logFile")
- print "cannot execute createSummary after OOCompile"
-
-
-class OOSmokeTest(ShellCommand):
-
- name = "smokeTest"
- #haltOnFailure = 1
- description = ["smoke_testing"]
- descriptionDone = ["Smoke Test"]
- command = ["build"]
-
-class OOInstallSet(ShellCommand):
-
- name = "Install_Set"
- #haltOnFailure = 1
- description = ["generating install set"]
- descriptionDone = ["install set"]
- command = ["echo"]
-
- def start(self):
- buildstatus = self.build.build_status
- installset_filename = buildstatus.getBuilder().getName() +"_build" + `buildstatus.getNumber()` + "_installset.tar.gz"
- installset_filename = installset_filename.replace(" ","_")
- branch, revision, patch = buildstatus.getSourceStamp()
- #command = "cd instsetoo_native && find -wholename '*/OpenOffice/*install*/*download' -exec tar -zcvf "+ installset_filename +" {} \; && ../../../dav2 --dir=" + branch + " --file="+ installset_filename +" --user=" + self.user + " --pass=" + self.password
-
- command = "cd instsetoo_native && find -path '*/OpenOffice/*install*/*download' -exec tar -zcvf "+ installset_filename +" {} \; && scp "+ installset_filename + " buildmaster@ooo-staging.osuosl.org:/home/buildmaster/buildmaster/installsets/"
-
-
- kwargs = self.remote_kwargs
- kwargs['command'] = command
- cmd = RemoteShellCommand(timeout=120*60, **kwargs)
- self.setupEnvironment(cmd)
- self.startCommand(cmd)
-
-
- def createSummary(self, log):
- buildstatus = self.build.build_status
- installset_filename = buildstatus.getBuilder().getName() +"_build" + `buildstatus.getNumber()` + "_installset.tar.gz"
- installset_filename = installset_filename.replace(" ","_")
- #branch, revision, patch = buildstatus.getSourceStamp()
- #url = "http://ooo-staging.osuosl.org/DAV/" +branch+ "/" + installset_filename
- result = "To download installset click <a href='"+installset_filename+"'> here </a>"
- #if buildstatus.getResults() == builder.SUCCESS:
- #if log.getText().find("exit code 0") != -1:
- self.addHTMLLog('download', result)
-
-
-class Compile(ShellCommand):
-
- name = "compile"
- haltOnFailure = 1
- description = ["compiling"]
- descriptionDone = ["compile"]
- command = ["make", "all"]
-
- OFFprogressMetrics = ['output']
- # things to track: number of files compiled, number of directories
- # traversed (assuming 'make' is being used)
-
- def createSummary(self, cmd):
- # TODO: grep for the characteristic GCC warning/error lines and
- # assemble them into a pair of buffers
- pass
-
-class Test(ShellCommand):
-
- name = "test"
- warnOnFailure = 1
- description = ["testing"]
- descriptionDone = ["test"]
- command = ["make", "test"]
diff --git a/buildbot/buildbot-source/build/lib/buildbot/process/step_twisted.py b/buildbot/buildbot-source/build/lib/buildbot/process/step_twisted.py
deleted file mode 100644
index 36d8632bf..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/process/step_twisted.py
+++ /dev/null
@@ -1,754 +0,0 @@
-# -*- test-case-name: buildbot.test.test_twisted -*-
-
-from twisted.python import log, failure
-
-from buildbot.status import tests, builder
-from buildbot.status.builder import SUCCESS, FAILURE, WARNINGS, SKIPPED
-from buildbot.process import step
-from buildbot.process.step import BuildStep, ShellCommand
-
-try:
- import cStringIO as StringIO
-except ImportError:
- import StringIO
-import os, re, types
-
-# BuildSteps that are specific to the Twisted source tree
-
-class HLint(ShellCommand):
- """I run a 'lint' checker over a set of .xhtml files. Any deviations
-    from recommended style are flagged and put in the output log.
-
- This step looks at .changes in the parent Build to extract a list of
- Lore XHTML files to check."""
-
- name = "hlint"
- description = ["running", "hlint"]
- descriptionDone = ["hlint"]
- warnOnWarnings = True
- warnOnFailure = True
- # TODO: track time, but not output
- warnings = 0
-
- def __init__(self, python=None, **kwargs):
- ShellCommand.__init__(self, **kwargs)
- self.python = python
-
- def start(self):
- # create the command
- htmlFiles = {}
- for f in self.build.allFiles():
- if f.endswith(".xhtml") and not f.startswith("sandbox/"):
- htmlFiles[f] = 1
- # remove duplicates
- hlintTargets = htmlFiles.keys()
- hlintTargets.sort()
- if not hlintTargets:
- return SKIPPED
- self.hlintFiles = hlintTargets
- c = []
- if self.python:
- c.append(self.python)
- c += ["bin/lore", "-p", "--output", "lint"] + self.hlintFiles
- self.setCommand(c)
-
- # add an extra log file to show the .html files we're checking
- self.addCompleteLog("files", "\n".join(self.hlintFiles)+"\n")
-
- ShellCommand.start(self)
-
- def commandComplete(self, cmd):
- # TODO: remove the 'files' file (a list of .xhtml files that were
- # submitted to hlint) because it is available in the logfile and
- # mostly exists to give the user an idea of how long the step will
-        # take anyway.
- lines = cmd.log.getText().split("\n")
- warningLines = filter(lambda line:':' in line, lines)
- if warningLines:
- self.addCompleteLog("warnings", "".join(warningLines))
- warnings = len(warningLines)
- self.warnings = warnings
-
- def evaluateCommand(self, cmd):
- # warnings are in stdout, rc is always 0, unless the tools break
- if cmd.rc != 0:
- return FAILURE
- if self.warnings:
- return WARNINGS
- return SUCCESS
-
- def getText2(self, cmd, results):
- if cmd.rc != 0:
- return ["hlint"]
- return ["%d hlin%s" % (self.warnings,
- self.warnings == 1 and 't' or 'ts')]
-
-def countFailedTests(output):
- # start scanning 10kb from the end, because there might be a few kb of
- # import exception tracebacks between the total/time line and the errors
- # line
- chunk = output[-10000:]
- lines = chunk.split("\n")
- lines.pop() # blank line at end
- # lines[-3] is "Ran NN tests in 0.242s"
- # lines[-2] is blank
- # lines[-1] is 'OK' or 'FAILED (failures=1, errors=12)'
- # or 'FAILED (failures=1)'
- # or "PASSED (skips=N, successes=N)" (for Twisted-2.0)
- # there might be other lines dumped here. Scan all the lines.
- res = {'total': None,
- 'failures': 0,
- 'errors': 0,
- 'skips': 0,
- 'expectedFailures': 0,
- 'unexpectedSuccesses': 0,
- }
- for l in lines:
- out = re.search(r'Ran (\d+) tests', l)
- if out:
- res['total'] = int(out.group(1))
- if (l.startswith("OK") or
- l.startswith("FAILED ") or
- l.startswith("PASSED")):
- # the extra space on FAILED_ is to distinguish the overall
- # status from an individual test which failed. The lack of a
- # space on the OK is because it may be printed without any
- # additional text (if there are no skips,etc)
- out = re.search(r'failures=(\d+)', l)
- if out: res['failures'] = int(out.group(1))
- out = re.search(r'errors=(\d+)', l)
- if out: res['errors'] = int(out.group(1))
- out = re.search(r'skips=(\d+)', l)
- if out: res['skips'] = int(out.group(1))
- out = re.search(r'expectedFailures=(\d+)', l)
- if out: res['expectedFailures'] = int(out.group(1))
- out = re.search(r'unexpectedSuccesses=(\d+)', l)
- if out: res['unexpectedSuccesses'] = int(out.group(1))
- # successes= is a Twisted-2.0 addition, and is not currently used
- out = re.search(r'successes=(\d+)', l)
- if out: res['successes'] = int(out.group(1))
-
- return res
-
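# Illustrative sketch, not part of the deleted file: what countFailedTests()
# extracts from a typical trial log tail.
sample = """
Ran 7 tests in 0.242s

FAILED (failures=1, errors=2)
"""
res = countFailedTests(sample)
# res['total'] == 7, res['failures'] == 1, res['errors'] == 2,
# and the remaining counters stay at 0.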
-UNSPECIFIED=() # since None is a valid choice
-
-class Trial(ShellCommand):
- """I run a unit test suite using 'trial', a unittest-like testing
- framework that comes with Twisted. Trial is used to implement Twisted's
- own unit tests, and is the unittest-framework of choice for many projects
- that use Twisted internally.
-
- Projects that use trial typically have all their test cases in a 'test'
- subdirectory of their top-level library directory. I.e. for my package
- 'petmail', the tests are in 'petmail/test/test_*.py'. More complicated
- packages (like Twisted itself) may have multiple test directories, like
- 'twisted/test/test_*.py' for the core functionality and
- 'twisted/mail/test/test_*.py' for the email-specific tests.
-
- To run trial tests, you run the 'trial' executable and tell it where the
- test cases are located. The most common way of doing this is with a
- module name. For petmail, I would run 'trial petmail.test' and it would
- locate all the test_*.py files under petmail/test/, running every test
- case it could find in them. Unlike the unittest.py that comes with
- Python, you do not run the test_foo.py as a script; you always let trial
- do the importing and running. The 'tests' parameter controls which tests
- trial will run: it can be a string or a list of strings.
-
- You can also use a higher-level module name and pass the --recursive flag
- to trial: this will search recursively within the named module to find
- all test cases. For large multiple-test-directory projects like Twisted,
- this means you can avoid specifying all the test directories explicitly.
- Something like 'trial --recursive twisted' will pick up everything.
-
- To find these test cases, you must set a PYTHONPATH that allows something
- like 'import petmail.test' to work. For packages that don't use a
- separate top-level 'lib' directory, PYTHONPATH=. will work, and will use
- the test cases (and the code they are testing) in-place.
- PYTHONPATH=build/lib or PYTHONPATH=build/lib.$ARCH are also useful when
-    you do a 'setup.py build' step first. The 'testpath' attribute of this
- class controls what PYTHONPATH= is set to.
-
- Trial has the ability (through the --testmodule flag) to run only the set
- of test cases named by special 'test-case-name' tags in source files. We
- can get the list of changed source files from our parent Build and
- provide them to trial, thus running the minimal set of test cases needed
- to cover the Changes. This is useful for quick builds, especially in
- trees with a lot of test cases. The 'testChanges' parameter controls this
- feature: if set, it will override 'tests'.
-
- The trial executable itself is typically just 'trial' (which is usually
- found on your $PATH as /usr/bin/trial), but it can be overridden with the
- 'trial' parameter. This is useful for Twisted's own unittests, which want
- to use the copy of bin/trial that comes with the sources. (when bin/trial
- discovers that it is living in a subdirectory named 'Twisted', it assumes
- it is being run from the source tree and adds that parent directory to
- PYTHONPATH. Therefore the canonical way to run Twisted's own unittest
- suite is './bin/trial twisted.test' rather than 'PYTHONPATH=.
- /usr/bin/trial twisted.test', especially handy when /usr/bin/trial has
- not yet been installed).
-
- To influence the version of python being used for the tests, or to add
- flags to the command, set the 'python' parameter. This can be a string
- (like 'python2.2') or a list (like ['python2.3', '-Wall']).
-
- Trial creates and switches into a directory named _trial_temp/ before
- running the tests, and sends the twisted log (which includes all
- exceptions) to a file named test.log . This file will be pulled up to
- the master where it can be seen as part of the status output.
-
- There are some class attributes which may be usefully overridden
- by subclasses. 'trialMode' and 'trialArgs' can influence the trial
- command line.
- """
-
- flunkOnFailure = True
- python = None
- trial = "trial"
- trialMode = ["-to"]
- trialArgs = []
- testpath = UNSPECIFIED # required (but can be None)
- testChanges = False # TODO: needs better name
- recurse = False
- reactor = None
- randomly = False
- tests = None # required
-
- def __init__(self, reactor=UNSPECIFIED, python=None, trial=None,
- testpath=UNSPECIFIED,
- tests=None, testChanges=None,
- recurse=None, randomly=None,
- trialMode=None, trialArgs=None,
- **kwargs):
- """
- @type testpath: string
- @param testpath: use in PYTHONPATH when running the tests. If
- None, do not set PYTHONPATH. Setting this to '.' will
- cause the source files to be used in-place.
-
- @type python: string (without spaces) or list
- @param python: which python executable to use. Will form the start of
- the argv array that will launch trial. If you use this,
- you should set 'trial' to an explicit path (like
- /usr/bin/trial or ./bin/trial). Defaults to None, which
- leaves it out entirely (running 'trial args' instead of
- 'python ./bin/trial args'). Likely values are 'python',
- ['python2.2'], ['python', '-Wall'], etc.
-
- @type trial: string
- @param trial: which 'trial' executable to run.
- Defaults to 'trial', which will cause $PATH to be
- searched and probably find /usr/bin/trial . If you set
- 'python', this should be set to an explicit path (because
- 'python2.3 trial' will not work).
-
- @type trialMode: list of strings
- @param trialMode: a list of arguments to pass to trial, specifically
- to set the reporting mode. This defaults to ['-to']
- which means 'verbose colorless output' to the trial
- that comes with Twisted-2.0.x and at least -2.1.0 .
- Newer versions of Twisted may come with a trial
- that prefers ['--reporter=bwverbose'].
-
- @type trialArgs: list of strings
- @param trialArgs: a list of arguments to pass to trial, available to
- turn on any extra flags you like. Defaults to [].
-
- @type tests: list of strings
- @param tests: a list of test modules to run, like
- ['twisted.test.test_defer', 'twisted.test.test_process'].
- If this is a string, it will be converted into a one-item
- list.
-
- @type testChanges: boolean
- @param testChanges: if True, ignore the 'tests' parameter and instead
- ask the Build for all the files that make up the
- Changes going into this build. Pass these filenames
- to trial and ask it to look for test-case-name
- tags, running just the tests necessary to cover the
- changes.
-
- @type recurse: boolean
- @param recurse: If True, pass the --recurse option to trial, allowing
- test cases to be found in deeper subdirectories of the
- modules listed in 'tests'. This does not appear to be
- necessary when using testChanges.
-
- @type reactor: string
- @param reactor: which reactor to use, like 'gtk' or 'java'. If not
- provided, the Twisted's usual platform-dependent
- default is used.
-
- @type randomly: boolean
- @param randomly: if True, add the --random=0 argument, which instructs
- trial to run the unit tests in a random order each
- time. This occasionally catches problems that might be
- masked when one module always runs before another
- (like failing to make registerAdapter calls before
- lookups are done).
-
- @type kwargs: dict
- @param kwargs: parameters. The following parameters are inherited from
- L{ShellCommand} and may be useful to set: workdir,
- haltOnFailure, flunkOnWarnings, flunkOnFailure,
- warnOnWarnings, warnOnFailure, want_stdout, want_stderr,
- timeout.
- """
- ShellCommand.__init__(self, **kwargs)
-
- if python:
- self.python = python
- if self.python is not None:
- if type(self.python) is str:
- self.python = [self.python]
- for s in self.python:
- if " " in s:
- # this is not strictly an error, but I suspect more
- # people will accidentally try to use python="python2.3
- # -Wall" than will use embedded spaces in a python flag
-                    log.msg("python= component '%s' has spaces" % s)
- log.msg("To add -Wall, use python=['python', '-Wall']")
- why = "python= value has spaces, probably an error"
- raise ValueError(why)
-
- if trial:
- self.trial = trial
- if " " in self.trial:
- raise ValueError("trial= value has spaces")
- if trialMode is not None:
- self.trialMode = trialMode
- if trialArgs is not None:
- self.trialArgs = trialArgs
-
- if testpath is not UNSPECIFIED:
- self.testpath = testpath
- if self.testpath is UNSPECIFIED:
- raise ValueError("You must specify testpath= (it can be None)")
- assert isinstance(self.testpath, str) or self.testpath is None
-
- if reactor is not UNSPECIFIED:
- self.reactor = reactor
-
- if tests is not None:
- self.tests = tests
- if type(self.tests) is str:
- self.tests = [self.tests]
- if testChanges is not None:
- self.testChanges = testChanges
- #self.recurse = True # not sure this is necessary
-
- if not self.testChanges and self.tests is None:
- raise ValueError("Must either set testChanges= or provide tests=")
-
- if recurse is not None:
- self.recurse = recurse
- if randomly is not None:
- self.randomly = randomly
-
- # build up most of the command, then stash it until start()
- command = []
- if self.python:
- command.extend(self.python)
- command.append(self.trial)
- command.extend(self.trialMode)
- if self.recurse:
- command.append("--recurse")
- if self.reactor:
-            command.append("--reactor=%s" % self.reactor)
- if self.randomly:
- command.append("--random=0")
- command.extend(self.trialArgs)
- self.command = command
-
- if self.reactor:
- self.description = ["testing", "(%s)" % self.reactor]
- self.descriptionDone = ["tests"]
- # commandComplete adds (reactorname) to self.text
- else:
- self.description = ["testing"]
- self.descriptionDone = ["tests"]
-
- def setupEnvironment(self, cmd):
- ShellCommand.setupEnvironment(self, cmd)
- if self.testpath != None:
- e = cmd.args['env']
- if e is None:
- cmd.args['env'] = {'PYTHONPATH': self.testpath}
- else:
- # TODO: somehow, each build causes another copy of
- # self.testpath to get prepended
- if e.get('PYTHONPATH', "") == "":
- e['PYTHONPATH'] = self.testpath
- else:
- e['PYTHONPATH'] = self.testpath + ":" + e['PYTHONPATH']
- try:
- p = cmd.args['env']['PYTHONPATH']
- if type(p) is not str:
- log.msg("hey, not a string:", p)
- assert False
- except (KeyError, TypeError):
- # KeyError if args doesn't have ['env']
- # KeyError if args['env'] doesn't have ['PYTHONPATH']
- # TypeError if args is None
- pass
-
- def start(self):
- # now that self.build.allFiles() is nailed down, finish building the
- # command
- if self.testChanges:
- for f in self.build.allFiles():
- if f.endswith(".py"):
- self.command.append("--testmodule=%s" % f)
- else:
- self.command.extend(self.tests)
- log.msg("Trial.start: command is", self.command)
- ShellCommand.start(self)
-
- def _commandComplete(self, cmd):
- # before doing the summary, etc, fetch _trial_temp/test.log
- # TODO: refactor ShellCommand so I don't have to override such
- # an internal method
- catcmd = ["cat", "_trial_temp/test.log"]
- c2 = step.RemoteShellCommand(command=catcmd,
- workdir=self.workdir,
- )
- self.cmd = c2
- loog = self.addLog("test.log")
- c2.useLog(loog, True)
- d = c2.run(self, self.remote)
- d.addCallback(self._commandComplete2, cmd)
- return d
-
- def _commandComplete2(self, c2, cmd):
- # pass the original RemoteShellCommand to the summarizer
- return ShellCommand._commandComplete(self, cmd)
-
- def rtext(self, fmt='%s'):
- if self.reactor:
- rtext = fmt % self.reactor
- return rtext.replace("reactor", "")
- return ""
-
-
- def commandComplete(self, cmd):
- # figure out all status, then let the various hook functions return
- # different pieces of it
-
- output = cmd.log.getText()
- counts = countFailedTests(output)
-
- total = counts['total']
- failures, errors = counts['failures'], counts['errors']
- parsed = (total != None)
- text = []
- text2 = ""
-
- if cmd.rc == 0:
- if parsed:
- results = SUCCESS
- if total:
- text += ["%d %s" % \
- (total,
- total == 1 and "test" or "tests"),
- "passed"]
- else:
- text += ["no tests", "run"]
- else:
- results = FAILURE
- text += ["testlog", "unparseable"]
- text2 = "tests"
- else:
- # something failed
- results = FAILURE
- if parsed:
- text.append("tests")
- if failures:
- text.append("%d %s" % \
- (failures,
- failures == 1 and "failure" or "failures"))
- if errors:
- text.append("%d %s" % \
- (errors,
- errors == 1 and "error" or "errors"))
- count = failures + errors
- text2 = "%d tes%s" % (count, (count == 1 and 't' or 'ts'))
- else:
- text += ["tests", "failed"]
- text2 = "tests"
-
- if counts['skips']:
- text.append("%d %s" % \
- (counts['skips'],
- counts['skips'] == 1 and "skip" or "skips"))
- if counts['expectedFailures']:
- text.append("%d %s" % \
- (counts['expectedFailures'],
- counts['expectedFailures'] == 1 and "todo"
- or "todos"))
- if 0: # TODO
- results = WARNINGS
- if not text2:
- text2 = "todo"
-
- if 0:
- # ignore unexpectedSuccesses for now, but it should really mark
- # the build WARNING
- if counts['unexpectedSuccesses']:
- text.append("%d surprises" % counts['unexpectedSuccesses'])
- results = WARNINGS
- if not text2:
- text2 = "tests"
-
- if self.reactor:
- text.append(self.rtext('(%s)'))
- if text2:
- text2 = "%s %s" % (text2, self.rtext('(%s)'))
-
- self.results = results
- self.text = text
- self.text2 = [text2]
-
- def addTestResult(self, testname, results, text, tlog):
- if self.reactor is not None:
- testname = (self.reactor,) + testname
- tr = builder.TestResult(testname, results, text, logs={'log': tlog})
- #self.step_status.build.addTestResult(tr)
- self.build.build_status.addTestResult(tr)
-
- def createSummary(self, loog):
- output = loog.getText()
- problems = ""
- sio = StringIO.StringIO(output)
- warnings = {}
- while 1:
- line = sio.readline()
- if line == "":
- break
- if line.find(" exceptions.DeprecationWarning: ") != -1:
- # no source
- warning = line # TODO: consider stripping basedir prefix here
- warnings[warning] = warnings.get(warning, 0) + 1
- elif (line.find(" DeprecationWarning: ") != -1 or
- line.find(" UserWarning: ") != -1):
- # next line is the source
- warning = line + sio.readline()
- warnings[warning] = warnings.get(warning, 0) + 1
- elif line.find("Warning: ") != -1:
- warning = line
- warnings[warning] = warnings.get(warning, 0) + 1
-
- if line.find("=" * 60) == 0 or line.find("-" * 60) == 0:
- problems += line
- problems += sio.read()
- break
-
- if problems:
- self.addCompleteLog("problems", problems)
- # now parse the problems for per-test results
- pio = StringIO.StringIO(problems)
- pio.readline() # eat the first separator line
- testname = None
- done = False
- while not done:
- while 1:
- line = pio.readline()
- if line == "":
- done = True
- break
- if line.find("=" * 60) == 0:
- break
- if line.find("-" * 60) == 0:
- # the last case has --- as a separator before the
- # summary counts are printed
- done = True
- break
- if testname is None:
- # the first line after the === is like:
-# EXPECTED FAILURE: testLackOfTB (twisted.test.test_failure.FailureTestCase)
-# SKIPPED: testRETR (twisted.test.test_ftp.TestFTPServer)
-# FAILURE: testBatchFile (twisted.conch.test.test_sftp.TestOurServerBatchFile)
- r = re.search(r'^([^:]+): (\w+) \(([\w\.]+)\)', line)
- if not r:
- # TODO: cleanup, if there are no problems,
- # we hit here
- continue
- result, name, case = r.groups()
- testname = tuple(case.split(".") + [name])
- results = {'SKIPPED': SKIPPED,
- 'EXPECTED FAILURE': SUCCESS,
- 'UNEXPECTED SUCCESS': WARNINGS,
- 'FAILURE': FAILURE,
- 'ERROR': FAILURE,
- 'SUCCESS': SUCCESS, # not reported
- }.get(result, WARNINGS)
- text = result.lower().split()
- loog = line
- # the next line is all dashes
- loog += pio.readline()
- else:
- # the rest goes into the log
- loog += line
- if testname:
- self.addTestResult(testname, results, text, loog)
- testname = None
-
- if warnings:
- lines = warnings.keys()
- lines.sort()
- self.addCompleteLog("warnings", "".join(lines))
-
- def evaluateCommand(self, cmd):
- return self.results
-
- def getText(self, cmd, results):
- return self.text
- def getText2(self, cmd, results):
- return self.text2
-
-
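# Illustrative sketch, not part of the deleted file: two Trial configurations
# matching the docstring above (module names and paths are the docstring's own
# placeholders).
trial_simple = Trial(testpath=".", tests=["petmail.test"])
trial_twisted = Trial(testpath=None, trial="./bin/trial",
                      tests=["twisted.test"])
# testChanges=True could replace tests= to run only the test-case-name modules
# covering the Changes in this build.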
-class ProcessDocs(ShellCommand):
- """I build all docs. This requires some LaTeX packages to be installed.
- It will result in the full documentation book (dvi, pdf, etc).
-
- """
-
- name = "process-docs"
- warnOnWarnings = 1
- command = ["admin/process-docs"]
- description = ["processing", "docs"]
- descriptionDone = ["docs"]
- # TODO: track output and time
-
- def __init__(self, **kwargs):
- """
- @type workdir: string
- @keyword workdir: the workdir to start from: must be the base of the
- Twisted tree
-
- @type results: triple of (int, int, string)
- @keyword results: [rc, warnings, output]
- - rc==0 if all files were converted successfully.
- - warnings is a count of hlint warnings.
- - output is the verbose output of the command.
- """
- ShellCommand.__init__(self, **kwargs)
-
- def createSummary(self, log):
- output = log.getText()
-        # hlint warnings are of the format: 'WARNING: file:line:col: stuff'
- # latex warnings start with "WARNING: LaTeX Warning: stuff", but
- # sometimes wrap around to a second line.
- lines = output.split("\n")
- warningLines = []
- wantNext = False
- for line in lines:
- wantThis = wantNext
- wantNext = False
- if line.startswith("WARNING: "):
- wantThis = True
- wantNext = True
- if wantThis:
- warningLines.append(line)
-
- if warningLines:
- self.addCompleteLog("warnings", "\n".join(warningLines) + "\n")
- self.warnings = len(warningLines)
-
- def evaluateCommand(self, cmd):
- if cmd.rc != 0:
- return FAILURE
- if self.warnings:
- return WARNINGS
- return SUCCESS
-
- def getText(self, cmd, results):
- if results == SUCCESS:
- return ["docs", "successful"]
- if results == WARNINGS:
- return ["docs",
- "%d warnin%s" % (self.warnings,
- self.warnings == 1 and 'g' or 'gs')]
- if results == FAILURE:
- return ["docs", "failed"]
-
- def getText2(self, cmd, results):
- if results == WARNINGS:
- return ["%d do%s" % (self.warnings,
- self.warnings == 1 and 'c' or 'cs')]
- return ["docs"]
-
-
-
-class BuildDebs(ShellCommand):
- """I build the .deb packages."""
-
- name = "debuild"
- flunkOnFailure = 1
- command = ["debuild", "-uc", "-us"]
- description = ["building", "debs"]
- descriptionDone = ["debs"]
-
- def __init__(self, **kwargs):
- """
- @type workdir: string
- @keyword workdir: the workdir to start from (must be the base of the
- Twisted tree)
- @type results: double of [int, string]
- @keyword results: [rc, output].
- - rc == 0 if all .debs were created successfully
- - output: string with any errors or warnings
- """
- ShellCommand.__init__(self, **kwargs)
-
- def commandComplete(self, cmd):
- errors, warnings = 0, 0
- output = cmd.log.getText()
- summary = ""
- sio = StringIO.StringIO(output)
- for line in sio.readlines():
- if line.find("E: ") == 0:
- summary += line
- errors += 1
- if line.find("W: ") == 0:
- summary += line
- warnings += 1
- if summary:
- self.addCompleteLog("problems", summary)
- self.errors = errors
- self.warnings = warnings
-
- def evaluateCommand(self, cmd):
- if cmd.rc != 0:
- return FAILURE
- if self.errors:
- return FAILURE
- if self.warnings:
- return WARNINGS
- return SUCCESS
-
- def getText(self, cmd, results):
- text = ["debuild"]
- if cmd.rc != 0:
- text.append("failed")
- errors, warnings = self.errors, self.warnings
- if warnings or errors:
- text.append("lintian:")
- if warnings:
- text.append("%d warnin%s" % (warnings,
- warnings == 1 and 'g' or 'gs'))
- if errors:
- text.append("%d erro%s" % (errors,
- errors == 1 and 'r' or 'rs'))
- return text
-
- def getText2(self, cmd, results):
- if cmd.rc != 0:
- return ["debuild"]
- if self.errors or self.warnings:
- return ["%d lintian" % (self.errors + self.warnings)]
- return []
-
-class RemovePYCs(ShellCommand):
- name = "remove-.pyc"
- command = 'find . -name "*.pyc" | xargs rm'
- description = ["removing", ".pyc", "files"]
- descriptionDone = ["remove", ".pycs"]
diff --git a/buildbot/buildbot-source/build/lib/buildbot/process/step_twisted2.py b/buildbot/buildbot-source/build/lib/buildbot/process/step_twisted2.py
deleted file mode 100644
index b684b60d4..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/process/step_twisted2.py
+++ /dev/null
@@ -1,164 +0,0 @@
-#! /usr/bin/python
-
-from buildbot.status import tests
-from buildbot.process.step import SUCCESS, FAILURE, WARNINGS, SKIPPED, \
- BuildStep
-from buildbot.process.step_twisted import RunUnitTests
-
-from zope.interface import implements
-from twisted.python import log, failure
-from twisted.spread import jelly
-from twisted.pb.tokens import BananaError
-from twisted.web.util import formatFailure
-from twisted.web.html import PRE
-from twisted.web.error import NoResource
-
-class Null: pass
-ResultTypes = Null()
-ResultTypeNames = ["SKIP",
- "EXPECTED_FAILURE", "FAILURE", "ERROR",
- "UNEXPECTED_SUCCESS", "SUCCESS"]
-try:
- from twisted.trial import reporter # introduced in Twisted-1.0.5
- # extract the individual result types
- for name in ResultTypeNames:
- setattr(ResultTypes, name, getattr(reporter, name))
-except ImportError:
- from twisted.trial import unittest # Twisted-1.0.4 has them here
- for name in ResultTypeNames:
- setattr(ResultTypes, name, getattr(unittest, name))
-
-log._keepErrors = 0
-from twisted.trial import remote # for trial/jelly parsing
-
-import StringIO
-
-class OneJellyTest(tests.OneTest):
- def html(self, request):
- tpl = "<HTML><BODY>\n\n%s\n\n</body></html>\n"
- pptpl = "<HTML><BODY>\n\n<pre>%s</pre>\n\n</body></html>\n"
- t = request.postpath[0] # one of 'short', 'long' #, or 'html'
- if isinstance(self.results, failure.Failure):
- # it would be nice to remove unittest functions from the
- # traceback like unittest.format_exception() does.
- if t == 'short':
- s = StringIO.StringIO()
- self.results.printTraceback(s)
- return pptpl % PRE(s.getvalue())
- elif t == 'long':
- s = StringIO.StringIO()
- self.results.printDetailedTraceback(s)
- return pptpl % PRE(s.getvalue())
- #elif t == 'html':
- # return tpl % formatFailure(self.results)
- # ACK! source lines aren't stored in the Failure, rather,
- # formatFailure pulls them (by filename) from the local
- # disk. Feh. Even printTraceback() won't work. Double feh.
- return NoResource("No such mode '%s'" % t)
- if self.results == None:
- return tpl % "No results to show: test probably passed."
- # maybe results are plain text?
- return pptpl % PRE(self.results)
-
-class TwistedJellyTestResults(tests.TestResults):
- oneTestClass = OneJellyTest
- def describeOneTest(self, testname):
- return "%s: %s\n" % (testname, self.tests[testname][0])
-
-class RunUnitTestsJelly(RunUnitTests):
- """I run the unit tests with the --jelly option, which generates
- machine-parseable results as the tests are run.
- """
- trialMode = "--jelly"
- implements(remote.IRemoteReporter)
-
- ourtypes = { ResultTypes.SKIP: tests.SKIP,
- ResultTypes.EXPECTED_FAILURE: tests.EXPECTED_FAILURE,
- ResultTypes.FAILURE: tests.FAILURE,
- ResultTypes.ERROR: tests.ERROR,
- ResultTypes.UNEXPECTED_SUCCESS: tests.UNEXPECTED_SUCCESS,
- ResultTypes.SUCCESS: tests.SUCCESS,
- }
-
- def __getstate__(self):
- #d = RunUnitTests.__getstate__(self)
- d = self.__dict__.copy()
- # Banana subclasses are Ephemeral
- if d.has_key("decoder"):
- del d['decoder']
- return d
- def start(self):
- self.decoder = remote.DecodeReport(self)
- # don't accept anything unpleasant from the (untrusted) build slave
- # The jellied stream may have Failures, but everything inside should
- # be a string
- security = jelly.SecurityOptions()
- security.allowBasicTypes()
- security.allowInstancesOf(failure.Failure)
- self.decoder.taster = security
- self.results = TwistedJellyTestResults()
- RunUnitTests.start(self)
-
- def logProgress(self, progress):
- # XXX: track number of tests
- BuildStep.logProgress(self, progress)
-
- def addStdout(self, data):
- if not self.decoder:
- return
- try:
- self.decoder.dataReceived(data)
- except BananaError:
- self.decoder = None
- log.msg("trial --jelly output unparseable, traceback follows")
- log.deferr()
-
- def remote_start(self, expectedTests, times=None):
- print "remote_start", expectedTests
- def remote_reportImportError(self, name, aFailure, times=None):
- pass
- def remote_reportStart(self, testClass, method, times=None):
- print "reportStart", testClass, method
-
- def remote_reportResults(self, testClass, method, resultType, results,
- times=None):
- print "reportResults", testClass, method, resultType
- which = testClass + "." + method
- self.results.addTest(which,
- self.ourtypes.get(resultType, tests.UNKNOWN),
- results)
-
- def finished(self, rc):
- # give self.results to our Build object
- self.build.testsFinished(self.results)
- total = self.results.countTests()
- count = self.results.countFailures()
- result = SUCCESS
- if total == None:
- result = (FAILURE, ['tests%s' % self.rtext(' (%s)')])
- if count:
- result = (FAILURE, ["%d tes%s%s" % (count,
- (count == 1 and 't' or 'ts'),
- self.rtext(' (%s)'))])
- return self.stepComplete(result)
- def finishStatus(self, result):
- total = self.results.countTests()
- count = self.results.countFailures()
- color = "green"
- text = []
- if count == 0:
- text.extend(["%d %s" % \
- (total,
- total == 1 and "test" or "tests"),
- "passed"])
- else:
- text.append("tests")
- text.append("%d %s" % \
- (count,
- count == 1 and "failure" or "failures"))
- color = "red"
- self.updateCurrentActivity(color=color, text=text)
- self.addFileToCurrentActivity("tests", self.results)
- #self.finishStatusSummary()
- self.finishCurrentActivity()
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/scheduler.py b/buildbot/buildbot-source/build/lib/buildbot/scheduler.py
deleted file mode 100644
index 5a9a3a39e..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/scheduler.py
+++ /dev/null
@@ -1,688 +0,0 @@
-# -*- test-case-name: buildbot.test.test_dependencies -*-
-
-import time, os.path
-
-from twisted.internet import reactor
-from twisted.application import service, internet, strports
-from twisted.python import log, runtime
-from twisted.protocols import basic
-from twisted.cred import portal, checkers
-from twisted.spread import pb
-
-from buildbot import interfaces, buildset, util, pbutil
-from buildbot.util import now
-from buildbot.status import builder
-from buildbot.twcompat import implements, providedBy
-from buildbot.sourcestamp import SourceStamp
-from buildbot.changes import maildirtwisted
-
-
-class BaseScheduler(service.MultiService, util.ComparableMixin):
- if implements:
- implements(interfaces.IScheduler)
- else:
- __implements__ = (interfaces.IScheduler,
- service.MultiService.__implements__)
-
- def __init__(self, name):
- service.MultiService.__init__(self)
- self.name = name
-
- def __repr__(self):
- # TODO: why can't id() return a positive number? %d is ugly.
- return "<Scheduler '%s' at %d>" % (self.name, id(self))
-
- def submit(self, bs):
- self.parent.submitBuildSet(bs)
-
- def addChange(self, change):
- pass
-
-class BaseUpstreamScheduler(BaseScheduler):
- if implements:
- implements(interfaces.IUpstreamScheduler)
- else:
- __implements__ = (interfaces.IUpstreamScheduler,
- BaseScheduler.__implements__)
-
- def __init__(self, name):
- BaseScheduler.__init__(self, name)
- self.successWatchers = []
-
- def subscribeToSuccessfulBuilds(self, watcher):
- self.successWatchers.append(watcher)
- def unsubscribeToSuccessfulBuilds(self, watcher):
- self.successWatchers.remove(watcher)
-
- def submit(self, bs):
- d = bs.waitUntilFinished()
- d.addCallback(self.buildSetFinished)
- self.parent.submitBuildSet(bs)
-
- def buildSetFinished(self, bss):
- if not self.running:
- return
- if bss.getResults() == builder.SUCCESS:
- ss = bss.getSourceStamp()
- for w in self.successWatchers:
- w(ss)
-
-
-class Scheduler(BaseUpstreamScheduler):
- """The default Scheduler class will run a build after some period of time
- called the C{treeStableTimer}, on a given set of Builders. It only pays
-    attention to a single branch. You can provide a C{fileIsImportant}
- function which will evaluate each Change to decide whether or not it
- should trigger a new build.
- """
-
- fileIsImportant = None
- compare_attrs = ('name', 'treeStableTimer', 'builderNames', 'branch',
- 'fileIsImportant')
-
- def __init__(self, name, branch, treeStableTimer, builderNames,
- fileIsImportant=None):
- """
- @param name: the name of this Scheduler
- @param branch: The branch name that the Scheduler should pay
- attention to. Any Change that is not on this branch
- will be ignored. It can be set to None to only pay
- attention to the default branch.
- @param treeStableTimer: the duration, in seconds, for which the tree
- must remain unchanged before a build will be
- triggered. This is intended to avoid builds
- of partially-committed fixes.
- @param builderNames: a list of Builder names. When this Scheduler
- decides to start a set of builds, they will be
- run on the Builders named by this list.
-
- @param fileIsImportant: A callable which takes one argument (a Change
- instance) and returns True if the change is
- worth building, and False if it is not.
- Unimportant Changes are accumulated until the
- build is triggered by an important change.
- The default value of None means that all
- Changes are important.
- """
-
- BaseUpstreamScheduler.__init__(self, name)
- self.treeStableTimer = treeStableTimer
- for b in builderNames:
- assert isinstance(b, str)
- self.builderNames = builderNames
- self.branch = branch
- if fileIsImportant:
- assert callable(fileIsImportant)
- self.fileIsImportant = fileIsImportant
-
- self.importantChanges = []
- self.unimportantChanges = []
- self.nextBuildTime = None
- self.timer = None
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- if self.nextBuildTime is not None:
- return [self.nextBuildTime]
- return []
-
- def addChange(self, change):
- if change.branch != self.branch:
- log.msg("%s ignoring off-branch %s" % (self, change))
- return
- if not self.fileIsImportant:
- self.addImportantChange(change)
- elif self.fileIsImportant(change):
- self.addImportantChange(change)
- else:
- self.addUnimportantChange(change)
-
- def addImportantChange(self, change):
- log.msg("%s: change is important, adding %s" % (self, change))
- self.importantChanges.append(change)
- self.nextBuildTime = max(self.nextBuildTime,
- change.when + self.treeStableTimer)
- self.setTimer(self.nextBuildTime)
-
- def addUnimportantChange(self, change):
- log.msg("%s: change is not important, adding %s" % (self, change))
- self.unimportantChanges.append(change)
-
- def setTimer(self, when):
- log.msg("%s: setting timer to %s" %
- (self, time.strftime("%H:%M:%S", time.localtime(when))))
- now = util.now()
- if when < now:
- when = now + 1
- if self.timer:
- self.timer.cancel()
- self.timer = reactor.callLater(when - now, self.fireTimer)
-
- def stopTimer(self):
- if self.timer:
- self.timer.cancel()
- self.timer = None
-
- def fireTimer(self):
- # clear out our state
- self.timer = None
- self.nextBuildTime = None
- changes = self.importantChanges + self.unimportantChanges
- self.importantChanges = []
- self.unimportantChanges = []
-
- # create a BuildSet, submit it to the BuildMaster
- bs = buildset.BuildSet(self.builderNames,
- SourceStamp(changes=changes))
- self.submit(bs)
-
- def stopService(self):
- self.stopTimer()
- return service.MultiService.stopService(self)
-
-
-class AnyBranchScheduler(BaseUpstreamScheduler):
- """This Scheduler will handle changes on a variety of branches. It will
- accumulate Changes for each branch separately. It works by creating a
- separate Scheduler for each new branch it sees."""
-
- schedulerFactory = Scheduler
- fileIsImportant = None
-
- compare_attrs = ('name', 'branches', 'treeStableTimer', 'builderNames',
- 'fileIsImportant')
-
- def __init__(self, name, branches, treeStableTimer, builderNames,
- fileIsImportant=None):
- """
- @param name: the name of this Scheduler
- @param branches: The branch names that the Scheduler should pay
- attention to. Any Change that is not on one of these
- branches will be ignored. It can be set to None to
- accept changes from any branch. Don't use [] (an
- empty list), because that means we don't pay
- attention to *any* branches, so we'll never build
- anything.
- @param treeStableTimer: the duration, in seconds, for which the tree
- must remain unchanged before a build will be
- triggered. This is intended to avoid builds
- of partially-committed fixes.
- @param builderNames: a list of Builder names. When this Scheduler
- decides to start a set of builds, they will be
- run on the Builders named by this list.
-
- @param fileIsImportant: A callable which takes one argument (a Change
- instance) and returns True if the change is
- worth building, and False if it is not.
- Unimportant Changes are accumulated until the
- build is triggered by an important change.
- The default value of None means that all
- Changes are important.
- """
-
- BaseUpstreamScheduler.__init__(self, name)
- self.treeStableTimer = treeStableTimer
- for b in builderNames:
- assert isinstance(b, str)
- self.builderNames = builderNames
- self.branches = branches
- if self.branches == []:
- log.msg("AnyBranchScheduler %s: branches=[], so we will ignore "
- "all branches, and never trigger any builds. Please set "
- "branches=None to mean 'all branches'" % self)
- # consider raising an exception here, to make this warning more
- # prominent, but I can vaguely imagine situations where you might
- # want to comment out branches temporarily and wouldn't
- # appreciate it being treated as an error.
- if fileIsImportant:
- assert callable(fileIsImportant)
- self.fileIsImportant = fileIsImportant
- self.schedulers = {} # one per branch
-
- def __repr__(self):
- return "<AnyBranchScheduler '%s'>" % self.name
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- bts = []
- for s in self.schedulers.values():
- if s.nextBuildTime is not None:
- bts.append(s.nextBuildTime)
- return bts
-
- def addChange(self, change):
- branch = change.branch
- if self.branches is not None and branch not in self.branches:
- log.msg("%s ignoring off-branch %s" % (self, change))
- return
- s = self.schedulers.get(branch)
- if not s:
- if branch:
- name = self.name + "." + branch
- else:
- name = self.name + ".<default>"
- s = self.schedulerFactory(name, branch,
- self.treeStableTimer,
- self.builderNames,
- self.fileIsImportant)
- s.successWatchers = self.successWatchers
- s.setServiceParent(self)
- # TODO: does this result in schedulers that stack up forever?
- # When I make the persistify-pass, think about this some more.
- self.schedulers[branch] = s
- s.addChange(change)
-
- def submitBuildSet(self, bs):
- self.parent.submitBuildSet(bs)
-
-
-class Dependent(BaseUpstreamScheduler):
- """This scheduler runs some set of 'downstream' builds when the
- 'upstream' scheduler has completed successfully."""
-
- compare_attrs = ('name', 'upstream', 'builders')
-
- def __init__(self, name, upstream, builderNames):
- assert providedBy(upstream, interfaces.IUpstreamScheduler)
- BaseUpstreamScheduler.__init__(self, name)
- self.upstream = upstream
- self.builderNames = builderNames
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- # report the upstream's value
- return self.upstream.getPendingBuildTimes()
-
- def startService(self):
- service.MultiService.startService(self)
- self.upstream.subscribeToSuccessfulBuilds(self.upstreamBuilt)
-
- def stopService(self):
- d = service.MultiService.stopService(self)
- self.upstream.unsubscribeToSuccessfulBuilds(self.upstreamBuilt)
- return d
-
- def upstreamBuilt(self, ss):
- bs = buildset.BuildSet(self.builderNames, ss)
- self.submit(bs)
-
-
-
-class Periodic(BaseUpstreamScheduler):
- """Instead of watching for Changes, this Scheduler can just start a build
- at fixed intervals. The C{periodicBuildTimer} parameter sets the number
- of seconds to wait between such periodic builds. The first build will be
- run immediately."""
-
- # TODO: consider having this watch another (changed-based) scheduler and
- # merely enforce a minimum time between builds.
-
- compare_attrs = ('name', 'builderNames', 'periodicBuildTimer', 'branch')
-
- def __init__(self, name, builderNames, periodicBuildTimer,
- branch=None):
- BaseUpstreamScheduler.__init__(self, name)
- self.builderNames = builderNames
- self.periodicBuildTimer = periodicBuildTimer
- self.branch = branch
- self.timer = internet.TimerService(self.periodicBuildTimer,
- self.doPeriodicBuild)
- self.timer.setServiceParent(self)
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- # TODO: figure out when self.timer is going to fire next and report
- # that
- return []
-
- def doPeriodicBuild(self):
- bs = buildset.BuildSet(self.builderNames,
- SourceStamp(branch=self.branch))
- self.submit(bs)
-
-
-
-class Nightly(BaseUpstreamScheduler):
- """Imitate 'cron' scheduling. This can be used to schedule a nightly
- build, or one which runs at certain times of the day, week, or month.
-
- Pass some subset of minute, hour, dayOfMonth, month, and dayOfWeek; each
- may be a single number or a list of valid values. The builds will be
- triggered whenever the current time matches these values. Wildcards are
- represented by a '*' string. All fields default to a wildcard except
- 'minute', so with no fields this defaults to a build every hour, on the
- hour.
-
- For example, the following master.cfg clause will cause a build to be
- started every night at 3:00am::
-
- s = Nightly('nightly', ['builder1', 'builder2'], hour=3, minute=0)
- c['schedulers'].append(s)
-
- This scheduler will perform a build each Monday morning at 6:23am and
- again at 8:23am::
-
- s = Nightly('BeforeWork', ['builder1'],
- dayOfWeek=0, hour=[6,8], minute=23)
-
- The following runs a build every two hours::
-
- s = Nightly('every2hours', ['builder1'], hour=range(0, 24, 2))
-
- And this one will run only on December 24th::
-
- s = Nightly('SleighPreflightCheck', ['flying_circuits', 'radar'],
- month=12, dayOfMonth=24, hour=12, minute=0)
-
- For dayOfWeek and dayOfMonth, builds are triggered if the date matches
- either of them. Month and day numbers start at 1, not zero.
- """
-
- compare_attrs = ('name', 'builderNames',
- 'minute', 'hour', 'dayOfMonth', 'month',
- 'dayOfWeek', 'branch')
-
- def __init__(self, name, builderNames, minute=0, hour='*',
- dayOfMonth='*', month='*', dayOfWeek='*',
- branch=None):
- # Setting minute=0 really makes this an 'Hourly' scheduler. This
- # seemed like a better default than minute='*', which would result in
- # a build every 60 seconds.
- BaseUpstreamScheduler.__init__(self, name)
- self.builderNames = builderNames
- self.minute = minute
- self.hour = hour
- self.dayOfMonth = dayOfMonth
- self.month = month
- self.dayOfWeek = dayOfWeek
- self.branch = branch
- self.delayedRun = None
- self.nextRunTime = None
-
- def addTime(self, timetuple, secs):
- return time.localtime(time.mktime(timetuple)+secs)
- def findFirstValueAtLeast(self, values, value, default=None):
- for v in values:
- if v >= value: return v
- return default
-
- def setTimer(self):
- self.nextRunTime = self.calculateNextRunTime()
- self.delayedRun = reactor.callLater(self.nextRunTime - time.time(),
- self.doPeriodicBuild)
-
- def startService(self):
- BaseUpstreamScheduler.startService(self)
- self.setTimer()
-
- def stopService(self):
- BaseUpstreamScheduler.stopService(self)
- self.delayedRun.cancel()
-
- def isRunTime(self, timetuple):
- def check(ourvalue, value):
- if ourvalue == '*': return True
- if isinstance(ourvalue, int): return value == ourvalue
- return (value in ourvalue)
-
- if not check(self.minute, timetuple[4]):
- #print 'bad minute', timetuple[4], self.minute
- return False
-
- if not check(self.hour, timetuple[3]):
- #print 'bad hour', timetuple[3], self.hour
- return False
-
- if not check(self.month, timetuple[1]):
- #print 'bad month', timetuple[1], self.month
- return False
-
- if self.dayOfMonth != '*' and self.dayOfWeek != '*':
- # They specified both day(s) of month AND day(s) of week.
- # This means that we only have to match one of the two. If
- # neither one matches, this time is not the right time.
- if not (check(self.dayOfMonth, timetuple[2]) or
- check(self.dayOfWeek, timetuple[6])):
- #print 'bad day'
- return False
- else:
- if not check(self.dayOfMonth, timetuple[2]):
- #print 'bad day of month'
- return False
-
- if not check(self.dayOfWeek, timetuple[6]):
- #print 'bad day of week'
- return False
-
- return True
-
- def calculateNextRunTime(self):
- return self.calculateNextRunTimeFrom(time.time())
-
- def calculateNextRunTimeFrom(self, now):
- dateTime = time.localtime(now)
-
- # Remove seconds by advancing to at least the next minute
- dateTime = self.addTime(dateTime, 60-dateTime[5])
-
- # Now we just keep adding minutes until we find something that matches
-
- # It's not an efficient algorithm, but it'll *work* for now
- yearLimit = dateTime[0]+2
- while not self.isRunTime(dateTime):
- dateTime = self.addTime(dateTime, 60)
- #print 'Trying', time.asctime(dateTime)
- assert dateTime[0] < yearLimit, 'Something is wrong with this code'
- return time.mktime(dateTime)
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- # TODO: figure out when self.timer is going to fire next and report
- # that
- if self.nextRunTime is None: return []
- return [self.nextRunTime]
-
- def doPeriodicBuild(self):
- # Schedule the next run
- self.setTimer()
-
- # And trigger a build
- bs = buildset.BuildSet(self.builderNames,
- SourceStamp(branch=self.branch))
- self.submit(bs)
-
- def addChange(self, change):
- pass
-
-
-
-class TryBase(service.MultiService, util.ComparableMixin):
- if implements:
- implements(interfaces.IScheduler)
- else:
- __implements__ = (interfaces.IScheduler,
- service.MultiService.__implements__)
-
- def __init__(self, name, builderNames):
- service.MultiService.__init__(self)
- self.name = name
- self.builderNames = builderNames
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- # we can't predict what the developers are going to do in the future
- return []
-
- def addChange(self, change):
- # Try schedulers ignore Changes
- pass
-
-
-class BadJobfile(Exception):
- pass
-
-class JobFileScanner(basic.NetstringReceiver):
- def __init__(self):
- self.strings = []
- self.transport = self # so transport.loseConnection works
- self.error = False
-
- def stringReceived(self, s):
- self.strings.append(s)
-
- def loseConnection(self):
- self.error = True
-
-class Try_Jobdir(TryBase):
- compare_attrs = ["name", "builderNames", "jobdir"]
-
- def __init__(self, name, builderNames, jobdir):
- TryBase.__init__(self, name, builderNames)
- self.jobdir = jobdir
- self.watcher = maildirtwisted.MaildirService()
- self.watcher.setServiceParent(self)
-
- def setServiceParent(self, parent):
- self.watcher.setBasedir(os.path.join(parent.basedir, self.jobdir))
- TryBase.setServiceParent(self, parent)
-
- def parseJob(self, f):
- # jobfiles are serialized build requests. Each is a list of
- # serialized netstrings, in the following order:
- # "1", the version number of this format
- # buildsetID, arbitrary string, used to find the buildSet later
- # branch name, "" for default-branch
- # base revision
- # patchlevel, usually "1"
- # patch
- # builderNames...
- p = JobFileScanner()
- p.dataReceived(f.read())
- if p.error:
- raise BadJobfile("unable to parse netstrings")
- s = p.strings
- ver = s.pop(0)
- if ver != "1":
- raise BadJobfile("unknown version '%s'" % ver)
- buildsetID, branch, baserev, patchlevel, diff = s[:5]
- builderNames = s[5:]
- if branch == "":
- branch = None
- patchlevel = int(patchlevel)
- patch = (patchlevel, diff)
- ss = SourceStamp(branch, baserev, patch)
- return builderNames, ss, buildsetID
-
- def messageReceived(self, filename):
- md = os.path.join(self.parent.basedir, self.jobdir)
- if runtime.platformType == "posix":
- # open the file before moving it, because I'm afraid that once
- # it's in cur/, someone might delete it at any moment
- path = os.path.join(md, "new", filename)
- f = open(path, "r")
- os.rename(os.path.join(md, "new", filename),
- os.path.join(md, "cur", filename))
- else:
- # do this backwards under windows, because you can't move a file
- # that somebody is holding open. This was causing a Permission
- # Denied error on bear's win32-twisted1.3 buildslave.
- os.rename(os.path.join(md, "new", filename),
- os.path.join(md, "cur", filename))
- path = os.path.join(md, "cur", filename)
- f = open(path, "r")
-
- try:
- builderNames, ss, bsid = self.parseJob(f)
- except BadJobfile:
- log.msg("%s reports a bad jobfile in %s" % (self, filename))
- log.err()
- return
- # compare builderNames against self.builderNames
- # TODO: think about this some more.. why bother restricting it?
- # perhaps self.builderNames should be used as the default list
- # instead of being used as a restriction?
- for b in builderNames:
- if not b in self.builderNames:
- log.msg("%s got jobfile %s with builder %s" % (self,
- filename, b))
- log.msg(" but that wasn't in our list: %s"
- % (self.builderNames,))
- return
-
- reason = "'try' job"
- bs = buildset.BuildSet(builderNames, ss, reason=reason, bsid=bsid)
- self.parent.submitBuildSet(bs)
-
-class Try_Userpass(TryBase):
- compare_attrs = ["name", "builderNames", "port", "userpass"]
-
- if implements:
- implements(portal.IRealm)
- else:
- __implements__ = (portal.IRealm,
- TryBase.__implements__)
-
- def __init__(self, name, builderNames, port, userpass):
- TryBase.__init__(self, name, builderNames)
- if type(port) is int:
- port = "tcp:%d" % port
- self.port = port
- self.userpass = userpass
- c = checkers.InMemoryUsernamePasswordDatabaseDontUse()
- for user,passwd in self.userpass:
- c.addUser(user, passwd)
-
- p = portal.Portal(self)
- p.registerChecker(c)
- f = pb.PBServerFactory(p)
- s = strports.service(port, f)
- s.setServiceParent(self)
-
- def getPort(self):
- # utility method for tests: figure out which TCP port we just opened.
- return self.services[0]._port.getHost().port
-
- def requestAvatar(self, avatarID, mind, interface):
- log.msg("%s got connection from user %s" % (self, avatarID))
- assert interface == pb.IPerspective
- p = Try_Userpass_Perspective(self, avatarID)
- return (pb.IPerspective, p, lambda: None)
-
- def submitBuildSet(self, bs):
- return self.parent.submitBuildSet(bs)
-
-class Try_Userpass_Perspective(pbutil.NewCredPerspective):
- def __init__(self, parent, username):
- self.parent = parent
- self.username = username
-
- def perspective_try(self, branch, revision, patch, builderNames):
- log.msg("user %s requesting build on builders %s" % (self.username,
- builderNames))
- for b in builderNames:
- if not b in self.parent.builderNames:
- log.msg("%s got job with builder %s" % (self, b))
- log.msg(" but that wasn't in our list: %s"
- % (self.parent.builderNames,))
- return
- ss = SourceStamp(branch, revision, patch)
- reason = "'try' job from user %s" % self.username
- bs = buildset.BuildSet(builderNames, ss, reason=reason)
- self.parent.submitBuildSet(bs)
-
- # return a remotely-usable BuildSetStatus object
- from buildbot.status.client import makeRemote
- return makeRemote(bs.status)
-
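
As a hedged sketch of how the scheduler classes deleted above were typically wired up (this fragment is illustrative and not taken from this tree): a master.cfg defines a 'BuildmasterConfig' dictionary, and the schedulers go into its 'schedulers' list. The builder names and timings below are hypothetical, and the other required config keys (builders, slaves, ports) are omitted.

    # master.cfg fragment (illustrative only)
    from buildbot.scheduler import Scheduler, Nightly, Dependent

    c = BuildmasterConfig = {}

    # start a build two minutes after the default branch stops changing
    quick = Scheduler("quick", branch=None, treeStableTimer=120,
                      builderNames=["quick-linux"])

    # a full build every night at 3:00am, as in the Nightly docstring above
    nightly = Nightly("nightly", ["full-linux"], hour=3, minute=0)

    # run packaging only after the nightly build set has succeeded
    package = Dependent("package", upstream=nightly,
                        builderNames=["packager"])

    c['schedulers'] = [quick, nightly, package]
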
diff --git a/buildbot/buildbot-source/build/lib/buildbot/scripts/__init__.py b/buildbot/buildbot-source/build/lib/buildbot/scripts/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/scripts/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/build/lib/buildbot/scripts/runner.py b/buildbot/buildbot-source/build/lib/buildbot/scripts/runner.py
deleted file mode 100644
index 7d11a8225..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/scripts/runner.py
+++ /dev/null
@@ -1,749 +0,0 @@
-# -*- test-case-name: buildbot.test.test_runner -*-
-
-# N.B.: don't import anything that might pull in a reactor yet. Some of our
-# subcommands want to load modules that need the gtk reactor.
-import os, os.path, sys, shutil, stat, re, time
-from twisted.python import usage, util, runtime
-
-# this is mostly just a front-end for mktap, twistd, and kill(1), but in the
-# future it will also provide an interface to some developer tools that talk
-# directly to a remote buildmaster (like 'try' and a status client)
-
-# the create/start/stop commands should all be run as the same user,
-# preferably a separate 'buildbot' account.
-
-class MakerBase(usage.Options):
- optFlags = [
- ['help', 'h', "Display this message"],
- ["quiet", "q", "Do not emit the commands being run"],
- ]
-
- #["basedir", "d", None, "Base directory for the buildmaster"],
- opt_h = usage.Options.opt_help
-
- def parseArgs(self, *args):
- if len(args) > 0:
- self['basedir'] = args[0]
- else:
- self['basedir'] = None
- if len(args) > 1:
- raise usage.UsageError("I wasn't expecting so many arguments")
-
- def postOptions(self):
- if self['basedir'] is None:
- raise usage.UsageError("<basedir> parameter is required")
- self['basedir'] = os.path.abspath(self['basedir'])
-
-makefile_sample = """# -*- makefile -*-
-
-# This is a simple makefile which lives in a buildmaster/buildslave
-# directory (next to the buildbot.tac file). It allows you to start/stop the
-# master or slave by doing 'make start' or 'make stop'.
-
-# The 'reconfig' target will tell a buildmaster to reload its config file.
-
-start:
- twistd --no_save -y buildbot.tac
-
-stop:
- kill `cat twistd.pid`
-
-reconfig:
- kill -HUP `cat twistd.pid`
-
-log:
- tail -f twistd.log
-"""
-
-class Maker:
- def __init__(self, config):
- self.config = config
- self.basedir = config['basedir']
- self.force = config['force']
- self.quiet = config['quiet']
-
- def mkdir(self):
- if os.path.exists(self.basedir):
- if not self.quiet:
- print "updating existing installation"
- return
- if not self.quiet: print "mkdir", self.basedir
- os.mkdir(self.basedir)
-
- def mkinfo(self):
- path = os.path.join(self.basedir, "info")
- if not os.path.exists(path):
- if not self.quiet: print "mkdir", path
- os.mkdir(path)
- created = False
- admin = os.path.join(path, "admin")
- if not os.path.exists(admin):
- if not self.quiet:
- print "Creating info/admin, you need to edit it appropriately"
- f = open(admin, "wt")
- f.write("Your Name Here <admin@youraddress.invalid>\n")
- f.close()
- created = True
- host = os.path.join(path, "host")
- if not os.path.exists(host):
- if not self.quiet:
- print "Creating info/host, you need to edit it appropriately"
- f = open(host, "wt")
- f.write("Please put a description of this build host here\n")
- f.close()
- created = True
- if created and not self.quiet:
- print "Please edit the files in %s appropriately." % path
-
- def chdir(self):
- if not self.quiet: print "chdir", self.basedir
- os.chdir(self.basedir)
-
- def makeTAC(self, contents, secret=False):
- tacfile = "buildbot.tac"
- if os.path.exists(tacfile):
- oldcontents = open(tacfile, "rt").read()
- if oldcontents == contents:
- if not self.quiet:
- print "buildbot.tac already exists and is correct"
- return
- if not self.quiet:
- print "not touching existing buildbot.tac"
- print "creating buildbot.tac.new instead"
- tacfile = "buildbot.tac.new"
- f = open(tacfile, "wt")
- f.write(contents)
- f.close()
- if secret:
- os.chmod(tacfile, 0600)
-
- def makefile(self):
- target = "Makefile.sample"
- if os.path.exists(target):
- oldcontents = open(target, "rt").read()
- if oldcontents == makefile_sample:
- if not self.quiet:
- print "Makefile.sample already exists and is correct"
- return
- if not self.quiet:
- print "replacing Makefile.sample"
- else:
- if not self.quiet:
- print "creating Makefile.sample"
- f = open(target, "wt")
- f.write(makefile_sample)
- f.close()
-
- def sampleconfig(self, source):
- target = "master.cfg.sample"
- config_sample = open(source, "rt").read()
- if os.path.exists(target):
- oldcontents = open(target, "rt").read()
- if oldcontents == config_sample:
- if not self.quiet:
- print "master.cfg.sample already exists and is up-to-date"
- return
- if not self.quiet:
- print "replacing master.cfg.sample"
- else:
- if not self.quiet:
- print "creating master.cfg.sample"
- f = open(target, "wt")
- f.write(config_sample)
- f.close()
- os.chmod(target, 0600)
-
-class MasterOptions(MakerBase):
- optFlags = [
- ["force", "f",
- "Re-use an existing directory (will not overwrite master.cfg file)"],
- ]
- optParameters = [
- ["config", "c", "master.cfg", "name of the buildmaster config file"],
- ]
- def getSynopsis(self):
- return "Usage: buildbot master [options] <basedir>"
-
- longdesc = """
- This command creates a buildmaster working directory and buildbot.tac
- file. The master will live in <dir> and create various files there.
-
- At runtime, the master will read a configuration file (named
- 'master.cfg' by default) in its basedir. This file should contain python
- code which eventually defines a dictionary named 'BuildmasterConfig'.
- The elements of this dictionary are used to configure the Buildmaster.
- See doc/config.xhtml for details about what can be controlled through
- this interface."""
-
-masterTAC = """
-from twisted.application import service
-from buildbot.master import BuildMaster
-
-basedir = r'%(basedir)s'
-configfile = r'%(config)s'
-
-application = service.Application('buildmaster')
-BuildMaster(basedir, configfile).setServiceParent(application)
-
-"""
-
-def createMaster(config):
- m = Maker(config)
- m.mkdir()
- m.chdir()
- contents = masterTAC % config
- m.makeTAC(contents)
- m.sampleconfig(util.sibpath(__file__, "sample.cfg"))
- m.makefile()
-
- if not m.quiet: print "buildmaster configured in %s" % m.basedir
-
-class SlaveOptions(MakerBase):
- optFlags = [
- ["force", "f", "Re-use an existing directory"],
- ]
- optParameters = [
-# ["name", "n", None, "Name for this build slave"],
-# ["passwd", "p", None, "Password for this build slave"],
-# ["basedir", "d", ".", "Base directory to use"],
-# ["master", "m", "localhost:8007",
-# "Location of the buildmaster (host:port)"],
-
- ["keepalive", "k", 600,
- "Interval at which keepalives should be sent (in seconds)"],
- ["usepty", None, 1,
- "(1 or 0) child processes should be run in a pty"],
- ["umask", None, "None",
- "controls permissions of generated files. Use --umask=022 to be world-readable"],
- ]
-
- longdesc = """
- This command creates a buildslave working directory and buildbot.tac
- file. The bot will use the <name> and <passwd> arguments to authenticate
- itself when connecting to the master. All commands are run in a
- build-specific subdirectory of <basedir>, which defaults to the working
- directory that mktap was run from. <master> is a string of the form
- 'hostname:port', and specifies where the buildmaster can be reached.
-
- <name>, <passwd>, and <master> will be provided by the buildmaster
- administrator for your bot.
- """
-
- def getSynopsis(self):
- return "Usage: buildbot slave [options] <basedir> <master> <name> <passwd>"
-
- def parseArgs(self, *args):
- if len(args) < 4:
- raise usage.UsageError("command needs more arguments")
- basedir, master, name, passwd = args
- self['basedir'] = basedir
- self['master'] = master
- self['name'] = name
- self['passwd'] = passwd
-
- def postOptions(self):
- MakerBase.postOptions(self)
- self['usepty'] = int(self['usepty'])
- self['keepalive'] = int(self['keepalive'])
- if self['master'].find(":") == -1:
- raise usage.UsageError("--master must be in the form host:portnum")
-
-slaveTAC = """
-from twisted.application import service
-from buildbot.slave.bot import BuildSlave
-
-basedir = r'%(basedir)s'
-host = '%(host)s'
-port = %(port)d
-slavename = '%(name)s'
-passwd = '%(passwd)s'
-keepalive = %(keepalive)d
-usepty = %(usepty)d
-umask = %(umask)s
-
-application = service.Application('buildslave')
-s = BuildSlave(host, port, slavename, passwd, basedir, keepalive, usepty,
- umask=umask)
-s.setServiceParent(application)
-
-"""
-
-def createSlave(config):
- m = Maker(config)
- m.mkdir()
- m.chdir()
- try:
- master = config['master']
- host, port = re.search(r'(.+):(\d+)', master).groups()
- config['host'] = host
- config['port'] = int(port)
- except:
- print "unparseable master location '%s'" % master
- print " expecting something more like localhost:8007"
- raise
- contents = slaveTAC % config
-
- m.makeTAC(contents, secret=True)
-
- m.makefile()
- m.mkinfo()
-
- if not m.quiet: print "buildslave configured in %s" % m.basedir
-
-
-def start(config):
- basedir = config['basedir']
- quiet = config['quiet']
- os.chdir(basedir)
- sys.path.insert(0, os.path.abspath(os.getcwd()))
- if os.path.exists("/usr/bin/make") and os.path.exists("Makefile.buildbot"):
- # Preferring the Makefile lets slave admins do useful things like set
- # up environment variables for the buildslave.
- cmd = "make -f Makefile.buildbot start"
- if not quiet: print cmd
- os.system(cmd)
- else:
- # see if we can launch the application without actually having to
- # spawn twistd, since spawning processes correctly is a real hassle
- # on windows.
- from twisted.python.runtime import platformType
- argv = ["twistd",
- "--no_save",
- "--logfile=twistd.log", # windows doesn't use the same default
- "--python=buildbot.tac"]
- if platformType == "win32":
- argv.append("--reactor=win32")
- sys.argv = argv
-
- # this is copied from bin/twistd. twisted-1.3.0 uses twistw, while
- # twisted-2.0.0 uses _twistw.
- if platformType == "win32":
- try:
- from twisted.scripts._twistw import run
- except ImportError:
- from twisted.scripts.twistw import run
- else:
- from twisted.scripts.twistd import run
- run()
-
-
-def stop(config, signame="TERM", wait=False):
- import signal
- basedir = config['basedir']
- quiet = config['quiet']
- os.chdir(basedir)
- f = open("twistd.pid", "rt")
- pid = int(f.read().strip())
- signum = getattr(signal, "SIG"+signame)
- timer = 0
- os.kill(pid, signum)
- if not wait:
- print "sent SIG%s to process" % signame
- return
- time.sleep(0.1)
- while timer < 5:
- # poll once per second until the process goes away, up to 5 seconds
- try:
- os.kill(pid, 0)
- except OSError:
- print "buildbot process %d is dead" % pid
- return
- timer += 1
- time.sleep(1)
- print "never saw process go away"
-
-def restart(config):
- stop(config, wait=True)
- print "now restarting buildbot process.."
- start(config)
- # this next line might not be printed, if start() ended up running twistd
- # inline
- print "buildbot process has been restarted"
-
-
-def loadOptions(filename="options", here=None, home=None):
- """Find the .buildbot/FILENAME file. Crawl from the current directory up
- towards the root, and also look in ~/.buildbot . The first directory
- that's owned by the user and has the file we're looking for wins. Windows
- skips the owned-by-user test.
-
- @rtype: dict
- @return: a dictionary of names defined in the options file. If no options
- file was found, return an empty dict.
- """
-
- if here is None:
- here = os.getcwd()
- here = os.path.abspath(here)
-
- if home is None:
- if runtime.platformType == 'win32':
- home = os.path.join(os.environ['APPDATA'], "buildbot")
- else:
- home = os.path.expanduser("~/.buildbot")
-
- searchpath = []
- toomany = 20
- while True:
- searchpath.append(os.path.join(here, ".buildbot"))
- next = os.path.dirname(here)
- if next == here:
- break # we've hit the root
- here = next
- toomany -= 1 # just in case
- if toomany == 0:
- raise ValueError("Hey, I seem to have wandered up into the "
- "infinite glories of the heavens. Oops.")
- searchpath.append(home)
-
- localDict = {}
-
- for d in searchpath:
- if os.path.isdir(d):
- if runtime.platformType != 'win32':
- if os.stat(d)[stat.ST_UID] != os.getuid():
- print "skipping %s because you don't own it" % d
- continue # security, skip other people's directories
- optfile = os.path.join(d, filename)
- if os.path.exists(optfile):
- try:
- f = open(optfile, "r")
- options = f.read()
- exec options in localDict
- except:
- print "error while reading %s" % optfile
- raise
- break
-
- for k in localDict.keys():
- if k.startswith("__"):
- del localDict[k]
- return localDict
-
-class StartOptions(MakerBase):
- def getSynopsis(self):
- return "Usage: buildbot start <basedir>"
-
-class StopOptions(MakerBase):
- def getSynopsis(self):
- return "Usage: buildbot stop <basedir>"
-
-class RestartOptions(MakerBase):
- def getSynopsis(self):
- return "Usage: buildbot restart <basedir>"
-
-class DebugClientOptions(usage.Options):
- optFlags = [
- ['help', 'h', "Display this message"],
- ]
- optParameters = [
- ["master", "m", None,
- "Location of the buildmaster's slaveport (host:port)"],
- ["passwd", "p", None, "Debug password to use"],
- ]
-
- def parseArgs(self, *args):
- if len(args) > 0:
- self['master'] = args[0]
- if len(args) > 1:
- self['passwd'] = args[1]
- if len(args) > 2:
- raise usage.UsageError("I wasn't expecting so many arguments")
-
-def debugclient(config):
- from buildbot.clients import debug
- opts = loadOptions()
-
- master = config.get('master')
- if not master:
- master = opts.get('master')
- if master is None:
- raise usage.UsageError("master must be specified: on the command "
- "line or in ~/.buildbot/options")
-
- passwd = config.get('passwd')
- if not passwd:
- passwd = opts.get('debugPassword')
- if passwd is None:
- raise usage.UsageError("passwd must be specified: on the command "
- "line or in ~/.buildbot/options")
-
- d = debug.DebugWidget(master, passwd)
- d.run()
-
-class StatusClientOptions(usage.Options):
- optFlags = [
- ['help', 'h', "Display this message"],
- ]
- optParameters = [
- ["master", "m", None,
- "Location of the buildmaster's status port (host:port)"],
- ]
-
- def parseArgs(self, *args):
- if len(args) > 0:
- self['master'] = args[0]
- if len(args) > 1:
- raise usage.UsageError("I wasn't expecting so many arguments")
-
-def statuslog(config):
- from buildbot.clients import base
- opts = loadOptions()
- master = config.get('master')
- if not master:
- master = opts.get('masterstatus')
- if master is None:
- raise usage.UsageError("master must be specified: on the command "
- "line or in ~/.buildbot/options")
- c = base.TextClient(master)
- c.run()
-
-def statusgui(config):
- from buildbot.clients import gtkPanes
- opts = loadOptions()
- master = config.get('master')
- if not master:
- master = opts.get('masterstatus')
- if master is None:
- raise usage.UsageError("master must be specified: on the command "
- "line or in ~/.buildbot/options")
- c = gtkPanes.GtkClient(master)
- c.run()
-
-class SendChangeOptions(usage.Options):
- optParameters = [
- ("master", "m", None,
- "Location of the buildmaster's PBListener (host:port)"),
- ("username", "u", None, "Username performing the commit"),
- ("branch", "b", None, "Branch specifier"),
- ("revision", "r", None, "Revision specifier (string)"),
- ("revision_number", "n", None, "Revision specifier (integer)"),
- ("revision_file", None, None, "Filename containing revision spec"),
- ("comments", "m", None, "log message"),
- ("logfile", "F", None,
- "Read the log messages from this file (- for stdin)"),
- ]
- def getSynopsis(self):
- return "Usage: buildbot sendchange [options] filenames.."
- def parseArgs(self, *args):
- self['files'] = args
-
-
-def sendchange(config, runReactor=False):
- """Send a single change to the buildmaster's PBChangeSource. The
- connection will be dropped as soon as the Change has been sent."""
- from buildbot.clients.sendchange import Sender
-
- opts = loadOptions()
- user = config.get('username', opts.get('username'))
- master = config.get('master', opts.get('master'))
- branch = config.get('branch', opts.get('branch'))
- revision = config.get('revision')
- # SVN and P4 use numeric revisions
- if config.get("revision_number"):
- revision = int(config['revision_number'])
- if config.get("revision_file"):
- revision = open(config["revision_file"],"r").read()
-
- comments = config.get('comments')
- if not comments and config.get('logfile'):
- if config['logfile'] == "-":
- f = sys.stdin
- else:
- f = open(config['logfile'], "rt")
- comments = f.read()
- if comments is None:
- comments = ""
-
- files = config.get('files', [])
-
- assert user, "you must provide a username"
- assert master, "you must provide the master location"
-
- s = Sender(master, user)
- d = s.send(branch, revision, comments, files)
- if runReactor:
- d.addCallbacks(s.printSuccess, s.printFailure)
- d.addCallback(s.stop)
- s.run()
- return d
-
-
-class ForceOptions(usage.Options):
- optParameters = [
- ["builder", None, None, "which Builder to start"],
- ["branch", None, None, "which branch to build"],
- ["revision", None, None, "which revision to build"],
- ["reason", None, None, "the reason for starting the build"],
- ]
-
- def parseArgs(self, *args):
- args = list(args)
- if len(args) > 0:
- if self['builder'] is not None:
- raise usage.UsageError("--builder provided in two ways")
- self['builder'] = args.pop(0)
- if len(args) > 0:
- if self['reason'] is not None:
- raise usage.UsageError("--reason provided in two ways")
- self['reason'] = " ".join(args)
-
-
-class TryOptions(usage.Options):
- optParameters = [
- ["connect", "c", None,
- "how to reach the buildmaster, either 'ssh' or 'pb'"],
- # for ssh, use --tryhost, --username, and --trydir
- ["tryhost", None, None,
- "the hostname (used by ssh) for the buildmaster"],
- ["trydir", None, None,
- "the directory (on the tryhost) where tryjobs are deposited"],
- ["username", "u", None, "Username performing the trial build"],
- # for PB, use --master, --username, and --passwd
- ["master", "m", None,
- "Location of the buildmaster's PBListener (host:port)"],
- ["passwd", None, None, "password for PB authentication"],
-
- ["vc", None, None,
- "The VC system in use, one of: cvs,svn,tla,baz,darcs"],
- ["branch", None, None,
- "The branch in use, for VC systems that can't figure it out"
- " themselves"],
-
- ["builder", "b", None,
- "Run the trial build on this Builder. Can be used multiple times."],
- ]
-
- optFlags = [
- ["wait", None, "wait until the builds have finished"],
- ]
-
- def __init__(self):
- super(TryOptions, self).__init__()
- self['builders'] = []
-
- def opt_builder(self, option):
- self['builders'].append(option)
-
- def getSynopsis(self):
- return "Usage: buildbot try [options]"
-
-def doTry(config):
- from buildbot.scripts import tryclient
- t = tryclient.Try(config)
- t.run()
-
-class TryServerOptions(usage.Options):
- optParameters = [
- ["jobdir", None, None, "the jobdir (maildir) for submitting jobs"],
- ]
-
-def doTryServer(config):
- import md5
- jobdir = os.path.expanduser(config["jobdir"])
- job = sys.stdin.read()
- # now do a 'safecat'-style write to jobdir/tmp, then move atomically to
- # jobdir/new . Rather than come up with a unique name randomly, I'm just
- # going to MD5 the contents and prepend a timestamp.
- timestring = "%d" % time.time()
- jobhash = md5.new(job).hexdigest()
- fn = "%s-%s" % (timestring, jobhash)
- tmpfile = os.path.join(jobdir, "tmp", fn)
- newfile = os.path.join(jobdir, "new", fn)
- f = open(tmpfile, "w")
- f.write(job)
- f.close()
- os.rename(tmpfile, newfile)
-
-
-class Options(usage.Options):
- synopsis = "Usage: buildbot <command> [command options]"
-
- subCommands = [
- # the following are all admin commands
- ['master', None, MasterOptions,
- "Create and populate a directory for a new buildmaster"],
- ['slave', None, SlaveOptions,
- "Create and populate a directory for a new buildslave"],
- ['start', None, StartOptions, "Start a buildmaster or buildslave"],
- ['stop', None, StopOptions, "Stop a buildmaster or buildslave"],
- ['restart', None, RestartOptions,
- "Restart a buildmaster or buildslave"],
-
- ['sighup', None, StopOptions,
- "SIGHUP a buildmaster to make it re-read the config file"],
-
- ['sendchange', None, SendChangeOptions,
- "Send a change to the buildmaster"],
-
- ['debugclient', None, DebugClientOptions,
- "Launch a small debug panel GUI"],
-
- ['statuslog', None, StatusClientOptions,
- "Emit current builder status to stdout"],
- ['statusgui', None, StatusClientOptions,
- "Display a small window showing current builder status"],
-
- #['force', None, ForceOptions, "Run a build"],
- ['try', None, TryOptions, "Run a build with your local changes"],
-
- ['tryserver', None, TryServerOptions,
- "buildmaster-side 'try' support function, not for users"],
-
- # TODO: 'watch'
- ]
-
- def opt_version(self):
- import buildbot
- print "Buildbot version: %s" % buildbot.version
- usage.Options.opt_version(self)
-
- def opt_verbose(self):
- from twisted.python import log
- log.startLogging(sys.stderr)
-
- def postOptions(self):
- if not hasattr(self, 'subOptions'):
- raise usage.UsageError("must specify a command")
-
-
-def run():
- config = Options()
- try:
- config.parseOptions()
- except usage.error, e:
- print "%s: %s" % (sys.argv[0], e)
- print
- c = getattr(config, 'subOptions', config)
- print str(c)
- sys.exit(1)
-
- command = config.subCommand
- so = config.subOptions
-
- if command == "master":
- createMaster(so)
- elif command == "slave":
- createSlave(so)
- elif command == "start":
- start(so)
- elif command == "stop":
- stop(so, wait=True)
- elif command == "restart":
- restart(so)
- elif command == "sighup":
- stop(so, "HUP")
- elif command == "sendchange":
- sendchange(so, True)
- elif command == "debugclient":
- debugclient(so)
- elif command == "statuslog":
- statuslog(so)
- elif command == "statusgui":
- statusgui(so)
- elif command == "try":
- doTry(so)
- elif command == "tryserver":
- doTryServer(so)
-
-
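
For context on loadOptions() above: it walks up from the current directory looking for a .buildbot/ directory (falling back to ~/.buildbot), execs the plain-Python 'options' file it finds there, and hands the resulting names to the client commands. A minimal sketch of such a file, limited to the keys the commands in this file actually read; the host names and password are made up:

    # ~/.buildbot/options -- plain Python, exec'd by loadOptions()
    master = "buildmaster.example.org:9989"        # debugclient, sendchange
    masterstatus = "buildmaster.example.org:9988"  # statuslog, statusgui
    debugPassword = "debugpw"                      # debugclient
    username = "alice"                             # sendchange
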
diff --git a/buildbot/buildbot-source/build/lib/buildbot/scripts/tryclient.py b/buildbot/buildbot-source/build/lib/buildbot/scripts/tryclient.py
deleted file mode 100644
index 796634468..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/scripts/tryclient.py
+++ /dev/null
@@ -1,580 +0,0 @@
-# -*- test-case-name: buildbot.test.test_scheduler,buildbot.test.test_vc -*-
-
-import sys, os, re, time, random
-from twisted.internet import utils, protocol, defer, reactor, task
-from twisted.spread import pb
-from twisted.cred import credentials
-from twisted.python import log
-
-from buildbot.sourcestamp import SourceStamp
-from buildbot.scripts import runner
-from buildbot.util import now
-from buildbot.status import builder
-from buildbot.twcompat import which
-
-class SourceStampExtractor:
-
- def __init__(self, treetop, branch):
- self.treetop = treetop
- self.branch = branch
- self.exe = which(self.vcexe)[0]
-
- def dovc(self, cmd):
- """This accepts the arguments of a command, without the actual
- command itself."""
- env = os.environ.copy()
- env['LC_ALL'] = "C"
- return utils.getProcessOutput(self.exe, cmd, env=env,
- path=self.treetop)
-
- def get(self):
- """Return a Deferred that fires with a SourceStamp instance."""
- d = self.getBaseRevision()
- d.addCallback(self.getPatch)
- d.addCallback(self.done)
- return d
- def readPatch(self, res, patchlevel):
- self.patch = (patchlevel, res)
- def done(self, res):
- # TODO: figure out the branch too
- ss = SourceStamp(self.branch, self.baserev, self.patch)
- return ss
-
-class CVSExtractor(SourceStampExtractor):
- patchlevel = 0
- vcexe = "cvs"
- def getBaseRevision(self):
- # this depends upon our local clock and the repository's clock being
- # reasonably synchronized with each other. We express everything in
- # UTC because the '%z' format specifier for strftime doesn't always
- # work.
- self.baserev = time.strftime("%Y-%m-%d %H:%M:%S +0000",
- time.gmtime(now()))
- return defer.succeed(None)
-
- def getPatch(self, res):
- # the -q tells CVS to not announce each directory as it works
- if self.branch is not None:
- # 'cvs diff' won't take both -r and -D at the same time (it
- # ignores the -r). As best I can tell, there is no way to make
- # cvs give you a diff relative to a timestamp on the non-trunk
- # branch. A bare 'cvs diff' will tell you about the changes
- # relative to your checked-out versions, but I know of no way to
- # find out what those checked-out versions are.
- raise RuntimeError("Sorry, CVS 'try' builds don't work with "
- "branches")
- args = ['-q', 'diff', '-u', '-D', self.baserev]
- d = self.dovc(args)
- d.addCallback(self.readPatch, self.patchlevel)
- return d
-
-class SVNExtractor(SourceStampExtractor):
- patchlevel = 0
- vcexe = "svn"
-
- def getBaseRevision(self):
- d = self.dovc(["status", "-u"])
- d.addCallback(self.parseStatus)
- return d
- def parseStatus(self, res):
- # svn shows the base revision for each file that has been modified or
- # which needs an update. You can update each file to a different
- # version, so each file is displayed with its individual base
- # revision. It also shows the repository-wide latest revision number
- # on the last line ("Status against revision: \d+").
-
- # for our purposes, we use the latest revision number as the "base"
- # revision, and get a diff against that. This means we will get
- # reverse-diffs for local files that need updating, but the resulting
- # tree will still be correct. The only weirdness is that the baserev
- # that we emit may be different than the version of the tree that we
- # first checked out.
-
- # to do this differently would probably involve scanning the revision
- # numbers to find the max (or perhaps the min) revision, and then
- # using that as a base.
-
- for line in res.split("\n"):
- m = re.search(r'^Status against revision:\s+(\d+)', line)
- if m:
- self.baserev = int(m.group(1))
- return
- raise IndexError("Could not find 'Status against revision' in "
- "SVN output: %s" % res)
- def getPatch(self, res):
- d = self.dovc(["diff", "-r%d" % self.baserev])
- d.addCallback(self.readPatch, self.patchlevel)
- return d
-
-class BazExtractor(SourceStampExtractor):
- vcexe = "baz"
- def getBaseRevision(self):
- d = self.dovc(["tree-id"])
- d.addCallback(self.parseStatus)
- return d
- def parseStatus(self, res):
- tid = res.strip()
- slash = tid.index("/")
- dd = tid.rindex("--")
- self.branch = tid[slash+1:dd]
- self.baserev = tid[dd+2:]
- def getPatch(self, res):
- d = self.dovc(["diff"])
- d.addCallback(self.readPatch, 1)
- return d
-
-class TlaExtractor(SourceStampExtractor):
- vcexe = "tla"
- def getBaseRevision(self):
- # 'tla logs --full' gives us ARCHIVE/BRANCH--REVISION
- # 'tla logs' gives us REVISION
- d = self.dovc(["logs", "--full", "--reverse"])
- d.addCallback(self.parseStatus)
- return d
- def parseStatus(self, res):
- tid = res.split("\n")[0].strip()
- slash = tid.index("/")
- dd = tid.rindex("--")
- self.branch = tid[slash+1:dd]
- self.baserev = tid[dd+2:]
-
- def getPatch(self, res):
- d = self.dovc(["changes", "--diffs"])
- d.addCallback(self.readPatch, 1)
- return d
-
-class MercurialExtractor(SourceStampExtractor):
- patchlevel = 1
- vcexe = "hg"
- def getBaseRevision(self):
- d = self.dovc(["identify"])
- d.addCallback(self.parseStatus)
- return d
- def parseStatus(self, output):
- m = re.search(r'^(\w+)', output)
- self.baserev = m.group(0)
- def getPatch(self, res):
- d = self.dovc(["diff"])
- d.addCallback(self.readPatch, self.patchlevel)
- return d
-
-class DarcsExtractor(SourceStampExtractor):
- patchlevel = 1
- vcexe = "darcs"
- def getBaseRevision(self):
- d = self.dovc(["changes", "--context"])
- d.addCallback(self.parseStatus)
- return d
- def parseStatus(self, res):
- self.baserev = res # the whole context file
- def getPatch(self, res):
- d = self.dovc(["diff", "-u"])
- d.addCallback(self.readPatch, self.patchlevel)
- return d
-
-def getSourceStamp(vctype, treetop, branch=None):
- if vctype == "cvs":
- e = CVSExtractor(treetop, branch)
- elif vctype == "svn":
- e = SVNExtractor(treetop, branch)
- elif vctype == "baz":
- e = BazExtractor(treetop, branch)
- elif vctype == "tla":
- e = TlaExtractor(treetop, branch)
- elif vctype == "hg":
- e = MercurialExtractor(treetop, branch)
- elif vctype == "darcs":
- e = DarcsExtractor(treetop, branch)
- else:
- raise KeyError("unknown vctype '%s'" % vctype)
- return e.get()
-
-
-def ns(s):
- return "%d:%s," % (len(s), s)
-
-def createJobfile(bsid, branch, baserev, patchlevel, diff, builderNames):
- job = ""
- job += ns("1")
- job += ns(bsid)
- job += ns(branch)
- job += ns(str(baserev))
- job += ns("%d" % patchlevel)
- job += ns(diff)
- for bn in builderNames:
- job += ns(bn)
- return job
-
-def getTopdir(topfile, start=None):
- """walk upwards from the current directory until we find this topfile"""
- if not start:
- start = os.getcwd()
- here = start
- toomany = 20
- while toomany > 0:
- if os.path.exists(os.path.join(here, topfile)):
- return here
- next = os.path.dirname(here)
- if next == here:
- break # we've hit the root
- here = next
- toomany -= 1
- raise ValueError("Unable to find topfile '%s' anywhere from %s upwards"
- % (topfile, start))
-
-class RemoteTryPP(protocol.ProcessProtocol):
- def __init__(self, job):
- self.job = job
- self.d = defer.Deferred()
- def connectionMade(self):
- self.transport.write(self.job)
- self.transport.closeStdin()
- def outReceived(self, data):
- sys.stdout.write(data)
- def errReceived(self, data):
- sys.stderr.write(data)
- def processEnded(self, status_object):
- sig = status_object.value.signal
- rc = status_object.value.exitCode
- if sig != None or rc != 0:
- self.d.errback(RuntimeError("remote 'buildbot tryserver' failed"
- ": sig=%s, rc=%s" % (sig, rc)))
- return
- self.d.callback((sig, rc))
-
-class BuildSetStatusGrabber:
- retryCount = 5 # how many times do we try to grab the BuildSetStatus?
- retryDelay = 3 # seconds to wait between attempts
-
- def __init__(self, status, bsid):
- self.status = status
- self.bsid = bsid
-
- def grab(self):
- # return a Deferred that either fires with the BuildSetStatus
- # reference or errbacks because we were unable to grab it
- self.d = defer.Deferred()
- # wait a second before querying to give the master's maildir watcher
- # a chance to see the job
- reactor.callLater(1, self.go)
- return self.d
-
- def go(self, dummy=None):
- if self.retryCount == 0:
- raise RuntimeError("couldn't find matching buildset")
- self.retryCount -= 1
- d = self.status.callRemote("getBuildSets")
- d.addCallback(self._gotSets)
-
- def _gotSets(self, buildsets):
- for bs,bsid in buildsets:
- if bsid == self.bsid:
- # got it
- self.d.callback(bs)
- return
- d = defer.Deferred()
- d.addCallback(self.go)
- reactor.callLater(self.retryDelay, d.callback, None)
-
-
-class Try(pb.Referenceable):
- buildsetStatus = None
- quiet = False
-
- def __init__(self, config):
- self.config = config
- self.opts = runner.loadOptions()
- self.connect = self.getopt('connect', 'try_connect')
- assert self.connect, "you must specify a connect style: ssh or pb"
- self.builderNames = self.getopt('builders', 'try_builders')
- assert self.builderNames, "no builders! use --builder or " \
- "try_builders=[names..] in .buildbot/options"
-
- def getopt(self, config_name, options_name, default=None):
- value = self.config.get(config_name)
- if value is None or value == []:
- value = self.opts.get(options_name)
- if value is None or value == []:
- value = default
- return value
-
- def createJob(self):
- # returns a Deferred which fires when the job parameters have been
- # created
- config = self.config
- opts = self.opts
- # generate a random (unique) string. It would make sense to add a
- # hostname and process ID here, but a) I suspect that would cause
- # windows portability problems, and b) really this is good enough
- self.bsid = "%d-%s" % (time.time(), random.randint(0, 1000000))
-
- # common options
- vc = self.getopt("vc", "try_vc")
- branch = self.getopt("branch", "try_branch")
-
- if vc in ("cvs", "svn"):
- # we need to find the tree-top
- topdir = self.getopt("try_topdir", "try_topdir")
- if topdir:
- treedir = os.path.expanduser(topdir)
- else:
- topfile = self.getopt("try-topfile", "try_topfile")
- treedir = getTopdir(topfile)
- else:
- treedir = os.getcwd()
- d = getSourceStamp(vc, treedir, branch)
- d.addCallback(self._createJob_1)
- return d
- def _createJob_1(self, ss):
- self.sourcestamp = ss
- if self.connect == "ssh":
- patchlevel, diff = ss.patch
- self.jobfile = createJobfile(self.bsid,
- ss.branch or "", ss.revision,
- patchlevel, diff,
- self.builderNames)
-
- def deliverJob(self):
- # returns a Deferred that fires when the job has been delivered
- config = self.config
- opts = self.opts
-
- if self.connect == "ssh":
- tryhost = self.getopt("tryhost", "try_host")
- tryuser = self.getopt("username", "try_username")
- trydir = self.getopt("trydir", "try_dir")
-
- argv = ["ssh", "-l", tryuser, tryhost,
- "buildbot", "tryserver", "--jobdir", trydir]
- # now run this command and feed the contents of 'job' into stdin
-
- pp = RemoteTryPP(self.jobfile)
- p = reactor.spawnProcess(pp, argv[0], argv, os.environ)
- d = pp.d
- return d
- if self.connect == "pb":
- user = self.getopt("username", "try_username")
- passwd = self.getopt("passwd", "try_password")
- master = self.getopt("master", "try_master")
- tryhost, tryport = master.split(":")
- tryport = int(tryport)
- f = pb.PBClientFactory()
- d = f.login(credentials.UsernamePassword(user, passwd))
- reactor.connectTCP(tryhost, tryport, f)
- d.addCallback(self._deliverJob_pb)
- return d
- raise RuntimeError("unknown connecttype '%s', should be 'ssh' or 'pb'"
- % self.connect)
-
- def _deliverJob_pb(self, remote):
- ss = self.sourcestamp
- d = remote.callRemote("try",
- ss.branch, ss.revision, ss.patch,
- self.builderNames)
- d.addCallback(self._deliverJob_pb2)
- return d
- def _deliverJob_pb2(self, status):
- self.buildsetStatus = status
- return status
-
- def getStatus(self):
- # returns a Deferred that fires when the builds have finished, and
- # may emit status messages while we wait
- wait = bool(self.getopt("wait", "try_wait", False))
- if not wait:
- # TODO: emit the URL where they can follow the builds. This
- # requires contacting the Status server over PB and doing
- # getURLForThing() on the BuildSetStatus. To get URLs for
- # individual builds would require we wait for the builds to
- # start.
- print "not waiting for builds to finish"
- return
- d = self.running = defer.Deferred()
- if self.buildsetStatus:
- self._getStatus_1()
- # contact the status port
- # we're probably using the ssh style
- master = self.getopt("master", "masterstatus")
- host, port = master.split(":")
- port = int(port)
- self.announce("contacting the status port at %s:%d" % (host, port))
- f = pb.PBClientFactory()
- creds = credentials.UsernamePassword("statusClient", "clientpw")
- d = f.login(creds)
- reactor.connectTCP(host, port, f)
- d.addCallback(self._getStatus_ssh_1)
- return self.running
-
- def _getStatus_ssh_1(self, remote):
- # find a remotereference to the corresponding BuildSetStatus object
- self.announce("waiting for job to be accepted")
- g = BuildSetStatusGrabber(remote, self.bsid)
- d = g.grab()
- d.addCallback(self._getStatus_1)
- return d
-
- def _getStatus_1(self, res=None):
- if res:
- self.buildsetStatus = res
- # gather the set of BuildRequests
- d = self.buildsetStatus.callRemote("getBuildRequests")
- d.addCallback(self._getStatus_2)
-
- def _getStatus_2(self, brs):
- self.builderNames = []
- self.buildRequests = {}
-
- # self.builds holds the current BuildStatus object for each one
- self.builds = {}
-
- # self.outstanding holds the list of builderNames which haven't
- # finished yet
- self.outstanding = []
-
- # self.results holds the list of build results. It holds a tuple of
- # (result, text)
- self.results = {}
-
- # self.currentStep holds the name of the Step that each build is
- # currently running
- self.currentStep = {}
-
- # self.ETA holds the expected finishing time (absolute time since
- # epoch)
- self.ETA = {}
-
- for n,br in brs:
- self.builderNames.append(n)
- self.buildRequests[n] = br
- self.builds[n] = None
- self.outstanding.append(n)
- self.results[n] = [None,None]
- self.currentStep[n] = None
- self.ETA[n] = None
- # get new Builds for this buildrequest. We follow each one until
- # it finishes or is interrupted.
- br.callRemote("subscribe", self)
-
- # now that those queries are in transit, we can start the
- # display-status-every-30-seconds loop
- self.printloop = task.LoopingCall(self.printStatus)
- self.printloop.start(3, now=False)
-
-
- # these methods are invoked by the status objects we've subscribed to
-
- def remote_newbuild(self, bs, builderName):
- if self.builds[builderName]:
- self.builds[builderName].callRemote("unsubscribe", self)
- self.builds[builderName] = bs
- bs.callRemote("subscribe", self, 20)
- d = bs.callRemote("waitUntilFinished")
- d.addCallback(self._build_finished, builderName)
-
- def remote_stepStarted(self, buildername, build, stepname, step):
- self.currentStep[buildername] = stepname
-
- def remote_stepFinished(self, buildername, build, stepname, step, results):
- pass
-
- def remote_buildETAUpdate(self, buildername, build, eta):
- self.ETA[buildername] = now() + eta
-
- def _build_finished(self, bs, builderName):
- # we need to collect status from the newly-finished build. We don't
- # remove the build from self.outstanding until we've collected
- # everything we want.
- self.builds[builderName] = None
- self.ETA[builderName] = None
- self.currentStep[builderName] = "finished"
- d = bs.callRemote("getResults")
- d.addCallback(self._build_finished_2, bs, builderName)
- return d
- def _build_finished_2(self, results, bs, builderName):
- self.results[builderName][0] = results
- d = bs.callRemote("getText")
- d.addCallback(self._build_finished_3, builderName)
- return d
- def _build_finished_3(self, text, builderName):
- self.results[builderName][1] = text
-
- self.outstanding.remove(builderName)
- if not self.outstanding:
- # all done
- return self.statusDone()
-
- def printStatus(self):
- names = self.buildRequests.keys()
- names.sort()
- for n in names:
- if n not in self.outstanding:
- # the build is finished, and we have results
- code,text = self.results[n]
- t = builder.Results[code]
- if text:
- t += " (%s)" % " ".join(text)
- elif self.builds[n]:
- t = self.currentStep[n] or "building"
- if self.ETA[n]:
- t += " [ETA %ds]" % (self.ETA[n] - now())
- else:
- t = "no build"
- self.announce("%s: %s" % (n, t))
- self.announce("")
-
- def statusDone(self):
- self.printloop.stop()
- print "All Builds Complete"
- # TODO: include a URL for all failing builds
- names = self.buildRequests.keys()
- names.sort()
- happy = True
- for n in names:
- code,text = self.results[n]
- t = "%s: %s" % (n, builder.Results[code])
- if text:
- t += " (%s)" % " ".join(text)
- print t
- if code != builder.SUCCESS:
- happy = False
-
- if happy:
- self.exitcode = 0
- else:
- self.exitcode = 1
- self.running.callback(self.exitcode)
-
- def announce(self, message):
- if not self.quiet:
- print message
-
- def run(self):
- # we can't do spawnProcess until we're inside reactor.run(), so get
- # funky
- print "using '%s' connect method" % self.connect
- self.exitcode = 0
- d = defer.Deferred()
- d.addCallback(lambda res: self.createJob())
- d.addCallback(lambda res: self.announce("job created"))
- d.addCallback(lambda res: self.deliverJob())
- d.addCallback(lambda res: self.announce("job has been delivered"))
- d.addCallback(lambda res: self.getStatus())
- d.addErrback(log.err)
- d.addCallback(self.cleanup)
- d.addCallback(lambda res: reactor.stop())
-
- reactor.callLater(0, d.callback, None)
- reactor.run()
- sys.exit(self.exitcode)
-
- def logErr(self, why):
- log.err(why)
- print "error during 'try' processing"
- print why
-
- def cleanup(self, res=None):
- if self.buildsetStatus:
- self.buildsetStatus.broker.transport.loseConnection()
-
-
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/slave/__init__.py b/buildbot/buildbot-source/build/lib/buildbot/slave/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/slave/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/build/lib/buildbot/slave/bot.py b/buildbot/buildbot-source/build/lib/buildbot/slave/bot.py
deleted file mode 100644
index 40b9b4798..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/slave/bot.py
+++ /dev/null
@@ -1,495 +0,0 @@
-#! /usr/bin/python
-
-import time, os, os.path, re, sys
-
-from twisted.spread import pb
-from twisted.python import log, usage, failure
-from twisted.internet import reactor, defer
-from twisted.application import service, internet
-from twisted.cred import credentials
-
-from buildbot.util import now
-from buildbot.pbutil import ReconnectingPBClientFactory
-from buildbot.slave import registry
-# make sure the standard commands get registered
-from buildbot.slave import commands
-
-class NoCommandRunning(pb.Error):
- pass
-class WrongCommandRunning(pb.Error):
- pass
-class UnknownCommand(pb.Error):
- pass
-
-class Master:
- def __init__(self, host, port, username, password):
- self.host = host
- self.port = port
- self.username = username
- self.password = password
-
-class SlaveBuild:
-
- """This is an object that can hold state from one step to another in the
- same build. All SlaveCommands have access to it.
- """
- def __init__(self, builder):
- self.builder = builder
-
-class SlaveBuilder(pb.Referenceable, service.Service):
-
- """This is the local representation of a single Builder: it handles a
- single kind of build (like an all-warnings build). It has a name and a
- home directory. The rest of its behavior is determined by the master.
- """
-
- stopCommandOnShutdown = True
-
- # remote is a ref to the Builder object on the master side, and is set
- # when they attach. We use it to detect when the connection to the master
- # is severed.
- remote = None
-
- # .build points to a SlaveBuild object, a new one for each build
- build = None
-
- # .command points to a SlaveCommand instance, and is set while the step
- # is running. We use it to implement the stopBuild method.
- command = None
-
- # .remoteStep is a ref to the master-side BuildStep object, and is set
- # when the step is started
- remoteStep = None
-
- def __init__(self, name, not_really):
- #service.Service.__init__(self) # Service has no __init__ method
- self.setName(name)
- self.not_really = not_really
-
- def __repr__(self):
- return "<SlaveBuilder '%s'>" % self.name
-
- def setServiceParent(self, parent):
- service.Service.setServiceParent(self, parent)
- self.bot = self.parent
- # note that self.parent will go away when the buildmaster's config
- # file changes and this Builder is removed (possibly because it has
- # been changed, so the Builder will be re-added again in a moment).
- # This may occur during a build, while a step is running.
-
- def setBuilddir(self, builddir):
- assert self.parent
- self.builddir = builddir
- self.basedir = os.path.join(self.bot.basedir, self.builddir)
- if not os.path.isdir(self.basedir):
- os.mkdir(self.basedir)
-
- def stopService(self):
- service.Service.stopService(self)
- if self.stopCommandOnShutdown:
- self.stopCommand()
-
- def activity(self):
- bot = self.parent
- if bot:
- buildslave = bot.parent
- if buildslave:
- bf = buildslave.bf
- bf.activity()
-
- def remote_setMaster(self, remote):
- self.remote = remote
- self.remote.notifyOnDisconnect(self.lostRemote)
- def remote_print(self, message):
- log.msg("SlaveBuilder.remote_print(%s): message from master: %s" %
- (self.name, message))
- if message == "ping":
- return self.remote_ping()
-
- def remote_ping(self):
- log.msg("SlaveBuilder.remote_ping(%s)" % self)
- if self.bot and self.bot.parent:
- debugOpts = self.bot.parent.debugOpts
- if debugOpts.get("stallPings"):
- log.msg(" debug_stallPings")
- timeout, timers = debugOpts["stallPings"]
- d = defer.Deferred()
- t = reactor.callLater(timeout, d.callback, None)
- timers.append(t)
- return d
- if debugOpts.get("failPingOnce"):
- log.msg(" debug_failPingOnce")
- class FailPingError(pb.Error): pass
- del debugOpts['failPingOnce']
- raise FailPingError("debug_failPingOnce means we should fail")
-
- def lostRemote(self, remote):
- log.msg("lost remote")
- self.remote = None
-
- def lostRemoteStep(self, remotestep):
- log.msg("lost remote step")
- self.remoteStep = None
- if self.stopCommandOnShutdown:
- self.stopCommand()
-
- # the following are Commands that can be invoked by the master-side
- # Builder
- def remote_startBuild(self):
- """This is invoked before the first step of any new build is run. It
- creates a new SlaveBuild object, which holds slave-side state from
- one step to the next."""
- self.build = SlaveBuild(self)
- log.msg("%s.startBuild" % self)
-
- def remote_startCommand(self, stepref, stepId, command, args):
- """
- This gets invoked by L{buildbot.process.step.RemoteCommand.start}, as
- part of various master-side BuildSteps, to start various commands
- that actually do the build. I return nothing. Eventually I will call
- .commandComplete() to notify the master-side RemoteCommand that I'm
- done.
- """
-
- self.activity()
-
- if self.command:
- log.msg("leftover command, dropping it")
- self.stopCommand()
-
- try:
- factory, version = registry.commandRegistry[command]
- except KeyError:
- raise UnknownCommand, "unrecognized SlaveCommand '%s'" % command
- self.command = factory(self, stepId, args)
-
- log.msg(" startCommand:%s [id %s]" % (command,stepId))
- self.remoteStep = stepref
- self.remoteStep.notifyOnDisconnect(self.lostRemoteStep)
- self.command.running = True
- d = defer.maybeDeferred(self.command.start)
- d.addCallback(lambda res: None)
- d.addBoth(self.commandComplete)
- return None
-
- def remote_interruptCommand(self, stepId, why):
- """Halt the current step."""
- log.msg("asked to interrupt current command: %s" % why)
- self.activity()
- if not self.command:
- # TODO: just log it, a race could result in their interrupting a
- # command that wasn't actually running
- log.msg(" .. but none was running")
- return
- self.command.interrupt()
-
-
- def stopCommand(self):
- """Make any currently-running command die, with no further status
- output. This is used when the buildslave is shutting down or the
- connection to the master has been lost. Interrupt the command,
- silence it, and then forget about it."""
- if not self.command:
- return
- log.msg("stopCommand: halting current command %s" % self.command)
- self.command.running = False # shut up!
- self.command.interrupt() # die!
- self.command = None # forget you!
-
- # sendUpdate is invoked by the Commands we spawn
- def sendUpdate(self, data):
- """This sends the status update to the master-side
- L{buildbot.process.step.RemoteCommand} object, giving it a sequence
- number in the process. It adds the update to a queue, and asks the
- master to acknowledge the update so it can be removed from that
- queue."""
-
- if not self.running:
- # .running comes from service.Service, and says whether the
- # service is running or not. If we aren't running, don't send any
- # status messages.
- return
- # the update[1]=0 comes from the leftover 'updateNum', which the
- # master still expects to receive. Provide it to avoid significant
- # interoperability issues between new slaves and old masters.
- if self.remoteStep:
- update = [data, 0]
- updates = [update]
- d = self.remoteStep.callRemote("update", updates)
- d.addCallback(self.ackUpdate)
- d.addErrback(self._ackFailed, "SlaveBuilder.sendUpdate")
-
- def ackUpdate(self, acknum):
- self.activity() # update the "last activity" timer
-
- def ackComplete(self, dummy):
- self.activity() # update the "last activity" timer
-
- def _ackFailed(self, why, where):
- log.msg("SlaveBuilder._ackFailed:", where)
- #log.err(why) # we don't really care
-
-
- # this is fired by the Deferred attached to each Command
- def commandComplete(self, failure):
- if failure:
- log.msg("SlaveBuilder.commandFailed", self.command)
- log.err(failure)
- # failure, if present, is a failure.Failure. To send it across
- # the wire, we must turn it into a pb.CopyableFailure.
- failure = pb.CopyableFailure(failure)
- failure.unsafeTracebacks = True
- else:
- # failure is None
- log.msg("SlaveBuilder.commandComplete", self.command)
- self.command = None
- if not self.running:
- return
- if self.remoteStep:
- self.remoteStep.dontNotifyOnDisconnect(self.lostRemoteStep)
- d = self.remoteStep.callRemote("complete", failure)
- d.addCallback(self.ackComplete)
- d.addErrback(self._ackFailed, "sendComplete")
- self.remoteStep = None
-
-
- def remote_shutdown(self):
- print "slave shutting down on command from master"
- reactor.stop()
-
-
-class Bot(pb.Referenceable, service.MultiService):
- """I represent the slave-side bot."""
- usePTY = None
- name = "bot"
-
- def __init__(self, basedir, usePTY, not_really=0):
- service.MultiService.__init__(self)
- self.basedir = basedir
- self.usePTY = usePTY
- self.not_really = not_really
- self.builders = {}
-
- def startService(self):
- assert os.path.isdir(self.basedir)
- service.MultiService.startService(self)
-
- def remote_getDirs(self):
- return filter(lambda d: os.path.isdir(d), os.listdir(self.basedir))
-
- def remote_getCommands(self):
- commands = {}
- for name, (factory, version) in registry.commandRegistry.items():
- commands[name] = version
- return commands
-
- def remote_setBuilderList(self, wanted):
- retval = {}
- for (name, builddir) in wanted:
- b = self.builders.get(name, None)
- if b:
- if b.builddir != builddir:
- log.msg("changing builddir for builder %s from %s to %s" \
- % (name, b.builddir, builddir))
- b.setBuilddir(builddir)
- else:
- b = SlaveBuilder(name, self.not_really)
- b.usePTY = self.usePTY
- b.setServiceParent(self)
- b.setBuilddir(builddir)
- self.builders[name] = b
- retval[name] = b
- for name in self.builders.keys():
- if not name in map(lambda a: a[0], wanted):
- log.msg("removing old builder %s" % name)
- self.builders[name].disownServiceParent()
- del(self.builders[name])
- return retval
-
- def remote_print(self, message):
- log.msg("message from master:", message)
-
- def remote_getSlaveInfo(self):
- """This command retrieves data from the files in SLAVEDIR/info/* and
- sends the contents to the buildmaster. These are used to describe
- the slave and its configuration, and should be created and
- maintained by the slave administrator. They will be retrieved each
- time the master-slave connection is established.
- """
-
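- # Illustrative example (editor's addition, not part of the original file;
- # the file names and contents are hypothetical): with SLAVEDIR/info/admin
- # and SLAVEDIR/info/host present on disk, this method would return
- # something like
- #   {'admin': 'Your Name <you@example.org>\n',
- #    'host': 'Debian sarge, twisted 2.0, gcc 3.3\n'}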
- files = {}
- basedir = os.path.join(self.basedir, "info")
- if not os.path.isdir(basedir):
- return files
- for f in os.listdir(basedir):
- filename = os.path.join(basedir, f)
- if os.path.isfile(filename):
- files[f] = open(filename, "r").read()
- return files
-
- def debug_forceBuild(self, name):
- d = self.perspective.callRemote("forceBuild", name)
- d.addCallbacks(log.msg, log.err)
-
-class BotFactory(ReconnectingPBClientFactory):
- # 'keepaliveInterval' serves two purposes. The first is to keep the
- # connection alive: it guarantees that there will be at least some
- # traffic once every 'keepaliveInterval' seconds, which may help keep an
- # interposed NAT gateway from dropping the address mapping because it
- # thinks the connection has been abandoned. The second is to put an upper
- # limit on how long the buildmaster might have gone away before we notice
- # it. For this second purpose, we insist upon seeing *some* evidence of
- # the buildmaster at least once every 'keepaliveInterval' seconds.
- keepaliveInterval = None # None = do not use keepalives
-
- # 'keepaliveTimeout' seconds before the interval expires, we will send a
- # keepalive request, both to add some traffic to the connection, and to
- # prompt a response from the master in case all our builders are idle. We
- # don't insist upon receiving a timely response from this message: a slow
- # link might put the request at the wrong end of a large build message.
- keepaliveTimeout = 30 # how long we will go without a response
-
- keepaliveTimer = None
- activityTimer = None
- lastActivity = 0
- unsafeTracebacks = 1
- perspective = None
-
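- # Worked timing example (editor's addition, not in the original file),
- # using the fallback interval set in gotPerspective() and the default
- # keepaliveTimeout above:
- #   keepaliveInterval = 10*60  ->  600 seconds
- #   keepaliveTimeout  = 30
- #   startTimers(): doKeepalive fires at 600 - 30 = 570s,
- #                  checkActivity fires at 600s
- # i.e. the keepalive request goes out 30 seconds before the activity
- # deadline, giving the master time to respond.
-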
- def __init__(self, keepaliveInterval, keepaliveTimeout):
- ReconnectingPBClientFactory.__init__(self)
- self.keepaliveInterval = keepaliveInterval
- self.keepaliveTimeout = keepaliveTimeout
-
- def startedConnecting(self, connector):
- ReconnectingPBClientFactory.startedConnecting(self, connector)
- self.connector = connector
-
- def gotPerspective(self, perspective):
- ReconnectingPBClientFactory.gotPerspective(self, perspective)
- self.perspective = perspective
- try:
- perspective.broker.transport.setTcpKeepAlive(1)
- except:
- log.msg("unable to set SO_KEEPALIVE")
- if not self.keepaliveInterval:
- self.keepaliveInterval = 10*60
- self.activity()
- if self.keepaliveInterval:
- log.msg("sending application-level keepalives every %d seconds" \
- % self.keepaliveInterval)
- self.startTimers()
-
- def clientConnectionFailed(self, connector, reason):
- self.connector = None
- ReconnectingPBClientFactory.clientConnectionFailed(self,
- connector, reason)
-
- def clientConnectionLost(self, connector, reason):
- self.connector = None
- self.stopTimers()
- self.perspective = None
- ReconnectingPBClientFactory.clientConnectionLost(self,
- connector, reason)
-
- def startTimers(self):
- assert self.keepaliveInterval
- assert not self.keepaliveTimer
- assert not self.activityTimer
- # Insist that doKeepalive fires before checkActivity. Really, it
- # needs to happen at least one RTT beforehand.
- assert self.keepaliveInterval > self.keepaliveTimeout
-
- # arrange to send a keepalive a little while before our deadline
- when = self.keepaliveInterval - self.keepaliveTimeout
- self.keepaliveTimer = reactor.callLater(when, self.doKeepalive)
- # and check for activity too
- self.activityTimer = reactor.callLater(self.keepaliveInterval,
- self.checkActivity)
-
- def stopTimers(self):
- if self.keepaliveTimer:
- self.keepaliveTimer.cancel()
- self.keepaliveTimer = None
- if self.activityTimer:
- self.activityTimer.cancel()
- self.activityTimer = None
-
- def activity(self, res=None):
- self.lastActivity = now()
-
- def doKeepalive(self):
- # send the keepalive request. If it fails outright, the connection
- # was already dropped, so just log and ignore.
- self.keepaliveTimer = None
- log.msg("sending app-level keepalive")
- d = self.perspective.callRemote("keepalive")
- d.addCallback(self.activity)
- d.addErrback(self.keepaliveLost)
-
- def keepaliveLost(self, f):
- log.msg("BotFactory.keepaliveLost")
-
- def checkActivity(self):
- self.activityTimer = None
- if self.lastActivity + self.keepaliveInterval < now():
- log.msg("BotFactory.checkActivity: nothing from master for "
- "%d secs" % (now() - self.lastActivity))
- self.perspective.broker.transport.loseConnection()
- return
- self.startTimers()
-
- def stopFactory(self):
- ReconnectingPBClientFactory.stopFactory(self)
- self.stopTimers()
-
-
-class BuildSlave(service.MultiService):
- botClass = Bot
-
- # debugOpts is a dictionary used during unit tests.
-
- # debugOpts['stallPings'] can be set to a tuple of (timeout, []). Any
- # calls to remote_print will stall for 'timeout' seconds before
- # returning. The DelayedCalls used to implement this are stashed in the
- # list so they can be cancelled later.
-
- # debugOpts['failPingOnce'] can be set to True to make the slaveping fail
- # exactly once.
-
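- # Illustrative value (editor's addition; hypothetical, for the unit-test
- # usage described above):
- #   debugOpts = {'stallPings': (10, []), 'failPingOnce': True}
-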
- def __init__(self, host, port, name, passwd, basedir, keepalive,
- usePTY, keepaliveTimeout=30, umask=None, debugOpts={}):
- service.MultiService.__init__(self)
- self.debugOpts = debugOpts.copy()
- bot = self.botClass(basedir, usePTY)
- bot.setServiceParent(self)
- self.bot = bot
- if keepalive == 0:
- keepalive = None
- self.umask = umask
- bf = self.bf = BotFactory(keepalive, keepaliveTimeout)
- bf.startLogin(credentials.UsernamePassword(name, passwd), client=bot)
- self.connection = c = internet.TCPClient(host, port, bf)
- c.setServiceParent(self)
-
- def waitUntilDisconnected(self):
- # utility method for testing. Returns a Deferred that will fire when
- # we lose the connection to the master.
- if not self.bf.perspective:
- return defer.succeed(None)
- d = defer.Deferred()
- self.bf.perspective.notifyOnDisconnect(lambda res: d.callback(None))
- return d
-
- def startService(self):
- if self.umask is not None:
- os.umask(self.umask)
- service.MultiService.startService(self)
-
- def stopService(self):
- self.bf.continueTrying = 0
- self.bf.stopTrying()
- service.MultiService.stopService(self)
- # now kill the TCP connection
- # twisted >2.0.1 does this for us, and leaves _connection=None
- if self.connection._connection:
- self.connection._connection.disconnect()
diff --git a/buildbot/buildbot-source/build/lib/buildbot/slave/commands.py b/buildbot/buildbot-source/build/lib/buildbot/slave/commands.py
deleted file mode 100644
index 24527d6e0..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/slave/commands.py
+++ /dev/null
@@ -1,1822 +0,0 @@
-# -*- test-case-name: buildbot.test.test_slavecommand -*-
-
-import os, os.path, re, signal, shutil, types, time
-
-from twisted.internet.protocol import ProcessProtocol
-from twisted.internet import reactor, defer
-from twisted.python import log, failure, runtime
-
-from buildbot.twcompat import implements, which
-from buildbot.slave.interfaces import ISlaveCommand
-from buildbot.slave.registry import registerSlaveCommand
-
-cvs_ver = '$Revision$'[1+len("Revision: "):-2]
-
-# version history:
-# >=1.17: commands are interruptable
-# >=1.28: Arch understands 'revision', added Bazaar
-# >=1.33: Source classes understand 'retry'
-# >=1.39: Source classes correctly handle changes in branch (except Git)
-# Darcs accepts 'revision' (now all do but Git) (well, and P4Sync)
-# Arch/Baz should accept 'build-config'
-
-class CommandInterrupted(Exception):
- pass
-class TimeoutError(Exception):
- pass
-
-class AbandonChain(Exception):
- """A series of chained steps can raise this exception to indicate that
- one of the intermediate ShellCommands has failed, such that there is no
- point in running the remainder. 'rc' should be the non-zero exit code of
- the failing ShellCommand."""
-
- def __repr__(self):
- return "<AbandonChain rc=%s>" % self.args[0]
-
-def getCommand(name):
- possibles = which(name)
- if not possibles:
- raise RuntimeError("Couldn't find executable for '%s'" % name)
- return possibles[0]
-
-def rmdirRecursive(dir):
- """This is a replacement for shutil.rmtree that works better under
- windows. Thanks to Bear at the OSAF for the code."""
- if not os.path.exists(dir):
- return
-
- if os.path.islink(dir):
- os.remove(dir)
- return
-
- for name in os.listdir(dir):
- full_name = os.path.join(dir, name)
- # on Windows, if we don't have write permission we can't remove
- # the file/directory either, so turn that on
- if os.name == 'nt':
- if not os.access(full_name, os.W_OK):
- os.chmod(full_name, 0600)
- if os.path.isdir(full_name):
- rmdirRecursive(full_name)
- else:
- # print "removing file", full_name
- os.remove(full_name)
- os.rmdir(dir)
-
-class ShellCommandPP(ProcessProtocol):
- debug = False
-
- def __init__(self, command):
- self.command = command
-
- def connectionMade(self):
- if self.debug:
- log.msg("ShellCommandPP.connectionMade")
- if not self.command.process:
- if self.debug:
- log.msg(" assigning self.command.process: %s" %
- (self.transport,))
- self.command.process = self.transport
-
- if self.command.stdin:
- if self.debug: log.msg(" writing to stdin")
- self.transport.write(self.command.stdin)
-
- # TODO: maybe we shouldn't close stdin when using a PTY. I can't test
- # this yet, recent debian glibc has a bug which causes thread-using
- # test cases to SIGHUP trial, and the workaround is to either run
- # the whole test with /bin/sh -c " ".join(argv) (way gross) or to
- # not use a PTY. Once the bug is fixed, I'll be able to test what
- # happens when you close stdin on a pty. My concern is that it will
- # SIGHUP the child (since we are, in a sense, hanging up on them).
- # But it may well be that keeping stdout open prevents the SIGHUP
- # from being sent.
- #if not self.command.usePTY:
-
- if self.debug: log.msg(" closing stdin")
- self.transport.closeStdin()
-
- def outReceived(self, data):
- if self.debug:
- log.msg("ShellCommandPP.outReceived")
- self.command.addStdout(data)
-
- def errReceived(self, data):
- if self.debug:
- log.msg("ShellCommandPP.errReceived")
- self.command.addStderr(data)
-
- def processEnded(self, status_object):
- if self.debug:
- log.msg("ShellCommandPP.processEnded", status_object)
- # status_object is a Failure wrapped around an
- # error.ProcessTerminated or an error.ProcessDone.
- # requires twisted >= 1.0.4 to overcome a bug in process.py
- sig = status_object.value.signal
- rc = status_object.value.exitCode
- self.command.finished(sig, rc)
-
-
-class ShellCommand:
- # This is a helper class, used by SlaveCommands to run programs in a
- # child shell.
-
- notreally = False
- BACKUP_TIMEOUT = 5
- KILL = "KILL"
-
- def __init__(self, builder, command,
- workdir, environ=None,
- sendStdout=True, sendStderr=True, sendRC=True,
- timeout=None, stdin=None, keepStdout=False):
- """
-
- @param keepStdout: if True, we keep a copy of all the stdout text
- that we've seen. This copy is available in
- self.stdout, which can be read after the command
- has finished.
- """
-
- self.builder = builder
- self.command = command
- self.sendStdout = sendStdout
- self.sendStderr = sendStderr
- self.sendRC = sendRC
- self.workdir = workdir
- self.environ = os.environ.copy()
- if environ:
- if (self.environ.has_key('PYTHONPATH')
- and environ.has_key('PYTHONPATH')):
- # special case, prepend the builder's items to the existing
- # ones. This will break if you send over empty strings, so
- # don't do that.
- environ['PYTHONPATH'] = (environ['PYTHONPATH']
- + os.pathsep
- + self.environ['PYTHONPATH'])
- # this will proceed to replace the old one
- self.environ.update(environ)
- self.stdin = stdin
- self.timeout = timeout
- self.timer = None
- self.keepStdout = keepStdout
-
- # usePTY=True is a convenience for cleaning up all children and
- # grandchildren of a hung command. Fall back to usePTY=False on
- # systems where ptys cause problems.
-
- self.usePTY = self.builder.usePTY
- if runtime.platformType != "posix":
- self.usePTY = False # PTYs are posix-only
- if stdin is not None:
- # for .closeStdin to matter, we must use a pipe, not a PTY
- self.usePTY = False
-
- def __repr__(self):
- return "<slavecommand.ShellCommand '%s'>" % self.command
-
- def sendStatus(self, status):
- self.builder.sendUpdate(status)
-
- def start(self):
- # return a Deferred which fires (with the exit code) when the command
- # completes
- if self.keepStdout:
- self.stdout = ""
- self.deferred = defer.Deferred()
- try:
- self._startCommand()
- except:
- log.msg("error in ShellCommand._startCommand")
- log.err()
- # pretend it was a shell error
- self.deferred.errback(AbandonChain(-1))
- return self.deferred
-
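- # Editor's illustrative sketch (not part of the original file): how a
- # SlaveCommand typically drives this helper -- compare SlaveShellCommand
- # further below in this file. The command and callback are hypothetical.
- #   c = ShellCommand(self.builder, ['make', 'all'], workdir, timeout=1200)
- #   d = c.start()                         # Deferred fires with the exit code
- #   d.addCallback(self._abandonOnFailure) # e.g. fail the chain on rc != 0
-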
- def _startCommand(self):
- log.msg("ShellCommand._startCommand")
- if self.notreally:
- self.sendStatus({'header': "command '%s' in dir %s" % \
- (self.command, self.workdir)})
- self.sendStatus({'header': "(not really)\n"})
- self.finished(None, 0)
- return
-
- self.pp = ShellCommandPP(self)
-
- if type(self.command) in types.StringTypes:
- if runtime.platformType == 'win32':
- argv = ['/bin/bash', '-c', self.command]
- else:
- # for posix, this tree runs the command through /bin/bash -c.
- # for other non-posix, well, doesn't hurt to try
- argv = ['/bin/bash', '-c', self.command]
- else:
- if runtime.platformType == 'win32':
- argv = [os.environ['COMSPEC'], '/c'] + list(self.command)
- else:
- argv = self.command
-
- # self.stdin is handled in ShellCommandPP.connectionMade
-
- # first header line is the command in plain text, argv joined with
- # spaces. You should be able to cut-and-paste this into a shell to
- # obtain the same results. If there are spaces in the arguments, too
- # bad.
- msg = " ".join(argv)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # then comes the secondary information
- msg = " in dir %s" % (self.workdir,)
- if self.timeout:
- msg += " (timeout %d secs)" % (self.timeout,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # then the argv array, to resolve any ambiguity
- msg = " argv: %s" % (argv,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # then the environment, since it sometimes causes problems
- msg = " environment: %s" % (self.environ,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # win32eventreactor's spawnProcess (under twisted <= 2.0.1) returns
- # None, as opposed to all the posixbase-derived reactors (which
- # return the new Process object). This is a nuisance. We can make up
- # for it by having the ProcessProtocol give us their .transport
- # attribute after they get one. I'd prefer to get it from
- # spawnProcess because I'm concerned about returning from this method
- # without having a valid self.process to work with. (if kill() were
- # called right after we return, but somehow before connectionMade
- # were called, then kill() would blow up).
- self.process = None
- p = reactor.spawnProcess(self.pp, argv[0], argv,
- self.environ,
- self.workdir,
- usePTY=self.usePTY)
- # connectionMade might have been called during spawnProcess
- if not self.process:
- self.process = p
-
- # connectionMade also closes stdin as long as we're not using a PTY.
- # This is intended to kill off inappropriately interactive commands
- # better than the (long) hung-command timeout. ProcessPTY should be
- # enhanced to allow the same childFDs argument that Process takes,
- # which would let us connect stdin to /dev/null .
-
- if self.timeout:
- self.timer = reactor.callLater(self.timeout, self.doTimeout)
-
- def addStdout(self, data):
- if self.sendStdout: self.sendStatus({'stdout': data})
- if self.keepStdout: self.stdout += data
- if self.timer: self.timer.reset(self.timeout)
-
- def addStderr(self, data):
- if self.sendStderr: self.sendStatus({'stderr': data})
- if self.timer: self.timer.reset(self.timeout)
-
- def finished(self, sig, rc):
- log.msg("command finished with signal %s, exit code %s" % (sig,rc))
- if sig is not None:
- rc = -1
- if self.sendRC:
- if sig is not None:
- self.sendStatus(
- {'header': "process killed by signal %d\n" % sig})
- self.sendStatus({'rc': rc})
- if self.timer:
- self.timer.cancel()
- self.timer = None
- d = self.deferred
- self.deferred = None
- if d:
- d.callback(rc)
- else:
- log.msg("Hey, command %s finished twice" % self)
-
- def failed(self, why):
- log.msg("ShellCommand.failed: command failed: %s" % (why,))
- if self.timer:
- self.timer.cancel()
- self.timer = None
- d = self.deferred
- self.deferred = None
- if d:
- d.errback(why)
- else:
- log.msg("Hey, command %s finished twice" % self)
-
- def doTimeout(self):
- self.timer = None
- msg = "command timed out: %d seconds without output" % self.timeout
- self.kill(msg)
-
- def kill(self, msg):
- # This may be called by the timeout, or when the user has decided to
- # abort this build.
- if self.timer:
- self.timer.cancel()
- self.timer = None
- if hasattr(self.process, "pid"):
- msg += ", killing pid %d" % self.process.pid
- log.msg(msg)
- self.sendStatus({'header': "\n" + msg + "\n"})
-
- hit = 0
- if runtime.platformType == "posix":
- try:
- # really want to kill off all child processes too. Process
- # Groups are ideal for this, but that requires
- # spawnProcess(usePTY=1). Try both ways in case process was
- # not started that way.
-
- # the test suite sets self.KILL=None to tell us we should
- # only pretend to kill the child. This lets us test the
- # backup timer.
-
- sig = None
- if self.KILL is not None:
- sig = getattr(signal, "SIG"+ self.KILL, None)
-
- if self.KILL == None:
- log.msg("self.KILL==None, only pretending to kill child")
- elif sig is None:
- log.msg("signal module is missing SIG%s" % self.KILL)
- elif not hasattr(os, "kill"):
- log.msg("os module is missing the 'kill' function")
- else:
- log.msg("trying os.kill(-pid, %d)" % (sig,))
- os.kill(-self.process.pid, sig)
- log.msg(" signal %s sent successfully" % sig)
- hit = 1
- except OSError:
- # probably no-such-process, maybe because there is no process
- # group
- pass
- if not hit:
- try:
- if self.KILL is None:
- log.msg("self.KILL==None, only pretending to kill child")
- else:
- log.msg("trying process.signalProcess('KILL')")
- self.process.signalProcess(self.KILL)
- log.msg(" signal %s sent successfully" % (self.KILL,))
- hit = 1
- except OSError:
- # could be no-such-process, because they finished very recently
- pass
- if not hit:
- log.msg("signalProcess/os.kill failed both times")
-
- if runtime.platformType == "posix":
- # we only do this under posix because the win32eventreactor
- # blocks here until the process has terminated, while closing
- # stderr. This is weird.
- self.pp.transport.loseConnection()
-
- # finished ought to be called momentarily. Just in case it isn't,
- # set a timer which will abandon the command.
- self.timer = reactor.callLater(self.BACKUP_TIMEOUT,
- self.doBackupTimeout)
-
- def doBackupTimeout(self):
- log.msg("we tried to kill the process, and it wouldn't die.."
- " finish anyway")
- self.timer = None
- self.sendStatus({'header': "SIGKILL failed to kill process\n"})
- if self.sendRC:
- self.sendStatus({'header': "using fake rc=-1\n"})
- self.sendStatus({'rc': -1})
- self.failed(TimeoutError("SIGKILL failed to kill process"))
-
-
-class TCSHShellCommand:
- # This is a helper class, used by SlaveCommands to run programs in a
- # child tcsh shell.
-
- notreally = False
- BACKUP_TIMEOUT = 5
- KILL = "KILL"
-
- def __init__(self, builder, command,
- workdir, environ=None,
- sendStdout=True, sendStderr=True, sendRC=True,
- timeout=None, stdin=None, keepStdout=False):
- """
-
- @param keepStdout: if True, we keep a copy of all the stdout text
- that we've seen. This copy is available in
- self.stdout, which can be read after the command
- has finished.
- """
-
- self.builder = builder
- self.command = command
- self.sendStdout = sendStdout
- self.sendStderr = sendStderr
- self.sendRC = sendRC
- self.workdir = workdir
- self.environ = os.environ.copy()
- if environ:
- if (self.environ.has_key('PYTHONPATH')
- and environ.has_key('PYTHONPATH')):
- # special case, prepend the builder's items to the existing
- # ones. This will break if you send over empty strings, so
- # don't do that.
- environ['PYTHONPATH'] = (environ['PYTHONPATH']
- + os.pathsep
- + self.environ['PYTHONPATH'])
- # this will proceed to replace the old one
- self.environ.update(environ)
- self.stdin = stdin
- self.timeout = timeout
- self.timer = None
- self.keepStdout = keepStdout
-
- # usePTY=True is a convenience for cleaning up all children and
- # grandchildren of a hung command. Fall back to usePTY=False on
- # systems where ptys cause problems.
-
- self.usePTY = self.builder.usePTY
- if runtime.platformType != "posix":
- self.usePTY = False # PTYs are posix-only
- if stdin is not None:
- # for .closeStdin to matter, we must use a pipe, not a PTY
- self.usePTY = False
-
- def __repr__(self):
- return "<slavecommand.ShellCommand '%s'>" % self.command
-
- def sendStatus(self, status):
- self.builder.sendUpdate(status)
-
- def start(self):
- # return a Deferred which fires (with the exit code) when the command
- # completes
- if self.keepStdout:
- self.stdout = ""
- self.deferred = defer.Deferred()
- try:
- self._startCommand()
- except:
- log.msg("error in ShellCommand._startCommand")
- log.err()
- # pretend it was a shell error
- self.deferred.errback(AbandonChain(-1))
- return self.deferred
-
- def _startCommand(self):
- log.msg("ShellCommand._startCommand")
- if self.notreally:
- self.sendStatus({'header': "command '%s' in dir %s" % \
- (self.command, self.workdir)})
- self.sendStatus({'header': "(not really)\n"})
- self.finished(None, 0)
- return
-
- self.pp = ShellCommandPP(self)
-
- if type(self.command) in types.StringTypes:
- if runtime.platformType == 'win32':
- argv = ['/usr/bin/tcsh', '-c', self.command]
- else:
- # for posix, this command runs through /usr/bin/tcsh -c.
- # for other non-posix, well, doesn't hurt to try
- argv = ['/usr/bin/tcsh', '-c', self.command]
- else:
- if runtime.platformType == 'win32':
- argv = [os.environ['COMSPEC'], '/c'] + list(self.command)
- else:
- argv = self.command
-
- # self.stdin is handled in ShellCommandPP.connectionMade
-
- # first header line is the command in plain text, argv joined with
- # spaces. You should be able to cut-and-paste this into a shell to
- # obtain the same results. If there are spaces in the arguments, too
- # bad.
- msg = " ".join(argv)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # then comes the secondary information
- msg = " in dir %s" % (self.workdir,)
- if self.timeout:
- msg += " (timeout %d secs)" % (self.timeout,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # then the argv array, to resolve any ambiguity
- msg = " argv: %s" % (argv,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # then the environment, since it sometimes causes problems
- msg = " environment: %s" % (self.environ,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # win32eventreactor's spawnProcess (under twisted <= 2.0.1) returns
- # None, as opposed to all the posixbase-derived reactors (which
- # return the new Process object). This is a nuisance. We can make up
- # for it by having the ProcessProtocol give us their .transport
- # attribute after they get one. I'd prefer to get it from
- # spawnProcess because I'm concerned about returning from this method
- # without having a valid self.process to work with. (if kill() were
- # called right after we return, but somehow before connectionMade
- # were called, then kill() would blow up).
- self.process = None
- p = reactor.spawnProcess(self.pp, argv[0], argv,
- self.environ,
- self.workdir,
- usePTY=self.usePTY)
- # connectionMade might have been called during spawnProcess
- if not self.process:
- self.process = p
-
- # connectionMade also closes stdin as long as we're not using a PTY.
- # This is intended to kill off inappropriately interactive commands
- # better than the (long) hung-command timeout. ProcessPTY should be
- # enhanced to allow the same childFDs argument that Process takes,
- # which would let us connect stdin to /dev/null .
-
- if self.timeout:
- self.timer = reactor.callLater(self.timeout, self.doTimeout)
-
- def addStdout(self, data):
- if self.sendStdout: self.sendStatus({'stdout': data})
- if self.keepStdout: self.stdout += data
- if self.timer: self.timer.reset(self.timeout)
-
- def addStderr(self, data):
- if self.sendStderr: self.sendStatus({'stderr': data})
- if self.timer: self.timer.reset(self.timeout)
-
- def finished(self, sig, rc):
- log.msg("command finished with signal %s, exit code %s" % (sig,rc))
- if sig is not None:
- rc = -1
- if self.sendRC:
- if sig is not None:
- self.sendStatus(
- {'header': "process killed by signal %d\n" % sig})
- self.sendStatus({'rc': rc})
- if self.timer:
- self.timer.cancel()
- self.timer = None
- d = self.deferred
- self.deferred = None
- if d:
- d.callback(rc)
- else:
- log.msg("Hey, command %s finished twice" % self)
-
- def failed(self, why):
- log.msg("ShellCommand.failed: command failed: %s" % (why,))
- if self.timer:
- self.timer.cancel()
- self.timer = None
- d = self.deferred
- self.deferred = None
- if d:
- d.errback(why)
- else:
- log.msg("Hey, command %s finished twice" % self)
-
- def doTimeout(self):
- self.timer = None
- msg = "command timed out: %d seconds without output" % self.timeout
- self.kill(msg)
-
- def kill(self, msg):
- # This may be called by the timeout, or when the user has decided to
- # abort this build.
- if self.timer:
- self.timer.cancel()
- self.timer = None
- if hasattr(self.process, "pid"):
- msg += ", killing pid %d" % self.process.pid
- log.msg(msg)
- self.sendStatus({'header': "\n" + msg + "\n"})
-
- hit = 0
- if runtime.platformType == "posix":
- try:
- # really want to kill off all child processes too. Process
- # Groups are ideal for this, but that requires
- # spawnProcess(usePTY=1). Try both ways in case process was
- # not started that way.
-
- # the test suite sets self.KILL=None to tell us we should
- # only pretend to kill the child. This lets us test the
- # backup timer.
-
- sig = None
- if self.KILL is not None:
- sig = getattr(signal, "SIG"+ self.KILL, None)
-
- if self.KILL == None:
- log.msg("self.KILL==None, only pretending to kill child")
- elif sig is None:
- log.msg("signal module is missing SIG%s" % self.KILL)
- elif not hasattr(os, "kill"):
- log.msg("os module is missing the 'kill' function")
- else:
- log.msg("trying os.kill(-pid, %d)" % (sig,))
- os.kill(-self.process.pid, sig)
- log.msg(" signal %s sent successfully" % sig)
- hit = 1
- except OSError:
- # probably no-such-process, maybe because there is no process
- # group
- pass
- if not hit:
- try:
- if self.KILL is None:
- log.msg("self.KILL==None, only pretending to kill child")
- else:
- log.msg("trying process.signalProcess('KILL')")
- self.process.signalProcess(self.KILL)
- log.msg(" signal %s sent successfully" % (self.KILL,))
- hit = 1
- except OSError:
- # could be no-such-process, because they finished very recently
- pass
- if not hit:
- log.msg("signalProcess/os.kill failed both times")
-
- if runtime.platformType == "posix":
- # we only do this under posix because the win32eventreactor
- # blocks here until the process has terminated, while closing
- # stderr. This is weird.
- self.pp.transport.loseConnection()
-
- # finished ought to be called momentarily. Just in case it isn't,
- # set a timer which will abandon the command.
- self.timer = reactor.callLater(self.BACKUP_TIMEOUT,
- self.doBackupTimeout)
-
- def doBackupTimeout(self):
- log.msg("we tried to kill the process, and it wouldn't die.."
- " finish anyway")
- self.timer = None
- self.sendStatus({'header': "SIGKILL failed to kill process\n"})
- if self.sendRC:
- self.sendStatus({'header': "using fake rc=-1\n"})
- self.sendStatus({'rc': -1})
- self.failed(TimeoutError("SIGKILL failed to kill process"))
-
-
-class Command:
- if implements:
- implements(ISlaveCommand)
- else:
- __implements__ = ISlaveCommand
-
- """This class defines one command that can be invoked by the build master.
- The command is executed on the slave side, and always sends back a
- completion message when it finishes. It may also send intermediate status
- as it runs (by calling builder.sendStatus). Some commands can be
- interrupted (either by the build master or a local timeout), in which
- case the step is expected to complete normally with a status message that
- indicates an error occurred.
-
- These commands are used by BuildSteps on the master side. Each kind of
- BuildStep uses a single Command. The slave must implement all the
- Commands required by the set of BuildSteps used for any given build:
- this is checked at startup time.
-
- All Commands are constructed with the same signature:
- c = CommandClass(builder, args)
- where 'builder' is the parent SlaveBuilder object, and 'args' is a
- dict that is interpreted per-command.
-
- The setup(args) method is available for setup, and is run from __init__.
-
- The Command is started with start(). This method must be implemented in a
- subclass, and it should return a Deferred. When your step is done, you
- should fire the Deferred (the results are not used). If the command is
- interrupted, it should fire the Deferred anyway.
-
- While the command runs, it may send status messages back to the
- buildmaster by calling self.sendStatus(statusdict). The statusdict is
- interpreted by the master-side BuildStep however it likes.
-
- A separate completion message is sent when the deferred fires, which
- indicates that the Command has finished, but does not carry any status
- data. If the Command needs to return an exit code of some sort, that
- should be sent as a regular status message before the deferred is fired.
- Once builder.commandComplete has been run, no more status messages may be
- sent.
-
- If interrupt() is called, the Command should attempt to shut down as
- quickly as possible. Child processes should be killed, new ones should
- not be started. The Command should send some kind of error status update,
- then complete as usual by firing the Deferred.
-
- .interrupted should be set by interrupt(), and can be tested to avoid
- sending multiple error status messages.
-
- If .running is False, the bot is shutting down (or has otherwise lost the
- connection to the master), and should not send any status messages. This
- is checked in Command.sendStatus.
-
- """
-
- # builder methods:
- # sendStatus(dict) (zero or more)
- # commandComplete() or commandInterrupted() (one, at end)
-
- debug = False
- interrupted = False
- running = False # set by Builder, cleared on shutdown or when the
- # Deferred fires
-
- def __init__(self, builder, stepId, args):
- self.builder = builder
- self.stepId = stepId # just for logging
- self.args = args
- self.setup(args)
-
- def setup(self, args):
- """Override this in a subclass to extract items from the args dict."""
- pass
-
- def start(self):
- """Start the command. self.running will be set just before this is
- called. This method should return a Deferred that will fire when the
- command has completed. The Deferred's argument will be ignored.
-
- This method should be overridden by subclasses."""
- raise NotImplementedError, "You must implement this in a subclass"
-
- def sendStatus(self, status):
- """Send a status update to the master."""
- if self.debug:
- log.msg("sendStatus", status)
- if not self.running:
- log.msg("would sendStatus but not .running")
- return
- self.builder.sendUpdate(status)
-
- def interrupt(self):
- """Override this in a subclass to allow commands to be interrupted.
- May be called multiple times, test and set self.interrupted=True if
- this matters."""
- pass
-
- # utility methods, mostly used by SlaveShellCommand and the like
-
- def _abandonOnFailure(self, rc):
- if type(rc) is not int:
- log.msg("weird, _abandonOnFailure was given rc=%s (%s)" % \
- (rc, type(rc)))
- assert isinstance(rc, int)
- if rc != 0:
- raise AbandonChain(rc)
- return rc
-
- def _sendRC(self, res):
- self.sendStatus({'rc': 0})
-
- def _checkAbandoned(self, why):
- log.msg("_checkAbandoned", why)
- why.trap(AbandonChain)
- log.msg(" abandoning chain", why.value)
- self.sendStatus({'rc': why.value.args[0]})
- return None
-
-
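-# Editor's illustrative sketch (not part of the original file): a minimal,
-# hypothetical Command subclass following the protocol described in the
-# docstring above. It is deliberately not registered with
-# registerSlaveCommand; see DummyCommand further below for the in-tree
-# example.
-class _ExampleSleepCommand(Command):
-    def setup(self, args):
-        self.delay = args.get('delay', 1)
-    def start(self):
-        d = defer.Deferred()
-        def _done():
-            self.sendStatus({'rc': 0})  # report an exit code to the master
-            d.callback(0)               # fire the Deferred; result is ignored
-        self.sendStatus({'header': "sleeping %d seconds\n" % self.delay})
-        reactor.callLater(self.delay, _done)
-        return d
-
-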
-class SlaveShellCommand(Command):
- """This is a Command which runs a shell command. The args dict contains
- the following keys:
-
- - ['command'] (required): a shell command to run. If this is a string,
- it will be run with /bin/bash (['/bin/bash', '-c', command]). If it is a
- list (preferred), it will be used directly.
- - ['workdir'] (required): subdirectory in which the command will be run,
- relative to the builder dir
- - ['env']: a dict of environment variables to augment/replace os.environ
- - ['want_stdout']: 0 if stdout should be thrown away
- - ['want_stderr']: 0 if stderr should be thrown away
- - ['not_really']: 1 to skip execution and return rc=0
- - ['timeout']: seconds of silence to tolerate before killing command
-
- ShellCommand creates the following status messages:
- - {'stdout': data} : when stdout data is available
- - {'stderr': data} : when stderr data is available
- - {'header': data} : when headers (command start/stop) are available
- - {'rc': rc} : when the process has terminated
- """
-
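- # Editor's illustrative example (hypothetical values, not from the
- # original file): the sort of args dict a master-side shell step might
- # send here:
- #   {'command': ['make', 'check'], 'workdir': 'build',
- #    'env': {'CFLAGS': '-O2'}, 'want_stdout': 1, 'want_stderr': 1,
- #    'timeout': 1200}
-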
- def start(self):
- args = self.args
- sendStdout = args.get('want_stdout', True)
- sendStderr = args.get('want_stderr', True)
- # args['workdir'] is relative to Builder directory, and is required.
- assert args['workdir'] is not None
- workdir = os.path.join(self.builder.basedir, args['workdir'])
- timeout = args.get('timeout', None)
-
- c = ShellCommand(self.builder, args['command'],
- workdir, environ=args.get('env'),
- timeout=timeout,
- sendStdout=sendStdout, sendStderr=sendStderr,
- sendRC=True)
- self.command = c
- d = self.command.start()
- return d
-
- def interrupt(self):
- self.interrupted = True
- self.command.kill("command interrupted")
-
-
-registerSlaveCommand("shell", SlaveShellCommand, cvs_ver)
-
-class SlaveTCSHShellCommand(Command):
- """This is a Command which runs a shell command. The args dict contains
- the following keys:
-
- - ['command'] (required): a shell command to run. If this is a string,
- it will be run with tcsh (['/usr/bin/tcsh', '-c', command]). If it is a
- list (preferred), it will be used directly.
- - ['workdir'] (required): subdirectory in which the command will be run,
- relative to the builder dir
- - ['env']: a dict of environment variables to augment/replace os.environ
- - ['want_stdout']: 0 if stdout should be thrown away
- - ['want_stderr']: 0 if stderr should be thrown away
- - ['not_really']: 1 to skip execution and return rc=0
- - ['timeout']: seconds of silence to tolerate before killing command
-
- TCSHShellCommand creates the following status messages:
- - {'stdout': data} : when stdout data is available
- - {'stderr': data} : when stderr data is available
- - {'header': data} : when headers (command start/stop) are available
- - {'rc': rc} : when the process has terminated
- """
-
- def start(self):
- args = self.args
- sendStdout = args.get('want_stdout', True)
- sendStderr = args.get('want_stderr', True)
- # args['workdir'] is relative to Builder directory, and is required.
- assert args['workdir'] is not None
- workdir = os.path.join(self.builder.basedir, args['workdir'])
- timeout = args.get('timeout', None)
-
- c = TCSHShellCommand(self.builder, args['command'],
- workdir, environ=args.get('env'),
- timeout=timeout,
- sendStdout=sendStdout, sendStderr=sendStderr,
- sendRC=True)
- self.command = c
- d = self.command.start()
- return d
-
- def interrupt(self):
- self.interrupted = True
- self.command.kill("command interrupted")
-
-
-registerSlaveCommand("tcsh", SlaveTCSHShellCommand, cvs_ver)
-
-
-class DummyCommand(Command):
- """
- I am a dummy no-op command that by default takes 5 seconds to complete.
- See L{buildbot.process.step.RemoteDummy}
- """
-
- def start(self):
- self.d = defer.Deferred()
- log.msg(" starting dummy command [%s]" % self.stepId)
- self.timer = reactor.callLater(1, self.doStatus)
- return self.d
-
- def interrupt(self):
- if self.interrupted:
- return
- self.timer.cancel()
- self.timer = None
- self.interrupted = True
- self.finished()
-
- def doStatus(self):
- log.msg(" sending intermediate status")
- self.sendStatus({'stdout': 'data'})
- timeout = self.args.get('timeout', 5) + 1
- self.timer = reactor.callLater(timeout - 1, self.finished)
-
- def finished(self):
- log.msg(" dummy command finished [%s]" % self.stepId)
- if self.interrupted:
- self.sendStatus({'rc': 1})
- else:
- self.sendStatus({'rc': 0})
- self.d.callback(0)
-
-registerSlaveCommand("dummy", DummyCommand, cvs_ver)
-
-
-class SourceBase(Command):
- """Abstract base class for Version Control System operations (checkout
- and update). This class extracts the following arguments from the
- dictionary received from the master:
-
- - ['workdir']: (required) the subdirectory where the buildable sources
- should be placed
-
- - ['mode']: one of update/copy/clobber/export, defaults to 'update'
-
- - ['revision']: If not None, this is an int or string which indicates
- which sources (along a time-like axis) should be used.
- It is the thing you provide as the CVS -r or -D
- argument.
-
- - ['patch']: If not None, this is a tuple of (striplevel, patch)
- which contains a patch that should be applied after the
- checkout has occurred. Once applied, the tree is no
- longer eligible for use with mode='update', and it only
- makes sense to use this in conjunction with a
- ['revision'] argument. striplevel is an int, and patch
- is a string in standard unified diff format. The patch
- will be applied with 'patch -p%d <PATCH', with
- STRIPLEVEL substituted as %d. The command will fail if
- the patch process fails (rejected hunks).
-
- - ['timeout']: seconds of silence tolerated before we kill off the
- command
-
- - ['retry']: If not None, this is a tuple of (delay, repeats)
- which means that any failed VC updates should be
- reattempted, up to REPEATS times, after a delay of
- DELAY seconds. This is intended to deal with slaves
- that experience transient network failures.
- """
-
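- # Editor's illustrative example (hypothetical values, not from the
- # original file): an args dict exercising the keys documented above:
- #   {'workdir': 'build', 'mode': 'update', 'revision': '1.5',
- #    'patch': None, 'timeout': 120, 'retry': (10, 3)}
- # i.e. retry a failed VC operation up to 3 more times, 10 seconds apart.
-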
- sourcedata = ""
-
- def setup(self, args):
- # if we need to parse the output, use this environment. Otherwise
- # command output will be in whatever the buildslave's native language
- # has been set to.
- self.env = os.environ.copy()
- self.env['LC_ALL'] = "C"
-
- self.workdir = args['workdir']
- self.mode = args.get('mode', "update")
- self.revision = args.get('revision')
- self.patch = args.get('patch')
- self.timeout = args.get('timeout', 120)
- self.retry = args.get('retry')
- # VC-specific subclasses should override this to extract more args.
- # Make sure to upcall!
-
- def start(self):
- self.sendStatus({'header': "starting " + self.header + "\n"})
- self.command = None
-
- # self.srcdir is where the VC system should put the sources
- if self.mode == "copy":
- self.srcdir = "source" # hardwired directory name, sorry
- else:
- self.srcdir = self.workdir
- self.sourcedatafile = os.path.join(self.builder.basedir,
- self.srcdir,
- ".buildbot-sourcedata")
-
- d = defer.succeed(None)
- # do we need to clobber anything?
- if self.mode in ("copy", "clobber", "export"):
- d.addCallback(self.doClobber, self.workdir)
- if not (self.sourcedirIsUpdateable() and self.sourcedataMatches()):
- # the directory cannot be updated, so we have to clobber it.
- # Perhaps the master just changed modes from 'export' to
- # 'update'.
- d.addCallback(self.doClobber, self.srcdir)
-
- d.addCallback(self.doVC)
-
- if self.mode == "copy":
- d.addCallback(self.doCopy)
- if self.patch:
- d.addCallback(self.doPatch)
- d.addCallbacks(self._sendRC, self._checkAbandoned)
- return d
-
- def interrupt(self):
- self.interrupted = True
- if self.command:
- self.command.kill("command interrupted")
-
- def doVC(self, res):
- if self.interrupted:
- raise AbandonChain(1)
- if self.sourcedirIsUpdateable() and self.sourcedataMatches():
- d = self.doVCUpdate()
- d.addCallback(self.maybeDoVCFallback)
- else:
- d = self.doVCFull()
- d.addBoth(self.maybeDoVCRetry)
- d.addCallback(self._abandonOnFailure)
- d.addCallback(self._handleGotRevision)
- d.addCallback(self.writeSourcedata)
- return d
-
- def sourcedataMatches(self):
- try:
- olddata = open(self.sourcedatafile, "r").read()
- if olddata != self.sourcedata:
- return False
- except IOError:
- return False
- return True
-
- def _handleGotRevision(self, res):
- d = defer.maybeDeferred(self.parseGotRevision)
- d.addCallback(lambda got_revision:
- self.sendStatus({'got_revision': got_revision}))
- return d
-
- def parseGotRevision(self):
- """Override this in a subclass. It should return a string that
- represents which revision was actually checked out, or a Deferred
- that will fire with such a string. If, in a future build, you were to
- pass this 'got_revision' string in as the 'revision' component of a
- SourceStamp, you should wind up with the same source code as this
- checkout just obtained.
-
- It is probably most useful to scan self.command.stdout for a string
- of some sort. Be sure to set keepStdout=True on the VC command that
- you run, so that you'll have something available to look at.
-
- If this information is unavailable, just return None."""
-
- return None
-
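- # Editor's illustrative sketch (hypothetical, not in the original file):
- # a VC subclass whose command was run with keepStdout=True could override
- # parseGotRevision() roughly like this:
- #   def parseGotRevision(self):
- #       m = re.search(r"Revision: (\d+)", self.command.stdout)
- #       if m:
- #           return m.group(1)
- #       return None
-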
- def writeSourcedata(self, res):
- open(self.sourcedatafile, "w").write(self.sourcedata)
- return res
-
- def sourcedirIsUpdateable(self):
- raise NotImplementedError("this must be implemented in a subclass")
-
- def doVCUpdate(self):
- raise NotImplementedError("this must be implemented in a subclass")
-
- def doVCFull(self):
- raise NotImplementedError("this must be implemented in a subclass")
-
- def maybeDoVCFallback(self, rc):
- if type(rc) is int and rc == 0:
- return rc
- if self.interrupted:
- raise AbandonChain(1)
- msg = "update failed, clobbering and trying again"
- self.sendStatus({'header': msg + "\n"})
- log.msg(msg)
- d = self.doClobber(None, self.srcdir)
- d.addCallback(self.doVCFallback2)
- return d
-
- def doVCFallback2(self, res):
- msg = "now retrying VC operation"
- self.sendStatus({'header': msg + "\n"})
- log.msg(msg)
- d = self.doVCFull()
- d.addBoth(self.maybeDoVCRetry)
- d.addCallback(self._abandonOnFailure)
- return d
-
- def maybeDoVCRetry(self, res):
- """We get here somewhere after a VC chain has finished. res could
- be::
-
- - 0: the operation was successful
- - nonzero: the operation failed. retry if possible
- - AbandonChain: the operation failed, someone else noticed. retry.
- - Failure: some other exception, re-raise
- """
-
- if isinstance(res, failure.Failure):
- if self.interrupted:
- return res # don't re-try interrupted builds
- res.trap(AbandonChain)
- else:
- if type(res) is int and res == 0:
- return res
- if self.interrupted:
- raise AbandonChain(1)
- # if we get here, we should retry, if possible
- if self.retry:
- delay, repeats = self.retry
- if repeats >= 0:
- self.retry = (delay, repeats-1)
- msg = ("update failed, trying %d more times after %d seconds"
- % (repeats, delay))
- self.sendStatus({'header': msg + "\n"})
- log.msg(msg)
- d = defer.Deferred()
- d.addCallback(lambda res: self.doVCFull())
- d.addBoth(self.maybeDoVCRetry)
- reactor.callLater(delay, d.callback, None)
- return d
- return res
-
- def doClobber(self, dummy, dirname):
- # TODO: remove the old tree in the background
-## workdir = os.path.join(self.builder.basedir, self.workdir)
-## deaddir = self.workdir + ".deleting"
-## if os.path.isdir(workdir):
-## try:
-## os.rename(workdir, deaddir)
-## # might fail if deaddir already exists: previous deletion
-## # hasn't finished yet
-## # start the deletion in the background
-## # TODO: there was a solaris/NetApp/NFS problem where a
-## # process that was still running out of the directory we're
-## # trying to delete could prevent the rm-rf from working. I
-## # think it stalled the rm, but maybe it just died with
-## # permission issues. Try to detect this.
-## os.commands("rm -rf %s &" % deaddir)
-## except:
-## # fall back to sequential delete-then-checkout
-## pass
- d = os.path.join(self.builder.basedir, dirname)
- if runtime.platformType != "posix":
- # if we're running on w32, use rmtree instead. It will block,
- # but hopefully it won't take too long.
- rmdirRecursive(d)
- return defer.succeed(0)
- command = ["rm", "-rf", d]
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=0, timeout=self.timeout)
- self.command = c
- # sendRC=0 means the rm command will send stdout/stderr to the
- # master, but not the rc=0 when it finishes. That job is left to
- # _sendRC
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- return d
-
- def doCopy(self, res):
- # now copy tree to workdir
- fromdir = os.path.join(self.builder.basedir, self.srcdir)
- todir = os.path.join(self.builder.basedir, self.workdir)
- if runtime.platformType != "posix":
- shutil.copytree(fromdir, todir)
- return defer.succeed(0)
- command = ['cp', '-r', '-p', fromdir, todir]
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=False, timeout=self.timeout)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- return d
-
- def doPatch(self, res):
- patchlevel, diff = self.patch
- command = [getCommand("patch"), '-p%d' % patchlevel]
- dir = os.path.join(self.builder.basedir, self.workdir)
- # mark the directory so we don't try to update it later
- open(os.path.join(dir, ".buildbot-patched"), "w").write("patched\n")
- # now apply the patch
- c = ShellCommand(self.builder, command, dir,
- sendRC=False, timeout=self.timeout,
- stdin=diff)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- return d
-
-
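-# [Editor's note: ExampleVC below is an illustrative sketch added to show the
-# parseGotRevision()/keepStdout contract described in SourceBase above. It is
-# not part of the original file; the 'examplevc' tool, its 'Revision: NNN'
-# output, and the 'repourl' argument are invented for the example.]
-class ExampleVC(SourceBase):
-    header = "examplevc operation"
-
-    def setup(self, args):
-        SourceBase.setup(self, args)
-        self.vcexe = "examplevc"            # hypothetical VC tool
-        self.repourl = args.get('repourl', '')
-        self.sourcedata = "%s\n" % self.repourl
-
-    def sourcedirIsUpdateable(self):
-        return os.path.isdir(os.path.join(self.builder.basedir,
-                                          self.srcdir, ".examplevc"))
-
-    def doVCUpdate(self):
-        d = os.path.join(self.builder.basedir, self.srcdir)
-        # keepStdout=True so parseGotRevision() has output to scan afterwards
-        c = ShellCommand(self.builder, [self.vcexe, "update"], d,
-                         sendRC=False, timeout=self.timeout, keepStdout=True)
-        self.command = c
-        return c.start()
-
-    def doVCFull(self):
-        d = self.builder.basedir
-        c = ShellCommand(self.builder, [self.vcexe, "clone", self.repourl,
-                                        self.srcdir], d,
-                         sendRC=False, timeout=self.timeout, keepStdout=True)
-        self.command = c
-        return c.start()
-
-    def parseGotRevision(self):
-        # the hypothetical tool prints a line like 'Revision: 1234'; return
-        # that string, or None if it cannot be found
-        m = re.search(r"Revision: (\d+)", self.command.stdout)
-        if m:
-            return m.group(1)
-        return None
-
-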
-class CVS(SourceBase):
- """CVS-specific VC operation. In addition to the arguments handled by
- SourceBase, this command reads the following keys:
-
- ['cvsroot'] (required): the CVSROOT repository string
- ['cvsmodule'] (required): the module to be retrieved
- ['branch']: a '-r' tag or branch name to use for the checkout/update
- ['login']: a string for use as a password to 'cvs login'
- ['global_options']: a list of strings to use before the CVS verb
- """
-
- header = "cvs operation"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("cvs")
- self.vcexeoo = "./tinget.pl"
- self.cvsroot = args['cvsroot']
- self.cvsmodule = args['cvsmodule']
- self.global_options = args.get('global_options', [])
- self.branch = args.get('branch')
- self.login = args.get('login')
- self.sourcedata = "%s\n%s\n%s\n" % (self.cvsroot, self.cvsmodule,
- self.branch)
-
- def sourcedirIsUpdateable(self):
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, "CVS"))
-
- def start(self):
- if self.login is not None:
- # need to do a 'cvs login' command first
- d = self.builder.basedir
- command = ([self.vcexe, '-d', self.cvsroot] + self.global_options
- + ['login'])
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout,
- stdin=self.login+"\n")
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- d.addCallback(self._didLogin)
- return d
- else:
- return self._didLogin(None)
-
- def _didLogin(self, res):
- # now we really start
- return SourceBase.start(self)
-
- def doVCUpdate(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- #command = [self.vcexe, '-z3'] + self.global_options + ['update', '-dP']
- command = [self.vcexeoo]
- if self.branch:
- # command += ['-r', self.branch]
- command += [self.branch]
- #if self.revision:
- # command += ['-D', self.revision]
- command += [self.cvsmodule]
- command += ['up']
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- d = self.builder.basedir
- if self.mode == "export":
- verb = "export"
- else:
- verb = "checkout"
- #command = ([self.vcexe, '-d', self.cvsroot, '-z3'] +
- # self.global_options +
- # [verb, '-N', '-d', self.srcdir])
- command = [self.vcexeoo]
- if self.branch:
- # command += ['-r', self.branch]
- command += [self.branch]
- #if self.revision:
- # command += ['-D', self.revision]
- command += [self.cvsmodule]
- command += ['co']
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def parseGotRevision(self):
- # CVS does not have any kind of revision stamp to speak of. We return
- # the current timestamp as a best-effort guess, but this depends upon
- # the local system having a clock that is
- # reasonably-well-synchronized with the repository.
- return time.strftime("%Y-%m-%d %H:%M:%S +0000", time.gmtime())
-
-registerSlaveCommand("cvs", CVS, cvs_ver)
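-
-# [Editor's note: an illustrative args dict for the "cvs" command, covering the
-# keys documented in the CVS docstring above; the values are hypothetical and
-# this assignment is not part of the original file.]
-_example_cvs_args = {
-    'cvsroot': ":pserver:anonymous@cvs.example.org:/cvsroot",  # required
-    'cvsmodule': "myproject",                                  # required
-    'branch': "RELEASE_1_0",         # optional tag/branch for checkout/update
-    'login': "guestpassword",        # optional, triggers 'cvs login' first
-    'global_options': ['-z3'],       # optional, placed before the CVS verb
-}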
-
-class SVN(SourceBase):
- """Subversion-specific VC operation. In addition to the arguments
- handled by SourceBase, this command reads the following keys:
-
- ['svnurl'] (required): the SVN repository string
- """
-
- header = "svn operation"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("svn")
- self.svnurl = args['svnurl']
- self.sourcedata = "%s\n" % self.svnurl
-
- def sourcedirIsUpdateable(self):
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, ".svn"))
-
- def doVCUpdate(self):
- revision = self.args['revision'] or 'HEAD'
- # update: possible for mode in ('copy', 'update')
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'update', '--revision', str(revision)]
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout,
- keepStdout=True)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- revision = self.args['revision'] or 'HEAD'
- d = self.builder.basedir
- if self.mode == "export":
- command = [self.vcexe, 'export', '--revision', str(revision),
- self.svnurl, self.srcdir]
- else:
- # mode=='clobber', or copy/update on a broken workspace
- command = [self.vcexe, 'checkout', '--revision', str(revision),
- self.svnurl, self.srcdir]
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout,
- keepStdout=True)
- self.command = c
- return c.start()
-
- def parseGotRevision(self):
- # svn checkout operations finish with 'Checked out revision 16657.'
- # svn update operations finish with the line 'At revision 16654.'
- # But we don't use those. Instead, run 'svnversion'.
- svnversion_command = getCommand("svnversion")
- # older versions of 'svnversion' (1.1.4) require the WC_PATH
- # argument, newer ones (1.3.1) do not.
- command = [svnversion_command, "."]
- c = ShellCommand(self.builder, command,
- os.path.join(self.builder.basedir, self.srcdir),
- environ=self.env,
- sendStdout=False, sendStderr=False, sendRC=False,
- keepStdout=True)
- c.usePTY = False
- d = c.start()
- def _parse(res):
- r = c.stdout.strip()
- got_version = None
- try:
- got_version = int(r)
- except ValueError:
- msg = ("SVN.parseGotRevision unable to parse output
- "of svnversion: '%s'" % r)
- log.msg(msg)
- self.sendStatus({'header': msg + "\n"})
- return got_version
- d.addCallback(_parse)
- return d
-
-
-registerSlaveCommand("svn", SVN, cvs_ver)
-
-class Darcs(SourceBase):
- """Darcs-specific VC operation. In addition to the arguments
- handled by SourceBase, this command reads the following keys:
-
- ['repourl'] (required): the Darcs repository string
- """
-
- header = "darcs operation"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("darcs")
- self.repourl = args['repourl']
- self.sourcedata = "%s\n" % self.repourl
- self.revision = self.args.get('revision')
-
- def sourcedirIsUpdateable(self):
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- if self.revision:
- # checking out a specific revision requires a full 'darcs get'
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, "_darcs"))
-
- def doVCUpdate(self):
- assert not self.revision
- # update: possible for mode in ('copy', 'update')
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'pull', '--all', '--verbose']
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- # checkout or export
- d = self.builder.basedir
- command = [self.vcexe, 'get', '--verbose', '--partial',
- '--repo-name', self.srcdir]
- if self.revision:
- # write the context to a file
- n = os.path.join(self.builder.basedir, ".darcs-context")
- f = open(n, "wb")
- f.write(self.revision)
- f.close()
- # tell Darcs to use that context
- command.append('--context')
- command.append(n)
- command.append(self.repourl)
-
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- d = c.start()
- if self.revision:
- d.addCallback(self.removeContextFile, n)
- return d
-
- def removeContextFile(self, res, n):
- os.unlink(n)
- return res
-
- def parseGotRevision(self):
- # we use 'darcs context' to find out what we wound up with
- command = [self.vcexe, "changes", "--context"]
- c = ShellCommand(self.builder, command,
- os.path.join(self.builder.basedir, self.srcdir),
- environ=self.env,
- sendStdout=False, sendStderr=False, sendRC=False,
- keepStdout=True)
- c.usePTY = False
- d = c.start()
- d.addCallback(lambda res: c.stdout)
- return d
-
-registerSlaveCommand("darcs", Darcs, cvs_ver)
-
-class Git(SourceBase):
- """Git specific VC operation. In addition to the arguments
- handled by SourceBase, this command reads the following keys:
-
- ['repourl'] (required): the Cogito repository string
- """
-
- header = "git operation"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.repourl = args['repourl']
- #self.sourcedata = "" # TODO
-
- def sourcedirIsUpdateable(self):
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, ".git"))
-
- def doVCUpdate(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = ['cg-update']
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- os.mkdir(d)
- command = ['cg-clone', '-s', self.repourl]
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
-registerSlaveCommand("git", Git, cvs_ver)
-
-class Arch(SourceBase):
- """Arch-specific (tla-specific) VC operation. In addition to the
- arguments handled by SourceBase, this command reads the following keys:
-
- ['url'] (required): the repository string
- ['version'] (required): which version (i.e. branch) to retrieve
- ['revision'] (optional): the 'patch-NN' argument to check out
- ['archive']: the archive name to use. If None, use the archive's default
- ['build-config']: if present, give to 'tla build-config' after checkout
- """
-
- header = "arch operation"
- buildconfig = None
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("tla")
- self.archive = args.get('archive')
- self.url = args['url']
- self.version = args['version']
- self.revision = args.get('revision')
- self.buildconfig = args.get('build-config')
- self.sourcedata = "%s\n%s\n%s\n" % (self.url, self.version,
- self.buildconfig)
-
- def sourcedirIsUpdateable(self):
- if self.revision:
- # Arch cannot roll a directory backwards, so if they ask for a
- # specific revision, clobber the directory. Technically this
- # could be limited to the cases where the requested revision is
- # later than our current one, but it's too hard to extract the
- # current revision from the tree.
- return False
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, "{arch}"))
-
- def doVCUpdate(self):
- # update: possible for mode in ('copy', 'update')
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'replay']
- if self.revision:
- command.append(self.revision)
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- # to do a checkout, we must first "register" the archive by giving
- # the URL to tla, which will go to the repository at that URL and
- # figure out the archive name. tla will tell you the archive name
- # when it is done, and all further actions must refer to this name.
-
- command = [self.vcexe, 'register-archive', '--force', self.url]
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=False, keepStdout=True,
- timeout=self.timeout)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- d.addCallback(self._didRegister, c)
- return d
-
- def _didRegister(self, res, c):
- # find out what tla thinks the archive name is. If the user told us
- # to use something specific, make sure it matches.
- r = re.search(r'Registering archive: (\S+)\s*$', c.stdout)
- if r:
- msg = "tla reports archive name is '%s'" % r.group(1)
- log.msg(msg)
- self.builder.sendUpdate({'header': msg+"\n"})
- if self.archive and r.group(1) != self.archive:
- msg = (" mismatch, we wanted an archive named '%s'"
- % self.archive)
- log.msg(msg)
- self.builder.sendUpdate({'header': msg+"\n"})
- raise AbandonChain(-1)
- self.archive = r.group(1)
- assert self.archive, "need archive name to continue"
- return self._doGet()
-
- def _doGet(self):
- ver = self.version
- if self.revision:
- ver += "--%s" % self.revision
- command = [self.vcexe, 'get', '--archive', self.archive,
- '--no-pristine',
- ver, self.srcdir]
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=False, timeout=self.timeout)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- if self.buildconfig:
- d.addCallback(self._didGet)
- return d
-
- def _didGet(self, res):
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'build-config', self.buildconfig]
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- return d
-
- def parseGotRevision(self):
- # using code from tryclient.TlaExtractor
- # 'tla logs --full' gives us ARCHIVE/BRANCH--REVISION
- # 'tla logs' gives us REVISION
- command = [self.vcexe, "logs", "--full", "--reverse"]
- c = ShellCommand(self.builder, command,
- os.path.join(self.builder.basedir, self.srcdir),
- environ=self.env,
- sendStdout=False, sendStderr=False, sendRC=False,
- keepStdout=True)
- c.usePTY = False
- d = c.start()
- def _parse(res):
- tid = c.stdout.split("\n")[0].strip()
- slash = tid.index("/")
- dd = tid.rindex("--")
- #branch = tid[slash+1:dd]
- baserev = tid[dd+2:]
- return baserev
- d.addCallback(_parse)
- return d
-
-registerSlaveCommand("arch", Arch, cvs_ver)
-
-class Bazaar(Arch):
- """Bazaar (/usr/bin/baz) is an alternative client for Arch repositories.
- It is mostly option-compatible, but archive registration is different
- enough to warrant a separate Command.
-
- ['archive'] (required): the name of the archive being used
- """
-
- def setup(self, args):
- Arch.setup(self, args)
- self.vcexe = getCommand("baz")
- # baz doesn't emit the repository name after registration (and
- # grepping through the output of 'baz archives' is too hard), so we
- # require the buildmaster configuration to provide both the
- # archive name and the URL.
- self.archive = args['archive'] # required for Baz
- self.sourcedata = "%s\n%s\n%s\n" % (self.url, self.version,
- self.buildconfig)
-
- # in _didRegister, the regexp won't match, so we'll stick with the name
- # in self.archive
-
- def _doGet(self):
- # baz prefers ARCHIVE/VERSION. This will work even if
- # my-default-archive is not set.
- ver = self.archive + "/" + self.version
- if self.revision:
- ver += "--%s" % self.revision
- command = [self.vcexe, 'get', '--no-pristine',
- ver, self.srcdir]
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=False, timeout=self.timeout)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- if self.buildconfig:
- d.addCallback(self._didGet)
- return d
-
- def parseGotRevision(self):
- # using code from tryclient.BazExtractor
- command = [self.vcexe, "tree-id"]
- c = ShellCommand(self.builder, command,
- os.path.join(self.builder.basedir, self.srcdir),
- environ=self.env,
- sendStdout=False, sendStderr=False, sendRC=False,
- keepStdout=True)
- c.usePTY = False
- d = c.start()
- def _parse(res):
- tid = c.stdout.strip()
- slash = tid.index("/")
- dd = tid.rindex("--")
- #branch = tid[slash+1:dd]
- baserev = tid[dd+2:]
- return baserev
- d.addCallback(_parse)
- return d
-
-registerSlaveCommand("bazaar", Bazaar, cvs_ver)
-
-
-class Mercurial(SourceBase):
- """Mercurial specific VC operation. In addition to the arguments
- handled by SourceBase, this command reads the following keys:
-
- ['repourl'] (required): the Mercurial repository string
- """
-
- header = "mercurial operation"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("hg")
- self.repourl = args['repourl']
- self.sourcedata = "%s\n" % self.repourl
- self.stdout = ""
- self.stderr = ""
-
- def sourcedirIsUpdateable(self):
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- # like Darcs, to check out a specific (old) revision, we have to do a
- # full checkout. TODO: I think 'hg pull' plus 'hg update' might work
- if self.revision:
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, ".hg"))
-
- def doVCUpdate(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'pull', '--update', '--verbose']
- if self.args['revision']:
- command.extend(['--rev', self.args['revision']])
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout,
- keepStdout=True)
- self.command = c
- d = c.start()
- d.addCallback(self._handleEmptyUpdate)
- return d
-
- def _handleEmptyUpdate(self, res):
- if type(res) is int and res == 1:
- if self.command.stdout.find("no changes found") != -1:
- # 'hg pull', when it doesn't have anything to do, exits with
- # rc=1, and there appears to be no way to shut this off. It
- # emits a distinctive message to stdout, though. So catch
- # this and pretend that it completed successfully.
- return 0
- return res
-
- def doVCFull(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'clone']
- if self.args['revision']:
- command.extend(['--rev', self.args['revision']])
- command.extend([self.repourl, d])
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def parseGotRevision(self):
- # we use 'hg identify' to find out what we wound up with
- command = [self.vcexe, "identify"]
- c = ShellCommand(self.builder, command,
- os.path.join(self.builder.basedir, self.srcdir),
- environ=self.env,
- sendStdout=False, sendStderr=False, sendRC=False,
- keepStdout=True)
- d = c.start()
- def _parse(res):
- m = re.search(r'^(\w+)', c.stdout)
- return m.group(1)
- d.addCallback(_parse)
- return d
-
-registerSlaveCommand("hg", Mercurial, cvs_ver)
-
-
-class P4Sync(SourceBase):
- """A partial P4 source-updater. Requires manual setup of a per-slave P4
- environment. The only thing which comes from the master is P4PORT.
- 'mode' is required to be 'copy'.
-
- ['p4port'] (required): host:port for server to access
- ['p4user'] (optional): user to use for access
- ['p4passwd'] (optional): passwd to try for the user
- ['p4client'] (optional): client spec to use
- """
-
- header = "p4 sync"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("p4")
- self.p4port = args['p4port']
- self.p4user = args['p4user']
- self.p4passwd = args['p4passwd']
- self.p4client = args['p4client']
-
- def sourcedirIsUpdateable(self):
- return True
-
- def doVCUpdate(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe]
- if self.p4port:
- command.extend(['-p', self.p4port])
- if self.p4user:
- command.extend(['-u', self.p4user])
- if self.p4passwd:
- command.extend(['-P', self.p4passwd])
- if self.p4client:
- command.extend(['-c', self.p4client])
- command.extend(['sync'])
- if self.revision:
- command.extend(['@' + self.revision])
- env = {}
- c = ShellCommand(self.builder, command, d, environ=env,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- return self.doVCUpdate()
-
-registerSlaveCommand("p4sync", P4Sync, cvs_ver)
diff --git a/buildbot/buildbot-source/build/lib/buildbot/slave/interfaces.py b/buildbot/buildbot-source/build/lib/buildbot/slave/interfaces.py
deleted file mode 100644
index 45096147e..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/slave/interfaces.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#! /usr/bin/python
-
-from twisted.python.components import Interface
-
-class ISlaveCommand(Interface):
- """This interface is implemented by all of the buildslave's Command
- subclasses. It specifies how the buildslave can start, interrupt, and
- query the various Commands running on behalf of the buildmaster."""
-
- def __init__(builder, stepId, args):
- """Create the Command. 'builder' is a reference to the parent
- buildbot.bot.SlaveBuilder instance, which will be used to send status
- updates (by calling builder.sendStatus). 'stepId' is a random string
- which helps correlate slave logs with the master. 'args' is a dict of
- arguments that comes from the master-side BuildStep, with contents
- that are specific to the individual Command subclass.
-
- This method is not intended to be subclassed."""
-
- def setup(args):
- """This method is provided for subclasses to override, to extract
- parameters from the 'args' dictionary. The default implementation does
- nothing. It will be called from __init__"""
-
- def start():
- """Begin the command, and return a Deferred.
-
- While the command runs, it should send status updates to the
- master-side BuildStep by calling self.sendStatus(status). The
- 'status' argument is typically a dict with keys like 'stdout',
- 'stderr', and 'rc'.
-
- When the step completes, it should fire the Deferred (the results are
- not used). If an exception occurs during execution, it may also
- errback the deferred, however any reasonable errors should be trapped
- and indicated with a non-zero 'rc' status rather than raising an
- exception. Exceptions should indicate problems within the buildbot
- itself, not problems in the project being tested.
-
- """
-
- def interrupt():
- """This is called to tell the Command that the build is being stopped
- and therefore the command should be terminated as quickly as
- possible. The command may continue to send status updates, up to and
- including an 'rc' end-of-command update (which should indicate an
- error condition). The Command's deferred should still be fired when
- the command has finally completed.
-
- If the build is being stopped because the slave is shutting down or
- because the connection to the buildmaster has been lost, the status
- updates will simply be discarded. The Command does not need to be
- aware of this.
-
- Child shell processes should be killed. Simple ShellCommand classes
- can just insert a header line indicating that the process will be
- killed, then os.kill() the child."""
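-
-# [Editor's note: the commented sketch below illustrates a minimal Command
-# implementing this interface; it is not part of the original file. The
-# SleepCommand name and its 'delay' argument are invented, and it assumes the
-# Command base class from buildbot.slave.commands and the registry module
-# shown below.]
-#
-#   from twisted.internet import defer, reactor
-#   from buildbot.slave.commands import Command
-#   from buildbot.slave.registry import registerSlaveCommand
-#
-#   class SleepCommand(Command):
-#       def setup(self, args):
-#           # pull our parameters out of the master-supplied args dict
-#           self.delay = args.get('delay', 1)
-#           self.timer = None
-#
-#       def start(self):
-#           self.deferred = defer.Deferred()
-#           self.timer = reactor.callLater(self.delay, self._done, 0)
-#           return self.deferred
-#
-#       def interrupt(self):
-#           # terminate quickly, reporting a non-zero rc
-#           if self.timer:
-#               self.timer.cancel()
-#               self._done(1)
-#
-#       def _done(self, rc):
-#           self.timer = None
-#           self.sendStatus({'rc': rc})
-#           self.deferred.callback(None)
-#
-#   registerSlaveCommand("sleep", SleepCommand, "0.1")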
diff --git a/buildbot/buildbot-source/build/lib/buildbot/slave/registry.py b/buildbot/buildbot-source/build/lib/buildbot/slave/registry.py
deleted file mode 100644
index b4497d4fe..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/slave/registry.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#! /usr/bin/python
-
-commandRegistry = {}
-
-def registerSlaveCommand(name, factory, version):
- """
- Register a slave command with the registry, making it available in slaves.
-
- @type name: string
- @param name: name under which the slave command will be registered; used
- for L{buildbot.slave.bot.SlaveBuilder.remote_startCommand}
-
- @type factory: L{buildbot.slave.commands.Command}
- @type version: string
- @param version: version string of the factory code
- """
- assert not commandRegistry.has_key(name)
- commandRegistry[name] = (factory, version)
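-
-# [Editor's note: illustrative usage, not part of the original file; the
-# "sleep" name and SleepCommand class are hypothetical (see the sketch added
-# under interfaces.py above).]
-#
-#   registerSlaveCommand("sleep", SleepCommand, "0.1")
-#   factory, version = commandRegistry["sleep"]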
diff --git a/buildbot/buildbot-source/build/lib/buildbot/slave/trial.py b/buildbot/buildbot-source/build/lib/buildbot/slave/trial.py
deleted file mode 100644
index 9d1fa6f69..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/slave/trial.py
+++ /dev/null
@@ -1,175 +0,0 @@
-# -*- test-case-name: buildbot.test.test_trial.TestRemoteReporter -*-
-
-import types, time
-import zope.interface as zi
-
-from twisted.spread import pb
-from twisted.internet import reactor, defer
-from twisted.python import reflect, failure, log, usage, util
-from twisted.trial import registerAdapter, adaptWithDefault, reporter, runner
-from twisted.trial.interfaces import ITestMethod, ITestSuite, ITestRunner, \
- IJellied, IUnjellied, IRemoteReporter
-from twisted.application import strports
-
-
-class RemoteTestAny(object, util.FancyStrMixin):
- def __init__(self, original):
- self.original = original
-
- def __getattr__(self, attr):
- if attr not in self.original:
- raise AttributeError, "%s has no attribute %s" % (self.__str__(), attr)
- return self.original[attr]
-
-
-class RemoteTestMethod(RemoteTestAny):
- zi.implements(ITestMethod)
-
-class RemoteTestSuite(RemoteTestAny):
- zi.implements(ITestSuite)
-
-
-class RemoteReporter(reporter.Reporter):
- zi.implements(IRemoteReporter)
- pbroot = None
-
- def __init__(self, stream=None, tbformat=None, args=None):
- super(RemoteReporter, self).__init__(stream, tbformat, args)
-
- def setUpReporter(self):
- factory = pb.PBClientFactory()
-
- self.pbcnx = reactor.connectTCP("localhost", self.args, factory)
- assert self.pbcnx is not None
-
- def _cb(root):
- self.pbroot = root
- return root
-
- return factory.getRootObject().addCallback(_cb
- ).addErrback(log.err)
-
- def tearDownReporter(self):
- def _disconnected(passthru):
- log.msg(sekritHQ='_disconnected, passthru: %r' % (passthru,))
- return passthru
-
- d = defer.Deferred().addCallback(_disconnected
- ).addErrback(log.err)
-
- self.pbroot.notifyOnDisconnect(d.callback)
- self.pbcnx.transport.loseConnection()
- return d
-
- def reportImportError(self, name, fail):
- pass
-
- def startTest(self, method):
- return self.pbroot.callRemote('startTest', IJellied(method))
-
- def endTest(self, method):
- return self.pbroot.callRemote('endTest', IJellied(method))
-
- def startSuite(self, arg):
- return self.pbroot.callRemote('startSuite', IJellied(arg))
-
- def endSuite(self, suite):
- return self.pbroot.callRemote('endSuite', IJellied(suite))
-
-
-# -- Adapters --
-
-def jellyList(L):
- return [IJellied(i) for i in L]
-
-def jellyTuple(T):
- return tuple(IJellied(list(T)))
-
-def jellyDict(D):
- def _clean(*a):
- return tuple(map(lambda x: adaptWithDefault(IJellied, x, None), a))
- return dict([_clean(k, v) for k, v in D.iteritems()])
-
-def jellyTimingInfo(d, timed):
- for attr in ('startTime', 'endTime'):
- d[attr] = getattr(timed, attr, 0.0)
- return d
-
-def _logFormatter(eventDict):
- #XXX: this is pretty weak, it's basically the guts of
- # t.p.log.FileLogObserver.emit, but then again, that's been pretty
- # stable over the past few releases....
- edm = eventDict['message']
- if not edm:
- if eventDict['isError'] and eventDict.has_key('failure'):
- text = eventDict['failure'].getTraceback()
- elif eventDict.has_key('format'):
- try:
- text = eventDict['format'] % eventDict
- except:
- try:
- text = ('Invalid format string in log message: %s'
- % eventDict)
- except:
- text = 'UNFORMATTABLE OBJECT WRITTEN TO LOG, MESSAGE LOST'
- else:
- # we don't know how to log this
- return
- else:
- text = ' '.join(map(str, edm))
-
- timeStr = time.strftime("%Y/%m/%d %H:%M %Z", time.localtime(eventDict['time']))
- fmtDict = {'system': eventDict['system'], 'text': text.replace("\n", "\n\t")}
- msgStr = " [%(system)s] %(text)s\n" % fmtDict
- return "%s%s" % (timeStr, msgStr)
-
-def jellyTestMethod(testMethod):
- """@param testMethod: an object that implements L{twisted.trial.interfaces.ITestMethod}"""
- d = {}
- for attr in ('status', 'todo', 'skip', 'stdout', 'stderr',
- 'name', 'fullName', 'runs', 'errors', 'failures', 'module'):
- d[attr] = getattr(testMethod, attr)
-
- q = None
- try:
- q = reflect.qual(testMethod.klass)
- except TypeError:
- # XXX: This may be incorrect somehow
- q = "%s.%s" % (testMethod.module, testMethod.klass.__name__)
- d['klass'] = q
-
- d['logevents'] = [_logFormatter(event) for event in testMethod.logevents]
-
- jellyTimingInfo(d, testMethod)
-
- return d
-
-def jellyTestRunner(testRunner):
- """@param testRunner: an object that implements L{twisted.trial.interfaces.ITestRunner}"""
- d = dict(testMethods=[IJellied(m) for m in testRunner.testMethods])
- jellyTimingInfo(d, testRunner)
- return d
-
-def jellyTestSuite(testSuite):
- d = {}
- for attr in ('tests', 'runners', 'couldNotImport'):
- d[attr] = IJellied(getattr(testSuite, attr))
-
- jellyTimingInfo(d, testSuite)
- return d
-
-
-
-for a, o, i in [(jellyTuple, types.TupleType, IJellied),
- (jellyTestMethod, ITestMethod, IJellied),
- (jellyList, types.ListType, IJellied),
- (jellyTestSuite, ITestSuite, IJellied),
- (jellyTestRunner, ITestRunner, IJellied),
- (jellyDict, types.DictType, IJellied),
- (RemoteTestMethod, types.DictType, ITestMethod),
- (RemoteTestSuite, types.DictType, ITestSuite)]:
- registerAdapter(a, o, i)
-
-for t in [types.StringType, types.IntType, types.FloatType, failure.Failure]:
- zi.classImplements(t, IJellied)
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/sourcestamp.py b/buildbot/buildbot-source/build/lib/buildbot/sourcestamp.py
deleted file mode 100644
index 2c9e1ab6e..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/sourcestamp.py
+++ /dev/null
@@ -1,85 +0,0 @@
-
-from buildbot import util, interfaces
-from buildbot.twcompat import implements
-
-class SourceStamp(util.ComparableMixin):
- """This is a tuple of (branch, revision, patchspec, changes).
-
- C{branch} is always valid, although it may be None to let the Source
- step use its default branch. There are four possibilities for the
- remaining elements:
- - (revision=REV, patchspec=None, changes=None): build REV
- - (revision=REV, patchspec=(LEVEL, DIFF), changes=None): checkout REV,
- then apply a patch to the source, with C{patch -pPATCHLEVEL <DIFF}.
- - (revision=None, patchspec=None, changes=[CHANGES]): let the Source
- step check out the latest revision indicated by the given Changes.
- CHANGES is a list of L{buildbot.changes.changes.Change} instances,
- and all must be on the same branch.
- - (revision=None, patchspec=None, changes=None): build the latest code
- from the given branch.
- """
-
- # all four of these are publicly visible attributes
- branch = None
- revision = None
- patch = None
- changes = []
-
- compare_attrs = ('branch', 'revision', 'patch', 'changes')
-
- if implements:
- implements(interfaces.ISourceStamp)
- else:
- __implements__ = interfaces.ISourceStamp,
-
- def __init__(self, branch=None, revision=None, patch=None,
- changes=None):
- self.branch = branch
- self.revision = revision
- self.patch = patch
- if changes:
- self.changes = changes
- self.branch = changes[0].branch
-
- def canBeMergedWith(self, other):
- if other.branch != self.branch:
- return False # the builds are completely unrelated
-
- if self.changes and other.changes:
- # TODO: consider not merging these. It's a tradeoff between
- # minimizing the number of builds and obtaining finer-grained
- # results.
- return True
- elif self.changes and not other.changes:
- return False # we're using changes, they aren't
- elif not self.changes and other.changes:
- return False # they're using changes, we aren't
-
- if self.patch or other.patch:
- return False # you can't merge patched builds with anything
- if self.revision == other.revision:
- # both builds are using the same specific revision, so they can
- # be merged. It might be the case that revision==None, so they're
- # both building HEAD.
- return True
-
- return False
-
- def mergeWith(self, others):
- """Generate a SourceStamp for the merger of me and all the other
- BuildRequests. This is called by a Build when it starts, to figure
- out what its sourceStamp should be."""
-
- # either we're all building the same thing (changes==None), or we're
- # all building changes (which can be merged)
- changes = []
- changes.extend(self.changes)
- for req in others:
- assert self.canBeMergedWith(req) # should have been checked already
- changes.extend(req.changes)
- newsource = SourceStamp(branch=self.branch,
- revision=self.revision,
- patch=self.patch,
- changes=changes)
- return newsource
-
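-# [Editor's note: illustrative constructions of the four SourceStamp forms
-# described in the class docstring; not part of the original file, and the
-# branch, revision, and diff values are made up.]
-_latest_on_default_branch = SourceStamp()
-_latest_on_branch = SourceStamp(branch="release-2.0")
-_specific_revision = SourceStamp(revision="1234")
-_patched_revision = SourceStamp(revision="1234",
-                                patch=(1, "--- a/f\n+++ b/f\n@@ -1 +1 @@\n-x\n+y\n"))
-# (the changes=[...] form takes buildbot.changes.changes.Change instances and
-# takes its branch from the first of them)
-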
diff --git a/buildbot/buildbot-source/build/lib/buildbot/status/__init__.py b/buildbot/buildbot-source/build/lib/buildbot/status/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/status/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/build/lib/buildbot/status/base.py b/buildbot/buildbot-source/build/lib/buildbot/status/base.py
deleted file mode 100644
index 92bace5f8..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/status/base.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#! /usr/bin/python
-
-from twisted.application import service
-from twisted.python import components
-
-try:
- from zope.interface import implements
-except ImportError:
- implements = None
-if not hasattr(components, "interface"):
- implements = None # nope
-
-from buildbot.interfaces import IStatusReceiver
-from buildbot import util, pbutil
-
-class StatusReceiver:
- if implements:
- implements(IStatusReceiver)
- else:
- __implements__ = IStatusReceiver,
-
- def buildsetSubmitted(self, buildset):
- pass
-
- def builderAdded(self, builderName, builder):
- pass
-
- def builderChangedState(self, builderName, state):
- pass
-
- def buildStarted(self, builderName, build):
- pass
-
- def buildETAUpdate(self, build, ETA):
- pass
-
- def stepStarted(self, build, step):
- pass
-
- def stepETAUpdate(self, build, step, ETA, expectations):
- pass
-
- def logStarted(self, build, step, log):
- pass
-
- def logChunk(self, build, step, log, channel, text):
- pass
-
- def logFinished(self, build, step, log):
- pass
-
- def stepFinished(self, build, step, results):
- pass
-
- def buildFinished(self, builderName, build, results):
- pass
-
- def builderRemoved(self, builderName):
- pass
-
-class StatusReceiverMultiService(StatusReceiver, service.MultiService,
- util.ComparableMixin):
- if implements:
- implements(IStatusReceiver)
- else:
- __implements__ = IStatusReceiver, service.MultiService.__implements__
-
- def __init__(self):
- service.MultiService.__init__(self)
-
-
-class StatusReceiverPerspective(StatusReceiver, pbutil.NewCredPerspective):
- if implements:
- implements(IStatusReceiver)
- else:
- __implements__ = (IStatusReceiver,
- pbutil.NewCredPerspective.__implements__)
diff --git a/buildbot/buildbot-source/build/lib/buildbot/status/builder.py b/buildbot/buildbot-source/build/lib/buildbot/status/builder.py
deleted file mode 100644
index 900287a7c..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/status/builder.py
+++ /dev/null
@@ -1,1927 +0,0 @@
-# -*- test-case-name: buildbot.test.test_status -*-
-
-from __future__ import generators
-
-from twisted.python import log
-from twisted.persisted import styles
-from twisted.internet import reactor, defer
-from twisted.protocols import basic
-
-import time, os, os.path, shutil, sys, re, urllib
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-# sibling imports
-from buildbot import interfaces, util, sourcestamp
-from buildbot.twcompat import implements, providedBy
-
-SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION = range(5)
-Results = ["success", "warnings", "failure", "skipped", "exception"]
-
-
-# build processes call the following methods:
-#
-# setDefaults
-#
-# currentlyBuilding
-# currentlyIdle
-# currentlyInterlocked
-# currentlyOffline
-# currentlyWaiting
-#
-# setCurrentActivity
-# updateCurrentActivity
-# addFileToCurrentActivity
-# finishCurrentActivity
-#
-# startBuild
-# finishBuild
-
-STDOUT = 0
-STDERR = 1
-HEADER = 2
-ChunkTypes = ["stdout", "stderr", "header"]
-
-class LogFileScanner(basic.NetstringReceiver):
- def __init__(self, chunk_cb, channels=[]):
- self.chunk_cb = chunk_cb
- self.channels = channels
-
- def stringReceived(self, line):
- channel = int(line[0])
- if not self.channels or (channel in self.channels):
- self.chunk_cb((channel, line[1:]))
-
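-# [Editor's note: an illustrative example of the on-disk chunk encoding, added
-# for clarity; not part of the original file.] Each chunk is stored as a
-# netstring whose payload begins with the channel digit: STDOUT text "hello\n"
-# is written by LogFile.merge() below as "7:0hello\n," (7 = 1 channel byte +
-# 6 text bytes). Feeding those bytes back through LogFileScanner recovers the
-# (channel, text) tuple:
-_example_chunks = []
-LogFileScanner(_example_chunks.append).dataReceived("7:0hello\n,")
-assert _example_chunks == [(STDOUT, "hello\n")]
-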
-class LogFileProducer:
- """What's the plan?
-
- the LogFile has just one FD, used for both reading and writing.
- Each time you add an entry, fd.seek to the end and then write.
-
- Each reader (i.e. Producer) keeps track of their own offset. The reader
- starts by seeking to the start of the logfile, and reading forwards.
- Between each hunk of file they yield chunks, so they must remember their
- offset before yielding and re-seek back to that offset before reading
- more data. When their read() returns EOF, they're finished with the first
- phase of the reading (everything that's already been written to disk).
-
- After EOF, the remaining data is entirely in the current entries list.
- These entries are all of the same channel, so we can do one "".join and
- obtain a single chunk to be sent to the listener. But since that involves
- a yield, and more data might arrive after we give up control, we have to
- subscribe them before yielding. We can't subscribe them any earlier,
- otherwise they'd get data out of order.
-
- We're using a generator in the first place so that the listener can
- throttle us, which means they're pulling. But the subscription means
- we're pushing. Really we're a Producer. In the first phase we can be
- either a PullProducer or a PushProducer. In the second phase we're only a
- PushProducer.
- So the client gives a LogFileConsumer to LogFile.subscribeConsumer(). This
- So the client gives a LogFileConsumer to File.subscribeConsumer . This
- Consumer must have registerProducer(), unregisterProducer(), and
- writeChunk(), and is just like a regular twisted.interfaces.IConsumer,
- except that writeChunk() takes chunks (tuples of (channel,text)) instead
- of the normal write() which takes just text. The LogFileConsumer is
- allowed to call stopProducing, pauseProducing, and resumeProducing on the
- producer instance it is given. """
-
- paused = False
- subscribed = False
- BUFFERSIZE = 2048
-
- def __init__(self, logfile, consumer):
- self.logfile = logfile
- self.consumer = consumer
- self.chunkGenerator = self.getChunks()
- consumer.registerProducer(self, True)
-
- def getChunks(self):
- f = self.logfile.getFile()
- offset = 0
- chunks = []
- p = LogFileScanner(chunks.append)
- f.seek(offset)
- data = f.read(self.BUFFERSIZE)
- offset = f.tell()
- while data:
- p.dataReceived(data)
- while chunks:
- c = chunks.pop(0)
- yield c
- f.seek(offset)
- data = f.read(self.BUFFERSIZE)
- offset = f.tell()
- del f
-
- # now subscribe them to receive new entries
- self.subscribed = True
- self.logfile.watchers.append(self)
- d = self.logfile.waitUntilFinished()
-
- # then give them the not-yet-merged data
- if self.logfile.runEntries:
- channel = self.logfile.runEntries[0][0]
- text = "".join([c[1] for c in self.logfile.runEntries])
- yield (channel, text)
-
- # now we've caught up to the present. Anything further will come from
- # the logfile subscription. We add the callback *after* yielding the
- # data from runEntries, because the logfile might have finished
- # during the yield.
- d.addCallback(self.logfileFinished)
-
- def stopProducing(self):
- # TODO: should we still call consumer.finish? probably not.
- self.paused = True
- self.consumer = None
- self.done()
-
- def done(self):
- if self.chunkGenerator:
- self.chunkGenerator = None # stop making chunks
- if self.subscribed:
- self.logfile.watchers.remove(self)
- self.subscribed = False
-
- def pauseProducing(self):
- self.paused = True
-
- def resumeProducing(self):
- # Twisted-1.3.0 has a bug which causes hangs when resumeProducing
- # calls transport.write (there is a recursive loop, fixed in 2.0 in
- # t.i.abstract.FileDescriptor.doWrite by setting the producerPaused
- # flag *before* calling resumeProducing). To work around this, we
- # just put off the real resumeProducing for a moment. This probably
- # has a performance hit, but I'm going to assume that the log files
- # are not retrieved frequently enough for it to be an issue.
-
- reactor.callLater(0, self._resumeProducing)
-
- def _resumeProducing(self):
- self.paused = False
- if not self.chunkGenerator:
- return
- try:
- while not self.paused:
- chunk = self.chunkGenerator.next()
- self.consumer.writeChunk(chunk)
- # we exit this when the consumer says to stop, or we run out
- # of chunks
- except StopIteration:
- # if the generator finished, it will have done releaseFile
- self.chunkGenerator = None
- # now everything goes through the subscription, and they don't get to
- # pause anymore
-
- def logChunk(self, build, step, logfile, channel, chunk):
- if self.consumer:
- self.consumer.writeChunk((channel, chunk))
-
- def logfileFinished(self, logfile):
- self.done()
- if self.consumer:
- self.consumer.unregisterProducer()
- self.consumer.finish()
- self.consumer = None
-
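-# [Editor's note: a minimal illustrative LogFileConsumer, added for clarity;
-# not part of the original file.] It provides the methods the docstring above
-# requires (registerProducer, unregisterProducer, writeChunk) plus finish(),
-# which logfileFinished() calls, and simply collects chunks in memory:
-class ExampleLogFileConsumer:
-    def __init__(self):
-        self.chunks = []
-        self.producer = None
-    def registerProducer(self, producer, streaming):
-        self.producer = producer
-    def unregisterProducer(self):
-        self.producer = None
-    def writeChunk(self, chunk):
-        # chunk is a (channel, text) tuple
-        self.chunks.append(chunk)
-    def finish(self):
-        pass
-# hypothetical usage: somelogfile.subscribeConsumer(ExampleLogFileConsumer())
-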
-class LogFile:
- """A LogFile keeps all of its contents on disk, in a non-pickle format to
- which new entries can easily be appended. The file on disk has a name
- like 12-log-compile-output, under the Builder's directory. The actual
- filename is generated (before the LogFile is created) by
- L{BuildStatus.generateLogfileName}.
-
- Old LogFile pickles (which kept their contents in .entries) must be
- upgraded. The L{BuilderStatus} is responsible for doing this, when it
- loads the L{BuildStatus} into memory. The Build pickle is not modified,
- so users who go from 0.6.5 back to 0.6.4 don't have to lose their
- logs."""
-
- if implements:
- implements(interfaces.IStatusLog)
- else:
- __implements__ = interfaces.IStatusLog,
-
- finished = False
- length = 0
- progress = None
- chunkSize = 10*1000
- runLength = 0
- runEntries = [] # provided so old pickled builds will getChunks() ok
- entries = None
- BUFFERSIZE = 2048
- filename = None # relative to the Builder's basedir
- openfile = None
-
- def __init__(self, parent, name, logfilename):
- """
- @type parent: L{BuildStepStatus}
- @param parent: the Step that this log is a part of
- @type name: string
- @param name: the name of this log, typically 'output'
- @type logfilename: string
- @param logfilename: the Builder-relative pathname for the saved entries
- """
- self.step = parent
- self.name = name
- self.filename = logfilename
- fn = self.getFilename()
- if os.path.exists(fn):
- # the buildmaster was probably stopped abruptly, before the
- # BuilderStatus could be saved, so BuilderStatus.nextBuildNumber
- # is out of date, and we're overlapping with earlier builds now.
- # Warn about it, but then overwrite the old pickle file
- log.msg("Warning: Overwriting old serialized Build at %s" % fn)
- self.openfile = open(fn, "w+")
- self.runEntries = []
- self.watchers = []
- self.finishedWatchers = []
-
- def getFilename(self):
- return os.path.join(self.step.build.builder.basedir, self.filename)
-
- def hasContents(self):
- return os.path.exists(self.getFilename())
-
- def getName(self):
- return self.name
-
- def getStep(self):
- return self.step
-
- def isFinished(self):
- return self.finished
- def waitUntilFinished(self):
- if self.finished:
- d = defer.succeed(self)
- else:
- d = defer.Deferred()
- self.finishedWatchers.append(d)
- return d
-
- def getFile(self):
- if self.openfile:
- # this is the filehandle we're using to write to the log, so
- # don't close it!
- return self.openfile
- # otherwise they get their own read-only handle
- return open(self.getFilename(), "r")
-
- def getText(self):
- # this produces one ginormous string
- return "".join(self.getChunks([STDOUT, STDERR], onlyText=True))
-
- def getTextWithHeaders(self):
- return "".join(self.getChunks(onlyText=True))
-
- def getChunks(self, channels=[], onlyText=False):
- # generate chunks for everything that was logged at the time we were
- # first called, so remember how long the file was when we started.
- # Don't read beyond that point. The current contents of
- # self.runEntries will follow.
-
- # this returns an iterator, which means arbitrary things could happen
- # while we're yielding. This will faithfully deliver the log as it
- # existed when it was started, and not return anything after that
- # point. To use this in subscribe(catchup=True) without missing any
- # data, you must ensure that nothing will be added to the log during
- # yield() calls.
-
- f = self.getFile()
- offset = 0
- f.seek(0, 2)
- remaining = f.tell()
-
- leftover = None
- if self.runEntries and (not channels or
- (self.runEntries[0][0] in channels)):
- leftover = (self.runEntries[0][0],
- "".join([c[1] for c in self.runEntries]))
-
- # freeze the state of the LogFile by passing a lot of parameters into
- # a generator
- return self._generateChunks(f, offset, remaining, leftover,
- channels, onlyText)
-
- def _generateChunks(self, f, offset, remaining, leftover,
- channels, onlyText):
- chunks = []
- p = LogFileScanner(chunks.append, channels)
- f.seek(offset)
- data = f.read(min(remaining, self.BUFFERSIZE))
- remaining -= len(data)
- offset = f.tell()
- while data:
- p.dataReceived(data)
- while chunks:
- channel, text = chunks.pop(0)
- if onlyText:
- yield text
- else:
- yield (channel, text)
- f.seek(offset)
- data = f.read(min(remaining, self.BUFFERSIZE))
- remaining -= len(data)
- offset = f.tell()
- del f
-
- if leftover:
- if onlyText:
- yield leftover[1]
- else:
- yield leftover
-
- def subscribe(self, receiver, catchup):
- if self.finished:
- return
- self.watchers.append(receiver)
- if catchup:
- for channel, text in self.getChunks():
- # TODO: add logChunks(), to send over everything at once?
- receiver.logChunk(self.step.build, self.step, self,
- channel, text)
-
- def unsubscribe(self, receiver):
- if receiver in self.watchers:
- self.watchers.remove(receiver)
-
- def subscribeConsumer(self, consumer):
- p = LogFileProducer(self, consumer)
- p.resumeProducing()
-
- # interface used by the build steps to add things to the log
- def logProgressTo(self, progress, name):
- self.progress = progress
- self.progressName = name
-
- def merge(self):
- # merge all .runEntries (which are all of the same type) into a
- # single chunk for .entries
- if not self.runEntries:
- return
- channel = self.runEntries[0][0]
- text = "".join([c[1] for c in self.runEntries])
- assert channel < 10
- f = self.openfile
- f.seek(0, 2)
- offset = 0
- while offset < len(text):
- size = min(len(text)-offset, self.chunkSize)
- f.write("%d:%d" % (1 + size, channel))
- f.write(text[offset:offset+size])
- f.write(",")
- offset += size
- self.runEntries = []
- self.runLength = 0
-
- def addEntry(self, channel, text):
- assert not self.finished
- # we only add to .runEntries here. merge() is responsible for adding
- # merged chunks to .entries
- if self.runEntries and channel != self.runEntries[0][0]:
- self.merge()
- self.runEntries.append((channel, text))
- self.runLength += len(text)
- if self.runLength >= self.chunkSize:
- self.merge()
-
- for w in self.watchers:
- w.logChunk(self.step.build, self.step, self, channel, text)
- self.length += len(text)
- if self.progress:
- self.progress.setProgress(self.progressName, self.length)
-
- def addStdout(self, text):
- self.addEntry(STDOUT, text)
- def addStderr(self, text):
- self.addEntry(STDERR, text)
- def addHeader(self, text):
- self.addEntry(HEADER, text)
-
- def finish(self):
- self.merge()
- if self.openfile:
- # we don't do an explicit close, because there might be readers
- # sharing the filehandle. As soon as they stop reading, the
- # filehandle will be released and automatically closed. We will
- # do a sync, however, to make sure the log gets saved in case of
- # a crash.
- os.fsync(self.openfile.fileno())
- del self.openfile
- self.finished = True
- watchers = self.finishedWatchers
- self.finishedWatchers = []
- for w in watchers:
- w.callback(self)
- if self.progress:
- self.progress.setProgress(self.progressName, self.length)
- del self.progress
- del self.progressName
-
- # persistence stuff
- def __getstate__(self):
- d = self.__dict__.copy()
- del d['step'] # filled in upon unpickling
- del d['watchers']
- del d['finishedWatchers']
- d['entries'] = [] # let 0.6.4 tolerate the saved log. TODO: really?
- if d.has_key('finished'):
- del d['finished']
- if d.has_key('progress'):
- del d['progress']
- del d['progressName']
- if d.has_key('openfile'):
- del d['openfile']
- return d
-
- def __setstate__(self, d):
- self.__dict__ = d
- self.watchers = [] # probably not necessary
- self.finishedWatchers = [] # same
- # self.step must be filled in by our parent
- self.finished = True
-
- def upgrade(self, logfilename):
- """Save our .entries to a new-style offline log file (if necessary),
- and modify our in-memory representation to use it. The original
- pickled LogFile (inside the pickled Build) won't be modified."""
- self.filename = logfilename
- if not os.path.exists(self.getFilename()):
- self.openfile = open(self.getFilename(), "w")
- self.finished = False
- for channel,text in self.entries:
- self.addEntry(channel, text)
- self.finish() # releases self.openfile, which will be closed
- del self.entries
-
-
-class HTMLLogFile:
- if implements:
- implements(interfaces.IStatusLog)
- else:
- __implements__ = interfaces.IStatusLog,
-
- filename = None
-
- def __init__(self, parent, name, logfilename, html):
- self.step = parent
- self.name = name
- self.filename = logfilename
- self.html = html
-
- def getName(self):
- return self.name # set in BuildStepStatus.addLog
- def getStep(self):
- return self.step
-
- def isFinished(self):
- return True
- def waitUntilFinished(self):
- return defer.succeed(self)
-
- def hasContents(self):
- return True
- def getText(self):
- return self.html # looks kinda like text
- def getTextWithHeaders(self):
- return self.html
- def getChunks(self):
- return [(STDERR, self.html)]
-
- def subscribe(self, receiver, catchup):
- pass
- def unsubscribe(self, receiver):
- pass
-
- def finish(self):
- pass
-
- def __getstate__(self):
- d = self.__dict__.copy()
- del d['step']
- return d
-
- def upgrade(self, logfilename):
- pass
-
-
-class Event:
- if implements:
- implements(interfaces.IStatusEvent)
- else:
- __implements__ = interfaces.IStatusEvent,
-
- started = None
- finished = None
- text = []
- color = None
-
- # IStatusEvent methods
- def getTimes(self):
- return (self.started, self.finished)
- def getText(self):
- return self.text
- def getColor(self):
- return self.color
- def getLogs(self):
- return []
-
- def finish(self):
- self.finished = util.now()
-
-class TestResult:
- if implements:
- implements(interfaces.ITestResult)
- else:
- __implements__ = interfaces.ITestResult,
-
- def __init__(self, name, results, text, logs):
- assert isinstance(name, tuple)
- self.name = name
- self.results = results
- self.text = text
- self.logs = logs
-
- def getName(self):
- return self.name
-
- def getResults(self):
- return self.results
-
- def getText(self):
- return self.text
-
- def getLogs(self):
- return self.logs
-
-
-class BuildSetStatus:
- if implements:
- implements(interfaces.IBuildSetStatus)
- else:
- __implements__ = interfaces.IBuildSetStatus,
-
- def __init__(self, source, reason, builderNames, bsid=None):
- self.source = source
- self.reason = reason
- self.builderNames = builderNames
- self.id = bsid
- self.successWatchers = []
- self.finishedWatchers = []
- self.stillHopeful = True
- self.finished = False
-
- def setBuildRequestStatuses(self, buildRequestStatuses):
- self.buildRequests = buildRequestStatuses
- def setResults(self, results):
- # the build set succeeds only if all its component builds succeed
- self.results = results
- def giveUpHope(self):
- self.stillHopeful = False
-
-
- def notifySuccessWatchers(self):
- for d in self.successWatchers:
- d.callback(self)
- self.successWatchers = []
-
- def notifyFinishedWatchers(self):
- self.finished = True
- for d in self.finishedWatchers:
- d.callback(self)
- self.finishedWatchers = []
-
- # methods for our clients
-
- def getSourceStamp(self):
- return self.source
- def getReason(self):
- return self.reason
- def getResults(self):
- return self.results
- def getID(self):
- return self.id
-
- def getBuilderNames(self):
- return self.builderNames
- def getBuildRequests(self):
- return self.buildRequests
- def isFinished(self):
- return self.finished
-
- def waitUntilSuccess(self):
- if self.finished or not self.stillHopeful:
- # the deferreds have already fired
- return defer.succeed(self)
- d = defer.Deferred()
- self.successWatchers.append(d)
- return d
-
- def waitUntilFinished(self):
- if self.finished:
- return defer.succeed(self)
- d = defer.Deferred()
- self.finishedWatchers.append(d)
- return d
-
-class BuildRequestStatus:
- if implements:
- implements(interfaces.IBuildRequestStatus)
- else:
- __implements__ = interfaces.IBuildRequestStatus,
-
- def __init__(self, source, builderName):
- self.source = source
- self.builderName = builderName
- self.builds = [] # list of BuildStatus objects
- self.observers = []
-
- def buildStarted(self, build):
- self.builds.append(build)
- for o in self.observers[:]:
- o(build)
-
- # methods called by our clients
- def getSourceStamp(self):
- return self.source
- def getBuilderName(self):
- return self.builderName
- def getBuilds(self):
- return self.builds
-
- def subscribe(self, observer):
- self.observers.append(observer)
- for b in self.builds:
- observer(b)
- def unsubscribe(self, observer):
- self.observers.remove(observer)
-
-
-class BuildStepStatus:
- """
- I represent a collection of output status for a
- L{buildbot.process.step.BuildStep}.
-
- @type color: string
- @cvar color: color that this step feels best represents its
- current mood. yellow,green,red,orange are the
- current mood. yellow, green, red, orange are the
- an exception
- @type progress: L{buildbot.status.progress.StepProgress}
- @cvar progress: tracks ETA for the step
- @type text: list of strings
- @cvar text: list of short texts that describe the command and its status
- @type text2: list of strings
- @cvar text2: list of short texts added to the overall build description
- @type logs: dict of string -> L{buildbot.status.builder.LogFile}
- @ivar logs: logs of steps
- """
- # note that these are created when the Build is set up, before each
- # corresponding BuildStep has started.
- if implements:
- implements(interfaces.IBuildStepStatus, interfaces.IStatusEvent)
- else:
- __implements__ = interfaces.IBuildStepStatus, interfaces.IStatusEvent
-
- started = None
- finished = None
- progress = None
- text = []
- color = None
- results = (None, [])
- text2 = []
- watchers = []
- updates = {}
- finishedWatchers = []
-
- def __init__(self, parent):
- assert interfaces.IBuildStatus(parent)
- self.build = parent
- self.logs = []
- self.watchers = []
- self.updates = {}
- self.finishedWatchers = []
-
- def getName(self):
- """Returns a short string with the name of this step. This string
- may have spaces in it."""
- return self.name
-
- def getBuild(self):
- return self.build
-
- def getTimes(self):
- return (self.started, self.finished)
-
- def getExpectations(self):
- """Returns a list of tuples (name, current, target)."""
- if not self.progress:
- return []
- ret = []
- metrics = self.progress.progress.keys()
- metrics.sort()
- for m in metrics:
- t = (m, self.progress.progress[m], self.progress.expectations[m])
- ret.append(t)
- return ret
-
- def getLogs(self):
- return self.logs
-
-
- def isFinished(self):
- return (self.finished is not None)
-
- def waitUntilFinished(self):
- if self.finished:
- d = defer.succeed(self)
- else:
- d = defer.Deferred()
- self.finishedWatchers.append(d)
- return d
-
- # while the step is running, the following methods make sense.
- # Afterwards they return None
-
- def getETA(self):
- if self.started is None:
- return None # not started yet
- if self.finished is not None:
- return None # already finished
- if not self.progress:
- return None # no way to predict
- return self.progress.remaining()
-
- # Once you know the step has finished, the following methods are legal.
- # Before this step has finished, they all return None.
-
- def getText(self):
- """Returns a list of strings which describe the step. These are
- intended to be displayed in a narrow column. If more space is
- available, the caller should join them together with spaces before
- presenting them to the user."""
- return self.text
-
- def getColor(self):
- """Returns a single string with the color that should be used to
- display this step. 'green', 'orange', 'red', 'yellow' and 'purple'
- are the most likely ones."""
- return self.color
-
- def getResults(self):
- """Return a tuple describing the results of the step.
- 'result' is one of the constants in L{buildbot.status.builder}:
- SUCCESS, WARNINGS, FAILURE, or SKIPPED.
- 'strings' is an optional list of strings that the step wants to
- append to the overall build's results. These strings are usually
- more terse than the ones returned by getText(): in particular,
- successful Steps do not usually contribute any text to the
- overall build.
-
- @rtype: tuple of int, list of strings
- @returns: (result, strings)
- """
- return (self.results, self.text2)
-
- # subscription interface
-
- def subscribe(self, receiver, updateInterval=10):
- # will get logStarted, logFinished, stepETAUpdate
- assert receiver not in self.watchers
- self.watchers.append(receiver)
- self.sendETAUpdate(receiver, updateInterval)
-
- def sendETAUpdate(self, receiver, updateInterval):
- self.updates[receiver] = None
- # they might unsubscribe during stepETAUpdate
- receiver.stepETAUpdate(self.build, self,
- self.getETA(), self.getExpectations())
- if receiver in self.watchers:
- self.updates[receiver] = reactor.callLater(updateInterval,
- self.sendETAUpdate,
- receiver,
- updateInterval)
-
- def unsubscribe(self, receiver):
- if receiver in self.watchers:
- self.watchers.remove(receiver)
- if receiver in self.updates:
- if self.updates[receiver] is not None:
- self.updates[receiver].cancel()
- del self.updates[receiver]
-
-
- # methods to be invoked by the BuildStep
-
- def setName(self, stepname):
- self.name = stepname
-
- def setProgress(self, stepprogress):
- self.progress = stepprogress
-
- def stepStarted(self):
- self.started = util.now()
- if self.build:
- self.build.stepStarted(self)
-
- def addLog(self, name):
- assert self.started # addLog before stepStarted won't notify watchers
- logfilename = self.build.generateLogfileName(self.name, name)
- log = LogFile(self, name, logfilename)
- self.logs.append(log)
- for w in self.watchers:
- receiver = w.logStarted(self.build, self, log)
- if receiver:
- log.subscribe(receiver, True)
- d = log.waitUntilFinished()
- d.addCallback(lambda log: log.unsubscribe(receiver))
- d = log.waitUntilFinished()
- d.addCallback(self.logFinished)
- return log
-
- def addHTMLLog(self, name, html):
- assert self.started # addHTMLLog before stepStarted won't notify watchers
- logfilename = self.build.generateLogfileName(self.name, name)
- log = HTMLLogFile(self, name, logfilename, html)
- self.logs.append(log)
- for w in self.watchers:
- receiver = w.logStarted(self.build, self, log)
- # TODO: think about this: there isn't much point in letting
- # them subscribe
- #if receiver:
- # log.subscribe(receiver, True)
- w.logFinished(self.build, self, log)
-
- def logFinished(self, log):
- for w in self.watchers:
- w.logFinished(self.build, self, log)
-
- def setColor(self, color):
- self.color = color
- def setText(self, text):
- self.text = text
- def setText2(self, text):
- self.text2 = text
-
- def stepFinished(self, results):
- self.finished = util.now()
- self.results = results
- for loog in self.logs:
- if not loog.isFinished():
- loog.finish()
-
- for r in self.updates.keys():
- if self.updates[r] is not None:
- self.updates[r].cancel()
- del self.updates[r]
-
- watchers = self.finishedWatchers
- self.finishedWatchers = []
- for w in watchers:
- w.callback(self)
-
- # persistence
-
- def __getstate__(self):
- d = self.__dict__.copy()
- del d['build'] # filled in when loading
- if d.has_key('progress'):
- del d['progress']
- del d['watchers']
- del d['finishedWatchers']
- del d['updates']
- return d
-
- def __setstate__(self, d):
- self.__dict__ = d
- # self.build must be filled in by our parent
- for loog in self.logs:
- loog.step = self
-
-
-class BuildStatus(styles.Versioned):
- if implements:
- implements(interfaces.IBuildStatus, interfaces.IStatusEvent)
- else:
- __implements__ = interfaces.IBuildStatus, interfaces.IStatusEvent
- persistenceVersion = 2
-
- source = None
- username = None
- reason = None
- changes = []
- blamelist = []
- progress = None
- started = None
- finished = None
- currentStep = None
- text = []
- color = None
- results = None
- slavename = "???"
-
- # these lists/dicts are defined here so that unserialized instances have
- # (empty) values. They are set in __init__ to new objects to make sure
- # each instance gets its own copy.
- watchers = []
- updates = {}
- finishedWatchers = []
- testResults = {}
-
- def __init__(self, parent, number):
- """
- @type parent: L{BuilderStatus}
- @type number: int
- """
- assert interfaces.IBuilderStatus(parent)
- self.builder = parent
- self.number = number
- self.watchers = []
- self.updates = {}
- self.finishedWatchers = []
- self.steps = []
- self.testResults = {}
- self.properties = {}
-
- # IBuildStatus
-
- def getBuilder(self):
- """
- @rtype: L{BuilderStatus}
- """
- return self.builder
-
- def getProperty(self, propname):
- return self.properties[propname]
-
- def getNumber(self):
- return self.number
-
- def getPreviousBuild(self):
- if self.number == 0:
- return None
- return self.builder.getBuild(self.number-1)
-
- def getSourceStamp(self):
- return (self.source.branch, self.source.revision, self.source.patch)
-
- def getUsername(self):
- return self.username
-
- def getReason(self):
- return self.reason
-
- def getChanges(self):
- return self.changes
-
- def getResponsibleUsers(self):
- return self.blamelist
-
- def getInterestedUsers(self):
- # TODO: the Builder should add others: sheriffs, domain-owners
- return self.blamelist
-
- def getSteps(self):
- """Return a list of IBuildStepStatus objects. For invariant builds
- (those which always use the same set of Steps), this should be the
- complete list, however some of the steps may not have started yet
- (step.getTimes()[0] will be None). For variant builds, this may not
- be complete (asking again later may give you more of them)."""
- return self.steps
-
- def getTimes(self):
- return (self.started, self.finished)
-
- def isFinished(self):
- return (self.finished is not None)
-
- def waitUntilFinished(self):
- if self.finished:
- d = defer.succeed(self)
- else:
- d = defer.Deferred()
- self.finishedWatchers.append(d)
- return d
-
- # while the build is running, the following methods make sense.
- # Afterwards they return None
-
- def getETA(self):
- if self.finished is not None:
- return None
- if not self.progress:
- return None
- eta = self.progress.eta()
- if eta is None:
- return None
- return eta - util.now()
-
- def getCurrentStep(self):
- return self.currentStep
-
- # Once you know the build has finished, the following methods are legal.
- # Before this build has finished, they all return None.
-
- def getText(self):
- text = []
- text.extend(self.text)
- for s in self.steps:
- text.extend(s.text2)
- return text
-
- def getColor(self):
- return self.color
-
- def getResults(self):
- return self.results
-
- def getSlavename(self):
- return self.slavename
-
- def getTestResults(self):
- return self.testResults
-
- def getLogs(self):
- # TODO: steps should contribute significant logs instead of this
- # hack, which returns every log from every step. The logs should get
- # names like "compile" and "test" instead of "compile.output"
- logs = []
- for s in self.steps:
- for log in s.getLogs():
- logs.append(log)
- return logs
-
- # subscription interface
-
- def subscribe(self, receiver, updateInterval=None):
- # will receive stepStarted and stepFinished messages
- # and maybe buildETAUpdate
- self.watchers.append(receiver)
- if updateInterval is not None:
- self.sendETAUpdate(receiver, updateInterval)
-
- def sendETAUpdate(self, receiver, updateInterval):
- self.updates[receiver] = None
- ETA = self.getETA()
- if ETA is not None:
- receiver.buildETAUpdate(self, self.getETA())
- # they might have unsubscribed during buildETAUpdate
- if receiver in self.watchers:
- self.updates[receiver] = reactor.callLater(updateInterval,
- self.sendETAUpdate,
- receiver,
- updateInterval)
-
- def unsubscribe(self, receiver):
- if receiver in self.watchers:
- self.watchers.remove(receiver)
- if receiver in self.updates:
- if self.updates[receiver] is not None:
- self.updates[receiver].cancel()
- del self.updates[receiver]
-
- # methods for the base.Build to invoke
-
- def addStep(self, step):
- """The Build is setting up, and has added a new BuildStep to its
- list. The BuildStep object is ready for static queries (everything
- except ETA). Give it a BuildStepStatus object to which it can send
- status updates."""
-
- s = BuildStepStatus(self)
- s.setName(step.name)
- step.step_status = s
- self.steps.append(s)
-
- def setProperty(self, propname, value):
- self.properties[propname] = value
-
- def addTestResult(self, result):
- self.testResults[result.getName()] = result
-
- def setSourceStamp(self, sourceStamp):
- self.source = sourceStamp
- self.changes = self.source.changes
-
- def setUsername(self, username):
- self.username = username
- def setReason(self, reason):
- self.reason = reason
- def setBlamelist(self, blamelist):
- self.blamelist = blamelist
- def setProgress(self, progress):
- self.progress = progress
-
- def buildStarted(self, build):
- """The Build has been set up and is about to be started. It can now
- be safely queried, so it is time to announce the new build."""
-
- self.started = util.now()
- # now that we're ready to report status, let the BuilderStatus tell
- # the world about us
- self.builder.buildStarted(self)
-
- def setSlavename(self, slavename):
- self.slavename = slavename
-
- def setText(self, text):
- assert isinstance(text, (list, tuple))
- self.text = text
- def setColor(self, color):
- self.color = color
- def setResults(self, results):
- self.results = results
-
- def buildFinished(self):
- self.currentStep = None
- self.finished = util.now()
-
- for r in self.updates.keys():
- if self.updates[r] is not None:
- self.updates[r].cancel()
- del self.updates[r]
-
- watchers = self.finishedWatchers
- self.finishedWatchers = []
- for w in watchers:
- w.callback(self)
-
- # methods called by our BuildStepStatus children
-
- def stepStarted(self, step):
- self.currentStep = step
- name = self.getBuilder().getName()
- for w in self.watchers:
- receiver = w.stepStarted(self, step)
- if receiver:
- if type(receiver) == type(()):
- step.subscribe(receiver[0], receiver[1])
- else:
- step.subscribe(receiver)
- d = step.waitUntilFinished()
- d.addCallback(lambda step: step.unsubscribe(receiver))
-
- step.waitUntilFinished().addCallback(self._stepFinished)
-
- def _stepFinished(self, step):
- results = step.getResults()
- for w in self.watchers:
- w.stepFinished(self, step, results)
-
- # methods called by our BuilderStatus parent
-
- def pruneLogs(self):
- # this build is somewhat old: remove the build logs to save space
- # TODO: delete logs visible through IBuildStatus.getLogs
- for s in self.steps:
- s.pruneLogs()
-
- def pruneSteps(self):
- # this build is very old: remove the build steps too
- self.steps = []
-
- # persistence stuff
-
- def generateLogfileName(self, stepname, logname):
- """Return a filename (relative to the Builder's base directory) where
- the logfile's contents can be stored uniquely.
-
- The base filename is made by combining our build number, the Step's
- name, and the log's name, then removing unsuitable characters. The
- filename is then made unique by appending _0, _1, etc, until it does
- not collide with any other logfile.
-
- These files are kept in the Builder's basedir (rather than a
- per-Build subdirectory) because that makes cleanup easier: cron and
- find will help get rid of the old logs, but the empty directories are
- more of a hassle to remove."""
-
- starting_filename = "%d-log-%s-%s" % (self.number, stepname, logname)
- starting_filename = re.sub(r'[^\w\.\-]', '_', starting_filename)
- # now make it unique
- unique_counter = 0
- filename = starting_filename
- while filename in [l.filename
- for step in self.steps
- for l in step.getLogs()
- if l.filename]:
- filename = "%s_%d" % (starting_filename, unique_counter)
- unique_counter += 1
- return filename
-
- def __getstate__(self):
- d = styles.Versioned.__getstate__(self)
- # for now, a serialized Build is always "finished". We will never
- # save unfinished builds.
- if not self.finished:
- d['finished'] = True
- # TODO: push an "interrupted" step so it is clear that the build
- # was interrupted. The builder will have a 'shutdown' event, but
- # someone looking at just this build will be confused as to why
- # the last log is truncated.
- del d['builder'] # filled in by our parent when loading
- del d['watchers']
- del d['updates']
- del d['finishedWatchers']
- return d
-
- def __setstate__(self, d):
- styles.Versioned.__setstate__(self, d)
- # self.builder must be filled in by our parent when loading
- for step in self.steps:
- step.build = self
- self.watchers = []
- self.updates = {}
- self.finishedWatchers = []
-
- def upgradeToVersion1(self):
- if hasattr(self, "sourceStamp"):
- # the old .sourceStamp attribute wasn't actually very useful
- maxChangeNumber, patch = self.sourceStamp
- changes = getattr(self, 'changes', [])
- source = sourcestamp.SourceStamp(branch=None,
- revision=None,
- patch=patch,
- changes=changes)
- self.source = source
- self.changes = source.changes
- del self.sourceStamp
-
- def upgradeToVersion2(self):
- self.properties = {}
-
- def upgradeLogfiles(self):
- # upgrade any LogFiles that need it. This must occur after we've been
- # attached to our Builder, and after we know about all LogFiles of
- # all Steps (to get the filenames right).
- assert self.builder
- for s in self.steps:
- for l in s.getLogs():
- if l.filename:
- pass # new-style, log contents are on disk
- else:
- logfilename = self.generateLogfileName(s.name, l.name)
- # let the logfile update its .filename pointer,
- # transferring its contents onto disk if necessary
- l.upgrade(logfilename)
-
- def saveYourself(self):
- filename = os.path.join(self.builder.basedir, "%d" % self.number)
- if os.path.isdir(filename):
- # leftover from 0.5.0, which stored builds in directories
- shutil.rmtree(filename, ignore_errors=True)
- tmpfilename = filename + ".tmp"
- try:
- pickle.dump(self, open(tmpfilename, "wb"), -1)
- if sys.platform == 'win32':
- # windows cannot rename a file on top of an existing one, so
- # fall back to delete-first. There are ways this can fail and
- # lose the builder's history, so we avoid using it in the
- # general (non-windows) case
- if os.path.exists(filename):
- os.unlink(filename)
- os.rename(tmpfilename, filename)
- except:
- log.msg("unable to save build %s-#%d" % (self.builder.name,
- self.number))
- log.err()
-
-
-
-class BuilderStatus(styles.Versioned):
- """I handle status information for a single process.base.Builder object.
- That object sends status changes to me (frequently as Events), and I
- provide them on demand to the various status recipients, like the HTML
- waterfall display and the live status clients. It also sends build
- summaries to me, which I log and provide to status clients who aren't
- interested in seeing details of the individual build steps.
-
- I am responsible for maintaining the list of historic Events and Builds,
- pruning old ones, and loading them from / saving them to disk.
-
- I live in the buildbot.process.base.Builder object, in the .statusbag
- attribute.
-
- @type category: string
- @ivar category: user-defined category this builder belongs to; can be
- used to filter on in status clients
- """
-
- if implements:
- implements(interfaces.IBuilderStatus)
- else:
- __implements__ = interfaces.IBuilderStatus,
- persistenceVersion = 1
-
- # these limit the amount of memory we consume, as well as the size of the
- # main Builder pickle. The Build and LogFile pickles on disk must be
- # handled separately.
- buildCacheSize = 30
- buildHorizon = 100 # forget builds beyond this
- stepHorizon = 50 # forget steps in builds beyond this
-
- category = None
- currentBigState = "offline" # or idle/waiting/interlocked/building
- basedir = None # filled in by our parent
-
- def __init__(self, buildername, category=None):
- self.name = buildername
- self.category = category
-
- self.slavenames = []
- self.events = []
- # these three hold Events, and are used to retrieve the current
- # state of the boxes.
- self.lastBuildStatus = None
- #self.currentBig = None
- #self.currentSmall = None
- self.currentBuilds = []
- self.pendingBuilds = []
- self.nextBuild = None
- self.watchers = []
- self.buildCache = [] # TODO: age builds out of the cache
-
- # persistence
-
- def __getstate__(self):
- # when saving, don't record transient stuff like what builds are
- # currently running, because they won't be there when we start back
- # up. Nor do we save self.watchers, nor anything that gets set by our
- # parent like .basedir and .status
- d = styles.Versioned.__getstate__(self)
- d['watchers'] = []
- del d['buildCache']
- for b in self.currentBuilds:
- b.saveYourself()
- # TODO: push a 'hey, build was interrupted' event
- del d['currentBuilds']
- del d['pendingBuilds']
- del d['currentBigState']
- del d['basedir']
- del d['status']
- del d['nextBuildNumber']
- return d
-
- def __setstate__(self, d):
- # when loading, re-initialize the transient stuff. Remember that
- # upgradeToVersion1 and such will be called after this finishes.
- styles.Versioned.__setstate__(self, d)
- self.buildCache = []
- self.currentBuilds = []
- self.pendingBuilds = []
- self.watchers = []
- self.slavenames = []
- # self.basedir must be filled in by our parent
- # self.status must be filled in by our parent
-
- def upgradeToVersion1(self):
- if hasattr(self, 'slavename'):
- self.slavenames = [self.slavename]
- del self.slavename
- if hasattr(self, 'nextBuildNumber'):
- del self.nextBuildNumber # determineNextBuildNumber chooses this
-
- def determineNextBuildNumber(self):
- """Scan our directory of saved BuildStatus instances to determine
- what our self.nextBuildNumber should be. Set it one larger than the
- highest-numbered build we discover. This is called by the top-level
- Status object shortly after we are created or loaded from disk.
- """
- existing_builds = [int(f)
- for f in os.listdir(self.basedir)
- if re.match("^\d+$", f)]
- if existing_builds:
- self.nextBuildNumber = max(existing_builds) + 1
- else:
- self.nextBuildNumber = 0
-
- def saveYourself(self):
- for b in self.buildCache:
- if not b.isFinished():
- # interrupted build, need to save it anyway.
- # BuildStatus.saveYourself will mark it as interrupted.
- b.saveYourself()
- filename = os.path.join(self.basedir, "builder")
- tmpfilename = filename + ".tmp"
- try:
- pickle.dump(self, open(tmpfilename, "wb"), -1)
- if sys.platform == 'win32':
- # windows cannot rename a file on top of an existing one
- if os.path.exists(filename):
- os.unlink(filename)
- os.rename(tmpfilename, filename)
- except:
- log.msg("unable to save builder %s" % self.name)
- log.err()
-
-
- # build cache management
-
- def addBuildToCache(self, build):
- if build in self.buildCache:
- return
- self.buildCache.append(build)
- while len(self.buildCache) > self.buildCacheSize:
- self.buildCache.pop(0)
-
- def getBuildByNumber(self, number):
- for b in self.currentBuilds:
- if b.number == number:
- return b
- for build in self.buildCache:
- if build.number == number:
- return build
- filename = os.path.join(self.basedir, "%d" % number)
- try:
- build = pickle.load(open(filename, "rb"))
- styles.doUpgrade()
- build.builder = self
- # handle LogFiles from after 0.5.0 and before 0.6.5
- build.upgradeLogfiles()
- self.addBuildToCache(build)
- return build
- except IOError:
- raise IndexError("no such build %d" % number)
- except EOFError:
- raise IndexError("corrupted build pickle %d" % number)
-
- def prune(self):
- return # TODO: change this to walk through the filesystem
- # first, blow away all builds beyond our build horizon
- self.builds = self.builds[-self.buildHorizon:]
- # then prune steps in builds past the step horizon
- for b in self.builds[0:-self.stepHorizon]:
- b.pruneSteps()
-
- # IBuilderStatus methods
- def getName(self):
- return self.name
-
- def getState(self):
- return (self.currentBigState, self.currentBuilds)
-
- def getSlaves(self):
- return [self.status.getSlave(name) for name in self.slavenames]
-
- def getPendingBuilds(self):
- return self.pendingBuilds
-
- def getCurrentBuilds(self):
- return self.currentBuilds
-
- def getLastFinishedBuild(self):
- b = self.getBuild(-1)
- if not (b and b.isFinished()):
- b = self.getBuild(-2)
- return b
-
- def getBuild(self, number):
- if number < 0:
- number = self.nextBuildNumber + number
- if number < 0 or number >= self.nextBuildNumber:
- return None
-
- try:
- return self.getBuildByNumber(number)
- except IndexError:
- return None
-
- def getEvent(self, number):
- try:
- return self.events[number]
- except IndexError:
- return None
-
- def eventGenerator(self):
- """This function creates a generator which will provide all of this
- Builder's status events, starting with the most recent and
- progressing backwards in time. """
-
- # remember the newest-to-earliest flow here: "next" means earlier.
-
- # TODO: interleave build steps and self.events by timestamp
-
- eventIndex = -1
- e = self.getEvent(eventIndex)
- for Nb in range(1, self.nextBuildNumber+1):
- b = self.getBuild(-Nb)
- if not b:
- break
- steps = b.getSteps()
- for Ns in range(1, len(steps)+1):
- if steps[-Ns].started:
- step_start = steps[-Ns].getTimes()[0]
- while e is not None and e.getTimes()[0] > step_start:
- yield e
- eventIndex -= 1
- e = self.getEvent(eventIndex)
- yield steps[-Ns]
- yield b
- while e is not None:
- yield e
- eventIndex -= 1
- e = self.getEvent(eventIndex)
-
- def subscribe(self, receiver):
- # will get builderChangedState, buildStarted, and buildFinished
- self.watchers.append(receiver)
- self.publishState(receiver)
-
- def unsubscribe(self, receiver):
- self.watchers.remove(receiver)
-
- ## Builder interface (methods called by the Builder which feeds us)
-
- def setSlavenames(self, names):
- self.slavenames = names
-
- def addEvent(self, text=[], color=None):
- # this adds a duration event. When it is done, the user should call
- # e.finish(). They can also mangle it by modifying .text and .color
- e = Event()
- e.started = util.now()
- e.text = text
- e.color = color
- self.events.append(e)
- return e # they are free to mangle it further
-
- def addPointEvent(self, text=[], color=None):
- # this adds a point event, one which occurs as a single atomic
- # instant of time.
- e = Event()
- e.started = util.now()
- e.finished = 0
- e.text = text
- e.color = color
- self.events.append(e)
- return e # for consistency, but they really shouldn't touch it
-
- def setBigState(self, state):
- needToUpdate = state != self.currentBigState
- self.currentBigState = state
- if needToUpdate:
- self.publishState()
-
- def publishState(self, target=None):
- state = self.currentBigState
-
- if target is not None:
- # unicast
- target.builderChangedState(self.name, state)
- return
- for w in self.watchers:
- w.builderChangedState(self.name, state)
-
- def newBuild(self):
- """The Builder has decided to start a build, but the Build object is
- not yet ready to report status (it has not finished creating the
- Steps). Create a BuildStatus object that it can use."""
- number = self.nextBuildNumber
- self.nextBuildNumber += 1
- # TODO: self.saveYourself(), to make sure we don't forget about the
- # build number we've just allocated. This is not quite as important
- # as it was before we switched to determineNextBuildNumber, but I think
- # it may still be useful to have the new build save itself.
- s = BuildStatus(self, number)
- s.waitUntilFinished().addCallback(self._buildFinished)
- return s
-
- def addBuildRequest(self, brstatus):
- self.pendingBuilds.append(brstatus)
- def removeBuildRequest(self, brstatus):
- self.pendingBuilds.remove(brstatus)
-
- # buildStarted is called by our child BuildStatus instances
- def buildStarted(self, s):
- """Now the BuildStatus object is ready to go (it knows all of its
- Steps, its ETA, etc), so it is safe to notify our watchers."""
-
- assert s.builder is self # paranoia
- assert s.number == self.nextBuildNumber - 1
- assert s not in self.currentBuilds
- self.currentBuilds.append(s)
- self.addBuildToCache(s)
-
- # now that the BuildStatus is prepared to answer queries, we can
- # announce the new build to all our watchers
-
- for w in self.watchers: # TODO: maybe do this later? callLater(0)?
- receiver = w.buildStarted(self.getName(), s)
- if receiver:
- if type(receiver) == type(()):
- s.subscribe(receiver[0], receiver[1])
- else:
- s.subscribe(receiver)
- d = s.waitUntilFinished()
- d.addCallback(lambda s: s.unsubscribe(receiver))
-
-
- def _buildFinished(self, s):
- assert s in self.currentBuilds
- s.saveYourself()
- self.currentBuilds.remove(s)
-
- name = self.getName()
- results = s.getResults()
- for w in self.watchers:
- w.buildFinished(name, s, results)
-
- self.prune() # conserve disk
-
-
- # waterfall display (history)
-
- # I want some kind of build event that holds everything about the build:
- # why, what changes went into it, the results of the build, itemized
- # test results, etc. But, I do kind of need something to be inserted in
- # the event log first, because intermixing step events and the larger
- # build event is fraught with peril. Maybe an Event-like-thing that
- # doesn't have a file in it but does have links. Hmm, that's exactly
- # what it does now. The only difference would be that this event isn't
- # pushed to the clients.
-
- # publish to clients
- def sendLastBuildStatus(self, client):
- #client.newLastBuildStatus(self.lastBuildStatus)
- pass
- def sendCurrentActivityBigToEveryone(self):
- for s in self.subscribers:
- self.sendCurrentActivityBig(s)
- def sendCurrentActivityBig(self, client):
- state = self.currentBigState
- if state == "offline":
- client.currentlyOffline()
- elif state == "idle":
- client.currentlyIdle()
- elif state == "building":
- client.currentlyBuilding()
- else:
- log.msg("Hey, self.currentBigState is weird:", state)
-
-
- ## HTML display interface
-
- def getEventNumbered(self, num):
- # deal with dropped events, pruned events
- first = self.events[0].number
- if first + len(self.events)-1 != self.events[-1].number:
- log.msg(self,
- "lost an event somewhere: [0] is %d, [%d] is %d" % \
- (self.events[0].number,
- len(self.events) - 1,
- self.events[-1].number))
- for e in self.events:
- log.msg("e[%d]: " % e.number, e)
- return None
- offset = num - first
- log.msg(self, "offset", offset)
- try:
- return self.events[offset]
- except IndexError:
- return None
-
- ## Persistence of Status
- def loadYourOldEvents(self):
- if hasattr(self, "allEvents"):
- # first time, nothing to get from file. Note that this is only if
- # the Application gets .run() . If it gets .save()'ed, then the
- # .allEvents attribute goes away in the initial __getstate__ and
- # we try to load a non-existent file.
- return
- self.allEvents = self.loadFile("events", [])
- if self.allEvents:
- self.nextEventNumber = self.allEvents[-1].number + 1
- else:
- self.nextEventNumber = 0
- def saveYourOldEvents(self):
- self.saveFile("events", self.allEvents)
-
- ## clients
-
- def addClient(self, client):
- if client not in self.subscribers:
- self.subscribers.append(client)
- self.sendLastBuildStatus(client)
- self.sendCurrentActivityBig(client)
- client.newEvent(self.currentSmall)
- def removeClient(self, client):
- if client in self.subscribers:
- self.subscribers.remove(client)
-
-class SlaveStatus:
- if implements:
- implements(interfaces.ISlaveStatus)
- else:
- __implements__ = interfaces.ISlaveStatus,
-
- admin = None
- host = None
- connected = False
-
- def __init__(self, name):
- self.name = name
-
- def getName(self):
- return self.name
- def getAdmin(self):
- return self.admin
- def getHost(self):
- return self.host
- def isConnected(self):
- return self.connected
-
-class Status:
- """
- I represent the status of the buildmaster.
- """
- if implements:
- implements(interfaces.IStatus)
- else:
- __implements__ = interfaces.IStatus,
-
- def __init__(self, botmaster, basedir):
- """
- @type botmaster: L{buildbot.master.BotMaster}
- @param botmaster: the Status object uses C{.botmaster} to get at
- both the L{buildbot.master.BuildMaster} (for
- various buildbot-wide parameters) and the
- actual Builders (to get at their L{BuilderStatus}
- objects). It is not allowed to change or influence
- anything through this reference.
- @type basedir: string
- @param basedir: this provides a base directory in which saved status
- information (changes.pck, saved Build status
- pickles) can be stored
- """
- self.botmaster = botmaster
- self.basedir = basedir
- self.watchers = []
- self.activeBuildSets = []
- assert os.path.isdir(basedir)
-
-
- # methods called by our clients
-
- def getProjectName(self):
- return self.botmaster.parent.projectName
- def getProjectURL(self):
- return self.botmaster.parent.projectURL
- def getBuildbotURL(self):
- return self.botmaster.parent.buildbotURL
-
- def getURLForThing(self, thing):
- prefix = self.getBuildbotURL()
- if not prefix:
- return None
- if providedBy(thing, interfaces.IStatus):
- return prefix
- if providedBy(thing, interfaces.ISchedulerStatus):
- pass
- if providedBy(thing, interfaces.IBuilderStatus):
- builder = thing
- return prefix + urllib.quote(builder.getName(), safe='')
- if providedBy(thing, interfaces.IBuildStatus):
- build = thing
- builder = build.getBuilder()
- return "%s%s/builds/%d" % (
- prefix,
- urllib.quote(builder.getName(), safe=''),
- build.getNumber())
- if providedBy(thing, interfaces.IBuildStepStatus):
- step = thing
- build = step.getBuild()
- builder = build.getBuilder()
- return "%s%s/builds/%d/%s" % (
- prefix,
- urllib.quote(builder.getName(), safe=''),
- build.getNumber(),
- "step-" + urllib.quote(step.getName(), safe=''))
- # IBuildSetStatus
- # IBuildRequestStatus
- # ISlaveStatus
-
- # IStatusEvent
- if providedBy(thing, interfaces.IStatusEvent):
- from buildbot.changes import changes
- # TODO: this is goofy, create IChange or something
- if isinstance(thing, changes.Change):
- change = thing
- return "%schanges/%d" % (prefix, change.number)
-
- if providedBy(thing, interfaces.IStatusLog):
- log = thing
- step = log.getStep()
- build = step.getBuild()
- builder = build.getBuilder()
-
- logs = step.getLogs()
- for i in range(len(logs)):
- if log is logs[i]:
- lognum = i
- break
- else:
- return None
- return "%s%s/builds/%d/%s/%d" % (
- prefix,
- urllib.quote(builder.getName(), safe=''),
- build.getNumber(),
- "step-" + urllib.quote(step.getName(), safe=''),
- lognum)
-
-
- def getSchedulers(self):
- return self.botmaster.parent.allSchedulers()
-
- def getBuilderNames(self, categories=None):
- if categories == None:
- return self.botmaster.builderNames[:] # don't let them break it
-
- l = []
- # respect addition order
- for name in self.botmaster.builderNames:
- builder = self.botmaster.builders[name]
- if builder.builder_status.category in categories:
- l.append(name)
- return l
-
- def getBuilder(self, name):
- """
- @rtype: L{BuilderStatus}
- """
- return self.botmaster.builders[name].builder_status
-
- def getSlave(self, slavename):
- return self.botmaster.slaves[slavename].slave_status
-
- def getBuildSets(self):
- return self.activeBuildSets[:]
-
- def subscribe(self, target):
- self.watchers.append(target)
- for name in self.botmaster.builderNames:
- self.announceNewBuilder(target, name, self.getBuilder(name))
- def unsubscribe(self, target):
- self.watchers.remove(target)
-
-
- # methods called by upstream objects
-
- def announceNewBuilder(self, target, name, builder_status):
- t = target.builderAdded(name, builder_status)
- if t:
- builder_status.subscribe(t)
-
- def builderAdded(self, name, basedir, category=None):
- """
- @rtype: L{BuilderStatus}
- """
- filename = os.path.join(self.basedir, basedir, "builder")
- log.msg("trying to load status pickle from %s" % filename)
- builder_status = None
- try:
- builder_status = pickle.load(open(filename, "rb"))
- styles.doUpgrade()
- except IOError:
- log.msg("no saved status pickle, creating a new one")
- except:
- log.msg("error while loading status pickle, creating a new one")
- log.msg("error follows:")
- log.err()
- if not builder_status:
- builder_status = BuilderStatus(name, category)
- builder_status.addPointEvent(["builder", "created"])
- log.msg("added builder %s in category %s" % (name, category))
- # an unpickled object might not have category set from before,
- # so set it here to make sure
- builder_status.category = category
- builder_status.basedir = os.path.join(self.basedir, basedir)
- builder_status.name = name # it might have been updated
- builder_status.status = self
-
- if not os.path.isdir(builder_status.basedir):
- os.mkdir(builder_status.basedir)
- builder_status.determineNextBuildNumber()
-
- builder_status.setBigState("offline")
-
- for t in self.watchers:
- self.announceNewBuilder(t, name, builder_status)
-
- return builder_status
-
- def builderRemoved(self, name):
- for t in self.watchers:
- t.builderRemoved(name)
-
- def prune(self):
- for b in self.botmaster.builders.values():
- b.builder_status.prune()
-
- def buildsetSubmitted(self, bss):
- self.activeBuildSets.append(bss)
- bss.waitUntilFinished().addCallback(self.activeBuildSets.remove)
- for t in self.watchers:
- t.buildsetSubmitted(bss)
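The status classes above form a push-style subscription chain: a watcher handed to Status.subscribe() receives builderAdded() for each builder; if that call returns a receiver, BuilderStatus.subscribe() delivers builderChangedState, buildStarted and buildFinished; and by returning itself (or a (receiver, interval) tuple) from buildStarted, the watcher is also subscribed to the BuildStatus, which adds stepStarted, stepFinished and periodic buildETAUpdate calls. A minimal sketch of such a watcher, assuming a master-side IStatus object is available as `status` (that name, and the 30-second interval, are illustrative assumptions, not part of the code above):

    # A sketch of a status watcher against the subscription interface above.
    # The method names match the callbacks invoked by Status, BuilderStatus
    # and BuildStatus; `status` is an assumed IStatus instance.
    class ConsoleWatcher:
        def builderAdded(self, name, builder_status):
            print "builder added:", name
            return self                   # subscribe to this BuilderStatus

        def builderRemoved(self, name):
            pass                          # Status also announces removals

        def buildsetSubmitted(self, buildset_status):
            pass                          # and submitted buildsets

        def builderChangedState(self, name, state):
            print "builder %s is now %s" % (name, state)

        def buildStarted(self, name, build_status):
            print "build #%d started on %s" % (build_status.getNumber(), name)
            return (self, 30)             # also request buildETAUpdate calls

        def buildETAUpdate(self, build_status, eta):
            print "ETA:", eta

        def stepStarted(self, build_status, step_status):
            print "step started:", step_status.getName()

        def stepFinished(self, build_status, step_status, results):
            print "step finished:", step_status.getName(), results

        def buildFinished(self, name, build_status, results):
            print "build #%d on %s finished:" % (build_status.getNumber(), name), results

    # status.subscribe(ConsoleWatcher())  # registers for all of the above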
diff --git a/buildbot/buildbot-source/build/lib/buildbot/status/client.py b/buildbot/buildbot-source/build/lib/buildbot/status/client.py
deleted file mode 100644
index 7e2b17c12..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/status/client.py
+++ /dev/null
@@ -1,573 +0,0 @@
-# -*- test-case-name: buildbot.test.test_status -*-
-
-from twisted.spread import pb
-from twisted.python import log, components
-from twisted.python.failure import Failure
-from twisted.internet import defer, reactor
-from twisted.application import service, strports
-from twisted.cred import portal, checkers
-
-from buildbot import util, interfaces
-from buildbot.twcompat import Interface, implements
-from buildbot.status import builder, base
-from buildbot.changes import changes
-
-class IRemote(Interface):
- pass
-
-def makeRemote(obj):
- # we want IRemote(None) to be None, but you can't really do that with
- # adapters, so we fake it
- if obj is None:
- return None
- return IRemote(obj)
-
-
-class RemoteBuildSet(pb.Referenceable):
- def __init__(self, buildset):
- self.b = buildset
-
- def remote_getSourceStamp(self):
- return self.b.getSourceStamp()
-
- def remote_getReason(self):
- return self.b.getReason()
-
- def remote_getID(self):
- return self.b.getID()
-
- def remote_getBuilderNames(self):
- return self.b.getBuilderNames()
-
- def remote_getBuildRequests(self):
- """Returns a list of (builderName, BuildRequest) tuples."""
- return [(br.getBuilderName(), IRemote(br))
- for br in self.b.getBuildRequests()]
-
- def remote_isFinished(self):
- return self.b.isFinished()
-
- def remote_waitUntilSuccess(self):
- d = self.b.waitUntilSuccess()
- d.addCallback(lambda res: self)
- return d
-
- def remote_waitUntilFinished(self):
- d = self.b.waitUntilFinished()
- d.addCallback(lambda res: self)
- return d
-
- def remote_getResults(self):
- return self.b.getResults()
-
-components.registerAdapter(RemoteBuildSet,
- interfaces.IBuildSetStatus, IRemote)
-
-
-class RemoteBuilder(pb.Referenceable):
- def __init__(self, builder):
- self.b = builder
-
- def remote_getName(self):
- return self.b.getName()
-
- def remote_getState(self):
- state, builds = self.b.getState()
- return (state,
- None, # TODO: remove leftover ETA
- [makeRemote(b) for b in builds])
-
- def remote_getSlaves(self):
- return [IRemote(s) for s in self.b.getSlaves()]
-
- def remote_getLastFinishedBuild(self):
- return makeRemote(self.b.getLastFinishedBuild())
-
- def remote_getCurrentBuilds(self):
- return makeRemote(self.b.getCurrentBuilds())
-
- def remote_getBuild(self, number):
- return makeRemote(self.b.getBuild(number))
-
- def remote_getEvent(self, number):
- return IRemote(self.b.getEvent(number))
-
-components.registerAdapter(RemoteBuilder,
- interfaces.IBuilderStatus, IRemote)
-
-
-class RemoteBuildRequest(pb.Referenceable):
- def __init__(self, buildreq):
- self.b = buildreq
- self.observers = []
-
- def remote_getSourceStamp(self):
- return self.b.getSourceStamp()
-
- def remote_getBuilderName(self):
- return self.b.getBuilderName()
-
- def remote_subscribe(self, observer):
- """The observer's remote_newbuild method will be called (with two
- arguments: the RemoteBuild object, and our builderName) for each new
- Build that is created to handle this BuildRequest."""
- self.observers.append(observer)
- def send(bs):
- d = observer.callRemote("newbuild",
- IRemote(bs), self.b.getBuilderName())
- d.addErrback(lambda err: None)
- reactor.callLater(0, self.b.subscribe, send)
-
- def remote_unsubscribe(self, observer):
- # PB (well, at least oldpb) doesn't re-use RemoteReference instances,
- # so sending the same object across the wire twice will result in two
- # separate objects that compare as equal ('a is not b' and 'a == b').
- # That means we can't use a simple 'self.observers.remove(observer)'
- # here.
- for o in self.observers:
- if o == observer:
- self.observers.remove(o)
-
-components.registerAdapter(RemoteBuildRequest,
- interfaces.IBuildRequestStatus, IRemote)
-
-class RemoteBuild(pb.Referenceable):
- def __init__(self, build):
- self.b = build
- self.observers = []
-
- def remote_getBuilderName(self):
- return self.b.getBuilder().getName()
-
- def remote_getNumber(self):
- return self.b.getNumber()
-
- def remote_getReason(self):
- return self.b.getReason()
-
- def remote_getChanges(self):
- return [IRemote(c) for c in self.b.getChanges()]
-
- def remote_getResponsibleUsers(self):
- return self.b.getResponsibleUsers()
-
- def remote_getSteps(self):
- return [IRemote(s) for s in self.b.getSteps()]
-
- def remote_getTimes(self):
- return self.b.getTimes()
-
- def remote_isFinished(self):
- return self.b.isFinished()
-
- def remote_waitUntilFinished(self):
- # the Deferred returned by callRemote() will fire when this build is
- # finished
- d = self.b.waitUntilFinished()
- d.addCallback(lambda res: self)
- return d
-
- def remote_getETA(self):
- return self.b.getETA()
-
- def remote_getCurrentStep(self):
- return makeRemote(self.b.getCurrentStep())
-
- def remote_getText(self):
- return self.b.getText()
-
- def remote_getColor(self):
- return self.b.getColor()
-
- def remote_getResults(self):
- return self.b.getResults()
-
- def remote_getLogs(self):
- logs = {}
- for name,log in self.b.getLogs().items():
- logs[name] = IRemote(log)
- return logs
-
- def remote_subscribe(self, observer, updateInterval=None):
- """The observer will have remote_stepStarted(buildername, build,
- stepname, step), remote_stepFinished(buildername, build, stepname,
- step, results), and maybe remote_buildETAUpdate(buildername, build,
- eta) messages sent to it."""
- self.observers.append(observer)
- s = BuildSubscriber(observer)
- self.b.subscribe(s, updateInterval)
-
- def remote_unsubscribe(self, observer):
- # TODO: is the observer automatically unsubscribed when the build
- # finishes? Or are they responsible for unsubscribing themselves
- # anyway? How do we avoid a race condition here?
- for o in self.observers:
- if o == observer:
- self.observers.remove(o)
-
-
-components.registerAdapter(RemoteBuild,
- interfaces.IBuildStatus, IRemote)
-
-class BuildSubscriber:
- def __init__(self, observer):
- self.observer = observer
-
- def buildETAUpdate(self, build, eta):
- self.observer.callRemote("buildETAUpdate",
- build.getBuilder().getName(),
- IRemote(build),
- eta)
-
- def stepStarted(self, build, step):
- self.observer.callRemote("stepStarted",
- build.getBuilder().getName(),
- IRemote(build),
- step.getName(), IRemote(step))
- return None
-
- def stepFinished(self, build, step, results):
- self.observer.callRemote("stepFinished",
- build.getBuilder().getName(),
- IRemote(build),
- step.getName(), IRemote(step),
- results)
-
-
-class RemoteBuildStep(pb.Referenceable):
- def __init__(self, step):
- self.s = step
-
- def remote_getName(self):
- return self.s.getName()
-
- def remote_getBuild(self):
- return IRemote(self.s.getBuild())
-
- def remote_getTimes(self):
- return self.s.getTimes()
-
- def remote_getExpectations(self):
- return self.s.getExpectations()
-
- def remote_getLogs(self):
- logs = {}
- for name,log in self.s.getLogs().items():
- logs[name] = IRemote(log)
- return logs
-
- def remote_isFinished(self):
- return self.s.isFinished()
-
- def remote_waitUntilFinished(self):
- return self.s.waitUntilFinished() # returns a Deferred
-
- def remote_getETA(self):
- return self.s.getETA()
-
- def remote_getText(self):
- return self.s.getText()
-
- def remote_getColor(self):
- return self.s.getColor()
-
- def remote_getResults(self):
- return self.s.getResults()
-
-components.registerAdapter(RemoteBuildStep,
- interfaces.IBuildStepStatus, IRemote)
-
-class RemoteSlave:
- def __init__(self, slave):
- self.s = slave
-
- def remote_getName(self):
- return self.s.getName()
- def remote_getAdmin(self):
- return self.s.getAdmin()
- def remote_getHost(self):
- return self.s.getHost()
- def remote_isConnected(self):
- return self.s.isConnected()
-
-components.registerAdapter(RemoteSlave,
- interfaces.ISlaveStatus, IRemote)
-
-class RemoteEvent:
- def __init__(self, event):
- self.e = event
-
- def remote_getTimes(self):
- return self.e.getTimes()
- def remote_getText(self):
- return self.e.getText()
- def remote_getColor(self):
- return self.e.getColor()
-
-components.registerAdapter(RemoteEvent,
- interfaces.IStatusEvent, IRemote)
-
-class RemoteLog(pb.Referenceable):
- def __init__(self, log):
- self.l = log
-
- def remote_getName(self):
- return self.l.getName()
-
- def remote_isFinished(self):
- return self.l.isFinished()
- def remote_waitUntilFinished(self):
- d = self.l.waitUntilFinished()
- d.addCallback(lambda res: self)
- return d
-
- def remote_getText(self):
- return self.l.getText()
- def remote_getTextWithHeaders(self):
- return self.l.getTextWithHeaders()
- def remote_getChunks(self):
- return self.l.getChunks()
- # TODO: subscription interface
-
-components.registerAdapter(RemoteLog, builder.LogFile, IRemote)
-# TODO: something similar for builder.HTMLLogfile ?
-
-class RemoteChange:
- def __init__(self, change):
- self.c = change
-
- def getWho(self):
- return self.c.who
- def getFiles(self):
- return self.c.files
- def getComments(self):
- return self.c.comments
-
-components.registerAdapter(RemoteChange, changes.Change, IRemote)
-
-
-class StatusClientPerspective(base.StatusReceiverPerspective):
-
- subscribed = None
- client = None
-
- def __init__(self, status):
- self.status = status # the IStatus
- self.subscribed_to_builders = [] # Builders to which we're subscribed
- self.subscribed_to = [] # everything else we're subscribed to
-
- def __getstate__(self):
- d = self.__dict__.copy()
- d['client'] = None
- return d
-
- def attached(self, mind):
- #log.msg("StatusClientPerspective.attached")
- return self
-
- def detached(self, mind):
- log.msg("PB client detached")
- self.client = None
- for name in self.subscribed_to_builders:
- log.msg(" unsubscribing from Builder(%s)" % name)
- self.status.getBuilder(name).unsubscribe(self)
- for s in self.subscribed_to:
- log.msg(" unsubscribe from %s" % s)
- s.unsubscribe(self)
- self.subscribed = None
-
- def perspective_subscribe(self, mode, interval, target):
- """The remote client wishes to subscribe to some set of events.
- 'target' will be sent remote messages when these events happen.
- 'mode' indicates which events are desired: it is a string with one
- of the following values:
-
- 'builders': builderAdded, builderRemoved
- 'builds': those plus builderChangedState, buildStarted, buildFinished
- 'steps': all those plus buildETAUpdate, stepStarted, stepFinished
- 'logs': all those plus stepETAUpdate, logStarted, logFinished
- 'full': all those plus logChunk (with the log contents)
-
-
- Messages are defined by buildbot.interfaces.IStatusReceiver .
- 'interval' is used to specify how frequently ETAUpdate messages
- should be sent.
-
- Raising or lowering the subscription level will take effect starting
- with the next build or step."""
-
- assert mode in ("builders", "builds", "steps", "logs", "full")
- assert target
- log.msg("PB subscribe(%s)" % mode)
-
- self.client = target
- self.subscribed = mode
- self.interval = interval
- self.subscribed_to.append(self.status)
- # wait a moment before subscribing, so the new-builder messages
- # won't appear before this remote method finishes
- reactor.callLater(0, self.status.subscribe, self)
- return None
-
- def perspective_unsubscribe(self):
- log.msg("PB unsubscribe")
- self.status.unsubscribe(self)
- self.subscribed_to.remove(self.status)
- self.client = None
-
- def perspective_getBuildSets(self):
- """This returns tuples of (buildset, bsid), because that is much more
- convenient for tryclient."""
- return [(IRemote(s), s.getID()) for s in self.status.getBuildSets()]
-
- def perspective_getBuilderNames(self):
- return self.status.getBuilderNames()
-
- def perspective_getBuilder(self, name):
- b = self.status.getBuilder(name)
- return IRemote(b)
-
- def perspective_getSlave(self, name):
- s = self.status.getSlave(name)
- return IRemote(s)
-
- # IStatusReceiver methods, invoked if we've subscribed
-
- # mode >= builder
- def builderAdded(self, name, builder):
- self.client.callRemote("builderAdded", name, IRemote(builder))
- if self.subscribed in ("builds", "steps", "logs", "full"):
- self.subscribed_to_builders.append(name)
- return self
- return None
-
- def builderChangedState(self, name, state):
- self.client.callRemote("builderChangedState", name, state, None)
- # TODO: remove leftover ETA argument
-
- def builderRemoved(self, name):
- if name in self.subscribed_to_builders:
- self.subscribed_to_builders.remove(name)
- self.client.callRemote("builderRemoved", name)
-
- def buildsetSubmitted(self, buildset):
- # TODO: deliver to client, somehow
- pass
-
- # mode >= builds
- def buildStarted(self, name, build):
- self.client.callRemote("buildStarted", name, IRemote(build))
- if self.subscribed in ("steps", "logs", "full"):
- self.subscribed_to.append(build)
- return (self, self.interval)
- return None
-
- def buildFinished(self, name, build, results):
- if build in self.subscribed_to:
- # we might have joined during the build
- self.subscribed_to.remove(build)
- self.client.callRemote("buildFinished",
- name, IRemote(build), results)
-
- # mode >= steps
- def buildETAUpdate(self, build, eta):
- self.client.callRemote("buildETAUpdate",
- build.getBuilder().getName(), IRemote(build),
- eta)
-
- def stepStarted(self, build, step):
- # we add some information here so the client doesn't have to do an
- # extra round-trip
- self.client.callRemote("stepStarted",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step))
- if self.subscribed in ("logs", "full"):
- self.subscribed_to.append(step)
- return (self, self.interval)
- return None
-
- def stepFinished(self, build, step, results):
- self.client.callRemote("stepFinished",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step),
- results)
- if step in self.subscribed_to:
- # eventually (through some new subscription method) we could
- # join in the middle of the step
- self.subscribed_to.remove(step)
-
- # mode >= logs
- def stepETAUpdate(self, build, step, ETA, expectations):
- self.client.callRemote("stepETAUpdate",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step),
- ETA, expectations)
-
- def logStarted(self, build, step, log):
- # TODO: make the HTMLLog adapter
- rlog = IRemote(log, None)
- if not rlog:
- print "hey, couldn't adapt %s to IRemote" % log
- self.client.callRemote("logStarted",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step),
- log.getName(), IRemote(log, None))
- if self.subscribed in ("full",):
- self.subscribed_to.append(log)
- return self
- return None
-
- def logFinished(self, build, step, log):
- self.client.callRemote("logFinished",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step),
- log.getName(), IRemote(log, None))
- if log in self.subscribed_to:
- self.subscribed_to.remove(log)
-
- # mode >= full
- def logChunk(self, build, step, log, channel, text):
- self.client.callRemote("logChunk",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step),
- log.getName(), IRemote(log),
- channel, text)
-
-
-class PBListener(base.StatusReceiverMultiService):
- """I am a listener for PB-based status clients."""
-
- compare_attrs = ["port", "cred"]
- if implements:
- implements(portal.IRealm)
- else:
- __implements__ = (portal.IRealm,
- base.StatusReceiverMultiService.__implements__)
-
- def __init__(self, port, user="statusClient", passwd="clientpw"):
- base.StatusReceiverMultiService.__init__(self)
- if type(port) is int:
- port = "tcp:%d" % port
- self.port = port
- self.cred = (user, passwd)
- p = portal.Portal(self)
- c = checkers.InMemoryUsernamePasswordDatabaseDontUse()
- c.addUser(user, passwd)
- p.registerChecker(c)
- f = pb.PBServerFactory(p)
- s = strports.service(port, f)
- s.setServiceParent(self)
-
- def setServiceParent(self, parent):
- base.StatusReceiverMultiService.setServiceParent(self, parent)
- self.setup()
-
- def setup(self):
- self.status = self.parent.getStatus()
-
- def requestAvatar(self, avatarID, mind, interface):
- assert interface == pb.IPerspective
- p = StatusClientPerspective(self.status)
- p.attached(mind) # perhaps .callLater(0) ?
- return (pb.IPerspective, p,
- lambda p=p,mind=mind: p.detached(mind))
diff --git a/buildbot/buildbot-source/build/lib/buildbot/status/getcws.py b/buildbot/buildbot-source/build/lib/buildbot/status/getcws.py
deleted file mode 100644
index c545b83c8..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/status/getcws.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# Original thanks to David Fraser <davidf@sjsoft.com> and Caolan McNamara <caolanm@redhat.com>
-
-import urllib2, cookielib, cgi
-import os, sys
-
-from HTMLParser import HTMLParser
-
-class cws:
- def __init__(self, cwss):
- self.cwss = cwss
-
-
-class EISScraper(HTMLParser):
- def __init__(self):
- HTMLParser.__init__(self)
- self.state = 0
- self.cwss = []
-
- def handle_starttag(self, tag, attrs):
- if tag == 'td' and self.state < 3:
- self.state += 1
-
- def handle_data(self, data):
- if self.state == 3:
- self.cwss.append(data.strip())
- self.state = 4
-
-
- def handle_endtag(self, tag):
- if tag == 'tr' and self.state == 4:
- self.state = 0
-
-class EIS:
- def __init__(self, cookiefile="eis.lwp"):
- self.cookiefile = cookiefile
- self.cookiejar = cookielib.LWPCookieJar()
- if os.path.isfile(self.cookiefile):
- self.cookiejar.load(self.cookiefile)
- opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar))
- urllib2.install_opener(opener)
- self.login()
- self.cache = {}
-
- def login(self):
- urllib2.urlopen("http://eis.services.openoffice.org/EIS2/GuestLogon").read()
- self.cookiejar.save(self.cookiefile)
-
- def cacheurl(self, url):
- if url in self.cache:
- return self.cache[url]
- else:
- try:
- contents = urllib2.urlopen(url).read()
- except urllib2.HTTPError, e:
- if e.code == 401:
- self.login()
- contents = urllib2.urlopen(url).read()
- else:
- raise
- self.cache[url] = contents
- return contents
- def findcws(self, cws):
- thiscwsid = None
- milestoneresults = self.cacheurl("http://eis.services.openoffice.org/EIS2/cws.SearchCWS?DATE_NULL_Integrated_After=&DATE_NULL_DueDateBefore=&INT_NULL_Priority=&Name=" + cws + "&SRC_Step=Search&INT_NULL_IsHelpRelevant=&RSV_NoWait=true&DATE_NULL_DueDateAfter=&TaskId=&DATE_NULL_Integrated_Before=&INT_NULL_IsUIRelevant=")
- for line in milestoneresults.replace("\r", "").split("\n"):
- # cws.ShowCWS?Path=SRC680%2Fm54%2Fdba15&Id=1431
- startmark, endmark = "'cws.ShowCWS?", "'"
- if startmark in line:
- cwsargs = line[line.find(startmark) + len(startmark):]
- cwsargs = cwsargs[:cwsargs.find(endmark)]
- cwsargs = cgi.parse_qs(cwsargs)
- thiscwsid = int(cwsargs["Id"][0])
-
- return thiscwsid
-
-
- def getCWSs(self, query):
- status = -1
- if query == "new":
- status = 1
- elif query == "nominated":
- status = 2
- elif query == "integrated":
- status = 3
- elif query == "cancelled":
- status = 4
- elif query == "deleted":
- status = 5
- elif query == "ready":
- status = 6
- elif query == "planned":
- status = 7
- elif query == "approved":
- status = 8
- elif query == "pre-nominated":
- status = 9
- elif query == "fixed":
- status = 10
- elif query == "finished":
- status = 11
- elif query == "cloned":
- status = 12
-
- cwsresults = self.cacheurl("http://eis.services.openoffice.org/EIS2/cws.SearchCWS?Status=" + `status` +"&MWS=3&RSV_NoWait=true&SRC_Step=Search")
-
- foo = EISScraper()
- foo.feed(cwsresults)
- foo.cwss = foo.cwss[1:]
- foo.cwss.sort(lambda x, y: cmp(x.lower(), y.lower()))
- return cws(foo.cwss)
-
- def getcwsid(self, cwsname):
- somecwsid = self.findcws(cwsname)
- if somecwsid != None:
- return somecwsid
- raise ValueError("no id found for cws %s" % cwsname)
-
- def getcwsurl(self, cwsname):
- cwsid = self.getcwsid(cwsname)
- return self.cacheurl("http://eis.services.openoffice.org/EIS2/cws.ShowCWS?Id=%d" % cwsid)
-
-
-
-class GetCWS:
- def __init__(self, query):
- self.query = query
-
- def getCWSs(self):
- eis = EIS()
- info = eis.getCWSs(self.query)
- return info.cwss
-
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/status/html.py b/buildbot/buildbot-source/build/lib/buildbot/status/html.py
deleted file mode 100644
index efed7509e..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/status/html.py
+++ /dev/null
@@ -1,2385 +0,0 @@
-# -*- test-case-name: buildbot.test.test_web -*-
-
-from __future__ import generators
-
-from twisted.python import log, components
-from twisted.python.util import sibpath
-import urllib, re
-
-from twisted.internet import defer, reactor
-from twisted.web.resource import Resource
-from twisted.web import static, html, server, distrib
-from twisted.web.error import NoResource
-from twisted.web.util import Redirect, DeferredResource
-from twisted.application import strports
-from twisted.spread import pb
-
-from buildbot.twcompat import implements, Interface
-
-import string, types, time, os.path
-
-from buildbot import interfaces, util
-from buildbot import version
-from buildbot.sourcestamp import SourceStamp
-from buildbot.status import builder, base, getcws
-from buildbot.changes import changes
-from buildbot.process.base import BuildRequest
-
-class ITopBox(Interface):
- """I represent a box in the top row of the waterfall display: the one
- which shows the status of the last build for each builder."""
- pass
-
-class ICurrentBox(Interface):
- """I represent the 'current activity' box, just above the builder name."""
- pass
-
-class IBox(Interface):
- """I represent a box in the waterfall display."""
- pass
-
-class IHTMLLog(Interface):
- pass
-
-ROW_TEMPLATE = '''
-<div class="row">
- <span class="label">%(label)s</span>
- <span class="field">%(field)s</span>
-</div>'''
-
-def make_row(label, field):
- """Create a name/value row for the HTML.
-
- `label` is plain text; it will be HTML-encoded.
-
- `field` is a bit of HTML structure; it will not be encoded in
- any way.
- """
- label = html.escape(label)
- return ROW_TEMPLATE % {"label": label, "field": field}
-
-colormap = {
- 'green': '#72ff75',
- }
-def td(text="", parms={}, **props):
- data = ""
- data += " "
- #if not props.has_key("border"):
- # props["border"] = 1
- props.update(parms)
- if props.has_key("bgcolor"):
- props["bgcolor"] = colormap.get(props["bgcolor"], props["bgcolor"])
- comment = props.get("comment", None)
- if comment:
- data += "<!-- %s -->" % comment
- data += "<td"
- class_ = props.get('class_', None)
- if class_:
- props["class"] = class_
- for prop in ("align", "bgcolor", "colspan", "rowspan", "border",
- "valign", "halign", "class"):
- p = props.get(prop, None)
- if p != None:
- data += " %s=\"%s\"" % (prop, p)
- data += ">"
- if not text:
- text = "&nbsp;"
- if type(text) == types.ListType:
- data += string.join(text, "<br />")
- else:
- data += text
- data += "</td>\n"
- return data
-
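-# Illustrative td() output (commented sketch based on the helper above):
-#   td("ok", align="center", bgcolor="green")
-# yields ' <td align="center" bgcolor="#72ff75">ok</td>\n'; the symbolic
-# "green" is translated through colormap, and list-valued text is joined
-# with <br /> instead of being emitted as a single string.
-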
-def build_get_class(b):
- """
- Return the class to use for a finished build or buildstep,
- based on the result.
- """
- # FIXME: this getResults duplication might need to be fixed
- result = b.getResults()
- #print "THOMAS: result for b %r: %r" % (b, result)
- if isinstance(b, builder.BuildStatus):
- result = b.getResults()
- elif isinstance(b, builder.BuildStepStatus):
- result = b.getResults()[0]
- # after forcing a build, b.getResults() returns ((None, []), []), ugh
- if isinstance(result, tuple):
- result = result[0]
- else:
- raise TypeError, "%r is not a BuildStatus or BuildStepStatus" % b
-
- if result == None:
- # FIXME: this happens when a buildstep is running ?
- return "running"
- return builder.Results[result]
-
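-# Illustrative results (commented sketch; assumes builder.Results maps result
-# codes to class names such as "success" or "failure"): a finished
-# BuildStatus whose getResults() is SUCCESS maps to the CSS class "success",
-# while a step that is still running (result None) maps to "running".
-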
-class Box:
- # a Box wraps an Event. The Box has HTML <td> parameters that Events
- # lack, and it has a base URL to which each File's name is relative.
- # Events don't know about HTML.
- spacer = False
- def __init__(self, text=[], color=None, class_=None, urlbase=None,
- **parms):
- self.text = text
- self.color = color
- self.class_ = class_
- self.urlbase = urlbase
- self.show_idle = 0
- if parms.has_key('show_idle'):
- del parms['show_idle']
- self.show_idle = 1
-
- self.parms = parms
- # parms is a dict of HTML parameters for the <td> element that will
- # represent this Event in the waterfall display.
-
- def td(self, **props):
- props.update(self.parms)
- text = self.text
- if not text and self.show_idle:
- text = ["[idle]"]
- return td(text, props, bgcolor=self.color, class_=self.class_)
-
-
-class HtmlResource(Resource):
- css = None
- contentType = "text/html; charset=UTF-8"
- def render(self, request):
- data = self.content(request)
- request.setHeader("content-type", self.contentType)
- if request.method == "HEAD":
- request.setHeader("content-length", len(data))
- return ''
- return data
- title = "Dummy"
- def content(self, request):
- data = ('<!DOCTYPE html PUBLIC'
- ' "-//W3C//DTD XHTML 1.0 Transitional//EN"\n'
- '"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n'
- '<html'
- ' xmlns="http://www.w3.org/1999/xhtml"'
- ' lang="en"'
- ' xml:lang="en">\n')
- data += "<head>\n"
- data += " <title>" + self.title + "</title>\n"
- if self.css:
- # TODO: use some sort of relative link up to the root page, so
- # this css can be used from child pages too
- data += (' <link href="%s" rel="stylesheet" type="text/css"/>\n'
- % "buildbot.css")
- data += "</head>\n"
- data += '<body vlink="#800080">\n'
- data += self.body(request)
- data += "</body></html>\n"
- return data
- def body(self, request):
- return "Dummy\n"
-
-class StaticHTML(HtmlResource):
- def __init__(self, body, title):
- HtmlResource.__init__(self)
- self.bodyHTML = body
- self.title = title
- def body(self, request):
- return self.bodyHTML
-
-# $builder/builds/NN/stepname
-class StatusResourceBuildStep(HtmlResource):
- title = "Build Step"
-
- def __init__(self, status, step):
- HtmlResource.__init__(self)
- self.status = status
- self.step = step
-
- def body(self, request):
- s = self.step
- b = s.getBuild()
- data = "<h1>BuildStep %s:#%d:%s</h1>\n" % \
- (b.getBuilder().getName(), b.getNumber(), s.getName())
-
- if s.isFinished():
- data += ("<h2>Finished</h2>\n"
- "<p>%s</p>\n" % html.escape("%s" % s.getText()))
- else:
- data += ("<h2>Not Finished</h2>\n"
- "<p>ETA %s seconds</p>\n" % s.getETA())
-
- exp = s.getExpectations()
- if exp:
- data += ("<h2>Expectations</h2>\n"
- "<ul>\n")
- for e in exp:
- data += "<li>%s: current=%s, target=%s</li>\n" % \
- (html.escape(e[0]), e[1], e[2])
- data += "</ul>\n"
- logs = s.getLogs()
- if logs:
- data += ("<h2>Logs</h2>\n"
- "<ul>\n")
- for num in range(len(logs)):
- if logs[num].hasContents():
- # FIXME: If the step name has a / in it, this is broken
- # either way. If we quote it but say '/'s are safe,
- # it chops up the step name. If we quote it and '/'s
- # are not safe, it escapes the / that separates the
- # step name from the log number.
- data += '<li><a href="%s">%s</a></li>\n' % \
- (urllib.quote(request.childLink("%d" % num)),
- html.escape(logs[num].getName()))
- else:
- data += ('<li>%s</li>\n' %
- html.escape(logs[num].getName()))
- data += "</ul>\n"
-
- return data
-
- def getChild(self, path, request):
- logname = path
- if path.endswith("installset.tar.gz"):
- filename = "installsets/" + path
- return static.File(filename)
- try:
- log = self.step.getLogs()[int(logname)]
- if log.hasContents():
- return IHTMLLog(interfaces.IStatusLog(log))
- return NoResource("Empty Log '%s'" % logname)
- except (IndexError, ValueError):
- return NoResource("No such Log '%s'" % logname)
-
-# $builder/builds/NN/tests/TESTNAME
-class StatusResourceTestResult(HtmlResource):
- title = "Test Logs"
-
- def __init__(self, status, name, result):
- HtmlResource.__init__(self)
- self.status = status
- self.name = name
- self.result = result
-
- def body(self, request):
- dotname = ".".join(self.name)
- logs = self.result.getLogs()
- lognames = logs.keys()
- lognames.sort()
- data = "<h1>%s</h1>\n" % html.escape(dotname)
- for name in lognames:
- data += "<h2>%s</h2>\n" % html.escape(name)
- data += "<pre>" + logs[name] + "</pre>\n\n"
-
- return data
-
-
-# $builder/builds/NN/tests
-class StatusResourceTestResults(HtmlResource):
- title = "Test Results"
-
- def __init__(self, status, results):
- HtmlResource.__init__(self)
- self.status = status
- self.results = results
-
- def body(self, request):
- r = self.results
- data = "<h1>Test Results</h1>\n"
- data += "<ul>\n"
- testnames = r.keys()
- testnames.sort()
- for name in testnames:
- res = r[name]
- dotname = ".".join(name)
- data += " <li>%s: " % dotname
- # TODO: this could break on weird test names. At the moment,
- # test names only come from Trial tests, where the name
- # components must be legal python names, but that won't always
- # be a restriction.
- url = request.childLink(dotname)
- data += "<a href=\"%s\">%s</a>" % (url, " ".join(res.getText()))
- data += "</li>\n"
- data += "</ul>\n"
- return data
-
- def getChild(self, path, request):
- try:
- name = tuple(path.split("."))
- result = self.results[name]
- return StatusResourceTestResult(self.status, name, result)
- except KeyError:
- return NoResource("No such test name '%s'" % path)
-
-
-# $builder/builds/NN
-class StatusResourceBuild(HtmlResource):
- title = "Build"
-
- def __init__(self, status, build, builderControl, buildControl):
- HtmlResource.__init__(self)
- self.status = status
- self.build = build
- self.builderControl = builderControl
- self.control = buildControl
-
- def body(self, request):
- b = self.build
- buildbotURL = self.status.getBuildbotURL()
- projectName = self.status.getProjectName()
- data = '<div class="title"><a href="%s">%s</a></div>\n'%(buildbotURL,
- projectName)
- # the color in the following line gives python-mode trouble
- data += ("<h1>Build <a href=\"%s\">%s</a>:#%d</h1>\n"
- "<h2>Reason:</h2>\n%s\n"
- % (self.status.getURLForThing(b.getBuilder()),
- b.getBuilder().getName(), b.getNumber(),
- html.escape(b.getReason())))
-
- branch, revision, patch = b.getSourceStamp()
- data += "<h2>SourceStamp:</h2>\n"
- data += " <ul>\n"
- if branch:
- data += " <li>Branch: %s</li>\n" % html.escape(branch)
- if revision:
- data += " <li>Revision: %s</li>\n" % html.escape(str(revision))
- if patch:
- data += " <li>Patch: YES</li>\n" # TODO: provide link to .diff
- if b.getChanges():
- data += " <li>Changes: see below</li>\n"
- if (branch is None and revision is None and patch is None
- and not b.getChanges()):
- data += " <li>build of most recent revision</li>\n"
- data += " </ul>\n"
- if b.isFinished():
- data += "<h4>Buildslave: %s</h4>\n" % html.escape(b.getSlavename())
- data += "<h2>Results:</h2>\n"
- data += " ".join(b.getText()) + "\n"
- if b.getTestResults():
- url = request.childLink("tests")
- data += "<h3><a href=\"%s\">test results</a></h3>\n" % url
- else:
- data += "<h2>Build In Progress</h2>"
- if self.control is not None:
- stopURL = urllib.quote(request.childLink("stop"))
- data += """
- <form action="%s" class='command stopbuild'>
- <p>To stop this build, fill out the following fields and
- push the 'Stop Builder' button</p>\n""" % stopURL
- data += make_row("Your name:",
- "<input type='text' name='username' />")
- data += make_row("Reason for stopping build:",
- "<input type='text' name='comments' />")
- data += """<input type="submit" value="Stop Builder" />
- </form>
- """
-
- if b.isFinished() and self.builderControl is not None:
- data += "<h3>Resubmit Build:</h3>\n"
- # can we rebuild it exactly?
- exactly = (revision is not None) or b.getChanges()
- if exactly:
- data += ("<p>This tree was built from a specific set of \n"
- "source files, and can be rebuilt exactly</p>\n")
- else:
- data += ("<p>This tree was built from the most recent "
- "revision")
- if branch:
- data += " (along some branch)"
- data += (" and thus it might not be possible to rebuild it \n"
- "exactly. Any changes that have been committed \n"
- "after this build was started <b>will</b> be \n"
- "included in a rebuild.</p>\n")
- rebuildURL = urllib.quote(request.childLink("rebuild"))
- data += ('<form action="%s" class="command rebuild">\n'
- % rebuildURL)
- data += make_row("Your name:",
- "<input type='text' name='username' />")
- data += make_row("Reason for re-running build:",
- "<input type='text' name='comments' />")
- data += '<input type="submit" value="Rebuild" />\n'
-
- data += "<h2>Steps and Logfiles:</h2>\n"
- if b.getLogs():
- data += "<ol>\n"
- for s in b.getSteps():
- data += (" <li><a href=\"%s\">%s</a> [%s]\n"
- % (self.status.getURLForThing(s), s.getName(),
- " ".join(s.getText())))
- if s.getLogs():
- data += " <ol>\n"
- for logfile in s.getLogs():
- data += (" <li><a href=\"%s\">%s</a></li>\n" %
- (self.status.getURLForThing(logfile),
- logfile.getName()))
- data += " </ol>\n"
- data += " </li>\n"
- data += "</ol>\n"
-
- data += ("<h2>Blamelist:</h2>\n"
- " <ol>\n")
- for who in b.getResponsibleUsers():
- data += " <li>%s</li>\n" % html.escape(who)
- data += (" </ol>\n"
- "<h2>All Changes</h2>\n")
- changes = b.getChanges()
- if changes:
- data += "<ol>\n"
- for c in changes:
- data += "<li>" + c.asHTML() + "</li>\n"
- data += "</ol>\n"
- #data += html.PRE(b.changesText()) # TODO
- return data
-
- def stop(self, request):
- log.msg("web stopBuild of build %s:%s" % \
- (self.build.getBuilder().getName(),
- self.build.getNumber()))
- name = request.args.get("username", ["<unknown>"])[0]
- comments = request.args.get("comments", ["<no reason specified>"])[0]
- reason = ("The web-page 'stop build' button was pressed by "
- "'%s': %s\n" % (name, comments))
- self.control.stopBuild(reason)
- # we're at http://localhost:8080/svn-hello/builds/5/stop?[args] and
- # we want to go to: http://localhost:8080/svn-hello/builds/5 or
- # http://localhost:8080/
- #
- #return Redirect("../%d" % self.build.getNumber())
- r = Redirect("../../..")
- d = defer.Deferred()
- reactor.callLater(1, d.callback, r)
- return DeferredResource(d)
-
- def rebuild(self, request):
- log.msg("web rebuild of build %s:%s" % \
- (self.build.getBuilder().getName(),
- self.build.getNumber()))
- name = request.args.get("username", ["<unknown>"])[0]
- comments = request.args.get("comments", ["<no reason specified>"])[0]
- reason = ("The web-page 'rebuild' button was pressed by "
- "'%s': %s\n" % (name, comments))
- if not self.builderControl or not self.build.isFinished():
- log.msg("could not rebuild: bc=%s, isFinished=%s"
- % (self.builderControl, self.build.isFinished()))
- # TODO: indicate an error
- else:
- self.builderControl.resubmitBuild(self.build, reason)
- # we're at http://localhost:8080/svn-hello/builds/5/rebuild?[args] and
- # we want to go to the top, at http://localhost:8080/
- r = Redirect("../../..")
- d = defer.Deferred()
- reactor.callLater(1, d.callback, r)
- return DeferredResource(d)
-
- def getChild(self, path, request):
- if path == "tests":
- return StatusResourceTestResults(self.status,
- self.build.getTestResults())
- if path == "stop":
- return self.stop(request)
- if path == "rebuild":
- return self.rebuild(request)
- if path.startswith("step-"):
- stepname = path[len("step-"):]
- steps = self.build.getSteps()
- for s in steps:
- if s.getName() == stepname:
- return StatusResourceBuildStep(self.status, s)
- return NoResource("No such BuildStep '%s'" % stepname)
- return NoResource("No such resource '%s'" % path)
-
-# $builder
-class StatusResourceBuilder(HtmlResource):
-
- def __init__(self, status, builder, control):
- HtmlResource.__init__(self)
- self.status = status
- self.title = builder.getName() + " Builder"
- self.builder = builder
- self.control = control
-
- def body(self, request):
- b = self.builder
- slaves = b.getSlaves()
- connected_slaves = [s for s in slaves if s.isConnected()]
-
- buildbotURL = self.status.getBuildbotURL()
- projectName = self.status.getProjectName()
- data = "<a href=\"%s\">%s</a>\n" % (buildbotURL, projectName)
- data += make_row("Builder:", html.escape(b.getName()))
- b1 = b.getBuild(-1)
- if b1 is not None:
- data += make_row("Current/last build:", str(b1.getNumber()))
- data += "\n<br />BUILDSLAVES<br />\n"
- data += "<ol>\n"
- for slave in slaves:
- data += "<li><b>%s</b>: " % html.escape(slave.getName())
- if slave.isConnected():
- data += "CONNECTED\n"
- if slave.getAdmin():
- data += make_row("Admin:", html.escape(slave.getAdmin()))
- if slave.getHost():
- data += "<span class='label'>Host info:</span>\n"
- data += html.PRE(slave.getHost())
- else:
- data += ("NOT CONNECTED\n")
- data += "</li>\n"
- data += "</ol>\n"
-
- if self.control is not None and connected_slaves:
- forceURL = urllib.quote(request.childLink("force"))
- data += (
- """
- <form action='%(forceURL)s' class='command forcebuild'>
- <p>To force a build, fill out the following fields and
- push the 'Force Build' button</p>
- <table border='0'>
- <tr>
- <td>
- Your name:
- </td>
- <td>
- <input type='text' name='username' />@openoffice.org (for email notification about build status)
- </td>
- </tr>
- <tr>
- <td>
- Reason for build:
- </td>
- <td>
- <input type='text' name='comments' />
- </td>
- </tr>
- <tr>
- <td>
- CWS to build:
- </td>
- <td>
- <input type='text' name='branch' />(e.g. configdbbe, kaib01, ww8perf02)
- </td>
- </tr>
- <tr>
- <td>
- Config Switches:
- </td>
- <td>
- <input type='text' size='50' name='config' />(if your CWS requires extra config switches)
- </td>
- </tr>
- <tr>
- <td>
- Make Install-Set:
- </td>
- <td>
- <input type='checkbox' name='installsetcheck' />(If you want to download install-sets)
- </td>
- </tr>
- <tr>
- <td colspan='2'>
- <input type='submit' value='Force Build' />
- </td>
- </tr>
- </table>
- </form>
- """) % {"forceURL": forceURL}
- elif self.control is not None:
- data += """
- <p>All buildslaves appear to be offline, so it's not possible
- to force this build to execute at this time.</p>
- """
-
- if self.control is not None:
- pingURL = urllib.quote(request.childLink("ping"))
- data += """
- <form action="%s" class='command pingbuilder'>
- <p>To ping the buildslave(s), push the 'Ping Builder' button</p>
-
- <input type="submit" value="Ping Builder" />
- </form>
- """ % pingURL
-
- return data
-
- def force(self, request):
- name = request.args.get("username", ["<unknown>"])[0]
- reason = request.args.get("comments", ["<no reason specified>"])[0]
- branch = request.args.get("branch", [""])[0]
- revision = request.args.get("revision", [""])[0]
- config = request.args.get("config", [""])[0]
- installsetcheck = request.args.get("installsetcheck", [""])[0]
-
- r = "The web-page 'force build' button was pressed by '%s': %s\n" \
- % (name, reason)
- log.msg("web forcebuild of builder '%s', branch='%s', revision='%s', config='%s', installsetcheck='%s' "
- % (self.builder.name, branch, revision, config, installsetcheck))
-
- if not self.control:
- # TODO: tell the web user that their request was denied
- log.msg("but builder control is disabled")
- return Redirect("..")
-
- # keep weird stuff out of the branch and revision strings. TODO:
- # centralize this somewhere.
- if not re.match(r'^[\w\.\-\/]*$', branch):
- log.msg("bad branch '%s'" % branch)
- return Redirect("..")
- if not re.match(r'^[\w\.\-\/]*$', revision):
- log.msg("bad revision '%s'" % revision)
- return Redirect("..")
- if name == "":
- name = None
- if branch == "":
- branch = None
- if revision == "":
- revision = None
- if config == "":
- config = None
- if installsetcheck == "":
- installsetcheck = None
-
- # TODO: if we can authenticate that a particular User pushed the
- # button, use their name instead of None, so they'll be informed of
- # the results.
- s = SourceStamp(branch=branch, revision=revision)
-
- req = BuildRequest(r, s, self.builder.getName(), name, config, installsetcheck)
- try:
- self.control.requestBuildSoon(req)
- except interfaces.NoSlaveError:
- # TODO: tell the web user that their request could not be
- # honored
- pass
- return Redirect("..")
-
- def ping(self, request):
- log.msg("web ping of builder '%s'" % self.builder.name)
- self.control.ping() # TODO: there ought to be an ISlaveControl
- return Redirect("..")
-
- def getChild(self, path, request):
- if path == "force":
- return self.force(request)
- if path == "ping":
- return self.ping(request)
- if path not in ("events", "builds"):
- return NoResource("Bad URL '%s'" % path)
- num = request.postpath.pop(0)
- request.prepath.append(num)
- num = int(num)
- if path == "events":
- # TODO: is this dead code? .statusbag doesn't exist, right?
- log.msg("getChild['path']: %s" % request.uri)
- return NoResource("events are unavailable until code gets fixed")
- filename = request.postpath.pop(0)
- request.prepath.append(filename)
- e = self.builder.statusbag.getEventNumbered(num)
- if not e:
- return NoResource("No such event '%d'" % num)
- file = e.files.get(filename, None)
- if file == None:
- return NoResource("No such file '%s'" % filename)
- if type(file) == type(""):
- if file[:6] in ("<HTML>", "<html>"):
- return static.Data(file, "text/html")
- return static.Data(file, "text/plain")
- return file
- if path == "builds":
- build = self.builder.getBuild(num)
- if build:
- control = None
- if self.control:
- control = self.control.getBuild(num)
- return StatusResourceBuild(self.status, build,
- self.control, control)
- else:
- return NoResource("No such build '%d'" % num)
- return NoResource("really weird URL %s" % path)
-
-# $changes/NN
-class StatusResourceChanges(HtmlResource):
- def __init__(self, status, changemaster):
- HtmlResource.__init__(self)
- self.status = status
- self.changemaster = changemaster
- def body(self, request):
- data = ""
- data += "Change sources:\n"
- sources = list(self.changemaster)
- if sources:
- data += "<ol>\n"
- for s in sources:
- data += "<li>%s</li>\n" % s.describe()
- data += "</ol>\n"
- else:
- data += "none (push only)\n"
- return data
- def getChild(self, path, request):
- num = int(path)
- c = self.changemaster.getChangeNumbered(num)
- if not c:
- return NoResource("No change number '%d'" % num)
- return StaticHTML(c.asHTML(), "Change #%d" % num)
-
-textlog_stylesheet = """
-<style type="text/css">
- div.data {
- font-family: "Courier New", courier, monotype;
- }
- span.stdout {
- font-family: "Courier New", courier, monotype;
- }
- span.stderr {
- font-family: "Courier New", courier, monotype;
- color: red;
- }
- span.header {
- font-family: "Courier New", courier, monotype;
- color: blue;
- }
-</style>
-"""
-
-class ChunkConsumer:
- if implements:
- implements(interfaces.IStatusLogConsumer)
- else:
- __implements__ = interfaces.IStatusLogConsumer,
-
- def __init__(self, original, textlog):
- self.original = original
- self.textlog = textlog
- def registerProducer(self, producer, streaming):
- self.producer = producer
- self.original.registerProducer(producer, streaming)
- def unregisterProducer(self):
- self.original.unregisterProducer()
- def writeChunk(self, chunk):
- formatted = self.textlog.content([chunk])
- try:
- self.original.write(formatted)
- except pb.DeadReferenceError:
- # registerProducer() stores the producer as self.producer
- self.producer.stopProducing()
- def finish(self):
- self.textlog.finished()
-
-class TextLog(Resource):
- # a new instance of this Resource is created for each client who views
- # it, so we can afford to track the request in the Resource.
- if implements:
- implements(IHTMLLog)
- else:
- __implements__ = IHTMLLog,
-
- asText = False
- subscribed = False
-
- def __init__(self, original):
- Resource.__init__(self)
- self.original = original
-
- def getChild(self, path, request):
- if path == "text":
- self.asText = True
- return self
- return NoResource("bad pathname")
-
- def htmlHeader(self, request):
- title = "Log File contents"
- data = "<html>\n<head><title>" + title + "</title>\n"
- data += textlog_stylesheet
- data += "</head>\n"
- data += "<body vlink=\"#800080\">\n"
- texturl = request.childLink("text")
- data += '<a href="%s">(view as text)</a><br />\n' % texturl
- data += "<pre>\n"
- return data
-
- def content(self, entries):
- spanfmt = '<span class="%s">%s</span>'
- data = ""
- for type, entry in entries:
- if self.asText:
- if type != builder.HEADER:
- data += entry
- else:
- data += spanfmt % (builder.ChunkTypes[type],
- html.escape(entry))
- return data
-
- def htmlFooter(self):
- data = "</pre>\n"
- data += "</body></html>\n"
- return data
-
- def render_HEAD(self, request):
- if self.asText:
- request.setHeader("content-type", "text/plain")
- else:
- request.setHeader("content-type", "text/html")
-
- # vague approximation, ignores markup
- request.setHeader("content-length", self.original.length)
- return ''
-
- def render_GET(self, req):
- self.req = req
-
- if self.asText:
- req.setHeader("content-type", "text/plain")
- else:
- req.setHeader("content-type", "text/html")
-
- if not self.asText:
- req.write(self.htmlHeader(req))
-
- self.original.subscribeConsumer(ChunkConsumer(req, self))
- return server.NOT_DONE_YET
-
- def finished(self):
- if not self.req:
- return
- try:
- if not self.asText:
- self.req.write(self.htmlFooter())
- self.req.finish()
- except pb.DeadReferenceError:
- pass
- # break the cycle, the Request's .notifications list includes the
- # Deferred (from req.notifyFinish) that's pointing at us.
- self.req = None
-
-components.registerAdapter(TextLog, interfaces.IStatusLog, IHTMLLog)
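-# With this registration, IHTMLLog(interfaces.IStatusLog(log)) -- as used in
-# StatusResourceBuildStep.getChild above -- returns a TextLog wrapped around
-# the status log; rendering it streams the log's chunks through ChunkConsumer,
-# and appending "/text" to the URL switches the same resource to plain text.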
-
-
-class HTMLLog(Resource):
- if implements:
- implements(IHTMLLog)
- else:
- __implements__ = IHTMLLog,
-
-
- def __init__(self, original):
- Resource.__init__(self)
- self.original = original
-
- def render(self, request):
- request.setHeader("content-type", "text/html")
- return self.original.html
-
-components.registerAdapter(HTMLLog, builder.HTMLLogFile, IHTMLLog)
-
-
-class CurrentBox(components.Adapter):
- # this provides the "current activity" box, just above the builder name
- if implements:
- implements(ICurrentBox)
- else:
- __implements__ = ICurrentBox,
-
- def formatETA(self, eta):
- if eta is None:
- return []
- if eta < 0:
- return ["Soon"]
- abstime = time.strftime("%H:%M:%S", time.localtime(util.now()+eta))
- return ["ETA in", "%d secs" % eta, "at %s" % abstime]
-
- def getBox(self, status):
- # getState() returns offline, idle, or building
- state, builds = self.original.getState()
-
- # look for upcoming builds. We say the state is "waiting" if the
- # builder is otherwise idle and there is a scheduler which tells us a
- # build will be performed some time in the near future. TODO: this
- # functionality used to be in BuilderStatus.. maybe this code should
- # be merged back into it.
- upcoming = []
- builderName = self.original.getName()
- for s in status.getSchedulers():
- if builderName in s.listBuilderNames():
- upcoming.extend(s.getPendingBuildTimes())
- if state == "idle" and upcoming:
- state = "waiting"
-
- if state == "building":
- color = "yellow"
- text = ["building"]
- if builds:
- for b in builds:
- eta = b.getETA()
- if eta:
- text.extend(self.formatETA(eta))
- elif state == "offline":
- color = "red"
- text = ["offline"]
- elif state == "idle":
- color = "white"
- text = ["idle"]
- elif state == "waiting":
- color = "yellow"
- text = ["waiting"]
- else:
- # just in case I add a state and forget to update this
- color = "white"
- text = [state]
-
- # TODO: for now, this pending/upcoming stuff is in the "current
- # activity" box, but really it should go into a "next activity" row
- # instead. The only times it should show up in "current activity" is
- # when the builder is otherwise idle.
-
- # are any builds pending? (waiting for a slave to be free)
- pbs = self.original.getPendingBuilds()
- if pbs:
- text.append("%d pending" % len(pbs))
- for t in upcoming:
- text.extend(["next at",
- time.strftime("%H:%M:%S", time.localtime(t)),
- "[%d secs]" % (t - util.now()),
- ])
- # TODO: the upcoming-builds box looks like:
- # ['waiting', 'next at', '22:14:15', '[86 secs]']
- # while the currently-building box is reversed:
- # ['building', 'ETA in', '2 secs', 'at 22:12:50']
- # consider swapping one of these to make them look the same. also
- # consider leaving them reversed to make them look different.
- return Box(text, color=color, class_="Activity " + state)
-
-components.registerAdapter(CurrentBox, builder.BuilderStatus, ICurrentBox)
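-# Illustrative formatETA() values (commented sketch; the absolute time depends
-# on the local clock): formatETA(None) -> [], formatETA(-5) -> ["Soon"], and
-# formatETA(90) at 22:10:00 local time -> ["ETA in", "90 secs", "at 22:11:30"].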
-
-class ChangeBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- url = "changes/%d" % self.original.number
- text = '<a href="%s">%s</a>' % (url, html.escape(self.original.who))
- return Box([text], color="white", class_="Change")
-components.registerAdapter(ChangeBox, changes.Change, IBox)
-
-class BuildBox(components.Adapter):
- # this provides the yellow "starting line" box for each build
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- b = self.original
- name = b.getBuilder().getName()
- number = b.getNumber()
- url = "%s/builds/%d" % (urllib.quote(name, safe=''), number)
- text = '<a href="%s">Build %d</a>' % (url, number)
- color = "yellow"
- class_ = "start"
- if b.isFinished() and not b.getSteps():
- # the steps have been pruned, so there won't be any indication
- # of whether it succeeded or failed. Color the box red or green
- # to show its status
- color = b.getColor()
- class_ = build_get_class(b)
- return Box([text], color=color, class_="BuildStep " + class_)
-components.registerAdapter(BuildBox, builder.BuildStatus, IBox)
-
-class StepBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- b = self.original.getBuild()
- urlbase = "%s/builds/%d/step-%s" % (
- urllib.quote(b.getBuilder().getName(), safe=''),
- b.getNumber(),
- urllib.quote(self.original.getName(), safe=''))
- text = self.original.getText()
- if text is None:
- log.msg("getText() gave None", urlbase)
- text = []
- text = text[:]
- logs = self.original.getLogs()
- for num in range(len(logs)):
- name = logs[num].getName()
- if logs[num].hasContents():
- url = "%s/%d" % (urlbase, num)
- text.append("<a href=\"%s\">%s</a>" % (url, html.escape(name)))
- else:
- text.append(html.escape(name))
- color = self.original.getColor()
- class_ = "BuildStep " + build_get_class(self.original)
- return Box(text, color, class_=class_)
-components.registerAdapter(StepBox, builder.BuildStepStatus, IBox)
-
-class EventBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- text = self.original.getText()
- color = self.original.getColor()
- class_ = "Event"
- if color:
- class_ += " " + color
- return Box(text, color, class_=class_)
-components.registerAdapter(EventBox, builder.Event, IBox)
-
-
-class BuildTopBox(components.Adapter):
- # this provides a per-builder box at the very top of the display,
- # showing the results of the most recent build
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- assert interfaces.IBuilderStatus(self.original)
- b = self.original.getLastFinishedBuild()
- if not b:
- return Box(["none"], "white", class_="LastBuild")
- name = b.getBuilder().getName()
- number = b.getNumber()
- url = "%s/builds/%d" % (name, number)
- text = b.getText()
- # TODO: add logs?
- # TODO: add link to the per-build page at 'url'
- c = b.getColor()
- class_ = build_get_class(b)
- return Box(text, c, class_="LastBuild %s" % class_)
-components.registerAdapter(BuildTopBox, builder.BuilderStatus, ITopBox)
-
-class Spacer(builder.Event):
- def __init__(self, start, finish):
- self.started = start
- self.finished = finish
-
-class SpacerBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- #b = Box(["spacer"], "white")
- b = Box([])
- b.spacer = True
- return b
-components.registerAdapter(SpacerBox, Spacer, IBox)
-
-def insertGaps(g, lastEventTime, idleGap=2):
- debug = False
-
- e = g.next()
- starts, finishes = e.getTimes()
- if debug: log.msg("E0", starts, finishes)
- if finishes == 0:
- finishes = starts
- if debug: log.msg("E1 finishes=%s, gap=%s, lET=%s" % \
- (finishes, idleGap, lastEventTime))
- if finishes is not None and finishes + idleGap < lastEventTime:
- if debug: log.msg(" spacer0")
- yield Spacer(finishes, lastEventTime)
-
- followingEventStarts = starts
- if debug: log.msg(" fES0", starts)
- yield e
-
- while 1:
- e = g.next()
- starts, finishes = e.getTimes()
- if debug: log.msg("E2", starts, finishes)
- if finishes == 0:
- finishes = starts
- if finishes is not None and finishes + idleGap < followingEventStarts:
- # there is a gap between the end of this event and the beginning
- # of the next one. Insert an idle event so the waterfall display
- # shows a gap here.
- if debug:
- log.msg(" finishes=%s, gap=%s, fES=%s" % \
- (finishes, idleGap, followingEventStarts))
- yield Spacer(finishes, followingEventStarts)
- yield e
- followingEventStarts = starts
- if debug: log.msg(" fES1", starts)
-
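-# Illustrative behaviour of insertGaps() (commented sketch; eventGenerator()
-# yields events newest-first): if the previously yielded (later) event started
-# at t=110 and the next event finishes at t=100, then with idleGap=2 a
-# Spacer(100, 110) is yielded between them, so the waterfall shows the idle
-# period as an empty box.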
-
-class WaterfallStatusResource(HtmlResource):
- """This builds the main status page, with the waterfall display, and
- all child pages."""
- title = "BuildBot"
- def __init__(self, status, changemaster, categories, css=None):
- HtmlResource.__init__(self)
- self.status = status
- self.changemaster = changemaster
- self.categories = categories
- p = self.status.getProjectName()
- if p:
- self.title = "BuildBot: %s" % p
- self.css = css
-
- def body(self, request):
- "This method builds the main waterfall display."
-
- data = ''
-
- projectName = self.status.getProjectName()
- projectURL = self.status.getProjectURL()
-
- phase = request.args.get("phase",["2"])
- phase = int(phase[0])
-
- showBuilders = request.args.get("show", None)
- allBuilders = self.status.getBuilderNames(categories=self.categories)
- if showBuilders:
- builderNames = []
- for b in showBuilders:
- if b not in allBuilders:
- continue
- if b in builderNames:
- continue
- builderNames.append(b)
- else:
- builderNames = allBuilders
- builders = map(lambda name: self.status.getBuilder(name),
- builderNames)
-
- if phase == -1:
- return self.body0(request, builders)
- (changeNames, builderNames, timestamps, eventGrid, sourceEvents) = \
- self.buildGrid(request, builders)
- if phase == 0:
- return self.phase0(request, (changeNames + builderNames),
- timestamps, eventGrid)
- # start the table: top-header material
- data += '<table border="0" cellspacing="0">\n'
-
- if projectName and projectURL:
- # TODO: this is going to look really ugly
- #topleft = "<a href=\"%s\">%s</a><br />last build" % \
- # (projectURL, projectName)
- topleft = "<a href=\"%s\">%s</a><br /><a href=\"cws_view_ready\">Ready For QA</a><br /><a href=\"cws_view_new\">New</a>" % \
- (projectURL, projectName)
- #else:
- topright = "last build"
- data += ' <tr class="LastBuild">\n'
- data += td(topleft, align="right", class_="Project")
- data += td(topright, align="right", class_="Project")
- for b in builders:
- box = ITopBox(b).getBox()
- data += box.td(align="center")
- data += " </tr>\n"
-
- data += ' <tr class="Activity">\n'
- data += td('current activity', align='right', colspan=2)
- for b in builders:
- box = ICurrentBox(b).getBox(self.status)
- data += box.td(align="center")
- data += " </tr>\n"
-
- data += " <tr>\n"
- TZ = time.tzname[time.daylight]
- data += td("time (%s)" % TZ, align="center", class_="Time")
- name = changeNames[0]
- data += td(
- "<a href=\"%s\">%s</a>" % (urllib.quote(name, safe=''), name),
- align="center", class_="Change")
- for name in builderNames:
- data += td(
- #"<a href=\"%s\">%s</a>" % (request.childLink(name), name),
- "<a href=\"%s\">%s</a>" % (urllib.quote(name, safe=''), name),
- align="center", class_="Builder")
- data += " </tr>\n"
-
- if phase == 1:
- f = self.phase1
- else:
- f = self.phase2
- data += f(request, changeNames + builderNames, timestamps, eventGrid,
- sourceEvents)
-
- data += "</table>\n"
-
- data += "<hr />\n"
-
- data += "<a href=\"http://buildbot.sourceforge.net/\">Buildbot</a>"
- data += "-%s " % version
- if projectName:
- data += "working for the "
- if projectURL:
- data += "<a href=\"%s\">%s</a> project." % (projectURL,
- projectName)
- else:
- data += "%s project." % projectName
- data += "<br />\n"
- # TODO: push this to the right edge, if possible
- data += ("Page built: " +
- time.strftime("%a %d %b %Y %H:%M:%S",
- time.localtime(util.now()))
- + "\n")
- return data
-
- def body0(self, request, builders):
- # build the waterfall display
- data = ""
- data += "<h2>Basic display</h2>\n"
- data += "<p>See <a href=\"%s\">here</a>" % \
- urllib.quote(request.childLink("waterfall"))
- data += " for the waterfall display</p>\n"
-
- data += '<table border="0" cellspacing="0">\n'
- names = map(lambda builder: builder.name, builders)
-
- # the top row is two blank spaces, then the top-level status boxes
- data += " <tr>\n"
- data += td("", colspan=2)
- for b in builders:
- text = ""
- color = "#ca88f7"
- state, builds = b.getState()
- if state != "offline":
- text += "%s<br />\n" % state #b.getCurrentBig().text[0]
- else:
- text += "OFFLINE<br />\n"
- color = "#ffe0e0"
- data += td(text, align="center", bgcolor=color)
-
- # the next row has the column headers: time, changes, builder names
- data += " <tr>\n"
- data += td("Time", align="center")
- data += td("Changes", align="center")
- for name in names:
- data += td(
- "<a href=\"%s\">%s</a>" % (urllib.quote(request.childLink(name)), name),
- align="center")
- data += " </tr>\n"
-
- # all further rows involve timestamps, commit events, and build events
- data += " <tr>\n"
- data += td("04:00", align="bottom")
- data += td("fred", align="center")
- for name in names:
- data += td("stuff", align="center", bgcolor="red")
- data += " </tr>\n"
-
- data += "</table>\n"
- return data
-
- def buildGrid(self, request, builders):
- debug = False
-
- # XXX: see if we can use a cached copy
-
- # first step is to walk backwards in time, asking each column
- # (commit, all builders) if they have any events there. Build up the
- # array of events, and stop when we have a reasonable number.
-
- commit_source = self.changemaster
-
- lastEventTime = util.now()
- sources = [commit_source] + builders
- changeNames = ["changes"]
- builderNames = map(lambda builder: builder.getName(), builders)
- sourceNames = changeNames + builderNames
- sourceEvents = []
- sourceGenerators = []
- for s in sources:
- gen = insertGaps(s.eventGenerator(), lastEventTime)
- sourceGenerators.append(gen)
- # get the first event
- try:
- e = gen.next()
- event = interfaces.IStatusEvent(e)
- if debug:
- log.msg("gen %s gave1 %s" % (gen, event.getText()))
- except StopIteration:
- event = None
- sourceEvents.append(event)
- eventGrid = []
- timestamps = []
- spanLength = 10 # ten-second chunks
- tooOld = util.now() - 12*60*60 # never show more than 12 hours
- maxPageLen = 400
-
- lastEventTime = 0
- for e in sourceEvents:
- if e and e.getTimes()[0] > lastEventTime:
- lastEventTime = e.getTimes()[0]
- if lastEventTime == 0:
- lastEventTime = util.now()
-
- spanStart = lastEventTime - spanLength
- debugGather = 0
-
- while 1:
- if debugGather: log.msg("checking (%s,]" % spanStart)
- # the tableau of potential events is in sourceEvents[]. The
- # window crawls backwards, and we examine one source at a time.
- # If the source's top-most event is in the window, it is pushed
- # onto the events[] array and the tableau is refilled. This
- # continues until the tableau event is not in the window (or is
- # missing).
-
- spanEvents = [] # for all sources, in this span. row of eventGrid
- firstTimestamp = None # timestamp of first event in the span
- lastTimestamp = None # last pre-span event, for next span
-
- for c in range(len(sourceGenerators)):
- events = [] # for this source, in this span. cell of eventGrid
- event = sourceEvents[c]
- while event and spanStart < event.getTimes()[0]:
- # to look at windows that don't end with the present,
- # condition the .append on event.time <= spanFinish
- if not IBox(event, None):
- log.msg("BAD EVENT", event, event.getText())
- assert 0
- if debug:
- log.msg("pushing", event.getText(), event)
- events.append(event)
- starts, finishes = event.getTimes()
- firstTimestamp = util.earlier(firstTimestamp, starts)
- try:
- event = sourceGenerators[c].next()
- #event = interfaces.IStatusEvent(event)
- if debug:
- log.msg("gen[%s] gave2 %s" % (sourceNames[c],
- event.getText()))
- except StopIteration:
- event = None
- if debug:
- log.msg("finished span")
-
- if event:
- # this is the last pre-span event for this source
- lastTimestamp = util.later(lastTimestamp,
- event.getTimes()[0])
- if debugGather:
- log.msg(" got %s from %s" % (events, sourceNames[c]))
- sourceEvents[c] = event # refill the tableau
- spanEvents.append(events)
-
- if firstTimestamp is not None:
- eventGrid.append(spanEvents)
- timestamps.append(firstTimestamp)
-
-
- if lastTimestamp:
- spanStart = lastTimestamp - spanLength
- else:
- # no more events
- break
- if lastTimestamp < tooOld:
- pass
- #break
- if len(timestamps) > maxPageLen:
- break
-
-
- # now loop
-
- # loop is finished. now we have eventGrid[] and timestamps[]
- if debugGather: log.msg("finished loop")
- assert(len(timestamps) == len(eventGrid))
- return (changeNames, builderNames, timestamps, eventGrid, sourceEvents)
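-# Shape of the returned data (commented sketch read from the loop above):
-# eventGrid[r][c] lists the events that fall into span r for source c, where
-# column 0 is the change source and the remaining columns are the builders in
-# order; r == 0 is the most recent span, and timestamps[r] holds the earliest
-# event time seen in that span.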
-
- def phase0(self, request, sourceNames, timestamps, eventGrid):
- # phase0 rendering
- if not timestamps:
- return "no events"
- data = ""
- for r in range(0, len(timestamps)):
- data += "<p>\n"
- data += "[%s]<br />" % timestamps[r]
- row = eventGrid[r]
- assert(len(row) == len(sourceNames))
- for c in range(0, len(row)):
- if row[c]:
- data += "<b>%s</b><br />\n" % sourceNames[c]
- for e in row[c]:
- log.msg("Event", r, c, sourceNames[c], e.getText())
- lognames = [loog.getName() for loog in e.getLogs()]
- data += "%s: %s: %s %s<br />" % (e.getText(),
- e.getTimes()[0],
- e.getColor(),
- lognames)
- else:
- data += "<b>%s</b> [none]<br />\n" % sourceNames[c]
- return data
-
- def phase1(self, request, sourceNames, timestamps, eventGrid,
- sourceEvents):
- # phase1 rendering: table, but boxes do not overlap
- data = ""
- if not timestamps:
- return data
- lastDate = None
- for r in range(0, len(timestamps)):
- chunkstrip = eventGrid[r]
- # chunkstrip is a horizontal strip of event blocks. Each block
- # is a vertical list of events, all for the same source.
- assert(len(chunkstrip) == len(sourceNames))
- maxRows = reduce(lambda x,y: max(x,y),
- map(lambda x: len(x), chunkstrip))
- for i in range(maxRows):
- data += " <tr>\n";
- if i == 0:
- stuff = []
- # add the date at the beginning, and each time it changes
- today = time.strftime("<b>%d %b %Y</b>",
- time.localtime(timestamps[r]))
- todayday = time.strftime("<b>%a</b>",
- time.localtime(timestamps[r]))
- if today != lastDate:
- stuff.append(todayday)
- stuff.append(today)
- lastDate = today
- stuff.append(
- time.strftime("%H:%M:%S",
- time.localtime(timestamps[r])))
- data += td(stuff, valign="bottom", align="center",
- rowspan=maxRows, class_="Time")
- for c in range(0, len(chunkstrip)):
- block = chunkstrip[c]
- assert(block != None) # should be [] instead
- # bottom-justify
- offset = maxRows - len(block)
- if i < offset:
- data += td("")
- else:
- e = block[i-offset]
- box = IBox(e).getBox()
- box.parms["show_idle"] = 1
- data += box.td(valign="top", align="center")
- data += " </tr>\n"
-
- return data
-
- def phase2(self, request, sourceNames, timestamps, eventGrid,
- sourceEvents):
- data = ""
- if not timestamps:
- return data
- # first pass: figure out the height of the chunks, populate grid
- grid = []
- for i in range(1+len(sourceNames)):
- grid.append([])
- # grid is a list of columns, one for the timestamps, and one per
- # event source. Each column is exactly the same height. Each element
- # of the list is a single <td> box.
- lastDate = time.strftime("<b>%d %b %Y</b>",
- time.localtime(util.now()))
- for r in range(0, len(timestamps)):
- chunkstrip = eventGrid[r]
- # chunkstrip is a horizontal strip of event blocks. Each block
- # is a vertical list of events, all for the same source.
- assert(len(chunkstrip) == len(sourceNames))
- maxRows = reduce(lambda x,y: max(x,y),
- map(lambda x: len(x), chunkstrip))
- for i in range(maxRows):
- if i != maxRows-1:
- grid[0].append(None)
- else:
- # timestamp goes at the bottom of the chunk
- stuff = []
- # add the date at the beginning (if it is not the same as
- # today's date), and each time it changes
- todayday = time.strftime("<b>%a</b>",
- time.localtime(timestamps[r]))
- today = time.strftime("<b>%d %b %Y</b>",
- time.localtime(timestamps[r]))
- if today != lastDate:
- stuff.append(todayday)
- stuff.append(today)
- lastDate = today
- stuff.append(
- time.strftime("%H:%M:%S",
- time.localtime(timestamps[r])))
- grid[0].append(Box(text=stuff, class_="Time",
- valign="bottom", align="center"))
-
- # at this point the timestamp column has been populated with
- # maxRows boxes, most None but the last one has the time string
- for c in range(0, len(chunkstrip)):
- block = chunkstrip[c]
- assert(block != None) # should be [] instead
- for i in range(maxRows - len(block)):
- # fill top of chunk with blank space
- grid[c+1].append(None)
- for i in range(len(block)):
- # so the events are bottom-justified
- b = IBox(block[i]).getBox()
- b.parms['valign'] = "top"
- b.parms['align'] = "center"
- grid[c+1].append(b)
- # now all the other columns have maxRows new boxes too
- # populate the last row, if empty
- gridlen = len(grid[0])
- for i in range(len(grid)):
- strip = grid[i]
- assert(len(strip) == gridlen)
- if strip[-1] == None:
- if sourceEvents[i-1]:
- filler = IBox(sourceEvents[i-1]).getBox()
- else:
- # this can happen if you delete part of the build history
- filler = Box(text=["?"], align="center")
- strip[-1] = filler
- strip[-1].parms['rowspan'] = 1
- # second pass: bubble the events upwards to un-occupied locations
- # Every square of the grid that has a None in it needs to have
- # something else take its place.
- noBubble = request.args.get("nobubble",['0'])
- noBubble = int(noBubble[0])
- if not noBubble:
- for col in range(len(grid)):
- strip = grid[col]
- if col == 1: # changes are handled differently
- for i in range(2, len(strip)+1):
- # only merge empty boxes. Don't bubble commit boxes.
- if strip[-i] == None:
- next = strip[-i+1]
- assert(next)
- if next:
- #if not next.event:
- if next.spacer:
- # bubble the empty box up
- strip[-i] = next
- strip[-i].parms['rowspan'] += 1
- strip[-i+1] = None
- else:
- # we are above a commit box. Leave it
- # be, and turn the current box into an
- # empty one
- strip[-i] = Box([], rowspan=1,
- comment="commit bubble")
- strip[-i].spacer = True
- else:
- # we are above another empty box, which
- # somehow wasn't already converted.
- # Shouldn't happen
- pass
- else:
- for i in range(2, len(strip)+1):
- # strip[-i] will go from next-to-last back to first
- if strip[-i] == None:
- # bubble previous item up
- assert(strip[-i+1] != None)
- strip[-i] = strip[-i+1]
- strip[-i].parms['rowspan'] += 1
- strip[-i+1] = None
- else:
- strip[-i].parms['rowspan'] = 1
- # third pass: render the HTML table
- for i in range(gridlen):
- data += " <tr>\n";
- for strip in grid:
- b = strip[i]
- if b:
- data += b.td()
- else:
- if noBubble:
- data += td([])
- # Nones are left empty, rowspan should make it all fit
- data += " </tr>\n"
- return data
-
-
-class CWSStatusResource(HtmlResource):
- """This builds the main status page, with the waterfall display, and
- all child pages."""
- title = "BuildBot"
- def __init__(self, status, changemaster, categories, css=None, branches=None, cws_type='new'):
- HtmlResource.__init__(self)
- self.status = status
- self.changemaster = changemaster
- self.categories = categories
- p = self.status.getProjectName()
- if p:
- self.title = "BuildBot: %s" % p
- self.css = css
- self.branches = branches
- self.cws_type = cws_type
-
- def body(self, request):
- "This method builds the main waterfall display."
-
- data = ''
-
- projectName = self.status.getProjectName()
- projectURL = self.status.getProjectURL()
- buildbotURL = self.status.getBuildbotURL()
-
- phase = request.args.get("phase",["2"])
- phase = int(phase[0])
-
- showBuilders = request.args.get("show", None)
- allBuilders = self.status.getBuilderNames(categories=self.categories)
- if showBuilders:
- builderNames = []
- for b in showBuilders:
- if b not in allBuilders:
- continue
- if b in builderNames:
- continue
- builderNames.append(b)
- else:
- builderNames = allBuilders
- builders = map(lambda name: self.status.getBuilder(name),
- builderNames)
-
- if phase == -1:
- return self.body0(request, builders)
- (changeNames, builderNames, timestamps, eventGrid, sourceEvents) = \
- self.buildGrid(request, builders)
- if phase == 0:
- return self.phase0(request, (changeNames + builderNames),
- timestamps, eventGrid)
- # start the table: top-header material
- data += '<table border="0" cellspacing="0">\n'
-
- if projectName and projectURL:
- # TODO: this is going to look really ugly
- topleft = "<a href=\"%s\">%s</a><br /><a href=\"%s\">slave_view</a>" % \
- (projectURL, projectName, buildbotURL)
- #else:
- #topright = "last build"
- data += ' <tr class="LastBuild">\n'
- data += td(topleft, align="left", class_="Project")
- #data += td(topright, align="right", class_="Project")
- #for b in builders:
- # box = ITopBox(b).getBox()
- # data += box.td(align="center")
- #data += " </tr>\n"
-
- #data += ' <tr class="Activity">\n'
- #data += td('current activity', align='right', colspan=2)
- #for b in builders:
- # box = ICurrentBox(b).getBox(self.status)
- # data += box.td(align="center")
- #data += " </tr>\n"
-
- #data += " <tr>\n"
- #TZ = time.tzname[time.daylight]
- #data += td("time (%s)" % TZ, align="center", class_="Time")
- #name = changeNames[0]
- #data += td(
- # "<a href=\"%s\">%s</a>" % (urllib.quote(name, safe=''), name),
- # align="center", class_="Change")
- #for name in builderNames:
- # data += td(
- # #"<a href=\"%s\">%s</a>" % (request.childLink(name), name),
- # "<a href=\"%s\">%s</a>" % (urllib.quote(name, safe=''), name),
- # align="center", class_="Builder")
- #data += " </tr>\n"
-
- blockList = []
-
- for j in range(0, len(eventGrid)) :
- col = eventGrid[j]
- for k in range(0, len(col)) :
- block = col[k]
-
- for i in range(len(block)):
- blockList.append(block[i])
-
- TZ = time.tzname[time.daylight]
- numBlock = len(blockList)
- data += td("time (%s)" % TZ, align="center", class_="Time", colspan=numBlock)
- data += " </tr>\n"
-
- data += " <tr> \n"
- data += "<td></td>\n"
-
- p = getcws.GetCWS(self.cws_type)
- branchList = p.getCWSs()
-
-
- for i in range(0, len(blockList)) :
- branch, revision, patch = blockList[i].getSourceStamp()
- if branch and branch in branchList:
- start, finish = blockList[i].getTimes()
-
- if start:
- start = time.strftime("%d %b %Y %H:%M",time.localtime(start))
- else:
- start = time.strftime("%d %b %Y %H:%M",time.localtime(util.now()))
- if finish:
- finish = time.strftime("%H:%M",time.localtime(finish))
- else:
- finish = time.strftime("%H:%M",time.localtime(util.now()))
-
- box1 = Box(text=["%s-%s" %(start,finish)], align="center")
- data += box1.td(valign="top", align="center", class_="Time")
- data += " </tr> \n"
-
-
- if self.branches:
-
- #branch_file = open(self.branches, 'r')
-
- #branchList = branch_file.readlines()
-
- #p = getcws.GetCWS(self.cws_type)
- #branchList = p.getCWSs()
-
- last_time = -1
- trcolor = 1
- #for row_branch in branch_file.readlines():
- for row_branch in branchList:
- row_branch = row_branch.replace("\r","")
- row_branch = row_branch.replace("\n","")
- if trcolor == 1:
- data += " <tr border=\"0\" bgcolor=\"#fffccc\">\n"
- trcolor = 0
- else:
- data += " <tr border=\"0\" bgcolor=\"#fffff0\">\n"
- trcolor = 1
- #data += td("%s" % row_branch, align="center")
- branch_box = Box(text=["%s"%row_branch], align="center")
- data += branch_box.td(class_="branch_box")
- #last_time = timestamps[r]
-
- for i in range(len(blockList)):
- #text = block[i].getBuild()
- branch, revision, patch = blockList[i].getSourceStamp()
- slave = blockList[i].getBuilder().getName()
- boxclass = None
- if branch and (branch in branchList):
- if (row_branch == branch):
- box = IBox(blockList[i]).getBox()
- text = blockList[i].getText()
- if ("failed" in text or "exception" in text):
- boxclass = "failure"
- elif ("successful" in text):
- boxclass = "success"
- else:
- boxclass = "empty"
- #box1 = Box(text=["%s" %text], align="center")
- else:
- box = Box(text=[""], align="center")
- #box1 = Box(text=[""], align="center")
- data += box.td(valign="top", align="center", class_=boxclass)
-
- #data += box1.td(valign="top", align="center", class_=boxclass)
- data += " </tr>\n"
- #row_branch = branch_file.readline()
- #branch_file.close()
- else:
- data +="<tr><td> No branches listed in branch_file.txt or no branch_file.txt specified in master.cfg file </td></tr>\n"
-
- #if phase == 1:
- # f = self.phase2
- #else:
- # f = self.phase2
- #data += f(request, changeNames + builderNames, timestamps, eventGrid,
- # sourceEvents)
-
- data += "</table>\n"
-
- data += "<hr />\n"
-
- data += "<a href=\"http://buildbot.sourceforge.net/\">Buildbot</a>"
- data += "-%s " % version
- if projectName:
- data += "working for the "
- if projectURL:
- data += "<a href=\"%s\">%s</a> project." % (projectURL,
- projectName)
- else:
- data += "%s project." % projectName
- data += "<br />\n"
- # TODO: push this to the right edge, if possible
- data += ("Page built: " +
- time.strftime("%a %d %b %Y %H:%M:%S",
- time.localtime(util.now()))
- + "\n")
- return data
-
- def body0(self, request, builders):
- # build the waterfall display
- data = ""
- data += "<h2>Basic display</h2>\n"
- data += "<p>See <a href=\"%s\">here</a>" % \
- urllib.quote(request.childLink("waterfall"))
- data += " for the waterfall display</p>\n"
-
- data += '<table border="0" cellspacing="0">\n'
- names = map(lambda builder: builder.name, builders)
-
- # the top row is two blank spaces, then the top-level status boxes
- data += " <tr>\n"
- data += td("", colspan=2)
- for b in builders:
- text = ""
- color = "#ca88f7"
- state, builds = b.getState()
- if state != "offline":
- text += "%s<br />\n" % state #b.getCurrentBig().text[0]
- else:
- text += "OFFLINE<br />\n"
- color = "#ffe0e0"
- data += td(text, align="center", bgcolor=color)
-
- # the next row has the column headers: time, changes, builder names
- data += " <tr>\n"
- data += td("Time", align="center")
- data += td("Changes", align="center")
- for name in names:
- data += td(
- "<a href=\"%s\">%s</a>" % (urllib.quote(request.childLink(name)), name),
- align="center")
- data += " </tr>\n"
-
- # all further rows involve timestamps, commit events, and build events
- data += " <tr>\n"
- data += td("04:00", align="bottom")
- data += td("fred", align="center")
- for name in names:
- data += td("stuff", align="center", bgcolor="red")
- data += " </tr>\n"
-
- data += "</table>\n"
- return data
-
- def buildGrid(self, request, builders):
- debug = False
-
- # XXX: see if we can use a cached copy
-
- # first step is to walk backwards in time, asking each column
- # (commit, all builders) if they have any events there. Build up the
- # array of events, and stop when we have a reasonable number.
-
- commit_source = self.changemaster
-
- lastEventTime = util.now()
- sources = builders
- changeNames = ["changes"]
- builderNames = map(lambda builder: builder.getName(), builders)
- sourceNames = changeNames + builderNames
- sourceEvents = []
- sourceGenerators = []
- for s in sources:
- gen = insertGaps(s.eventGenerator(), lastEventTime)
- sourceGenerators.append(gen)
- # get the first event
- try:
- e = gen.next()
- event = interfaces.IStatusEvent(e)
- if debug:
- log.msg("gen %s gave1 %s" % (gen, event.getText()))
- except StopIteration:
- event = None
- sourceEvents.append(event)
- eventGrid = []
- timestamps = []
- spanLength = 10 # ten-second chunks
- tooOld = util.now() - 12*60*60 # never show more than 12 hours
- maxPageLen = 400
-
- lastEventTime = 0
- for e in sourceEvents:
- if e and e.getTimes()[0] > lastEventTime:
- lastEventTime = e.getTimes()[0]
- if lastEventTime == 0:
- lastEventTime = util.now()
-
- spanStart = lastEventTime - spanLength
- debugGather = 0
-
- while 1:
- if debugGather: log.msg("checking (%s,]" % spanStart)
- # the tableau of potential events is in sourceEvents[]. The
- # window crawls backwards, and we examine one source at a time.
- # If the source's top-most event is in the window, it is pushed
- # onto the events[] array and the tableau is refilled. This
- # continues until the tableau event is not in the window (or is
- # missing).
-
- spanEvents = [] # for all sources, in this span. row of eventGrid
- firstTimestamp = None # timestamp of first event in the span
- lastTimestamp = None # last pre-span event, for next span
-
- for c in range(len(sourceGenerators)):
- events = [] # for this source, in this span. cell of eventGrid
- event = sourceEvents[c]
- while event and spanStart < event.getTimes()[0]:
- # to look at windows that don't end with the present,
- # condition the .append on event.time <= spanFinish
- if not IBox(event, None):
- log.msg("BAD EVENT", event, event.getText())
- assert 0
- if debug:
- log.msg("pushing", event.getText(), event)
- if isinstance(event, builder.BuildStatus):
- events.append(event)
- starts, finishes = event.getTimes()
- firstTimestamp = util.earlier(firstTimestamp, starts)
- try:
- event = sourceGenerators[c].next()
- #event = interfaces.IStatusEvent(event)
- if debug:
- log.msg("gen[%s] gave2 %s" % (sourceNames[c],
- event.getText()))
- except StopIteration:
- event = None
- if debug:
- log.msg("finished span")
-
- if event:
- # this is the last pre-span event for this source
- lastTimestamp = util.later(lastTimestamp,
- event.getTimes()[0])
- if debugGather:
- log.msg(" got %s from %s" % (events, sourceNames[c]))
- sourceEvents[c] = event # refill the tableau
- spanEvents.append(events)
-
- if firstTimestamp is not None:
- eventGrid.append(spanEvents)
- timestamps.append(firstTimestamp)
-
-
- if lastTimestamp:
- spanStart = lastTimestamp - spanLength
- else:
- # no more events
- break
- if lastTimestamp < tooOld:
- pass
- #break
- if len(timestamps) > maxPageLen:
- break
-
-
- # now loop
-
- # loop is finished. now we have eventGrid[] and timestamps[]
- if debugGather: log.msg("finished loop")
- assert(len(timestamps) == len(eventGrid))
- return (changeNames, builderNames, timestamps, eventGrid, sourceEvents)
-
- def phase0(self, request, sourceNames, timestamps, eventGrid):
- # phase0 rendering
- if not timestamps:
- return "no events"
- data = ""
- for r in range(0, len(timestamps)):
- data += "<p>\n"
- data += "[%s]<br />" % timestamps[r]
- row = eventGrid[r]
- assert(len(row) == len(sourceNames))
- for c in range(0, len(row)):
- if row[c]:
- data += "<b>%s</b><br />\n" % sourceNames[c]
- for e in row[c]:
- log.msg("Event", r, c, sourceNames[c], e.getText())
- lognames = [loog.getName() for loog in e.getLogs()]
- data += "%s: %s: %s %s<br />" % (e.getText(),
- e.getTimes()[0],
- e.getColor(),
- lognames)
- else:
- data += "<b>%s</b> [none]<br />\n" % sourceNames[c]
- return data
-
- def phase1(self, request, sourceNames, timestamps, eventGrid,
- sourceEvents):
- # phase1 rendering: table, but boxes do not overlap
- data = ""
- if not timestamps:
- return data
- lastDate = None
- for r in range(0, len(timestamps)):
- chunkstrip = eventGrid[r]
- # chunkstrip is a horizontal strip of event blocks. Each block
- # is a vertical list of events, all for the same source.
- assert(len(chunkstrip) == len(sourceNames))
- maxRows = reduce(lambda x,y: max(x,y),
- map(lambda x: len(x), chunkstrip))
- for i in range(maxRows):
- data += " <tr>\n";
- if i == 0:
- stuff = []
- # add the date at the beginning, and each time it changes
- today = time.strftime("<b>%d %b %Y</b>",
- time.localtime(timestamps[r]))
- todayday = time.strftime("<b>%a</b>",
- time.localtime(timestamps[r]))
- if today != lastDate:
- stuff.append(todayday)
- stuff.append(today)
- lastDate = today
- stuff.append(
- time.strftime("%H:%M:%S",
- time.localtime(timestamps[r])))
- data += td(stuff, valign="bottom", align="center",
- rowspan=maxRows, class_="Time")
- for c in range(0, len(chunkstrip)):
- block = chunkstrip[c]
- assert(block != None) # should be [] instead
- # bottom-justify
- offset = maxRows - len(block)
- if i < offset:
- data += td("")
- else:
- e = block[i-offset]
- box = IBox(e).getBox()
- box.parms["show_idle"] = 1
- data += box.td(valign="top", align="center")
- data += " </tr>\n"
-
- return data
-
- def phase2(self, request, sourceNames, timestamps, eventGrid,
- sourceEvents):
- data = ""
- if not timestamps:
- return data
- # first pass: figure out the height of the chunks, populate grid
- grid = []
- for i in range(1+len(sourceNames)):
- grid.append([])
- # grid is a list of columns, one for the timestamps, and one per
- # event source. Each column is exactly the same height. Each element
- # of the list is a single <td> box.
- lastDate = time.strftime("<b>%d %b %Y</b>",
- time.localtime(util.now()))
- for r in range(0, len(timestamps)):
- chunkstrip = eventGrid[r]
- # chunkstrip is a horizontal strip of event blocks. Each block
- # is a vertical list of events, all for the same source.
- assert(len(chunkstrip) == len(sourceNames))
- maxRows = reduce(lambda x,y: max(x,y),
- map(lambda x: len(x), chunkstrip))
- for i in range(maxRows):
- if i != maxRows-1:
- grid[0].append(None)
- else:
- # timestamp goes at the bottom of the chunk
- stuff = []
- # add the date at the beginning (if it is not the same as
- # today's date), and each time it changes
- todayday = time.strftime("<b>%a</b>",
- time.localtime(timestamps[r]))
- today = time.strftime("<b>%d %b %Y</b>",
- time.localtime(timestamps[r]))
- if today != lastDate:
- stuff.append(todayday)
- stuff.append(today)
- lastDate = today
- stuff.append(
- time.strftime("%H:%M:%S",
- time.localtime(timestamps[r])))
- grid[0].append(Box(text=stuff, class_="Time",
- valign="bottom", align="center"))
-
- # at this point the timestamp column has been populated with
- # maxRows boxes, most None but the last one has the time string
- for c in range(0, len(chunkstrip)):
- block = chunkstrip[c]
- assert(block != None) # should be [] instead
- for i in range(maxRows - len(block)):
- # fill top of chunk with blank space
- grid[c+1].append(None)
- for i in range(len(block)):
- # so the events are bottom-justified
- b = IBox(block[i]).getBox()
- b.parms['valign'] = "top"
- b.parms['align'] = "center"
- grid[c+1].append(b)
- # now all the other columns have maxRows new boxes too
- # populate the last row, if empty
- gridlen = len(grid[0])
- for i in range(len(grid)):
- strip = grid[i]
- assert(len(strip) == gridlen)
- if strip[-1] == None:
- if sourceEvents[i-1]:
- filler = IBox(sourceEvents[i-1]).getBox()
- else:
- # this can happen if you delete part of the build history
- filler = Box(text=["?"], align="center")
- strip[-1] = filler
- strip[-1].parms['rowspan'] = 1
- # second pass: bubble the events upwards to un-occupied locations
- # Every square of the grid that has a None in it needs to have
- # something else take its place.
- noBubble = request.args.get("nobubble",['0'])
- noBubble = int(noBubble[0])
- if not noBubble:
- for col in range(len(grid)):
- strip = grid[col]
- if col == 1: # changes are handled differently
- for i in range(2, len(strip)+1):
- # only merge empty boxes. Don't bubble commit boxes.
- if strip[-i] == None:
- next = strip[-i+1]
- assert(next)
- if next:
- #if not next.event:
- if next.spacer:
- # bubble the empty box up
- strip[-i] = next
- strip[-i].parms['rowspan'] += 1
- strip[-i+1] = None
- else:
- # we are above a commit box. Leave it
- # be, and turn the current box into an
- # empty one
- strip[-i] = Box([], rowspan=1,
- comment="commit bubble")
- strip[-i].spacer = True
- else:
- # we are above another empty box, which
- # somehow wasn't already converted.
- # Shouldn't happen
- pass
- else:
- for i in range(2, len(strip)+1):
- # strip[-i] will go from next-to-last back to first
- if strip[-i] == None:
- # bubble previous item up
- assert(strip[-i+1] != None)
- strip[-i] = strip[-i+1]
- strip[-i].parms['rowspan'] += 1
- strip[-i+1] = None
- else:
- strip[-i].parms['rowspan'] = 1
- # third pass: render the HTML table
- for i in range(gridlen):
- data += " <tr>\n";
- for strip in grid:
- b = strip[i]
- if b:
- data += b.td()
- else:
- if noBubble:
- data += td([])
- # Nones are left empty, rowspan should make it all fit
- data += " </tr>\n"
- return data
-
-
-
-class StatusResource(Resource):
- status = None
- control = None
- favicon = None
- robots_txt = None
-
- def __init__(self, status, control, changemaster, categories, css, branches):
- """
- @type status: L{buildbot.status.builder.Status}
- @type control: L{buildbot.master.Control}
- @type changemaster: L{buildbot.changes.changes.ChangeMaster}
- """
- Resource.__init__(self)
- self.status = status
- self.control = control
- self.changemaster = changemaster
- self.categories = categories
- self.css = css
- self.branches = branches
- waterfall = WaterfallStatusResource(self.status, changemaster,
- categories, css)
- self.putChild("", waterfall)
-
- def render(self, request):
- request.redirect(request.prePathURL() + '/')
- request.finish()
-
- def getChild(self, path, request):
- if path == "robots.txt" and self.robots_txt:
- return static.File(self.robots_txt)
- if path == "buildbot.css" and self.css:
- return static.File(self.css)
- if path == "changes":
- return StatusResourceChanges(self.status, self.changemaster)
- if path == "favicon.ico":
- if self.favicon:
- return static.File(self.favicon)
- return NoResource("No favicon.ico registered")
-
- if path in self.status.getBuilderNames():
- builder = self.status.getBuilder(path)
- control = None
- if self.control:
- control = self.control.getBuilder(path)
- return StatusResourceBuilder(self.status, builder, control)
-
- if path == "cws_view_ready":
- return CWSStatusResource(self.status, [],
- None, self.css, self.branches, 'ready')
-
- if path == "cws_view_new":
- return CWSStatusResource(self.status, [],
- None, self.css, self.branches, 'new')
-
-
- return NoResource("No such Builder '%s'" % path)
-
-# the icon is sibpath(__file__, "../buildbot.png"). This is for portability.
-up = os.path.dirname
-buildbot_icon = os.path.abspath(os.path.join(up(up(__file__)),
- "buildbot.png"))
-buildbot_css = os.path.abspath(os.path.join(up(__file__), "classic.css"))
-
-class Waterfall(base.StatusReceiverMultiService):
- """I implement the primary web-page status interface, called a 'Waterfall
- Display' because builds and steps are presented in a grid of boxes which
- move downwards over time. The top edge is always the present. Each column
- represents a single builder. Each box describes a single Step, which may
- have logfiles or other status information.
-
- All these pages are served via a web server of some sort. The simplest
- approach is to let the buildmaster run its own webserver, on a given TCP
- port, but it can also publish its pages to a L{twisted.web.distrib}
- distributed web server (which lets the buildbot pages be a subset of some
- other web server).
-
- Since 0.6.3, BuildBot defines class attributes on elements so they can be
- styled with CSS stylesheets. Buildbot uses some generic classes to
- identify the type of object, and some more specific classes for the
- various kinds of those types. It does this by specifying both in the
- class attributes where applicable, separated by a space. It is important
- that in your CSS you declare the more generic class styles above the more
- specific ones. For example, first define a style for .Event, and below
- that for .SUCCESS
-
- The following CSS class names are used:
- - Activity, Event, BuildStep, LastBuild: general classes
- - waiting, interlocked, building, offline, idle: Activity states
- - start, running, success, failure, warnings, skipped, exception:
- LastBuild and BuildStep states
- - Change: box with change
- - Builder: box for builder name (at top)
- - Project
- - Time
-
- @type parent: L{buildbot.master.BuildMaster}
- @ivar parent: like all status plugins, this object is a child of the
- BuildMaster, so C{.parent} points to a
- L{buildbot.master.BuildMaster} instance, through which
- the status-reporting object is acquired.
- """
-
- compare_attrs = ["http_port", "distrib_port", "allowForce",
- "categories", "css", "favicon", "robots_txt", "branches"]
-
- def __init__(self, http_port=None, distrib_port=None, allowForce=True,
- categories=None, css=buildbot_css, favicon=buildbot_icon,
- robots_txt=None, branches=None):
- """To have the buildbot run its own web server, pass a port number to
- C{http_port}. To have it run a web.distrib server
-
- @type http_port: int or L{twisted.application.strports} string
- @param http_port: a strports specification describing which port the
- buildbot should use for its web server, with the
- Waterfall display as the root page. For backwards
- compatibility this can also be an int. Use
- 'tcp:8000' to listen on that port, or
- 'tcp:12345:interface=127.0.0.1' if you only want
- local processes to connect to it (perhaps because
- you are using an HTTP reverse proxy to make the
- buildbot available to the outside world, and do not
- want to make the raw port visible).
-
- @type distrib_port: int or L{twisted.application.strports} string
- @param distrib_port: Use this if you want to publish the Waterfall
- page using web.distrib instead. The most common
- case is to provide a string that is an absolute
- pathname to the unix socket on which the
- publisher should listen
- (C{os.path.expanduser(~/.twistd-web-pb)} will
- match the default settings of a standard
- twisted.web 'personal web server'). Another
- possibility is to pass an integer, which means
- the publisher should listen on a TCP socket,
- allowing the web server to be on a different
- machine entirely. Both forms are provided for
- backwards compatibility; the preferred form is a
- strports specification like
- 'unix:/home/buildbot/.twistd-web-pb'. Providing
- a non-absolute pathname will probably confuse
- the strports parser.
-
- @type allowForce: bool
- @param allowForce: if True, present a 'Force Build' button on the
- per-Builder page that allows visitors to the web
- site to initiate a build. If False, don't provide
- this button.
-
- @type favicon: string
- @param favicon: if set, provide the pathname of an image file that
- will be used for the 'favicon.ico' resource. Many
- browsers automatically request this file and use it
- as an icon in any bookmark generated from this site.
- Defaults to the buildbot/buildbot.png image provided
- in the distribution. Can be set to None to avoid
- using a favicon at all.
-
- @type robots_txt: string
- @param robots_txt: if set, provide the pathname of a robots.txt file.
- Many search engines request this file and obey the
- rules in it. E.g., to prevent them from crawling
- the status page, put the following two lines in
- robots.txt:
- User-agent: *
- Disallow: /
- """
-
- base.StatusReceiverMultiService.__init__(self)
- assert allowForce in (True, False) # TODO: implement others
- if type(http_port) is int:
- http_port = "tcp:%d" % http_port
- self.http_port = http_port
- if distrib_port is not None:
- if type(distrib_port) is int:
- distrib_port = "tcp:%d" % distrib_port
- if distrib_port[0] in "/~.": # pathnames
- distrib_port = "unix:%s" % distrib_port
- self.distrib_port = distrib_port
- self.allowForce = allowForce
- self.categories = categories
- self.css = css
- self.favicon = favicon
- self.robots_txt = robots_txt
- self.branches = branches
-
- def __repr__(self):
- if self.http_port is None:
- return "<Waterfall on path %s>" % self.distrib_port
- if self.distrib_port is None:
- return "<Waterfall on port %s>" % self.http_port
- return "<Waterfall on port %s and path %s>" % (self.http_port,
- self.distrib_port)
-
- def setServiceParent(self, parent):
- """
- @type parent: L{buildbot.master.BuildMaster}
- """
- base.StatusReceiverMultiService.setServiceParent(self, parent)
- self.setup()
-
- def setup(self):
- status = self.parent.getStatus()
- if self.allowForce:
- control = interfaces.IControl(self.parent)
- else:
- control = None
- change_svc = self.parent.change_svc
- sr = StatusResource(status, control, change_svc, self.categories,
- self.css, self.branches)
- sr.favicon = self.favicon
- sr.robots_txt = self.robots_txt
- self.site = server.Site(sr)
-
- if self.http_port is not None:
- s = strports.service(self.http_port, self.site)
- s.setServiceParent(self)
- if self.distrib_port is not None:
- f = pb.PBServerFactory(distrib.ResourcePublisher(self.site))
- s = strports.service(self.distrib_port, f)
- s.setServiceParent(self)
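The Waterfall docstring above spells out the main knobs (http_port, distrib_port, allowForce, favicon, robots_txt). As a rough, non-authoritative sketch, a master.cfg of this era could wire it up like this, assuming the usual BuildmasterConfig dictionary c; the port and pathname are placeholders:

    from buildbot.status import html

    c['status'].append(html.Waterfall(
        http_port="tcp:8010",                    # strports spec; a bare int also works
        allowForce=True,                         # show the per-builder 'Force Build' button
        robots_txt="/home/buildbot/robots.txt",  # hypothetical path; keeps crawlers out
    ))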
diff --git a/buildbot/buildbot-source/build/lib/buildbot/status/mail.py b/buildbot/buildbot-source/build/lib/buildbot/status/mail.py
deleted file mode 100644
index 69744adff..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/status/mail.py
+++ /dev/null
@@ -1,368 +0,0 @@
-# -*- test-case-name: buildbot.test.test_status -*-
-
-# the email.MIMEMultipart module is only available in python-2.2.2 and later
-
-from email.Message import Message
-from email.Utils import formatdate
-from email.MIMEText import MIMEText
-try:
- from email.MIMEMultipart import MIMEMultipart
- canDoAttachments = True
-except ImportError:
- canDoAttachments = False
-import urllib
-
-from twisted.internet import defer
-from twisted.application import service
-try:
- from twisted.mail.smtp import sendmail # Twisted-2.0
-except ImportError:
- from twisted.protocols.smtp import sendmail # Twisted-1.3
-from twisted.python import log
-
-from buildbot import interfaces, util
-from buildbot.twcompat import implements, providedBy
-from buildbot.status import base
-from buildbot.status.builder import FAILURE, SUCCESS, WARNINGS
-
-
-class Domain(util.ComparableMixin):
- if implements:
- implements(interfaces.IEmailLookup)
- else:
- __implements__ = interfaces.IEmailLookup
- compare_attrs = ["domain"]
-
- def __init__(self, domain):
- assert "@" not in domain
- self.domain = domain
-
- def getAddress(self, name):
- return name + "@" + self.domain
-
-
-class MailNotifier(base.StatusReceiverMultiService):
- """This is a status notifier which sends email to a list of recipients
- upon the completion of each build. It can be configured to only send out
- mail for certain builds, and only send messages when the build fails, or
- when it transitions from success to failure. It can also be configured to
- include various build logs in each message.
-
- By default, the message will be sent to the Interested Users list, which
- includes all developers who made changes in the build. You can add
- additional recipients with the extraRecipients argument.
-
- To get a simple one-message-per-build (say, for a mailing list), use
- sendToInterestedUsers=False, extraRecipients=['listaddr@example.org']
-
- Each MailNotifier sends mail to a single set of recipients. To send
- different kinds of mail to different recipients, use multiple
- MailNotifiers.
- """
-
- if implements:
- implements(interfaces.IEmailSender)
- else:
- __implements__ = (interfaces.IEmailSender,
- base.StatusReceiverMultiService.__implements__)
-
- compare_attrs = ["extraRecipients", "lookup", "fromaddr", "mode",
- "categories", "builders", "addLogs", "relayhost",
- "subject", "sendToInterestedUsers"]
-
- def __init__(self, fromaddr, mode="all", categories=None, builders=None,
- addLogs=False, relayhost="localhost",
- subject="buildbot %(result)s in %(builder)s",
- lookup=None, extraRecipients=[],
- sendToInterestedUsers=True):
- """
- @type fromaddr: string
- @param fromaddr: the email address to be used in the 'From' header.
- @type sendToInterestedUsers: boolean
- @param sendToInterestedUsers: if True (the default), send mail to all
- of the Interested Users. If False, only
- send mail to the extraRecipients list.
-
- @type extraRecipients: tuple of string
- @param extraRecipients: a list of email addresses to which messages
- should be sent (in addition to the
- InterestedUsers list, which includes any
- developers who made Changes that went into this
- build). It is a good idea to create a small
- mailing list and deliver to that, then let
- subscribers come and go as they please.
-
- @type subject: string
- @param subject: a string to be used as the subject line of the message.
- %(builder)s will be replaced with the name of the
- builder which provoked the message.
-
- @type mode: string (defaults to all)
- @param mode: one of:
- - 'all': send mail about all builds, passing and failing
- - 'failing': only send mail about builds which fail
- - 'problem': only send mail about a build which failed
- when the previous build passed
-
- @type builders: list of strings
- @param builders: a list of builder names for which mail should be
- sent. Defaults to None (send mail for all builds).
- Use either builders or categories, but not both.
-
- @type categories: list of strings
- @param categories: a list of category names to serve status
- information for. Defaults to None (all
- categories). Use either builders or categories,
- but not both.
-
- @type addLogs: boolean.
- @param addLogs: if True, include all build logs as attachments to the
- messages. These can be quite large. This can also be
- set to a list of log names, to send a subset of the
- logs. Defaults to False.
-
- @type relayhost: string
- @param relayhost: the host to which the outbound SMTP connection
- should be made. Defaults to 'localhost'
-
- @type lookup: implementor of L{IEmailLookup}
- @param lookup: object which provides IEmailLookup, which is
- responsible for mapping User names (which come from
- the VC system) into valid email addresses. If not
- provided, the notifier will only be able to send mail
- to the addresses in the extraRecipients list. Most of
- the time you can use a simple Domain instance. As a
- shortcut, you can pass a string: this will be
- treated as if you had provided Domain(str). For
- example, lookup='twistedmatrix.com' will allow mail
- to be sent to all developers whose SVN usernames
- match their twistedmatrix.com account names.
- """
-
- base.StatusReceiverMultiService.__init__(self)
- assert isinstance(extraRecipients, (list, tuple))
- for r in extraRecipients:
- assert isinstance(r, str)
- assert "@" in r # require full email addresses, not User names
- self.extraRecipients = extraRecipients
- self.sendToInterestedUsers = sendToInterestedUsers
- self.fromaddr = fromaddr
- self.mode = mode
- self.categories = categories
- self.builders = builders
- self.addLogs = addLogs
- self.relayhost = relayhost
- self.subject = subject
- if lookup is not None:
- if type(lookup) is str:
- lookup = Domain(lookup)
- assert providedBy(lookup, interfaces.IEmailLookup)
- self.lookup = lookup
- self.watched = []
- self.status = None
-
- # you should limit on either builders or categories, not both
- if self.builders != None and self.categories != None:
- log.err("Please specify only builders or categories, not both")
- # FIXME: the asserts above should also raise a proper Exception
- raise ValueError("Please specify only builders or categories, not both")
-
- def setServiceParent(self, parent):
- """
- @type parent: L{buildbot.master.BuildMaster}
- """
- base.StatusReceiverMultiService.setServiceParent(self, parent)
- self.setup()
-
- def setup(self):
- self.status = self.parent.getStatus()
- self.status.subscribe(self)
-
- def disownServiceParent(self):
- self.status.unsubscribe(self)
- for w in self.watched:
- w.unsubscribe(self)
- return base.StatusReceiverMultiService.disownServiceParent(self)
-
- def builderAdded(self, name, builder):
- # only subscribe to builders we are interested in
- if self.categories != None and builder.category not in self.categories:
- return None
-
- self.watched.append(builder)
- return self # subscribe to this builder
-
- def builderRemoved(self, name):
- pass
-
- def builderChangedState(self, name, state):
- pass
- def buildStarted(self, name, build):
- pass
- def buildFinished(self, name, build, results):
- # here is where we actually do something.
- builder = build.getBuilder()
- if self.builders is not None and name not in self.builders:
- return # ignore this build
- if self.categories is not None and \
- builder.category not in self.categories:
- return # ignore this build
-
- if self.mode == "failing" and results != FAILURE:
- return
- if self.mode == "problem":
- if results != FAILURE:
- return
- prev = build.getPreviousBuild()
- if prev and prev.getResults() == FAILURE:
- return
- # for testing purposes, buildMessage returns a Deferred that fires
- # when the mail has been sent. To help unit tests, we return that
- # Deferred here even though the normal IStatusReceiver.buildFinished
- # signature doesn't do anything with it. If that changes (if
- # .buildFinished's return value becomes significant), we need to
- # rearrange this.
- return self.buildMessage(name, build, results)
-
- def buildMessage(self, name, build, results):
- text = ""
- if self.mode == "all":
- text += "The Buildbot has finished a build of %s.\n" % name
- elif self.mode == "failing":
- text += "The Buildbot has detected a failed build of %s.\n" % name
- else:
- text += "The Buildbot has detected a new failure of %s.\n" % name
- buildurl = self.status.getURLForThing(build)
- if buildurl:
- text += ("Full details are available at:\n %s\n" %
- urllib.quote(buildurl, '/:'))
- text += "\n"
-
- url = self.status.getBuildbotURL()
- if url:
- text += "Buildbot URL: %s\n\n" % urllib.quote(url, '/:')
-
- text += "Build Reason: %s\n" % build.getReason()
-
- patch = None
- ss = build.getSourceStamp()
- if ss is None:
- source = "unavailable"
- else:
- branch, revision, patch = ss
- source = ""
- if branch:
- source += "[branch %s] " % branch
- if revision:
- source += revision
- else:
- source += "HEAD"
- if patch is not None:
- source += " (plus patch)"
- text += "Build Source Stamp: %s\n" % source
-
- text += "Blamelist: %s\n" % ",".join(build.getResponsibleUsers())
-
- # TODO: maybe display changes here? or in an attachment?
- text += "\n"
-
- t = build.getText()
- if t:
- t = ": " + " ".join(t)
- else:
- t = ""
-
- if results == SUCCESS:
- text += "Build succeeded!\n"
- res = "success"
- elif results == WARNINGS:
- text += "Build Had Warnings%s\n" % t
- res = "warnings"
- else:
- text += "BUILD FAILED%s\n" % t
- res = "failure"
-
- if self.addLogs and build.getLogs():
- text += "Logs are attached.\n"
-
- # TODO: it would be nice to provide a URL for the specific build
- # here. That involves some coordination with html.Waterfall .
- # Ideally we could do:
- # helper = self.parent.getServiceNamed("html")
- # if helper:
- # url = helper.getURLForBuild(build)
-
- text += "\n"
- text += "sincerely,\n"
- text += " -The Buildbot\n"
- text += "\n"
-
- haveAttachments = False
- if patch or self.addLogs:
- haveAttachments = True
- if not canDoAttachments:
- log.msg("warning: I want to send mail with attachments, "
- "but this python is too old to have "
- "email.MIMEMultipart . Please upgrade to python-2.3 "
- "or newer to enable addLogs=True")
-
- if haveAttachments and canDoAttachments:
- m = MIMEMultipart()
- m.attach(MIMEText(text))
- else:
- m = Message()
- m.set_payload(text)
-
- m['Date'] = formatdate(localtime=True)
- m['Subject'] = self.subject % { 'result': res,
- 'builder': name,
- }
- m['From'] = self.fromaddr
- # m['To'] is added later
-
- if patch:
- a = MIMEText(patch)
- a.add_header('Content-Disposition', "attachment",
- filename="source patch")
- m.attach(a)
- if self.addLogs:
- for log in build.getLogs():
- name = "%s.%s" % (log.getStep().getName(),
- log.getName())
- a = MIMEText(log.getText())
- a.add_header('Content-Disposition', "attachment",
- filename=name)
- m.attach(a)
-
- # now, who is this message going to?
- dl = []
- recipients = self.extraRecipients[:]
- username = build.getUsername()
-
- if username:
- recipients.append(username+"@openoffice.org")
-
- if self.sendToInterestedUsers and self.lookup:
- for u in build.getInterestedUsers():
- d = defer.maybeDeferred(self.lookup.getAddress, u)
- d.addCallback(recipients.append)
- dl.append(d)
- d = defer.DeferredList(dl)
- d.addCallback(self._gotRecipients, recipients, m)
- return d
-
- def _gotRecipients(self, res, rlist, m):
- recipients = []
- for r in rlist:
- if r is not None and r not in recipients:
- recipients.append(r)
- recipients.sort()
- m['To'] = ", ".join(recipients)
- return self.sendMessage(m, recipients)
-
- def sendMessage(self, m, recipients):
- s = m.as_string()
- ds = []
- log.msg("sending mail (%d bytes) to" % len(s), recipients)
- for recip in recipients:
- ds.append(sendmail(self.relayhost, self.fromaddr, recip, s))
- return defer.DeferredList(ds)
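MailNotifier is driven entirely by its constructor arguments, documented above. A hedged sketch of a master.cfg entry (the addresses, domain, and choice of mode are illustrative):

    from buildbot.status import mail

    c['status'].append(mail.MailNotifier(
        fromaddr="buildbot@example.org",         # 'From' header on every message
        mode="problem",                          # only mail when a passing build starts failing
        lookup="example.org",                    # string shortcut, treated as Domain("example.org")
        extraRecipients=["builds@example.org"],
        sendToInterestedUsers=False,             # one message per build, to the list only
    ))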
diff --git a/buildbot/buildbot-source/build/lib/buildbot/status/progress.py b/buildbot/buildbot-source/build/lib/buildbot/status/progress.py
deleted file mode 100644
index dc4d3d572..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/status/progress.py
+++ /dev/null
@@ -1,308 +0,0 @@
-# -*- test-case-name: buildbot.test.test_status -*-
-
-from twisted.internet import reactor
-from twisted.spread import pb
-from twisted.python import log
-from buildbot import util
-
-class StepProgress:
- """I keep track of how much progress a single BuildStep has made.
-
- Progress is measured along various axes. Time consumed is one that is
- available for all steps. Amount of command output is another, and may be
- better quantified by scanning the output for markers to derive number of
- files compiled, directories walked, tests run, etc.
-
- I am created when the build begins, and given to a BuildProgress object
- so it can track the overall progress of the whole build.
-
- """
-
- startTime = None
- stopTime = None
- expectedTime = None
- buildProgress = None
- debug = False
-
- def __init__(self, name, metricNames):
- self.name = name
- self.progress = {}
- self.expectations = {}
- for m in metricNames:
- self.progress[m] = None
- self.expectations[m] = None
-
- def setBuildProgress(self, bp):
- self.buildProgress = bp
-
- def setExpectations(self, metrics):
- """The step can call this to explicitly set a target value for one
- of its metrics. E.g., ShellCommands knows how many commands it will
- execute, so it could set the 'commands' expectation."""
- for metric, value in metrics.items():
- self.expectations[metric] = value
- self.buildProgress.newExpectations()
-
- def setExpectedTime(self, seconds):
- self.expectedTime = seconds
- self.buildProgress.newExpectations()
-
- def start(self):
- if self.debug: print "StepProgress.start[%s]" % self.name
- self.startTime = util.now()
-
- def setProgress(self, metric, value):
- """The step calls this as progress is made along various axes."""
- if self.debug:
- print "setProgress[%s][%s] = %s" % (self.name, metric, value)
- self.progress[metric] = value
- if self.debug:
- r = self.remaining()
- print " step remaining:", r
- self.buildProgress.newProgress()
-
- def finish(self):
- """This stops the 'time' metric and marks the step as finished
- overall. It should be called after the last .setProgress has been
- done for each axis."""
- if self.debug: print "StepProgress.finish[%s]" % self.name
- self.stopTime = util.now()
- self.buildProgress.stepFinished(self.name)
-
- def totalTime(self):
- if self.startTime != None and self.stopTime != None:
- return self.stopTime - self.startTime
-
- def remaining(self):
- if self.startTime == None:
- return self.expectedTime
- if self.stopTime != None:
- return 0 # already finished
- # TODO: replace this with cleverness that graphs each metric vs.
- # time, then finds the inverse function. Will probably need to save
- # a timestamp with each setProgress update, when finished, go back
- # and find the 2% transition points, then save those 50 values in a
- # list. On the next build, do linear interpolation between the two
- # closest samples to come up with a percentage represented by that
- # metric.
-
- # TODO: If no other metrics are available, just go with elapsed
- # time. Given the non-time-uniformity of text output from most
- # steps, this would probably be better than the text-percentage
- # scheme currently implemented.
-
- percentages = []
- for metric, value in self.progress.items():
- expectation = self.expectations[metric]
- if value != None and expectation != None:
- p = 1.0 * value / expectation
- percentages.append(p)
- if percentages:
- avg = reduce(lambda x,y: x+y, percentages) / len(percentages)
- if avg > 1.0:
- # overdue
- avg = 1.0
- if avg < 0.0:
- avg = 0.0
- if percentages and self.expectedTime != None:
- return self.expectedTime - (avg * self.expectedTime)
- if self.expectedTime is not None:
- # fall back to pure time
- return self.expectedTime - (util.now() - self.startTime)
- return None # no idea
-
-
-class WatcherState:
- def __init__(self, interval):
- self.interval = interval
- self.timer = None
- self.needUpdate = 0
-
-class BuildProgress(pb.Referenceable):
- """I keep track of overall build progress. I hold a list of StepProgress
- objects.
- """
-
- def __init__(self, stepProgresses):
- self.steps = {}
- for s in stepProgresses:
- self.steps[s.name] = s
- s.setBuildProgress(self)
- self.finishedSteps = []
- self.watchers = {}
- self.debug = 0
-
- def setExpectationsFrom(self, exp):
- """Set our expectations from the builder's Expectations object."""
- for name, metrics in exp.steps.items():
- s = self.steps[name]
- s.setExpectedTime(exp.times[name])
- s.setExpectations(exp.steps[name])
-
- def newExpectations(self):
- """Call this when one of the steps has changed its expectations.
- This should trigger us to update our ETA value and notify any
- subscribers."""
- pass # subscribers are not implemented: they just poll
-
- def stepFinished(self, stepname):
- assert(stepname not in self.finishedSteps)
- self.finishedSteps.append(stepname)
- if len(self.finishedSteps) == len(self.steps.keys()):
- self.sendLastUpdates()
-
- def newProgress(self):
- r = self.remaining()
- if self.debug:
- print " remaining:", r
- if r != None:
- self.sendAllUpdates()
-
- def remaining(self):
- # sum eta of all steps
- sum = 0
- for name, step in self.steps.items():
- rem = step.remaining()
- if rem == None:
- return None # not sure
- sum += rem
- return sum
- def eta(self):
- left = self.remaining()
- if left == None:
- return None # not sure
- done = util.now() + left
- return done
-
-
- def remote_subscribe(self, remote, interval=5):
- # [interval, timer, needUpdate]
- # don't send an update more than once per interval
- self.watchers[remote] = WatcherState(interval)
- remote.notifyOnDisconnect(self.removeWatcher)
- self.updateWatcher(remote)
- self.startTimer(remote)
- log.msg("BuildProgress.remote_subscribe(%s)" % remote)
- def remote_unsubscribe(self, remote):
- # TODO: this doesn't work. I think 'remote' will always be different
- # than the object that appeared in _subscribe.
- log.msg("BuildProgress.remote_unsubscribe(%s)" % remote)
- self.removeWatcher(remote)
- #remote.dontNotifyOnDisconnect(self.removeWatcher)
- def removeWatcher(self, remote):
- #log.msg("removeWatcher(%s)" % remote)
- try:
- timer = self.watchers[remote].timer
- if timer:
- timer.cancel()
- del self.watchers[remote]
- except KeyError:
- log.msg("Weird, removeWatcher on non-existent subscriber:",
- remote)
- def sendAllUpdates(self):
- for r in self.watchers.keys():
- self.updateWatcher(r)
- def updateWatcher(self, remote):
- # an update wants to go to this watcher. Send it if we can, otherwise
- # queue it for later
- w = self.watchers[remote]
- if not w.timer:
- # no timer, so send update now and start the timer
- self.sendUpdate(remote)
- self.startTimer(remote)
- else:
- # timer is running, just mark as needing an update
- w.needUpdate = 1
- def startTimer(self, remote):
- w = self.watchers[remote]
- timer = reactor.callLater(w.interval, self.watcherTimeout, remote)
- w.timer = timer
- def sendUpdate(self, remote, last=0):
- self.watchers[remote].needUpdate = 0
- #text = self.asText() # TODO: not text, duh
- try:
- remote.callRemote("progress", self.remaining())
- if last:
- remote.callRemote("finished", self)
- except:
- log.deferr()
- self.removeWatcher(remote)
-
- def watcherTimeout(self, remote):
- w = self.watchers.get(remote, None)
- if not w:
- return # went away
- w.timer = None
- if w.needUpdate:
- self.sendUpdate(remote)
- self.startTimer(remote)
- def sendLastUpdates(self):
- for remote in self.watchers.keys():
- self.sendUpdate(remote, 1)
- self.removeWatcher(remote)
-
-
-class Expectations:
- debug = False
- # decay=1.0 ignores all but the last build
- # 0.9 is a short time constant; 0.1 is a very long time constant
- # TODO: let decay be specified per-metric
- decay = 0.5
-
- def __init__(self, buildprogress):
- """Create us from a successful build. We will expect each step to
- take as long as it did in that build."""
-
- # .steps maps stepname to dict2
- # dict2 maps metricname to final end-of-step value
- self.steps = {}
-
- # .times maps stepname to per-step elapsed time
- self.times = {}
-
- for name, step in buildprogress.steps.items():
- self.steps[name] = {}
- for metric, value in step.progress.items():
- self.steps[name][metric] = value
- self.times[name] = None
- if step.startTime is not None and step.stopTime is not None:
- self.times[name] = step.stopTime - step.startTime
-
- def wavg(self, old, current):
- if old is None:
- return current
- if current is None:
- return old
- else:
- return (current * self.decay) + (old * (1 - self.decay))
-
- def update(self, buildprogress):
- for name, stepprogress in buildprogress.steps.items():
- old = self.times[name]
- current = stepprogress.totalTime()
- if current == None:
- log.msg("Expectations.update: current[%s] was None!" % name)
- continue
- new = self.wavg(old, current)
- self.times[name] = new
- if self.debug:
- print "new expected time[%s] = %s, old %s, cur %s" % \
- (name, new, old, current)
-
- for metric, current in stepprogress.progress.items():
- old = self.steps[name][metric]
- new = self.wavg(old, current)
- if self.debug:
- print "new expectation[%s][%s] = %s, old %s, cur %s" % \
- (name, metric, new, old, current)
- self.steps[name][metric] = new
-
- def expectedBuildTime(self):
- if None in self.times.values():
- return None
- #return sum(self.times.values())
- # python-2.2 doesn't have 'sum'. TODO: drop python-2.2 support
- s = 0
- for v in self.times.values():
- s += v
- return s
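The Expectations class smooths per-step times with the exponentially weighted average in wavg. A worked example using the decay of 0.5 defined above (the numbers are made up):

    old, current = 120.0, 80.0    # previous estimate vs. the step time just measured, in seconds
    decay = 0.5
    new = (current * decay) + (old * (1 - decay))
    print new                     # -> 100.0; successive builds pull the estimate toward recent times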
diff --git a/buildbot/buildbot-source/build/lib/buildbot/status/tests.py b/buildbot/buildbot-source/build/lib/buildbot/status/tests.py
deleted file mode 100644
index 6b1031a65..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/status/tests.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#! /usr/bin/python
-
-from twisted.web import resource
-from twisted.web.error import NoResource
-from twisted.web.html import PRE
-
-# these are our test result types. Steps are responsible for mapping results
-# into these values.
-SKIP, EXPECTED_FAILURE, FAILURE, ERROR, UNEXPECTED_SUCCESS, SUCCESS = \
- "skip", "expected failure", "failure", "error", "unexpected success", \
- "success"
-UNKNOWN = "unknown" # catch-all
-
-
-class OneTest(resource.Resource):
- isLeaf = 1
- def __init__(self, parent, testName, results):
- self.parent = parent
- self.testName = testName
- self.resultType, self.results = results
-
- def render(self, request):
- request.setHeader("content-type", "text/html")
- if request.method == "HEAD":
- request.setHeader("content-length", len(self.html(request)))
- return ''
- return self.html(request)
-
- def html(self, request):
- # turn ourselves into HTML
- raise NotImplementedError
-
-class TestResults(resource.Resource):
- oneTestClass = OneTest
- def __init__(self):
- resource.Resource.__init__(self)
- self.tests = {}
- def addTest(self, testName, resultType, results=None):
- self.tests[testName] = (resultType, results)
- # TODO: .setName and .delete should be used on our Swappable
- def countTests(self):
- return len(self.tests)
- def countFailures(self):
- failures = 0
- for t in self.tests.values():
- if t[0] in (FAILURE, ERROR):
- failures += 1
- return failures
- def summary(self):
- """Return a short list of text strings as a summary, suitable for
- inclusion in an Event"""
- return ["some", "tests"]
- def describeOneTest(self, testname):
- return "%s: %s\n" % (testname, self.tests[testname][0])
- def html(self):
- data = "<html>\n<head><title>Test Results</title></head>\n"
- data += "<body>\n"
- data += "<pre>\n"
- tests = self.tests.keys()
- tests.sort()
- for testname in tests:
- data += self.describeOneTest(testname)
- data += "</pre>\n"
- data += "</body></html>\n"
- return data
- def render(self, request):
- request.setHeader("content-type", "text/html")
- if request.method == "HEAD":
- request.setHeader("content-length", len(self.html()))
- return ''
- return self.html()
- def getChild(self, path, request):
- if self.tests.has_key(path):
- return self.oneTestClass(self, path, self.tests[path])
- return NoResource("No such test '%s'" % path)
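TestResults simply accumulates (resultType, results) pairs keyed by test name and renders them as HTML. A small usage sketch with invented test names, using only the API defined above:

    results = TestResults()
    results.addTest("test_login", SUCCESS)
    results.addTest("test_logout", FAILURE, results="traceback text")
    print results.countTests(), results.countFailures()   # -> 2 1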
diff --git a/buildbot/buildbot-source/build/lib/buildbot/status/words.py b/buildbot/buildbot-source/build/lib/buildbot/status/words.py
deleted file mode 100644
index 9ea54af91..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/status/words.py
+++ /dev/null
@@ -1,614 +0,0 @@
-#! /usr/bin/python
-
-# code to deliver build status through twisted.words (instant messaging
-# protocols: irc, etc)
-
-import traceback, StringIO, re, shlex
-
-from twisted.internet import protocol, reactor
-try:
- # Twisted-2.0
- from twisted.words.protocols import irc
-except ImportError:
- # Twisted-1.3
- from twisted.protocols import irc
-from twisted.python import log, failure
-from twisted.application import internet
-
-from buildbot import interfaces, util
-from buildbot import version
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process.base import BuildRequest
-from buildbot.status import base
-from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, EXCEPTION
-from buildbot.scripts.runner import ForceOptions
-
-class UsageError(ValueError):
- def __init__(self, string = "Invalid usage", *more):
- ValueError.__init__(self, string, *more)
-
-class IrcBuildRequest:
- hasStarted = False
- timer = None
-
- def __init__(self, parent, reply):
- self.parent = parent
- self.reply = reply
- self.timer = reactor.callLater(5, self.soon)
-
- def soon(self):
- del self.timer
- if not self.hasStarted:
- self.parent.reply(self.reply,
- "The build has been queued, I'll give a shout"
- " when it starts")
-
- def started(self, c):
- self.hasStarted = True
- if self.timer:
- self.timer.cancel()
- del self.timer
- s = c.getStatus()
- eta = s.getETA()
- response = "build #%d forced" % s.getNumber()
- if eta is not None:
- response = "build forced [ETA %s]" % self.parent.convertTime(eta)
- self.parent.reply(self.reply, response)
- self.parent.reply(self.reply,
- "I'll give a shout when the build finishes")
- d = s.waitUntilFinished()
- d.addCallback(self.parent.buildFinished, self.reply)
-
-
-class IrcStatusBot(irc.IRCClient):
- silly = {
- "What happen ?": "Somebody set up us the bomb.",
- "It's You !!": ["How are you gentlemen !!",
- "All your base are belong to us.",
- "You are on the way to destruction."],
- "What you say !!": ["You have no chance to survive make your time.",
- "HA HA HA HA ...."],
- }
- def __init__(self, nickname, password, channels, status, categories):
- """
- @type nickname: string
- @param nickname: the nickname by which this bot should be known
- @type password: string
- @param password: the password to use for identifying with Nickserv
- @type channels: list of strings
- @param channels: the bot will maintain a presence in these channels
- @type status: L{buildbot.status.builder.Status}
- @param status: the build master's Status object, through which the
- bot retrieves all status information
- """
- self.nickname = nickname
- self.channels = channels
- self.password = password
- self.status = status
- self.categories = categories
- self.counter = 0
- self.hasQuit = 0
-
- def signedOn(self):
- if self.password:
- self.msg("Nickserv", "IDENTIFY " + self.password)
- for c in self.channels:
- self.join(c)
- def joined(self, channel):
- log.msg("I have joined", channel)
- def left(self, channel):
- log.msg("I have left", channel)
- def kickedFrom(self, channel, kicker, message):
- log.msg("I have been kicked from %s by %s: %s" % (channel,
- kicker,
- message))
-
- # input
- def privmsg(self, user, channel, message):
- user = user.split('!', 1)[0] # rest is ~user@hostname
- # channel is '#twisted' or 'buildbot' (for private messages)
- channel = channel.lower()
- #print "privmsg:", user, channel, message
- if channel == self.nickname:
- # private message
- message = "%s: %s" % (self.nickname, message)
- reply = user
- else:
- reply = channel
- if message.startswith("%s:" % self.nickname):
- message = message[len("%s:" % self.nickname):]
-
- message = message.lstrip()
- if self.silly.has_key(message):
- return self.doSilly(user, reply, message)
-
- parts = message.split(' ', 1)
- if len(parts) == 1:
- parts = parts + ['']
- cmd, args = parts
- log.msg("irc command", cmd)
-
- meth = self.getCommandMethod(cmd)
- if not meth and message[-1] == '!':
- meth = self.command_EXCITED
-
- error = None
- try:
- if meth:
- meth(user, reply, args.strip())
- except UsageError, e:
- self.reply(reply, str(e))
- except:
- f = failure.Failure()
- log.err(f)
- error = "Something bad happened (see logs): %s" % f.type
-
- if error:
- try:
- self.reply(reply, error)
- except:
- log.err()
-
- #self.say(channel, "count %d" % self.counter)
- self.counter += 1
- def reply(self, dest, message):
- # maybe self.notice(dest, message) instead?
- self.msg(dest, message)
-
- def getCommandMethod(self, command):
- meth = getattr(self, 'command_' + command.upper(), None)
- return meth
-
- def getBuilder(self, which):
- try:
- b = self.status.getBuilder(which)
- except KeyError:
- raise UsageError, "no such builder '%s'" % which
- return b
-
- def getControl(self, which):
- if not self.control:
- raise UsageError("builder control is not enabled")
- try:
- bc = self.control.getBuilder(which)
- except KeyError:
- raise UsageError("no such builder '%s'" % which)
- return bc
-
- def getAllBuilders(self):
- """
- @rtype: list of L{buildbot.process.builder.Builder}
- """
- names = self.status.getBuilderNames(categories=self.categories)
- names.sort()
- builders = [self.status.getBuilder(n) for n in names]
- return builders
-
- def convertTime(self, seconds):
- if seconds < 60:
- return "%d seconds" % seconds
- minutes = int(seconds / 60)
- seconds = seconds - 60*minutes
- if minutes < 60:
- return "%dm%02ds" % (minutes, seconds)
- hours = int(minutes / 60)
- minutes = minutes - 60*hours
- return "%dh%02dm%02ds" % (hours, minutes, seconds)
-
- def doSilly(self, user, reply, message):
- response = self.silly[message]
- if type(response) != type([]):
- response = [response]
- when = 0.5
- for r in response:
- reactor.callLater(when, self.reply, reply, r)
- when += 2.5
-
- def command_HELLO(self, user, reply, args):
- self.reply(reply, "yes?")
-
- def command_VERSION(self, user, reply, args):
- self.reply(reply, "buildbot-%s at your service" % version)
-
- def command_LIST(self, user, reply, args):
- args = args.split()
- if len(args) == 0:
- raise UsageError, "try 'list builders'"
- if args[0] == 'builders':
- builders = self.getAllBuilders()
- str = "Configured builders: "
- for b in builders:
- str += b.name
- state = b.getState()[0]
- if state == 'offline':
- str += "[offline]"
- str += " "
- str = str.rstrip()
- self.reply(reply, str)
- return
- command_LIST.usage = "list builders - List configured builders"
-
- def command_STATUS(self, user, reply, args):
- args = args.split()
- if len(args) == 0:
- which = "all"
- elif len(args) == 1:
- which = args[0]
- else:
- raise UsageError, "try 'status <builder>'"
- if which == "all":
- builders = self.getAllBuilders()
- for b in builders:
- self.emit_status(reply, b.name)
- return
- self.emit_status(reply, which)
- command_STATUS.usage = "status [<which>] - List status of a builder (or all builders)"
-
- def command_WATCH(self, user, reply, args):
- args = args.split()
- if len(args) != 1:
- raise UsageError("try 'watch <builder>'")
- which = args[0]
- b = self.getBuilder(which)
- builds = b.getCurrentBuilds()
- if not builds:
- self.reply(reply, "there are no builds currently running")
- return
- for build in builds:
- assert not build.isFinished()
- d = build.waitUntilFinished()
- d.addCallback(self.buildFinished, reply)
- r = "watching build %s #%d until it finishes" \
- % (which, build.getNumber())
- eta = build.getETA()
- if eta is not None:
- r += " [%s]" % self.convertTime(eta)
- r += ".."
- self.reply(reply, r)
- command_WATCH.usage = "watch <which> - announce the completion of an active build"
-
- def buildFinished(self, b, reply):
- results = {SUCCESS: "Success",
- WARNINGS: "Warnings",
- FAILURE: "Failure",
- EXCEPTION: "Exception",
- }
-
- # only notify about builders we are interested in
- builder = b.getBuilder()
- log.msg('builder %r in category %s finished' % (builder,
- builder.category))
- if (self.categories != None and
- builder.category not in self.categories):
- return
-
- r = "Hey! build %s #%d is complete: %s" % \
- (b.getBuilder().getName(),
- b.getNumber(),
- results.get(b.getResults(), "??"))
- r += " [%s]" % " ".join(b.getText())
- self.reply(reply, r)
- buildurl = self.status.getURLForThing(b)
- if buildurl:
- self.reply(reply, "Build details are at %s" % buildurl)
-
- def command_FORCE(self, user, reply, args):
- args = shlex.split(args) # TODO: this requires python2.3 or newer
- if args.pop(0) != "build":
- raise UsageError("try 'force build WHICH <REASON>'")
- opts = ForceOptions()
- opts.parseOptions(args)
-
- which = opts['builder']
- branch = opts['branch']
- revision = opts['revision']
- reason = opts['reason']
-
- # keep weird stuff out of the branch and revision strings. TODO:
- # centralize this somewhere.
- if branch and not re.match(r'^[\w\.\-\/]*$', branch):
- log.msg("bad branch '%s'" % branch)
- self.reply(reply, "sorry, bad branch '%s'" % branch)
- return
- if revision and not re.match(r'^[\w\.\-\/]*$', revision):
- log.msg("bad revision '%s'" % revision)
- self.reply(reply, "sorry, bad revision '%s'" % revision)
- return
-
- bc = self.getControl(which)
-
- who = None # TODO: if we can authenticate that a particular User
- # asked for this, use User Name instead of None so they'll
- # be informed of the results.
- # TODO: or, monitor this build and announce the results through the
- # 'reply' argument.
- r = "forced: by IRC user <%s>: %s" % (user, reason)
- # TODO: maybe give certain users the ability to request builds of
- # certain branches
- s = SourceStamp(branch=branch, revision=revision)
- req = BuildRequest(r, s, which)
- try:
- bc.requestBuildSoon(req)
- except interfaces.NoSlaveError:
- self.reply(reply,
- "sorry, I can't force a build: all slaves are offline")
- return
- ireq = IrcBuildRequest(self, reply)
- req.subscribe(ireq.started)
-
-
- command_FORCE.usage = "force build <which> <reason> - Force a build"
-
- def command_STOP(self, user, reply, args):
- args = args.split(None, 2)
- if len(args) < 3 or args[0] != 'build':
- raise UsageError, "try 'stop build WHICH <REASON>'"
- which = args[1]
- reason = args[2]
-
- buildercontrol = self.getControl(which)
-
- who = None
- r = "stopped: by IRC user <%s>: %s" % (user, reason)
-
- # find an in-progress build
- builderstatus = self.getBuilder(which)
- builds = builderstatus.getCurrentBuilds()
- if not builds:
- self.reply(reply, "sorry, no build is currently running")
- return
- for build in builds:
- num = build.getNumber()
-
- # obtain the BuildControl object
- buildcontrol = buildercontrol.getBuild(num)
-
- # make it stop
- buildcontrol.stopBuild(r)
-
- self.reply(reply, "build %d interrupted" % num)
-
- command_STOP.usage = "stop build <which> <reason> - Stop a running build"
-
- def emit_status(self, reply, which):
- b = self.getBuilder(which)
- str = "%s: " % which
- state, builds = b.getState()
- str += state
- if state == "idle":
- last = b.getLastFinishedBuild()
- if last:
- start,finished = last.getTimes()
- str += ", last build %s secs ago: %s" % \
- (int(util.now() - finished), " ".join(last.getText()))
- if state == "building":
- t = []
- for build in builds:
- step = build.getCurrentStep()
- s = "(%s)" % " ".join(step.getText())
- ETA = build.getETA()
- if ETA is not None:
- s += " [ETA %s]" % self.convertTime(ETA)
- t.append(s)
- str += ", ".join(t)
- self.reply(reply, str)
-
- def emit_last(self, reply, which):
- last = self.getBuilder(which).getLastFinishedBuild()
- if not last:
- str = "(no builds run since last restart)"
- else:
- start,finish = last.getTimes()
- str = "%s secs ago: " % (int(util.now() - finish))
- str += " ".join(last.getText())
- self.reply(reply, "last build [%s]: %s" % (which, str))
-
- def command_LAST(self, user, reply, args):
- args = args.split()
- if len(args) == 0:
- which = "all"
- elif len(args) == 1:
- which = args[0]
- else:
- raise UsageError, "try 'last <builder>'"
- if which == "all":
- builders = self.getAllBuilders()
- for b in builders:
- self.emit_last(reply, b.name)
- return
- self.emit_last(reply, which)
- command_LAST.usage = "last <which> - list last build status for builder <which>"
-
- def build_commands(self):
- commands = []
- for k in self.__class__.__dict__.keys():
- if k.startswith('command_'):
- commands.append(k[8:].lower())
- commands.sort()
- return commands
-
- def command_HELP(self, user, reply, args):
- args = args.split()
- if len(args) == 0:
- self.reply(reply, "Get help on what? (try 'help <foo>', or 'commands' for a command list)")
- return
- command = args[0]
- meth = self.getCommandMethod(command)
- if not meth:
- raise UsageError, "no such command '%s'" % command
- usage = getattr(meth, 'usage', None)
- if usage:
- self.reply(reply, "Usage: %s" % usage)
- else:
- self.reply(reply, "No usage info for '%s'" % command)
- command_HELP.usage = "help <command> - Give help for <command>"
-
- def command_SOURCE(self, user, reply, args):
- banner = "My source can be found at http://buildbot.sourceforge.net/"
- self.reply(reply, banner)
-
- def command_COMMANDS(self, user, reply, args):
- commands = self.build_commands()
- str = "buildbot commands: " + ", ".join(commands)
- self.reply(reply, str)
- command_COMMANDS.usage = "commands - List available commands"
-
- def command_DESTROY(self, user, reply, args):
- self.me(reply, "readies phasers")
-
- def command_DANCE(self, user, reply, args):
- reactor.callLater(1.0, self.reply, reply, "0-<")
- reactor.callLater(3.0, self.reply, reply, "0-/")
- reactor.callLater(3.5, self.reply, reply, "0-\\")
-
- def command_EXCITED(self, user, reply, args):
- # like 'buildbot: destroy the sun!'
- self.reply(reply, "What you say!")
-
- def action(self, user, channel, data):
- #log.msg("action: %s,%s,%s" % (user, channel, data))
- user = user.split('!', 1)[0] # rest is ~user@hostname
- # somebody did an action (/me actions)
- if data.endswith("s buildbot"):
- words = data.split()
- verb = words[-2]
- timeout = 4
- if verb == "kicks":
- response = "%s back" % verb
- timeout = 1
- else:
- response = "%s %s too" % (verb, user)
- reactor.callLater(timeout, self.me, channel, response)
- # userJoined(self, user, channel)
-
- # output
- # self.say(channel, message) # broadcast
- # self.msg(user, message) # unicast
- # self.me(channel, action) # send action
- # self.away(message='')
- # self.quit(message='')
-
-class ThrottledClientFactory(protocol.ClientFactory):
- lostDelay = 2
- failedDelay = 60
- def clientConnectionLost(self, connector, reason):
- reactor.callLater(self.lostDelay, connector.connect)
- def clientConnectionFailed(self, connector, reason):
- reactor.callLater(self.failedDelay, connector.connect)
-
-class IrcStatusFactory(ThrottledClientFactory):
- protocol = IrcStatusBot
-
- status = None
- control = None
- shuttingDown = False
- p = None
-
- def __init__(self, nickname, password, channels, categories):
- #ThrottledClientFactory.__init__(self) # doesn't exist
- self.status = None
- self.nickname = nickname
- self.password = password
- self.channels = channels
- self.categories = categories
-
- def __getstate__(self):
- d = self.__dict__.copy()
- del d['p']
- return d
-
- def shutdown(self):
- self.shuttingDown = True
- if self.p:
- self.p.quit("buildmaster reconfigured: bot disconnecting")
-
- def buildProtocol(self, address):
- p = self.protocol(self.nickname, self.password,
- self.channels, self.status,
- self.categories)
- p.factory = self
- p.status = self.status
- p.control = self.control
- self.p = p
- return p
-
- # TODO: I think a shutdown that occurs while the connection is being
- # established will make this explode
-
- def clientConnectionLost(self, connector, reason):
- if self.shuttingDown:
- log.msg("not scheduling reconnection attempt")
- return
- ThrottledClientFactory.clientConnectionLost(self, connector, reason)
-
- def clientConnectionFailed(self, connector, reason):
- if self.shuttingDown:
- log.msg("not scheduling reconnection attempt")
- return
- ThrottledClientFactory.clientConnectionFailed(self, connector, reason)
-
-
-class IRC(base.StatusReceiverMultiService):
- """I am an IRC bot which can be queried for status information. I
- connect to a single IRC server and am known by a single nickname on that
- server; however, I can join multiple channels."""
-
- compare_attrs = ["host", "port", "nick", "password",
- "channels", "allowForce",
- "categories"]
-
- def __init__(self, host, nick, channels, port=6667, allowForce=True,
- categories=None, password=None):
- base.StatusReceiverMultiService.__init__(self)
-
- assert allowForce in (True, False) # TODO: implement others
-
- # need to stash these so we can detect changes later
- self.host = host
- self.port = port
- self.nick = nick
- self.channels = channels
- self.password = password
- self.allowForce = allowForce
- self.categories = categories
-
- # need to stash the factory so we can give it the status object
- self.f = IrcStatusFactory(self.nick, self.password,
- self.channels, self.categories)
-
- c = internet.TCPClient(host, port, self.f)
- c.setServiceParent(self)
-
- def setServiceParent(self, parent):
- base.StatusReceiverMultiService.setServiceParent(self, parent)
- self.f.status = parent.getStatus()
- if self.allowForce:
- self.f.control = interfaces.IControl(parent)
-
- def stopService(self):
- # make sure the factory will stop reconnecting
- self.f.shutdown()
- return base.StatusReceiverMultiService.stopService(self)
-
-
-def main():
- from twisted.internet import app
- a = app.Application("irctest")
- f = IrcStatusFactory()
- host = "localhost"
- port = 6667
- f.addNetwork((host, port), ["private", "other"])
- a.connectTCP(host, port, f)
- a.run(save=0)
-
-
-if __name__ == '__main__':
- main()
-
-## buildbot: list builders
-# buildbot: watch quick
-# print notification when current build in 'quick' finishes
-## buildbot: status
-## buildbot: status full-2.3
-## building, not, % complete, ETA
-## buildbot: force build full-2.3 "reason"
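The IRC plugin is configured like the other status targets. A minimal, hedged master.cfg sketch (server, nick, and channel names are placeholders):

    from buildbot.status import words

    c['status'].append(words.IRC(
        host="irc.example.org",
        nick="bbot",
        channels=["#builds"],
        allowForce=False,        # read-only bot: no 'force build' or 'stop build' from IRC
    ))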
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/__init__.py b/buildbot/buildbot-source/build/lib/buildbot/test/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/emit.py b/buildbot/buildbot-source/build/lib/buildbot/test/emit.py
deleted file mode 100644
index c5bf5677d..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/emit.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#! /usr/bin/python
-
-import os, sys
-
-sys.stdout.write("this is stdout\n")
-sys.stderr.write("this is stderr\n")
-if os.environ.has_key("EMIT_TEST"):
- sys.stdout.write("EMIT_TEST: %s\n" % os.environ["EMIT_TEST"])
-rc = int(sys.argv[1])
-sys.exit(rc)
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/runutils.py b/buildbot/buildbot-source/build/lib/buildbot/test/runutils.py
deleted file mode 100644
index 0f7b99e35..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/runutils.py
+++ /dev/null
@@ -1,193 +0,0 @@
-
-import shutil, os, errno
-from twisted.internet import defer
-from twisted.python import log
-
-from buildbot import master, interfaces
-from buildbot.twcompat import maybeWait
-from buildbot.slave import bot
-from buildbot.process.base import BuildRequest
-from buildbot.sourcestamp import SourceStamp
-from buildbot.status.builder import SUCCESS
-
-class MyBot(bot.Bot):
- def remote_getSlaveInfo(self):
- return self.parent.info
-
-class MyBuildSlave(bot.BuildSlave):
- botClass = MyBot
-
-class RunMixin:
- master = None
-
- def rmtree(self, d):
- try:
- shutil.rmtree(d, ignore_errors=1)
- except OSError, e:
-            # Python 2.2 appears to ignore ignore_errors
- if e.errno != errno.ENOENT:
- raise
-
- def setUp(self):
- self.slaves = {}
- self.rmtree("basedir")
- os.mkdir("basedir")
- self.master = master.BuildMaster("basedir")
- self.status = self.master.getStatus()
- self.control = interfaces.IControl(self.master)
-
- def connectOneSlave(self, slavename, opts={}):
- port = self.master.slavePort._port.getHost().port
- self.rmtree("slavebase-%s" % slavename)
- os.mkdir("slavebase-%s" % slavename)
- slave = MyBuildSlave("localhost", port, slavename, "sekrit",
- "slavebase-%s" % slavename,
- keepalive=0, usePTY=1, debugOpts=opts)
- slave.info = {"admin": "one"}
- self.slaves[slavename] = slave
- slave.startService()
-
- def connectSlave(self, builders=["dummy"], slavename="bot1",
- opts={}):
- # connect buildslave 'slavename' and wait for it to connect to all of
- # the given builders
- dl = []
-        # initiate the calls for all of them before waiting on the results;
-        # otherwise we might miss some
- for b in builders:
- dl.append(self.master.botmaster.waitUntilBuilderAttached(b))
- d = defer.DeferredList(dl)
- self.connectOneSlave(slavename, opts)
- return d
-
- def connectSlaves(self, slavenames, builders):
- dl = []
-        # initiate the calls for all of them before waiting on the results;
-        # otherwise we might miss some
- for b in builders:
- dl.append(self.master.botmaster.waitUntilBuilderAttached(b))
- d = defer.DeferredList(dl)
- for name in slavenames:
- self.connectOneSlave(name)
- return d
-
- def connectSlave2(self):
- # this takes over for bot1, so it has to share the slavename
- port = self.master.slavePort._port.getHost().port
- self.rmtree("slavebase-bot2")
- os.mkdir("slavebase-bot2")
- # this uses bot1, really
- slave = MyBuildSlave("localhost", port, "bot1", "sekrit",
- "slavebase-bot2", keepalive=0, usePTY=1)
- slave.info = {"admin": "two"}
- self.slaves['bot2'] = slave
- slave.startService()
-
- def connectSlaveFastTimeout(self):
- # this slave has a very fast keepalive timeout
- port = self.master.slavePort._port.getHost().port
- self.rmtree("slavebase-bot1")
- os.mkdir("slavebase-bot1")
- slave = MyBuildSlave("localhost", port, "bot1", "sekrit",
- "slavebase-bot1", keepalive=2, usePTY=1,
- keepaliveTimeout=1)
- slave.info = {"admin": "one"}
- self.slaves['bot1'] = slave
- slave.startService()
- d = self.master.botmaster.waitUntilBuilderAttached("dummy")
- return d
-
- # things to start builds
- def requestBuild(self, builder):
- # returns a Deferred that fires with an IBuildStatus object when the
- # build is finished
- req = BuildRequest("forced build", SourceStamp())
- self.control.getBuilder(builder).requestBuild(req)
- return req.waitUntilFinished()
-
- def failUnlessBuildSucceeded(self, bs):
- self.failUnless(bs.getResults() == SUCCESS)
- return bs # useful for chaining
-
- def tearDown(self):
- log.msg("doing tearDown")
- d = self.shutdownAllSlaves()
- d.addCallback(self._tearDown_1)
- d.addCallback(self._tearDown_2)
- return maybeWait(d)
- def _tearDown_1(self, res):
- if self.master:
- return defer.maybeDeferred(self.master.stopService)
- def _tearDown_2(self, res):
- self.master = None
- log.msg("tearDown done")
-
-
- # various forms of slave death
-
- def shutdownAllSlaves(self):
- # the slave has disconnected normally: they SIGINT'ed it, or it shut
- # down willingly. This will kill child processes and give them a
- # chance to finish up. We return a Deferred that will fire when
- # everything is finished shutting down.
-
- log.msg("doing shutdownAllSlaves")
- dl = []
- for slave in self.slaves.values():
- dl.append(slave.waitUntilDisconnected())
- dl.append(defer.maybeDeferred(slave.stopService))
- d = defer.DeferredList(dl)
- d.addCallback(self._shutdownAllSlavesDone)
- return d
- def _shutdownAllSlavesDone(self, res):
- for name in self.slaves.keys():
- del self.slaves[name]
- return self.master.botmaster.waitUntilBuilderFullyDetached("dummy")
-
- def shutdownSlave(self, slavename, buildername):
- # this slave has disconnected normally: they SIGINT'ed it, or it shut
- # down willingly. This will kill child processes and give them a
- # chance to finish up. We return a Deferred that will fire when
- # everything is finished shutting down, and the given Builder knows
- # that the slave has gone away.
-
- s = self.slaves[slavename]
- dl = [self.master.botmaster.waitUntilBuilderDetached(buildername),
- s.waitUntilDisconnected()]
- d = defer.DeferredList(dl)
- d.addCallback(self._shutdownSlave_done, slavename)
- s.stopService()
- return d
- def _shutdownSlave_done(self, res, slavename):
- del self.slaves[slavename]
-
- def killSlave(self):
- # the slave has died, its host sent a FIN. The .notifyOnDisconnect
- # callbacks will terminate the current step, so the build should be
- # flunked (no further steps should be started).
- self.slaves['bot1'].bf.continueTrying = 0
- bot = self.slaves['bot1'].getServiceNamed("bot")
- broker = bot.builders["dummy"].remote.broker
- broker.transport.loseConnection()
- del self.slaves['bot1']
-
- def disappearSlave(self, slavename="bot1", buildername="dummy"):
- # the slave's host has vanished off the net, leaving the connection
- # dangling. This will be detected quickly by app-level keepalives or
- # a ping, or slowly by TCP timeouts.
-
- # simulate this by replacing the slave Broker's .dataReceived method
- # with one that just throws away all data.
- def discard(data):
- pass
- bot = self.slaves[slavename].getServiceNamed("bot")
- broker = bot.builders[buildername].remote.broker
- broker.dataReceived = discard # seal its ears
- broker.transport.write = discard # and take away its voice
-
- def ghostSlave(self):
- # the slave thinks it has lost the connection, and initiated a
- # reconnect. The master doesn't yet realize it has lost the previous
- # connection, and sees two connections at once.
- raise NotImplementedError
-
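The class above is the shared harness for the master/slave tests that follow. A minimal sketch of the usual pattern for building on it, with a placeholder config string that defines a single 'dummy' builder:

    from twisted.trial import unittest
    from buildbot.test.runutils import RunMixin
    from buildbot.twcompat import maybeWait

    # placeholder master.cfg text; any config defining a 'dummy' builder would do
    dummy_config = """
    from buildbot.process import factory, step
    BuildmasterConfig = c = {}
    c['bots'] = [('bot1', 'sekrit')]
    c['sources'] = []
    c['schedulers'] = []
    c['builders'] = [{'name': 'dummy', 'slavename': 'bot1',
                      'builddir': 'dummy-dir',
                      'factory': factory.BuildFactory([(step.Dummy, {'timeout': 1})])}]
    c['slavePortnum'] = 0
    """

    class ExampleRun(RunMixin, unittest.TestCase):
        def setUp(self):
            RunMixin.setUp(self)
            self.master.loadConfig(dummy_config)
            self.master.startService()
            # connect a slave and wait until the 'dummy' builder has attached
            return maybeWait(self.connectSlave(["dummy"]))

        def testBuild(self):
            # force a build and verify it finishes with SUCCESS
            d = self.requestBuild("dummy")
            d.addCallback(self.failUnlessBuildSucceeded)
            return maybeWait(d)
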
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/sleep.py b/buildbot/buildbot-source/build/lib/buildbot/test/sleep.py
deleted file mode 100644
index 48adc39b2..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/sleep.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#! /usr/bin/python
-
-import sys, time
-delay = int(sys.argv[1])
-
-sys.stdout.write("sleeping for %d seconds\n" % delay)
-time.sleep(delay)
-sys.stdout.write("woke up\n")
-sys.exit(0)
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test__versions.py b/buildbot/buildbot-source/build/lib/buildbot/test/test__versions.py
deleted file mode 100644
index a69fcc425..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test__versions.py
+++ /dev/null
@@ -1,16 +0,0 @@
-
-# This is a fake test which just logs the version of Twisted, to make it
-# easier to track down failures in other tests.
-
-from twisted.trial import unittest
-from twisted.python import log
-from twisted import copyright
-import sys
-import buildbot
-
-class Versions(unittest.TestCase):
- def test_versions(self):
- log.msg("Python Version: %s" % sys.version)
- log.msg("Twisted Version: %s" % copyright.version)
- log.msg("Buildbot Version: %s" % buildbot.version)
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_buildreq.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_buildreq.py
deleted file mode 100644
index f59f4970f..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_buildreq.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# -*- test-case-name: buildbot.test.test_buildreq -*-
-
-from twisted.trial import unittest
-
-from buildbot import buildset, interfaces, sourcestamp
-from buildbot.twcompat import maybeWait
-from buildbot.process import base
-from buildbot.status import builder
-from buildbot.changes.changes import Change
-
-class Request(unittest.TestCase):
- def testMerge(self):
- R = base.BuildRequest
- S = sourcestamp.SourceStamp
- b1 = R("why", S("branch1", None, None, None))
- b1r1 = R("why2", S("branch1", "rev1", None, None))
- b1r1a = R("why not", S("branch1", "rev1", None, None))
- b1r2 = R("why3", S("branch1", "rev2", None, None))
- b2r2 = R("why4", S("branch2", "rev2", None, None))
- b1r1p1 = R("why5", S("branch1", "rev1", (3, "diff"), None))
- c1 = Change("alice", [], "changed stuff", branch="branch1")
- c2 = Change("alice", [], "changed stuff", branch="branch1")
- c3 = Change("alice", [], "changed stuff", branch="branch1")
- c4 = Change("alice", [], "changed stuff", branch="branch1")
- c5 = Change("alice", [], "changed stuff", branch="branch1")
- c6 = Change("alice", [], "changed stuff", branch="branch1")
- b1c1 = R("changes", S("branch1", None, None, [c1,c2,c3]))
- b1c2 = R("changes", S("branch1", None, None, [c4,c5,c6]))
-
- self.failUnless(b1.canBeMergedWith(b1))
- self.failIf(b1.canBeMergedWith(b1r1))
- self.failIf(b1.canBeMergedWith(b2r2))
- self.failIf(b1.canBeMergedWith(b1r1p1))
- self.failIf(b1.canBeMergedWith(b1c1))
-
- self.failIf(b1r1.canBeMergedWith(b1))
- self.failUnless(b1r1.canBeMergedWith(b1r1))
- self.failIf(b1r1.canBeMergedWith(b2r2))
- self.failIf(b1r1.canBeMergedWith(b1r1p1))
- self.failIf(b1r1.canBeMergedWith(b1c1))
-
- self.failIf(b1r2.canBeMergedWith(b1))
- self.failIf(b1r2.canBeMergedWith(b1r1))
- self.failUnless(b1r2.canBeMergedWith(b1r2))
- self.failIf(b1r2.canBeMergedWith(b2r2))
- self.failIf(b1r2.canBeMergedWith(b1r1p1))
-
- self.failIf(b1r1p1.canBeMergedWith(b1))
- self.failIf(b1r1p1.canBeMergedWith(b1r1))
- self.failIf(b1r1p1.canBeMergedWith(b1r2))
- self.failIf(b1r1p1.canBeMergedWith(b2r2))
- self.failIf(b1r1p1.canBeMergedWith(b1c1))
-
- self.failIf(b1c1.canBeMergedWith(b1))
- self.failIf(b1c1.canBeMergedWith(b1r1))
- self.failIf(b1c1.canBeMergedWith(b1r2))
- self.failIf(b1c1.canBeMergedWith(b2r2))
- self.failIf(b1c1.canBeMergedWith(b1r1p1))
- self.failUnless(b1c1.canBeMergedWith(b1c1))
- self.failUnless(b1c1.canBeMergedWith(b1c2))
-
- sm = b1.mergeWith([])
- self.failUnlessEqual(sm.branch, "branch1")
- self.failUnlessEqual(sm.revision, None)
- self.failUnlessEqual(sm.patch, None)
- self.failUnlessEqual(sm.changes, [])
-
- ss = b1r1.mergeWith([b1r1])
- self.failUnlessEqual(ss, S("branch1", "rev1", None, None))
- why = b1r1.mergeReasons([b1r1])
- self.failUnlessEqual(why, "why2")
- why = b1r1.mergeReasons([b1r1a])
- self.failUnlessEqual(why, "why2, why not")
-
- ss = b1c1.mergeWith([b1c2])
- self.failUnlessEqual(ss, S("branch1", None, None, [c1,c2,c3,c4,c5,c6]))
- why = b1c1.mergeReasons([b1c2])
- self.failUnlessEqual(why, "changes")
-
-
-class FakeBuilder:
- name = "fake"
- def __init__(self):
- self.requests = []
- def submitBuildRequest(self, req):
- self.requests.append(req)
-
-
-class Set(unittest.TestCase):
- def testBuildSet(self):
- S = buildset.BuildSet
- a,b = FakeBuilder(), FakeBuilder()
-
- # two builds, the first one fails, the second one succeeds. The
- # waitUntilSuccess watcher fires as soon as the first one fails,
- # while the waitUntilFinished watcher doesn't fire until all builds
- # are complete.
-
- source = sourcestamp.SourceStamp()
- s = S(["a","b"], source, "forced build")
- s.start([a,b])
- self.failUnlessEqual(len(a.requests), 1)
- self.failUnlessEqual(len(b.requests), 1)
- r1 = a.requests[0]
- self.failUnlessEqual(r1.reason, s.reason)
- self.failUnlessEqual(r1.source, s.source)
-
- st = s.status
- self.failUnlessEqual(st.getSourceStamp(), source)
- self.failUnlessEqual(st.getReason(), "forced build")
- self.failUnlessEqual(st.getBuilderNames(), ["a","b"])
- self.failIf(st.isFinished())
- brs = st.getBuildRequests()
- self.failUnlessEqual(len(brs), 2)
-
- res = []
- d1 = s.waitUntilSuccess()
- d1.addCallback(lambda r: res.append(("success", r)))
- d2 = s.waitUntilFinished()
- d2.addCallback(lambda r: res.append(("finished", r)))
-
- self.failUnlessEqual(res, [])
-
- # the first build finishes here, with FAILURE
- builderstatus_a = builder.BuilderStatus("a")
- bsa = builder.BuildStatus(builderstatus_a, 1)
- bsa.setResults(builder.FAILURE)
- a.requests[0].finished(bsa)
-
- # any FAILURE flunks the BuildSet immediately, so the
- # waitUntilSuccess deferred fires right away. However, the
- # waitUntilFinished deferred must wait until all builds have
- # completed.
- self.failUnlessEqual(len(res), 1)
- self.failUnlessEqual(res[0][0], "success")
- bss = res[0][1]
- self.failUnless(interfaces.IBuildSetStatus(bss, None))
- self.failUnlessEqual(bss.getResults(), builder.FAILURE)
-
- # here we finish the second build
- builderstatus_b = builder.BuilderStatus("b")
- bsb = builder.BuildStatus(builderstatus_b, 1)
- bsb.setResults(builder.SUCCESS)
- b.requests[0].finished(bsb)
-
- # .. which ought to fire the waitUntilFinished deferred
- self.failUnlessEqual(len(res), 2)
- self.failUnlessEqual(res[1][0], "finished")
- self.failUnlessEqual(res[1][1], bss)
-
- # and finish the BuildSet overall
- self.failUnless(st.isFinished())
- self.failUnlessEqual(st.getResults(), builder.FAILURE)
-
- def testSuccess(self):
- S = buildset.BuildSet
- a,b = FakeBuilder(), FakeBuilder()
- # this time, both builds succeed
-
- source = sourcestamp.SourceStamp()
- s = S(["a","b"], source, "forced build")
- s.start([a,b])
-
- st = s.status
- self.failUnlessEqual(st.getSourceStamp(), source)
- self.failUnlessEqual(st.getReason(), "forced build")
- self.failUnlessEqual(st.getBuilderNames(), ["a","b"])
- self.failIf(st.isFinished())
-
- builderstatus_a = builder.BuilderStatus("a")
- bsa = builder.BuildStatus(builderstatus_a, 1)
- bsa.setResults(builder.SUCCESS)
- a.requests[0].finished(bsa)
-
- builderstatus_b = builder.BuilderStatus("b")
- bsb = builder.BuildStatus(builderstatus_b, 1)
- bsb.setResults(builder.SUCCESS)
- b.requests[0].finished(bsb)
-
- self.failUnless(st.isFinished())
- self.failUnlessEqual(st.getResults(), builder.SUCCESS)
-
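The comments above distinguish the two BuildSet watchers: waitUntilSuccess fires as soon as any build fails (or once all have passed), while waitUntilFinished fires only after every build has completed. A compressed sketch of the same pattern; 'builders' stands in for whatever builder objects the master supplies:

    from buildbot import buildset, sourcestamp

    def report_first(bss):
        return bss.getResults()    # FAILURE as soon as one build flunks

    def report_done(bss):
        return bss.getResults()    # overall result once every build is in

    s = buildset.BuildSet(["a", "b"], sourcestamp.SourceStamp(), "forced build")
    s.start(builders)                          # 'builders' is a placeholder list
    s.waitUntilSuccess().addCallback(report_first)
    s.waitUntilFinished().addCallback(report_done)
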
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_changes.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_changes.py
deleted file mode 100644
index df8662368..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_changes.py
+++ /dev/null
@@ -1,192 +0,0 @@
-# -*- test-case-name: buildbot.test.test_changes -*-
-
-from twisted.trial import unittest
-from twisted.internet import defer, reactor
-from twisted.python import log
-
-from buildbot import master
-from buildbot.twcompat import maybeWait
-from buildbot.changes import pb
-from buildbot.scripts import runner
-
-d1 = {'files': ["Project/foo.c", "Project/bar/boo.c"],
- 'who': "marvin",
- 'comments': "Some changes in Project"}
-d2 = {'files': ["OtherProject/bar.c"],
- 'who': "zaphod",
- 'comments': "other changes"}
-d3 = {'files': ["Project/baz.c", "OtherProject/bloo.c"],
- 'who': "alice",
- 'comments': "mixed changes"}
-
-class TestChangePerspective(unittest.TestCase):
-
- def setUp(self):
- self.changes = []
-
- def addChange(self, c):
- self.changes.append(c)
-
- def testNoPrefix(self):
- p = pb.ChangePerspective(self, None)
- p.perspective_addChange(d1)
- self.failUnlessEqual(len(self.changes), 1)
- c1 = self.changes[0]
- self.failUnlessEqual(c1.files,
- ["Project/foo.c", "Project/bar/boo.c"])
- self.failUnlessEqual(c1.comments, "Some changes in Project")
- self.failUnlessEqual(c1.who, "marvin")
-
- def testPrefix(self):
- p = pb.ChangePerspective(self, "Project")
-
- p.perspective_addChange(d1)
- self.failUnlessEqual(len(self.changes), 1)
- c1 = self.changes[-1]
- self.failUnlessEqual(c1.files, ["foo.c", "bar/boo.c"])
- self.failUnlessEqual(c1.comments, "Some changes in Project")
- self.failUnlessEqual(c1.who, "marvin")
-
- p.perspective_addChange(d2) # should be ignored
- self.failUnlessEqual(len(self.changes), 1)
-
- p.perspective_addChange(d3) # should ignore the OtherProject file
- self.failUnlessEqual(len(self.changes), 2)
-
- c3 = self.changes[-1]
- self.failUnlessEqual(c3.files, ["baz.c"])
- self.failUnlessEqual(c3.comments, "mixed changes")
- self.failUnlessEqual(c3.who, "alice")
-
-config_empty = """
-BuildmasterConfig = c = {}
-c['bots'] = []
-c['builders'] = []
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 0
-"""
-
-config_sender = config_empty + \
-"""
-from buildbot.changes import pb
-c['sources'] = [pb.PBChangeSource(port=None)]
-"""
-
-class Sender(unittest.TestCase):
- def setUp(self):
- self.master = master.BuildMaster(".")
- def tearDown(self):
- d = defer.maybeDeferred(self.master.stopService)
- # TODO: something in Twisted-2.0.0 (and probably 2.0.1) doesn't shut
- # down the Broker listening socket when it's supposed to.
- # Twisted-1.3.0, and current SVN (which will be post-2.0.1) are ok.
- # This iterate() is a quick hack to deal with the problem. I need to
- # investigate more thoroughly and find a better solution.
- d.addCallback(self.stall, 0.1)
- return maybeWait(d)
-
- def stall(self, res, timeout):
- d = defer.Deferred()
- reactor.callLater(timeout, d.callback, res)
- return d
-
- def testSender(self):
- self.master.loadConfig(config_empty)
- self.master.startService()
- # TODO: BuildMaster.loadChanges replaces the change_svc object, so we
- # have to load it twice. Clean this up.
- d = self.master.loadConfig(config_sender)
- d.addCallback(self._testSender_1)
- return maybeWait(d)
-
- def _testSender_1(self, res):
- self.cm = cm = self.master.change_svc
- s1 = list(self.cm)[0]
- port = self.master.slavePort._port.getHost().port
-
- self.options = {'username': "alice",
- 'master': "localhost:%d" % port,
- 'files': ["foo.c"],
- }
-
- d = runner.sendchange(self.options)
- d.addCallback(self._testSender_2)
- return d
-
- def _testSender_2(self, res):
- # now check that the change was received
- self.failUnlessEqual(len(self.cm.changes), 1)
- c = self.cm.changes.pop()
- self.failUnlessEqual(c.who, "alice")
- self.failUnlessEqual(c.files, ["foo.c"])
- self.failUnlessEqual(c.comments, "")
- self.failUnlessEqual(c.revision, None)
-
- self.options['revision'] = "r123"
- self.options['comments'] = "test change"
-
- d = runner.sendchange(self.options)
- d.addCallback(self._testSender_3)
- return d
-
- def _testSender_3(self, res):
- self.failUnlessEqual(len(self.cm.changes), 1)
- c = self.cm.changes.pop()
- self.failUnlessEqual(c.who, "alice")
- self.failUnlessEqual(c.files, ["foo.c"])
- self.failUnlessEqual(c.comments, "test change")
- self.failUnlessEqual(c.revision, "r123")
-
- # test options['logfile'] by creating a temporary file
- logfile = self.mktemp()
- f = open(logfile, "wt")
- f.write("longer test change")
- f.close()
- self.options['comments'] = None
- self.options['logfile'] = logfile
-
- d = runner.sendchange(self.options)
- d.addCallback(self._testSender_4)
- return d
-
- def _testSender_4(self, res):
- self.failUnlessEqual(len(self.cm.changes), 1)
- c = self.cm.changes.pop()
- self.failUnlessEqual(c.who, "alice")
- self.failUnlessEqual(c.files, ["foo.c"])
- self.failUnlessEqual(c.comments, "longer test change")
- self.failUnlessEqual(c.revision, "r123")
-
- # make sure that numeric revisions work too
- self.options['logfile'] = None
- del self.options['revision']
- self.options['revision_number'] = 42
-
- d = runner.sendchange(self.options)
- d.addCallback(self._testSender_5)
- return d
-
- def _testSender_5(self, res):
- self.failUnlessEqual(len(self.cm.changes), 1)
- c = self.cm.changes.pop()
- self.failUnlessEqual(c.who, "alice")
- self.failUnlessEqual(c.files, ["foo.c"])
- self.failUnlessEqual(c.comments, "")
- self.failUnlessEqual(c.revision, 42)
-
- # verify --branch too
- self.options['branch'] = "branches/test"
-
- d = runner.sendchange(self.options)
- d.addCallback(self._testSender_6)
- return d
-
- def _testSender_6(self, res):
- self.failUnlessEqual(len(self.cm.changes), 1)
- c = self.cm.changes.pop()
- self.failUnlessEqual(c.who, "alice")
- self.failUnlessEqual(c.files, ["foo.c"])
- self.failUnlessEqual(c.comments, "")
- self.failUnlessEqual(c.revision, 42)
- self.failUnlessEqual(c.branch, "branches/test")
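The Sender test above drives runner.sendchange() with the same options dict that the 'buildbot sendchange' command builds from its flags. A minimal sketch; the master address and file list are placeholders:

    from buildbot.scripts import runner

    options = {'username': 'alice',
               'master': 'localhost:9989',      # placeholder host:port
               'files': ['foo.c'],
               'revision': 'r123',
               'comments': 'test change',
               'branch': 'branches/test'}
    d = runner.sendchange(options)              # returns a Deferred
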
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_config.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_config.py
deleted file mode 100644
index 6eee7d74e..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_config.py
+++ /dev/null
@@ -1,1007 +0,0 @@
-# -*- test-case-name: buildbot.test.test_config -*-
-
-from __future__ import generators
-import os, os.path
-
-from twisted.trial import unittest
-from twisted.python import components, failure
-from twisted.internet import defer
-
-try:
- import cvstoys
- from buildbot.changes.freshcvs import FreshCVSSource
-except ImportError:
- cvstoys = None
-
-from buildbot.twcompat import providedBy, maybeWait
-from buildbot.master import BuildMaster
-from buildbot import scheduler
-from buildbot import interfaces as ibb
-from twisted.application import service, internet
-from twisted.spread import pb
-from twisted.web.server import Site
-from twisted.web.distrib import ResourcePublisher
-from buildbot.process.builder import Builder
-from buildbot.process.factory import BasicBuildFactory
-from buildbot.process import step
-from buildbot.status import html, builder, base
-try:
- from buildbot.status import words
-except ImportError:
- words = None
-
-import sys
-from twisted.python import log
-#log.startLogging(sys.stdout)
-
-emptyCfg = \
-"""
-BuildmasterConfig = c = {}
-c['bots'] = []
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = []
-c['slavePortnum'] = 9999
-c['projectName'] = 'dummy project'
-c['projectURL'] = 'http://dummy.example.com'
-c['buildbotURL'] = 'http://dummy.example.com/buildbot'
-"""
-
-buildersCfg = \
-"""
-from buildbot.process.factory import BasicBuildFactory
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 9999
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-c['builders'] = [{'name':'builder1', 'slavename':'bot1',
- 'builddir':'workdir', 'factory':f1}]
-"""
-
-buildersCfg2 = buildersCfg + \
-"""
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule2')
-c['builders'] = [{'name':'builder1', 'slavename':'bot1',
- 'builddir':'workdir', 'factory':f1}]
-"""
-
-buildersCfg3 = buildersCfg2 + \
-"""
-c['builders'].append({'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 })
-"""
-
-buildersCfg4 = buildersCfg2 + \
-"""
-c['builders'] = [{ 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'newworkdir', 'factory': f1 },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 }]
-"""
-
-ircCfg1 = emptyCfg + \
-"""
-from buildbot.status import words
-c['status'] = [words.IRC('irc.us.freenode.net', 'buildbot', ['twisted'])]
-"""
-
-ircCfg2 = emptyCfg + \
-"""
-from buildbot.status import words
-c['status'] = [words.IRC('irc.us.freenode.net', 'buildbot', ['twisted']),
- words.IRC('irc.example.com', 'otherbot', ['chan1', 'chan2'])]
-"""
-
-ircCfg3 = emptyCfg + \
-"""
-from buildbot.status import words
-c['status'] = [words.IRC('irc.us.freenode.net', 'buildbot', ['knotted'])]
-"""
-
-webCfg1 = emptyCfg + \
-"""
-from buildbot.status import html
-c['status'] = [html.Waterfall(http_port=9980)]
-"""
-
-webCfg2 = emptyCfg + \
-"""
-from buildbot.status import html
-c['status'] = [html.Waterfall(http_port=9981)]
-"""
-
-webCfg3 = emptyCfg + \
-"""
-from buildbot.status import html
-c['status'] = [html.Waterfall(http_port='tcp:9981:interface=127.0.0.1')]
-"""
-
-webNameCfg1 = emptyCfg + \
-"""
-from buildbot.status import html
-c['status'] = [html.Waterfall(distrib_port='~/.twistd-web-pb')]
-"""
-
-webNameCfg2 = emptyCfg + \
-"""
-from buildbot.status import html
-c['status'] = [html.Waterfall(distrib_port='./bar.socket')]
-"""
-
-debugPasswordCfg = emptyCfg + \
-"""
-c['debugPassword'] = 'sekrit'
-"""
-
-interlockCfgBad = \
-"""
-from buildbot.process.factory import BasicBuildFactory
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1 },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-# interlocks have been removed
-c['interlocks'] = [('lock1', ['builder1'], ['builder2', 'builder3']),
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfgBad1 = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock1') # duplicate lock name
-f1 = BuildFactory([s(Dummy, locks=[])])
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1, 'locks': [l1, l2] },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfgBad2 = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock, SlaveLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = SlaveLock('lock1') # duplicate lock name
-f1 = BuildFactory([s(Dummy, locks=[])])
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1, 'locks': [l1, l2] },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfgBad3 = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock1') # duplicate lock name
-f1 = BuildFactory([s(Dummy, locks=[l2])])
-f2 = BuildFactory([s(Dummy)])
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f2, 'locks': [l1] },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfg1a = \
-"""
-from buildbot.process.factory import BasicBuildFactory
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock2')
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1, 'locks': [l1, l2] },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfg1b = \
-"""
-from buildbot.process.factory import BasicBuildFactory
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock2')
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1, 'locks': [l1] },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-# test out step Locks
-lockCfg2a = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock2')
-f1 = BuildFactory([s(Dummy, locks=[l1,l2])])
-f2 = BuildFactory([s(Dummy)])
-
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1 },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f2 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfg2b = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock2')
-f1 = BuildFactory([s(Dummy, locks=[l1])])
-f2 = BuildFactory([s(Dummy)])
-
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1 },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f2 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfg2c = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock2')
-f1 = BuildFactory([s(Dummy)])
-f2 = BuildFactory([s(Dummy)])
-
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1 },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f2 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-class ConfigTest(unittest.TestCase):
- def setUp(self):
- self.buildmaster = BuildMaster(".")
-
- def failUnlessListsEquivalent(self, list1, list2):
- l1 = list1[:]
- l1.sort()
- l2 = list2[:]
- l2.sort()
- self.failUnlessEqual(l1, l2)
-
- def servers(self, s, types):
- # perform a recursive search of s.services, looking for instances of
- # twisted.application.internet.TCPServer, then extract their .args
- # values to find the TCP ports they want to listen on
- for child in s:
- if providedBy(child, service.IServiceCollection):
- for gc in self.servers(child, types):
- yield gc
- if isinstance(child, types):
- yield child
-
- def TCPports(self, s):
- return list(self.servers(s, internet.TCPServer))
- def UNIXports(self, s):
- return list(self.servers(s, internet.UNIXServer))
- def TCPclients(self, s):
- return list(self.servers(s, internet.TCPClient))
-
- def checkPorts(self, svc, expected):
- """Verify that the TCPServer and UNIXServer children of the given
- service have the expected portnum/pathname and factory classes. As a
- side-effect, return a list of servers in the same order as the
- 'expected' list. This can be used to verify properties of the
- factories contained therein."""
-
- expTCP = [e for e in expected if type(e[0]) == int]
- expUNIX = [e for e in expected if type(e[0]) == str]
- haveTCP = [(p.args[0], p.args[1].__class__)
- for p in self.TCPports(svc)]
- haveUNIX = [(p.args[0], p.args[1].__class__)
- for p in self.UNIXports(svc)]
- self.failUnlessListsEquivalent(expTCP, haveTCP)
- self.failUnlessListsEquivalent(expUNIX, haveUNIX)
- ret = []
- for e in expected:
- for have in self.TCPports(svc) + self.UNIXports(svc):
- if have.args[0] == e[0]:
- ret.append(have)
- continue
- assert(len(ret) == len(expected))
- return ret
-
- def testEmpty(self):
- self.failUnlessRaises(KeyError, self.buildmaster.loadConfig, "")
-
- def testSimple(self):
- # covers slavePortnum, base checker passwords
- master = self.buildmaster
- master.loadChanges()
-
- master.loadConfig(emptyCfg)
- # note: this doesn't actually start listening, because the app
- # hasn't been started running
- self.failUnlessEqual(master.slavePortnum, "tcp:9999")
- self.checkPorts(master, [(9999, pb.PBServerFactory)])
- self.failUnlessEqual(list(master.change_svc), [])
- self.failUnlessEqual(master.botmaster.builders, {})
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw"})
- self.failUnlessEqual(master.projectName, "dummy project")
- self.failUnlessEqual(master.projectURL, "http://dummy.example.com")
- self.failUnlessEqual(master.buildbotURL,
- "http://dummy.example.com/buildbot")
-
- def testSlavePortnum(self):
- master = self.buildmaster
- master.loadChanges()
-
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.slavePortnum, "tcp:9999")
- ports = self.checkPorts(master, [(9999, pb.PBServerFactory)])
- p = ports[0]
-
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.slavePortnum, "tcp:9999")
- ports = self.checkPorts(master, [(9999, pb.PBServerFactory)])
- self.failUnlessIdentical(p, ports[0],
- "the slave port was changed even " + \
- "though the configuration was not")
-
- master.loadConfig(emptyCfg + "c['slavePortnum'] = 9000\n")
- self.failUnlessEqual(master.slavePortnum, "tcp:9000")
- ports = self.checkPorts(master, [(9000, pb.PBServerFactory)])
- self.failIf(p is ports[0],
- "slave port was unchanged but configuration was changed")
-
- def testBots(self):
- master = self.buildmaster
- master.loadChanges()
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.botmaster.builders, {})
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw"})
- botsCfg = (emptyCfg +
- "c['bots'] = [('bot1', 'pw1'), ('bot2', 'pw2')]\n")
- master.loadConfig(botsCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw",
- "bot1": "pw1",
- "bot2": "pw2"})
- master.loadConfig(botsCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw",
- "bot1": "pw1",
- "bot2": "pw2"})
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw"})
-
-
- def testSources(self):
- if not cvstoys:
- raise unittest.SkipTest("this test needs CVSToys installed")
- master = self.buildmaster
- master.loadChanges()
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(list(master.change_svc), [])
-
- self.sourcesCfg = emptyCfg + \
-"""
-from buildbot.changes.freshcvs import FreshCVSSource
-s1 = FreshCVSSource('cvs.example.com', 1000, 'pname', 'spass',
- prefix='Prefix/')
-c['sources'] = [s1]
-"""
-
- d = master.loadConfig(self.sourcesCfg)
- d.addCallback(self._testSources_1)
- return maybeWait(d)
-
- def _testSources_1(self, res):
- self.failUnlessEqual(len(list(self.buildmaster.change_svc)), 1)
- s1 = list(self.buildmaster.change_svc)[0]
- self.failUnless(isinstance(s1, FreshCVSSource))
- self.failUnlessEqual(s1.host, "cvs.example.com")
- self.failUnlessEqual(s1.port, 1000)
- self.failUnlessEqual(s1.prefix, "Prefix/")
- self.failUnlessEqual(s1, list(self.buildmaster.change_svc)[0])
- self.failUnless(s1.parent)
-
- # verify that unchanged sources are not interrupted
- d = self.buildmaster.loadConfig(self.sourcesCfg)
- d.addCallback(self._testSources_2, s1)
- return d
-
- def _testSources_2(self, res, s1):
- self.failUnlessEqual(len(list(self.buildmaster.change_svc)), 1)
- s2 = list(self.buildmaster.change_svc)[0]
- self.failUnlessIdentical(s1, s2)
- self.failUnless(s1.parent)
-
- # make sure we can get rid of the sources too
- d = self.buildmaster.loadConfig(emptyCfg)
- d.addCallback(self._testSources_3)
- return d
-
- def _testSources_3(self, res):
- self.failUnlessEqual(list(self.buildmaster.change_svc), [])
-
- def shouldBeFailure(self, res, *expected):
- self.failUnless(isinstance(res, failure.Failure),
- "we expected this to fail, not produce %s" % (res,))
- res.trap(*expected)
- return None # all is good
-
- def testSchedulers(self):
- master = self.buildmaster
- master.loadChanges()
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.allSchedulers(), [])
-
- self.schedulersCfg = \
-"""
-from buildbot.scheduler import Scheduler, Dependent
-from buildbot.process.factory import BasicBuildFactory
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = [Scheduler('full', None, 60, ['builder1'])]
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-c['builders'] = [{'name':'builder1', 'slavename':'bot1',
- 'builddir':'workdir', 'factory':f1}]
-c['slavePortnum'] = 9999
-c['projectName'] = 'dummy project'
-c['projectURL'] = 'http://dummy.example.com'
-c['buildbotURL'] = 'http://dummy.example.com/buildbot'
-BuildmasterConfig = c
-"""
-
- # c['schedulers'] must be a list
- badcfg = self.schedulersCfg + \
-"""
-c['schedulers'] = Scheduler('full', None, 60, ['builder1'])
-"""
- d = defer.maybeDeferred(self.buildmaster.loadConfig, badcfg)
- d.addBoth(self._testSchedulers_1)
- return maybeWait(d)
- def _testSchedulers_1(self, res):
- self.shouldBeFailure(res, AssertionError)
- # c['schedulers'] must be a list of IScheduler objects
- badcfg = self.schedulersCfg + \
-"""
-c['schedulers'] = ['oops', 'problem']
-"""
- d = defer.maybeDeferred(self.buildmaster.loadConfig, badcfg)
- d.addBoth(self._testSchedulers_2)
- return d
- def _testSchedulers_2(self, res):
- self.shouldBeFailure(res, AssertionError)
- # c['schedulers'] must point at real builders
- badcfg = self.schedulersCfg + \
-"""
-c['schedulers'] = [Scheduler('full', None, 60, ['builder-bogus'])]
-"""
- d = defer.maybeDeferred(self.buildmaster.loadConfig, badcfg)
- d.addBoth(self._testSchedulers_3)
- return d
- def _testSchedulers_3(self, res):
- self.shouldBeFailure(res, AssertionError)
- d = self.buildmaster.loadConfig(self.schedulersCfg)
- d.addCallback(self._testSchedulers_4)
- return d
- def _testSchedulers_4(self, res):
- sch = self.buildmaster.allSchedulers()
- self.failUnlessEqual(len(sch), 1)
- s = sch[0]
- self.failUnless(isinstance(s, scheduler.Scheduler))
- self.failUnlessEqual(s.name, "full")
- self.failUnlessEqual(s.branch, None)
- self.failUnlessEqual(s.treeStableTimer, 60)
- self.failUnlessEqual(s.builderNames, ['builder1'])
-
- newcfg = self.schedulersCfg + \
-"""
-s1 = Scheduler('full', None, 60, ['builder1'])
-c['schedulers'] = [s1, Dependent('downstream', s1, ['builder1'])]
-"""
- d = self.buildmaster.loadConfig(newcfg)
- d.addCallback(self._testSchedulers_5, newcfg)
- return d
- def _testSchedulers_5(self, res, newcfg):
- sch = self.buildmaster.allSchedulers()
- self.failUnlessEqual(len(sch), 2)
- s = sch[0]
- self.failUnless(isinstance(s, scheduler.Scheduler))
- s = sch[1]
- self.failUnless(isinstance(s, scheduler.Dependent))
- self.failUnlessEqual(s.name, "downstream")
- self.failUnlessEqual(s.builderNames, ['builder1'])
-
- # reloading the same config file should leave the schedulers in place
- d = self.buildmaster.loadConfig(newcfg)
- d.addCallback(self._testschedulers_6, sch)
- return d
- def _testschedulers_6(self, res, sch1):
- sch2 = self.buildmaster.allSchedulers()
- self.failUnlessEqual(len(sch2), 2)
- sch1.sort()
- sch2.sort()
- self.failUnlessEqual(sch1, sch2)
- self.failUnlessIdentical(sch1[0], sch2[0])
- self.failUnlessIdentical(sch1[1], sch2[1])
- self.failUnlessIdentical(sch1[0].parent, self.buildmaster)
- self.failUnlessIdentical(sch1[1].parent, self.buildmaster)
-
-
- def testBuilders(self):
- master = self.buildmaster
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.botmaster.builders, {})
-
- master.loadConfig(buildersCfg)
- self.failUnlessEqual(master.botmaster.builderNames, ["builder1"])
- self.failUnlessEqual(master.botmaster.builders.keys(), ["builder1"])
- b = master.botmaster.builders["builder1"]
- self.failUnless(isinstance(b, Builder))
- self.failUnlessEqual(b.name, "builder1")
- self.failUnlessEqual(b.slavenames, ["bot1"])
- self.failUnlessEqual(b.builddir, "workdir")
- f1 = b.buildFactory
- self.failUnless(isinstance(f1, BasicBuildFactory))
- steps = f1.steps
- self.failUnlessEqual(len(steps), 3)
- self.failUnlessEqual(steps[0], (step.CVS,
- {'cvsroot': 'cvsroot',
- 'cvsmodule': 'cvsmodule',
- 'mode': 'clobber'}))
- self.failUnlessEqual(steps[1], (step.Compile,
- {'command': 'make all'}))
- self.failUnlessEqual(steps[2], (step.Test,
- {'command': 'make check'}))
-
-
- # make sure a reload of the same data doesn't interrupt the Builder
- master.loadConfig(buildersCfg)
- self.failUnlessEqual(master.botmaster.builderNames, ["builder1"])
- self.failUnlessEqual(master.botmaster.builders.keys(), ["builder1"])
- b2 = master.botmaster.builders["builder1"]
- self.failUnlessIdentical(b, b2)
- # TODO: test that the BuilderStatus object doesn't change
- #statusbag2 = master.client_svc.statusbags["builder1"]
- #self.failUnlessIdentical(statusbag, statusbag2)
-
- # but changing something should result in a new Builder
- master.loadConfig(buildersCfg2)
- self.failUnlessEqual(master.botmaster.builderNames, ["builder1"])
- self.failUnlessEqual(master.botmaster.builders.keys(), ["builder1"])
- b3 = master.botmaster.builders["builder1"]
- self.failIf(b is b3)
- # the statusbag remains the same TODO
- #statusbag3 = master.client_svc.statusbags["builder1"]
- #self.failUnlessIdentical(statusbag, statusbag3)
-
- # adding new builder
- master.loadConfig(buildersCfg3)
- self.failUnlessEqual(master.botmaster.builderNames, ["builder1",
- "builder2"])
- self.failUnlessListsEquivalent(master.botmaster.builders.keys(),
- ["builder1", "builder2"])
- b4 = master.botmaster.builders["builder1"]
- self.failUnlessIdentical(b3, b4)
-
- # changing first builder should leave it at the same place in the list
- master.loadConfig(buildersCfg4)
- self.failUnlessEqual(master.botmaster.builderNames, ["builder1",
- "builder2"])
- self.failUnlessListsEquivalent(master.botmaster.builders.keys(),
- ["builder1", "builder2"])
- b5 = master.botmaster.builders["builder1"]
- self.failIf(b4 is b5)
-
- # and removing it should make the Builder go away
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.botmaster.builderNames, [])
- self.failUnlessEqual(master.botmaster.builders, {})
- #self.failUnlessEqual(master.client_svc.statusbags, {}) # TODO
-
- def checkIRC(self, m, expected):
- ircs = {}
- for irc in self.servers(m, words.IRC):
- ircs[irc.host] = (irc.nick, irc.channels)
- self.failUnlessEqual(ircs, expected)
-
- def testIRC(self):
- if not words:
- raise unittest.SkipTest("Twisted Words package is not installed")
- master = self.buildmaster
- master.loadChanges()
- d = master.loadConfig(emptyCfg)
- e1 = {}
- d.addCallback(lambda res: self.checkIRC(master, e1))
- d.addCallback(lambda res: master.loadConfig(ircCfg1))
- e2 = {'irc.us.freenode.net': ('buildbot', ['twisted'])}
- d.addCallback(lambda res: self.checkIRC(master, e2))
- d.addCallback(lambda res: master.loadConfig(ircCfg2))
- e3 = {'irc.us.freenode.net': ('buildbot', ['twisted']),
- 'irc.example.com': ('otherbot', ['chan1', 'chan2'])}
- d.addCallback(lambda res: self.checkIRC(master, e3))
- d.addCallback(lambda res: master.loadConfig(ircCfg3))
- e4 = {'irc.us.freenode.net': ('buildbot', ['knotted'])}
- d.addCallback(lambda res: self.checkIRC(master, e4))
- d.addCallback(lambda res: master.loadConfig(ircCfg1))
- e5 = {'irc.us.freenode.net': ('buildbot', ['twisted'])}
- d.addCallback(lambda res: self.checkIRC(master, e5))
- return maybeWait(d)
-
- def testWebPortnum(self):
- master = self.buildmaster
- master.loadChanges()
-
- d = master.loadConfig(webCfg1)
- d.addCallback(self._testWebPortnum_1)
- return maybeWait(d)
- def _testWebPortnum_1(self, res):
- ports = self.checkPorts(self.buildmaster, [(9999, pb.PBServerFactory),
- (9980, Site)])
- p = ports[1]
-
- d = self.buildmaster.loadConfig(webCfg1) # nothing should be changed
- d.addCallback(self._testWebPortnum_2, p)
- return d
- def _testWebPortnum_2(self, res, p):
- ports = self.checkPorts(self.buildmaster, [(9999, pb.PBServerFactory),
- (9980, Site)])
- self.failUnlessIdentical(p, ports[1],
- "web port was changed even though " + \
- "configuration was not")
-
- d = self.buildmaster.loadConfig(webCfg2) # changes to 9981
- d.addCallback(self._testWebPortnum_3, p)
- return d
- def _testWebPortnum_3(self, res, p):
- ports = self.checkPorts(self.buildmaster, [(9999, pb.PBServerFactory),
- (9981, Site)])
- self.failIf(p is ports[1],
- "configuration was changed but web port was unchanged")
- d = self.buildmaster.loadConfig(webCfg3) # 9981 on only localhost
- d.addCallback(self._testWebPortnum_4, ports[1])
- return d
- def _testWebPortnum_4(self, res, p):
- ports = self.checkPorts(self.buildmaster, [(9999, pb.PBServerFactory),
- (9981, Site)])
- self.failUnlessEqual(ports[1].kwargs['interface'], "127.0.0.1")
- d = self.buildmaster.loadConfig(emptyCfg)
- d.addCallback(lambda res:
- self.checkPorts(self.buildmaster,
- [(9999, pb.PBServerFactory)]))
- return d
-
- def testWebPathname(self):
- master = self.buildmaster
- master.loadChanges()
-
- d = master.loadConfig(webNameCfg1)
- d.addCallback(self._testWebPathname_1)
- return maybeWait(d)
- def _testWebPathname_1(self, res):
- self.checkPorts(self.buildmaster,
- [(9999, pb.PBServerFactory),
- ('~/.twistd-web-pb', pb.PBServerFactory)])
- unixports = self.UNIXports(self.buildmaster)
- f = unixports[0].args[1]
- self.failUnless(isinstance(f.root, ResourcePublisher))
-
- d = self.buildmaster.loadConfig(webNameCfg1)
- # nothing should be changed
- d.addCallback(self._testWebPathname_2, f)
- return d
- def _testWebPathname_2(self, res, f):
- self.checkPorts(self.buildmaster,
- [(9999, pb.PBServerFactory),
- ('~/.twistd-web-pb', pb.PBServerFactory)])
- self.failUnlessIdentical(f,
- self.UNIXports(self.buildmaster)[0].args[1],
- "web factory was changed even though " + \
- "configuration was not")
-
- d = self.buildmaster.loadConfig(webNameCfg2)
- d.addCallback(self._testWebPathname_3, f)
- return d
- def _testWebPathname_3(self, res, f):
- self.checkPorts(self.buildmaster,
- [(9999, pb.PBServerFactory),
- ('./bar.socket', pb.PBServerFactory)])
- self.failIf(f is self.UNIXports(self.buildmaster)[0].args[1],
- "web factory was unchanged but configuration was changed")
-
- d = self.buildmaster.loadConfig(emptyCfg)
- d.addCallback(lambda res:
- self.checkPorts(self.buildmaster,
- [(9999, pb.PBServerFactory)]))
- return d
-
- def testDebugPassword(self):
- master = self.buildmaster
-
- master.loadConfig(debugPasswordCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw",
- "debug": "sekrit"})
-
- master.loadConfig(debugPasswordCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw",
- "debug": "sekrit"})
-
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw"})
-
- def testLocks(self):
- master = self.buildmaster
- botmaster = master.botmaster
-
- # make sure that c['interlocks'] is rejected properly
- self.failUnlessRaises(KeyError, master.loadConfig, interlockCfgBad)
- # and that duplicate-named Locks are caught
- self.failUnlessRaises(ValueError, master.loadConfig, lockCfgBad1)
- self.failUnlessRaises(ValueError, master.loadConfig, lockCfgBad2)
- self.failUnlessRaises(ValueError, master.loadConfig, lockCfgBad3)
-
- # create a Builder that uses Locks
- master.loadConfig(lockCfg1a)
- b1 = master.botmaster.builders["builder1"]
- self.failUnlessEqual(len(b1.locks), 2)
-
- # reloading the same config should not change the Builder
- master.loadConfig(lockCfg1a)
- self.failUnlessIdentical(b1, master.botmaster.builders["builder1"])
- # but changing the set of locks used should change it
- master.loadConfig(lockCfg1b)
- self.failIfIdentical(b1, master.botmaster.builders["builder1"])
- b1 = master.botmaster.builders["builder1"]
- self.failUnlessEqual(len(b1.locks), 1)
-
- # similar test with step-scoped locks
- master.loadConfig(lockCfg2a)
- b1 = master.botmaster.builders["builder1"]
- # reloading the same config should not change the Builder
- master.loadConfig(lockCfg2a)
- self.failUnlessIdentical(b1, master.botmaster.builders["builder1"])
- # but changing the set of locks used should change it
- master.loadConfig(lockCfg2b)
- self.failIfIdentical(b1, master.botmaster.builders["builder1"])
- b1 = master.botmaster.builders["builder1"]
- # remove the locks entirely
- master.loadConfig(lockCfg2c)
- self.failIfIdentical(b1, master.botmaster.builders["builder1"])
-
-class ConfigElements(unittest.TestCase):
- # verify that ComparableMixin is working
- def testSchedulers(self):
- s1 = scheduler.Scheduler(name='quick', branch=None,
- treeStableTimer=30,
- builderNames=['quick'])
- s2 = scheduler.Scheduler(name="all", branch=None,
- treeStableTimer=5*60,
- builderNames=["a", "b"])
- s3 = scheduler.Try_Userpass("try", ["a","b"], port=9989,
- userpass=[("foo","bar")])
- s1a = scheduler.Scheduler(name='quick', branch=None,
- treeStableTimer=30,
- builderNames=['quick'])
- s2a = scheduler.Scheduler(name="all", branch=None,
- treeStableTimer=5*60,
- builderNames=["a", "b"])
- s3a = scheduler.Try_Userpass("try", ["a","b"], port=9989,
- userpass=[("foo","bar")])
- self.failUnless(s1 == s1)
- self.failUnless(s1 == s1a)
- self.failUnless(s1a in [s1, s2, s3])
- self.failUnless(s2a in [s1, s2, s3])
- self.failUnless(s3a in [s1, s2, s3])
-
-
-
-class ConfigFileTest(unittest.TestCase):
-
- def testFindConfigFile(self):
- os.mkdir("test_cf")
- open(os.path.join("test_cf", "master.cfg"), "w").write(emptyCfg)
- slaveportCfg = emptyCfg + "c['slavePortnum'] = 9000\n"
- open(os.path.join("test_cf", "alternate.cfg"), "w").write(slaveportCfg)
-
- m = BuildMaster("test_cf")
- m.loadTheConfigFile()
- self.failUnlessEqual(m.slavePortnum, "tcp:9999")
-
- m = BuildMaster("test_cf", "alternate.cfg")
- m.loadTheConfigFile()
- self.failUnlessEqual(m.slavePortnum, "tcp:9000")
-
-
-class MyTarget(base.StatusReceiverMultiService):
- def __init__(self, name):
- self.name = name
- base.StatusReceiverMultiService.__init__(self)
- def startService(self):
- # make a note in a list stashed in the BuildMaster
- self.parent.targetevents.append(("start", self.name))
- return base.StatusReceiverMultiService.startService(self)
- def stopService(self):
- self.parent.targetevents.append(("stop", self.name))
- return base.StatusReceiverMultiService.stopService(self)
-
-class MySlowTarget(MyTarget):
- def stopService(self):
- from twisted.internet import reactor
- d = base.StatusReceiverMultiService.stopService(self)
- def stall(res):
- d2 = defer.Deferred()
- reactor.callLater(0.1, d2.callback, res)
- return d2
- d.addCallback(stall)
- m = self.parent
- def finishedStalling(res):
- m.targetevents.append(("stop", self.name))
- return res
- d.addCallback(finishedStalling)
- return d
-
-# we can't actually startService a buildmaster with a config that uses a
-# fixed slavePortnum like 9999, so instead this makes it possible to pass '0'
-# for the first time, and then substitute back in the allocated port number
-# on subsequent passes.
-startableEmptyCfg = emptyCfg + \
-"""
-c['slavePortnum'] = %d
-"""
-
-targetCfg1 = startableEmptyCfg + \
-"""
-from buildbot.test.test_config import MyTarget
-c['status'] = [MyTarget('a')]
-"""
-
-targetCfg2 = startableEmptyCfg + \
-"""
-from buildbot.test.test_config import MySlowTarget
-c['status'] = [MySlowTarget('b')]
-"""
-
-class StartService(unittest.TestCase):
- def tearDown(self):
- return self.master.stopService()
-
- def testStartService(self):
- os.mkdir("test_ss")
- self.master = m = BuildMaster("test_ss")
- m.startService()
- d = m.loadConfig(startableEmptyCfg % 0)
- d.addCallback(self._testStartService_0)
- return maybeWait(d)
-
- def _testStartService_0(self, res):
- m = self.master
- m.targetevents = []
- # figure out what port got allocated
- self.portnum = m.slavePort._port.getHost().port
- d = m.loadConfig(targetCfg1 % self.portnum)
- d.addCallback(self._testStartService_1)
- return d
-
- def _testStartService_1(self, res):
- self.failUnlessEqual(len(self.master.statusTargets), 1)
- self.failUnless(isinstance(self.master.statusTargets[0], MyTarget))
- self.failUnlessEqual(self.master.targetevents,
- [('start', 'a')])
- self.master.targetevents = []
- # reloading the same config should not start or stop the target
- d = self.master.loadConfig(targetCfg1 % self.portnum)
- d.addCallback(self._testStartService_2)
- return d
-
- def _testStartService_2(self, res):
- self.failUnlessEqual(self.master.targetevents, [])
- # but loading a new config file should stop the old one, then
- # start the new one
- d = self.master.loadConfig(targetCfg2 % self.portnum)
- d.addCallback(self._testStartService_3)
- return d
-
- def _testStartService_3(self, res):
- self.failUnlessEqual(self.master.targetevents,
- [('stop', 'a'), ('start', 'b')])
- self.master.targetevents = []
- # and going back to the old one should do the same, in the same
- # order, even though the current MySlowTarget takes a moment to shut
- # down
- d = self.master.loadConfig(targetCfg1 % self.portnum)
- d.addCallback(self._testStartService_4)
- return d
-
- def _testStartService_4(self, res):
- self.failUnlessEqual(self.master.targetevents,
- [('stop', 'b'), ('start', 'a')])
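Most of the cases above repeat one pattern: reloading identical config text must leave the existing objects in place, while changed text must replace them. A condensed sketch of that check, using the builder config strings defined earlier in this file:

    master = BuildMaster(".")
    master.loadConfig(buildersCfg)
    b1 = master.botmaster.builders["builder1"]

    master.loadConfig(buildersCfg)              # identical text: same Builder object
    assert b1 is master.botmaster.builders["builder1"]

    master.loadConfig(buildersCfg2)             # changed factory: Builder is replaced
    assert b1 is not master.botmaster.builders["builder1"]
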
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_control.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_control.py
deleted file mode 100644
index 42cd1ece5..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_control.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# -*- test-case-name: buildbot.test.test_control -*-
-
-import sys, os, signal, shutil, time, errno
-
-from twisted.trial import unittest
-from twisted.internet import defer, reactor
-
-from buildbot import master, interfaces
-from buildbot.sourcestamp import SourceStamp
-from buildbot.twcompat import providedBy, maybeWait
-from buildbot.slave import bot
-from buildbot.status import builder
-from buildbot.status.builder import SUCCESS
-from buildbot.process import base
-
-config = """
-from buildbot.process import factory, step
-
-def s(klass, **kwargs):
- return (klass, kwargs)
-
-f1 = factory.BuildFactory([
- s(step.Dummy, timeout=1),
- ])
-c = {}
-c['bots'] = [['bot1', 'sekrit']]
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = [{'name': 'force', 'slavename': 'bot1',
- 'builddir': 'force-dir', 'factory': f1}]
-c['slavePortnum'] = 0
-BuildmasterConfig = c
-"""
-
-class FakeBuilder:
- name = "fake"
- def getSlaveCommandVersion(self, command, oldversion=None):
- return "1.10"
-
-class SignalMixin:
- sigchldHandler = None
-
- def setUpClass(self):
- # make sure SIGCHLD handler is installed, as it should be on
- # reactor.run(). problem is reactor may not have been run when this
- # test runs.
- if hasattr(reactor, "_handleSigchld") and hasattr(signal, "SIGCHLD"):
- self.sigchldHandler = signal.signal(signal.SIGCHLD,
- reactor._handleSigchld)
-
- def tearDownClass(self):
- if self.sigchldHandler:
- signal.signal(signal.SIGCHLD, self.sigchldHandler)
-
-class Force(unittest.TestCase):
-
- def rmtree(self, d):
- try:
- shutil.rmtree(d, ignore_errors=1)
- except OSError, e:
-            # Python 2.2 appears to ignore ignore_errors
- if e.errno != errno.ENOENT:
- raise
-
- def setUp(self):
- self.master = None
- self.slave = None
- self.rmtree("control_basedir")
- os.mkdir("control_basedir")
- self.master = master.BuildMaster("control_basedir")
- self.slavebase = os.path.abspath("control_slavebase")
- self.rmtree(self.slavebase)
- os.mkdir("control_slavebase")
-
- def connectSlave(self):
- port = self.master.slavePort._port.getHost().port
- slave = bot.BuildSlave("localhost", port, "bot1", "sekrit",
- self.slavebase, keepalive=0, usePTY=1)
- self.slave = slave
- slave.startService()
- d = self.master.botmaster.waitUntilBuilderAttached("force")
- return d
-
- def tearDown(self):
- dl = []
- if self.slave:
- dl.append(self.master.botmaster.waitUntilBuilderDetached("force"))
- dl.append(defer.maybeDeferred(self.slave.stopService))
- if self.master:
- dl.append(defer.maybeDeferred(self.master.stopService))
- return maybeWait(defer.DeferredList(dl))
-
- def testForce(self):
- # TODO: since BuilderControl.forceBuild has been deprecated, this
- # test is scheduled to be removed soon
- m = self.master
- m.loadConfig(config)
- m.startService()
- d = self.connectSlave()
- d.addCallback(self._testForce_1)
- return maybeWait(d)
-
- def _testForce_1(self, res):
- c = interfaces.IControl(self.master)
- builder_control = c.getBuilder("force")
- d = builder_control.forceBuild("bob", "I was bored")
- d.addCallback(self._testForce_2)
- return d
-
- def _testForce_2(self, build_control):
- self.failUnless(providedBy(build_control, interfaces.IBuildControl))
- d = build_control.getStatus().waitUntilFinished()
- d.addCallback(self._testForce_3)
- return d
-
- def _testForce_3(self, bs):
- self.failUnless(providedBy(bs, interfaces.IBuildStatus))
- self.failUnless(bs.isFinished())
- self.failUnlessEqual(bs.getResults(), SUCCESS)
- #self.failUnlessEqual(bs.getResponsibleUsers(), ["bob"]) # TODO
- self.failUnlessEqual(bs.getChanges(), [])
- #self.failUnlessEqual(bs.getReason(), "forced") # TODO
-
- def testRequest(self):
- m = self.master
- m.loadConfig(config)
- m.startService()
- d = self.connectSlave()
- d.addCallback(self._testRequest_1)
- return maybeWait(d)
- def _testRequest_1(self, res):
- c = interfaces.IControl(self.master)
- req = base.BuildRequest("I was bored", SourceStamp())
- builder_control = c.getBuilder("force")
- d = defer.Deferred()
- req.subscribe(d.callback)
- builder_control.requestBuild(req)
- d.addCallback(self._testForce_2)
- # we use the same check-the-results code as testForce
- return d
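The deleted control-interface tests above (class Force) exercise both ways of starting a build: the deprecated BuilderControl.forceBuild(who, reason) and its replacement, requestBuild() with an explicit BuildRequest. The sketch below is assembled only from calls that appear in the tests; the helper name and the default reason string are illustrative and not part of this commit.

from twisted.internet import defer
from buildbot import interfaces
from buildbot.sourcestamp import SourceStamp
from buildbot.process.base import BuildRequest

def request_build(master, builder_name, reason="forced build"):
    # adapt the running BuildMaster to its control interface
    control = interfaces.IControl(master)
    # a bare SourceStamp asks for the latest revision of the default branch
    req = BuildRequest(reason, SourceStamp())
    # the Deferred fires with an IBuildControl once the build starts
    d = defer.Deferred()
    req.subscribe(d.callback)
    control.getBuilder(builder_name).requestBuild(req)
    return d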
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_dependencies.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_dependencies.py
deleted file mode 100644
index 6871adcf2..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_dependencies.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# -*- test-case-name: buildbot.test.test_dependencies -*-
-
-from twisted.trial import unittest
-
-from twisted.internet import reactor, defer
-
-from buildbot import interfaces
-from buildbot.process import step
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process.base import BuildRequest
-from buildbot.test.runutils import RunMixin
-from buildbot.twcompat import maybeWait
-from buildbot.status import base
-
-config_1 = """
-from buildbot import scheduler
-from buildbot.process import step, factory
-s = factory.s
-from buildbot.test.test_locks import LockStep
-
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'sekrit'), ('bot2', 'sekrit')]
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 0
-
-# upstream1 (fastfail, slowpass)
-# -> downstream2 (b3, b4)
-# upstream3 (fastpass, slowpass)
-# -> downstream4 (b3, b4)
-#      -> downstream5 (b5)
-
-s1 = scheduler.Scheduler('upstream1', None, 10, ['slowpass', 'fastfail'])
-s2 = scheduler.Dependent('downstream2', s1, ['b3', 'b4'])
-s3 = scheduler.Scheduler('upstream3', None, 10, ['fastpass', 'slowpass'])
-s4 = scheduler.Dependent('downstream4', s3, ['b3', 'b4'])
-s5 = scheduler.Dependent('downstream5', s4, ['b5'])
-c['schedulers'] = [s1, s2, s3, s4, s5]
-
-f_fastpass = factory.BuildFactory([s(step.Dummy, timeout=1)])
-f_slowpass = factory.BuildFactory([s(step.Dummy, timeout=2)])
-f_fastfail = factory.BuildFactory([s(step.FailingDummy, timeout=1)])
-
-def builder(name, f):
- d = {'name': name, 'slavename': 'bot1', 'builddir': name, 'factory': f}
- return d
-
-c['builders'] = [builder('slowpass', f_slowpass),
- builder('fastfail', f_fastfail),
- builder('fastpass', f_fastpass),
- builder('b3', f_fastpass),
- builder('b4', f_fastpass),
- builder('b5', f_fastpass),
- ]
-"""
-
-class Logger(base.StatusReceiverMultiService):
- def __init__(self, master):
- base.StatusReceiverMultiService.__init__(self)
- self.builds = []
- for bn in master.status.getBuilderNames():
- master.status.getBuilder(bn).subscribe(self)
-
- def buildStarted(self, builderName, build):
- self.builds.append(builderName)
-
-class Dependencies(RunMixin, unittest.TestCase):
- def setUp(self):
- RunMixin.setUp(self)
- self.master.loadConfig(config_1)
- self.master.startService()
- d = self.connectSlave(["slowpass", "fastfail", "fastpass",
- "b3", "b4", "b5"])
- return maybeWait(d)
-
- def findScheduler(self, name):
- for s in self.master.allSchedulers():
- if s.name == name:
- return s
- raise KeyError("No Scheduler named '%s'" % name)
-
- def testParse(self):
- self.master.loadConfig(config_1)
- # that's it, just make sure this config file is loaded successfully
-
- def testRun_Fail(self):
- # add an extra status target so we can pay attention to which builds
- # start and which don't.
- self.logger = Logger(self.master)
-
- # kick off upstream1, which has a failing Builder and thus will not
- # trigger downstream2
- s = self.findScheduler("upstream1")
- # this is an internal function of the Scheduler class
- s.fireTimer() # fires a build
- # t=0: two builders start: 'slowpass' and 'fastfail'
- # t=1: builder 'fastfail' finishes
- # t=2: builder 'slowpass' finishes
- d = defer.Deferred()
- d.addCallback(self._testRun_Fail_1)
- reactor.callLater(5, d.callback, None)
- return maybeWait(d)
-
- def _testRun_Fail_1(self, res):
- # 'slowpass' and 'fastfail' should have run one build each
- b = self.status.getBuilder('slowpass').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
- b = self.status.getBuilder('fastfail').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
- # none of the other builders should have run
- self.failIf(self.status.getBuilder('b3').getLastFinishedBuild())
- self.failIf(self.status.getBuilder('b4').getLastFinishedBuild())
- self.failIf(self.status.getBuilder('b5').getLastFinishedBuild())
-
- # in fact, none of them should have even started
- self.failUnlessEqual(len(self.logger.builds), 2)
- self.failUnless("slowpass" in self.logger.builds)
- self.failUnless("fastfail" in self.logger.builds)
- self.failIf("b3" in self.logger.builds)
- self.failIf("b4" in self.logger.builds)
- self.failIf("b5" in self.logger.builds)
-
- def testRun_Pass(self):
- # kick off upstream3, which will fire downstream4 and then
- # downstream5
- s = self.findScheduler("upstream3")
- # this is an internal function of the Scheduler class
- s.fireTimer() # fires a build
- # t=0: slowpass and fastpass start
- # t=1: builder 'fastpass' finishes
- # t=2: builder 'slowpass' finishes
- # scheduler 'downstream4' fires
- # builds b3 and b4 are started
- # t=3: builds b3 and b4 finish
- # scheduler 'downstream5' fires
- # build b5 is started
- # t=4: build b5 is finished
- d = defer.Deferred()
- d.addCallback(self._testRun_Pass_1)
- reactor.callLater(5, d.callback, None)
- return maybeWait(d)
-
- def _testRun_Pass_1(self, res):
- # 'fastpass' and 'slowpass' should have run one build each
- b = self.status.getBuilder('fastpass').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
- b = self.status.getBuilder('slowpass').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
- self.failIf(self.status.getBuilder('fastfail').getLastFinishedBuild())
-
- b = self.status.getBuilder('b3').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
- b = self.status.getBuilder('b4').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
- b = self.status.getBuilder('b5').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
-
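test_dependencies.py drives the Dependent scheduler, which fires its own builders only after every builder of its upstream Scheduler has succeeded: upstream1's failing fastfail keeps downstream2 from firing, while upstream3's all-green builders cascade through downstream4 into downstream5. As a minimal hypothetical master.cfg using the same two scheduler classes (the builder names 'compile' and 'package' are invented for illustration):

from buildbot import scheduler
from buildbot.process import factory, step

s = factory.s
f_dummy = factory.BuildFactory([s(step.Dummy, timeout=1)])

BuildmasterConfig = c = {}
c['bots'] = [('bot1', 'sekrit')]
c['sources'] = []
# 'commit' waits 10 seconds of tree-stable time, then runs 'compile';
# 'after-commit' runs 'package' only once 'compile' has succeeded
upstream = scheduler.Scheduler('commit', None, 10, ['compile'])
downstream = scheduler.Dependent('after-commit', upstream, ['package'])
c['schedulers'] = [upstream, downstream]
c['builders'] = [
    {'name': 'compile', 'slavename': 'bot1',
     'builddir': 'compile', 'factory': f_dummy},
    {'name': 'package', 'slavename': 'bot1',
     'builddir': 'package', 'factory': f_dummy},
    ]
c['slavePortnum'] = 0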
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_locks.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_locks.py
deleted file mode 100644
index 2a3ec58d7..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_locks.py
+++ /dev/null
@@ -1,165 +0,0 @@
-# -*- test-case-name: buildbot.test.test_locks -*-
-
-from twisted.trial import unittest
-from twisted.internet import defer
-
-from buildbot import interfaces
-from buildbot.process import step
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process.base import BuildRequest
-from buildbot.test.runutils import RunMixin
-from buildbot.twcompat import maybeWait
-
-class LockStep(step.Dummy):
- def start(self):
- number = self.build.requests[0].number
- self.build.requests[0].events.append(("start", number))
- step.Dummy.start(self)
- def done(self):
- number = self.build.requests[0].number
- self.build.requests[0].events.append(("done", number))
- step.Dummy.done(self)
-
-config_1 = """
-from buildbot import locks
-from buildbot.process import step, factory
-s = factory.s
-from buildbot.test.test_locks import LockStep
-
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'sekrit'), ('bot2', 'sekrit')]
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 0
-
-first_lock = locks.SlaveLock('first')
-second_lock = locks.MasterLock('second')
-f1 = factory.BuildFactory([s(LockStep, timeout=2, locks=[first_lock])])
-f2 = factory.BuildFactory([s(LockStep, timeout=3, locks=[second_lock])])
-f3 = factory.BuildFactory([s(LockStep, timeout=2, locks=[])])
-
-b1a = {'name': 'full1a', 'slavename': 'bot1', 'builddir': '1a', 'factory': f1}
-b1b = {'name': 'full1b', 'slavename': 'bot1', 'builddir': '1b', 'factory': f1}
-b1c = {'name': 'full1c', 'slavename': 'bot1', 'builddir': '1c', 'factory': f3,
- 'locks': [first_lock, second_lock]}
-b1d = {'name': 'full1d', 'slavename': 'bot1', 'builddir': '1d', 'factory': f2}
-b2a = {'name': 'full2a', 'slavename': 'bot2', 'builddir': '2a', 'factory': f1}
-b2b = {'name': 'full2b', 'slavename': 'bot2', 'builddir': '2b', 'factory': f3,
- 'locks': [second_lock]}
-c['builders'] = [b1a, b1b, b1c, b1d, b2a, b2b]
-"""
-
-config_1a = config_1 + \
-"""
-b1b = {'name': 'full1b', 'slavename': 'bot1', 'builddir': '1B', 'factory': f1}
-c['builders'] = [b1a, b1b, b1c, b1d, b2a, b2b]
-"""
-
-
-class Locks(RunMixin, unittest.TestCase):
- def setUp(self):
- RunMixin.setUp(self)
- self.req1 = req1 = BuildRequest("forced build", SourceStamp())
- req1.number = 1
- self.req2 = req2 = BuildRequest("forced build", SourceStamp())
- req2.number = 2
- self.req3 = req3 = BuildRequest("forced build", SourceStamp())
- req3.number = 3
- req1.events = req2.events = req3.events = self.events = []
- d = self.master.loadConfig(config_1)
- d.addCallback(lambda res: self.master.startService())
- d.addCallback(lambda res: self.connectSlaves(["bot1", "bot2"],
- ["full1a", "full1b",
- "full1c", "full1d",
- "full2a", "full2b"]))
- return maybeWait(d)
-
- def testLock1(self):
- self.control.getBuilder("full1a").requestBuild(self.req1)
- self.control.getBuilder("full1b").requestBuild(self.req2)
- d = defer.DeferredList([self.req1.waitUntilFinished(),
- self.req2.waitUntilFinished()])
- d.addCallback(self._testLock1_1)
- return maybeWait(d)
-
- def _testLock1_1(self, res):
- # full1a should complete its step before full1b starts it
- self.failUnlessEqual(self.events,
- [("start", 1), ("done", 1),
- ("start", 2), ("done", 2)])
-
- def testLock1a(self):
- # just like testLock1, but we reload the config file first, with a
- # change that causes full1b to be changed. This tickles a design bug
- # in which full1a and full1b wind up with distinct Lock instances.
- d = self.master.loadConfig(config_1a)
- d.addCallback(self._testLock1a_1)
- return maybeWait(d)
- def _testLock1a_1(self, res):
- self.control.getBuilder("full1a").requestBuild(self.req1)
- self.control.getBuilder("full1b").requestBuild(self.req2)
- d = defer.DeferredList([self.req1.waitUntilFinished(),
- self.req2.waitUntilFinished()])
- d.addCallback(self._testLock1a_2)
- return d
-
- def _testLock1a_2(self, res):
- # full1a should complete its step before full1b starts it
- self.failUnlessEqual(self.events,
- [("start", 1), ("done", 1),
- ("start", 2), ("done", 2)])
-
- def testLock2(self):
- # two builds run on separate slaves with slave-scoped locks should
- # not interfere
- self.control.getBuilder("full1a").requestBuild(self.req1)
- self.control.getBuilder("full2a").requestBuild(self.req2)
- d = defer.DeferredList([self.req1.waitUntilFinished(),
- self.req2.waitUntilFinished()])
- d.addCallback(self._testLock2_1)
- return maybeWait(d)
-
- def _testLock2_1(self, res):
- # full2a should start its step before full1a finishes it. They run on
- # different slaves, however, so they might start in either order.
- self.failUnless(self.events[:2] == [("start", 1), ("start", 2)] or
- self.events[:2] == [("start", 2), ("start", 1)])
-
- def testLock3(self):
- # two builds run on separate slaves with master-scoped locks should
- # not overlap
- self.control.getBuilder("full1c").requestBuild(self.req1)
- self.control.getBuilder("full2b").requestBuild(self.req2)
- d = defer.DeferredList([self.req1.waitUntilFinished(),
- self.req2.waitUntilFinished()])
- d.addCallback(self._testLock3_1)
- return maybeWait(d)
-
- def _testLock3_1(self, res):
- # full2b should not start until after full1c finishes. The builds run
- # on different slaves, so we can't really predict which will start
- # first. The important thing is that they don't overlap.
- self.failUnless(self.events == [("start", 1), ("done", 1),
- ("start", 2), ("done", 2)]
- or self.events == [("start", 2), ("done", 2),
- ("start", 1), ("done", 1)]
- )
-
- def testLock4(self):
- self.control.getBuilder("full1a").requestBuild(self.req1)
- self.control.getBuilder("full1c").requestBuild(self.req2)
- self.control.getBuilder("full1d").requestBuild(self.req3)
- d = defer.DeferredList([self.req1.waitUntilFinished(),
- self.req2.waitUntilFinished(),
- self.req3.waitUntilFinished()])
- d.addCallback(self._testLock4_1)
- return maybeWait(d)
-
- def _testLock4_1(self, res):
- # full1a starts, then full1d starts (because they do not interfere).
- # Once both are done, full1c can run.
- self.failUnlessEqual(self.events,
- [("start", 1), ("start", 3),
- ("done", 1), ("done", 3),
- ("start", 2), ("done", 2)])
-
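test_locks.py covers the two lock scopes in this release: a SlaveLock serializes its holders per slave, a MasterLock serializes them across the whole buildmaster, and either kind can be attached to a single step (the locks= argument) or to a whole builder (the 'locks' key in the builder dict). The hypothetical fragment below mirrors config_1; the lock and builder names are invented, and attaching locks to a plain ShellCommand is assumed to behave like the Dummy-based LockStep above.

from buildbot import locks
from buildbot.process import factory, step

s = factory.s
db_lock = locks.MasterLock('database')         # one holder across the whole master
scratch_lock = locks.SlaveLock('scratch-disk') # one holder per slave

# step-level lock: only one build per slave runs this command at a time
f_build = factory.BuildFactory([
    s(step.ShellCommand, command=['make', 'all'], locks=[scratch_lock]),
    ])

BuildmasterConfig = c = {}
c['bots'] = [('bot1', 'sekrit')]
c['sources'] = []
c['schedulers'] = []
# builder-level lock: the whole 'deploy' build holds the master-wide lock
c['builders'] = [{'name': 'deploy', 'slavename': 'bot1',
                  'builddir': 'deploy', 'factory': f_build,
                  'locks': [db_lock]}]
c['slavePortnum'] = 0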
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_maildir.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_maildir.py
deleted file mode 100644
index 40819b9e6..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_maildir.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# -*- test-case-name: buildbot.test.test_maildir -*-
-
-from twisted.trial import unittest
-import os, shutil
-from buildbot.changes.mail import FCMaildirSource
-from twisted.internet import reactor
-from twisted.python import util
-
-class MaildirTest(unittest.TestCase):
- def setUp(self):
- print "creating empty maildir"
- self.maildir = "test-maildir"
- if os.path.isdir(self.maildir):
- shutil.rmtree(self.maildir)
- print "removing stale maildir"
- os.mkdir(self.maildir)
- os.mkdir(os.path.join(self.maildir, "cur"))
- os.mkdir(os.path.join(self.maildir, "new"))
- os.mkdir(os.path.join(self.maildir, "tmp"))
- self.source = None
- self.done = 0
-
- def tearDown(self):
- print "removing old maildir"
- shutil.rmtree(self.maildir)
- if self.source:
- self.source.stopService()
-
- def addChange(self, c):
- # NOTE: this assumes every message results in a Change, which isn't
- # true for msg8-prefix
- print "got change"
- self.changes.append(c)
-
- def deliverMail(self, msg):
- print "delivering", msg
- newdir = os.path.join(self.maildir, "new")
- # to do this right, use safecat
- shutil.copy(msg, newdir)
-
- def do_timeout(self):
- self.done = 1
-
- def testMaildir(self):
- self.changes = []
- s = self.source = FCMaildirSource(self.maildir)
- s.parent = self
- s.startService()
- testfiles_dir = util.sibpath(__file__, "mail")
- testfiles = [msg for msg in os.listdir(testfiles_dir)
- if msg.startswith("msg")]
- testfiles.sort()
- count = len(testfiles)
- for i in range(count):
- msg = testfiles[i]
- reactor.callLater(2*i, self.deliverMail,
- os.path.join(testfiles_dir, msg))
- t = reactor.callLater(2*i + 15, self.do_timeout)
- while not (self.done or len(self.changes) == count):
- reactor.iterate(0.1)
- s.stopService()
- if self.done:
- return self.fail("timeout: messages weren't received on time")
- t.cancel()
- # TODO: verify the messages, should use code from test_mailparse but
- # I'm not sure how to factor the verification routines out in a
- # useful fashion
- #for i in range(count):
- # msg, check = test_messages[i]
- # check(self, self.changes[i])
-
-
-if __name__ == '__main__':
- suite = unittest.TestSuite()
- suite.addTestClass(MaildirTest)
- import sys
- reporter = unittest.TextReporter(sys.stdout)
- suite.run(reporter)
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_mailparse.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_mailparse.py
deleted file mode 100644
index 4bb660477..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_mailparse.py
+++ /dev/null
@@ -1,248 +0,0 @@
-# -*- test-case-name: buildbot.test.test_mailparse -*-
-
-import os.path
-from twisted.trial import unittest
-from twisted.python import util
-from buildbot.changes.mail import parseFreshCVSMail, parseSyncmail
-
-class Test1(unittest.TestCase):
-
- def get(self, msg):
- msg = util.sibpath(__file__, msg)
- return parseFreshCVSMail(None, open(msg, "r"))
-
- def testMsg1(self):
- c = self.get("mail/msg1")
- self.assertEqual(c.who, "moshez")
- self.assertEqual(c.files, ["Twisted/debian/python-twisted.menu.in"])
- self.assertEqual(c.comments, "Instance massenger, apparently\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg2(self):
- c = self.get("mail/msg2")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["Twisted/twisted/web/woven/form.py",
- "Twisted/twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg3(self):
- # same as msg2 but missing the ViewCVS section
- c = self.get("mail/msg3")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["Twisted/twisted/web/woven/form.py",
- "Twisted/twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg4(self):
- # same as msg3 but also missing CVS patch section
- c = self.get("mail/msg4")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["Twisted/twisted/web/woven/form.py",
- "Twisted/twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg5(self):
- # creates a directory
- c = self.get("mail/msg5")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, ["Twisted/doc/examples/cocoaDemo"])
- self.assertEqual(c.comments,
- "Directory /cvs/Twisted/doc/examples/cocoaDemo added to the repository\n")
- self.assertEqual(c.isdir, 1)
-
- def testMsg6(self):
- # adds files
- c = self.get("mail/msg6")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, [
- "Twisted/doc/examples/cocoaDemo/MyAppDelegate.py",
- "Twisted/doc/examples/cocoaDemo/__main__.py",
- "Twisted/doc/examples/cocoaDemo/bin-python-main.m",
- "Twisted/doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
- "Twisted/doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"])
- self.assertEqual(c.comments,
- "Cocoa (OS X) clone of the QT demo, using polling reactor\n\nRequires pyobjc ( http://pyobjc.sourceforge.net ), it's not much different than the template project. The reactor is iterated periodically by a repeating NSTimer.\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg7(self):
- # deletes files
- c = self.get("mail/msg7")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, [
- "Twisted/doc/examples/cocoaDemo/MyAppDelegate.py",
- "Twisted/doc/examples/cocoaDemo/__main__.py",
- "Twisted/doc/examples/cocoaDemo/bin-python-main.m",
- "Twisted/doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
- "Twisted/doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"])
- self.assertEqual(c.comments,
- "Directories break debian build script, waiting for reasonable fix\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg8(self):
- # files outside Twisted/
- c = self.get("mail/msg8")
- self.assertEqual(c.who, "acapnotic")
- self.assertEqual(c.files, [ "CVSROOT/freshCfg" ])
- self.assertEqual(c.comments, "it doesn't work with invalid syntax\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg9(self):
- # also creates a directory
- c = self.get("mail/msg9")
- self.assertEqual(c.who, "exarkun")
- self.assertEqual(c.files, ["Twisted/sandbox/exarkun/persist-plugin"])
- self.assertEqual(c.comments,
- "Directory /cvs/Twisted/sandbox/exarkun/persist-plugin added to the repository\n")
- self.assertEqual(c.isdir, 1)
-
-
-class Test2(unittest.TestCase):
- def get(self, msg):
- msg = util.sibpath(__file__, msg)
- return parseFreshCVSMail(None, open(msg, "r"), prefix="Twisted")
-
- def testMsg1p(self):
- c = self.get("mail/msg1")
- self.assertEqual(c.who, "moshez")
- self.assertEqual(c.files, ["debian/python-twisted.menu.in"])
- self.assertEqual(c.comments, "Instance massenger, apparently\n")
-
- def testMsg2p(self):
- c = self.get("mail/msg2")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["twisted/web/woven/form.py",
- "twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
-
- def testMsg3p(self):
- # same as msg2 but missing the ViewCVS section
- c = self.get("mail/msg3")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["twisted/web/woven/form.py",
- "twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
-
- def testMsg4p(self):
- # same as msg3 but also missing CVS patch section
- c = self.get("mail/msg4")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["twisted/web/woven/form.py",
- "twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
-
- def testMsg5p(self):
- # creates a directory
- c = self.get("mail/msg5")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, ["doc/examples/cocoaDemo"])
- self.assertEqual(c.comments,
- "Directory /cvs/Twisted/doc/examples/cocoaDemo added to the repository\n")
- self.assertEqual(c.isdir, 1)
-
- def testMsg6p(self):
- # adds files
- c = self.get("mail/msg6")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, [
- "doc/examples/cocoaDemo/MyAppDelegate.py",
- "doc/examples/cocoaDemo/__main__.py",
- "doc/examples/cocoaDemo/bin-python-main.m",
- "doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
- "doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"])
- self.assertEqual(c.comments,
- "Cocoa (OS X) clone of the QT demo, using polling reactor\n\nRequires pyobjc ( http://pyobjc.sourceforge.net ), it's not much different than the template project. The reactor is iterated periodically by a repeating NSTimer.\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg7p(self):
- # deletes files
- c = self.get("mail/msg7")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, [
- "doc/examples/cocoaDemo/MyAppDelegate.py",
- "doc/examples/cocoaDemo/__main__.py",
- "doc/examples/cocoaDemo/bin-python-main.m",
- "doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
- "doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"])
- self.assertEqual(c.comments,
- "Directories break debian build script, waiting for reasonable fix\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg8p(self):
- # files outside Twisted/
- c = self.get("mail/msg8")
- self.assertEqual(c, None)
-
-
-class Test3(unittest.TestCase):
- def get(self, msg):
- msg = util.sibpath(__file__, msg)
- return parseSyncmail(None, open(msg, "r"), prefix="buildbot")
-
- def getNoPrefix(self, msg):
- msg = util.sibpath(__file__, msg)
- return parseSyncmail(None, open(msg, "r"))
-
- def testMsgS1(self):
- c = self.get("mail/syncmail.1")
- self.failUnless(c is not None)
- self.assertEqual(c.who, "warner")
- self.assertEqual(c.files, ["buildbot/changes/freshcvsmail.py"])
- self.assertEqual(c.comments,
- "remove leftover code, leave a temporary compatibility import. Note! Start\nimporting FCMaildirSource from changes.mail instead of changes.freshcvsmail\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsgS2(self):
- c = self.get("mail/syncmail.2")
- self.assertEqual(c.who, "warner")
- self.assertEqual(c.files, ["ChangeLog"])
- self.assertEqual(c.comments, "\t* NEWS: started adding new features\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsgS3(self):
- c = self.get("mail/syncmail.3")
- self.failUnless(c is None)
-
- def testMsgS4(self):
- c = self.get("mail/syncmail.4")
- self.assertEqual(c.who, "warner")
- self.assertEqual(c.files, ["test/mail/syncmail.1",
- "test/mail/syncmail.2",
- "test/mail/syncmail.3"
- ])
- self.assertEqual(c.comments, "test cases for syncmail parser\n")
- self.assertEqual(c.isdir, 0)
- self.assertEqual(c.branch, None)
-
- # tests a tag
- def testMsgS5(self):
- c = self.getNoPrefix("mail/syncmail.5")
- self.failUnless(c)
- self.assertEqual(c.who, "thomas")
- self.assertEqual(c.files, ['test1/MANIFEST',
- 'test1/Makefile.am',
- 'test1/autogen.sh',
- 'test1/configure.in'
- ])
- self.assertEqual(c.branch, "BRANCH-DEVEL")
- self.assertEqual(c.isdir, 0)
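test_mailparse.py feeds stored CVS notification mails through parseFreshCVSMail and parseSyncmail and checks the resulting Change objects (who, files, comments, isdir, branch); the prefix= argument strips a leading path component and makes the parser return None for commits outside that tree. A tiny hypothetical driver showing the same call, with an invented message path:

from buildbot.changes.mail import parseSyncmail

# the message path is invented; any file holding one syncmail notification works
msg = open("Maildir/new/1126859461.12345.example", "r")
change = parseSyncmail(None, msg, prefix="buildbot")
if change is None:
    print "message does not describe a change under the 'buildbot' prefix"
else:
    print change.who, change.branch
    print change.files
    print change.comments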
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_properties.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_properties.py
deleted file mode 100644
index 1c8560b03..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_properties.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# -*- test-case-name: buildbot.test.test_properties -*-
-
-import os
-
-from twisted.trial import unittest
-
-from buildbot.twcompat import maybeWait
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process import base
-from buildbot.process.step import ShellCommand, WithProperties
-from buildbot.status import builder
-from buildbot.slave.commands import rmdirRecursive
-from buildbot.test.runutils import RunMixin
-
-class MyBuildStep(ShellCommand):
- def _interpolateProperties(self, command):
- command = ["tar", "czf",
- "build-%s.tar.gz" % self.getProperty("revision"),
- "source"]
- return ShellCommand._interpolateProperties(self, command)
-
-
-class FakeBuild:
- pass
-class FakeBuilder:
- statusbag = None
- name = "fakebuilder"
-class FakeSlave:
- slavename = "bot12"
-class FakeSlaveBuilder:
- slave = FakeSlave()
- def getSlaveCommandVersion(self, command, oldversion=None):
- return "1.10"
-
-class Interpolate(unittest.TestCase):
- def setUp(self):
- self.builder = FakeBuilder()
- self.builder_status = builder.BuilderStatus("fakebuilder")
- self.builder_status.basedir = "test_properties"
- self.builder_status.nextBuildNumber = 5
- rmdirRecursive(self.builder_status.basedir)
- os.mkdir(self.builder_status.basedir)
- self.build_status = self.builder_status.newBuild()
- req = base.BuildRequest("reason", SourceStamp(branch="branch2",
- revision=1234))
- self.build = base.Build([req])
- self.build.setBuilder(self.builder)
- self.build.setupStatus(self.build_status)
- self.build.setupSlaveBuilder(FakeSlaveBuilder())
-
- def testWithProperties(self):
- self.build.setProperty("revision", 47)
- self.failUnlessEqual(self.build_status.getProperty("revision"), 47)
- c = ShellCommand(workdir=dir, build=self.build,
- command=["tar", "czf",
- WithProperties("build-%s.tar.gz",
- "revision"),
- "source"])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["tar", "czf", "build-47.tar.gz", "source"])
-
- def testWithPropertiesDict(self):
- self.build.setProperty("other", "foo")
- self.build.setProperty("missing", None)
- c = ShellCommand(workdir=dir, build=self.build,
- command=["tar", "czf",
- WithProperties("build-%(other)s.tar.gz"),
- "source"])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["tar", "czf", "build-foo.tar.gz", "source"])
-
- def testWithPropertiesEmpty(self):
- self.build.setProperty("empty", None)
- c = ShellCommand(workdir=dir, build=self.build,
- command=["tar", "czf",
- WithProperties("build-%(empty)s.tar.gz"),
- "source"])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["tar", "czf", "build-.tar.gz", "source"])
-
- def testCustomBuildStep(self):
- c = MyBuildStep(workdir=dir, build=self.build)
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["tar", "czf", "build-1234.tar.gz", "source"])
-
- def testSourceStamp(self):
- c = ShellCommand(workdir=dir, build=self.build,
- command=["touch",
- WithProperties("%s-dir", "branch"),
- WithProperties("%s-rev", "revision"),
- ])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["touch", "branch2-dir", "1234-rev"])
-
- def testSlaveName(self):
- c = ShellCommand(workdir=dir, build=self.build,
- command=["touch",
- WithProperties("%s-slave", "slavename"),
- ])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["touch", "bot12-slave"])
-
- def testBuildNumber(self):
- c = ShellCommand(workdir=dir, build=self.build,
- command=["touch",
- WithProperties("build-%d", "buildnumber"),
- WithProperties("builder-%s", "buildername"),
- ])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["touch", "build-5", "builder-fakebuilder"])
-
-
-run_config = """
-from buildbot.process import step, factory
-from buildbot.process.step import ShellCommand, WithProperties
-s = factory.s
-
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'sekrit')]
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 0
-
-f1 = factory.BuildFactory([s(step.ShellCommand,
- command=['touch',
- WithProperties('%s-slave', 'slavename'),
- ])])
-
-b1 = {'name': 'full1', 'slavename': 'bot1', 'builddir': 'bd1', 'factory': f1}
-c['builders'] = [b1]
-
-"""
-
-class Run(RunMixin, unittest.TestCase):
- def testInterpolate(self):
- # run an actual build with a step that interpolates a build property
- d = self.master.loadConfig(run_config)
- d.addCallback(lambda res: self.master.startService())
- d.addCallback(lambda res: self.connectOneSlave("bot1"))
- d.addCallback(lambda res: self.requestBuild("full1"))
- d.addCallback(self.failUnlessBuildSucceeded)
- return maybeWait(d)
-
-
-# we test got_revision in test_vc
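test_properties.py exercises WithProperties, which postpones interpolation of build properties (branch, revision, slavename, buildnumber, and so on) until the step actually runs, accepting either positional %s arguments that name properties or %(name)s dictionary-style keys. A small hypothetical factory using both forms; the archive and file names are invented for illustration.

from buildbot.process import factory
from buildbot.process.step import ShellCommand, WithProperties

s = factory.s
f1 = factory.BuildFactory([
    # positional form: each %s is filled from the named property, in order
    s(ShellCommand,
      command=['tar', 'czf',
               WithProperties('%s-r%s.tar.gz', 'branch', 'revision'),
               'source']),
    # dictionary form: %(name)s keys are looked up as build properties
    s(ShellCommand,
      command=['touch', WithProperties('built-on-%(slavename)s')]),
    ])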
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_run.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_run.py
deleted file mode 100644
index dc1bcf99a..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_run.py
+++ /dev/null
@@ -1,524 +0,0 @@
-# -*- test-case-name: buildbot.test.test_run -*-
-
-from twisted.trial import unittest
-from twisted.internet import reactor, defer
-from twisted.python import log
-import sys, os, os.path, shutil, time, errno
-#log.startLogging(sys.stderr)
-
-from buildbot import master, interfaces
-from buildbot.sourcestamp import SourceStamp
-from buildbot.slave import bot
-from buildbot.changes import changes
-from buildbot.status import builder
-from buildbot.process.base import BuildRequest
-from buildbot.twcompat import maybeWait
-
-from buildbot.test.runutils import RunMixin
-
-config_base = """
-from buildbot.process import factory, step
-s = factory.s
-
-f1 = factory.QuickBuildFactory('fakerep', 'cvsmodule', configure=None)
-
-f2 = factory.BuildFactory([
- s(step.Dummy, timeout=1),
- s(step.RemoteDummy, timeout=2),
- ])
-
-BuildmasterConfig = c = {}
-c['bots'] = [['bot1', 'sekrit']]
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = []
-c['builders'].append({'name':'quick', 'slavename':'bot1',
- 'builddir': 'quickdir', 'factory': f1})
-c['slavePortnum'] = 0
-"""
-
-config_run = config_base + """
-from buildbot.scheduler import Scheduler
-c['schedulers'] = [Scheduler('quick', None, 120, ['quick'])]
-"""
-
-config_2 = config_base + """
-c['builders'] = [{'name': 'dummy', 'slavename': 'bot1',
- 'builddir': 'dummy1', 'factory': f2},
- {'name': 'testdummy', 'slavename': 'bot1',
- 'builddir': 'dummy2', 'factory': f2, 'category': 'test'}]
-"""
-
-config_3 = config_2 + """
-c['builders'].append({'name': 'adummy', 'slavename': 'bot1',
- 'builddir': 'adummy3', 'factory': f2})
-c['builders'].append({'name': 'bdummy', 'slavename': 'bot1',
- 'builddir': 'adummy4', 'factory': f2,
- 'category': 'test'})
-"""
-
-config_4 = config_base + """
-c['builders'] = [{'name': 'dummy', 'slavename': 'bot1',
- 'builddir': 'dummy', 'factory': f2}]
-"""
-
-config_4_newbasedir = config_4 + """
-c['builders'] = [{'name': 'dummy', 'slavename': 'bot1',
- 'builddir': 'dummy2', 'factory': f2}]
-"""
-
-config_4_newbuilder = config_4_newbasedir + """
-c['builders'].append({'name': 'dummy2', 'slavename': 'bot1',
- 'builddir': 'dummy23', 'factory': f2})
-"""
-
-class Run(unittest.TestCase):
- def rmtree(self, d):
- try:
- shutil.rmtree(d, ignore_errors=1)
- except OSError, e:
- # Python 2.2 appears to ignore ignore_errors
- if e.errno != errno.ENOENT:
- raise
-
- def testMaster(self):
- self.rmtree("basedir")
- os.mkdir("basedir")
- m = master.BuildMaster("basedir")
- m.loadConfig(config_run)
- m.readConfig = True
- m.startService()
- cm = m.change_svc
- c = changes.Change("bob", ["Makefile", "foo/bar.c"], "changed stuff")
- cm.addChange(c)
- # verify that the Scheduler is now waiting
- s = m.allSchedulers()[0]
- self.failUnless(s.timer)
- # halting the service will also stop the timer
- d = defer.maybeDeferred(m.stopService)
- return maybeWait(d)
-
-class Ping(RunMixin, unittest.TestCase):
- def testPing(self):
- self.master.loadConfig(config_2)
- self.master.readConfig = True
- self.master.startService()
-
- d = self.connectSlave()
- d.addCallback(self._testPing_1)
- return maybeWait(d)
-
- def _testPing_1(self, res):
- d = interfaces.IControl(self.master).getBuilder("dummy").ping(1)
- d.addCallback(self._testPing_2)
- return d
-
- def _testPing_2(self, res):
- pass
-
-class BuilderNames(unittest.TestCase):
-
- def testGetBuilderNames(self):
- os.mkdir("bnames")
- m = master.BuildMaster("bnames")
- s = m.getStatus()
-
- m.loadConfig(config_3)
- m.readConfig = True
-
- self.failUnlessEqual(s.getBuilderNames(),
- ["dummy", "testdummy", "adummy", "bdummy"])
- self.failUnlessEqual(s.getBuilderNames(categories=['test']),
- ["testdummy", "bdummy"])
-
-class Disconnect(RunMixin, unittest.TestCase):
-
- def setUp(self):
- RunMixin.setUp(self)
-
- # verify that disconnecting the slave during a build properly
- # terminates the build
- m = self.master
- s = self.status
- c = self.control
-
- m.loadConfig(config_2)
- m.readConfig = True
- m.startService()
-
- self.failUnlessEqual(s.getBuilderNames(), ["dummy", "testdummy"])
- self.s1 = s1 = s.getBuilder("dummy")
- self.failUnlessEqual(s1.getName(), "dummy")
- self.failUnlessEqual(s1.getState(), ("offline", []))
- self.failUnlessEqual(s1.getCurrentBuilds(), [])
- self.failUnlessEqual(s1.getLastFinishedBuild(), None)
- self.failUnlessEqual(s1.getBuild(-1), None)
-
- d = self.connectSlave()
- d.addCallback(self._disconnectSetup_1)
- return maybeWait(d)
-
- def _disconnectSetup_1(self, res):
- self.failUnlessEqual(self.s1.getState(), ("idle", []))
-
-
- def verifyDisconnect(self, bs):
- self.failUnless(bs.isFinished())
-
- step1 = bs.getSteps()[0]
- self.failUnlessEqual(step1.getText(), ["delay", "interrupted"])
- self.failUnlessEqual(step1.getResults()[0], builder.FAILURE)
-
- self.failUnlessEqual(bs.getResults(), builder.FAILURE)
-
- def verifyDisconnect2(self, bs):
- self.failUnless(bs.isFinished())
-
- step1 = bs.getSteps()[1]
- self.failUnlessEqual(step1.getText(), ["remote", "delay", "2 secs",
- "failed", "slave", "lost"])
- self.failUnlessEqual(step1.getResults()[0], builder.FAILURE)
-
- self.failUnlessEqual(bs.getResults(), builder.FAILURE)
-
-
- def testIdle1(self):
- # disconnect the slave before the build starts
- d = self.shutdownAllSlaves() # dies before it gets started
- d.addCallback(self._testIdle1_1)
- return d
- def _testIdle1_1(self, res):
- # trying to force a build now will cause an error. Regular builds
- # just wait for the slave to re-appear, but forced builds that
- # cannot be run right away trigger NoSlaveErrors
- fb = self.control.getBuilder("dummy").forceBuild
- self.failUnlessRaises(interfaces.NoSlaveError,
- fb, None, "forced build")
-
- def testIdle2(self):
- # now suppose the slave goes missing
- self.slaves['bot1'].bf.continueTrying = 0
- self.disappearSlave()
-
- # forcing a build will work: the build will detect that the slave is no
- # longer available and will be re-queued. Wait 5 seconds, then check
- # to make sure the build is still in the 'waiting for a slave' queue.
- self.control.getBuilder("dummy").original.START_BUILD_TIMEOUT = 1
- req = BuildRequest("forced build", SourceStamp())
- self.failUnlessEqual(req.startCount, 0)
- self.control.getBuilder("dummy").requestBuild(req)
- # this should ping the slave, which doesn't respond, and then give up
- # after a second. The BuildRequest will be re-queued, and its
- # .startCount will be incremented.
- d = defer.Deferred()
- d.addCallback(self._testIdle2_1, req)
- reactor.callLater(3, d.callback, None)
- return maybeWait(d, 5)
- testIdle2.timeout = 5
-
- def _testIdle2_1(self, res, req):
- self.failUnlessEqual(req.startCount, 1)
- cancelled = req.cancel()
- self.failUnless(cancelled)
-
-
- def testBuild1(self):
- # this next sequence is timing-dependent. The dummy build takes at
- # least 3 seconds to complete, and this batch of commands must
- # complete within that time.
- #
- d = self.control.getBuilder("dummy").forceBuild(None, "forced build")
- d.addCallback(self._testBuild1_1)
- return maybeWait(d)
-
- def _testBuild1_1(self, bc):
- bs = bc.getStatus()
- # now kill the slave before it gets to start the first step
- d = self.shutdownAllSlaves() # dies before it gets started
- d.addCallback(self._testBuild1_2, bs)
- return d # TODO: this used to have a 5-second timeout
-
- def _testBuild1_2(self, res, bs):
- # now examine the just-stopped build and make sure it is really
- # stopped. This is checking for bugs in which the slave-detach gets
- # missed or causes an exception which prevents the build from being
- # marked as "finished due to an error".
- d = bs.waitUntilFinished()
- d2 = self.master.botmaster.waitUntilBuilderDetached("dummy")
- dl = defer.DeferredList([d, d2])
- dl.addCallback(self._testBuild1_3, bs)
- return dl # TODO: this had a 5-second timeout too
-
- def _testBuild1_3(self, res, bs):
- self.failUnlessEqual(self.s1.getState()[0], "offline")
- self.verifyDisconnect(bs)
-
-
- def testBuild2(self):
- # this next sequence is timing-dependent
- d = self.control.getBuilder("dummy").forceBuild(None, "forced build")
- d.addCallback(self._testBuild1_1)
- return maybeWait(d, 30)
- testBuild2.timeout = 30
-
- def _testBuild1_1(self, bc):
- bs = bc.getStatus()
- # shutdown the slave while it's running the first step
- reactor.callLater(0.5, self.shutdownAllSlaves)
-
- d = bs.waitUntilFinished()
- d.addCallback(self._testBuild2_2, bs)
- return d
-
- def _testBuild2_2(self, res, bs):
- # we hit here when the build has finished. The builder is still being
- # torn down, however, so spin for another second to allow the
- # callLater(0) in Builder.detached to fire.
- d = defer.Deferred()
- reactor.callLater(1, d.callback, None)
- d.addCallback(self._testBuild2_3, bs)
- return d
-
- def _testBuild2_3(self, res, bs):
- self.failUnlessEqual(self.s1.getState()[0], "offline")
- self.verifyDisconnect(bs)
-
-
- def testBuild3(self):
- # this next sequence is timing-dependent
- d = self.control.getBuilder("dummy").forceBuild(None, "forced build")
- d.addCallback(self._testBuild3_1)
- return maybeWait(d, 30)
- testBuild3.timeout = 30
-
- def _testBuild3_1(self, bc):
- bs = bc.getStatus()
- # kill the slave while it's running the first step
- reactor.callLater(0.5, self.killSlave)
- d = bs.waitUntilFinished()
- d.addCallback(self._testBuild3_2, bs)
- return d
-
- def _testBuild3_2(self, res, bs):
- # the builder is still being torn down, so give it another second
- d = defer.Deferred()
- reactor.callLater(1, d.callback, None)
- d.addCallback(self._testBuild3_3, bs)
- return d
-
- def _testBuild3_3(self, res, bs):
- self.failUnlessEqual(self.s1.getState()[0], "offline")
- self.verifyDisconnect(bs)
-
-
- def testBuild4(self):
- # this next sequence is timing-dependent
- d = self.control.getBuilder("dummy").forceBuild(None, "forced build")
- d.addCallback(self._testBuild4_1)
- return maybeWait(d, 30)
- testBuild4.timeout = 30
-
- def _testBuild4_1(self, bc):
- bs = bc.getStatus()
- # kill the slave while it's running the second (remote) step
- reactor.callLater(1.5, self.killSlave)
- d = bs.waitUntilFinished()
- d.addCallback(self._testBuild4_2, bs)
- return d
-
- def _testBuild4_2(self, res, bs):
- # at this point, the slave is in the process of being removed, so it
- # could either be 'idle' or 'offline'. I think there is a
- # reactor.callLater(0) standing between here and the offline state.
- #reactor.iterate() # TODO: remove the need for this
-
- self.failUnlessEqual(self.s1.getState()[0], "offline")
- self.verifyDisconnect2(bs)
-
-
- def testInterrupt(self):
- # this next sequence is timing-dependent
- d = self.control.getBuilder("dummy").forceBuild(None, "forced build")
- d.addCallback(self._testInterrupt_1)
- return maybeWait(d, 30)
- testInterrupt.timeout = 30
-
- def _testInterrupt_1(self, bc):
- bs = bc.getStatus()
- # halt the build while it's running the first step
- reactor.callLater(0.5, bc.stopBuild, "bang go splat")
- d = bs.waitUntilFinished()
- d.addCallback(self._testInterrupt_2, bs)
- return d
-
- def _testInterrupt_2(self, res, bs):
- self.verifyDisconnect(bs)
-
-
- def testDisappear(self):
- bc = self.control.getBuilder("dummy")
-
- # ping should succeed
- d = bc.ping(1)
- d.addCallback(self._testDisappear_1, bc)
- return maybeWait(d)
-
- def _testDisappear_1(self, res, bc):
- self.failUnlessEqual(res, True)
-
- # now, before any build is run, make the slave disappear
- self.slaves['bot1'].bf.continueTrying = 0
- self.disappearSlave()
-
- # at this point, a ping to the slave should timeout
- d = bc.ping(1)
- d.addCallback(self._testDisappear_2)
- return d
- def _testDisappear_2(self, res):
- self.failUnlessEqual(res, False)
-
- def testDuplicate(self):
- bc = self.control.getBuilder("dummy")
- bs = self.status.getBuilder("dummy")
- ss = bs.getSlaves()[0]
-
- self.failUnless(ss.isConnected())
- self.failUnlessEqual(ss.getAdmin(), "one")
-
- # now, before any build is run, make the first slave disappear
- self.slaves['bot1'].bf.continueTrying = 0
- self.disappearSlave()
-
- d = self.master.botmaster.waitUntilBuilderDetached("dummy")
- # now let the new slave take over
- self.connectSlave2()
- d.addCallback(self._testDuplicate_1, ss)
- return maybeWait(d, 2)
- testDuplicate.timeout = 5
-
- def _testDuplicate_1(self, res, ss):
- d = self.master.botmaster.waitUntilBuilderAttached("dummy")
- d.addCallback(self._testDuplicate_2, ss)
- return d
-
- def _testDuplicate_2(self, res, ss):
- self.failUnless(ss.isConnected())
- self.failUnlessEqual(ss.getAdmin(), "two")
-
-
-class Disconnect2(RunMixin, unittest.TestCase):
-
- def setUp(self):
- RunMixin.setUp(self)
- # verify that disconnecting the slave during a build properly
- # terminates the build
- m = self.master
- s = self.status
- c = self.control
-
- m.loadConfig(config_2)
- m.readConfig = True
- m.startService()
-
- self.failUnlessEqual(s.getBuilderNames(), ["dummy", "testdummy"])
- self.s1 = s1 = s.getBuilder("dummy")
- self.failUnlessEqual(s1.getName(), "dummy")
- self.failUnlessEqual(s1.getState(), ("offline", []))
- self.failUnlessEqual(s1.getCurrentBuilds(), [])
- self.failUnlessEqual(s1.getLastFinishedBuild(), None)
- self.failUnlessEqual(s1.getBuild(-1), None)
-
- d = self.connectSlaveFastTimeout()
- d.addCallback(self._setup_disconnect2_1)
- return maybeWait(d)
-
- def _setup_disconnect2_1(self, res):
- self.failUnlessEqual(self.s1.getState(), ("idle", []))
-
-
- def testSlaveTimeout(self):
- # now suppose the slave goes missing. We want to find out when it
- # creates a new Broker, so we reach inside and mark it with the
- # well-known sigil of impending messy death.
- bd = self.slaves['bot1'].getServiceNamed("bot").builders["dummy"]
- broker = bd.remote.broker
- broker.redshirt = 1
-
- # make sure the keepalives will keep the connection up
- d = defer.Deferred()
- reactor.callLater(5, d.callback, None)
- d.addCallback(self._testSlaveTimeout_1)
- return maybeWait(d, 20)
- testSlaveTimeout.timeout = 20
-
- def _testSlaveTimeout_1(self, res):
- bd = self.slaves['bot1'].getServiceNamed("bot").builders["dummy"]
- if not bd.remote or not hasattr(bd.remote.broker, "redshirt"):
- self.fail("slave disconnected when it shouldn't have")
-
- d = self.master.botmaster.waitUntilBuilderDetached("dummy")
- # whoops! how careless of me.
- self.disappearSlave()
- # the slave will realize the connection is lost within 2 seconds, and
- # reconnect.
- d.addCallback(self._testSlaveTimeout_2)
- return d
-
- def _testSlaveTimeout_2(self, res):
- # the ReconnectingPBClientFactory will attempt a reconnect in two
- # seconds.
- d = self.master.botmaster.waitUntilBuilderAttached("dummy")
- d.addCallback(self._testSlaveTimeout_3)
- return d
-
- def _testSlaveTimeout_3(self, res):
- # make sure it is a new connection (i.e. a new Broker)
- bd = self.slaves['bot1'].getServiceNamed("bot").builders["dummy"]
- self.failUnless(bd.remote, "hey, slave isn't really connected")
- self.failIf(hasattr(bd.remote.broker, "redshirt"),
- "hey, slave's Broker is still marked for death")
-
-
-class Basedir(RunMixin, unittest.TestCase):
- def testChangeBuilddir(self):
- m = self.master
- m.loadConfig(config_4)
- m.readConfig = True
- m.startService()
-
- d = self.connectSlave()
- d.addCallback(self._testChangeBuilddir_1)
- return maybeWait(d)
-
- def _testChangeBuilddir_1(self, res):
- self.bot = bot = self.slaves['bot1'].bot
- self.builder = builder = bot.builders.get("dummy")
- self.failUnless(builder)
- self.failUnlessEqual(builder.builddir, "dummy")
- self.failUnlessEqual(builder.basedir,
- os.path.join("slavebase-bot1", "dummy"))
-
- d = self.master.loadConfig(config_4_newbasedir)
- d.addCallback(self._testChangeBuilddir_2)
- return d
-
- def _testChangeBuilddir_2(self, res):
- bot = self.bot
- # this causes the builder to be replaced
- self.failIfIdentical(self.builder, bot.builders.get("dummy"))
- builder = bot.builders.get("dummy")
- self.failUnless(builder)
- # the basedir should be updated
- self.failUnlessEqual(builder.builddir, "dummy2")
- self.failUnlessEqual(builder.basedir,
- os.path.join("slavebase-bot1", "dummy2"))
-
- # add a new builder, which causes the basedir list to be reloaded
- d = self.master.loadConfig(config_4_newbuilder)
- return d
-
-# TODO: test everything, from Change submission to Scheduler to Build to
-# Status. Use all the status types. Specifically I want to catch recurrences
-# of the bug where I forgot to make Waterfall inherit from StatusReceiver
-# such that buildSetSubmitted failed.
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_runner.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_runner.py
deleted file mode 100644
index f82e33fb5..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_runner.py
+++ /dev/null
@@ -1,299 +0,0 @@
-
-# this file tests the 'buildbot' command, with its various sub-commands
-
-from twisted.trial import unittest
-from twisted.python import runtime, usage
-import os, os.path, shutil, shlex
-
-from buildbot.scripts import runner, tryclient
-
-class Options(unittest.TestCase):
- optionsFile = "SDFsfsFSdfsfsFSD"
-
- def make(self, d, key):
- # we use a wacky filename here in case the test code discovers the
- # user's real ~/.buildbot/ directory
- os.makedirs(os.sep.join(d + [".buildbot"]))
- f = open(os.sep.join(d + [".buildbot", self.optionsFile]), "w")
- f.write("key = '%s'\n" % key)
- f.close()
-
- def check(self, d, key):
- basedir = os.sep.join(d)
- options = runner.loadOptions(self.optionsFile, here=basedir,
- home=self.home)
- if key is None:
- self.failIf(options.has_key('key'))
- else:
- self.failUnlessEqual(options['key'], key)
-
- def testFindOptions(self):
- self.make(["home", "dir1", "dir2", "dir3"], "one")
- self.make(["home", "dir1", "dir2"], "two")
- self.make(["home"], "home")
- self.home = os.path.abspath("home")
-
- self.check(["home", "dir1", "dir2", "dir3"], "one")
- self.check(["home", "dir1", "dir2"], "two")
- self.check(["home", "dir1"], "home")
-
- self.home = os.path.abspath("nothome")
- os.makedirs(os.sep.join(["nothome", "dir1"]))
- self.check(["nothome", "dir1"], None)
-
- def doForce(self, args, expected):
- o = runner.ForceOptions()
- o.parseOptions(args)
- self.failUnlessEqual(o.keys(), expected.keys())
- for k in o.keys():
- self.failUnlessEqual(o[k], expected[k],
- "[%s] got %s instead of %s" % (k, o[k],
- expected[k]))
-
- def testForceOptions(self):
- if not hasattr(shlex, "split"):
- raise unittest.SkipTest("need python>=2.3 for shlex.split")
-
- exp = {"builder": "b1", "reason": "reason",
- "branch": None, "revision": None}
- self.doForce(shlex.split("b1 reason"), exp)
- self.doForce(shlex.split("b1 'reason'"), exp)
- self.failUnlessRaises(usage.UsageError, self.doForce,
- shlex.split("--builder b1 'reason'"), exp)
- self.doForce(shlex.split("--builder b1 --reason reason"), exp)
- self.doForce(shlex.split("--builder b1 --reason 'reason'"), exp)
- self.doForce(shlex.split("--builder b1 --reason \"reason\""), exp)
-
- exp['reason'] = "longer reason"
- self.doForce(shlex.split("b1 'longer reason'"), exp)
- self.doForce(shlex.split("b1 longer reason"), exp)
- self.doForce(shlex.split("--reason 'longer reason' b1"), exp)
-
-
-class Create(unittest.TestCase):
- def failUnlessIn(self, substring, string, msg=None):
- # trial provides a version of this that requires python-2.3 to test
- # strings.
- self.failUnless(string.find(substring) != -1, msg)
- def failUnlessExists(self, filename):
- self.failUnless(os.path.exists(filename), "%s should exist" % filename)
- def failIfExists(self, filename):
- self.failIf(os.path.exists(filename), "%s should not exist" % filename)
-
- def testMaster(self):
- basedir = "test_runner.master"
- options = runner.MasterOptions()
- options.parseOptions(["-q", basedir])
- cwd = os.getcwd()
- runner.createMaster(options)
- os.chdir(cwd)
-
- tac = os.path.join(basedir, "buildbot.tac")
- self.failUnless(os.path.exists(tac))
- tacfile = open(tac,"rt").read()
- self.failUnlessIn("basedir", tacfile)
- self.failUnlessIn("configfile = r'master.cfg'", tacfile)
- self.failUnlessIn("BuildMaster(basedir, configfile)", tacfile)
-
- cfg = os.path.join(basedir, "master.cfg")
- self.failIfExists(cfg)
- samplecfg = os.path.join(basedir, "master.cfg.sample")
- self.failUnlessExists(samplecfg)
- cfgfile = open(samplecfg,"rt").read()
- self.failUnlessIn("This is a sample buildmaster config file", cfgfile)
-
- makefile = os.path.join(basedir, "Makefile.sample")
- self.failUnlessExists(makefile)
-
- # now verify that running it a second time (with the same options)
- # does the right thing: nothing changes
- runner.createMaster(options)
- os.chdir(cwd)
-
- self.failIfExists(os.path.join(basedir, "buildbot.tac.new"))
- self.failUnlessExists(os.path.join(basedir, "master.cfg.sample"))
-
- oldtac = open(os.path.join(basedir, "buildbot.tac"), "rt").read()
-
- # mutate Makefile.sample, since it should be rewritten
- f = open(os.path.join(basedir, "Makefile.sample"), "rt")
- oldmake = f.read()
- f = open(os.path.join(basedir, "Makefile.sample"), "wt")
- f.write(oldmake)
- f.write("# additional line added\n")
- f.close()
-
- # also mutate master.cfg.sample
- f = open(os.path.join(basedir, "master.cfg.sample"), "rt")
- oldsamplecfg = f.read()
- f = open(os.path.join(basedir, "master.cfg.sample"), "wt")
- f.write(oldsamplecfg)
- f.write("# additional line added\n")
- f.close()
-
- # now run it again (with different options)
- options = runner.MasterOptions()
- options.parseOptions(["-q", "--config", "other.cfg", basedir])
- runner.createMaster(options)
- os.chdir(cwd)
-
- tac = open(os.path.join(basedir, "buildbot.tac"), "rt").read()
- self.failUnlessEqual(tac, oldtac, "shouldn't change existing .tac")
- self.failUnlessExists(os.path.join(basedir, "buildbot.tac.new"))
-
- make = open(os.path.join(basedir, "Makefile.sample"), "rt").read()
- self.failIfEqual(make, oldmake, "*should* rewrite Makefile.sample")
-
- samplecfg = open(os.path.join(basedir, "master.cfg.sample"),
- "rt").read()
- self.failIfEqual(samplecfg, oldsamplecfg,
- "*should* rewrite master.cfg.sample")
-
-
- def testSlave(self):
- basedir = "test_runner.slave"
- options = runner.SlaveOptions()
- options.parseOptions(["-q", basedir, "buildmaster:1234",
- "botname", "passwd"])
- cwd = os.getcwd()
- runner.createSlave(options)
- os.chdir(cwd)
-
- tac = os.path.join(basedir, "buildbot.tac")
- self.failUnless(os.path.exists(tac))
- tacfile = open(tac,"rt").read()
- self.failUnlessIn("basedir", tacfile)
- self.failUnlessIn("host = 'buildmaster'", tacfile)
- self.failUnlessIn("port = 1234", tacfile)
- self.failUnlessIn("slavename = 'botname'", tacfile)
- self.failUnlessIn("passwd = 'passwd'", tacfile)
- self.failUnlessIn("keepalive = 600", tacfile)
- self.failUnlessIn("BuildSlave(host, port, slavename", tacfile)
-
- makefile = os.path.join(basedir, "Makefile.sample")
- self.failUnlessExists(makefile)
-
- self.failUnlessExists(os.path.join(basedir, "info", "admin"))
- self.failUnlessExists(os.path.join(basedir, "info", "host"))
- # edit one to make sure the later install doesn't change it
- f = open(os.path.join(basedir, "info", "admin"), "wt")
- f.write("updated@buildbot.example.org\n")
- f.close()
-
- # now verify that running it a second time (with the same options)
- # does the right thing: nothing changes
- runner.createSlave(options)
- os.chdir(cwd)
-
- self.failIfExists(os.path.join(basedir, "buildbot.tac.new"))
- admin = open(os.path.join(basedir, "info", "admin"), "rt").read()
- self.failUnlessEqual(admin, "updated@buildbot.example.org\n")
-
-
- # mutate Makefile.sample, since it should be rewritten
- oldmake = open(os.path.join(basedir, "Makefile.sample"), "rt").read()
- f = open(os.path.join(basedir, "Makefile.sample"), "wt")
- f.write(oldmake)
- f.write("# additional line added\n")
- f.close()
- oldtac = open(os.path.join(basedir, "buildbot.tac"), "rt").read()
-
- # now run it again (with different options)
- options = runner.SlaveOptions()
- options.parseOptions(["-q", "--keepalive", "30",
- basedir, "buildmaster:9999",
- "newbotname", "passwd"])
- runner.createSlave(options)
- os.chdir(cwd)
-
- tac = open(os.path.join(basedir, "buildbot.tac"), "rt").read()
- self.failUnlessEqual(tac, oldtac, "shouldn't change existing .tac")
- self.failUnlessExists(os.path.join(basedir, "buildbot.tac.new"))
- tacfile = open(os.path.join(basedir, "buildbot.tac.new"),"rt").read()
- self.failUnlessIn("basedir", tacfile)
- self.failUnlessIn("host = 'buildmaster'", tacfile)
- self.failUnlessIn("port = 9999", tacfile)
- self.failUnlessIn("slavename = 'newbotname'", tacfile)
- self.failUnlessIn("passwd = 'passwd'", tacfile)
- self.failUnlessIn("keepalive = 30", tacfile)
- self.failUnlessIn("BuildSlave(host, port, slavename", tacfile)
-
- make = open(os.path.join(basedir, "Makefile.sample"), "rt").read()
- self.failIfEqual(make, oldmake, "*should* rewrite Makefile.sample")
-
-class Try(unittest.TestCase):
- # test some aspects of the 'buildbot try' command
- def makeOptions(self, contents):
- if os.path.exists(".buildbot"):
- shutil.rmtree(".buildbot")
- os.mkdir(".buildbot")
- open(os.path.join(".buildbot", "options"), "w").write(contents)
-
- def testGetopt1(self):
- opts = "try_connect = 'ssh'\n" + "try_builders = ['a']\n"
- self.makeOptions(opts)
- config = runner.TryOptions()
- config.parseOptions([])
- t = tryclient.Try(config)
- self.failUnlessEqual(t.connect, "ssh")
- self.failUnlessEqual(t.builderNames, ['a'])
-
- def testGetopt2(self):
- opts = ""
- self.makeOptions(opts)
- config = runner.TryOptions()
- config.parseOptions(['--connect=ssh', '--builder', 'a'])
- t = tryclient.Try(config)
- self.failUnlessEqual(t.connect, "ssh")
- self.failUnlessEqual(t.builderNames, ['a'])
-
- def testGetopt3(self):
- opts = ""
- self.makeOptions(opts)
- config = runner.TryOptions()
- config.parseOptions(['--connect=ssh',
- '--builder', 'a', '--builder=b'])
- t = tryclient.Try(config)
- self.failUnlessEqual(t.connect, "ssh")
- self.failUnlessEqual(t.builderNames, ['a', 'b'])
-
- def testGetopt4(self):
- opts = "try_connect = 'ssh'\n" + "try_builders = ['a']\n"
- self.makeOptions(opts)
- config = runner.TryOptions()
- config.parseOptions(['--builder=b'])
- t = tryclient.Try(config)
- self.failUnlessEqual(t.connect, "ssh")
- self.failUnlessEqual(t.builderNames, ['b'])
-
- def testGetTopdir(self):
- os.mkdir("gettopdir")
- os.mkdir(os.path.join("gettopdir", "foo"))
- os.mkdir(os.path.join("gettopdir", "foo", "bar"))
- open(os.path.join("gettopdir", "1"),"w").write("1")
- open(os.path.join("gettopdir", "foo", "2"),"w").write("2")
- open(os.path.join("gettopdir", "foo", "bar", "3"),"w").write("3")
-
- target = os.path.abspath("gettopdir")
- t = tryclient.getTopdir("1", "gettopdir")
- self.failUnlessEqual(os.path.abspath(t), target)
- t = tryclient.getTopdir("1", os.path.join("gettopdir", "foo"))
- self.failUnlessEqual(os.path.abspath(t), target)
- t = tryclient.getTopdir("1", os.path.join("gettopdir", "foo", "bar"))
- self.failUnlessEqual(os.path.abspath(t), target)
-
- target = os.path.abspath(os.path.join("gettopdir", "foo"))
- t = tryclient.getTopdir("2", os.path.join("gettopdir", "foo"))
- self.failUnlessEqual(os.path.abspath(t), target)
- t = tryclient.getTopdir("2", os.path.join("gettopdir", "foo", "bar"))
- self.failUnlessEqual(os.path.abspath(t), target)
-
- target = os.path.abspath(os.path.join("gettopdir", "foo", "bar"))
- t = tryclient.getTopdir("3", os.path.join("gettopdir", "foo", "bar"))
- self.failUnlessEqual(os.path.abspath(t), target)
-
- nonexistent = "nonexistent\n29fis3kq\tBAR"
- # hopefully there won't be a real file with that name between here
- # and the filesystem root.
- self.failUnlessRaises(ValueError, tryclient.getTopdir, nonexistent)
-
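The getTopdir test above pins down a walk-up-the-tree search: starting from a given directory, keep moving to the parent until a directory containing the named marker file is found, and raise ValueError if the filesystem root is reached first. A minimal sketch of that behaviour, consistent with the assertions above (the function name, default argument and error message here are illustrative, not necessarily how tryclient.getTopdir is written):

    import os

    def get_topdir(topfile, start="."):
        # walk upward from 'start' until a directory containing 'topfile'
        # is found; give up with ValueError once the filesystem root is hit
        here = os.path.abspath(start)
        while True:
            if os.path.exists(os.path.join(here, topfile)):
                return here
            parent = os.path.dirname(here)
            if parent == here:  # reached the root without finding it
                raise ValueError("%r not found above %r" % (topfile, start))
            here = parent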
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_scheduler.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_scheduler.py
deleted file mode 100644
index d423f6c86..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_scheduler.py
+++ /dev/null
@@ -1,313 +0,0 @@
-# -*- test-case-name: buildbot.test.test_scheduler -*-
-
-import os, time
-
-from twisted.trial import unittest
-from twisted.internet import defer, reactor
-from twisted.application import service
-from twisted.spread import pb
-
-from buildbot import scheduler, sourcestamp, buildset, status
-from buildbot.twcompat import maybeWait
-from buildbot.changes.changes import Change
-from buildbot.scripts import tryclient
-
-
-class FakeMaster(service.MultiService):
- d = None
- def submitBuildSet(self, bs):
- self.sets.append(bs)
- if self.d:
- reactor.callLater(0, self.d.callback, bs)
- self.d = None
- return pb.Referenceable() # makes the cleanup work correctly
-
-class Scheduling(unittest.TestCase):
- def setUp(self):
- self.master = master = FakeMaster()
- master.sets = []
- master.startService()
-
- def tearDown(self):
- d = self.master.stopService()
- return maybeWait(d)
-
- def addScheduler(self, s):
- s.setServiceParent(self.master)
-
- def testPeriodic1(self):
- self.addScheduler(scheduler.Periodic("quickly", ["a","b"], 2))
- d = defer.Deferred()
- reactor.callLater(5, d.callback, None)
- d.addCallback(self._testPeriodic1_1)
- return maybeWait(d)
- def _testPeriodic1_1(self, res):
- self.failUnless(len(self.master.sets) > 1)
- s1 = self.master.sets[0]
- self.failUnlessEqual(s1.builderNames, ["a","b"])
-
- def testNightly(self):
- # now == 15-Nov-2005, 00:05:36 AM. By using mktime, this is
- # converted into the local timezone, which happens to match what
- # Nightly is going to do anyway.
- MIN=60; HOUR=60*MIN; DAY=24*3600
- now = time.mktime((2005, 11, 15, 0, 5, 36, 1, 319, 0))
-
- s = scheduler.Nightly('nightly', ["a"], hour=3)
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), 2*HOUR+54*MIN+24)
-
- s = scheduler.Nightly('nightly', ["a"], minute=[3,8,54])
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), 2*MIN+24)
-
- s = scheduler.Nightly('nightly', ["a"],
- dayOfMonth=16, hour=1, minute=6)
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), DAY+HOUR+24)
-
- s = scheduler.Nightly('nightly', ["a"],
- dayOfMonth=16, hour=1, minute=3)
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), DAY+57*MIN+24)
-
- s = scheduler.Nightly('nightly', ["a"],
- dayOfMonth=15, hour=1, minute=3)
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), 57*MIN+24)
-
- s = scheduler.Nightly('nightly', ["a"],
- dayOfMonth=15, hour=0, minute=3)
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), 30*DAY-3*MIN+24)
-
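To see where the first asserted offset comes from: now is 00:05:36 on 15-Nov, and the next hour=3 run falls at 03:00:00 the same day, so the gap is 2 hours 54 minutes 24 seconds, i.e. 2*HOUR + 54*MIN + 24. The same accounting yields the other constants; for instance with dayOfMonth=15, hour=0, minute=3 the 00:03 slot has already passed, so the next run is 15-Dec 00:03, which is 30*DAY - 3*MIN + 24 seconds away.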
-
- def isImportant(self, change):
- if "important" in change.files:
- return True
- return False
-
- def testBranch(self):
- s = scheduler.Scheduler("b1", "branch1", 2, ["a","b"],
- fileIsImportant=self.isImportant)
- self.addScheduler(s)
-
- c0 = Change("carol", ["important"], "other branch", branch="other")
- s.addChange(c0)
- self.failIf(s.timer)
- self.failIf(s.importantChanges)
-
- c1 = Change("alice", ["important", "not important"], "some changes",
- branch="branch1")
- s.addChange(c1)
- c2 = Change("bob", ["not important", "boring"], "some more changes",
- branch="branch1")
- s.addChange(c2)
- c3 = Change("carol", ["important", "dull"], "even more changes",
- branch="branch1")
- s.addChange(c3)
-
- self.failUnlessEqual(s.importantChanges, [c1,c3])
- self.failUnlessEqual(s.unimportantChanges, [c2])
- self.failUnless(s.timer)
-
- d = defer.Deferred()
- reactor.callLater(4, d.callback, None)
- d.addCallback(self._testBranch_1)
- return maybeWait(d)
- def _testBranch_1(self, res):
- self.failUnlessEqual(len(self.master.sets), 1)
- s = self.master.sets[0].source
- self.failUnlessEqual(s.branch, "branch1")
- self.failUnlessEqual(s.revision, None)
- self.failUnlessEqual(len(s.changes), 3)
- self.failUnlessEqual(s.patch, None)
-
-
- def testAnyBranch(self):
- s = scheduler.AnyBranchScheduler("b1", None, 1, ["a","b"],
- fileIsImportant=self.isImportant)
- self.addScheduler(s)
-
- c1 = Change("alice", ["important", "not important"], "some changes",
- branch="branch1")
- s.addChange(c1)
- c2 = Change("bob", ["not important", "boring"], "some more changes",
- branch="branch1")
- s.addChange(c2)
- c3 = Change("carol", ["important", "dull"], "even more changes",
- branch="branch1")
- s.addChange(c3)
-
- c4 = Change("carol", ["important"], "other branch", branch="branch2")
- s.addChange(c4)
-
- c5 = Change("carol", ["important"], "default branch", branch=None)
- s.addChange(c5)
-
- d = defer.Deferred()
- reactor.callLater(2, d.callback, None)
- d.addCallback(self._testAnyBranch_1)
- return maybeWait(d)
- def _testAnyBranch_1(self, res):
- self.failUnlessEqual(len(self.master.sets), 3)
- self.master.sets.sort(lambda a,b: cmp(a.source.branch,
- b.source.branch))
-
- s1 = self.master.sets[0].source
- self.failUnlessEqual(s1.branch, None)
- self.failUnlessEqual(s1.revision, None)
- self.failUnlessEqual(len(s1.changes), 1)
- self.failUnlessEqual(s1.patch, None)
-
- s2 = self.master.sets[1].source
- self.failUnlessEqual(s2.branch, "branch1")
- self.failUnlessEqual(s2.revision, None)
- self.failUnlessEqual(len(s2.changes), 3)
- self.failUnlessEqual(s2.patch, None)
-
- s3 = self.master.sets[2].source
- self.failUnlessEqual(s3.branch, "branch2")
- self.failUnlessEqual(s3.revision, None)
- self.failUnlessEqual(len(s3.changes), 1)
- self.failUnlessEqual(s3.patch, None)
-
- def testAnyBranch2(self):
- # like testAnyBranch but without fileIsImportant
- s = scheduler.AnyBranchScheduler("b1", None, 2, ["a","b"])
- self.addScheduler(s)
- c1 = Change("alice", ["important", "not important"], "some changes",
- branch="branch1")
- s.addChange(c1)
- c2 = Change("bob", ["not important", "boring"], "some more changes",
- branch="branch1")
- s.addChange(c2)
- c3 = Change("carol", ["important", "dull"], "even more changes",
- branch="branch1")
- s.addChange(c3)
-
- c4 = Change("carol", ["important"], "other branch", branch="branch2")
- s.addChange(c4)
-
- d = defer.Deferred()
- reactor.callLater(2, d.callback, None)
- d.addCallback(self._testAnyBranch2_1)
- return maybeWait(d)
- def _testAnyBranch2_1(self, res):
- self.failUnlessEqual(len(self.master.sets), 2)
- self.master.sets.sort(lambda a,b: cmp(a.source.branch,
- b.source.branch))
- s1 = self.master.sets[0].source
- self.failUnlessEqual(s1.branch, "branch1")
- self.failUnlessEqual(s1.revision, None)
- self.failUnlessEqual(len(s1.changes), 3)
- self.failUnlessEqual(s1.patch, None)
-
- s2 = self.master.sets[1].source
- self.failUnlessEqual(s2.branch, "branch2")
- self.failUnlessEqual(s2.revision, None)
- self.failUnlessEqual(len(s2.changes), 1)
- self.failUnlessEqual(s2.patch, None)
-
-
- def createMaildir(self, jobdir):
- os.mkdir(jobdir)
- os.mkdir(os.path.join(jobdir, "new"))
- os.mkdir(os.path.join(jobdir, "cur"))
- os.mkdir(os.path.join(jobdir, "tmp"))
-
- jobcounter = 1
- def pushJob(self, jobdir, job):
- while 1:
- filename = "job_%d" % self.jobcounter
- self.jobcounter += 1
- if os.path.exists(os.path.join(jobdir, "new", filename)):
- continue
- if os.path.exists(os.path.join(jobdir, "tmp", filename)):
- continue
- if os.path.exists(os.path.join(jobdir, "cur", filename)):
- continue
- break
- f = open(os.path.join(jobdir, "tmp", filename), "w")
- f.write(job)
- f.close()
- os.rename(os.path.join(jobdir, "tmp", filename),
- os.path.join(jobdir, "new", filename))
-
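The createMaildir/pushJob helpers above follow the maildir convention for race-free delivery: a job file is first written under tmp/ and then rename()d into new/, so a reader scanning new/ never sees a partially written job. A standalone sketch of that delivery step (the helper name and signature are illustrative):

    import os

    def deliver_job(jobdir, name, payload):
        # write the job under tmp/ first, then atomically move it into new/
        tmp_path = os.path.join(jobdir, "tmp", name)
        new_path = os.path.join(jobdir, "new", name)
        f = open(tmp_path, "w")
        f.write(payload)
        f.close()
        os.rename(tmp_path, new_path)  # rename is atomic on POSIX filesystems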
- def testTryJobdir(self):
- self.master.basedir = "try_jobdir"
- os.mkdir(self.master.basedir)
- jobdir = "jobdir1"
- jobdir_abs = os.path.join(self.master.basedir, jobdir)
- self.createMaildir(jobdir_abs)
- s = scheduler.Try_Jobdir("try1", ["a", "b"], jobdir)
- self.addScheduler(s)
- self.failIf(self.master.sets)
- job1 = tryclient.createJobfile("buildsetID",
- "branch1", "123", 1, "diff",
- ["a", "b"])
- self.master.d = d = defer.Deferred()
- self.pushJob(jobdir_abs, job1)
- d.addCallback(self._testTryJobdir_1)
- # N.B.: if we don't have DNotify, we poll every 10 seconds, so don't
- # set a .timeout here shorter than that. TODO: make it possible to
- # set the polling interval, so we can make it shorter.
- return maybeWait(d, 5)
-
- def _testTryJobdir_1(self, bs):
- self.failUnlessEqual(bs.builderNames, ["a", "b"])
- self.failUnlessEqual(bs.source.branch, "branch1")
- self.failUnlessEqual(bs.source.revision, "123")
- self.failUnlessEqual(bs.source.patch, (1, "diff"))
-
-
- def testTryUserpass(self):
- up = [("alice","pw1"), ("bob","pw2")]
- s = scheduler.Try_Userpass("try2", ["a", "b"], 0, userpass=up)
- self.addScheduler(s)
- port = s.getPort()
- config = {'connect': 'pb',
- 'username': 'alice',
- 'passwd': 'pw1',
- 'master': "localhost:%d" % port,
- 'builders': ["a", "b"],
- }
- t = tryclient.Try(config)
- ss = sourcestamp.SourceStamp("branch1", "123", (1, "diff"))
- t.sourcestamp = ss
- d2 = self.master.d = defer.Deferred()
- d = t.deliverJob()
- d.addCallback(self._testTryUserpass_1, t, d2)
- return maybeWait(d, 5)
- testTryUserpass.timeout = 5
- def _testTryUserpass_1(self, res, t, d2):
- # at this point, the Try object should have a RemoteReference to the
- # status object. The FakeMaster returns a stub.
- self.failUnless(t.buildsetStatus)
- d2.addCallback(self._testTryUserpass_2, t)
- return d2
- def _testTryUserpass_2(self, bs, t):
- # this should be the BuildSet submitted by the TryScheduler
- self.failUnlessEqual(bs.builderNames, ["a", "b"])
- self.failUnlessEqual(bs.source.branch, "branch1")
- self.failUnlessEqual(bs.source.revision, "123")
- self.failUnlessEqual(bs.source.patch, (1, "diff"))
-
- t.cleanup()
-
- # twisted-2.0.1 (but not later versions) seems to require a reactor
- # iteration before stopListening actually works. TODO: investigate
- # this.
- d = defer.Deferred()
- reactor.callLater(0, d.callback, None)
- return d
-
- def testGetBuildSets(self):
- # validate IStatus.getBuildSets
- s = status.builder.Status(None, ".")
- bs1 = buildset.BuildSet(["a","b"], sourcestamp.SourceStamp(),
- reason="one", bsid="1")
- s.buildsetSubmitted(bs1.status)
- self.failUnlessEqual(s.getBuildSets(), [bs1.status])
- bs1.status.notifyFinishedWatchers()
- self.failUnlessEqual(s.getBuildSets(), [])
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_slavecommand.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_slavecommand.py
deleted file mode 100644
index dd791983e..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_slavecommand.py
+++ /dev/null
@@ -1,265 +0,0 @@
-# -*- test-case-name: buildbot.test.test_slavecommand -*-
-
-from twisted.trial import unittest
-from twisted.internet import reactor, interfaces
-from twisted.python import util, runtime, failure
-from buildbot.twcompat import maybeWait
-
-noisy = False
-if noisy:
- from twisted.python.log import startLogging
- import sys
- startLogging(sys.stdout)
-
-import os, re, sys
-import signal
-
-from buildbot.slave import commands
-SlaveShellCommand = commands.SlaveShellCommand
-
-# test slavecommand.py by running the various commands with a fake
-# SlaveBuilder object that logs the calls to sendUpdate()
-
-def findDir():
- # the same directory that holds this script
- return util.sibpath(__file__, ".")
-
-class FakeSlaveBuilder:
- def __init__(self, usePTY):
- self.updates = []
- self.basedir = findDir()
- self.usePTY = usePTY
-
- def sendUpdate(self, data):
- if noisy: print "FakeSlaveBuilder.sendUpdate", data
- self.updates.append(data)
-
-
-class SignalMixin:
- sigchldHandler = None
-
- def setUpClass(self):
- # make sure SIGCHLD handler is installed, as it should be on
- # reactor.run(). The problem is the reactor may not have been run when this
- # test runs.
- if hasattr(reactor, "_handleSigchld") and hasattr(signal, "SIGCHLD"):
- self.sigchldHandler = signal.signal(signal.SIGCHLD,
- reactor._handleSigchld)
-
- def tearDownClass(self):
- if self.sigchldHandler:
- signal.signal(signal.SIGCHLD, self.sigchldHandler)
-
-
-class ShellBase(SignalMixin):
-
- def setUp(self):
- self.builder = FakeSlaveBuilder(self.usePTY)
-
- def failUnlessIn(self, substring, string):
- self.failUnless(string.find(substring) != -1)
-
- def getfile(self, which):
- got = ""
- for r in self.builder.updates:
- if r.has_key(which):
- got += r[which]
- return got
-
- def checkOutput(self, expected):
- """
- @type expected: list of (streamname, contents) tuples
- @param expected: the expected output
- """
- expected_linesep = os.linesep
- if self.usePTY:
- # PTYs change the line ending. I'm not sure why.
- expected_linesep = "\r\n"
- expected = [(stream, contents.replace("\n", expected_linesep, 1000))
- for (stream, contents) in expected]
- if self.usePTY:
- # PTYs merge stdout+stderr into a single stream
- expected = [('stdout', contents)
- for (stream, contents) in expected]
- # now merge everything into one string per stream
- streams = {}
- for (stream, contents) in expected:
- streams[stream] = streams.get(stream, "") + contents
- for (stream, contents) in streams.items():
- got = self.getfile(stream)
- self.assertEquals(got, contents)
-
- def getrc(self):
- self.failUnless(self.builder.updates[-1].has_key('rc'))
- got = self.builder.updates[-1]['rc']
- return got
- def checkrc(self, expected):
- got = self.getrc()
- self.assertEquals(got, expected)
-
- def testShell1(self):
- cmd = sys.executable + " emit.py 0"
- args = {'command': cmd, 'workdir': '.', 'timeout': 60}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- expected = [('stdout', "this is stdout\n"),
- ('stderr', "this is stderr\n")]
- d.addCallback(self._checkPass, expected, 0)
- return maybeWait(d)
-
- def _checkPass(self, res, expected, rc):
- self.checkOutput(expected)
- self.checkrc(rc)
-
- def testShell2(self):
- cmd = [sys.executable, "emit.py", "0"]
- args = {'command': cmd, 'workdir': '.', 'timeout': 60}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- expected = [('stdout', "this is stdout\n"),
- ('stderr', "this is stderr\n")]
- d.addCallback(self._checkPass, expected, 0)
- return maybeWait(d)
-
- def testShellRC(self):
- cmd = [sys.executable, "emit.py", "1"]
- args = {'command': cmd, 'workdir': '.', 'timeout': 60}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- expected = [('stdout', "this is stdout\n"),
- ('stderr', "this is stderr\n")]
- d.addCallback(self._checkPass, expected, 1)
- return maybeWait(d)
-
- def testShellEnv(self):
- cmd = sys.executable + " emit.py 0"
- args = {'command': cmd, 'workdir': '.',
- 'env': {'EMIT_TEST': "envtest"}, 'timeout': 60}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- expected = [('stdout', "this is stdout\n"),
- ('stderr', "this is stderr\n"),
- ('stdout', "EMIT_TEST: envtest\n"),
- ]
- d.addCallback(self._checkPass, expected, 0)
- return maybeWait(d)
-
- def testShellSubdir(self):
- cmd = sys.executable + " emit.py 0"
- args = {'command': cmd, 'workdir': "subdir", 'timeout': 60}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- expected = [('stdout', "this is stdout in subdir\n"),
- ('stderr', "this is stderr\n")]
- d.addCallback(self._checkPass, expected, 0)
- return maybeWait(d)
-
- def testShellMissingCommand(self):
- args = {'command': "/bin/EndWorldHungerAndMakePigsFly",
- 'workdir': '.', 'timeout': 10,
- 'env': {"LC_ALL": "C"},
- }
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- d.addCallback(self._testShellMissingCommand_1)
- return maybeWait(d)
- def _testShellMissingCommand_1(self, res):
- self.failIfEqual(self.getrc(), 0)
- # we used to check the error message to make sure it said something
- # about a missing command, but there are a variety of shells out
- # there, and they emit messages in a variety of languages, so we
- # stopped trying.
-
- def testTimeout(self):
- args = {'command': [sys.executable, "sleep.py", "10"],
- 'workdir': '.', 'timeout': 2}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- d.addCallback(self._testTimeout_1)
- return maybeWait(d)
- def _testTimeout_1(self, res):
- self.failIfEqual(self.getrc(), 0)
- got = self.getfile('header')
- self.failUnlessIn("command timed out: 2 seconds without output", got)
- if runtime.platformType == "posix":
- # the "killing pid" message is not present in windows
- self.failUnlessIn("killing pid", got)
- # but the process *ought* to be killed somehow
- self.failUnlessIn("process killed by signal", got)
- #print got
- if runtime.platformType != 'posix':
- testTimeout.todo = "timeout doesn't appear to work under windows"
-
- def testInterrupt1(self):
- args = {'command': [sys.executable, "sleep.py", "10"],
- 'workdir': '.', 'timeout': 20}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- reactor.callLater(1, c.interrupt)
- d.addCallback(self._testInterrupt1_1)
- return maybeWait(d)
- def _testInterrupt1_1(self, res):
- self.failIfEqual(self.getrc(), 0)
- got = self.getfile('header')
- self.failUnlessIn("command interrupted", got)
- if runtime.platformType == "posix":
- self.failUnlessIn("process killed by signal", got)
- if runtime.platformType != 'posix':
- testInterrupt1.todo = "interrupt doesn't appear to work under windows"
-
-
- # todo: twisted-specific command tests
-
-class Shell(ShellBase, unittest.TestCase):
- usePTY = False
-
- def testInterrupt2(self):
- # test the backup timeout. This doesn't work under a PTY, because the
- # transport.loseConnection we do in the timeout handler actually
- # *does* kill the process.
- args = {'command': [sys.executable, "sleep.py", "5"],
- 'workdir': '.', 'timeout': 20}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- c.command.BACKUP_TIMEOUT = 1
- # make it unable to kill the child, by changing the signal it uses
- # from SIGKILL to the do-nothing signal 0.
- c.command.KILL = None
- reactor.callLater(1, c.interrupt)
- d.addBoth(self._testInterrupt2_1)
- return maybeWait(d)
- def _testInterrupt2_1(self, res):
- # the slave should raise a TimeoutError exception. In a normal build
- # process (i.e. one that uses step.RemoteShellCommand), this
- # exception will be handed to the Step, which will acquire an ERROR
- # status. In our test environment, it isn't such a big deal.
- self.failUnless(isinstance(res, failure.Failure),
- "res is not a Failure: %s" % (res,))
- self.failUnless(res.check(commands.TimeoutError))
- self.checkrc(-1)
- return
- # the command is still actually running. Start another command, to
- # make sure that a) the old command's output doesn't interfere with
- # the new one, and b) the old command's actual termination doesn't
- # break anything
- args = {'command': [sys.executable, "sleep.py", "5"],
- 'workdir': '.', 'timeout': 20}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- d.addCallback(self._testInterrupt2_2)
- return d
- def _testInterrupt2_2(self, res):
- self.checkrc(0)
- # N.B.: under windows, the trial process hangs out for another few
- # seconds. I assume that the win32eventreactor is waiting for one of
- # the lingering child processes to really finish.
-
-haveProcess = interfaces.IReactorProcess(reactor, None)
-if runtime.platformType == 'posix':
- # test with PTYs also
- class ShellPTY(ShellBase, unittest.TestCase):
- usePTY = True
- if not haveProcess:
- ShellPTY.skip = "this reactor doesn't support IReactorProcess"
-if not haveProcess:
- Shell.skip = "this reactor doesn't support IReactorProcess"
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_slaves.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_slaves.py
deleted file mode 100644
index 588e08f0b..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_slaves.py
+++ /dev/null
@@ -1,228 +0,0 @@
-# -*- test-case-name: buildbot.test.test_slaves -*-
-
-from twisted.trial import unittest
-from buildbot.twcompat import maybeWait
-from twisted.internet import defer, reactor
-
-from buildbot.test.runutils import RunMixin
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process.base import BuildRequest
-from buildbot.status.builder import SUCCESS
-
-config_1 = """
-from buildbot.process import step, factory
-s = factory.s
-
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'sekrit'), ('bot2', 'sekrit'), ('bot3', 'sekrit')]
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 0
-c['schedulers'] = []
-
-f = factory.BuildFactory([s(step.RemoteDummy, timeout=1)])
-
-c['builders'] = [
- {'name': 'b1', 'slavenames': ['bot1','bot2','bot3'],
- 'builddir': 'b1', 'factory': f},
- ]
-"""
-
-class Slave(RunMixin, unittest.TestCase):
-
- def setUp(self):
- RunMixin.setUp(self)
- self.master.loadConfig(config_1)
- self.master.startService()
- d = self.connectSlave(["b1"])
- d.addCallback(lambda res: self.connectSlave(["b1"], "bot2"))
- return maybeWait(d)
-
- def doBuild(self, buildername):
- br = BuildRequest("forced", SourceStamp())
- d = br.waitUntilFinished()
- self.control.getBuilder(buildername).requestBuild(br)
- return d
-
- def testSequence(self):
- # make sure both slaves appear in the list.
- attached_slaves = [c for c in self.master.botmaster.slaves.values()
- if c.slave]
- self.failUnlessEqual(len(attached_slaves), 2)
- b = self.master.botmaster.builders["b1"]
- self.failUnlessEqual(len(b.slaves), 2)
-
- # since the current scheduling algorithm is simple and does not
- # rotate or attempt any sort of load-balancing, two builds in
- # sequence should both use the first slave. This may change later if
- # we move to a more sophisticated scheme.
-
- d = self.doBuild("b1")
- d.addCallback(self._testSequence_1)
- return maybeWait(d)
- def _testSequence_1(self, res):
- self.failUnlessEqual(res.getResults(), SUCCESS)
- self.failUnlessEqual(res.getSlavename(), "bot1")
-
- d = self.doBuild("b1")
- d.addCallback(self._testSequence_2)
- return d
- def _testSequence_2(self, res):
- self.failUnlessEqual(res.getSlavename(), "bot1")
-
-
- def testSimultaneous(self):
- # make sure we can actually run two builds at the same time
- d1 = self.doBuild("b1")
- d2 = self.doBuild("b1")
- d1.addCallback(self._testSimultaneous_1, d2)
- return maybeWait(d1)
- def _testSimultaneous_1(self, res, d2):
- self.failUnlessEqual(res.getResults(), SUCCESS)
- self.failUnlessEqual(res.getSlavename(), "bot1")
- d2.addCallback(self._testSimultaneous_2)
- return d2
- def _testSimultaneous_2(self, res):
- self.failUnlessEqual(res.getResults(), SUCCESS)
- self.failUnlessEqual(res.getSlavename(), "bot2")
-
- def testFallback1(self):
- # detach the first slave, verify that a build is run using the second
- # slave instead
- d = self.shutdownSlave("bot1", "b1")
- d.addCallback(self._testFallback1_1)
- return maybeWait(d)
- def _testFallback1_1(self, res):
- attached_slaves = [c for c in self.master.botmaster.slaves.values()
- if c.slave]
- self.failUnlessEqual(len(attached_slaves), 1)
- self.failUnlessEqual(len(self.master.botmaster.builders["b1"].slaves),
- 1)
- d = self.doBuild("b1")
- d.addCallback(self._testFallback1_2)
- return d
- def _testFallback1_2(self, res):
- self.failUnlessEqual(res.getResults(), SUCCESS)
- self.failUnlessEqual(res.getSlavename(), "bot2")
-
- def testFallback2(self):
- # Disable the first slave, so that a slaveping will timeout. Then
- # start a build, and verify that the non-failing (second) one is
- # claimed for the build, and that the failing one is removed from the
- # list.
-
- # reduce the ping time so we'll failover faster
- self.master.botmaster.builders["b1"].START_BUILD_TIMEOUT = 1
- self.disappearSlave("bot1", "b1")
- d = self.doBuild("b1")
- d.addCallback(self._testFallback2_1)
- return maybeWait(d)
- def _testFallback2_1(self, res):
- self.failUnlessEqual(res.getResults(), SUCCESS)
- self.failUnlessEqual(res.getSlavename(), "bot2")
- b1slaves = self.master.botmaster.builders["b1"].slaves
- self.failUnlessEqual(len(b1slaves), 1)
- self.failUnlessEqual(b1slaves[0].slave.slavename, "bot2")
-
-
- def notFinished(self, brs):
- # utility method
- builds = brs.getBuilds()
- self.failIf(len(builds) > 1)
- if builds:
- self.failIf(builds[0].isFinished())
-
- def testDontClaimPingingSlave(self):
- # have two slaves connect for the same builder. Do something to the
- # first one so that slavepings are delayed (but do not fail
- # outright).
- timers = []
- self.slaves['bot1'].debugOpts["stallPings"] = (10, timers)
- br = BuildRequest("forced", SourceStamp())
- d1 = br.waitUntilFinished()
- self.control.getBuilder("b1").requestBuild(br)
- s1 = br.status # this is a BuildRequestStatus
- # give it a chance to start pinging
- d2 = defer.Deferred()
- d2.addCallback(self._testDontClaimPingingSlave_1, d1, s1, timers)
- reactor.callLater(1, d2.callback, None)
- return maybeWait(d2)
- def _testDontClaimPingingSlave_1(self, res, d1, s1, timers):
- # now the first build is running (waiting on the ping), so start the
- # second build. This should claim the second slave, not the first,
- # because the first is busy doing the ping.
- self.notFinished(s1)
- d3 = self.doBuild("b1")
- d3.addCallback(self._testDontClaimPingingSlave_2, d1, s1, timers)
- return d3
- def _testDontClaimPingingSlave_2(self, res, d1, s1, timers):
- self.failUnlessEqual(res.getSlavename(), "bot2")
- self.notFinished(s1)
- # now let the ping complete
- self.failUnlessEqual(len(timers), 1)
- timers[0].reset(0)
- d1.addCallback(self._testDontClaimPingingSlave_3)
- return d1
- def _testDontClaimPingingSlave_3(self, res):
- self.failUnlessEqual(res.getSlavename(), "bot1")
-
-
-class Slave2(RunMixin, unittest.TestCase):
-
- revision = 0
-
- def setUp(self):
- RunMixin.setUp(self)
- self.master.loadConfig(config_1)
- self.master.startService()
-
- def doBuild(self, buildername, reason="forced"):
- # we need to prevent these builds from being merged, so we create
- # each of them with a different revision specifier. The revision is
- # ignored because our build process does not have a source checkout
- # step.
- self.revision += 1
- br = BuildRequest(reason, SourceStamp(revision=self.revision))
- d = br.waitUntilFinished()
- self.control.getBuilder(buildername).requestBuild(br)
- return d
-
- def testFirstComeFirstServed(self):
- # submit three builds, then connect a slave which fails the
- # slaveping. The first build will claim the slave, do the slaveping,
- # give up, and re-queue the build. Verify that the build gets
- # re-queued in front of all other builds. This may be tricky, because
- # the other builds may attempt to claim the just-failed slave.
-
- d1 = self.doBuild("b1", "first")
- d2 = self.doBuild("b1", "second")
- #buildable = self.master.botmaster.builders["b1"].buildable
- #print [b.reason for b in buildable]
-
- # specifically, I want the poor build to get precedence over any
- # others that were waiting. To test this, we need more builds than
- # slaves.
-
- # now connect a broken slave. The first build will start as soon as it
- # connects, so by the time we get to our _1 method, the ill-fated
- # build has already started.
- d = self.connectSlave(["b1"], opts={"failPingOnce": True})
- d.addCallback(self._testFirstComeFirstServed_1, d1, d2)
- return maybeWait(d)
- def _testFirstComeFirstServed_1(self, res, d1, d2):
- # the master has sent the slaveping. When this is received, it will
- # fail, causing the master to hang up on the slave. When it
- # reconnects, it should find the first build at the front of the
- # queue. If we simply wait for both builds to complete, then look at
- # the status logs, we should see that the builds ran in the correct
- # order.
-
- d = defer.DeferredList([d1,d2])
- d.addCallback(self._testFirstComeFirstServed_2)
- return d
- def _testFirstComeFirstServed_2(self, res):
- b = self.status.getBuilder("b1")
- builds = b.getBuild(0), b.getBuild(1)
- reasons = [build.getReason() for build in builds]
- self.failUnlessEqual(reasons, ["first", "second"])
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_status.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_status.py
deleted file mode 100644
index d8c0eb0da..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_status.py
+++ /dev/null
@@ -1,949 +0,0 @@
-# -*- test-case-name: buildbot.test.test_status -*-
-
-import email, os
-
-from twisted.internet import defer, reactor
-from twisted.trial import unittest
-
-from buildbot import interfaces
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process.base import BuildRequest
-from buildbot.twcompat import implements, providedBy, maybeWait
-from buildbot.status import builder, base
-try:
- from buildbot.status import mail
-except ImportError:
- mail = None
-from buildbot.status import progress, client # NEEDS COVERAGE
-from buildbot.test.runutils import RunMixin
-
-class MyStep:
- build = None
- def getName(self):
- return "step"
-
-class MyLogFileProducer(builder.LogFileProducer):
- # The reactor.callLater(0) in LogFileProducer.resumeProducing is a bit of
- # a nuisance from a testing point of view. This subclass adds a Deferred
- # to that call so we can find out when it is complete.
- def resumeProducing(self):
- d = defer.Deferred()
- reactor.callLater(0, self._resumeProducing, d)
- return d
- def _resumeProducing(self, d):
- builder.LogFileProducer._resumeProducing(self)
- reactor.callLater(0, d.callback, None)
-
-class MyLog(builder.LogFile):
- def __init__(self, basedir, name, text=None, step=None):
- self.fakeBuilderBasedir = basedir
- if not step:
- step = MyStep()
- builder.LogFile.__init__(self, step, name, name)
- if text:
- self.addStdout(text)
- self.finish()
- def getFilename(self):
- return os.path.join(self.fakeBuilderBasedir, self.name)
-
- def subscribeConsumer(self, consumer):
- p = MyLogFileProducer(self, consumer)
- d = p.resumeProducing()
- return d
-
-class MyHTMLLog(builder.HTMLLogFile):
- def __init__(self, basedir, name, html):
- step = MyStep()
- builder.HTMLLogFile.__init__(self, step, name, name, html)
-
-class MyLogSubscriber:
- def __init__(self):
- self.chunks = []
- def logChunk(self, build, step, log, channel, text):
- self.chunks.append((channel, text))
-
-class MyLogConsumer:
- def __init__(self, limit=None):
- self.chunks = []
- self.finished = False
- self.limit = limit
- def registerProducer(self, producer, streaming):
- self.producer = producer
- self.streaming = streaming
- def unregisterProducer(self):
- self.producer = None
- def writeChunk(self, chunk):
- self.chunks.append(chunk)
- if self.limit:
- self.limit -= 1
- if self.limit == 0:
- self.producer.pauseProducing()
- def finish(self):
- self.finished = True
-
-if mail:
- class MyMailer(mail.MailNotifier):
- def sendMessage(self, m, recipients):
- self.parent.messages.append((m, recipients))
-
-class MyStatus:
- def getBuildbotURL(self):
- return self.url
- def getURLForThing(self, thing):
- return None
-
-class MyBuilder(builder.BuilderStatus):
- nextBuildNumber = 0
-
-class MyBuild(builder.BuildStatus):
- testlogs = []
- def __init__(self, parent, number, results):
- builder.BuildStatus.__init__(self, parent, number)
- self.results = results
- self.source = SourceStamp(revision="1.14")
- self.reason = "build triggered by changes"
- self.finished = True
- def getLogs(self):
- return self.testlogs
-
-class MyLookup:
- if implements:
- implements(interfaces.IEmailLookup)
- else:
- __implements__ = interfaces.IEmailLookup,
-
- def getAddress(self, user):
- d = defer.Deferred()
- # With me now is Mr Thomas Walters of West Hartlepool who is totally
- # invisible.
- if user == "Thomas_Walters":
- d.callback(None)
- else:
- d.callback(user + "@" + "dev.com")
- return d
-
-class Mail(unittest.TestCase):
-
- def setUp(self):
- self.builder = MyBuilder("builder1")
-
- def stall(self, res, timeout):
- d = defer.Deferred()
- reactor.callLater(timeout, d.callback, res)
- return d
-
- def makeBuild(self, number, results):
- return MyBuild(self.builder, number, results)
-
- def failUnlessIn(self, substring, string):
- self.failUnless(string.find(substring) != -1)
-
- def getBuildbotURL(self):
- return "BUILDBOT_URL"
-
- def getURLForThing(self, thing):
- return None
-
- def testBuild1(self):
- mailer = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup=mail.Domain("dev.com"))
- mailer.parent = self
- mailer.status = self
- self.messages = []
-
- b1 = self.makeBuild(3, builder.SUCCESS)
- b1.blamelist = ["bob"]
-
- mailer.buildFinished("builder1", b1, b1.results)
- self.failUnless(len(self.messages) == 1)
- m,r = self.messages.pop()
- t = m.as_string()
- self.failUnlessIn("To: bob@dev.com, recip2@example.com, "
- "recip@example.com\n", t)
- self.failUnlessIn("From: buildbot@example.com\n", t)
- self.failUnlessIn("Subject: buildbot success in builder1\n", t)
- self.failUnlessIn("Date: ", t)
- self.failUnlessIn("Build succeeded!\n", t)
- self.failUnlessIn("Buildbot URL: BUILDBOT_URL\n", t)
-
- def testBuild2(self):
- mailer = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup="dev.com",
- sendToInterestedUsers=False)
- mailer.parent = self
- mailer.status = self
- self.messages = []
-
- b1 = self.makeBuild(3, builder.SUCCESS)
- b1.blamelist = ["bob"]
-
- mailer.buildFinished("builder1", b1, b1.results)
- self.failUnless(len(self.messages) == 1)
- m,r = self.messages.pop()
- t = m.as_string()
- self.failUnlessIn("To: recip2@example.com, "
- "recip@example.com\n", t)
- self.failUnlessIn("From: buildbot@example.com\n", t)
- self.failUnlessIn("Subject: buildbot success in builder1\n", t)
- self.failUnlessIn("Build succeeded!\n", t)
- self.failUnlessIn("Buildbot URL: BUILDBOT_URL\n", t)
-
- def testBuildStatusCategory(self):
- # a status client only interested in a category should only receive
- # from that category
- mailer = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup="dev.com",
- sendToInterestedUsers=False,
- categories=["debug"])
-
- mailer.parent = self
- mailer.status = self
- self.messages = []
-
- b1 = self.makeBuild(3, builder.SUCCESS)
- b1.blamelist = ["bob"]
-
- mailer.buildFinished("builder1", b1, b1.results)
- self.failIf(self.messages)
-
- def testBuilderCategory(self):
- # a builder in a certain category should notify status clients that
- # did not list categories, or categories including this one
- mailer1 = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup="dev.com",
- sendToInterestedUsers=False)
- mailer2 = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup="dev.com",
- sendToInterestedUsers=False,
- categories=["active"])
- mailer3 = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup="dev.com",
- sendToInterestedUsers=False,
- categories=["active", "debug"])
-
- builderd = MyBuilder("builder2", "debug")
-
- mailer1.parent = self
- mailer1.status = self
- mailer2.parent = self
- mailer2.status = self
- mailer3.parent = self
- mailer3.status = self
- self.messages = []
-
- t = mailer1.builderAdded("builder2", builderd)
- self.assertEqual(len(mailer1.watched), 1)
- self.assertEqual(t, mailer1)
- t = mailer2.builderAdded("builder2", builderd)
- self.assertEqual(len(mailer2.watched), 0)
- self.assertEqual(t, None)
- t = mailer3.builderAdded("builder2", builderd)
- self.assertEqual(len(mailer3.watched), 1)
- self.assertEqual(t, mailer3)
-
- b2 = MyBuild(builderd, 3, builder.SUCCESS)
- b2.blamelist = ["bob"]
-
- mailer1.buildFinished("builder2", b2, b2.results)
- self.failUnlessEqual(len(self.messages), 1)
- self.messages = []
- mailer2.buildFinished("builder2", b2, b2.results)
- self.failUnlessEqual(len(self.messages), 0)
- self.messages = []
- mailer3.buildFinished("builder2", b2, b2.results)
- self.failUnlessEqual(len(self.messages), 1)
-
- def testFailure(self):
- mailer = MyMailer(fromaddr="buildbot@example.com", mode="problem",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup=MyLookup())
- mailer.parent = self
- mailer.status = self
- self.messages = []
-
- b1 = self.makeBuild(3, builder.SUCCESS)
- b1.blamelist = ["dev1", "dev2"]
- b2 = self.makeBuild(4, builder.FAILURE)
- b2.setText(["snarkleack", "polarization", "failed"])
- b2.blamelist = ["dev3", "dev3", "dev3", "dev4",
- "Thomas_Walters"]
- mailer.buildFinished("builder1", b1, b1.results)
- self.failIf(self.messages)
- mailer.buildFinished("builder1", b2, b2.results)
- self.failUnless(len(self.messages) == 1)
- m,r = self.messages.pop()
- t = m.as_string()
- self.failUnlessIn("To: dev3@dev.com, dev4@dev.com, "
- "recip2@example.com, recip@example.com\n", t)
- self.failUnlessIn("From: buildbot@example.com\n", t)
- self.failUnlessIn("Subject: buildbot failure in builder1\n", t)
- self.failUnlessIn("The Buildbot has detected a new failure", t)
- self.failUnlessIn("BUILD FAILED: snarkleack polarization failed\n", t)
- self.failUnlessEqual(r, ["dev3@dev.com", "dev4@dev.com",
- "recip2@example.com", "recip@example.com"])
-
- def testLogs(self):
- basedir = "test_status_logs"
- os.mkdir(basedir)
- mailer = MyMailer(fromaddr="buildbot@example.com", addLogs=True,
- extraRecipients=["recip@example.com",
- "recip2@example.com"])
- mailer.parent = self
- mailer.status = self
- self.messages = []
-
- b1 = self.makeBuild(3, builder.WARNINGS)
- b1.testlogs = [MyLog(basedir, 'compile', "Compile log here\n"),
- MyLog(basedir,
- 'test', "Test log here\nTest 4 failed\n"),
- ]
- b1.text = ["unusual", "gnarzzler", "output"]
- mailer.buildFinished("builder1", b1, b1.results)
- self.failUnless(len(self.messages) == 1)
- m,r = self.messages.pop()
- t = m.as_string()
- self.failUnlessIn("Subject: buildbot warnings in builder1\n", t)
- m2 = email.message_from_string(t)
- p = m2.get_payload()
- self.failUnlessEqual(len(p), 3)
-
- self.failUnlessIn("Build Had Warnings: unusual gnarzzler output\n",
- p[0].get_payload())
-
- self.failUnlessEqual(p[1].get_filename(), "step.compile")
- self.failUnlessEqual(p[1].get_payload(), "Compile log here\n")
-
- self.failUnlessEqual(p[2].get_filename(), "step.test")
- self.failUnlessIn("Test log here\n", p[2].get_payload())
-
- def testMail(self):
- basedir = "test_status_mail"
- os.mkdir(basedir)
- dest = os.environ.get("BUILDBOT_TEST_MAIL")
- if not dest:
- raise unittest.SkipTest("define BUILDBOT_TEST_MAIL=dest to run this")
- mailer = mail.MailNotifier(fromaddr="buildbot@example.com",
- addLogs=True,
- extraRecipients=[dest])
- s = MyStatus()
- s.url = "project URL"
- mailer.status = s
-
- b1 = self.makeBuild(3, builder.SUCCESS)
- b1.testlogs = [MyLog(basedir, 'compile', "Compile log here\n"),
- MyLog(basedir,
- 'test', "Test log here\nTest 4 failed\n"),
- ]
-
- print "sending mail to", dest
- d = mailer.buildFinished("builder1", b1, b1.results)
- # When this fires, the mail has been sent, but the SMTP connection is
- # still up (because smtp.sendmail relies upon the server to hang up).
- # Spin for a moment to avoid the "unclean reactor" warning that Trial
- # gives us if we finish before the socket is disconnected. Really,
- # sendmail() ought to hang up the connection once it is finished:
- # otherwise a malicious SMTP server could make us consume lots of
- # memory.
- d.addCallback(self.stall, 0.1)
- return maybeWait(d)
-
-if not mail:
- Mail.skip = "the Twisted Mail package is not installed"
-
-class Progress(unittest.TestCase):
- def testWavg(self):
- bp = progress.BuildProgress([])
- e = progress.Expectations(bp)
- # wavg(old, current)
- self.failUnlessEqual(e.wavg(None, None), None)
- self.failUnlessEqual(e.wavg(None, 3), 3)
- self.failUnlessEqual(e.wavg(3, None), 3)
- self.failUnlessEqual(e.wavg(3, 4), 3.5)
- e.decay = 0.1
- self.failUnlessEqual(e.wavg(3, 4), 3.1)
-
-
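The values asserted in testWavg above are consistent with a decaying weighted average in which a missing sample simply yields the other one. A sketch that reproduces those numbers (the decay parameter and its default are assumptions for illustration, not necessarily the real Expectations implementation):

    def wavg(old, current, decay=0.5):
        # a missing sample falls back to the other value; otherwise blend them
        if old is None:
            return current
        if current is None:
            return old
        return (1 - decay) * old + decay * current

With the default decay of 0.5 this gives wavg(3, 4) == 3.5, and with decay = 0.1 it gives 3.1, matching the test.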
-class Results(unittest.TestCase):
-
- def testAddResults(self):
- b = builder.BuildStatus(builder.BuilderStatus("test"), 12)
- testname = ("buildbot", "test", "test_status", "Results",
- "testAddResults")
- r1 = builder.TestResult(name=testname,
- results=builder.SUCCESS,
- text=["passed"],
- logs={'output': ""},
- )
- b.addTestResult(r1)
-
- res = b.getTestResults()
- self.failUnlessEqual(res.keys(), [testname])
- t = res[testname]
- self.failUnless(providedBy(t, interfaces.ITestResult))
- self.failUnlessEqual(t.getName(), testname)
- self.failUnlessEqual(t.getResults(), builder.SUCCESS)
- self.failUnlessEqual(t.getText(), ["passed"])
- self.failUnlessEqual(t.getLogs(), {'output': ""})
-
-class Log(unittest.TestCase):
- def setUpClass(self):
- self.basedir = "status_log_add"
- os.mkdir(self.basedir)
-
- def testAdd(self):
- l = MyLog(self.basedir, "compile", step=13)
- self.failUnlessEqual(l.getName(), "compile")
- self.failUnlessEqual(l.getStep(), 13)
- l.addHeader("HEADER\n")
- l.addStdout("Some text\n")
- l.addStderr("Some error\n")
- l.addStdout("Some more text\n")
- self.failIf(l.isFinished())
- l.finish()
- self.failUnless(l.isFinished())
- self.failUnlessEqual(l.getText(),
- "Some text\nSome error\nSome more text\n")
- self.failUnlessEqual(l.getTextWithHeaders(),
- "HEADER\n" +
- "Some text\nSome error\nSome more text\n")
- self.failUnlessEqual(len(list(l.getChunks())), 4)
-
- self.failUnless(l.hasContents())
- os.unlink(l.getFilename())
- self.failIf(l.hasContents())
-
- def TODO_testDuplicate(self):
- # create multiple logs for the same step with the same logname, make
- # sure their on-disk filenames are suitably uniquified. This
- # functionality actually lives in BuildStepStatus and BuildStatus, so
- # this test must involve more than just the MyLog class.
-
- # naive approach, doesn't work
- l1 = MyLog(self.basedir, "duplicate")
- l1.addStdout("Some text\n")
- l1.finish()
- l2 = MyLog(self.basedir, "duplicate")
- l2.addStdout("Some more text\n")
- l2.finish()
- self.failIfEqual(l1.getFilename(), l2.getFilename())
-
- def testMerge1(self):
- l = MyLog(self.basedir, "merge1")
- l.addHeader("HEADER\n")
- l.addStdout("Some text\n")
- l.addStdout("Some more text\n")
- l.addStdout("more\n")
- l.finish()
- self.failUnlessEqual(l.getText(),
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(l.getTextWithHeaders(),
- "HEADER\n" +
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(len(list(l.getChunks())), 2)
-
- def testMerge2(self):
- l = MyLog(self.basedir, "merge2")
- l.addHeader("HEADER\n")
- for i in xrange(1000):
- l.addStdout("aaaa")
- for i in xrange(30):
- l.addStderr("bbbb")
- for i in xrange(10):
- l.addStdout("cc")
- target = 1000*"aaaa" + 30 * "bbbb" + 10 * "cc"
- self.failUnlessEqual(len(l.getText()), len(target))
- self.failUnlessEqual(l.getText(), target)
- l.finish()
- self.failUnlessEqual(len(l.getText()), len(target))
- self.failUnlessEqual(l.getText(), target)
- self.failUnlessEqual(len(list(l.getChunks())), 4)
-
- def testMerge3(self):
- l = MyLog(self.basedir, "merge3")
- l.chunkSize = 100
- l.addHeader("HEADER\n")
- for i in xrange(8):
- l.addStdout(10*"a")
- for i in xrange(8):
- l.addStdout(10*"a")
- self.failUnlessEqual(list(l.getChunks()),
- [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, 100*"a"),
- (builder.STDOUT, 60*"a")])
- l.finish()
- self.failUnlessEqual(l.getText(), 160*"a")
-
- def testChunks(self):
- l = MyLog(self.basedir, "chunks")
- c1 = l.getChunks()
- l.addHeader("HEADER\n")
- l.addStdout("Some text\n")
- self.failUnlessEqual("".join(l.getChunks(onlyText=True)),
- "HEADER\nSome text\n")
- c2 = l.getChunks()
-
- l.addStdout("Some more text\n")
- self.failUnlessEqual("".join(l.getChunks(onlyText=True)),
- "HEADER\nSome text\nSome more text\n")
- c3 = l.getChunks()
-
- l.addStdout("more\n")
- l.finish()
-
- self.failUnlessEqual(list(c1), [])
- self.failUnlessEqual(list(c2), [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, "Some text\n")])
- self.failUnlessEqual(list(c3), [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT,
- "Some text\nSome more text\n")])
-
- self.failUnlessEqual(l.getText(),
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(l.getTextWithHeaders(),
- "HEADER\n" +
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(len(list(l.getChunks())), 2)
-
- def testUpgrade(self):
- l = MyLog(self.basedir, "upgrade")
- l.addHeader("HEADER\n")
- l.addStdout("Some text\n")
- l.addStdout("Some more text\n")
- l.addStdout("more\n")
- l.finish()
- self.failUnless(l.hasContents())
- # now doctor it to look like a 0.6.4-era non-upgraded logfile
- l.entries = list(l.getChunks())
- del l.filename
- os.unlink(l.getFilename())
- # now make sure we can upgrade it
- l.upgrade("upgrade")
- self.failUnlessEqual(l.getText(),
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(len(list(l.getChunks())), 2)
- self.failIf(l.entries)
-
- # now, do it again, but make it look like an upgraded 0.6.4 logfile
- # (i.e. l.filename is missing, but the contents are there on disk)
- l.entries = list(l.getChunks())
- del l.filename
- l.upgrade("upgrade")
- self.failUnlessEqual(l.getText(),
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(len(list(l.getChunks())), 2)
- self.failIf(l.entries)
- self.failUnless(l.hasContents())
-
- def testHTMLUpgrade(self):
- l = MyHTMLLog(self.basedir, "upgrade", "log contents")
- l.upgrade("filename")
-
- def testSubscribe(self):
- l1 = MyLog(self.basedir, "subscribe1")
- l1.finish()
- self.failUnless(l1.isFinished())
-
- s = MyLogSubscriber()
- l1.subscribe(s, True)
- l1.unsubscribe(s)
- self.failIf(s.chunks)
-
- s = MyLogSubscriber()
- l1.subscribe(s, False)
- l1.unsubscribe(s)
- self.failIf(s.chunks)
-
- finished = []
- l2 = MyLog(self.basedir, "subscribe2")
- l2.waitUntilFinished().addCallback(finished.append)
- l2.addHeader("HEADER\n")
- s1 = MyLogSubscriber()
- l2.subscribe(s1, True)
- s2 = MyLogSubscriber()
- l2.subscribe(s2, False)
- self.failUnlessEqual(s1.chunks, [(builder.HEADER, "HEADER\n")])
- self.failUnlessEqual(s2.chunks, [])
-
- l2.addStdout("Some text\n")
- self.failUnlessEqual(s1.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, "Some text\n")])
- self.failUnlessEqual(s2.chunks, [(builder.STDOUT, "Some text\n")])
- l2.unsubscribe(s1)
-
- l2.addStdout("Some more text\n")
- self.failUnlessEqual(s1.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, "Some text\n")])
- self.failUnlessEqual(s2.chunks, [(builder.STDOUT, "Some text\n"),
- (builder.STDOUT, "Some more text\n"),
- ])
- self.failIf(finished)
- l2.finish()
- self.failUnlessEqual(finished, [l2])
-
- def testConsumer(self):
- l1 = MyLog(self.basedir, "consumer1")
- l1.finish()
- self.failUnless(l1.isFinished())
-
- s = MyLogConsumer()
- d = l1.subscribeConsumer(s)
- d.addCallback(self._testConsumer_1, s)
- return maybeWait(d, 5)
- def _testConsumer_1(self, res, s):
- self.failIf(s.chunks)
- self.failUnless(s.finished)
- self.failIf(s.producer) # producer should be registered and removed
-
- l2 = MyLog(self.basedir, "consumer2")
- l2.addHeader("HEADER\n")
- l2.finish()
- self.failUnless(l2.isFinished())
-
- s = MyLogConsumer()
- d = l2.subscribeConsumer(s)
- d.addCallback(self._testConsumer_2, s)
- return d
- def _testConsumer_2(self, res, s):
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n")])
- self.failUnless(s.finished)
- self.failIf(s.producer) # producer should be registered and removed
-
-
- l2 = MyLog(self.basedir, "consumer3")
- l2.chunkSize = 1000
- l2.addHeader("HEADER\n")
- l2.addStdout(800*"a")
- l2.addStdout(800*"a") # should now have two chunks on disk, 1000+600
- l2.addStdout(800*"b") # HEADER,1000+600*a on disk, 800*a in memory
- l2.addStdout(800*"b") # HEADER,1000+600*a,1000+600*b on disk
- l2.addStdout(200*"c") # HEADER,1000+600*a,1000+600*b on disk,
- # 200*c in memory
-
- s = MyLogConsumer(limit=1)
- d = l2.subscribeConsumer(s)
- d.addCallback(self._testConsumer_3, l2, s)
- return d
- def _testConsumer_3(self, res, l2, s):
- self.failUnless(s.streaming)
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n")])
- s.limit = 1
- d = s.producer.resumeProducing()
- d.addCallback(self._testConsumer_4, l2, s)
- return d
- def _testConsumer_4(self, res, l2, s):
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, 1000*"a"),
- ])
- s.limit = None
- d = s.producer.resumeProducing()
- d.addCallback(self._testConsumer_5, l2, s)
- return d
- def _testConsumer_5(self, res, l2, s):
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, 1000*"a"),
- (builder.STDOUT, 600*"a"),
- (builder.STDOUT, 1000*"b"),
- (builder.STDOUT, 600*"b"),
- (builder.STDOUT, 200*"c")])
- l2.addStdout(1000*"c") # HEADER,1600*a,1600*b,1200*c on disk
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, 1000*"a"),
- (builder.STDOUT, 600*"a"),
- (builder.STDOUT, 1000*"b"),
- (builder.STDOUT, 600*"b"),
- (builder.STDOUT, 200*"c"),
- (builder.STDOUT, 1000*"c")])
- l2.finish()
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, 1000*"a"),
- (builder.STDOUT, 600*"a"),
- (builder.STDOUT, 1000*"b"),
- (builder.STDOUT, 600*"b"),
- (builder.STDOUT, 200*"c"),
- (builder.STDOUT, 1000*"c")])
- self.failIf(s.producer)
- self.failUnless(s.finished)
-
- def testLargeSummary(self):
- bigtext = "a" * 200000 # exceed the NetstringReceiver 100KB limit
- l = MyLog(self.basedir, "large", bigtext)
- s = MyLogConsumer()
- d = l.subscribeConsumer(s)
- def _check(res):
- for ctype,chunk in s.chunks:
- self.failUnless(len(chunk) < 100000)
- merged = "".join([c[1] for c in s.chunks])
- self.failUnless(merged == bigtext)
- d.addCallback(_check)
- # when this fails, it fails with a timeout, and there is an exception
- # sent to log.err(). This AttributeError exception is in
- # NetstringReceiver.dataReceived where it does
- # self.transport.loseConnection() because of the NetstringParseError,
- # however self.transport is None
- return maybeWait(d, 5)
- testLargeSummary.timeout = 5
-
-config_base = """
-from buildbot.process import factory, step
-s = factory.s
-
-f1 = factory.QuickBuildFactory('fakerep', 'cvsmodule', configure=None)
-
-f2 = factory.BuildFactory([
- s(step.Dummy, timeout=1),
- s(step.RemoteDummy, timeout=2),
- ])
-
-BuildmasterConfig = c = {}
-c['bots'] = [['bot1', 'sekrit']]
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = []
-c['builders'].append({'name':'quick', 'slavename':'bot1',
- 'builddir': 'quickdir', 'factory': f1})
-c['slavePortnum'] = 0
-"""
-
-config_2 = config_base + """
-c['builders'] = [{'name': 'dummy', 'slavename': 'bot1',
- 'builddir': 'dummy1', 'factory': f2},
- {'name': 'testdummy', 'slavename': 'bot1',
- 'builddir': 'dummy2', 'factory': f2, 'category': 'test'}]
-"""
-
-class STarget(base.StatusReceiver):
- debug = False
-
- def __init__(self, mode):
- self.mode = mode
- self.events = []
- def announce(self):
- if self.debug:
- print self.events[-1]
-
- def builderAdded(self, name, builder):
- self.events.append(("builderAdded", name, builder))
- self.announce()
- if "builder" in self.mode:
- return self
- def builderChangedState(self, name, state):
- self.events.append(("builderChangedState", name, state))
- self.announce()
- def buildStarted(self, name, build):
- self.events.append(("buildStarted", name, build))
- self.announce()
- if "eta" in self.mode:
- self.eta_build = build.getETA()
- if "build" in self.mode:
- return self
- def buildETAUpdate(self, build, ETA):
- self.events.append(("buildETAUpdate", build, ETA))
- self.announce()
- def stepStarted(self, build, step):
- self.events.append(("stepStarted", build, step))
- self.announce()
- if 0 and "eta" in self.mode:
- print "TIMES", step.getTimes()
- print "ETA", step.getETA()
- print "EXP", step.getExpectations()
- if "step" in self.mode:
- return self
- def stepETAUpdate(self, build, step, ETA, expectations):
- self.events.append(("stepETAUpdate", build, step, ETA, expectations))
- self.announce()
- def logStarted(self, build, step, log):
- self.events.append(("logStarted", build, step, log))
- self.announce()
- def logFinished(self, build, step, log):
- self.events.append(("logFinished", build, step, log))
- self.announce()
- def stepFinished(self, build, step, results):
- self.events.append(("stepFinished", build, step, results))
- if 0 and "eta" in self.mode:
- print "post-EXP", step.getExpectations()
- self.announce()
- def buildFinished(self, name, build, results):
- self.events.append(("buildFinished", name, build, results))
- self.announce()
- def builderRemoved(self, name):
- self.events.append(("builderRemoved", name))
- self.announce()
-
-class Subscription(RunMixin, unittest.TestCase):
- # verify that StatusTargets can subscribe/unsubscribe properly
-
- def testSlave(self):
- m = self.master
- s = m.getStatus()
- self.t1 = t1 = STarget(["builder"])
- #t1.debug = True; print
- s.subscribe(t1)
- self.failUnlessEqual(len(t1.events), 0)
-
- self.t3 = t3 = STarget(["builder", "build", "step"])
- s.subscribe(t3)
-
- m.loadConfig(config_2)
- m.readConfig = True
- m.startService()
-
- self.failUnlessEqual(len(t1.events), 4)
- self.failUnlessEqual(t1.events[0][0:2], ("builderAdded", "dummy"))
- self.failUnlessEqual(t1.events[1],
- ("builderChangedState", "dummy", "offline"))
- self.failUnlessEqual(t1.events[2][0:2], ("builderAdded", "testdummy"))
- self.failUnlessEqual(t1.events[3],
- ("builderChangedState", "testdummy", "offline"))
- t1.events = []
-
- self.failUnlessEqual(s.getBuilderNames(), ["dummy", "testdummy"])
- self.failUnlessEqual(s.getBuilderNames(categories=['test']),
- ["testdummy"])
- self.s1 = s1 = s.getBuilder("dummy")
- self.failUnlessEqual(s1.getName(), "dummy")
- self.failUnlessEqual(s1.getState(), ("offline", []))
- self.failUnlessEqual(s1.getCurrentBuilds(), [])
- self.failUnlessEqual(s1.getLastFinishedBuild(), None)
- self.failUnlessEqual(s1.getBuild(-1), None)
- #self.failUnlessEqual(s1.getEvent(-1), foo("created"))
-
- # status targets should, upon being subscribed, immediately get a
- # list of all current builders matching their category
- self.t2 = t2 = STarget([])
- s.subscribe(t2)
- self.failUnlessEqual(len(t2.events), 2)
- self.failUnlessEqual(t2.events[0][0:2], ("builderAdded", "dummy"))
- self.failUnlessEqual(t2.events[1][0:2], ("builderAdded", "testdummy"))
-
- d = self.connectSlave(builders=["dummy", "testdummy"])
- d.addCallback(self._testSlave_1, t1)
- return maybeWait(d)
-
- def _testSlave_1(self, res, t1):
- self.failUnlessEqual(len(t1.events), 2)
- self.failUnlessEqual(t1.events[0],
- ("builderChangedState", "dummy", "idle"))
- self.failUnlessEqual(t1.events[1],
- ("builderChangedState", "testdummy", "idle"))
- t1.events = []
-
- c = interfaces.IControl(self.master)
- req = BuildRequest("forced build for testing", SourceStamp())
- c.getBuilder("dummy").requestBuild(req)
- d = req.waitUntilFinished()
- d2 = self.master.botmaster.waitUntilBuilderIdle("dummy")
- dl = defer.DeferredList([d, d2])
- dl.addCallback(self._testSlave_2)
- return dl
-
- def _testSlave_2(self, res):
- # t1 subscribes to builds, but not anything lower-level
- ev = self.t1.events
- self.failUnlessEqual(len(ev), 4)
- self.failUnlessEqual(ev[0][0:3],
- ("builderChangedState", "dummy", "building"))
- self.failUnlessEqual(ev[1][0], "buildStarted")
- self.failUnlessEqual(ev[2][0:2]+ev[2][3:4],
- ("buildFinished", "dummy", builder.SUCCESS))
- self.failUnlessEqual(ev[3][0:3],
- ("builderChangedState", "dummy", "idle"))
-
- self.failUnlessEqual([ev[0] for ev in self.t3.events],
- ["builderAdded",
- "builderChangedState", # offline
- "builderAdded",
- "builderChangedState", # idle
- "builderChangedState", # offline
- "builderChangedState", # idle
- "builderChangedState", # building
- "buildStarted",
- "stepStarted", "stepETAUpdate", "stepFinished",
- "stepStarted", "stepETAUpdate",
- "logStarted", "logFinished", "stepFinished",
- "buildFinished",
- "builderChangedState", # idle
- ])
-
- b = self.s1.getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getBuilder().getName(), "dummy")
- self.failUnlessEqual(b.getNumber(), 0)
- self.failUnlessEqual(b.getSourceStamp(), (None, None, None))
- self.failUnlessEqual(b.getReason(), "forced build for testing")
- self.failUnlessEqual(b.getChanges(), [])
- self.failUnlessEqual(b.getResponsibleUsers(), [])
- self.failUnless(b.isFinished())
- self.failUnlessEqual(b.getText(), ['build', 'successful'])
- self.failUnlessEqual(b.getColor(), "green")
- self.failUnlessEqual(b.getResults(), builder.SUCCESS)
-
- steps = b.getSteps()
- self.failUnlessEqual(len(steps), 2)
-
- eta = 0
- st1 = steps[0]
- self.failUnlessEqual(st1.getName(), "dummy")
- self.failUnless(st1.isFinished())
- self.failUnlessEqual(st1.getText(), ["delay", "1 secs"])
- start,finish = st1.getTimes()
- self.failUnless(0.5 < (finish-start) < 10)
- self.failUnlessEqual(st1.getExpectations(), [])
- self.failUnlessEqual(st1.getLogs(), [])
- eta += finish-start
-
- st2 = steps[1]
- self.failUnlessEqual(st2.getName(), "remote dummy")
- self.failUnless(st2.isFinished())
- self.failUnlessEqual(st2.getText(),
- ["remote", "delay", "2 secs"])
- start,finish = st2.getTimes()
- self.failUnless(1.5 < (finish-start) < 10)
- eta += finish-start
- self.failUnlessEqual(st2.getExpectations(), [('output', 38, None)])
- logs = st2.getLogs()
- self.failUnlessEqual(len(logs), 1)
- self.failUnlessEqual(logs[0].getName(), "log")
- self.failUnlessEqual(logs[0].getText(), "data")
-
- self.eta = eta
- # now we run it a second time, and we should have an ETA
-
- self.t4 = t4 = STarget(["builder", "build", "eta"])
- self.master.getStatus().subscribe(t4)
- c = interfaces.IControl(self.master)
- req = BuildRequest("forced build for testing", SourceStamp())
- c.getBuilder("dummy").requestBuild(req)
- d = req.waitUntilFinished()
- d2 = self.master.botmaster.waitUntilBuilderIdle("dummy")
- dl = defer.DeferredList([d, d2])
- dl.addCallback(self._testSlave_3)
- return dl
-
- def _testSlave_3(self, res):
- t4 = self.t4
- eta = self.eta
- self.failUnless(eta-1 < t4.eta_build < eta+1, # should be 3 seconds
- "t4.eta_build was %g, not in (%g,%g)"
- % (t4.eta_build, eta-1, eta+1))
-
-
-class Client(unittest.TestCase):
- def testAdaptation(self):
- b = builder.BuilderStatus("bname")
- b2 = client.makeRemote(b)
- self.failUnless(isinstance(b2, client.RemoteBuilder))
- b3 = client.makeRemote(None)
- self.failUnless(b3 is None)
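
The Subscription tests above exercise the status-subscription callbacks: a
target handed to status.subscribe() is immediately told about every existing
builder via builderAdded(), and from then on receives builderChangedState(),
buildStarted()/buildFinished() and so on, depending on which levels it listens
to (the STarget helper defined earlier in this file selects them with its mode
list). The following is an illustrative, builder-level-only sketch, not part
of the deleted module:

    class RecordingTarget:
        "Records the builder-level callbacks asserted in Subscription.testSlave."
        def __init__(self):
            self.events = []
        def builderAdded(self, name, builder_status):
            self.events.append(("builderAdded", name))
        def builderChangedState(self, name, state, *args):
            self.events.append(("builderChangedState", name, state))
        def builderRemoved(self, name):
            self.events.append(("builderRemoved", name))

    # usage against a running BuildMaster, mirroring testSlave:
    #   status = master.getStatus()
    #   target = RecordingTarget()
    #   status.subscribe(target)     # existing builders are announced at once
    #   ...
    #   status.unsubscribe(target)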
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_steps.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_steps.py
deleted file mode 100644
index bbe2871c2..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_steps.py
+++ /dev/null
@@ -1,236 +0,0 @@
-# -*- test-case-name: buildbot.test.test_steps -*-
-
-# create the BuildStep with a fake .remote instance that logs the
-# .callRemote invocations and compares them against the expected calls. Then
-# the test harness should send statusUpdate() messages in with assorted
-# data, eventually calling remote_complete(). Then we can verify that the
-# Step's rc was correct, and that the status it was supposed to return
-# matches.
-
-# sometimes, .callRemote should raise an exception because of a stale
-# reference. Sometimes it should errback with an UnknownCommand failure,
-# or some other failure.
-
-# todo: test batched updates, by invoking remote_update(updates) instead of
-# statusUpdate(update). Also involves interrupted builds.
-
-import os, sys, time
-
-from twisted.trial import unittest
-from twisted.internet import reactor
-from twisted.internet.defer import Deferred
-
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process import step, base, factory
-from buildbot.process.step import ShellCommand #, ShellCommands
-from buildbot.status import builder
-from buildbot.test.runutils import RunMixin
-from buildbot.twcompat import maybeWait
-from buildbot.slave import commands
-
-from twisted.python import log
-#log.startLogging(sys.stdout)
-
-class MyShellCommand(ShellCommand):
- started = False
- def runCommand(self, c):
- self.started = True
- self.rc = c
- return ShellCommand.runCommand(self, c)
-
-class FakeBuild:
- pass
-class FakeBuilder:
- statusbag = None
- name = "fakebuilder"
-class FakeSlaveBuilder:
- def getSlaveCommandVersion(self, command, oldversion=None):
- return "1.10"
-
-class FakeRemote:
- def __init__(self):
- self.events = []
- self.remoteCalls = 0
- #self.callRemoteNotifier = None
- def callRemote(self, methname, *args):
- event = ["callRemote", methname, args]
- self.events.append(event)
-## if self.callRemoteNotifier:
-## reactor.callLater(0, self.callRemoteNotifier, event)
- self.remoteCalls += 1
- self.deferred = Deferred()
- return self.deferred
- def notifyOnDisconnect(self, callback):
- pass
- def dontNotifyOnDisconnect(self, callback):
- pass
-
-
-class BuildStep(unittest.TestCase):
- def setUp(self):
- self.builder = FakeBuilder()
- self.builder_status = builder.BuilderStatus("fakebuilder")
- self.builder_status.basedir = "test_steps"
- self.builder_status.nextBuildNumber = 0
- os.mkdir(self.builder_status.basedir)
- self.build_status = self.builder_status.newBuild()
- req = base.BuildRequest("reason", SourceStamp())
- self.build = base.Build([req])
- self.build.build_status = self.build_status # fake it
- self.build.builder = self.builder
- self.build.slavebuilder = FakeSlaveBuilder()
- self.remote = FakeRemote()
- self.finished = 0
-
- def callback(self, results):
- self.failed = 0
- self.failure = None
- self.results = results
- self.finished = 1
- def errback(self, failure):
- self.failed = 1
- self.failure = failure
- self.results = None
- self.finished = 1
-
- def testShellCommand1(self):
- cmd = "argle bargle"
- dir = "murkle"
- expectedEvents = []
- step.RemoteCommand.commandCounter[0] = 3
- c = MyShellCommand(workdir=dir, command=cmd, build=self.build,
- timeout=10)
- self.assertEqual(self.remote.events, expectedEvents)
- self.build_status.addStep(c)
- d = c.startStep(self.remote)
- self.failUnless(c.started)
- rc = c.rc
- d.addCallbacks(self.callback, self.errback)
- timeout = time.time() + 10
- while self.remote.remoteCalls == 0:
- if time.time() > timeout:
- self.fail("timeout")
- reactor.iterate(0.01)
- expectedEvents.append(["callRemote", "startCommand",
- (rc, "3",
- "shell",
- {'command': "argle bargle",
- 'workdir': "murkle",
- 'want_stdout': 1,
- 'want_stderr': 1,
- 'timeout': 10,
- 'env': None}) ] )
- self.assertEqual(self.remote.events, expectedEvents)
-
- # we could do self.remote.deferred.errback(UnknownCommand) here. We
- # could also do .callback(), but generally the master end silently
- # ignores the slave's ack
-
- logs = c.step_status.getLogs()
- for log in logs:
- if log.getName() == "log":
- break
-
- rc.remoteUpdate({'header':
- "command 'argle bargle' in dir 'murkle'\n\n"})
- rc.remoteUpdate({'stdout': "foo\n"})
- self.assertEqual(log.getText(), "foo\n")
- self.assertEqual(log.getTextWithHeaders(),
- "command 'argle bargle' in dir 'murkle'\n\n"
- "foo\n")
- rc.remoteUpdate({'stderr': "bar\n"})
- self.assertEqual(log.getText(), "foo\nbar\n")
- self.assertEqual(log.getTextWithHeaders(),
- "command 'argle bargle' in dir 'murkle'\n\n"
- "foo\nbar\n")
- rc.remoteUpdate({'rc': 0})
- self.assertEqual(rc.rc, 0)
-
- rc.remote_complete()
- # that should fire the Deferred
- timeout = time.time() + 10
- while not self.finished:
- if time.time() > timeout:
- self.fail("timeout")
- reactor.iterate(0.01)
- self.assertEqual(self.failed, 0)
- self.assertEqual(self.results, 0)
-
-class Steps(unittest.TestCase):
- def testMultipleStepInstances(self):
- steps = [
- (step.CVS, {'cvsroot': "root", 'cvsmodule': "module"}),
- (step.Configure, {'command': "./configure"}),
- (step.Compile, {'command': "make"}),
- (step.Compile, {'command': "make more"}),
- (step.Compile, {'command': "make evenmore"}),
- (step.Test, {'command': "make test"}),
- (step.Test, {'command': "make testharder"}),
- ]
- f = factory.ConfigurableBuildFactory(steps)
- req = base.BuildRequest("reason", SourceStamp())
- b = f.newBuild([req])
- #for s in b.steps: print s.name
-
-class VersionCheckingStep(step.BuildStep):
- def start(self):
- # give our test a chance to run. It is non-trivial for a buildstep to
- # claw its way back out to the test case which is currently running.
- master = self.build.builder.botmaster.parent
- checker = master._checker
- checker(self)
- # then complete
- self.finished(step.SUCCESS)
-
-version_config = """
-from buildbot.process import factory, step
-from buildbot.test.test_steps import VersionCheckingStep
-BuildmasterConfig = c = {}
-f1 = factory.BuildFactory([
- factory.s(VersionCheckingStep),
- ])
-c['bots'] = [['bot1', 'sekrit']]
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = [{'name':'quick', 'slavename':'bot1',
- 'builddir': 'quickdir', 'factory': f1}]
-c['slavePortnum'] = 0
-"""
-
-class Version(RunMixin, unittest.TestCase):
- def setUp(self):
- RunMixin.setUp(self)
- self.master.loadConfig(version_config)
- self.master.startService()
- d = self.connectSlave(["quick"])
- return maybeWait(d)
-
- def doBuild(self, buildername):
- br = base.BuildRequest("forced", SourceStamp())
- d = br.waitUntilFinished()
- self.control.getBuilder(buildername).requestBuild(br)
- return d
-
-
- def checkCompare(self, s):
- v = s.slaveVersion("svn", None)
- # this insures that we are getting the version correctly
- self.failUnlessEqual(s.slaveVersion("svn", None), commands.cvs_ver)
- # and that non-existent commands do not provide a version
- self.failUnlessEqual(s.slaveVersion("NOSUCHCOMMAND"), None)
- # TODO: verify that a <=0.5.0 buildslave (which does not implement
- # remote_getCommands) handles oldversion= properly. This requires a
- # mutant slave which does not offer that method.
- #self.failUnlessEqual(s.slaveVersion("NOSUCHCOMMAND", "old"), "old")
-
- # now check the comparison functions
- self.failIf(s.slaveVersionIsOlderThan("svn", commands.cvs_ver))
- self.failIf(s.slaveVersionIsOlderThan("svn", "1.1"))
- self.failUnless(s.slaveVersionIsOlderThan("svn",
- commands.cvs_ver + ".1"))
-
- def testCompare(self):
- self.master._checker = self.checkCompare
- d = self.doBuild("quick")
- return maybeWait(d)
-
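
The Version tests above reach into a live build through a small hook: the test
case stores a callable on the running BuildMaster as master._checker, and
VersionCheckingStep.start() calls it with the step itself, so assertions (such
as the slaveVersion() checks in checkCompare) can run against the connected
slave before the step finishes. A minimal, hypothetical checker written to
that pattern, not part of the deleted module:

    def my_checker(buildstep):
        # 'buildstep' is the running VersionCheckingStep; slaveVersion() looks
        # up the version the attached buildslave reported for a named command.
        svn_ver = buildstep.slaveVersion("svn", None)
        assert svn_ver is not None, "slave did not report an svn command"

    # installed before forcing a build, exactly as testCompare does:
    #   self.master._checker = my_checker
    #   d = self.doBuild("quick")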
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_twisted.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_twisted.py
deleted file mode 100644
index aa295477c..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_twisted.py
+++ /dev/null
@@ -1,184 +0,0 @@
-# -*- test-case-name: buildbot.test.test_twisted -*-
-
-from twisted.trial import unittest
-
-from buildbot.process.step_twisted import countFailedTests, Trial
-from buildbot.status import builder
-
-noisy = 0
-if noisy:
- from twisted.python.log import startLogging
- import sys
- startLogging(sys.stdout)
-
-out1 = """
--------------------------------------------------------------------------------
-Ran 13 tests in 1.047s
-
-OK
-"""
-
-out2 = """
--------------------------------------------------------------------------------
-Ran 12 tests in 1.040s
-
-FAILED (failures=1)
-"""
-
-out3 = """
- NotImplementedError
--------------------------------------------------------------------------------
-Ran 13 tests in 1.042s
-
-FAILED (failures=1, errors=1)
-"""
-
-out4 = """
-unparseable
-"""
-
-out5 = """
- File "/usr/home/warner/stuff/python/twisted/Twisted-CVS/twisted/test/test_defer.py", line 79, in testTwoCallbacks
- self.fail("just because")
- File "/usr/home/warner/stuff/python/twisted/Twisted-CVS/twisted/trial/unittest.py", line 21, in fail
- raise AssertionError, message
- AssertionError: just because
-unparseable
-"""
-
-out6 = """
-===============================================================================
-SKIPPED: testProtocolLocalhost (twisted.flow.test.test_flow.FlowTest)
--------------------------------------------------------------------------------
-XXX freezes, fixme
-===============================================================================
-SKIPPED: testIPv6 (twisted.names.test.test_names.HostsTestCase)
--------------------------------------------------------------------------------
-IPv6 support is not in our hosts resolver yet
-===============================================================================
-EXPECTED FAILURE: testSlots (twisted.test.test_rebuild.NewStyleTestCase)
--------------------------------------------------------------------------------
-Traceback (most recent call last):
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/unittest.py", line 240, in _runPhase
- stage(*args, **kwargs)
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/unittest.py", line 262, in _main
- self.runner(self.method)
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/runner.py", line 95, in runTest
- method()
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/test/test_rebuild.py", line 130, in testSlots
- rebuild.updateInstance(self.m.SlottedClass())
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/python/rebuild.py", line 114, in updateInstance
- self.__class__ = latestClass(self.__class__)
-TypeError: __class__ assignment: 'SlottedClass' object layout differs from 'SlottedClass'
-===============================================================================
-FAILURE: testBatchFile (twisted.conch.test.test_sftp.TestOurServerBatchFile)
--------------------------------------------------------------------------------
-Traceback (most recent call last):
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/unittest.py", line 240, in _runPhase
- stage(*args, **kwargs)
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/unittest.py", line 262, in _main
- self.runner(self.method)
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/runner.py", line 95, in runTest
- method()
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/conch/test/test_sftp.py", line 450, in testBatchFile
- self.failUnlessEqual(res[1:-2], ['testDirectory', 'testRemoveFile', 'testRenameFile', 'testfile1'])
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/unittest.py", line 115, in failUnlessEqual
- raise FailTest, (msg or '%r != %r' % (first, second))
-FailTest: [] != ['testDirectory', 'testRemoveFile', 'testRenameFile', 'testfile1']
--------------------------------------------------------------------------------
-Ran 1454 tests in 911.579s
-
-FAILED (failures=2, skips=49, expectedFailures=9)
-Exception exceptions.AttributeError: "'NoneType' object has no attribute 'StringIO'" in <bound method RemoteReference.__del__ of <twisted.spread.pb.RemoteReference instance at 0x27036c0>> ignored
-"""
-
-class MyTrial(Trial):
- def addTestResult(self, testname, results, text, logs):
- self.results.append((testname, results, text, logs))
- def addCompleteLog(self, name, log):
- pass
-
-class MyLogFile:
- def __init__(self, text):
- self.text = text
- def getText(self):
- return self.text
-
-
-class Count(unittest.TestCase):
-
- def count(self, total, failures=0, errors=0,
- expectedFailures=0, unexpectedSuccesses=0, skips=0):
- d = {
- 'total': total,
- 'failures': failures,
- 'errors': errors,
- 'expectedFailures': expectedFailures,
- 'unexpectedSuccesses': unexpectedSuccesses,
- 'skips': skips,
- }
- return d
-
- def testCountFailedTests(self):
- count = countFailedTests(out1)
- self.assertEquals(count, self.count(total=13))
- count = countFailedTests(out2)
- self.assertEquals(count, self.count(total=12, failures=1))
- count = countFailedTests(out3)
- self.assertEquals(count, self.count(total=13, failures=1, errors=1))
- count = countFailedTests(out4)
- self.assertEquals(count, self.count(total=None))
- count = countFailedTests(out5)
- self.assertEquals(count, self.count(total=None))
-
-class Parse(unittest.TestCase):
- def failUnlessIn(self, substr, string):
- self.failUnless(string.find(substr) != -1)
-
- def testParse(self):
- t = MyTrial(build=None, workdir=".", testpath=None, testChanges=True)
- t.results = []
- log = MyLogFile(out6)
- t.createSummary(log)
-
- self.failUnlessEqual(len(t.results), 4)
- r1, r2, r3, r4 = t.results
- testname, results, text, logs = r1
- self.failUnlessEqual(testname,
- ("twisted", "flow", "test", "test_flow",
- "FlowTest", "testProtocolLocalhost"))
- self.failUnlessEqual(results, builder.SKIPPED)
- self.failUnlessEqual(text, ['skipped'])
- self.failUnlessIn("XXX freezes, fixme", logs)
- self.failUnless(logs.startswith("SKIPPED:"))
- self.failUnless(logs.endswith("fixme\n"))
-
- testname, results, text, logs = r2
- self.failUnlessEqual(testname,
- ("twisted", "names", "test", "test_names",
- "HostsTestCase", "testIPv6"))
- self.failUnlessEqual(results, builder.SKIPPED)
- self.failUnlessEqual(text, ['skipped'])
- self.failUnless(logs.startswith("SKIPPED: testIPv6"))
- self.failUnless(logs.endswith("IPv6 support is not in our hosts resolver yet\n"))
-
- testname, results, text, logs = r3
- self.failUnlessEqual(testname,
- ("twisted", "test", "test_rebuild",
- "NewStyleTestCase", "testSlots"))
- self.failUnlessEqual(results, builder.SUCCESS)
- self.failUnlessEqual(text, ['expected', 'failure'])
- self.failUnless(logs.startswith("EXPECTED FAILURE: "))
- self.failUnlessIn("\nTraceback ", logs)
- self.failUnless(logs.endswith("layout differs from 'SlottedClass'\n"))
-
- testname, results, text, logs = r4
- self.failUnlessEqual(testname,
- ("twisted", "conch", "test", "test_sftp",
- "TestOurServerBatchFile", "testBatchFile"))
- self.failUnlessEqual(results, builder.FAILURE)
- self.failUnlessEqual(text, ['failure'])
- self.failUnless(logs.startswith("FAILURE: "))
- self.failUnlessIn("Traceback ", logs)
- self.failUnless(logs.endswith("'testRenameFile', 'testfile1']\n"))
-
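
The Parse test above drives Trial.createSummary() without running a build:
MyLogFile only has to provide getText(), and the MyTrial subclass overrides
addTestResult() to capture whatever the parser extracted. The same stubs can
be pointed at any other captured trial transcript; a short sketch, not part of
the deleted module ('some_transcript' is a stand-in string):

    t = MyTrial(build=None, workdir=".", testpath=None, testChanges=True)
    t.results = []
    t.createSummary(MyLogFile(some_transcript))
    for testname, results, text, logs in t.results:
        print testname, results, text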
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_util.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_util.py
deleted file mode 100644
index b375390a7..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_util.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- test-case-name: buildbot.test.test_util -*-
-
-from twisted.trial import unittest
-
-from buildbot import util
-
-
-class Foo(util.ComparableMixin):
- compare_attrs = ["a", "b"]
-
- def __init__(self, a, b, c):
- self.a, self.b, self.c = a,b,c
-
-
-class Bar(Foo, util.ComparableMixin):
- compare_attrs = ["b", "c"]
-
-class Compare(unittest.TestCase):
- def testCompare(self):
- f1 = Foo(1, 2, 3)
- f2 = Foo(1, 2, 4)
- f3 = Foo(1, 3, 4)
- b1 = Bar(1, 2, 3)
- self.failUnless(f1 == f2)
- self.failIf(f1 == f3)
- self.failIf(f1 == b1)
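
The Compare test relies on compare_attrs driving equality: two instances of
the same class are equal when the attributes listed in compare_attrs match
(so Foo ignores 'c'), and instances of different classes never compare equal,
even though Bar subclasses Foo. A plausible sketch of that mechanism (not the
actual buildbot.util.ComparableMixin implementation):

    class ComparableSketch:
        compare_attrs = []
        def __eq__(self, other):
            if self.__class__ is not other.__class__:
                return False                 # Foo(1,2,3) != Bar(1,2,3)
            mine = [getattr(self, a) for a in self.compare_attrs]
            theirs = [getattr(other, a) for a in other.compare_attrs]
            return mine == theirs            # Foo(1,2,3) == Foo(1,2,4); 'c' ignored
        def __ne__(self, other):
            return not self.__eq__(other)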
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_vc.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_vc.py
deleted file mode 100644
index f65e75575..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_vc.py
+++ /dev/null
@@ -1,2162 +0,0 @@
-# -*- test-case-name: buildbot.test.test_vc -*-
-
-from __future__ import generators
-
-import sys, os, signal, shutil, time, re
-from email.Utils import mktime_tz, parsedate_tz
-
-from twisted.trial import unittest
-from twisted.internet import defer, reactor, utils
-
-#defer.Deferred.debug = True
-
-from twisted.python import log
-#log.startLogging(sys.stderr)
-
-from buildbot import master, interfaces
-from buildbot.slave import bot, commands
-from buildbot.slave.commands import rmdirRecursive
-from buildbot.status.builder import SUCCESS, FAILURE
-from buildbot.process import step, base
-from buildbot.changes import changes
-from buildbot.sourcestamp import SourceStamp
-from buildbot.twcompat import maybeWait, which
-from buildbot.scripts import tryclient
-
-#step.LoggedRemoteCommand.debug = True
-
-# buildbot.twcompat will patch these into t.i.defer if necessary
-from twisted.internet.defer import waitForDeferred, deferredGenerator
-
-# Most of these tests (all but SourceStamp) depend upon having a set of
-# repositories from which we can perform checkouts. These repositories are
-# created by the setUp method at the start of each test class. In earlier
-# versions these repositories were created offline and distributed with a
-# separate tarball named 'buildbot-test-vc-1.tar.gz'. This is no longer
-# necessary.
-
-# CVS requires a local file repository. Providing remote access is beyond
-# the feasible abilities of this test program (needs pserver or ssh).
-
-# SVN requires a local file repository. To provide remote access over HTTP
-# requires an apache server with DAV support and mod_svn, way beyond what we
-# can test from here.
-
-# Arch and Darcs both allow remote (read-only) operation with any web
-# server. We test both local file access and HTTP access (by spawning a
-# small web server to provide access to the repository files while the test
-# is running).
-
-
-config_vc = """
-from buildbot.process import factory, step
-s = factory.s
-
-f1 = factory.BuildFactory([
- %s,
- ])
-c = {}
-c['bots'] = [['bot1', 'sekrit']]
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = [{'name': 'vc', 'slavename': 'bot1',
- 'builddir': 'vc-dir', 'factory': f1}]
-c['slavePortnum'] = 0
-BuildmasterConfig = c
-"""
-
-p0_diff = r"""
-Index: subdir/subdir.c
-===================================================================
-RCS file: /home/warner/stuff/Projects/BuildBot/code-arch/_trial_temp/test_vc/repositories/CVS-Repository/sample/subdir/subdir.c,v
-retrieving revision 1.1.1.1
-diff -u -r1.1.1.1 subdir.c
---- subdir/subdir.c 14 Aug 2005 01:32:49 -0000 1.1.1.1
-+++ subdir/subdir.c 14 Aug 2005 01:36:15 -0000
-@@ -4,6 +4,6 @@
- int
- main(int argc, const char *argv[])
- {
-- printf("Hello subdir.\n");
-+ printf("Hello patched subdir.\n");
- return 0;
- }
-"""
-
-# this patch does not include the filename headers, so it is
-# patchlevel-neutral
-TRY_PATCH = '''
-@@ -5,6 +5,6 @@
- int
- main(int argc, const char *argv[])
- {
-- printf("Hello subdir.\\n");
-+ printf("Hello try.\\n");
- return 0;
- }
-'''
-
-MAIN_C = '''
-// this is main.c
-#include <stdio.h>
-
-int
-main(int argc, const char *argv[])
-{
- printf("Hello world.\\n");
- return 0;
-}
-'''
-
-BRANCH_C = '''
-// this is main.c
-#include <stdio.h>
-
-int
-main(int argc, const char *argv[])
-{
- printf("Hello branch.\\n");
- return 0;
-}
-'''
-
-VERSION_C = '''
-// this is version.c
-#include <stdio.h>
-
-int
-main(int argc, const char *argv[])
-{
- printf("Hello world, version=%d\\n");
- return 0;
-}
-'''
-
-SUBDIR_C = '''
-// this is subdir/subdir.c
-#include <stdio.h>
-
-int
-main(int argc, const char *argv[])
-{
- printf("Hello subdir.\\n");
- return 0;
-}
-'''
-
-TRY_C = '''
-// this is subdir/subdir.c
-#include <stdio.h>
-
-int
-main(int argc, const char *argv[])
-{
- printf("Hello try.\\n");
- return 0;
-}
-'''
-
-class VCS_Helper:
- # this is a helper class which keeps track of whether each VC system is
- # available, and whether the repository for each has been created. There
- # is one instance of this class, at module level, shared between all test
- # cases.
-
- def __init__(self):
- self._helpers = {}
- self._isCapable = {}
- self._excuses = {}
- self._repoReady = {}
-
- def registerVC(self, name, helper):
- self._helpers[name] = helper
- self._repoReady[name] = False
-
- def skipIfNotCapable(self, name):
- """Either return None, or raise SkipTest"""
- d = self.capable(name)
- def _maybeSkip(res):
- if not res[0]:
- raise unittest.SkipTest(res[1])
- d.addCallback(_maybeSkip)
- return d
-
- def capable(self, name):
- """Return a Deferred that fires with (True,None) if this host offers
- the given VC tool, or (False,excuse) if it does not (and therefore
- the tests should be skipped)."""
-
- if self._isCapable.has_key(name):
- if self._isCapable[name]:
- return defer.succeed((True,None))
- else:
- return defer.succeed((False, self._excuses[name]))
- d = defer.maybeDeferred(self._helpers[name].capable)
- def _capable(res):
- if res[0]:
- self._isCapable[name] = True
- else:
- self._excuses[name] = res[1]
- return res
- d.addCallback(_capable)
- return d
-
- def getHelper(self, name):
- return self._helpers[name]
-
- def createRepository(self, name):
- """Return a Deferred that fires when the repository is set up."""
- if self._repoReady[name]:
- return defer.succeed(True)
- d = self._helpers[name].createRepository()
- def _ready(res):
- self._repoReady[name] = True
- d.addCallback(_ready)
- return d
-
-VCS = VCS_Helper()
-
-class SignalMixin:
- sigchldHandler = None
-
- def setUpClass(self):
- # make sure SIGCHLD handler is installed, as it should be on
- # reactor.run(). problem is reactor may not have been run when this
- # test runs.
- if hasattr(reactor, "_handleSigchld") and hasattr(signal, "SIGCHLD"):
- self.sigchldHandler = signal.signal(signal.SIGCHLD,
- reactor._handleSigchld)
-
- def tearDownClass(self):
- if self.sigchldHandler:
- signal.signal(signal.SIGCHLD, self.sigchldHandler)
-
-
-# the overall plan here:
-#
-# Each VC system is tested separately, all using the same source tree defined
-# in the 'files' dictionary above. Each VC system gets its own TestCase
-# subclass. The first test case that is run will create the repository during
-# setUp(), making two branches: 'trunk' and 'branch'. The trunk gets a copy
-# of all the files in 'files'. The variant of good.c is committed on the
-# branch.
-#
-# then testCheckout is run, which does a number of checkout/clobber/update
-# builds. These all use trunk r1. It then runs self.fix(), which modifies
-# 'fixable.c', then performs another build and makes sure the tree has been
-# updated.
-#
-# testBranch uses trunk-r1 and branch-r1, making sure that we clobber the
-# tree properly when we switch between them
-#
-# testPatch does a trunk-r1 checkout and applies a patch.
-#
-# testTryGetPatch performs a trunk-r1 checkout, modifies some files, then
-# verifies that tryclient.getSourceStamp figures out the base revision and
-# what got changed.
-
-
-# vc_create makes a repository at r1 with three files: main.c, version.c, and
-# subdir/foo.c . It also creates a branch from r1 (called b1) in which main.c
-# says "hello branch" instead of "hello world". self.trunk[] contains
-# revision stamps for everything on the trunk, and self.branch[] does the
-# same for the branch.
-
-# vc_revise() checks out a tree at HEAD, changes version.c, then checks it
-# back in. The new version stamp is appended to self.trunk[]. The tree is
-# removed afterwards.
-
-# vc_try_checkout(workdir, rev) checks out a tree at REV, then changes
-# subdir/subdir.c to say 'Hello try'
-# vc_try_finish(workdir) removes the tree and cleans up any VC state
-# necessary (like deleting the Arch archive entry).
-
-
-class BaseHelper:
- def __init__(self):
- self.trunk = []
- self.branch = []
- self.allrevs = []
-
- def capable(self):
- # this is also responsible for setting self.vcexe
- raise NotImplementedError
-
- def createBasedir(self):
- # you must call this from createRepository
- self.repbase = os.path.abspath(os.path.join("test_vc",
- "repositories"))
- if not os.path.isdir(self.repbase):
- os.makedirs(self.repbase)
-
- def createRepository(self):
- # this will only be called once per process
- raise NotImplementedError
-
- def populate(self, basedir):
- os.makedirs(basedir)
- os.makedirs(os.path.join(basedir, "subdir"))
- open(os.path.join(basedir, "main.c"), "w").write(MAIN_C)
- self.version = 1
- version_c = VERSION_C % self.version
- open(os.path.join(basedir, "version.c"), "w").write(version_c)
- open(os.path.join(basedir, "main.c"), "w").write(MAIN_C)
- open(os.path.join(basedir, "subdir", "subdir.c"), "w").write(SUBDIR_C)
-
- def populate_branch(self, basedir):
- open(os.path.join(basedir, "main.c"), "w").write(BRANCH_C)
-
- def addTrunkRev(self, rev):
- self.trunk.append(rev)
- self.allrevs.append(rev)
- def addBranchRev(self, rev):
- self.branch.append(rev)
- self.allrevs.append(rev)
-
- def runCommand(self, basedir, command, failureIsOk=False):
- # all commands passed to do() should be strings or lists. If they are
- # strings, none of the arguments may have spaces. This makes the
- # commands less verbose at the expense of restricting what they can
- # specify.
- if type(command) not in (list, tuple):
- command = command.split(" ")
- #print "do %s" % command
- env = os.environ.copy()
- env['LC_ALL'] = "C"
- d = utils.getProcessOutputAndValue(command[0], command[1:],
- env=env, path=basedir)
- def check((out, err, code)):
- #print
- #print "command: %s" % command
- #print "out: %s" % out
- #print "code: %s" % code
- if code != 0 and not failureIsOk:
- log.msg("command %s finished with exit code %d" %
- (command, code))
- log.msg(" and stdout %s" % (out,))
- log.msg(" and stderr %s" % (err,))
- raise RuntimeError("command %s finished with exit code %d"
- % (command, code)
- + ": see logs for stdout")
- return out
- d.addCallback(check)
- return d
-
- def do(self, basedir, command, failureIsOk=False):
- d = self.runCommand(basedir, command, failureIsOk=failureIsOk)
- return waitForDeferred(d)
-
- def dovc(self, basedir, command, failureIsOk=False):
- """Like do(), but the VC binary will be prepended to COMMAND."""
- command = self.vcexe + " " + command
- return self.do(basedir, command, failureIsOk)
-
-class VCBase(SignalMixin):
- metadir = None
- createdRepository = False
- master = None
- slave = None
- httpServer = None
- httpPort = None
- skip = None
- has_got_revision = False
- has_got_revision_branches_are_merged = False # for SVN
-
- def failUnlessIn(self, substring, string, msg=None):
- # trial provides a version of this, but it requires python-2.3 when used
- # on plain strings.
- if msg is None:
- msg = ("did not see the expected substring '%s' in string '%s'" %
- (substring, string))
- self.failUnless(string.find(substring) != -1, msg)
-
- def setUp(self):
- d = VCS.skipIfNotCapable(self.vc_name)
- d.addCallback(self._setUp1)
- return maybeWait(d)
-
- def _setUp1(self, res):
- self.helper = VCS.getHelper(self.vc_name)
-
- if os.path.exists("basedir"):
- rmdirRecursive("basedir")
- os.mkdir("basedir")
- self.master = master.BuildMaster("basedir")
- self.slavebase = os.path.abspath("slavebase")
- if os.path.exists(self.slavebase):
- rmdirRecursive(self.slavebase)
- os.mkdir("slavebase")
-
- d = VCS.createRepository(self.vc_name)
- return d
-
- def connectSlave(self):
- port = self.master.slavePort._port.getHost().port
- slave = bot.BuildSlave("localhost", port, "bot1", "sekrit",
- self.slavebase, keepalive=0, usePTY=1)
- self.slave = slave
- slave.startService()
- d = self.master.botmaster.waitUntilBuilderAttached("vc")
- return d
-
- def loadConfig(self, config):
- # reloading the config file causes a new 'listDirs' command to be
- # sent to the slave. To synchronize on this properly, it is easiest
- # to stop and restart the slave.
- d = defer.succeed(None)
- if self.slave:
- d = self.master.botmaster.waitUntilBuilderDetached("vc")
- self.slave.stopService()
- d.addCallback(lambda res: self.master.loadConfig(config))
- d.addCallback(lambda res: self.connectSlave())
- return d
-
- def serveHTTP(self):
- # launch an HTTP server to serve the repository files
- from twisted.web import static, server
- from twisted.internet import reactor
- self.root = static.File(self.helper.repbase)
- self.site = server.Site(self.root)
- self.httpServer = reactor.listenTCP(0, self.site)
- self.httpPort = self.httpServer.getHost().port
-
- def doBuild(self, shouldSucceed=True, ss=None):
- c = interfaces.IControl(self.master)
-
- if ss is None:
- ss = SourceStamp()
- #print "doBuild(ss: b=%s rev=%s)" % (ss.branch, ss.revision)
- req = base.BuildRequest("test_vc forced build", ss)
- d = req.waitUntilFinished()
- c.getBuilder("vc").requestBuild(req)
- d.addCallback(self._doBuild_1, shouldSucceed)
- return d
- def _doBuild_1(self, bs, shouldSucceed):
- r = bs.getResults()
- if r != SUCCESS and shouldSucceed:
- print
- print
- if not bs.isFinished():
- print "Hey, build wasn't even finished!"
- print "Build did not succeed:", r, bs.getText()
- for s in bs.getSteps():
- for l in s.getLogs():
- print "--- START step %s / log %s ---" % (s.getName(),
- l.getName())
- print l.getTextWithHeaders()
- print "--- STOP ---"
- print
- self.fail("build did not succeed")
- return bs
-
- def touch(self, d, f):
- open(os.path.join(d,f),"w").close()
- def shouldExist(self, *args):
- target = os.path.join(*args)
- self.failUnless(os.path.exists(target),
- "expected to find %s but didn't" % target)
- def shouldNotExist(self, *args):
- target = os.path.join(*args)
- self.failIf(os.path.exists(target),
- "expected to NOT find %s, but did" % target)
- def shouldContain(self, d, f, contents):
- c = open(os.path.join(d, f), "r").read()
- self.failUnlessIn(contents, c)
-
- def checkGotRevision(self, bs, expected):
- if self.has_got_revision:
- self.failUnlessEqual(bs.getProperty("got_revision"), expected)
-
- def checkGotRevisionIsLatest(self, bs):
- expected = self.helper.trunk[-1]
- if self.has_got_revision_branches_are_merged:
- expected = self.helper.allrevs[-1]
- self.checkGotRevision(bs, expected)
-
- def do_vctest(self, testRetry=True):
- vctype = self.vctype
- args = self.helper.vcargs
- m = self.master
- self.vcdir = os.path.join(self.slavebase, "vc-dir", "source")
- self.workdir = os.path.join(self.slavebase, "vc-dir", "build")
- # woo double-substitution
- s = "s(%s, timeout=200, workdir='build', mode='%%s'" % (vctype,)
- for k,v in args.items():
- s += ", %s=%s" % (k, repr(v))
- s += ")"
- config = config_vc % s
-
- m.loadConfig(config % 'clobber')
- m.readConfig = True
- m.startService()
-
- d = self.connectSlave()
- d.addCallback(lambda res: log.msg("testing clobber"))
- d.addCallback(self._do_vctest_clobber)
- d.addCallback(lambda res: log.msg("doing update"))
- d.addCallback(lambda res: self.loadConfig(config % 'update'))
- d.addCallback(lambda res: log.msg("testing update"))
- d.addCallback(self._do_vctest_update)
- if testRetry:
- d.addCallback(lambda res: log.msg("testing update retry"))
- d.addCallback(self._do_vctest_update_retry)
- d.addCallback(lambda res: log.msg("doing copy"))
- d.addCallback(lambda res: self.loadConfig(config % 'copy'))
- d.addCallback(lambda res: log.msg("testing copy"))
- d.addCallback(self._do_vctest_copy)
- if self.metadir:
- d.addCallback(lambda res: log.msg("doing export"))
- d.addCallback(lambda res: self.loadConfig(config % 'export'))
- d.addCallback(lambda res: log.msg("testing export"))
- d.addCallback(self._do_vctest_export)
- return d
-
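# Aside (commentary, not part of the deleted file): the "woo
# double-substitution" in do_vctest() above works like this: the step line is
# written with mode='%%s', so the first substitution (which fills in vctype
# and the VC-specific args) leaves a literal '%s' behind; interpolating that
# line into config_vc then yields a complete master config that still holds
# exactly one '%s', which each phase fills with its checkout mode. A worked
# example under those assumptions, with a made-up cvsroot:
#
#   line = "s(step.CVS, timeout=200, workdir='build', mode='%s', cvsroot='/repo')"
#   config = config_vc % line          # full config, mode still reads '%s'
#   m.loadConfig(config % 'clobber')   # first pass does clobber checkouts
#   m.loadConfig(config % 'update')    # later passes reuse the same template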
- def _do_vctest_clobber(self, res):
- d = self.doBuild() # initial checkout
- d.addCallback(self._do_vctest_clobber_1)
- return d
- def _do_vctest_clobber_1(self, bs):
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.shouldExist(self.workdir, "subdir", "subdir.c")
- if self.metadir:
- self.shouldExist(self.workdir, self.metadir)
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.failUnlessEqual(bs.getProperty("branch"), None)
- self.checkGotRevisionIsLatest(bs)
-
- self.touch(self.workdir, "newfile")
- self.shouldExist(self.workdir, "newfile")
- d = self.doBuild() # rebuild clobbers workdir
- d.addCallback(self._do_vctest_clobber_2)
- return d
- def _do_vctest_clobber_2(self, res):
- self.shouldNotExist(self.workdir, "newfile")
-
- def _do_vctest_update(self, res):
- log.msg("_do_vctest_update")
- d = self.doBuild() # rebuild with update
- d.addCallback(self._do_vctest_update_1)
- return d
- def _do_vctest_update_1(self, bs):
- log.msg("_do_vctest_update_1")
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % self.helper.version)
- if self.metadir:
- self.shouldExist(self.workdir, self.metadir)
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.checkGotRevisionIsLatest(bs)
-
- self.touch(self.workdir, "newfile")
- d = self.doBuild() # update rebuild leaves new files
- d.addCallback(self._do_vctest_update_2)
- return d
- def _do_vctest_update_2(self, bs):
- log.msg("_do_vctest_update_2")
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.touch(self.workdir, "newfile")
- # now make a change to the repository and make sure we pick it up
- d = self.helper.vc_revise()
- d.addCallback(lambda res: self.doBuild())
- d.addCallback(self._do_vctest_update_3)
- return d
- def _do_vctest_update_3(self, bs):
- log.msg("_do_vctest_update_3")
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % self.helper.version)
- self.shouldExist(self.workdir, "newfile")
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.checkGotRevisionIsLatest(bs)
-
- # now "update" to an older revision
- d = self.doBuild(ss=SourceStamp(revision=self.helper.trunk[-2]))
- d.addCallback(self._do_vctest_update_4)
- return d
- def _do_vctest_update_4(self, bs):
- log.msg("_do_vctest_update_4")
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % (self.helper.version-1))
- self.failUnlessEqual(bs.getProperty("revision"),
- self.helper.trunk[-2])
- self.checkGotRevision(bs, self.helper.trunk[-2])
-
- # now update to the newer revision
- d = self.doBuild(ss=SourceStamp(revision=self.helper.trunk[-1]))
- d.addCallback(self._do_vctest_update_5)
- return d
- def _do_vctest_update_5(self, bs):
- log.msg("_do_vctest_update_5")
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % self.helper.version)
- self.failUnlessEqual(bs.getProperty("revision"),
- self.helper.trunk[-1])
- self.checkGotRevision(bs, self.helper.trunk[-1])
-
-
- def _do_vctest_update_retry(self, res):
- # certain local changes will prevent an update from working. The
- # most common is to replace a file with a directory, or vice
- # versa. The slave code should spot the failure and do a
- # clobber/retry.
- os.unlink(os.path.join(self.workdir, "main.c"))
- os.mkdir(os.path.join(self.workdir, "main.c"))
- self.touch(os.path.join(self.workdir, "main.c"), "foo")
- self.touch(self.workdir, "newfile")
-
- d = self.doBuild() # update, but must clobber to handle the error
- d.addCallback(self._do_vctest_update_retry_1)
- return d
- def _do_vctest_update_retry_1(self, bs):
- self.shouldNotExist(self.workdir, "newfile")
-
- def _do_vctest_copy(self, res):
- d = self.doBuild() # copy rebuild clobbers new files
- d.addCallback(self._do_vctest_copy_1)
- return d
- def _do_vctest_copy_1(self, bs):
- if self.metadir:
- self.shouldExist(self.workdir, self.metadir)
- self.shouldNotExist(self.workdir, "newfile")
- self.touch(self.workdir, "newfile")
- self.touch(self.vcdir, "newvcfile")
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.checkGotRevisionIsLatest(bs)
-
- d = self.doBuild() # copy rebuild clobbers new files
- d.addCallback(self._do_vctest_copy_2)
- return d
- def _do_vctest_copy_2(self, bs):
- if self.metadir:
- self.shouldExist(self.workdir, self.metadir)
- self.shouldNotExist(self.workdir, "newfile")
- self.shouldExist(self.vcdir, "newvcfile")
- self.shouldExist(self.workdir, "newvcfile")
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.checkGotRevisionIsLatest(bs)
- self.touch(self.workdir, "newfile")
-
- def _do_vctest_export(self, res):
- d = self.doBuild() # export rebuild clobbers new files
- d.addCallback(self._do_vctest_export_1)
- return d
- def _do_vctest_export_1(self, bs):
- self.shouldNotExist(self.workdir, self.metadir)
- self.shouldNotExist(self.workdir, "newfile")
- self.failUnlessEqual(bs.getProperty("revision"), None)
- #self.checkGotRevisionIsLatest(bs)
- # VC 'export' is not required to have a got_revision
- self.touch(self.workdir, "newfile")
-
- d = self.doBuild() # export rebuild clobbers new files
- d.addCallback(self._do_vctest_export_2)
- return d
- def _do_vctest_export_2(self, bs):
- self.shouldNotExist(self.workdir, self.metadir)
- self.shouldNotExist(self.workdir, "newfile")
- self.failUnlessEqual(bs.getProperty("revision"), None)
- #self.checkGotRevisionIsLatest(bs)
- # VC 'export' is not required to have a got_revision
-
- def do_patch(self):
- vctype = self.vctype
- args = self.helper.vcargs
- m = self.master
- self.vcdir = os.path.join(self.slavebase, "vc-dir", "source")
- self.workdir = os.path.join(self.slavebase, "vc-dir", "build")
- s = "s(%s, timeout=200, workdir='build', mode='%%s'" % (vctype,)
- for k,v in args.items():
- s += ", %s=%s" % (k, repr(v))
- s += ")"
- self.config = config_vc % s
-
- m.loadConfig(self.config % "clobber")
- m.readConfig = True
- m.startService()
-
- ss = SourceStamp(revision=self.helper.trunk[-1], patch=(0, p0_diff))
-
- d = self.connectSlave()
- d.addCallback(lambda res: self.doBuild(ss=ss))
- d.addCallback(self._doPatch_1)
- return d
- def _doPatch_1(self, bs):
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % self.helper.version)
- # make sure the file actually got patched
- subdir_c = os.path.join(self.slavebase, "vc-dir", "build",
- "subdir", "subdir.c")
- data = open(subdir_c, "r").read()
- self.failUnlessIn("Hello patched subdir.\\n", data)
- self.failUnlessEqual(bs.getProperty("revision"),
- self.helper.trunk[-1])
- self.checkGotRevision(bs, self.helper.trunk[-1])
-
- # make sure that a rebuild does not use the leftover patched workdir
- d = self.master.loadConfig(self.config % "update")
- d.addCallback(lambda res: self.doBuild(ss=None))
- d.addCallback(self._doPatch_2)
- return d
- def _doPatch_2(self, bs):
- # make sure the file is back to its original
- subdir_c = os.path.join(self.slavebase, "vc-dir", "build",
- "subdir", "subdir.c")
- data = open(subdir_c, "r").read()
- self.failUnlessIn("Hello subdir.\\n", data)
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.checkGotRevisionIsLatest(bs)
-
- # now make sure we can patch an older revision. We need at least two
- # revisions here, so we might have to create one first
- if len(self.helper.trunk) < 2:
- d = self.helper.vc_revise()
- d.addCallback(self._doPatch_3)
- return d
- return self._doPatch_3()
-
- def _doPatch_3(self, res=None):
- ss = SourceStamp(revision=self.helper.trunk[-2], patch=(0, p0_diff))
- d = self.doBuild(ss=ss)
- d.addCallback(self._doPatch_4)
- return d
- def _doPatch_4(self, bs):
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % (self.helper.version-1))
- # and make sure the file actually got patched
- subdir_c = os.path.join(self.slavebase, "vc-dir", "build",
- "subdir", "subdir.c")
- data = open(subdir_c, "r").read()
- self.failUnlessIn("Hello patched subdir.\\n", data)
- self.failUnlessEqual(bs.getProperty("revision"),
- self.helper.trunk[-2])
- self.checkGotRevision(bs, self.helper.trunk[-2])
-
- # now check that we can patch a branch
- ss = SourceStamp(branch=self.helper.branchname,
- revision=self.helper.branch[-1],
- patch=(0, p0_diff))
- d = self.doBuild(ss=ss)
- d.addCallback(self._doPatch_5)
- return d
- def _doPatch_5(self, bs):
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % 1)
- self.shouldContain(self.workdir, "main.c", "Hello branch.")
- subdir_c = os.path.join(self.slavebase, "vc-dir", "build",
- "subdir", "subdir.c")
- data = open(subdir_c, "r").read()
- self.failUnlessIn("Hello patched subdir.\\n", data)
- self.failUnlessEqual(bs.getProperty("revision"),
- self.helper.branch[-1])
- self.failUnlessEqual(bs.getProperty("branch"), self.helper.branchname)
- self.checkGotRevision(bs, self.helper.branch[-1])
-
-
- def do_vctest_once(self, shouldSucceed):
- m = self.master
- vctype = self.vctype
- args = self.helper.vcargs
- vcdir = os.path.join(self.slavebase, "vc-dir", "source")
- workdir = os.path.join(self.slavebase, "vc-dir", "build")
- # woo double-substitution
- s = "s(%s, timeout=200, workdir='build', mode='clobber'" % (vctype,)
- for k,v in args.items():
- s += ", %s=%s" % (k, repr(v))
- s += ")"
- config = config_vc % s
-
- m.loadConfig(config)
- m.readConfig = True
- m.startService()
-
- self.connectSlave()
- d = self.doBuild(shouldSucceed) # initial checkout
- return d
-
- def do_branch(self):
- log.msg("do_branch")
- vctype = self.vctype
- args = self.helper.vcargs
- m = self.master
- self.vcdir = os.path.join(self.slavebase, "vc-dir", "source")
- self.workdir = os.path.join(self.slavebase, "vc-dir", "build")
- s = "s(%s, timeout=200, workdir='build', mode='%%s'" % (vctype,)
- for k,v in args.items():
- s += ", %s=%s" % (k, repr(v))
- s += ")"
- self.config = config_vc % s
-
- m.loadConfig(self.config % "update")
- m.readConfig = True
- m.startService()
-
- # first we do a build of the trunk
- d = self.connectSlave()
- d.addCallback(lambda res: self.doBuild(ss=SourceStamp()))
- d.addCallback(self._doBranch_1)
- return d
- def _doBranch_1(self, bs):
- log.msg("_doBranch_1")
- # make sure the checkout was of the trunk
- main_c = os.path.join(self.slavebase, "vc-dir", "build", "main.c")
- data = open(main_c, "r").read()
- self.failUnlessIn("Hello world.", data)
-
- # now do a checkout on the branch. The change in branch name should
- # trigger a clobber.
- self.touch(self.workdir, "newfile")
- d = self.doBuild(ss=SourceStamp(branch=self.helper.branchname))
- d.addCallback(self._doBranch_2)
- return d
- def _doBranch_2(self, bs):
- log.msg("_doBranch_2")
- # make sure it was on the branch
- main_c = os.path.join(self.slavebase, "vc-dir", "build", "main.c")
- data = open(main_c, "r").read()
- self.failUnlessIn("Hello branch.", data)
- # and make sure the tree was clobbered
- self.shouldNotExist(self.workdir, "newfile")
-
- # doing another build on the same branch should not clobber the tree
- self.touch(self.workdir, "newbranchfile")
- d = self.doBuild(ss=SourceStamp(branch=self.helper.branchname))
- d.addCallback(self._doBranch_3)
- return d
- def _doBranch_3(self, bs):
- log.msg("_doBranch_3")
- # make sure it is still on the branch
- main_c = os.path.join(self.slavebase, "vc-dir", "build", "main.c")
- data = open(main_c, "r").read()
- self.failUnlessIn("Hello branch.", data)
- # and make sure the tree was not clobbered
- self.shouldExist(self.workdir, "newbranchfile")
-
- # now make sure that a non-branch checkout clobbers the tree
- d = self.doBuild(ss=SourceStamp())
- d.addCallback(self._doBranch_4)
- return d
- def _doBranch_4(self, bs):
- log.msg("_doBranch_4")
- # make sure it was on the trunk
- main_c = os.path.join(self.slavebase, "vc-dir", "build", "main.c")
- data = open(main_c, "r").read()
- self.failUnlessIn("Hello world.", data)
- self.shouldNotExist(self.workdir, "newbranchfile")
-
- def do_getpatch(self, doBranch=True):
- log.msg("do_getpatch")
- # prepare a buildslave to do checkouts
- vctype = self.vctype
- args = self.helper.vcargs
- m = self.master
- self.vcdir = os.path.join(self.slavebase, "vc-dir", "source")
- self.workdir = os.path.join(self.slavebase, "vc-dir", "build")
- # woo double-substitution
- s = "s(%s, timeout=200, workdir='build', mode='%%s'" % (vctype,)
- for k,v in args.items():
- s += ", %s=%s" % (k, repr(v))
- s += ")"
- config = config_vc % s
-
- m.loadConfig(config % 'clobber')
- m.readConfig = True
- m.startService()
-
- d = self.connectSlave()
-
- # then set up the "developer's tree". first we modify a tree from the
- # head of the trunk
- tmpdir = "try_workdir"
- self.trydir = os.path.join(self.helper.repbase, tmpdir)
- rmdirRecursive(self.trydir)
- d.addCallback(self.do_getpatch_trunkhead)
- d.addCallback(self.do_getpatch_trunkold)
- if doBranch:
- d.addCallback(self.do_getpatch_branch)
- d.addCallback(self.do_getpatch_finish)
- return d
-
- def do_getpatch_finish(self, res):
- log.msg("do_getpatch_finish")
- self.helper.vc_try_finish(self.trydir)
- return res
-
- def try_shouldMatch(self, filename):
- devfilename = os.path.join(self.trydir, filename)
- devfile = open(devfilename, "r").read()
- slavefilename = os.path.join(self.workdir, filename)
- slavefile = open(slavefilename, "r").read()
- self.failUnlessEqual(devfile, slavefile,
- ("slavefile (%s) contains '%s'. "
- "developer's file (%s) contains '%s'. "
- "These ought to match") %
- (slavefilename, slavefile,
- devfilename, devfile))
-
- def do_getpatch_trunkhead(self, res):
- log.msg("do_getpatch_trunkhead")
- d = self.helper.vc_try_checkout(self.trydir, self.helper.trunk[-1])
- d.addCallback(self._do_getpatch_trunkhead_1)
- return d
- def _do_getpatch_trunkhead_1(self, res):
- log.msg("_do_getpatch_trunkhead_1")
- d = tryclient.getSourceStamp(self.vctype_try, self.trydir, None)
- d.addCallback(self._do_getpatch_trunkhead_2)
- return d
- def _do_getpatch_trunkhead_2(self, ss):
- log.msg("_do_getpatch_trunkhead_2")
- d = self.doBuild(ss=ss)
- d.addCallback(self._do_getpatch_trunkhead_3)
- return d
- def _do_getpatch_trunkhead_3(self, res):
- log.msg("_do_getpatch_trunkhead_3")
- # verify that the resulting buildslave tree matches the developer's
- self.try_shouldMatch("main.c")
- self.try_shouldMatch("version.c")
- self.try_shouldMatch(os.path.join("subdir", "subdir.c"))
-
- def do_getpatch_trunkold(self, res):
- log.msg("do_getpatch_trunkold")
- # now try a tree from an older revision. We need at least two
- # revisions here, so we might have to create one first
- if len(self.helper.trunk) < 2:
- d = self.helper.vc_revise()
- d.addCallback(self._do_getpatch_trunkold_1)
- return d
- return self._do_getpatch_trunkold_1()
- def _do_getpatch_trunkold_1(self, res=None):
- log.msg("_do_getpatch_trunkold_1")
- d = self.helper.vc_try_checkout(self.trydir, self.helper.trunk[-2])
- d.addCallback(self._do_getpatch_trunkold_2)
- return d
- def _do_getpatch_trunkold_2(self, res):
- log.msg("_do_getpatch_trunkold_2")
- d = tryclient.getSourceStamp(self.vctype_try, self.trydir, None)
- d.addCallback(self._do_getpatch_trunkold_3)
- return d
- def _do_getpatch_trunkold_3(self, ss):
- log.msg("_do_getpatch_trunkold_3")
- d = self.doBuild(ss=ss)
- d.addCallback(self._do_getpatch_trunkold_4)
- return d
- def _do_getpatch_trunkold_4(self, res):
- log.msg("_do_getpatch_trunkold_4")
- # verify that the resulting buildslave tree matches the developer's
- self.try_shouldMatch("main.c")
- self.try_shouldMatch("version.c")
- self.try_shouldMatch(os.path.join("subdir", "subdir.c"))
-
- def do_getpatch_branch(self, res):
- log.msg("do_getpatch_branch")
- # now try a tree from a branch
- d = self.helper.vc_try_checkout(self.trydir, self.helper.branch[-1],
- self.helper.branchname)
- d.addCallback(self._do_getpatch_branch_1)
- return d
- def _do_getpatch_branch_1(self, res):
- log.msg("_do_getpatch_branch_1")
- d = tryclient.getSourceStamp(self.vctype_try, self.trydir,
- self.helper.try_branchname)
- d.addCallback(self._do_getpatch_branch_2)
- return d
- def _do_getpatch_branch_2(self, ss):
- log.msg("_do_getpatch_branch_2")
- d = self.doBuild(ss=ss)
- d.addCallback(self._do_getpatch_branch_3)
- return d
- def _do_getpatch_branch_3(self, res):
- log.msg("_do_getpatch_branch_3")
- # verify that the resulting buildslave tree matches the developer's
- self.try_shouldMatch("main.c")
- self.try_shouldMatch("version.c")
- self.try_shouldMatch(os.path.join("subdir", "subdir.c"))
-
-
- def dumpPatch(self, patch):
- # this exists to help me figure out the right 'patchlevel' value
- # should be returned by tryclient.getSourceStamp
- n = self.mktemp()
- open(n,"w").write(patch)
- d = self.runCommand(".", ["lsdiff", n])
- def p(res): print "lsdiff:", res.strip().split("\n")
- d.addCallback(p)
- return d
-
-
- def tearDown(self):
- d = defer.succeed(None)
- if self.slave:
- d2 = self.master.botmaster.waitUntilBuilderDetached("vc")
- d.addCallback(lambda res: self.slave.stopService())
- d.addCallback(lambda res: d2)
- if self.master:
- d.addCallback(lambda res: self.master.stopService())
- if self.httpServer:
- d.addCallback(lambda res: self.httpServer.stopListening())
- def stopHTTPTimer():
- try:
- from twisted.web import http # Twisted-2.0
- except ImportError:
- from twisted.protocols import http # Twisted-1.3
- http._logDateTimeStop() # shut down the internal timer. DUMB!
- d.addCallback(lambda res: stopHTTPTimer())
- d.addCallback(lambda res: self.tearDown2())
- return maybeWait(d)
-
- def tearDown2(self):
- pass
-
-class CVSHelper(BaseHelper):
- branchname = "branch"
- try_branchname = "branch"
-
- def capable(self):
- cvspaths = which('cvs')
- if not cvspaths:
- return (False, "CVS is not installed")
- # cvs-1.10 (as shipped with OS-X 10.3 "Panther") is too old for this
- # test. There is a situation where we check out a tree, make a
- # change, then commit it back, and CVS refuses to believe that we're
- # operating in a CVS tree. I tested cvs-1.12.9 and it works ok; OS-X
- # 10.4 "Tiger" comes with cvs-1.11, but I haven't tested that yet.
- # For now, skip the tests if we've got 1.10.
- log.msg("running %s --version.." % (cvspaths[0],))
- d = utils.getProcessOutput(cvspaths[0], ["--version"],
- env=os.environ)
- d.addCallback(self._capable, cvspaths[0])
- return d
-
- def _capable(self, v, vcexe):
- m = re.search(r'\(CVS\) ([\d\.]+) ', v)
- if not m:
- log.msg("couldn't identify CVS version number in output:")
- log.msg("'''%s'''" % v)
- log.msg("skipping tests")
- return (False, "Found CVS but couldn't identify its version")
- ver = m.group(1)
- log.msg("found CVS version '%s'" % ver)
- if ver == "1.10":
- return (False, "Found CVS, but it is too old")
- self.vcexe = vcexe
- return (True, None)
-
- def getdate(self):
- # this timestamp is eventually passed to CVS in a -D argument, and
- # strftime's %z specifier doesn't seem to work reliably (I get +0000
- # where I should get +0700 under linux sometimes, and windows seems
- # to want to put a verbose 'Eastern Standard Time' in there), so
- # leave off the timezone specifier and treat this as localtime. A
- # valid alternative would be to use a hard-coded +0000 and
- # time.gmtime().
- return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
-
- def createRepository(self):
- self.createBasedir()
- self.cvsrep = cvsrep = os.path.join(self.repbase, "CVS-Repository")
- tmp = os.path.join(self.repbase, "cvstmp")
-
- w = self.dovc(self.repbase, "-d %s init" % cvsrep)
- yield w; w.getResult() # we must getResult() to raise any exceptions
-
- self.populate(tmp)
- cmd = ("-d %s import" % cvsrep +
- " -m sample_project_files sample vendortag start")
- w = self.dovc(tmp, cmd)
- yield w; w.getResult()
- rmdirRecursive(tmp)
- # take a timestamp as the first revision number
- time.sleep(2)
- self.addTrunkRev(self.getdate())
- time.sleep(2)
-
- w = self.dovc(self.repbase,
- "-d %s checkout -d cvstmp sample" % self.cvsrep)
- yield w; w.getResult()
-
- w = self.dovc(tmp, "tag -b %s" % self.branchname)
- yield w; w.getResult()
- self.populate_branch(tmp)
- w = self.dovc(tmp,
- "commit -m commit_on_branch -r %s" % self.branchname)
- yield w; w.getResult()
- rmdirRecursive(tmp)
- time.sleep(2)
- self.addBranchRev(self.getdate())
- time.sleep(2)
- self.vcargs = { 'cvsroot': self.cvsrep, 'cvsmodule': "sample" }
- createRepository = deferredGenerator(createRepository)
-
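# Aside (commentary, not part of the deleted file): the "yield w;
# w.getResult()" pattern used throughout these helpers is Twisted's old
# deferredGenerator idiom, the precursor to inlineCallbacks. do()/dovc() hand
# back their Deferred wrapped in waitForDeferred; the yield suspends the
# generator until that Deferred fires, and getResult() either returns the
# command's output or re-raises its failure. Wrapping the generator with
# deferredGenerator turns it into an ordinary function returning a Deferred.
# A minimal stand-alone sketch (the command itself is made up):
#
#   from twisted.internet.defer import waitForDeferred, deferredGenerator
#
#   def revise(helper, repodir):
#       w = waitForDeferred(helper.runCommand(repodir, "cvs -q update"))
#       yield w
#       out = w.getResult()          # raises here if the command failed
#       log.msg("update said: %s" % (out,))
#   revise = deferredGenerator(revise)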
-
- def vc_revise(self):
- tmp = os.path.join(self.repbase, "cvstmp")
-
- w = self.dovc(self.repbase,
- "-d %s checkout -d cvstmp sample" % self.cvsrep)
- yield w; w.getResult()
- self.version += 1
- version_c = VERSION_C % self.version
- open(os.path.join(tmp, "version.c"), "w").write(version_c)
- w = self.dovc(tmp,
- "commit -m revised_to_%d version.c" % self.version)
- yield w; w.getResult()
- rmdirRecursive(tmp)
- time.sleep(2)
- self.addTrunkRev(self.getdate())
- time.sleep(2)
- vc_revise = deferredGenerator(vc_revise)
-
- def vc_try_checkout(self, workdir, rev, branch=None):
- # 'workdir' is an absolute path
- assert os.path.abspath(workdir) == workdir
- cmd = [self.vcexe, "-d", self.cvsrep, "checkout",
- "-d", workdir,
- "-D", rev]
- if branch is not None:
- cmd.append("-r")
- cmd.append(branch)
- cmd.append("sample")
- w = self.do(self.repbase, cmd)
- yield w; w.getResult()
- open(os.path.join(workdir, "subdir", "subdir.c"), "w").write(TRY_C)
- vc_try_checkout = deferredGenerator(vc_try_checkout)
-
- def vc_try_finish(self, workdir):
- rmdirRecursive(workdir)
-
-class CVS(VCBase, unittest.TestCase):
- vc_name = "cvs"
-
- metadir = "CVS"
- vctype = "step.CVS"
- vctype_try = "cvs"
- # CVS gives us got_revision, but it is based entirely upon the local
- # clock, which means it is unlikely to match the timestamp taken earlier.
- # This might be enough for common use, but won't be good enough for our
- # tests to accept, so pretend it doesn't have got_revision at all.
- has_got_revision = False
-
- def testCheckout(self):
- d = self.do_vctest()
- return maybeWait(d)
-
- def testPatch(self):
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- d = self.do_branch()
- return maybeWait(d)
-
- def testTry(self):
- d = self.do_getpatch(doBranch=False)
- return maybeWait(d)
-
-VCS.registerVC(CVS.vc_name, CVSHelper())
-
-
-class SVNHelper(BaseHelper):
- branchname = "sample/branch"
- try_branchname = "sample/branch"
-
- def capable(self):
- svnpaths = which('svn')
- svnadminpaths = which('svnadmin')
- if not svnpaths:
- return (False, "SVN is not installed")
- if not svnadminpaths:
- return (False, "svnadmin is not installed")
- # we need svn to be compiled with the ra_local access
- # module
- log.msg("running svn --version..")
- env = os.environ.copy()
- env['LC_ALL'] = "C"
- d = utils.getProcessOutput(svnpaths[0], ["--version"],
- env=env)
- d.addCallback(self._capable, svnpaths[0], svnadminpaths[0])
- return d
-
- def _capable(self, v, vcexe, svnadmin):
- if v.find("handles 'file' schem") != -1:
- # older versions say 'schema', 1.2.0 and beyond say 'scheme'
- self.vcexe = vcexe
- self.svnadmin = svnadmin
- return (True, None)
- excuse = ("%s found but it does not support 'file:' " +
- "schema, skipping svn tests") % vcexe
- log.msg(excuse)
- return (False, excuse)
-
- def createRepository(self):
- self.createBasedir()
- self.svnrep = os.path.join(self.repbase,
- "SVN-Repository").replace('\\','/')
- tmp = os.path.join(self.repbase, "svntmp")
- if sys.platform == 'win32':
-            # On Windows, paths do not start with a /
- self.svnurl = "file:///%s" % self.svnrep
- else:
- self.svnurl = "file://%s" % self.svnrep
- self.svnurl_trunk = self.svnurl + "/sample/trunk"
- self.svnurl_branch = self.svnurl + "/sample/branch"
-
- w = self.do(self.repbase, self.svnadmin+" create %s" % self.svnrep)
- yield w; w.getResult()
-
- self.populate(tmp)
- w = self.dovc(tmp,
- "import -m sample_project_files %s" %
- self.svnurl_trunk)
- yield w; out = w.getResult()
- rmdirRecursive(tmp)
- m = re.search(r'Committed revision (\d+)\.', out)
- assert m.group(1) == "1" # first revision is always "1"
- self.addTrunkRev(int(m.group(1)))
-
- w = self.dovc(self.repbase,
- "checkout %s svntmp" % self.svnurl_trunk)
- yield w; w.getResult()
-
- w = self.dovc(tmp, "cp -m make_branch %s %s" % (self.svnurl_trunk,
- self.svnurl_branch))
- yield w; w.getResult()
- w = self.dovc(tmp, "switch %s" % self.svnurl_branch)
- yield w; w.getResult()
- self.populate_branch(tmp)
- w = self.dovc(tmp, "commit -m commit_on_branch")
- yield w; out = w.getResult()
- rmdirRecursive(tmp)
- m = re.search(r'Committed revision (\d+)\.', out)
- self.addBranchRev(int(m.group(1)))
- createRepository = deferredGenerator(createRepository)
-
- def vc_revise(self):
- tmp = os.path.join(self.repbase, "svntmp")
- rmdirRecursive(tmp)
- log.msg("vc_revise" + self.svnurl_trunk)
- w = self.dovc(self.repbase,
- "checkout %s svntmp" % self.svnurl_trunk)
- yield w; w.getResult()
- self.version += 1
- version_c = VERSION_C % self.version
- open(os.path.join(tmp, "version.c"), "w").write(version_c)
- w = self.dovc(tmp, "commit -m revised_to_%d" % self.version)
- yield w; out = w.getResult()
- m = re.search(r'Committed revision (\d+)\.', out)
- self.addTrunkRev(int(m.group(1)))
- rmdirRecursive(tmp)
- vc_revise = deferredGenerator(vc_revise)
-
- def vc_try_checkout(self, workdir, rev, branch=None):
- assert os.path.abspath(workdir) == workdir
- if os.path.exists(workdir):
- rmdirRecursive(workdir)
- if not branch:
- svnurl = self.svnurl_trunk
- else:
- # N.B.: this is *not* os.path.join: SVN URLs use slashes
- # regardless of the host operating system's filepath separator
- svnurl = self.svnurl + "/" + branch
- w = self.dovc(self.repbase,
- "checkout %s %s" % (svnurl, workdir))
- yield w; w.getResult()
- open(os.path.join(workdir, "subdir", "subdir.c"), "w").write(TRY_C)
- vc_try_checkout = deferredGenerator(vc_try_checkout)
-
- def vc_try_finish(self, workdir):
- rmdirRecursive(workdir)
-
-
-class SVN(VCBase, unittest.TestCase):
- vc_name = "svn"
-
- metadir = ".svn"
- vctype = "step.SVN"
- vctype_try = "svn"
- has_got_revision = True
- has_got_revision_branches_are_merged = True
-
- def testCheckout(self):
- # we verify this one with the svnurl style of vcargs. We test the
- # baseURL/defaultBranch style in testPatch and testCheckoutBranch.
- self.helper.vcargs = { 'svnurl': self.helper.svnurl_trunk }
- d = self.do_vctest()
- return maybeWait(d)
-
- def testPatch(self):
- self.helper.vcargs = { 'baseURL': self.helper.svnurl + "/",
- 'defaultBranch': "sample/trunk",
- }
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- self.helper.vcargs = { 'baseURL': self.helper.svnurl + "/",
- 'defaultBranch': "sample/trunk",
- }
- d = self.do_branch()
- return maybeWait(d)
-
- def testTry(self):
- # extract the base revision and patch from a modified tree, use it to
- # create the same contents on the buildslave
- self.helper.vcargs = { 'baseURL': self.helper.svnurl + "/",
- 'defaultBranch': "sample/trunk",
- }
- d = self.do_getpatch()
- return maybeWait(d)
-
-VCS.registerVC(SVN.vc_name, SVNHelper())
-
-class DarcsHelper(BaseHelper):
- branchname = "branch"
- try_branchname = "branch"
-
- def capable(self):
- darcspaths = which('darcs')
- if not darcspaths:
- return (False, "Darcs is not installed")
- self.vcexe = darcspaths[0]
- return (True, None)
-
- def createRepository(self):
- self.createBasedir()
- self.darcs_base = os.path.join(self.repbase, "Darcs-Repository")
- self.rep_trunk = os.path.join(self.darcs_base, "trunk")
- self.rep_branch = os.path.join(self.darcs_base, "branch")
- tmp = os.path.join(self.repbase, "darcstmp")
-
- os.makedirs(self.rep_trunk)
- w = self.dovc(self.rep_trunk, "initialize")
- yield w; w.getResult()
- os.makedirs(self.rep_branch)
- w = self.dovc(self.rep_branch, "initialize")
- yield w; w.getResult()
-
- self.populate(tmp)
- w = self.dovc(tmp, "initialize")
- yield w; w.getResult()
- w = self.dovc(tmp, "add -r .")
- yield w; w.getResult()
- w = self.dovc(tmp, "record -a -m initial_import --skip-long-comment -A test@buildbot.sf.net")
- yield w; w.getResult()
- w = self.dovc(tmp, "push -a %s" % self.rep_trunk)
- yield w; w.getResult()
- w = self.dovc(tmp, "changes --context")
- yield w; out = w.getResult()
- self.addTrunkRev(out)
-
- self.populate_branch(tmp)
- w = self.dovc(tmp, "record -a --ignore-times -m commit_on_branch --skip-long-comment -A test@buildbot.sf.net")
- yield w; w.getResult()
- w = self.dovc(tmp, "push -a %s" % self.rep_branch)
- yield w; w.getResult()
- w = self.dovc(tmp, "changes --context")
- yield w; out = w.getResult()
- self.addBranchRev(out)
- rmdirRecursive(tmp)
- createRepository = deferredGenerator(createRepository)
-
- def vc_revise(self):
- tmp = os.path.join(self.repbase, "darcstmp")
- os.makedirs(tmp)
- w = self.dovc(tmp, "initialize")
- yield w; w.getResult()
- w = self.dovc(tmp, "pull -a %s" % self.rep_trunk)
- yield w; w.getResult()
-
- self.version += 1
- version_c = VERSION_C % self.version
- open(os.path.join(tmp, "version.c"), "w").write(version_c)
- w = self.dovc(tmp, "record -a --ignore-times -m revised_to_%d --skip-long-comment -A test@buildbot.sf.net" % self.version)
- yield w; w.getResult()
- w = self.dovc(tmp, "push -a %s" % self.rep_trunk)
- yield w; w.getResult()
- w = self.dovc(tmp, "changes --context")
- yield w; out = w.getResult()
- self.addTrunkRev(out)
- rmdirRecursive(tmp)
- vc_revise = deferredGenerator(vc_revise)
-
- def vc_try_checkout(self, workdir, rev, branch=None):
- assert os.path.abspath(workdir) == workdir
- if os.path.exists(workdir):
- rmdirRecursive(workdir)
- os.makedirs(workdir)
- w = self.dovc(workdir, "initialize")
- yield w; w.getResult()
- if not branch:
- rep = self.rep_trunk
- else:
- rep = os.path.join(self.darcs_base, branch)
- w = self.dovc(workdir, "pull -a %s" % rep)
- yield w; w.getResult()
- open(os.path.join(workdir, "subdir", "subdir.c"), "w").write(TRY_C)
- vc_try_checkout = deferredGenerator(vc_try_checkout)
-
- def vc_try_finish(self, workdir):
- rmdirRecursive(workdir)
-
-
-class Darcs(VCBase, unittest.TestCase):
- vc_name = "darcs"
-
- # Darcs has a metadir="_darcs", but it does not have an 'export'
- # mode
- metadir = None
- vctype = "step.Darcs"
- vctype_try = "darcs"
- has_got_revision = True
-
- def testCheckout(self):
- self.helper.vcargs = { 'repourl': self.helper.rep_trunk }
- d = self.do_vctest(testRetry=False)
-
- # TODO: testRetry has the same problem with Darcs as it does for
- # Arch
- return maybeWait(d)
-
- def testPatch(self):
- self.helper.vcargs = { 'baseURL': self.helper.darcs_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- self.helper.vcargs = { 'baseURL': self.helper.darcs_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_branch()
- return maybeWait(d)
-
- def testCheckoutHTTP(self):
- self.serveHTTP()
- repourl = "http://localhost:%d/Darcs-Repository/trunk" % self.httpPort
- self.helper.vcargs = { 'repourl': repourl }
- d = self.do_vctest(testRetry=False)
- return maybeWait(d)
-
- def testTry(self):
- self.helper.vcargs = { 'baseURL': self.helper.darcs_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_getpatch()
- return maybeWait(d)
-
-VCS.registerVC(Darcs.vc_name, DarcsHelper())
-
-
-class ArchCommon:
- def registerRepository(self, coordinates):
- a = self.archname
- w = self.dovc(self.repbase, "archives %s" % a)
- yield w; out = w.getResult()
- if out:
- w = self.dovc(self.repbase, "register-archive -d %s" % a)
- yield w; w.getResult()
- w = self.dovc(self.repbase, "register-archive %s" % coordinates)
- yield w; w.getResult()
- registerRepository = deferredGenerator(registerRepository)
-
- def unregisterRepository(self):
- a = self.archname
- w = self.dovc(self.repbase, "archives %s" % a)
- yield w; out = w.getResult()
- if out:
- w = self.dovc(self.repbase, "register-archive -d %s" % a)
- yield w; out = w.getResult()
- unregisterRepository = deferredGenerator(unregisterRepository)
-
-class TlaHelper(BaseHelper, ArchCommon):
- defaultbranch = "testvc--mainline--1"
- branchname = "testvc--branch--1"
- try_branchname = None # TlaExtractor can figure it out by itself
- archcmd = "tla"
-
- def capable(self):
- tlapaths = which('tla')
- if not tlapaths:
- return (False, "Arch (tla) is not installed")
- self.vcexe = tlapaths[0]
- return (True, None)
-
- def do_get(self, basedir, archive, branch, newdir):
- # the 'get' syntax is different between tla and baz. baz, while
- # claiming to honor an --archive argument, in fact ignores it. The
- # correct invocation is 'baz get archive/revision newdir'.
- if self.archcmd == "tla":
- w = self.dovc(basedir,
- "get -A %s %s %s" % (archive, branch, newdir))
- else:
- w = self.dovc(basedir,
- "get %s/%s %s" % (archive, branch, newdir))
- return w
-
- def createRepository(self):
- self.createBasedir()
- # first check to see if bazaar is around, since we'll need to know
- # later
- d = VCS.capable(Bazaar.vc_name)
- d.addCallback(self._createRepository_1)
- return d
-
- def _createRepository_1(self, res):
- has_baz = res[0]
-
-        # pick a hopefully unique string for the archive name, in the form
-        # test-%s-%d@buildbot.sf.net--testvc (archcmd and pid), since
-        # otherwise multiple copies of the unit tests run in the same user
-        # account will collide (the archive names are kept in the per-user
-        # ~/.arch-params/ directory).
- pid = os.getpid()
- self.archname = "test-%s-%d@buildbot.sf.net--testvc" % (self.archcmd,
- pid)
- trunk = self.defaultbranch
- branch = self.branchname
-
- repword = self.archcmd.capitalize()
- self.archrep = os.path.join(self.repbase, "%s-Repository" % repword)
- tmp = os.path.join(self.repbase, "archtmp")
- a = self.archname
-
- self.populate(tmp)
-
- w = self.dovc(tmp, "my-id", failureIsOk=True)
- yield w; res = w.getResult()
- if not res:
- # tla will fail a lot of operations if you have not set an ID
- w = self.do(tmp, [self.vcexe, "my-id",
- "Buildbot Test Suite <test@buildbot.sf.net>"])
- yield w; w.getResult()
-
- if has_baz:
- # bazaar keeps a cache of revisions, but this test creates a new
- # archive each time it is run, so the cache causes errors.
- # Disable the cache to avoid these problems. This will be
- # slightly annoying for people who run the buildbot tests under
- # the same UID as one which uses baz on a regular basis, but
- # bazaar doesn't give us a way to disable the cache just for this
- # one archive.
- cmd = "%s cache-config --disable" % VCS.getHelper('bazaar').vcexe
- w = self.do(tmp, cmd)
- yield w; w.getResult()
-
- w = waitForDeferred(self.unregisterRepository())
- yield w; w.getResult()
-
- # these commands can be run in any directory
- w = self.dovc(tmp, "make-archive -l %s %s" % (a, self.archrep))
- yield w; w.getResult()
- if self.archcmd == "tla":
- w = self.dovc(tmp, "archive-setup -A %s %s" % (a, trunk))
- yield w; w.getResult()
- w = self.dovc(tmp, "archive-setup -A %s %s" % (a, branch))
- yield w; w.getResult()
- else:
- # baz does not require an 'archive-setup' step
- pass
-
- # these commands must be run in the directory that is to be imported
- w = self.dovc(tmp, "init-tree --nested %s/%s" % (a, trunk))
- yield w; w.getResult()
- files = " ".join(["main.c", "version.c", "subdir",
- os.path.join("subdir", "subdir.c")])
- w = self.dovc(tmp, "add-id %s" % files)
- yield w; w.getResult()
-
- w = self.dovc(tmp, "import %s/%s" % (a, trunk))
- yield w; out = w.getResult()
- self.addTrunkRev("base-0")
-
- # create the branch
- if self.archcmd == "tla":
- branchstart = "%s--base-0" % trunk
- w = self.dovc(tmp, "tag -A %s %s %s" % (a, branchstart, branch))
- yield w; w.getResult()
- else:
- w = self.dovc(tmp, "branch %s" % branch)
- yield w; w.getResult()
-
- rmdirRecursive(tmp)
-
- # check out the branch
- w = self.do_get(self.repbase, a, branch, "archtmp")
- yield w; w.getResult()
- # and edit the file
- self.populate_branch(tmp)
- logfile = "++log.%s--%s" % (branch, a)
- logmsg = "Summary: commit on branch\nKeywords:\n\n"
- open(os.path.join(tmp, logfile), "w").write(logmsg)
- w = self.dovc(tmp, "commit")
- yield w; out = w.getResult()
- m = re.search(r'committed %s/%s--([\S]+)' % (a, branch),
- out)
- assert (m.group(1) == "base-0" or m.group(1).startswith("patch-"))
- self.addBranchRev(m.group(1))
-
- w = waitForDeferred(self.unregisterRepository())
- yield w; w.getResult()
- rmdirRecursive(tmp)
-
- # we unregister the repository each time, because we might have
- # changed the coordinates (since we switch from a file: URL to an
- # http: URL for various tests). The buildslave code doesn't forcibly
- # unregister the archive, so we have to do it here.
- w = waitForDeferred(self.unregisterRepository())
- yield w; w.getResult()
-
- _createRepository_1 = deferredGenerator(_createRepository_1)
-
- def vc_revise(self):
- # the fix needs to be done in a workspace that is linked to a
- # read-write version of the archive (i.e., using file-based
- # coordinates instead of HTTP ones), so we re-register the repository
- # before we begin. We unregister it when we're done to make sure the
- # build will re-register the correct one for whichever test is
- # currently being run.
-
- # except, that step.Bazaar really doesn't like it when the archive
- # gets unregistered behind its back. The slave tries to do a 'baz
- # replay' in a tree with an archive that is no longer recognized, and
- # baz aborts with a botched invariant exception. This causes
- # mode=update to fall back to clobber+get, which flunks one of the
- # tests (the 'newfile' check in _do_vctest_update_3 fails)
-
- # to avoid this, we take heroic steps here to leave the archive
- # registration in the same state as we found it.
-
- tmp = os.path.join(self.repbase, "archtmp")
- a = self.archname
-
- w = self.dovc(self.repbase, "archives %s" % a)
- yield w; out = w.getResult()
- assert out
- lines = out.split("\n")
- coordinates = lines[1].strip()
-
- # now register the read-write location
- w = waitForDeferred(self.registerRepository(self.archrep))
- yield w; w.getResult()
-
- trunk = self.defaultbranch
-
- w = self.do_get(self.repbase, a, trunk, "archtmp")
- yield w; w.getResult()
-
- # tla appears to use timestamps to determine which files have
- # changed, so wait long enough for the new file to have a different
- # timestamp
- time.sleep(2)
- self.version += 1
- version_c = VERSION_C % self.version
- open(os.path.join(tmp, "version.c"), "w").write(version_c)
-
- logfile = "++log.%s--%s" % (trunk, a)
- logmsg = "Summary: revised_to_%d\nKeywords:\n\n" % self.version
- open(os.path.join(tmp, logfile), "w").write(logmsg)
- w = self.dovc(tmp, "commit")
- yield w; out = w.getResult()
- m = re.search(r'committed %s/%s--([\S]+)' % (a, trunk),
- out)
- assert (m.group(1) == "base-0" or m.group(1).startswith("patch-"))
- self.addTrunkRev(m.group(1))
-
- # now re-register the original coordinates
- w = waitForDeferred(self.registerRepository(coordinates))
- yield w; w.getResult()
- rmdirRecursive(tmp)
- vc_revise = deferredGenerator(vc_revise)
-
- def vc_try_checkout(self, workdir, rev, branch=None):
- assert os.path.abspath(workdir) == workdir
- if os.path.exists(workdir):
- rmdirRecursive(workdir)
-
- a = self.archname
-
- # register the read-write location, if it wasn't already registered
- w = waitForDeferred(self.registerRepository(self.archrep))
- yield w; w.getResult()
-
- w = self.do_get(self.repbase, a, "testvc--mainline--1", workdir)
- yield w; w.getResult()
-
- # timestamps. ick.
- time.sleep(2)
- open(os.path.join(workdir, "subdir", "subdir.c"), "w").write(TRY_C)
- vc_try_checkout = deferredGenerator(vc_try_checkout)
-
- def vc_try_finish(self, workdir):
- rmdirRecursive(workdir)
-
-class Arch(VCBase, unittest.TestCase):
- vc_name = "tla"
-
- metadir = None
- # Arch has a metadir="{arch}", but it does not have an 'export' mode.
- vctype = "step.Arch"
- vctype_try = "tla"
- has_got_revision = True
-
- def testCheckout(self):
- # these are the coordinates of the read-write archive used by all the
- # non-HTTP tests. testCheckoutHTTP overrides these.
- self.helper.vcargs = {'url': self.helper.archrep,
- 'version': self.helper.defaultbranch }
- d = self.do_vctest(testRetry=False)
- # the current testRetry=True logic doesn't have the desired effect:
- # "update" is a no-op because arch knows that the repository hasn't
-        # changed. Other VC systems will re-checkout missing files on
-        # update; arch just leaves the tree untouched. TODO: come up with
- # some better test logic, probably involving a copy of the
- # repository that has a few changes checked in.
-
- return maybeWait(d)
-
- def testCheckoutHTTP(self):
- self.serveHTTP()
- url = "http://localhost:%d/Tla-Repository" % self.httpPort
- self.helper.vcargs = { 'url': url,
- 'version': "testvc--mainline--1" }
- d = self.do_vctest(testRetry=False)
- return maybeWait(d)
-
- def testPatch(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- 'version': self.helper.defaultbranch }
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- 'version': self.helper.defaultbranch }
- d = self.do_branch()
- return maybeWait(d)
-
- def testTry(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- 'version': self.helper.defaultbranch }
- d = self.do_getpatch()
- return maybeWait(d)
-
-VCS.registerVC(Arch.vc_name, TlaHelper())
-
-
-class BazaarHelper(TlaHelper):
- archcmd = "baz"
-
- def capable(self):
- bazpaths = which('baz')
- if not bazpaths:
- return (False, "Arch (baz) is not installed")
- self.vcexe = bazpaths[0]
- return (True, None)
-
- def setUp2(self, res):
- # we unregister the repository each time, because we might have
- # changed the coordinates (since we switch from a file: URL to an
- # http: URL for various tests). The buildslave code doesn't forcibly
- # unregister the archive, so we have to do it here.
- d = self.unregisterRepository()
- return d
-
-
-class Bazaar(Arch):
- vc_name = "bazaar"
-
- vctype = "step.Bazaar"
- vctype_try = "baz"
- has_got_revision = True
-
- fixtimer = None
-
- def testCheckout(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- # Baz adds the required 'archive' argument
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- }
- d = self.do_vctest(testRetry=False)
- # the current testRetry=True logic doesn't have the desired effect:
- # "update" is a no-op because arch knows that the repository hasn't
-        # changed. Other VC systems will re-checkout missing files on
-        # update; arch just leaves the tree untouched. TODO: come up with
- # some better test logic, probably involving a copy of the
- # repository that has a few changes checked in.
-
- return maybeWait(d)
-
- def testCheckoutHTTP(self):
- self.serveHTTP()
- url = "http://localhost:%d/Baz-Repository" % self.httpPort
- self.helper.vcargs = { 'url': url,
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- }
- d = self.do_vctest(testRetry=False)
- return maybeWait(d)
-
- def testPatch(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- # Baz adds the required 'archive' argument
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- }
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- # Baz adds the required 'archive' argument
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- }
- d = self.do_branch()
- return maybeWait(d)
-
- def testTry(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- # Baz adds the required 'archive' argument
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- }
- d = self.do_getpatch()
- return maybeWait(d)
-
- def fixRepository(self):
- self.fixtimer = None
- self.site.resource = self.root
-
- def testRetry(self):
- # we want to verify that step.Source(retry=) works, and the easiest
- # way to make VC updates break (temporarily) is to break the HTTP
- # server that's providing the repository. Anything else pretty much
- # requires mutating the (read-only) BUILDBOT_TEST_VC repository, or
- # modifying the buildslave's checkout command while it's running.
-
- # this test takes a while to run, so don't bother doing it with
- # anything other than baz
-
- self.serveHTTP()
-
- # break the repository server
- from twisted.web import static
- self.site.resource = static.Data("Sorry, repository is offline",
- "text/plain")
- # and arrange to fix it again in 5 seconds, while the test is
- # running.
- self.fixtimer = reactor.callLater(5, self.fixRepository)
-
- url = "http://localhost:%d/Baz-Repository" % self.httpPort
- self.helper.vcargs = { 'url': url,
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- 'retry': (5.0, 4),
- }
- d = self.do_vctest_once(True)
- d.addCallback(self._testRetry_1)
- return maybeWait(d)
- def _testRetry_1(self, bs):
- # make sure there was mention of the retry attempt in the logs
- l = bs.getLogs()[0]
- self.failUnlessIn("unable to access URL", l.getText(),
- "funny, VC operation didn't fail at least once")
- self.failUnlessIn("update failed, trying 4 more times after 5 seconds",
- l.getTextWithHeaders(),
- "funny, VC operation wasn't reattempted")
-
- def testRetryFails(self):
- # make sure that the build eventually gives up on a repository which
- # is completely unavailable
-
- self.serveHTTP()
-
- # break the repository server, and leave it broken
- from twisted.web import static
- self.site.resource = static.Data("Sorry, repository is offline",
- "text/plain")
-
- url = "http://localhost:%d/Baz-Repository" % self.httpPort
- self.helper.vcargs = {'url': url,
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- 'retry': (0.5, 3),
- }
- d = self.do_vctest_once(False)
- d.addCallback(self._testRetryFails_1)
- return maybeWait(d)
- def _testRetryFails_1(self, bs):
- self.failUnlessEqual(bs.getResults(), FAILURE)
-
- def tearDown2(self):
- if self.fixtimer:
- self.fixtimer.cancel()
- # tell tla to get rid of the leftover archive this test leaves in the
- # user's 'tla archives' listing. The name of this archive is provided
- # by the repository tarball, so the following command must use the
- # same name. We could use archive= to set it explicitly, but if you
- # change it from the default, then 'tla update' won't work.
- d = self.helper.unregisterRepository()
- return d
-
-VCS.registerVC(Bazaar.vc_name, BazaarHelper())
-
-class MercurialHelper(BaseHelper):
- branchname = "branch"
- try_branchname = "branch"
-
- def capable(self):
- hgpaths = which("hg")
- if not hgpaths:
- return (False, "Mercurial is not installed")
- self.vcexe = hgpaths[0]
- return (True, None)
-
- def extract_id(self, output):
- m = re.search(r'^(\w+)', output)
- return m.group(0)
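For reference, the regex above just takes the leading run of word characters from 'hg identify' output; a tiny sketch with a made-up sample string (not captured from a real run):

    # illustrative only: 'hg identify' prints something like "<hash> tip"
    sample = "1a2b3c4d5e6f tip"
    assert re.search(r'^(\w+)', sample).group(0) == "1a2b3c4d5e6f"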
-
- def createRepository(self):
- self.createBasedir()
- self.hg_base = os.path.join(self.repbase, "Mercurial-Repository")
- self.rep_trunk = os.path.join(self.hg_base, "trunk")
- self.rep_branch = os.path.join(self.hg_base, "branch")
- tmp = os.path.join(self.hg_base, "hgtmp")
-
- os.makedirs(self.rep_trunk)
- w = self.dovc(self.rep_trunk, "init")
- yield w; w.getResult()
- os.makedirs(self.rep_branch)
- w = self.dovc(self.rep_branch, "init")
- yield w; w.getResult()
-
- self.populate(tmp)
- w = self.dovc(tmp, "init")
- yield w; w.getResult()
- w = self.dovc(tmp, "add")
- yield w; w.getResult()
- w = self.dovc(tmp, "commit -m initial_import")
- yield w; w.getResult()
- w = self.dovc(tmp, "push %s" % self.rep_trunk)
-        # note that 'hg push' does not actually update the working directory
- yield w; w.getResult()
- w = self.dovc(tmp, "identify")
- yield w; out = w.getResult()
- self.addTrunkRev(self.extract_id(out))
-
- self.populate_branch(tmp)
- w = self.dovc(tmp, "commit -m commit_on_branch")
- yield w; w.getResult()
- w = self.dovc(tmp, "push %s" % self.rep_branch)
- yield w; w.getResult()
- w = self.dovc(tmp, "identify")
- yield w; out = w.getResult()
- self.addBranchRev(self.extract_id(out))
- rmdirRecursive(tmp)
- createRepository = deferredGenerator(createRepository)
-
- def vc_revise(self):
- tmp = os.path.join(self.hg_base, "hgtmp2")
- w = self.dovc(self.hg_base, "clone %s %s" % (self.rep_trunk, tmp))
- yield w; w.getResult()
-
- self.version += 1
- version_c = VERSION_C % self.version
- version_c_filename = os.path.join(tmp, "version.c")
- open(version_c_filename, "w").write(version_c)
- # hg uses timestamps to distinguish files which have changed, so we
- # force the mtime forward a little bit
- future = time.time() + 2*self.version
- os.utime(version_c_filename, (future, future))
- w = self.dovc(tmp, "commit -m revised_to_%d" % self.version)
- yield w; w.getResult()
- w = self.dovc(tmp, "push %s" % self.rep_trunk)
- yield w; w.getResult()
- w = self.dovc(tmp, "identify")
- yield w; out = w.getResult()
- self.addTrunkRev(self.extract_id(out))
- rmdirRecursive(tmp)
- vc_revise = deferredGenerator(vc_revise)
-
- def vc_try_checkout(self, workdir, rev, branch=None):
- assert os.path.abspath(workdir) == workdir
- if os.path.exists(workdir):
- rmdirRecursive(workdir)
- if branch:
- src = self.rep_branch
- else:
- src = self.rep_trunk
- w = self.dovc(self.hg_base, "clone %s %s" % (src, workdir))
- yield w; w.getResult()
- try_c_filename = os.path.join(workdir, "subdir", "subdir.c")
- open(try_c_filename, "w").write(TRY_C)
- future = time.time() + 2*self.version
- os.utime(try_c_filename, (future, future))
- vc_try_checkout = deferredGenerator(vc_try_checkout)
-
- def vc_try_finish(self, workdir):
- rmdirRecursive(workdir)
-
-
-class Mercurial(VCBase, unittest.TestCase):
- vc_name = "hg"
-
- # Mercurial has a metadir=".hg", but it does not have an 'export' mode.
- metadir = None
- vctype = "step.Mercurial"
- vctype_try = "hg"
- has_got_revision = True
-
- def testCheckout(self):
- self.helper.vcargs = { 'repourl': self.helper.rep_trunk }
- d = self.do_vctest(testRetry=False)
-
- # TODO: testRetry has the same problem with Mercurial as it does for
- # Arch
- return maybeWait(d)
-
- def testPatch(self):
- self.helper.vcargs = { 'baseURL': self.helper.hg_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- self.helper.vcargs = { 'baseURL': self.helper.hg_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_branch()
- return maybeWait(d)
-
- def testCheckoutHTTP(self):
- self.serveHTTP()
- repourl = "http://localhost:%d/Mercurial-Repository/trunk/.hg" % self.httpPort
- self.helper.vcargs = { 'repourl': repourl }
- d = self.do_vctest(testRetry=False)
- return maybeWait(d)
-    # TODO: The easiest way to publish hg over HTTP is by running 'hg serve'
-    # as a child process while the test is running. (You can also use a CGI
-    # script, which sounds difficult, or publish the files directly, which
-    # isn't well documented.)
- testCheckoutHTTP.skip = "not yet implemented, use 'hg serve'"
-
- def testTry(self):
- self.helper.vcargs = { 'baseURL': self.helper.hg_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_getpatch()
- return maybeWait(d)
-
-VCS.registerVC(Mercurial.vc_name, MercurialHelper())
-
-
-class Sources(unittest.TestCase):
- # TODO: this needs serious rethink
- def makeChange(self, when=None, revision=None):
- if when:
- when = mktime_tz(parsedate_tz(when))
- return changes.Change("fred", [], "", when=when, revision=revision)
-
- def testCVS1(self):
- r = base.BuildRequest("forced build", SourceStamp())
- b = base.Build([r])
- s = step.CVS(cvsroot=None, cvsmodule=None, workdir=None, build=b)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()), None)
-
- def testCVS2(self):
- c = []
- c.append(self.makeChange("Wed, 08 Sep 2004 09:00:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:01:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:02:00 -0700"))
- r = base.BuildRequest("forced", SourceStamp(changes=c))
- submitted = "Wed, 08 Sep 2004 09:04:00 -0700"
- r.submittedAt = mktime_tz(parsedate_tz(submitted))
- b = base.Build([r])
- s = step.CVS(cvsroot=None, cvsmodule=None, workdir=None, build=b)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()),
- "Wed, 08 Sep 2004 16:03:00 -0000")
-
- def testCVS3(self):
- c = []
- c.append(self.makeChange("Wed, 08 Sep 2004 09:00:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:01:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:02:00 -0700"))
- r = base.BuildRequest("forced", SourceStamp(changes=c))
- submitted = "Wed, 08 Sep 2004 09:04:00 -0700"
- r.submittedAt = mktime_tz(parsedate_tz(submitted))
- b = base.Build([r])
- s = step.CVS(cvsroot=None, cvsmodule=None, workdir=None, build=b,
- checkoutDelay=10)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()),
- "Wed, 08 Sep 2004 16:02:10 -0000")
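The expected strings in testCVS2 and testCVS3 are consistent with taking the timestamp of the newest Change, converting it to UTC, and adding the checkout delay (10 seconds when checkoutDelay=10 is passed, apparently 60 seconds by default). A rough sketch of that arithmetic only, not of the actual step.CVS implementation:

    # illustrative arithmetic: 09:02:00 -0700 is 16:02:00 UTC
    from email.Utils import parsedate_tz, mktime_tz, formatdate
    last_change = mktime_tz(parsedate_tz("Wed, 08 Sep 2004 09:02:00 -0700"))
    print formatdate(last_change + 10)   # "Wed, 08 Sep 2004 16:02:10 -0000"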
-
- def testCVS4(self):
- c = []
- c.append(self.makeChange("Wed, 08 Sep 2004 09:00:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:01:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:02:00 -0700"))
- r1 = base.BuildRequest("forced", SourceStamp(changes=c))
- submitted = "Wed, 08 Sep 2004 09:04:00 -0700"
- r1.submittedAt = mktime_tz(parsedate_tz(submitted))
-
- c = []
- c.append(self.makeChange("Wed, 08 Sep 2004 09:05:00 -0700"))
- r2 = base.BuildRequest("forced", SourceStamp(changes=c))
- submitted = "Wed, 08 Sep 2004 09:07:00 -0700"
- r2.submittedAt = mktime_tz(parsedate_tz(submitted))
-
- b = base.Build([r1, r2])
- s = step.CVS(cvsroot=None, cvsmodule=None, workdir=None, build=b)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()),
- "Wed, 08 Sep 2004 16:06:00 -0000")
-
- def testSVN1(self):
- r = base.BuildRequest("forced", SourceStamp())
- b = base.Build([r])
- s = step.SVN(svnurl="dummy", workdir=None, build=b)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()), None)
-
- def testSVN2(self):
- c = []
- c.append(self.makeChange(revision=4))
- c.append(self.makeChange(revision=10))
- c.append(self.makeChange(revision=67))
- r = base.BuildRequest("forced", SourceStamp(changes=c))
- b = base.Build([r])
- s = step.SVN(svnurl="dummy", workdir=None, build=b)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()), 67)
-
-class Patch(VCBase, unittest.TestCase):
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def testPatch(self):
- # invoke 'patch' all by itself, to see if it works the way we think
- # it should. This is intended to ferret out some windows test
- # failures.
- helper = BaseHelper()
- self.workdir = os.path.join("test_vc", "testPatch")
- helper.populate(self.workdir)
- patch = which("patch")[0]
-
- command = [patch, "-p0"]
- class FakeBuilder:
- usePTY = False
- def sendUpdate(self, status):
- pass
- c = commands.ShellCommand(FakeBuilder(), command, self.workdir,
- sendRC=False, stdin=p0_diff)
- d = c.start()
- d.addCallback(self._testPatch_1)
- return maybeWait(d)
-
- def _testPatch_1(self, res):
- # make sure the file actually got patched
- subdir_c = os.path.join(self.workdir, "subdir", "subdir.c")
- data = open(subdir_c, "r").read()
- self.failUnlessIn("Hello patched subdir.\\n", data)
diff --git a/buildbot/buildbot-source/build/lib/buildbot/test/test_web.py b/buildbot/buildbot-source/build/lib/buildbot/test/test_web.py
deleted file mode 100644
index 4be9c26aa..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/test/test_web.py
+++ /dev/null
@@ -1,493 +0,0 @@
-# -*- test-case-name: buildbot.test.test_web -*-
-
-import sys, os, os.path, time, shutil
-from twisted.python import log, components, util
-#log.startLogging(sys.stderr)
-
-from twisted.trial import unittest
-from buildbot.test.runutils import RunMixin
-
-from twisted.internet import reactor, defer, protocol
-from twisted.internet.interfaces import IReactorUNIX
-from twisted.web import client
-
-from buildbot import master, interfaces, buildset, sourcestamp
-from buildbot.twcompat import providedBy, maybeWait
-from buildbot.status import html, builder
-from buildbot.changes.changes import Change
-from buildbot.process import step, base
-
-class ConfiguredMaster(master.BuildMaster):
- """This BuildMaster variant has a static config file, provided as a
- string when it is created."""
-
- def __init__(self, basedir, config):
- self.config = config
- master.BuildMaster.__init__(self, basedir)
-
- def loadTheConfigFile(self):
- self.loadConfig(self.config)
-
-components.registerAdapter(master.Control, ConfiguredMaster,
- interfaces.IControl)
-
-
-base_config = """
-from buildbot.status import html
-BuildmasterConfig = c = {
- 'bots': [],
- 'sources': [],
- 'schedulers': [],
- 'builders': [],
- 'slavePortnum': 0,
- }
-"""
-
-
-
-class DistribUNIX:
- def __init__(self, unixpath):
- from twisted.web import server, resource, distrib
- root = resource.Resource()
- self.r = r = distrib.ResourceSubscription("unix", unixpath)
- root.putChild('remote', r)
- self.p = p = reactor.listenTCP(0, server.Site(root))
- self.portnum = p.getHost().port
- def shutdown(self):
- d = defer.maybeDeferred(self.p.stopListening)
- return d
-
-class DistribTCP:
- def __init__(self, port):
- from twisted.web import server, resource, distrib
- root = resource.Resource()
- self.r = r = distrib.ResourceSubscription("localhost", port)
- root.putChild('remote', r)
- self.p = p = reactor.listenTCP(0, server.Site(root))
- self.portnum = p.getHost().port
- def shutdown(self):
- d = defer.maybeDeferred(self.p.stopListening)
- d.addCallback(self._shutdown_1)
- return d
- def _shutdown_1(self, res):
- return self.r.publisher.broker.transport.loseConnection()
-
-class SlowReader(protocol.Protocol):
- didPause = False
- count = 0
- data = ""
- def __init__(self, req):
- self.req = req
- self.d = defer.Deferred()
- def connectionMade(self):
- self.transport.write(self.req)
- def dataReceived(self, data):
- self.data += data
- self.count += len(data)
- if not self.didPause and self.count > 10*1000:
- self.didPause = True
- self.transport.pauseProducing()
- reactor.callLater(2, self.resume)
- def resume(self):
- self.transport.resumeProducing()
- def connectionLost(self, why):
- self.d.callback(None)
-
-class CFactory(protocol.ClientFactory):
- def __init__(self, p):
- self.p = p
- def buildProtocol(self, addr):
- self.p.factory = self
- return self.p
-
-def stopHTTPLog():
- # grr.
- try:
- from twisted.web import http # Twisted-2.0
- except ImportError:
- from twisted.protocols import http # Twisted-1.3
- http._logDateTimeStop()
-
-class BaseWeb:
- master = None
-
- def failUnlessIn(self, substr, string):
- self.failUnless(string.find(substr) != -1)
-
- def tearDown(self):
- stopHTTPLog()
- if self.master:
- d = self.master.stopService()
- return maybeWait(d)
-
- def find_waterfall(self, master):
- return filter(lambda child: isinstance(child, html.Waterfall),
- list(master))
-
-class Ports(BaseWeb, unittest.TestCase):
-
- def test_webPortnum(self):
- # run a regular web server on a TCP socket
- config = base_config + "c['status'] = [html.Waterfall(http_port=0)]\n"
- os.mkdir("test_web1")
- self.master = m = ConfiguredMaster("test_web1", config)
- m.startService()
- # hack to find out what randomly-assigned port it is listening on
- port = list(self.find_waterfall(m)[0])[0]._port.getHost().port
-
- d = client.getPage("http://localhost:%d/" % port)
- d.addCallback(self._test_webPortnum_1)
- return maybeWait(d)
- test_webPortnum.timeout = 10
- def _test_webPortnum_1(self, page):
- #print page
- self.failUnless(page)
-
- def test_webPathname(self):
- # running a t.web.distrib server over a UNIX socket
- if not providedBy(reactor, IReactorUNIX):
- raise unittest.SkipTest("UNIX sockets not supported here")
- config = (base_config +
- "c['status'] = [html.Waterfall(distrib_port='.web-pb')]\n")
- os.mkdir("test_web2")
- self.master = m = ConfiguredMaster("test_web2", config)
- m.startService()
-
- p = DistribUNIX("test_web2/.web-pb")
-
- d = client.getPage("http://localhost:%d/remote/" % p.portnum)
- d.addCallback(self._test_webPathname_1, p)
- return maybeWait(d)
- test_webPathname.timeout = 10
- def _test_webPathname_1(self, page, p):
- #print page
- self.failUnless(page)
- return p.shutdown()
-
-
- def test_webPathname_port(self):
- # running a t.web.distrib server over TCP
- config = (base_config +
- "c['status'] = [html.Waterfall(distrib_port=0)]\n")
- os.mkdir("test_web3")
- self.master = m = ConfiguredMaster("test_web3", config)
- m.startService()
- dport = list(self.find_waterfall(m)[0])[0]._port.getHost().port
-
- p = DistribTCP(dport)
-
- d = client.getPage("http://localhost:%d/remote/" % p.portnum)
- d.addCallback(self._test_webPathname_port_1, p)
- return maybeWait(d)
- test_webPathname_port.timeout = 10
- def _test_webPathname_port_1(self, page, p):
- self.failUnlessIn("BuildBot", page)
- return p.shutdown()
-
-
-class Waterfall(BaseWeb, unittest.TestCase):
- def test_waterfall(self):
- os.mkdir("test_web4")
- os.mkdir("my-maildir"); os.mkdir("my-maildir/new")
- self.robots_txt = os.path.abspath(os.path.join("test_web4",
- "robots.txt"))
- self.robots_txt_contents = "User-agent: *\nDisallow: /\n"
- f = open(self.robots_txt, "w")
- f.write(self.robots_txt_contents)
- f.close()
- # this is the right way to configure the Waterfall status
- config1 = base_config + """
-from buildbot.changes import mail
-c['sources'] = [mail.SyncmailMaildirSource('my-maildir')]
-c['status'] = [html.Waterfall(http_port=0, robots_txt=%s)]
-""" % repr(self.robots_txt)
-
- self.master = m = ConfiguredMaster("test_web4", config1)
- m.startService()
- # hack to find out what randomly-assigned port it is listening on
- port = list(self.find_waterfall(m)[0])[0]._port.getHost().port
- self.port = port
- # insert an event
- m.change_svc.addChange(Change("user", ["foo.c"], "comments"))
-
- d = client.getPage("http://localhost:%d/" % port)
- d.addCallback(self._test_waterfall_1)
- return maybeWait(d)
- test_waterfall.timeout = 10
- def _test_waterfall_1(self, page):
- self.failUnless(page)
- self.failUnlessIn("current activity", page)
- self.failUnlessIn("<html", page)
- TZ = time.tzname[time.daylight]
- self.failUnlessIn("time (%s)" % TZ, page)
-
- # phase=0 is really for debugging the waterfall layout
- d = client.getPage("http://localhost:%d/?phase=0" % self.port)
- d.addCallback(self._test_waterfall_2)
- return d
- def _test_waterfall_2(self, page):
- self.failUnless(page)
- self.failUnlessIn("<html", page)
-
- d = client.getPage("http://localhost:%d/favicon.ico" % self.port)
- d.addCallback(self._test_waterfall_3)
- return d
- def _test_waterfall_3(self, icon):
- expected = open(html.buildbot_icon,"rb").read()
- self.failUnless(icon == expected)
-
- d = client.getPage("http://localhost:%d/changes" % self.port)
- d.addCallback(self._test_waterfall_4)
- return d
- def _test_waterfall_4(self, changes):
- self.failUnlessIn("<li>Syncmail mailing list in maildir " +
- "my-maildir</li>", changes)
-
- d = client.getPage("http://localhost:%d/robots.txt" % self.port)
- d.addCallback(self._test_waterfall_5)
- return d
- def _test_waterfall_5(self, robotstxt):
- self.failUnless(robotstxt == self.robots_txt_contents)
-
-
-geturl_config = """
-from buildbot.status import html
-from buildbot.changes import mail
-from buildbot.process import step, factory
-from buildbot.scheduler import Scheduler
-from buildbot.changes.base import ChangeSource
-s = factory.s
-
-class DiscardScheduler(Scheduler):
- def addChange(self, change):
- pass
-class DummyChangeSource(ChangeSource):
- pass
-
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'sekrit'), ('bot2', 'sekrit')]
-c['sources'] = [DummyChangeSource()]
-c['schedulers'] = [DiscardScheduler('discard', None, 60, ['b1'])]
-c['slavePortnum'] = 0
-c['status'] = [html.Waterfall(http_port=0)]
-
-f = factory.BuildFactory([s(step.RemoteDummy, timeout=1)])
-
-c['builders'] = [
- {'name': 'b1', 'slavenames': ['bot1','bot2'],
- 'builddir': 'b1', 'factory': f},
- ]
-c['buildbotURL'] = 'http://dummy.example.org:8010/'
-
-"""
-
-class GetURL(RunMixin, unittest.TestCase):
-
- def setUp(self):
- RunMixin.setUp(self)
- self.master.loadConfig(geturl_config)
- self.master.startService()
- d = self.connectSlave(["b1"])
- return maybeWait(d)
-
- def tearDown(self):
- stopHTTPLog()
- return RunMixin.tearDown(self)
-
- def doBuild(self, buildername):
- br = base.BuildRequest("forced", sourcestamp.SourceStamp())
- d = br.waitUntilFinished()
- self.control.getBuilder(buildername).requestBuild(br)
- return d
-
- def assertNoURL(self, target):
- self.failUnlessIdentical(self.status.getURLForThing(target), None)
-
- def assertURLEqual(self, target, expected):
- got = self.status.getURLForThing(target)
- full_expected = "http://dummy.example.org:8010/" + expected
- self.failUnlessEqual(got, full_expected)
-
- def testMissingBase(self):
- noweb_config1 = geturl_config + "del c['buildbotURL']\n"
- d = self.master.loadConfig(noweb_config1)
- d.addCallback(self._testMissingBase_1)
- return maybeWait(d)
- def _testMissingBase_1(self, res):
- s = self.status
- self.assertNoURL(s)
- builder = s.getBuilder("b1")
- self.assertNoURL(builder)
-
- def testBase(self):
- s = self.status
- self.assertURLEqual(s, "")
- builder = s.getBuilder("b1")
- self.assertURLEqual(builder, "b1")
-
- def testBrokenStuff(self):
- s = self.status
- self.assertURLEqual(s.getSchedulers()[0], "schedulers/0")
- self.assertURLEqual(s.getSlave("bot1"), "slaves/bot1")
- # we didn't put a Change into the actual Build before, so this fails
- #self.assertURLEqual(build.getChanges()[0], "changes/1")
- testBrokenStuff.todo = "not implemented yet"
-
- def testChange(self):
- s = self.status
- c = Change("user", ["foo.c"], "comments")
- self.master.change_svc.addChange(c)
- # TODO: something more like s.getChanges(), requires IChange and
- # an accessor in IStatus. The HTML page exists already, though
- self.assertURLEqual(c, "changes/1")
-
- def testBuild(self):
- # first we do some stuff so we'll have things to look at.
- s = self.status
- d = self.doBuild("b1")
- # maybe check IBuildSetStatus here?
- d.addCallback(self._testBuild_1)
- return maybeWait(d)
-
- def _testBuild_1(self, res):
- s = self.status
- builder = s.getBuilder("b1")
- build = builder.getLastFinishedBuild()
- self.assertURLEqual(build, "b1/builds/0")
- # no page for builder.getEvent(-1)
- step = build.getSteps()[0]
- self.assertURLEqual(step, "b1/builds/0/step-remote%20dummy")
- # maybe page for build.getTestResults?
- self.assertURLEqual(step.getLogs()[0],
- "b1/builds/0/step-remote%20dummy/0")
-
-
-
-class Logfile(BaseWeb, RunMixin, unittest.TestCase):
- def setUp(self):
- config = """
-from buildbot.status import html
-from buildbot.process.factory import BasicBuildFactory
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-BuildmasterConfig = {
- 'bots': [('bot1', 'passwd1')],
- 'sources': [],
- 'schedulers': [],
- 'builders': [{'name': 'builder1', 'slavename': 'bot1',
- 'builddir':'workdir', 'factory':f1}],
- 'slavePortnum': 0,
- 'status': [html.Waterfall(http_port=0)],
- }
-"""
- if os.path.exists("test_logfile"):
- shutil.rmtree("test_logfile")
- os.mkdir("test_logfile")
- self.master = m = ConfiguredMaster("test_logfile", config)
- m.startService()
- # hack to find out what randomly-assigned port it is listening on
- port = list(self.find_waterfall(m)[0])[0]._port.getHost().port
- self.port = port
- # insert an event
-
- s = m.status.getBuilder("builder1")
- req = base.BuildRequest("reason", sourcestamp.SourceStamp())
- bs = s.newBuild()
- build1 = base.Build([req])
- step1 = step.BuildStep(build=build1)
- step1.name = "setup"
- bs.addStep(step1)
- bs.buildStarted(build1)
- step1.step_status.stepStarted()
-
- log1 = step1.addLog("output")
- log1.addStdout("some stdout\n")
- log1.finish()
-
- log2 = step1.addHTMLLog("error", "<html>ouch</html>")
-
- log3 = step1.addLog("big")
- log3.addStdout("big log\n")
- for i in range(1000):
- log3.addStdout("a" * 500)
- log3.addStderr("b" * 500)
- log3.finish()
-
- log4 = step1.addCompleteLog("bigcomplete",
- "big2 log\n" + "a" * 1*1000*1000)
-
- step1.step_status.stepFinished(builder.SUCCESS)
- bs.buildFinished()
-
- def getLogURL(self, stepname, lognum):
- logurl = "http://localhost:%d/builder1/builds/0/step-%s/%d" \
- % (self.port, stepname, lognum)
- return logurl
-
- def test_logfile1(self):
- d = client.getPage("http://localhost:%d/" % self.port)
- d.addCallback(self._test_logfile1_1)
- return maybeWait(d)
- test_logfile1.timeout = 20
- def _test_logfile1_1(self, page):
- self.failUnless(page)
-
- def test_logfile2(self):
- logurl = self.getLogURL("setup", 0)
- d = client.getPage(logurl)
- d.addCallback(self._test_logfile2_1)
- return maybeWait(d)
- def _test_logfile2_1(self, logbody):
- self.failUnless(logbody)
-
- def test_logfile3(self):
- logurl = self.getLogURL("setup", 0)
- d = client.getPage(logurl + "/text")
- d.addCallback(self._test_logfile3_1)
- return maybeWait(d)
- def _test_logfile3_1(self, logtext):
- self.failUnlessEqual(logtext, "some stdout\n")
-
- def test_logfile4(self):
- logurl = self.getLogURL("setup", 1)
- d = client.getPage(logurl)
- d.addCallback(self._test_logfile4_1)
- return maybeWait(d)
- def _test_logfile4_1(self, logbody):
- self.failUnlessEqual(logbody, "<html>ouch</html>")
-
- def test_logfile5(self):
- # this is log3, which is about 1MB in size, made up of alternating
- # stdout/stderr chunks. buildbot-0.6.6, when run against
- # twisted-1.3.0, fails to resume sending chunks after the client
- # stalls for a few seconds, because of a recursive doWrite() call
- # that was fixed in twisted-2.0.0
- p = SlowReader("GET /builder1/builds/0/step-setup/2 HTTP/1.0\r\n\r\n")
- f = CFactory(p)
- c = reactor.connectTCP("localhost", self.port, f)
- d = p.d
- d.addCallback(self._test_logfile5_1, p)
- return maybeWait(d, 10)
- test_logfile5.timeout = 10
- def _test_logfile5_1(self, res, p):
- self.failUnlessIn("big log", p.data)
- self.failUnlessIn("a"*100, p.data)
- self.failUnless(p.count > 1*1000*1000)
-
- def test_logfile6(self):
- # this is log4, which is about 1MB in size, one big chunk.
- # buildbot-0.6.6 dies as the NetstringReceiver barfs on the
- # saved logfile, because it was using one big chunk and exceeding
- # NetstringReceiver.MAX_LENGTH
- p = SlowReader("GET /builder1/builds/0/step-setup/3 HTTP/1.0\r\n\r\n")
- f = CFactory(p)
- c = reactor.connectTCP("localhost", self.port, f)
- d = p.d
- d.addCallback(self._test_logfile6_1, p)
- return maybeWait(d, 10)
- test_logfile6.timeout = 10
- def _test_logfile6_1(self, res, p):
- self.failUnlessIn("big2 log", p.data)
- self.failUnlessIn("a"*100, p.data)
- self.failUnless(p.count > 1*1000*1000)
-
-
diff --git a/buildbot/buildbot-source/build/lib/buildbot/twcompat.py b/buildbot/buildbot-source/build/lib/buildbot/twcompat.py
deleted file mode 100644
index 02c89c5eb..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/twcompat.py
+++ /dev/null
@@ -1,285 +0,0 @@
-
-if 0:
- print "hey python-mode, stop thinking I want 8-char indentation"
-
-"""
-Utilities to be compatible with both Twisted-1.3 and 2.0.
-
-implements: use it like the following.
-
-from buildbot.twcompat import implements
-class Foo:
- if implements:
- implements(IFoo)
- else:
- __implements__ = IFoo,
-
-Interface:
-    from buildbot.twcompat import Interface
- class IFoo(Interface)
-
-providedBy:
-    from buildbot.twcompat import providedBy
- assert providedBy(obj, IFoo)
-"""
-
-import os, os.path
-
-from twisted.copyright import version
-from twisted.python import components
-
-# does our Twisted use zope.interface?
-if hasattr(components, "interface"):
- # yes
- from zope.interface import implements
- from zope.interface import Interface
- def providedBy(obj, iface):
- return iface.providedBy(obj)
-else:
- # nope
- implements = None
- from twisted.python.components import Interface
- providedBy = components.implements
-
-# are we using a version of Trial that allows setUp/testFoo/tearDown to
-# return Deferreds?
-oldtrial = version.startswith("1.3")
-
-# use this at the end of setUp/testFoo/tearDown methods
-def maybeWait(d, timeout="none"):
- from twisted.python import failure
- from twisted.trial import unittest
- if oldtrial:
- # this is required for oldtrial (twisted-1.3.0) compatibility. When we
- # move to retrial (twisted-2.0.0), replace these with a simple 'return
- # d'.
- try:
- if timeout == "none":
- unittest.deferredResult(d)
- else:
- unittest.deferredResult(d, timeout)
- except failure.Failure, f:
- if f.check(unittest.SkipTest):
- raise f.value
- raise
- return None
- return d
-
-# waitForDeferred and getProcessOutputAndValue are twisted-2.0 things. If
-# we're running under 1.3, patch them into place. These versions are copied
-# from twisted somewhat after 2.0.1 .
-
-from twisted.internet import defer
-if not hasattr(defer, 'waitForDeferred'):
- Deferred = defer.Deferred
- class waitForDeferred:
- """
- API Stability: semi-stable
-
- Maintainer: U{Christopher Armstrong<mailto:radix@twistedmatrix.com>}
-
- waitForDeferred and deferredGenerator help you write
- Deferred-using code that looks like it's blocking (but isn't
- really), with the help of generators.
-
- There are two important functions involved: waitForDeferred, and
- deferredGenerator.
-
- def thingummy():
- thing = waitForDeferred(makeSomeRequestResultingInDeferred())
- yield thing
- thing = thing.getResult()
- print thing #the result! hoorj!
- thingummy = deferredGenerator(thingummy)
-
- waitForDeferred returns something that you should immediately yield;
- when your generator is resumed, calling thing.getResult() will either
- give you the result of the Deferred if it was a success, or raise an
- exception if it was a failure.
-
- deferredGenerator takes one of these waitForDeferred-using
- generator functions and converts it into a function that returns a
- Deferred. The result of the Deferred will be the last
- value that your generator yielded (remember that 'return result' won't
- work; use 'yield result; return' in place of that).
-
- Note that not yielding anything from your generator will make the
- Deferred result in None. Yielding a Deferred from your generator
- is also an error condition; always yield waitForDeferred(d)
- instead.
-
- The Deferred returned from your deferred generator may also
- errback if your generator raised an exception.
-
- def thingummy():
- thing = waitForDeferred(makeSomeRequestResultingInDeferred())
- yield thing
- thing = thing.getResult()
- if thing == 'I love Twisted':
- # will become the result of the Deferred
- yield 'TWISTED IS GREAT!'
- return
- else:
- # will trigger an errback
- raise Exception('DESTROY ALL LIFE')
- thingummy = deferredGenerator(thingummy)
-
- Put succinctly, these functions connect deferred-using code with this
- 'fake blocking' style in both directions: waitForDeferred converts from
- a Deferred to the 'blocking' style, and deferredGenerator converts from
- the 'blocking' style to a Deferred.
- """
- def __init__(self, d):
- if not isinstance(d, Deferred):
- raise TypeError("You must give waitForDeferred a Deferred. You gave it %r." % (d,))
- self.d = d
-
- def getResult(self):
- if hasattr(self, 'failure'):
- self.failure.raiseException()
- return self.result
-
- def _deferGenerator(g, deferred=None, result=None):
- """
- See L{waitForDeferred}.
- """
- while 1:
- if deferred is None:
- deferred = defer.Deferred()
- try:
- result = g.next()
- except StopIteration:
- deferred.callback(result)
- return deferred
- except:
- deferred.errback()
- return deferred
-
- # Deferred.callback(Deferred) raises an error; we catch this case
- # early here and give a nicer error message to the user in case
- # they yield a Deferred. Perhaps eventually these semantics may
- # change.
- if isinstance(result, defer.Deferred):
- return defer.fail(TypeError("Yield waitForDeferred(d), not d!"))
-
- if isinstance(result, waitForDeferred):
- waiting=[True, None]
- # Pass vars in so they don't get changed going around the loop
- def gotResult(r, waiting=waiting, result=result):
- result.result = r
- if waiting[0]:
- waiting[0] = False
- waiting[1] = r
- else:
- _deferGenerator(g, deferred, r)
- def gotError(f, waiting=waiting, result=result):
- result.failure = f
- if waiting[0]:
- waiting[0] = False
- waiting[1] = f
- else:
- _deferGenerator(g, deferred, f)
- result.d.addCallbacks(gotResult, gotError)
- if waiting[0]:
- # Haven't called back yet, set flag so that we get reinvoked
- # and return from the loop
- waiting[0] = False
- return deferred
- else:
- result = waiting[1]
-
- def func_metamerge(f, g):
- """
- Merge function metadata from f -> g and return g
- """
- try:
- g.__doc__ = f.__doc__
- g.__dict__.update(f.__dict__)
- g.__name__ = f.__name__
- except (TypeError, AttributeError):
- pass
- return g
-
- def deferredGenerator(f):
- """
- See L{waitForDeferred}.
- """
- def unwindGenerator(*args, **kwargs):
- return _deferGenerator(f(*args, **kwargs))
- return func_metamerge(f, unwindGenerator)
-
- defer.waitForDeferred = waitForDeferred
- defer.deferredGenerator = deferredGenerator
-
-from twisted.internet import utils
-if not hasattr(utils, "getProcessOutputAndValue"):
- from twisted.internet import reactor, protocol
- _callProtocolWithDeferred = utils._callProtocolWithDeferred
- try:
- import cStringIO as StringIO
- except ImportError:
- import StringIO
-
- class _EverythingGetter(protocol.ProcessProtocol):
-
- def __init__(self, deferred):
- self.deferred = deferred
- self.outBuf = StringIO.StringIO()
- self.errBuf = StringIO.StringIO()
- self.outReceived = self.outBuf.write
- self.errReceived = self.errBuf.write
-
- def processEnded(self, reason):
- out = self.outBuf.getvalue()
- err = self.errBuf.getvalue()
- e = reason.value
- code = e.exitCode
- if e.signal:
- self.deferred.errback((out, err, e.signal))
- else:
- self.deferred.callback((out, err, code))
-
- def getProcessOutputAndValue(executable, args=(), env={}, path='.',
- reactor=reactor):
- """Spawn a process and returns a Deferred that will be called back
- with its output (from stdout and stderr) and it's exit code as (out,
- err, code) If a signal is raised, the Deferred will errback with the
- stdout and stderr up to that point, along with the signal, as (out,
- err, signalNum)
- """
- return _callProtocolWithDeferred(_EverythingGetter,
- executable, args, env, path,
- reactor)
- utils.getProcessOutputAndValue = getProcessOutputAndValue
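A hedged usage sketch for the shim above (the '/bin/ls' invocation is only an example, and the call assumes a running reactor): the Deferred fires with an (out, err, code) tuple on normal exit, or errbacks with (out, err, signalNum) if the child was killed by a signal.

    # illustrative only
    def show_result(result):
        out, err, code = result
        print "exit code:", code
    d = utils.getProcessOutputAndValue("/bin/ls", ["-l"])
    d.addCallback(show_result)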
-
-
-# copied from Twisted circa 2.2.0
-def _which(name, flags=os.X_OK):
- """Search PATH for executable files with the given name.
-
- @type name: C{str}
- @param name: The name for which to search.
-
- @type flags: C{int}
- @param flags: Arguments to L{os.access}.
-
- @rtype: C{list}
-    @return: A list of the full paths to files found, in the
-    order in which they were found.
- """
- result = []
- exts = filter(None, os.environ.get('PATHEXT', '').split(os.pathsep))
- for p in os.environ['PATH'].split(os.pathsep):
- p = os.path.join(p, name)
- if os.access(p, flags):
- result.append(p)
- for e in exts:
- pext = p + e
- if os.access(pext, flags):
- result.append(pext)
- return result
-
-try:
- from twisted.python.procutils import which
-except ImportError:
- which = _which
diff --git a/buildbot/buildbot-source/build/lib/buildbot/util.py b/buildbot/buildbot-source/build/lib/buildbot/util.py
deleted file mode 100644
index bb9d9943b..000000000
--- a/buildbot/buildbot-source/build/lib/buildbot/util.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- test-case-name: buildbot.test.test_util -*-
-
-from twisted.internet.defer import Deferred
-from twisted.python import log
-from twisted.spread import pb
-import time
-
-def now():
- #return int(time.time())
- return time.time()
-
-def earlier(old, new):
- # minimum of two things, but "None" counts as +infinity
- if old:
- if new < old:
- return new
- return old
- return new
-
-def later(old, new):
- # maximum of two things, but "None" counts as -infinity
- if old:
- if new > old:
- return new
- return old
- return new
-
-class CancelableDeferred(Deferred):
- """I am a version of Deferred that can be canceled by calling my
- .cancel() method. After being canceled, no callbacks or errbacks will be
- executed.
- """
- def __init__(self):
- Deferred.__init__(self)
- self.canceled = 0
- def cancel(self):
- self.canceled = 1
- def _runCallbacks(self):
- if self.canceled:
- self.callbacks = []
- return
- Deferred._runCallbacks(self)
-
-def ignoreStaleRefs(failure):
- """d.addErrback(util.ignoreStaleRefs)"""
- r = failure.trap(pb.DeadReferenceError, pb.PBConnectionLost)
- return None
-
-class _None:
- pass
-
-class ComparableMixin:
- """Specify a list of attributes that are 'important'. These will be used
- for all comparison operations."""
-
- compare_attrs = []
-
- def __hash__(self):
- alist = [self.__class__] + \
- [getattr(self, name, _None) for name in self.compare_attrs]
- return hash(tuple(alist))
-
- def __cmp__(self, them):
- if cmp(type(self), type(them)):
- return cmp(type(self), type(them))
- if cmp(self.__class__, them.__class__):
- return cmp(self.__class__, them.__class__)
- assert self.compare_attrs == them.compare_attrs
- self_list= [getattr(self, name, _None) for name in self.compare_attrs]
- them_list= [getattr(them, name, _None) for name in self.compare_attrs]
- return cmp(self_list, them_list)
diff --git a/buildbot/buildbot-source/build/scripts-2.3/buildbot b/buildbot/buildbot-source/build/scripts-2.3/buildbot
deleted file mode 100755
index cf3628dd5..000000000
--- a/buildbot/buildbot-source/build/scripts-2.3/buildbot
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/python
-
-from buildbot.scripts import runner
-runner.run()
diff --git a/buildbot/buildbot-source/build/scripts-2.4/buildbot b/buildbot/buildbot-source/build/scripts-2.4/buildbot
deleted file mode 100755
index 45421cfa5..000000000
--- a/buildbot/buildbot-source/build/scripts-2.4/buildbot
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/python2.4
-
-from buildbot.scripts import runner
-runner.run()
diff --git a/buildbot/buildbot-source/buildbot/__init__.py b/buildbot/buildbot-source/buildbot/__init__.py
deleted file mode 100644
index ed1ce3fd3..000000000
--- a/buildbot/buildbot-source/buildbot/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#! /usr/bin/python
-
-version = "0.7.3"
diff --git a/buildbot/buildbot-source/buildbot/__init__.pyc b/buildbot/buildbot-source/buildbot/__init__.pyc
deleted file mode 100644
index d02b03f25..000000000
--- a/buildbot/buildbot-source/buildbot/__init__.pyc
+++ /dev/null
Binary files differ
diff --git a/buildbot/buildbot-source/buildbot/buildbot.png b/buildbot/buildbot-source/buildbot/buildbot.png
deleted file mode 100644
index 387ba15f4..000000000
--- a/buildbot/buildbot-source/buildbot/buildbot.png
+++ /dev/null
Binary files differ
diff --git a/buildbot/buildbot-source/buildbot/buildset.py b/buildbot/buildbot-source/buildbot/buildset.py
deleted file mode 100644
index 0e163738d..000000000
--- a/buildbot/buildbot-source/buildbot/buildset.py
+++ /dev/null
@@ -1,77 +0,0 @@
-
-from twisted.internet import defer
-
-from buildbot.process import base
-from buildbot.status import builder
-
-
-class BuildSet:
- """I represent a set of potential Builds, all of the same source tree,
- across a specified list of Builders. I can represent a build of a
- specific version of the source tree (named by source.branch and
- source.revision), or a build of a certain set of Changes
- (source.changes=list)."""
-
- def __init__(self, builderNames, source, reason=None, bsid=None):
- """
- @param source: a L{buildbot.sourcestamp.SourceStamp}
- """
- self.builderNames = builderNames
- self.source = source
- self.reason = reason
- self.stillHopeful = True
- self.status = bss = builder.BuildSetStatus(source, reason,
- builderNames, bsid)
-
- def waitUntilSuccess(self):
- return self.status.waitUntilSuccess()
- def waitUntilFinished(self):
- return self.status.waitUntilFinished()
-
- def start(self, builders):
- """This is called by the BuildMaster to actually create and submit
- the BuildRequests."""
- self.requests = []
- reqs = []
-
- # create the requests
- for b in builders:
- req = base.BuildRequest(self.reason, self.source, b.name)
- reqs.append((b, req))
- self.requests.append(req)
- d = req.waitUntilFinished()
- d.addCallback(self.requestFinished, req)
-
- # tell our status about them
- req_statuses = [req.status for req in self.requests]
- self.status.setBuildRequestStatuses(req_statuses)
-
- # now submit them
- for b,req in reqs:
- b.submitBuildRequest(req)
-
- def requestFinished(self, buildstatus, req):
- # TODO: this is where individual build status results are aggregated
- # into a BuildSet-wide status. Consider making a rule that says one
- # WARNINGS results in the overall status being WARNINGS too. The
- # current rule is that any FAILURE means FAILURE, otherwise you get
- # SUCCESS.
- self.requests.remove(req)
- results = buildstatus.getResults()
- if results == builder.FAILURE:
- self.status.setResults(results)
- if self.stillHopeful:
- # oh, cruel reality cuts deep. no joy for you. This is the
- # first failure. This flunks the overall BuildSet, so we can
- # notify success watchers that they aren't going to be happy.
- self.stillHopeful = False
- self.status.giveUpHope()
- self.status.notifySuccessWatchers()
- if not self.requests:
- # that was the last build, so we can notify finished watchers. If
- # we haven't failed by now, we can claim success.
- if self.stillHopeful:
- self.status.setResults(builder.SUCCESS)
- self.status.notifySuccessWatchers()
- self.status.notifyFinishedWatchers()
-
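
The BuildSet above ties one SourceStamp to a list of builder names and exposes waitUntilSuccess()/waitUntilFinished() through its status object. A hedged sketch of master-side usage (the builder names, branch and SourceStamp arguments are illustrative, not taken from a real configuration):

    from twisted.python import log
    from buildbot.buildset import BuildSet
    from buildbot.sourcestamp import SourceStamp

    ss = SourceStamp(branch="trunk", revision=None)
    bs = BuildSet(["linux-full", "win32-full"], ss, reason="nightly build")
    d = bs.waitUntilFinished()   # fires once every BuildRequest has finished
    d.addCallback(lambda bss: log.msg("buildset done"))
    # the BuildMaster then calls bs.start(builders) with the matching Builder objects
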
diff --git a/buildbot/buildbot-source/buildbot/changes/__init__.py b/buildbot/buildbot-source/buildbot/changes/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/buildbot/changes/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/buildbot/changes/base.py b/buildbot/buildbot-source/buildbot/changes/base.py
deleted file mode 100644
index 2b0a331f2..000000000
--- a/buildbot/buildbot-source/buildbot/changes/base.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#! /usr/bin/python
-
-from twisted.application import service
-from twisted.python import components
-
-from buildbot.twcompat import implements
-from buildbot.interfaces import IChangeSource
-from buildbot import util
-
-class ChangeSource(service.Service, util.ComparableMixin):
- if implements:
- implements(IChangeSource)
- else:
- __implements__ = IChangeSource, service.Service.__implements__
diff --git a/buildbot/buildbot-source/buildbot/changes/changes.py b/buildbot/buildbot-source/buildbot/changes/changes.py
deleted file mode 100644
index 9ca9112f0..000000000
--- a/buildbot/buildbot-source/buildbot/changes/changes.py
+++ /dev/null
@@ -1,265 +0,0 @@
-#! /usr/bin/python
-
-from __future__ import generators
-import string, sys, os, os.path, time, types
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-from twisted.python import log, components
-from twisted.internet import defer
-from twisted.spread import pb
-from twisted.application import service
-from twisted.cred import portal
-from twisted.web import html
-
-from buildbot import interfaces, util
-from buildbot.twcompat import implements, providedBy
-
-html_tmpl = """
-<p>Changed by: <b>%(who)s</b><br />
-Changed at: <b>%(at)s</b><br />
-%(branch)s
-%(revision)s
-<br />
-
-Changed files:
-%(files)s
-
-Comments:
-%(comments)s
-</p>
-"""
-
-class Change:
- """I represent a single change to the source tree. This may involve
- several files, but they are all changed by the same person, and there is
- a change comment for the group as a whole.
-
- If the version control system supports sequential repository- (or
- branch-) wide change numbers (like SVN, P4, and Arch), then revision=
- should be set to that number. The highest such number will be used at
- checkout time to get the correct set of files.
-
- If it does not (like CVS), when= should be set to the timestamp (seconds
- since epoch, as returned by time.time()) when the change was made. when=
- will be filled in for you (to the current time) if you omit it, which is
- suitable for ChangeSources which have no way of getting more accurate
- timestamps.
-
- Changes should be submitted to ChangeMaster.addChange() in
- chronologically increasing order. Out-of-order changes will probably
- cause the html.Waterfall display to be corrupted."""
-
- if implements:
- implements(interfaces.IStatusEvent)
- else:
- __implements__ = interfaces.IStatusEvent,
-
- number = None
-
- links = []
- branch = None
- revision = None # used to create a source-stamp
-
- def __init__(self, who, files, comments, isdir=0, links=[],
- revision=None, when=None, branch=None):
- self.who = who
- self.files = files
- self.comments = comments
- self.isdir = isdir
- self.links = links
- self.revision = revision
- if when is None:
- when = util.now()
- self.when = when
- self.branch = branch
-
- def asText(self):
- data = ""
- data += self.getFileContents()
- data += "At: %s\n" % self.getTime()
- data += "Changed By: %s\n" % self.who
- data += "Comments: %s\n\n" % self.comments
- return data
-
- def asHTML(self):
- links = []
- for file in self.files:
- link = filter(lambda s: s.find(file) != -1, self.links)
- if len(link) == 1:
- # could get confused
- links.append('<a href="%s"><b>%s</b></a>' % (link[0], file))
- else:
- links.append('<b>%s</b>' % file)
- revision = ""
- if self.revision:
- revision = "Revision: <b>%s</b><br />\n" % self.revision
- branch = ""
- if self.branch:
- branch = "Branch: <b>%s</b><br />\n" % self.branch
-
- kwargs = { 'who' : html.escape(self.who),
- 'at' : self.getTime(),
- 'files' : html.UL(links) + '\n',
- 'revision': revision,
- 'branch' : branch,
- 'comments': html.PRE(self.comments) }
- return html_tmpl % kwargs
-
- def getTime(self):
- if not self.when:
- return "?"
- return time.strftime("%a %d %b %Y %H:%M:%S",
- time.localtime(self.when))
-
- def getTimes(self):
- return (self.when, None)
-
- def getText(self):
- return [html.escape(self.who)]
- def getColor(self):
- return "white"
- def getLogs(self):
- return {}
-
- def getFileContents(self):
- data = ""
- if len(self.files) == 1:
- if self.isdir:
- data += "Directory: %s\n" % self.files[0]
- else:
- data += "File: %s\n" % self.files[0]
- else:
- data += "Files:\n"
- for f in self.files:
- data += " %s\n" % f
- return data
-
-class ChangeMaster(service.MultiService):
-
- """This is the master-side service which receives file change
- notifications from CVS. It keeps a log of these changes, enough to
- provide for the HTML waterfall display, and to tell
- temporarily-disconnected bots what they missed while they were
- offline.
-
- Change notifications come from two different kinds of sources. The first
- is a PB service (servicename='changemaster', perspectivename='change'),
- which provides a remote method called 'addChange', which should be
- called with a dict that has keys 'filename' and 'comments'.
-
- The second is a list of objects derived from the ChangeSource class.
- These are added with .addSource(), which also sets the .changemaster
- attribute in the source to point at the ChangeMaster. When the
- application begins, these will be started with .start() . At shutdown
- time, they will be terminated with .stop() . They must be persistable.
- They are expected to call self.changemaster.addChange() with Change
- objects.
-
- There are several different variants of the second type of source:
-
- - L{buildbot.changes.mail.MaildirSource} watches a maildir for CVS
- commit mail. It uses DNotify if available, or polls every 10
- seconds if not. It parses incoming mail to determine what files
- were changed.
-
- - L{buildbot.changes.freshcvs.FreshCVSSource} makes a PB
- connection to the CVSToys 'freshcvs' daemon and relays any
- changes it announces.
-
- """
-
- debug = False
- # todo: use Maildir class to watch for changes arriving by mail
-
- def __init__(self):
- service.MultiService.__init__(self)
- self.changes = []
- # self.basedir must be filled in by the parent
- self.nextNumber = 1
-
- def addSource(self, source):
- assert providedBy(source, interfaces.IChangeSource)
- assert providedBy(source, service.IService)
- if self.debug:
- print "ChangeMaster.addSource", source
- source.setServiceParent(self)
-
- def removeSource(self, source):
- assert source in self
- if self.debug:
- print "ChangeMaster.removeSource", source, source.parent
- d = defer.maybeDeferred(source.disownServiceParent)
- return d
-
- def addChange(self, change):
- """Deliver a file change event. The event should be a Change object.
- This method will timestamp the object as it is received."""
- log.msg("adding change, who %s, %d files, rev=%s, branch=%s, "
- "comments %s" % (change.who, len(change.files),
- change.revision, change.branch,
- change.comments))
- change.number = self.nextNumber
- self.nextNumber += 1
- self.changes.append(change)
- self.parent.addChange(change)
- # TODO: call pruneChanges after a while
-
- def pruneChanges(self):
- self.changes = self.changes[-100:] # or something
-
- def eventGenerator(self):
- for i in range(len(self.changes)-1, -1, -1):
- c = self.changes[i]
- yield c
-
- def getChangeNumbered(self, num):
- if not self.changes:
- return None
- first = self.changes[0].number
- if first + len(self.changes)-1 != self.changes[-1].number:
- log.msg(self,
- "lost a change somewhere: [0] is %d, [%d] is %d" % \
- (self.changes[0].number,
- len(self.changes) - 1,
- self.changes[-1].number))
- for c in self.changes:
- log.msg("c[%d]: " % c.number, c)
- return None
- offset = num - first
- log.msg(self, "offset", offset)
- return self.changes[offset]
-
- def __getstate__(self):
- d = service.MultiService.__getstate__(self)
- del d['parent']
- del d['services'] # lose all children
- del d['namedServices']
- return d
-
- def __setstate__(self, d):
- self.__dict__ = d
- # self.basedir must be set by the parent
- self.services = [] # they'll be repopulated by readConfig
- self.namedServices = {}
-
-
- def saveYourself(self):
- filename = os.path.join(self.basedir, "changes.pck")
- tmpfilename = filename + ".tmp"
- try:
- pickle.dump(self, open(tmpfilename, "wb"))
- if sys.platform == 'win32':
- # windows cannot rename a file on top of an existing one
- if os.path.exists(filename):
- os.unlink(filename)
- os.rename(tmpfilename, filename)
- except Exception, e:
- log.msg("unable to save changes")
- log.err()
-
- def stopService(self):
- self.saveYourself()
- return service.MultiService.stopService(self)
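
As the Change docstring above explains, ChangeSources build Change objects and hand them to ChangeMaster.addChange(). A small illustrative sketch (author, files and revision are made up):

    from buildbot.changes.changes import Change

    c = Change(who="alice",
               files=["src/foo.c", "src/foo.h"],
               comments="fix off-by-one in foo()",
               revision="1.42",
               branch="trunk")
    print c.asText()
    # a running ChangeSource would instead call self.parent.addChange(c)
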
diff --git a/buildbot/buildbot-source/buildbot/changes/dnotify.py b/buildbot/buildbot-source/buildbot/changes/dnotify.py
deleted file mode 100644
index ac566a8eb..000000000
--- a/buildbot/buildbot-source/buildbot/changes/dnotify.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#! /usr/bin/python
-
-import fcntl, signal, os
-
-class DNotify_Handler:
- def __init__(self):
- self.watchers = {}
- self.installed = 0
- def install(self):
- if self.installed:
- return
- signal.signal(signal.SIGIO, self.fire)
- self.installed = 1
- def uninstall(self):
- if not self.installed:
- return
- signal.signal(signal.SIGIO, signal.SIG_DFL)
- self.installed = 0
- def add(self, watcher):
- self.watchers[watcher.fd] = watcher
- self.install()
- def remove(self, watcher):
- if self.watchers.has_key(watcher.fd):
- del(self.watchers[watcher.fd])
- if not self.watchers:
- self.uninstall()
- def fire(self, signum, frame):
- # this is the signal handler
- # without siginfo_t, we must fire them all
- for watcher in self.watchers.values():
- watcher.callback()
-
-class DNotify:
- DN_ACCESS = fcntl.DN_ACCESS # a file in the directory was read
- DN_MODIFY = fcntl.DN_MODIFY # a file was modified (write,truncate)
- DN_CREATE = fcntl.DN_CREATE # a file was created
- DN_DELETE = fcntl.DN_DELETE # a file was unlinked
- DN_RENAME = fcntl.DN_RENAME # a file was renamed
- DN_ATTRIB = fcntl.DN_ATTRIB # a file had attributes changed (chmod,chown)
-
- handler = [None]
-
- def __init__(self, dirname, callback=None,
- flags=[DN_MODIFY,DN_CREATE,DN_DELETE,DN_RENAME]):
-
- """This object watches a directory for changes. The .callback
- attribute should be set to a function to be run every time something
- happens to it. Be aware that it will be called more times than you
- expect."""
-
- if callback:
- self.callback = callback
- else:
- self.callback = self.fire
- self.dirname = dirname
- self.flags = reduce(lambda x, y: x | y, flags) | fcntl.DN_MULTISHOT
- self.fd = os.open(dirname, os.O_RDONLY)
- # ideally we would move the notification to something like SIGRTMIN,
- # (to free up SIGIO) and use sigaction to have the signal handler
- # receive a structure with the fd number. But python doesn't offer
- # either.
- if not self.handler[0]:
- self.handler[0] = DNotify_Handler()
- self.handler[0].add(self)
- fcntl.fcntl(self.fd, fcntl.F_NOTIFY, self.flags)
- def remove(self):
- self.handler[0].remove(self)
- os.close(self.fd)
- def fire(self):
- print self.dirname, "changed!"
-
-def test_dnotify1():
- d = DNotify(".")
- import time
- while 1:
- signal.pause()
-
-def test_dnotify2():
- # create ./foo/, create/delete files in ./ and ./foo/ while this is
- # running. Notice how both notifiers are fired when anything changes;
- # this is an unfortunate side-effect of the lack of extended sigaction
- # support in Python.
- count = [0]
- d1 = DNotify(".")
- def fire1(count=count, d1=d1):
- print "./ changed!", count[0]
- count[0] += 1
- if count[0] > 5:
- d1.remove()
- del(d1)
- # change the callback, since we can't define it until after we have the
- # dnotify object. Hmm, unless we give the dnotify to the callback.
- d1.callback = fire1
- def fire2(): print "foo/ changed!"
- d2 = DNotify("foo", fire2)
- import time
- while 1:
- signal.pause()
-
-
-if __name__ == '__main__':
- test_dnotify2()
-
diff --git a/buildbot/buildbot-source/buildbot/changes/freshcvs.py b/buildbot/buildbot-source/buildbot/changes/freshcvs.py
deleted file mode 100644
index e88d351ba..000000000
--- a/buildbot/buildbot-source/buildbot/changes/freshcvs.py
+++ /dev/null
@@ -1,148 +0,0 @@
-#! /usr/bin/python
-
-import os.path
-
-from twisted.cred import credentials
-from twisted.spread import pb
-from twisted.application.internet import TCPClient
-from twisted.python import log
-
-import cvstoys.common # to make sure VersionedPatch gets registered
-
-from buildbot.twcompat import implements
-from buildbot.interfaces import IChangeSource
-from buildbot.pbutil import ReconnectingPBClientFactory
-from buildbot.changes.changes import Change
-from buildbot import util
-
-class FreshCVSListener(pb.Referenceable):
- def remote_notify(self, root, files, message, user):
- try:
- self.source.notify(root, files, message, user)
- except Exception, e:
- print "notify failed"
- log.err()
-
- def remote_goodbye(self, message):
- pass
-
-class FreshCVSConnectionFactory(ReconnectingPBClientFactory):
-
- def gotPerspective(self, perspective):
- log.msg("connected to FreshCVS daemon")
- ReconnectingPBClientFactory.gotPerspective(self, perspective)
- self.source.connected = True
- # TODO: freshcvs-1.0.10 doesn't handle setFilter correctly, it will
- # be fixed in the upcoming 1.0.11 . I haven't been able to test it
- # to make sure the failure mode is survivable, so I'll just leave
- # this out for now.
- return
- if self.source.prefix is not None:
- pathfilter = "^%s" % self.source.prefix
- d = perspective.callRemote("setFilter",
- None, pathfilter, None)
- # ignore failures, setFilter didn't work in 1.0.10 and this is
- # just an optimization anyway
- d.addErrback(lambda f: None)
-
- def clientConnectionLost(self, connector, reason):
- ReconnectingPBClientFactory.clientConnectionLost(self, connector,
- reason)
- self.source.connected = False
-
-class FreshCVSSourceNewcred(TCPClient, util.ComparableMixin):
- """This source will connect to a FreshCVS server associated with one or
- more CVS repositories. Each time a change is committed to a repository,
- the server will send us a message describing the change. This message is
- used to build a Change object, which is then submitted to the
- ChangeMaster.
-
- This class handles freshcvs daemons which use newcred. CVSToys-1.0.9
- does not, later versions might.
- """
-
- if implements:
- implements(IChangeSource)
- else:
- __implements__ = IChangeSource, TCPClient.__implements__
- compare_attrs = ["host", "port", "username", "password", "prefix"]
-
- changemaster = None # filled in when we're added
- connected = False
-
- def __init__(self, host, port, user, passwd, prefix=None):
- self.host = host
- self.port = port
- self.username = user
- self.password = passwd
- if prefix is not None and not prefix.endswith("/"):
- log.msg("WARNING: prefix '%s' should probably end with a slash" \
- % prefix)
- self.prefix = prefix
- self.listener = l = FreshCVSListener()
- l.source = self
- self.factory = f = FreshCVSConnectionFactory()
- f.source = self
- self.creds = credentials.UsernamePassword(user, passwd)
- f.startLogin(self.creds, client=l)
- TCPClient.__init__(self, host, port, f)
-
- def __repr__(self):
- return "<FreshCVSSource where=%s, prefix=%s>" % \
- ((self.host, self.port), self.prefix)
-
- def describe(self):
- online = ""
- if not self.connected:
- online = " [OFFLINE]"
- return "freshcvs %s:%s%s" % (self.host, self.port, online)
-
- def notify(self, root, files, message, user):
- pathnames = []
- isdir = 0
- for f in files:
- if not isinstance(f, (cvstoys.common.VersionedPatch,
- cvstoys.common.Directory)):
- continue
- pathname, filename = f.pathname, f.filename
- #r1, r2 = getattr(f, 'r1', None), getattr(f, 'r2', None)
- if isinstance(f, cvstoys.common.Directory):
- isdir = 1
- path = os.path.join(pathname, filename)
- log.msg("FreshCVS notify '%s'" % path)
- if self.prefix:
- if path.startswith(self.prefix):
- path = path[len(self.prefix):]
- else:
- continue
- pathnames.append(path)
- if pathnames:
- # now() is close enough: FreshCVS *is* realtime, after all
- when=util.now()
- c = Change(user, pathnames, message, isdir, when=when)
- self.parent.addChange(c)
-
-class FreshCVSSourceOldcred(FreshCVSSourceNewcred):
- """This is for older freshcvs daemons (from CVSToys-1.0.9 and earlier).
- """
-
- def __init__(self, host, port, user, passwd,
- serviceName="cvstoys.notify", prefix=None):
- self.host = host
- self.port = port
- self.prefix = prefix
- self.listener = l = FreshCVSListener()
- l.source = self
- self.factory = f = FreshCVSConnectionFactory()
- f.source = self
- f.startGettingPerspective(user, passwd, serviceName, client=l)
- TCPClient.__init__(self, host, port, f)
-
- def __repr__(self):
- return "<FreshCVSSourceOldcred where=%s, prefix=%s>" % \
- ((self.host, self.port), self.prefix)
-
-# this is suitable for CVSToys-1.0.10 and later. If you run CVSToys-1.0.9 or
-# earlier, use FreshCVSSourceOldcred instead.
-FreshCVSSource = FreshCVSSourceNewcred
-
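
The FreshCVSSource classes above are meant to be listed as change sources in the master configuration. A minimal master.cfg-style sketch, with placeholder host, port and credentials:

    from buildbot.changes.freshcvs import FreshCVSSource

    c = BuildmasterConfig = {}
    c['sources'] = []
    c['sources'].append(FreshCVSSource("cvs.example.org", 4519,
                                       "freshcvs-user", "freshcvs-passwd",
                                       prefix="myproject/"))
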
diff --git a/buildbot/buildbot-source/buildbot/changes/freshcvsmail.py b/buildbot/buildbot-source/buildbot/changes/freshcvsmail.py
deleted file mode 100644
index e897f4990..000000000
--- a/buildbot/buildbot-source/buildbot/changes/freshcvsmail.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#! /usr/bin/python
-
-# leftover import for compatibility
-
-from buildbot.changes.mail import FCMaildirSource
diff --git a/buildbot/buildbot-source/buildbot/changes/mail.py b/buildbot/buildbot-source/buildbot/changes/mail.py
deleted file mode 100644
index b5237e9a9..000000000
--- a/buildbot/buildbot-source/buildbot/changes/mail.py
+++ /dev/null
@@ -1,475 +0,0 @@
-# -*- test-case-name: buildbot.test.test_mailparse -*-
-
-"""
-Parse various kinds of 'CVS notify' email.
-"""
-import os, os.path, re
-from rfc822 import Message
-
-from buildbot import util
-from buildbot.twcompat import implements
-from buildbot.changes import base, changes, maildirtwisted
-
-
-def parseOOAllCVSmail(self, fd, prefix=None, sep="/"):
- """Parse messages sent by the 'allcvs' program
- """
- # pretty much the same as freshcvs mail, not surprising since CVS is the
- # one creating most of the text
-
- m = Message(fd)
- # The mail is sent from the person doing the checkin. Assume that the
- # local username is enough to identify them (this assumes a one-server
- # cvs-over-rsh environment rather than the server-dirs-shared-over-NFS
- # model)
- name, addr = m.getaddr("from")
- if not addr:
- return None # no From means this message isn't from the allcvs script
- at = addr.find("@")
- if at == -1:
- who = addr # might still be useful
- else:
- who = addr[:at]
-
- # we take the time of receipt as the time of checkin. Not correct (it
- # depends upon the email latency), but it avoids the out-of-order-changes
- # issue. Also syncmail doesn't give us anything better to work with,
- # unless you count pulling the v1-vs-v2 timestamp out of the diffs, which
- # would be ugly. TODO: Pulling the 'Date:' header from the mail is a
- # possibility, and email.Utils.parsedate_tz may be useful. It should be
- # configurable, however, because there are a lot of broken clocks out
- # there.
- when = util.now()
- subject = m.getheader("subject")
- # syncmail puts the repository-relative directory in the subject:
- # mprefix + "%(dir)s %(file)s,%(oldversion)s,%(newversion)s", where
- # 'mprefix' is something that could be added by a mailing list
- # manager.
- # this is the only reasonable way to determine the directory name
- space = subject.find(" ")
- if space != -1:
- directory = subject[:space]
- else:
- directory = subject
- files = []
- comments = ""
- isdir = 0
- branch = None
- lines = m.fp.readlines()
-
- while lines:
- line = lines.pop(0)
- #if line == "\n":
- # break
- #if line == "Log:\n":
- # lines.insert(0, line)
- # break
- line = line.lstrip()
- line = line.rstrip()
-
- if line.startswith('Tag:'):
- branch = line.split(' ')[-1].rstrip()
- branch = branch.replace("cws_src680_","")
- break
- else:
- continue
-
- #thesefiles = line.split(" ")
- #for f in thesefiles:
- # f = sep.join([directory, f])
- # if prefix:
- # bits = f.split(sep)
- # if bits[0] == prefix:
- # f = sep.join(bits[1:])
- # else:
- # break
-
- # files.append(f)
-
- while lines:
- line = lines.pop(0)
- if (line == "Modified:\n" or
- line == "Added:\n" or
- line == "Removed:\n"):
- break
-
- while lines:
- line = lines.pop(0)
- if line == "\n":
- break
- if line == "Log:\n":
- lines.insert(0, line)
- break
- line = line.lstrip()
- line = line.rstrip()
-
- thesefiles = line.split(" ")
- for f in thesefiles:
- f = sep.join([directory, f])
- if prefix:
- bits = f.split(sep)
- if bits[0] == prefix:
- f = sep.join(bits[1:])
- else:
- break
- files.append(f)
-
-
- #if not files:
- # return None
-
- if not branch:
- return None
-
- while lines:
- line = lines.pop(0)
- if line == "Log:\n":
- break
-
- while lines:
- line = lines.pop(0)
- #if line.find("Directory: ") == 0:
- # break
- #if re.search(r"^--- NEW FILE", line):
- # break
- #if re.search(r" DELETED ---$", line):
- # break
- comments += line
- comments = comments.rstrip() + "\n"
- change = changes.Change(who, files, comments, isdir, when=when,
- branch=branch)
- return change
-
-
-
-def parseFreshCVSMail(self, fd, prefix=None, sep="/"):
- """Parse mail sent by FreshCVS"""
- # this uses rfc822.Message so it can run under python2.1 . In the future
- # it will be updated to use python2.2's "email" module.
-
- m = Message(fd)
- # FreshCVS sets From: to "user CVS <user>", but the <> part may be
- # modified by the MTA (to include a local domain)
- name, addr = m.getaddr("from")
- if not name:
- return None # no From means this message isn't from FreshCVS
- cvs = name.find(" CVS")
- if cvs == -1:
- return None # this message isn't from FreshCVS
- who = name[:cvs]
-
- # we take the time of receipt as the time of checkin. Not correct, but it
- # avoids the out-of-order-changes issue. See the comment in parseSyncmail
- # about using the 'Date:' header
- when = util.now()
-
- files = []
- comments = ""
- isdir = 0
- lines = m.fp.readlines()
- while lines:
- line = lines.pop(0)
- if line == "Modified files:\n":
- break
- while lines:
- line = lines.pop(0)
- if line == "\n":
- break
- line = line.rstrip("\n")
- linebits = line.split(None, 1)
- file = linebits[0]
- if prefix:
- # insist that the file start with the prefix: FreshCVS sends
- # changes we don't care about too
- bits = file.split(sep)
- if bits[0] == prefix:
- file = sep.join(bits[1:])
- else:
- break
- if len(linebits) == 1:
- isdir = 1
- elif linebits[1] == "0 0":
- isdir = 1
- files.append(file)
- while lines:
- line = lines.pop(0)
- if line == "Log message:\n":
- break
- # message is terminated by "ViewCVS links:" or "Index:..." (patch)
- while lines:
- line = lines.pop(0)
- if line == "ViewCVS links:\n":
- break
- if line.find("Index: ") == 0:
- break
- comments += line
- comments = comments.rstrip() + "\n"
-
- if not files:
- return None
-
- change = changes.Change(who, files, comments, isdir, when=when)
-
- return change
-
-def parseSyncmail(self, fd, prefix=None, sep="/"):
- """Parse messages sent by the 'syncmail' program, as suggested by the
- sourceforge.net CVS Admin documentation. Syncmail is maintained at
- syncmail.sf.net .
- """
- # pretty much the same as freshcvs mail, not surprising since CVS is the
- # one creating most of the text
-
- m = Message(fd)
- # The mail is sent from the person doing the checkin. Assume that the
- # local username is enough to identify them (this assumes a one-server
- # cvs-over-rsh environment rather than the server-dirs-shared-over-NFS
- # model)
- name, addr = m.getaddr("from")
- if not addr:
- return None # no From means this message isn't from syncmail
- at = addr.find("@")
- if at == -1:
- who = addr # might still be useful
- else:
- who = addr[:at]
-
- # we take the time of receipt as the time of checkin. Not correct (it
- # depends upon the email latency), but it avoids the out-of-order-changes
- # issue. Also syncmail doesn't give us anything better to work with,
- # unless you count pulling the v1-vs-v2 timestamp out of the diffs, which
- # would be ugly. TODO: Pulling the 'Date:' header from the mail is a
- # possibility, and email.Utils.parsedate_tz may be useful. It should be
- # configurable, however, because there are a lot of broken clocks out
- # there.
- when = util.now()
-
- subject = m.getheader("subject")
- # syncmail puts the repository-relative directory in the subject:
- # mprefix + "%(dir)s %(file)s,%(oldversion)s,%(newversion)s", where
- # 'mprefix' is something that could be added by a mailing list
- # manager.
- # this is the only reasonable way to determine the directory name
- space = subject.find(" ")
- if space != -1:
- directory = subject[:space]
- else:
- directory = subject
-
- files = []
- comments = ""
- isdir = 0
- branch = None
-
- lines = m.fp.readlines()
- #while lines:
- # line = lines.pop(0)
-
- # if (line == "Modified:\n" or
- # line == "Added:\n" or
- # line == "Removed:\n"):
- # break
-
- while lines:
- line = lines.pop(0)
- #if line == "\n":
- # break
- #if line == "Log:\n":
- # lines.insert(0, line)
- # break
- line = line.lstrip()
- line = line.rstrip()
- # note: syncmail will send one email per directory involved in a
- # commit, with multiple files if they were in the same directory.
- # Unlike freshCVS, it makes no attempt to collect all related
- # commits into a single message.
-
- # note: syncmail will report a Tag underneath the ... Files: line
- # e.g.: Tag: BRANCH-DEVEL
-
- if line.startswith('Tag:'):
- branch = line.split(' ')[-1].rstrip()
- branch = branch.replace("cws_src680_","")
- continue
-
- # note: it doesn't actually make sense to use portable functions
- # like os.path.join and os.sep, because these filenames all use
- # separator conventions established by the remote CVS server (which
- # is probably running on unix), not the local buildmaster system.
- thesefiles = line.split(" ")
- for f in thesefiles:
- f = sep.join([directory, f])
- if prefix:
- # insist that the file start with the prefix: we may get
- # changes we don't care about too
- bits = f.split(sep)
- if bits[0] == prefix:
- f = sep.join(bits[1:])
- else:
- break
- # TODO: figure out how new directories are described, set .isdir
- files.append(f)
-
- #if not files:
- # return None
-
- if not branch:
- return None
-
- while lines:
- line = lines.pop(0)
- if line == "Log:\n":
- break
- # message is terminated by "Index:..." (patch) or "--- NEW FILE.."
- # or "--- filename DELETED ---". Sigh.
- while lines:
- line = lines.pop(0)
- if line.find("Index: ") == 0:
- break
- if re.search(r"^--- NEW FILE", line):
- break
- if re.search(r" DELETED ---$", line):
- break
- comments += line
- comments = comments.rstrip() + "\n"
-
- change = changes.Change(who, files, comments, isdir, when=when,
- branch=branch)
-
- return change
-
-# Bonsai mail parser by Stephen Davis.
-#
-# This handles changes for CVS repositories that are watched by Bonsai
-# (http://www.mozilla.org/bonsai.html)
-
-# A Bonsai-formatted email message looks like:
-#
-# C|1071099907|stephend|/cvs|Sources/Scripts/buildbot|bonsai.py|1.2|||18|7
-# A|1071099907|stephend|/cvs|Sources/Scripts/buildbot|master.cfg|1.1|||18|7
-# R|1071099907|stephend|/cvs|Sources/Scripts/buildbot|BuildMaster.py|||
-# LOGCOMMENT
-# Updated bonsai parser and switched master config to buildbot-0.4.1 style.
-#
-# :ENDLOGCOMMENT
-#
-# In the first example line, stephend is the user, /cvs the repository,
-# buildbot the directory, bonsai.py the file, 1.2 the revision, no sticky
-# and branch, 18 lines added and 7 removed. All of these fields might not be
-# present (during "removes" for example).
-#
-# There may be multiple "control" lines or even none (imports, directory
-# additions) but there is one email per directory. We only care about actual
-# changes since it is presumed directory additions don't actually affect the
-# build. At least one file should need to change (the makefile, say) to
-# actually make a new directory part of the build process. That's my story
-# and I'm sticking to it.
-
-def parseBonsaiMail(self, fd, prefix=None):
- """Parse mail sent by the Bonsai cvs loginfo script."""
-
- msg = Message(fd)
-
- # we don't care who the email came from b/c the cvs user is in the msg
- # text
-
- who = "unknown"
- timestamp = None
- files = []
- lines = msg.fp.readlines()
-
- # read the control lines (what/who/where/file/etc.)
- while lines:
- line = lines.pop(0)
- if line == "LOGCOMMENT\n":
- break;
- line = line.rstrip("\n")
-
- # we'd like to do the following but it won't work if the number of
- # items doesn't match so...
- # what, timestamp, user, repo, module, file = line.split( '|' )
- items = line.split('|')
- if len(items) < 6:
- # not a valid line, assume this isn't a bonsai message
- return None
-
- try:
- # just grab the bottom-most timestamp, they're probably all the
- # same. TODO: I'm assuming this is relative to the epoch, but
- # this needs testing.
- timestamp = int(items[1])
- except ValueError:
- pass
-
- user = items[2]
- if user:
- who = user
-
- module = items[4]
- file = items[5]
- if module and file:
- path = "%s/%s" % (module, file)
- files.append(path)
-
- # if no files changed, return nothing
- if not files:
- return None
-
- # read the comments
- comments = ""
- while lines:
- line = lines.pop(0)
- if line == ":ENDLOGCOMMENT\n":
- break
- comments += line
- comments = comments.rstrip() + "\n"
-
- # return buildbot Change object
- return changes.Change(who, files, comments, when=timestamp)
-
-
-class MaildirSource(maildirtwisted.MaildirTwisted, base.ChangeSource):
- """This source will watch a maildir that is subscribed to a FreshCVS
- change-announcement mailing list.
- """
- # we need our own implements() here, at least for twisted-1.3, because
- # the double-inheritance of Service shadows __implements__ from
- # ChangeSource.
- if not implements:
- __implements__ = base.ChangeSource.__implements__
-
- compare_attrs = ["basedir", "newdir", "pollinterval", "parser"]
- parser = None
- name = None
-
- def __init__(self, maildir, prefix=None, sep="/"):
- maildirtwisted.MaildirTwisted.__init__(self, maildir)
- self.prefix = prefix
- self.sep = sep
-
- def describe(self):
- return "%s mailing list in maildir %s" % (self.name, self.basedir)
-
- def messageReceived(self, filename):
- path = os.path.join(self.basedir, "new", filename)
- change = self.parser(open(path, "r"), self.prefix, self.sep)
- if change:
- self.parent.addChange(change)
- os.rename(os.path.join(self.basedir, "new", filename),
- os.path.join(self.basedir, "cur", filename))
-
-class FCMaildirSource(MaildirSource):
- parser = parseFreshCVSMail
- name = "FreshCVS"
-
-class OOMaildirSource(MaildirSource):
- parser = parseOOAllCVSmail
- name = "AllCVS"
-
-class SyncmailMaildirSource(MaildirSource):
- parser = parseSyncmail
- name = "Syncmail"
-
-class BonsaiMaildirSource(MaildirSource):
- parser = parseBonsaiMail
- name = "Bonsai"
diff --git a/buildbot/buildbot-source/buildbot/changes/maildir.py b/buildbot/buildbot-source/buildbot/changes/maildir.py
deleted file mode 100644
index 83ff5ae14..000000000
--- a/buildbot/buildbot-source/buildbot/changes/maildir.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#! /usr/bin/python
-
-# This is a class which watches a maildir for new messages. It uses the
-# linux dirwatcher API (if available) to look for new files. The
-# .messageReceived method is invoked with the filename of the new message,
-# relative to the 'new' directory of the maildir.
-
-# this is an abstract base class. It must be subclassed by something to
-# provide a delay function (which polls in the case that DNotify isn't
-# available) and a way to safely schedule code to run after a signal handler
-# has fired. See maildirgtk.py and maildirtwisted.py for forms that use the
-# event loops provided by Gtk+ and Twisted.
-
-try:
- from dnotify import DNotify
- have_dnotify = 1
-except:
- have_dnotify = 0
-import os, os.path
-
-class Maildir:
- """This is a class which watches a maildir for new messages. Once
- started, it will run its .messageReceived method when a message is
- available.
- """
- def __init__(self, basedir=None):
- """Create the Maildir watcher. BASEDIR is the maildir directory (the
- one which contains new/ and tmp/)
- """
- self.basedir = basedir
- self.files = []
- self.pollinterval = 10 # only used if we don't have DNotify
- self.running = 0
- self.dnotify = None
-
- def setBasedir(self, basedir):
- self.basedir = basedir
-
- def start(self):
- """You must run start to receive any messages."""
- assert self.basedir
- self.newdir = os.path.join(self.basedir, "new")
- if self.running:
- return
- self.running = 1
- if not os.path.isdir(self.basedir) or not os.path.isdir(self.newdir):
- raise "invalid maildir '%s'" % self.basedir
- # we must hold an fd open on the directory, so we can get notified
- # when it changes.
- global have_dnotify
- if have_dnotify:
- try:
- self.dnotify = DNotify(self.newdir, self.dnotify_callback,
- [DNotify.DN_CREATE])
- except (IOError, OverflowError):
- # IOError is probably linux<2.4.19, which doesn't support
- # dnotify. OverflowError will occur on some 64-bit machines
- # because of a python bug
- print "DNotify failed, falling back to polling"
- have_dnotify = 0
-
- self.poll()
-
- def startTimeout(self):
- raise NotImplementedError
- def stopTimeout(self):
- raise NotImplementedError
- def dnotify_callback(self):
- print "callback"
- self.poll()
- raise NotImplementedError
-
- def stop(self):
- if self.dnotify:
- self.dnotify.remove()
- self.dnotify = None
- else:
- self.stopTimeout()
- self.running = 0
-
- def poll(self):
- assert self.basedir
- # see what's new
- for f in self.files:
- if not os.path.isfile(os.path.join(self.newdir, f)):
- self.files.remove(f)
- newfiles = []
- for f in os.listdir(self.newdir):
- if not f in self.files:
- newfiles.append(f)
- self.files.extend(newfiles)
- # TODO: sort by ctime, then filename, since safecat uses a rather
- # fine-grained timestamp in the filename
- for n in newfiles:
- # TODO: consider catching exceptions in messageReceived
- self.messageReceived(n)
- if not have_dnotify:
- self.startTimeout()
-
- def messageReceived(self, filename):
- """Called when a new file is noticed. Override it in subclasses.
- Will receive path relative to maildir/new."""
- print filename
-
-
-def test1():
- m = Maildir("ddir")
- m.start()
- import signal
- while 1:
- signal.pause()
-
-if __name__ == '__main__':
- test1()
-
diff --git a/buildbot/buildbot-source/buildbot/changes/maildirgtk.py b/buildbot/buildbot-source/buildbot/changes/maildirgtk.py
deleted file mode 100644
index 4bc03c4c5..000000000
--- a/buildbot/buildbot-source/buildbot/changes/maildirgtk.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#! /usr/bin/python
-
-# This is a class which watches a maildir for new messages. It uses the
-# linux dirwatcher API (if available) to look for new files. The
-# .messageReceived method is invoked with the filename of the new message,
-# relative to the top of the maildir (so it will look like "new/blahblah").
-
-# This form uses the Gtk event loop to handle polling and signal safety
-
-if __name__ == '__main__':
- import pygtk
- pygtk.require("2.0")
-
-import gtk
-from maildir import Maildir
-
-class MaildirGtk(Maildir):
- def __init__(self, basedir):
- Maildir.__init__(self, basedir)
- self.idler = None
- def startTimeout(self):
- self.timeout = gtk.timeout_add(self.pollinterval*1000, self.doTimeout)
- def doTimeout(self):
- self.poll()
- return gtk.TRUE # keep going
- def stopTimeout(self):
- if self.timeout:
- gtk.timeout_remove(self.timeout)
- self.timeout = None
- def dnotify_callback(self):
- # make it safe
- self.idler = gtk.idle_add(self.idlePoll)
- def idlePoll(self):
- gtk.idle_remove(self.idler)
- self.idler = None
- self.poll()
- return gtk.FALSE
-
-def test1():
- class MaildirTest(MaildirGtk):
- def messageReceived(self, filename):
- print "changed:", filename
- m = MaildirTest("ddir")
- print "watching ddir/new/"
- m.start()
- #gtk.main()
- # to allow the python-side signal handler to run, we must surface from
- # gtk (which blocks on the C-side) every once in a while.
- while 1:
- gtk.mainiteration() # this will block until there is something to do
- m.stop()
- print "done"
-
-if __name__ == '__main__':
- test1()
diff --git a/buildbot/buildbot-source/buildbot/changes/maildirtwisted.py b/buildbot/buildbot-source/buildbot/changes/maildirtwisted.py
deleted file mode 100644
index ec1bb98b9..000000000
--- a/buildbot/buildbot-source/buildbot/changes/maildirtwisted.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#! /usr/bin/python
-
-# This is a class which watches a maildir for new messages. It uses the
-# linux dirwatcher API (if available) to look for new files. The
-# .messageReceived method is invoked with the filename of the new message,
-# relative to the top of the maildir (so it will look like "new/blahblah").
-
-# This version is implemented as a Twisted Python "Service". It uses the
-# twisted Reactor to handle polling and signal safety.
-
-from twisted.application import service
-from twisted.internet import reactor
-from maildir import Maildir
-
-class MaildirTwisted(Maildir, service.Service):
- timeout = None
-
- def startService(self):
- self.start()
- service.Service.startService(self)
- def stopService(self):
- self.stop()
- service.Service.stopService(self)
-
- def startTimeout(self):
- self.timeout = reactor.callLater(self.pollinterval, self.poll)
- def stopTimeout(self):
- if self.timeout:
- self.timeout.cancel()
- self.timeout = None
-
- def dnotify_callback(self):
- # make it safe
- #reactor.callFromThread(self.poll)
- reactor.callLater(1, self.poll)
- # give it a moment. I found that qmail had problems when the message
- # was removed from the maildir instantly. It shouldn't, that's what
- # maildirs are made for. I wasn't able to eyeball any reason for the
- # problem, and safecat didn't behave the same way, but qmail reports
- # "Temporary_error_on_maildir_delivery" (qmail-local.c:165,
- # maildir_child() process exited with rc not in 0,2,3,4). Not sure why,
- # would have to hack qmail to investigate further, easier to just
- # wait a second before yanking the message out of new/ .
-
-## def messageReceived(self, filename):
-## if self.callback:
-## self.callback(filename)
-
-class MaildirService(MaildirTwisted):
- """I watch a maildir for new messages. I should be placed as the service
- child of some MultiService instance. When running, I use the linux
- dirwatcher API (if available) or poll for new files in the 'new'
- subdirectory of my maildir path. When I discover a new message, I invoke
- my parent's .messageReceived() method with the short filename of the new
- message, so the full name of the new file can be obtained with
- os.path.join(maildir, 'new', filename). I will not move or delete the
- file on my own: the parent should do this in messageReceived().
- """
- def messageReceived(self, filename):
- self.parent.messageReceived(filename)
-
-
-def test1():
- class MaildirTest(MaildirTwisted):
- def messageReceived(self, filename):
- print "changed:", filename
- m = MaildirTest(basedir="ddir")
- print "watching ddir/new/"
- m.startService()
- reactor.run()
- print "done"
-
-if __name__ == '__main__':
- test1()
-
-
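
The MaildirService docstring above describes delegating each new message to the parent's messageReceived(). A minimal sketch of such a parent, with an illustrative maildir path:

    import os
    from twisted.application import service
    from buildbot.changes.maildirtwisted import MaildirService

    class MailWatcher(service.MultiService):
        maildir = "/var/spool/buildbot/Maildir"   # placeholder path
        def messageReceived(self, filename):
            # MaildirService passes only the short name under new/
            print "new mail:", os.path.join(self.maildir, "new", filename)

    parent = MailWatcher()
    MaildirService(parent.maildir).setServiceParent(parent)
    # watching/polling begins once the parent service is started
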
diff --git a/buildbot/buildbot-source/buildbot/changes/p4poller.py b/buildbot/buildbot-source/buildbot/changes/p4poller.py
deleted file mode 100644
index d14e57c49..000000000
--- a/buildbot/buildbot-source/buildbot/changes/p4poller.py
+++ /dev/null
@@ -1,142 +0,0 @@
-#! /usr/bin/python
-
-# Many thanks to Dave Peticolas for contributing this module
-
-from twisted.internet import defer
-from twisted.internet.utils import getProcessOutput
-from twisted.internet.task import LoopingCall
-
-from buildbot import util
-from buildbot.changes import base, changes
-
-class P4Source(base.ChangeSource, util.ComparableMixin):
- """This source will poll a perforce repository for changes and submit
- them to the change master."""
-
- compare_attrs = ["p4port", "p4user", "p4passwd", "p4client", "p4base",
- "p4bin", "pollinterval", "histmax"]
-
- parent = None # filled in when we're added
- last_change = None
- loop = None
- volatile = ['loop']
-
- def __init__(self, p4port, p4user, p4passwd=None, p4client=None,
- p4base='//...', p4bin='p4',
- pollinterval=60 * 10, histmax=100):
- """
- @type p4port: string
- @param p4port: p4 port definition (host:portno)
- @type p4user: string
- @param p4user: p4 user
- @type p4passwd: string
- @param p4passwd: p4 passwd
- @type p4client: string
- @param p4client: name of p4 client to poll
- @type p4base: string
- @param p4base: p4 file specification to limit a poll to
- (i.e., //...)
- @type p4bin: string
- @param p4bin: path to p4 binary, defaults to just 'p4'
- @type pollinterval: int
- @param pollinterval: interval in seconds between polls
- @type histmax: int
- @param histmax: maximum number of changes to look back through
- """
-
- self.p4port = p4port
- self.p4user = p4user
- self.p4passwd = p4passwd
- self.p4client = p4client
- self.p4base = p4base
- self.p4bin = p4bin
- self.pollinterval = pollinterval
- self.histmax = histmax
-
- def startService(self):
- self.loop = LoopingCall(self.checkp4)
- self.loop.start(self.pollinterval)
- base.ChangeSource.startService(self)
-
- def stopService(self):
- self.loop.stop()
- return base.ChangeSource.stopService(self)
-
- def describe(self):
- return "p4source %s-%s %s" % (self.p4port, self.p4client, self.p4base)
-
- def checkp4(self):
- d = self._get_changes()
- d.addCallback(self._process_changes)
- d.addCallback(self._handle_changes)
-
- def _get_changes(self):
- args = []
- if self.p4port:
- args.extend(['-p', self.p4port])
- if self.p4user:
- args.extend(['-u', self.p4user])
- if self.p4passwd:
- args.extend(['-P', self.p4passwd])
- if self.p4client:
- args.extend(['-c', self.p4client])
- args.extend(['changes', '-m', str(self.histmax), self.p4base])
- env = {}
- return getProcessOutput(self.p4bin, args, env)
-
- def _process_changes(self, result):
- last_change = self.last_change
- changelists = []
- for line in result.split('\n'):
- line = line.strip()
- if not line: continue
- _, num, _, date, _, user, _ = line.split(' ', 6)
- if last_change is None:
- self.last_change = num
- return []
- if last_change == num: break
- change = {'num' : num, 'date' : date, 'user' : user.split('@')[0]}
- changelists.append(change)
- changelists.reverse() # oldest first
- ds = [self._get_change(c) for c in changelists]
- return defer.DeferredList(ds)
-
- def _get_change(self, change):
- args = []
- if self.p4port:
- args.extend(['-p', self.p4port])
- if self.p4user:
- args.extend(['-u', self.p4user])
- if self.p4passwd:
- args.extend(['-P', self.p4passwd])
- if self.p4client:
- args.extend(['-c', self.p4client])
- args.extend(['describe', '-s', change['num']])
- env = {}
- d = getProcessOutput(self.p4bin, args, env)
- d.addCallback(self._process_change, change)
- return d
-
- def _process_change(self, result, change):
- lines = result.split('\n')
- comments = ''
- while not lines[0].startswith('Affected files'):
- comments += lines.pop(0) + '\n'
- change['comments'] = comments
- lines.pop(0) # affected files
- files = []
- while lines:
- line = lines.pop(0).strip()
- if not line: continue
- files.append(line.split(' ')[1])
- change['files'] = files
- return change
-
- def _handle_changes(self, result):
- for success, change in result:
- if not success: continue
- c = changes.Change(change['user'], change['files'],
- change['comments'],
- revision=change['num'])
- self.parent.addChange(c)
- self.last_change = change['num']
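
The P4Source constructor arguments are documented above; a sketch with placeholder server and depot values, again destined for c['sources'] in master.cfg:

    from buildbot.changes.p4poller import P4Source

    p4 = P4Source("perforce.example.org:1666", "buildbot",
                  p4base="//depot/myproject/...",
                  pollinterval=5*60)   # poll every five minutes
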
diff --git a/buildbot/buildbot-source/buildbot/changes/pb.py b/buildbot/buildbot-source/buildbot/changes/pb.py
deleted file mode 100644
index 105f1efdf..000000000
--- a/buildbot/buildbot-source/buildbot/changes/pb.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# -*- test-case-name: buildbot.test.test_changes -*-
-
-import os, os.path
-
-from twisted.application import service
-from twisted.python import log
-
-from buildbot.pbutil import NewCredPerspective
-from buildbot.changes import base, changes
-
-class ChangePerspective(NewCredPerspective):
-
- def __init__(self, changemaster, prefix, sep="/"):
- self.changemaster = changemaster
- self.prefix = prefix
- # this is the separator as used by the VC system, not the local host.
- # If for some reason you're running your CVS repository under
- # windows, you'll need to use a PBChangeSource(sep="\\")
- self.sep = sep
-
- def attached(self, mind):
- return self
- def detached(self, mind):
- pass
-
- def perspective_addChange(self, changedict):
- log.msg("perspective_addChange called")
- pathnames = []
- for path in changedict['files']:
- if self.prefix:
- bits = path.split(self.sep)
- if bits[0] == self.prefix:
- if bits[1:]:
- path = self.sep.join(bits[1:])
- else:
- path = ''
- else:
- break
- pathnames.append(path)
-
- if pathnames:
- change = changes.Change(changedict['who'],
- pathnames,
- changedict['comments'],
- branch=changedict.get('branch'),
- revision=changedict.get('revision'),
- )
- self.changemaster.addChange(change)
-
-class PBChangeSource(base.ChangeSource):
- compare_attrs = ["user", "passwd", "port", "prefix", "sep"]
-
- def __init__(self, user="change", passwd="changepw", port=None,
- prefix=None, sep="/"):
- # TODO: current limitations
- assert user == "change"
- assert passwd == "changepw"
- assert port == None
- self.user = user
- self.passwd = passwd
- self.port = port
- self.prefix = prefix
- self.sep = sep
-
- def describe(self):
- # TODO: when the dispatcher is fixed, report the specific port
- #d = "PB listener on port %d" % self.port
- d = "PBChangeSource listener on all-purpose slaveport"
- if self.prefix is not None:
- d += " (prefix '%s')" % self.prefix
- return d
-
- def startService(self):
- base.ChangeSource.startService(self)
- # our parent is the ChangeMaster object
- # find the master's Dispatch object and register our username
- # TODO: the passwd should be registered here too
- master = self.parent.parent
- master.dispatcher.register(self.user, self)
-
- def stopService(self):
- base.ChangeSource.stopService(self)
- # unregister our username
- master = self.parent.parent
- master.dispatcher.unregister(self.user)
-
- def getPerspective(self):
- return ChangePerspective(self.parent, self.prefix, self.sep)
-
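
PBChangeSource above registers itself on the master's existing slave port, and this version only accepts the default change/changepw credentials (see the asserts). A one-line master.cfg-style sketch:

    from buildbot.changes.pb import PBChangeSource

    src = PBChangeSource(prefix="myproject")
    # appended to c['sources']; clients such as 'buildbot sendchange' can then
    # push change dicts to perspective_addChange over PB
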
diff --git a/buildbot/buildbot-source/buildbot/clients/__init__.py b/buildbot/buildbot-source/buildbot/clients/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/buildbot/clients/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/buildbot/clients/base.py b/buildbot/buildbot-source/buildbot/clients/base.py
deleted file mode 100644
index c5d12a322..000000000
--- a/buildbot/buildbot-source/buildbot/clients/base.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#! /usr/bin/python
-
-import sys, re
-
-from twisted.spread import pb
-from twisted.cred import credentials
-from twisted.internet import reactor
-
-class StatusClient(pb.Referenceable):
- """To use this, call my .connected method with a RemoteReference to the
- buildmaster's StatusClientPerspective object.
- """
-
- def __init__(self, events):
- self.builders = {}
- self.events = events
-
- def connected(self, remote):
- print "connected"
- self.remote = remote
- remote.callRemote("subscribe", self.events, 5, self)
-
- def remote_builderAdded(self, buildername, builder):
- print "builderAdded", buildername
-
- def remote_builderRemoved(self, buildername):
- print "builderRemoved", buildername
-
- def remote_builderChangedState(self, buildername, state, eta):
- print "builderChangedState", buildername, state, eta
-
- def remote_buildStarted(self, buildername, build):
- print "buildStarted", buildername
-
- def remote_buildFinished(self, buildername, build, results):
- print "buildFinished", results
-
- def remote_buildETAUpdate(self, buildername, build, eta):
- print "ETA", buildername, eta
-
- def remote_stepStarted(self, buildername, build, stepname, step):
- print "stepStarted", buildername, stepname
-
- def remote_stepFinished(self, buildername, build, stepname, step, results):
- print "stepFinished", buildername, stepname, results
-
- def remote_stepETAUpdate(self, buildername, build, stepname, step,
- eta, expectations):
- print "stepETA", buildername, stepname, eta
-
- def remote_logStarted(self, buildername, build, stepname, step,
- logname, log):
- print "logStarted", buildername, stepname
-
- def remote_logFinished(self, buildername, build, stepname, step,
- logname, log):
- print "logFinished", buildername, stepname
-
- def remote_logChunk(self, buildername, build, stepname, step, logname, log,
- channel, text):
- ChunkTypes = ["STDOUT", "STDERR", "HEADER"]
- print "logChunk[%s]: %s" % (ChunkTypes[channel], text)
-
-class TextClient:
- def __init__(self, master, events="steps"):
- """
- @type events: string, one of builders, builds, steps, logs, full
- @param events: specify what level of detail should be reported.
- - 'builders': only announce new/removed Builders
- - 'builds': also announce builderChangedState, buildStarted, and
- buildFinished
- - 'steps': also announce buildETAUpdate, stepStarted, stepFinished
- - 'logs': also announce stepETAUpdate, logStarted, logFinished
- - 'full': also announce log contents
- """
- self.master = master
- self.listener = StatusClient(events)
-
- def run(self):
- """Start the TextClient."""
- self.startConnecting()
- reactor.run()
-
- def startConnecting(self):
- try:
- host, port = re.search(r'(.+):(\d+)', self.master).groups()
- port = int(port)
- except:
- print "unparseable master location '%s'" % self.master
- print " expecting something more like localhost:8007"
- raise
- cf = pb.PBClientFactory()
- creds = credentials.UsernamePassword("statusClient", "clientpw")
- d = cf.login(creds)
- reactor.connectTCP(host, port, cf)
- d.addCallback(self.connected)
- return d
- def connected(self, ref):
- ref.notifyOnDisconnect(self.disconnected)
- self.listener.connected(ref)
-
- def disconnected(self, ref):
- print "lost connection"
- reactor.stop()
-
-if __name__ == '__main__':
- master = "localhost:8007"
- if len(sys.argv) > 1:
- master = sys.argv[1]
- c = TextClient(master)
- c.run()
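
The TextClient above logs in with the statusClient/clientpw credentials and prints events at the requested level of detail. A usage sketch with a placeholder master address:

    from buildbot.clients.base import TextClient

    client = TextClient("buildmaster.example.org:8007", events="builds")
    client.run()   # connects, subscribes, and prints events until interrupted
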
diff --git a/buildbot/buildbot-source/buildbot/clients/debug.glade b/buildbot/buildbot-source/buildbot/clients/debug.glade
deleted file mode 100644
index 9c56787c8..000000000
--- a/buildbot/buildbot-source/buildbot/clients/debug.glade
+++ /dev/null
@@ -1,669 +0,0 @@
-<?xml version="1.0" standalone="no"?> <!--*- mode: xml -*-->
-<!DOCTYPE glade-interface SYSTEM "http://glade.gnome.org/glade-2.0.dtd">
-
-<glade-interface>
-<requires lib="gnome"/>
-
-<widget class="GtkWindow" id="window1">
- <property name="visible">True</property>
- <property name="title" translatable="yes">Buildbot Debug Tool</property>
- <property name="type">GTK_WINDOW_TOPLEVEL</property>
- <property name="window_position">GTK_WIN_POS_NONE</property>
- <property name="modal">False</property>
- <property name="resizable">True</property>
- <property name="destroy_with_parent">False</property>
- <property name="decorated">True</property>
- <property name="skip_taskbar_hint">False</property>
- <property name="skip_pager_hint">False</property>
- <property name="type_hint">GDK_WINDOW_TYPE_HINT_NORMAL</property>
- <property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
- <property name="focus_on_map">True</property>
-
- <child>
- <widget class="GtkVBox" id="vbox1">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">0</property>
-
- <child>
- <widget class="GtkHBox" id="connection">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">0</property>
-
- <child>
- <widget class="GtkButton" id="connectbutton">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">Connect</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <signal name="clicked" handler="do_connect"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkLabel" id="connectlabel">
- <property name="visible">True</property>
- <property name="label" translatable="yes">Disconnected</property>
- <property name="use_underline">False</property>
- <property name="use_markup">False</property>
- <property name="justify">GTK_JUSTIFY_CENTER</property>
- <property name="wrap">False</property>
- <property name="selectable">False</property>
- <property name="xalign">0.5</property>
- <property name="yalign">0.5</property>
- <property name="xpad">0</property>
- <property name="ypad">0</property>
- <property name="ellipsize">PANGO_ELLIPSIZE_NONE</property>
- <property name="width_chars">-1</property>
- <property name="single_line_mode">False</property>
- <property name="angle">0</property>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkHBox" id="commands">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">0</property>
-
- <child>
- <widget class="GtkButton" id="reload">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">Reload .cfg</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <signal name="clicked" handler="do_reload" last_modification_time="Wed, 24 Sep 2003 20:47:55 GMT"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkButton" id="rebuild">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">Rebuild .py</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <signal name="clicked" handler="do_rebuild" last_modification_time="Wed, 24 Sep 2003 20:49:18 GMT"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkButton" id="button7">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">poke IRC</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <signal name="clicked" handler="do_poke_irc" last_modification_time="Wed, 14 Jan 2004 22:23:59 GMT"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkFrame" id="Commit">
- <property name="border_width">4</property>
- <property name="visible">True</property>
- <property name="label_xalign">0</property>
- <property name="label_yalign">0.5</property>
- <property name="shadow_type">GTK_SHADOW_ETCHED_IN</property>
-
- <child>
- <widget class="GtkAlignment" id="alignment1">
- <property name="visible">True</property>
- <property name="xalign">0.5</property>
- <property name="yalign">0.5</property>
- <property name="xscale">1</property>
- <property name="yscale">1</property>
- <property name="top_padding">0</property>
- <property name="bottom_padding">0</property>
- <property name="left_padding">0</property>
- <property name="right_padding">0</property>
-
- <child>
- <widget class="GtkVBox" id="vbox3">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">0</property>
-
- <child>
- <widget class="GtkHBox" id="commit">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">0</property>
-
- <child>
- <widget class="GtkButton" id="button2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">commit</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <signal name="clicked" handler="do_commit"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkEntry" id="filename">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="editable">True</property>
- <property name="visibility">True</property>
- <property name="max_length">0</property>
- <property name="text" translatable="yes">twisted/internet/app.py</property>
- <property name="has_frame">True</property>
- <property name="invisible_char">*</property>
- <property name="activates_default">False</property>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkHBox" id="hbox2">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">0</property>
-
- <child>
- <widget class="GtkLabel" id="label5">
- <property name="visible">True</property>
- <property name="label" translatable="yes">Who: </property>
- <property name="use_underline">False</property>
- <property name="use_markup">False</property>
- <property name="justify">GTK_JUSTIFY_LEFT</property>
- <property name="wrap">False</property>
- <property name="selectable">False</property>
- <property name="xalign">0.5</property>
- <property name="yalign">0.5</property>
- <property name="xpad">0</property>
- <property name="ypad">0</property>
- <property name="ellipsize">PANGO_ELLIPSIZE_NONE</property>
- <property name="width_chars">-1</property>
- <property name="single_line_mode">False</property>
- <property name="angle">0</property>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkEntry" id="who">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="editable">True</property>
- <property name="visibility">True</property>
- <property name="max_length">0</property>
- <property name="text" translatable="yes">bob</property>
- <property name="has_frame">True</property>
- <property name="invisible_char">*</property>
- <property name="activates_default">False</property>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkHBox" id="hbox3">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">0</property>
-
- <child>
- <widget class="GtkCheckButton" id="usebranch">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">Branch:</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <property name="active">False</property>
- <property name="inconsistent">False</property>
- <property name="draw_indicator">True</property>
- <signal name="toggled" handler="on_usebranch_toggled" last_modification_time="Tue, 25 Oct 2005 01:42:45 GMT"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkEntry" id="branch">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="editable">True</property>
- <property name="visibility">True</property>
- <property name="max_length">0</property>
- <property name="text" translatable="yes"></property>
- <property name="has_frame">True</property>
- <property name="invisible_char">*</property>
- <property name="activates_default">False</property>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkHBox" id="hbox1">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">0</property>
-
- <child>
- <widget class="GtkCheckButton" id="userevision">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">Revision:</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <property name="active">False</property>
- <property name="inconsistent">False</property>
- <property name="draw_indicator">True</property>
- <signal name="toggled" handler="on_userevision_toggled" last_modification_time="Wed, 08 Sep 2004 17:58:33 GMT"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkEntry" id="revision">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="editable">True</property>
- <property name="visibility">True</property>
- <property name="max_length">0</property>
- <property name="text" translatable="yes"></property>
- <property name="has_frame">True</property>
- <property name="invisible_char">*</property>
- <property name="activates_default">False</property>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- </child>
-
- <child>
- <widget class="GtkLabel" id="label4">
- <property name="visible">True</property>
- <property name="label" translatable="yes">Commit</property>
- <property name="use_underline">False</property>
- <property name="use_markup">False</property>
- <property name="justify">GTK_JUSTIFY_LEFT</property>
- <property name="wrap">False</property>
- <property name="selectable">False</property>
- <property name="xalign">0.5</property>
- <property name="yalign">0.5</property>
- <property name="xpad">2</property>
- <property name="ypad">0</property>
- <property name="ellipsize">PANGO_ELLIPSIZE_NONE</property>
- <property name="width_chars">-1</property>
- <property name="single_line_mode">False</property>
- <property name="angle">0</property>
- </widget>
- <packing>
- <property name="type">label_item</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkFrame" id="builderframe">
- <property name="border_width">4</property>
- <property name="visible">True</property>
- <property name="label_xalign">0</property>
- <property name="label_yalign">0.5</property>
- <property name="shadow_type">GTK_SHADOW_ETCHED_IN</property>
-
- <child>
- <widget class="GtkVBox" id="vbox2">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">0</property>
-
- <child>
- <widget class="GtkHBox" id="builder">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">3</property>
-
- <child>
- <widget class="GtkLabel" id="label1">
- <property name="visible">True</property>
- <property name="label" translatable="yes">Builder:</property>
- <property name="use_underline">False</property>
- <property name="use_markup">False</property>
- <property name="justify">GTK_JUSTIFY_CENTER</property>
- <property name="wrap">False</property>
- <property name="selectable">False</property>
- <property name="xalign">0.5</property>
- <property name="yalign">0.5</property>
- <property name="xpad">0</property>
- <property name="ypad">0</property>
- <property name="ellipsize">PANGO_ELLIPSIZE_NONE</property>
- <property name="width_chars">-1</property>
- <property name="single_line_mode">False</property>
- <property name="angle">0</property>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkEntry" id="buildname">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="editable">True</property>
- <property name="visibility">True</property>
- <property name="max_length">0</property>
- <property name="text" translatable="yes">one</property>
- <property name="has_frame">True</property>
- <property name="invisible_char">*</property>
- <property name="activates_default">False</property>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkHBox" id="buildercontrol">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">0</property>
-
- <child>
- <widget class="GtkButton" id="button1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">Force
-Build</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <signal name="clicked" handler="do_build"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <placeholder/>
- </child>
-
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkHBox" id="status">
- <property name="visible">True</property>
- <property name="homogeneous">False</property>
- <property name="spacing">0</property>
-
- <child>
- <widget class="GtkLabel" id="label2">
- <property name="visible">True</property>
- <property name="label" translatable="yes">Currently:</property>
- <property name="use_underline">False</property>
- <property name="use_markup">False</property>
- <property name="justify">GTK_JUSTIFY_CENTER</property>
- <property name="wrap">False</property>
- <property name="selectable">False</property>
- <property name="xalign">0.5</property>
- <property name="yalign">0.5</property>
- <property name="xpad">7</property>
- <property name="ypad">0</property>
- <property name="ellipsize">PANGO_ELLIPSIZE_NONE</property>
- <property name="width_chars">-1</property>
- <property name="single_line_mode">False</property>
- <property name="angle">0</property>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkButton" id="button3">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">offline</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <signal name="clicked" handler="do_current_offline"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkButton" id="button4">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">idle</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <signal name="clicked" handler="do_current_idle"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkButton" id="button5">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">waiting</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <signal name="clicked" handler="do_current_waiting"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
-
- <child>
- <widget class="GtkButton" id="button6">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="label" translatable="yes">building</property>
- <property name="use_underline">True</property>
- <property name="relief">GTK_RELIEF_NORMAL</property>
- <property name="focus_on_click">True</property>
- <signal name="clicked" handler="do_current_building"/>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
- </widget>
- </child>
-
- <child>
- <widget class="GtkLabel" id="label3">
- <property name="visible">True</property>
- <property name="label" translatable="yes">Builder</property>
- <property name="use_underline">False</property>
- <property name="use_markup">False</property>
- <property name="justify">GTK_JUSTIFY_LEFT</property>
- <property name="wrap">False</property>
- <property name="selectable">False</property>
- <property name="xalign">0.5</property>
- <property name="yalign">0.5</property>
- <property name="xpad">2</property>
- <property name="ypad">0</property>
- <property name="ellipsize">PANGO_ELLIPSIZE_NONE</property>
- <property name="width_chars">-1</property>
- <property name="single_line_mode">False</property>
- <property name="angle">0</property>
- </widget>
- <packing>
- <property name="type">label_item</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="padding">0</property>
- <property name="expand">True</property>
- <property name="fill">True</property>
- </packing>
- </child>
- </widget>
- </child>
-</widget>
-
-</glade-interface>
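The widget ids in this glade file (buildname, filename, connectbutton, connectlabel) and the <signal> handler names (do_connect, do_commit, and so on) are resolved at runtime by the debug client in the next hunk, roughly as sketched here:

    # Sketch of the glade/Python wiring used by clients/debug.py (below).
    import gtk.glade
    from twisted.python import util

    xml = gtk.glade.XML(util.sibpath(__file__, "debug.glade"))
    entry = xml.get_widget("buildname")                    # widget id from the XML above
    xml.signal_connect("do_connect", lambda widget: None)  # handler name from a <signal> tag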
diff --git a/buildbot/buildbot-source/buildbot/clients/debug.py b/buildbot/buildbot-source/buildbot/clients/debug.py
deleted file mode 100644
index 5e0fa6e4b..000000000
--- a/buildbot/buildbot-source/buildbot/clients/debug.py
+++ /dev/null
@@ -1,163 +0,0 @@
-#! /usr/bin/python
-
-from twisted.internet import gtk2reactor
-gtk2reactor.install()
-from twisted.internet import reactor
-from twisted.python import util
-from twisted.spread import pb
-from twisted.cred import credentials
-import gtk, gtk.glade, gnome.ui
-import os, sys, re
-
-class DebugWidget:
- def __init__(self, master="localhost:8007", passwd="debugpw"):
- self.connected = 0
- try:
- host, port = re.search(r'(.+):(\d+)', master).groups()
- except:
- print "unparseable master location '%s'" % master
- print " expecting something more like localhost:8007"
- raise
- self.host = host
- self.port = int(port)
- self.passwd = passwd
- self.remote = None
- xml = self.xml = gtk.glade.XML(util.sibpath(__file__, "debug.glade"))
- g = xml.get_widget
- self.buildname = g('buildname')
- self.filename = g('filename')
- self.connectbutton = g('connectbutton')
- self.connectlabel = g('connectlabel')
- g('window1').connect('destroy', lambda win: gtk.mainquit())
- # put the master info in the window's titlebar
- g('window1').set_title("Buildbot Debug Tool: %s" % master)
- c = xml.signal_connect
- c('do_connect', self.do_connect)
- c('do_reload', self.do_reload)
- c('do_rebuild', self.do_rebuild)
- c('do_poke_irc', self.do_poke_irc)
- c('do_build', self.do_build)
- c('do_commit', self.do_commit)
- c('on_usebranch_toggled', self.usebranch_toggled)
- self.usebranch_toggled(g('usebranch'))
- c('on_userevision_toggled', self.userevision_toggled)
- self.userevision_toggled(g('userevision'))
- c('do_current_offline', self.do_current, "offline")
- c('do_current_idle', self.do_current, "idle")
- c('do_current_waiting', self.do_current, "waiting")
- c('do_current_building', self.do_current, "building")
-
- def do_connect(self, widget):
- if self.connected:
- self.connectlabel.set_text("Disconnecting...")
- if self.remote:
- self.remote.broker.transport.loseConnection()
- else:
- self.connectlabel.set_text("Connecting...")
- f = pb.PBClientFactory()
- creds = credentials.UsernamePassword("debug", self.passwd)
- d = f.login(creds)
- reactor.connectTCP(self.host, int(self.port), f)
- d.addCallbacks(self.connect_complete, self.connect_failed)
- def connect_complete(self, ref):
- self.connectbutton.set_label("Disconnect")
- self.connectlabel.set_text("Connected")
- self.connected = 1
- self.remote = ref
- self.remote.callRemote("print", "hello cleveland")
- self.remote.notifyOnDisconnect(self.disconnected)
- def connect_failed(self, why):
- self.connectlabel.set_text("Failed")
- print why
- def disconnected(self, ref):
- self.connectbutton.set_label("Connect")
- self.connectlabel.set_text("Disconnected")
- self.connected = 0
- self.remote = None
-
- def do_reload(self, widget):
- if not self.remote:
- return
- d = self.remote.callRemote("reload")
- d.addErrback(self.err)
- def do_rebuild(self, widget):
- print "Not yet implemented"
- return
- def do_poke_irc(self, widget):
- if not self.remote:
- return
- d = self.remote.callRemote("pokeIRC")
- d.addErrback(self.err)
-
- def do_build(self, widget):
- if not self.remote:
- return
- name = self.buildname.get_text()
- d = self.remote.callRemote("forceBuild", name)
- d.addErrback(self.err)
-
- def usebranch_toggled(self, widget):
- rev = self.xml.get_widget('branch')
- if widget.get_active():
- rev.set_sensitive(True)
- else:
- rev.set_sensitive(False)
-
- def userevision_toggled(self, widget):
- rev = self.xml.get_widget('revision')
- if widget.get_active():
- rev.set_sensitive(True)
- else:
- rev.set_sensitive(False)
-
- def do_commit(self, widget):
- if not self.remote:
- return
- filename = self.filename.get_text()
- who = self.xml.get_widget("who").get_text()
-
- branch = None
- if self.xml.get_widget("usebranch").get_active():
- branch = self.xml.get_widget('branch').get_text()
- if branch == '':
- branch = None
-
- revision = None
- if self.xml.get_widget("userevision").get_active():
- revision = self.xml.get_widget('revision').get_text()
- try:
- revision = int(revision)
- except ValueError:
- pass
- if revision == '':
- revision = None
-
- kwargs = { 'revision': revision, 'who': who }
- if branch:
- kwargs['branch'] = branch
- d = self.remote.callRemote("fakeChange", filename, **kwargs)
- d.addErrback(self.err)
-
- def do_current(self, widget, state):
- if not self.remote:
- return
- name = self.buildname.get_text()
- d = self.remote.callRemote("setCurrentState", name, state)
- d.addErrback(self.err)
- def err(self, failure):
- print "received error"
- failure.printTraceback()
-
-
- def run(self):
- reactor.run()
-
-if __name__ == '__main__':
- master = "localhost:8007"
- if len(sys.argv) > 1:
- master = sys.argv[1]
- passwd = "debugpw"
- if len(sys.argv) > 2:
- passwd = sys.argv[2]
- d = DebugWidget(master, passwd)
- d.run()
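A hedged usage sketch for the removed debug client, using the defaults from the __main__ block above (master localhost:8007, password "debugpw"); the window's Connect button then performs the PB login:

    from buildbot.clients.debug import DebugWidget

    d = DebugWidget("localhost:8007", passwd="debugpw")
    d.run()   # gtk main loop via reactor.run(); press Connect to log in

or, from the command line as the __main__ block implies:

    python buildbot/clients/debug.py localhost:8007 debugpw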
diff --git a/buildbot/buildbot-source/buildbot/clients/gtkPanes.py b/buildbot/buildbot-source/buildbot/clients/gtkPanes.py
deleted file mode 100644
index b82ac509c..000000000
--- a/buildbot/buildbot-source/buildbot/clients/gtkPanes.py
+++ /dev/null
@@ -1,428 +0,0 @@
-#! /usr/bin/python
-
-from twisted.internet import gtk2reactor
-gtk2reactor.install()
-
-from twisted.internet import reactor
-
-import sys, time
-
-import pygtk
-pygtk.require("2.0")
-import gtk
-assert(gtk.Window) # in gtk1 it's gtk.GtkWindow
-
-from twisted.spread import pb
-
-#from buildbot.clients.base import Builder, Client
-from buildbot.clients.base import TextClient
-#from buildbot.util import now
-
-'''
-class Pane:
- def __init__(self):
- pass
-
-class OneRow(Pane):
- """This is a one-row status bar. It has one square per Builder, and that
- square is either red, yellow, or green. """
-
- def __init__(self):
- Pane.__init__(self)
- self.widget = gtk.VBox(gtk.FALSE, 2)
- self.nameBox = gtk.HBox(gtk.TRUE)
- self.statusBox = gtk.HBox(gtk.TRUE)
- self.widget.add(self.nameBox)
- self.widget.add(self.statusBox)
- self.widget.show_all()
- self.builders = []
-
- def getWidget(self):
- return self.widget
- def addBuilder(self, builder):
- print "OneRow.addBuilder"
- # todo: ordering. Should follow the order in which they were added
- # to the original BotMaster
- self.builders.append(builder)
- # add the name to the left column, and a label (with background) to
- # the right
- name = gtk.Label(builder.name)
- status = gtk.Label('??')
- status.set_size_request(64,64)
- box = gtk.EventBox()
- box.add(status)
- name.show()
- box.show_all()
- self.nameBox.add(name)
- self.statusBox.add(box)
- builder.haveSomeWidgets([name, status, box])
-
-class R2Builder(Builder):
- def start(self):
- self.nameSquare.set_text(self.name)
- self.statusSquare.set_text("???")
- self.subscribe()
- def haveSomeWidgets(self, widgets):
- self.nameSquare, self.statusSquare, self.statusBox = widgets
-
- def remote_newLastBuildStatus(self, event):
- color = None
- if event:
- text = "\n".join(event.text)
- color = event.color
- else:
- text = "none"
- self.statusSquare.set_text(text)
- if color:
- print "color", color
- self.statusBox.modify_bg(gtk.STATE_NORMAL,
- gtk.gdk.color_parse(color))
-
- def remote_currentlyOffline(self):
- self.statusSquare.set_text("offline")
- def remote_currentlyIdle(self):
- self.statusSquare.set_text("idle")
- def remote_currentlyWaiting(self, seconds):
- self.statusSquare.set_text("waiting")
- def remote_currentlyInterlocked(self):
- self.statusSquare.set_text("interlocked")
- def remote_currentlyBuilding(self, eta):
- self.statusSquare.set_text("building")
-
-
-class CompactRow(Pane):
- def __init__(self):
- Pane.__init__(self)
- self.widget = gtk.VBox(gtk.FALSE, 3)
- self.nameBox = gtk.HBox(gtk.TRUE, 2)
- self.lastBuildBox = gtk.HBox(gtk.TRUE, 2)
- self.statusBox = gtk.HBox(gtk.TRUE, 2)
- self.widget.add(self.nameBox)
- self.widget.add(self.lastBuildBox)
- self.widget.add(self.statusBox)
- self.widget.show_all()
- self.builders = []
-
- def getWidget(self):
- return self.widget
-
- def addBuilder(self, builder):
- self.builders.append(builder)
-
- name = gtk.Label(builder.name)
- name.show()
- self.nameBox.add(name)
-
- last = gtk.Label('??')
- last.set_size_request(64,64)
- lastbox = gtk.EventBox()
- lastbox.add(last)
- lastbox.show_all()
- self.lastBuildBox.add(lastbox)
-
- status = gtk.Label('??')
- status.set_size_request(64,64)
- statusbox = gtk.EventBox()
- statusbox.add(status)
- statusbox.show_all()
- self.statusBox.add(statusbox)
-
- builder.haveSomeWidgets([name, last, lastbox, status, statusbox])
-
- def removeBuilder(self, name, builder):
- self.nameBox.remove(builder.nameSquare)
- self.lastBuildBox.remove(builder.lastBuildBox)
- self.statusBox.remove(builder.statusBox)
- self.builders.remove(builder)
-
-class CompactBuilder(Builder):
- def setup(self):
- self.timer = None
- self.text = []
- self.eta = None
- def start(self):
- self.nameSquare.set_text(self.name)
- self.statusSquare.set_text("???")
- self.subscribe()
- def haveSomeWidgets(self, widgets):
- (self.nameSquare,
- self.lastBuildSquare, self.lastBuildBox,
- self.statusSquare, self.statusBox) = widgets
-
- def remote_currentlyOffline(self):
- self.eta = None
- self.stopTimer()
- self.statusSquare.set_text("offline")
- self.statusBox.modify_bg(gtk.STATE_NORMAL,
- gtk.gdk.color_parse("red"))
- def remote_currentlyIdle(self):
- self.eta = None
- self.stopTimer()
- self.statusSquare.set_text("idle")
- def remote_currentlyWaiting(self, seconds):
- self.nextBuild = now() + seconds
- self.startTimer(self.updateWaiting)
- def remote_currentlyInterlocked(self):
- self.stopTimer()
- self.statusSquare.set_text("interlocked")
- def startTimer(self, func):
- # the func must clear self.timer and return gtk.FALSE when the event
- # has arrived
- self.stopTimer()
- self.timer = gtk.timeout_add(1000, func)
- func()
- def stopTimer(self):
- if self.timer:
- gtk.timeout_remove(self.timer)
- self.timer = None
- def updateWaiting(self):
- when = self.nextBuild
- if now() < when:
- next = time.strftime("%H:%M:%S", time.localtime(when))
- secs = "[%d seconds]" % (when - now())
- self.statusSquare.set_text("waiting\n%s\n%s" % (next, secs))
- return gtk.TRUE # restart timer
- else:
- # done
- self.statusSquare.set_text("waiting\n[RSN]")
- self.timer = None
- return gtk.FALSE
-
- def remote_currentlyBuilding(self, eta):
- self.stopTimer()
- self.statusSquare.set_text("building")
- if eta:
- d = eta.callRemote("subscribe", self, 5)
-
- def remote_newLastBuildStatus(self, event):
- color = None
- if event:
- text = "\n".join(event.text)
- color = event.color
- else:
- text = "none"
- if not color: color = "gray"
- self.lastBuildSquare.set_text(text)
- self.lastBuildBox.modify_bg(gtk.STATE_NORMAL,
- gtk.gdk.color_parse(color))
-
- def remote_newEvent(self, event):
- assert(event.__class__ == GtkUpdatingEvent)
- self.current = event
- event.builder = self
- self.text = event.text
- if not self.text: self.text = ["idle"]
- self.eta = None
- self.stopTimer()
- self.updateText()
- color = event.color
- if not color: color = "gray"
- self.statusBox.modify_bg(gtk.STATE_NORMAL,
- gtk.gdk.color_parse(color))
-
- def updateCurrent(self):
- text = self.current.text
- if text:
- self.text = text
- self.updateText()
- color = self.current.color
- if color:
- self.statusBox.modify_bg(gtk.STATE_NORMAL,
- gtk.gdk.color_parse(color))
- def updateText(self):
- etatext = []
- if self.eta:
- etatext = [time.strftime("%H:%M:%S", time.localtime(self.eta))]
- if now() > self.eta:
- etatext += ["RSN"]
- else:
- seconds = self.eta - now()
- etatext += ["[%d secs]" % seconds]
- text = "\n".join(self.text + etatext)
- self.statusSquare.set_text(text)
- def updateTextTimer(self):
- self.updateText()
- return gtk.TRUE # restart timer
-
- def remote_progress(self, seconds):
- if seconds == None:
- self.eta = None
- else:
- self.eta = now() + seconds
- self.startTimer(self.updateTextTimer)
- self.updateText()
- def remote_finished(self, eta):
- self.eta = None
- self.stopTimer()
- self.updateText()
- eta.callRemote("unsubscribe", self)
-'''
-
-class TwoRowBuilder:
- def __init__(self, ref):
- self.lastbox = lastbox = gtk.EventBox()
- self.lastlabel = lastlabel = gtk.Label("?")
- lastbox.add(lastlabel)
- lastbox.set_size_request(64,64)
-
- self.currentbox = currentbox = gtk.EventBox()
- self.currentlabel = currentlabel = gtk.Label("?")
- currentbox.add(currentlabel)
- currentbox.set_size_request(64,64)
-
- self.ref = ref
-
- def setColor(self, box, color):
- box.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
-
- def getLastBuild(self):
- d = self.ref.callRemote("getLastFinishedBuild")
- d.addCallback(self.gotLastBuild)
- def gotLastBuild(self, build):
- if build:
- build.callRemote("getText").addCallback(self.gotLastText)
- build.callRemote("getColor").addCallback(self.gotLastColor)
-
- def gotLastText(self, text):
- self.lastlabel.set_text("\n".join(text))
- def gotLastColor(self, color):
- self.setColor(self.lastbox, color)
-
- def getState(self):
- self.ref.callRemote("getState").addCallback(self.gotState)
- def gotState(self, res):
- state, ETA, builds = res
- # state is one of: offline, idle, waiting, interlocked, building
- # TODO: ETA is going away, you have to look inside the builds to get
- # that value
- currentmap = {"offline": "red",
- "idle": "white",
- "waiting": "yellow",
- "interlocked": "yellow",
- "building": "yellow",}
- text = state
- self.setColor(self.currentbox, currentmap[state])
- if ETA is not None:
- text += "\nETA=%s secs" % ETA
- self.currentlabel.set_text(text)
-
- def buildStarted(self, build):
- pass
- def buildFinished(self, build, results):
- self.gotLastBuild(build)
-
-
-class TwoRowClient(pb.Referenceable):
- def __init__(self, window):
- self.window = window
- self.buildernames = []
- self.builders = {}
-
- def connected(self, ref):
- print "connected"
- self.ref = ref
- self.pane = gtk.VBox(False, 2)
- self.table = gtk.Table(1+2, 1)
- self.pane.add(self.table)
- self.window.vb.add(self.pane)
- self.pane.show_all()
- ref.callRemote("subscribe", "builds", 5, self)
-
- def removeTable(self):
- for child in self.table.get_children():
- self.table.remove(child)
- self.pane.remove(self.table)
-
- def makeTable(self):
- columns = len(self.builders)
- self.table = gtk.Table(2, columns)
- self.pane.add(self.table)
- for i in range(len(self.buildernames)):
- name = self.buildernames[i]
- b = self.builders[name]
- self.table.attach(gtk.Label(name), i, i+1, 0, 1)
- self.table.attach(b.lastbox, i, i+1, 1, 2,
- xpadding=1, ypadding=1)
- self.table.attach(b.currentbox, i, i+1, 2, 3,
- xpadding=1, ypadding=1)
- self.table.show_all()
-
- def rebuildTable(self):
- self.removeTable()
- self.makeTable()
-
- def remote_builderAdded(self, buildername, builder):
- print "builderAdded", buildername
- assert buildername not in self.buildernames
- self.buildernames.append(buildername)
-
- b = TwoRowBuilder(builder)
- self.builders[buildername] = b
- self.rebuildTable()
- b.getLastBuild()
- b.getState()
-
- def remote_builderRemoved(self, buildername):
- del self.builders[buildername]
- self.buildernames.remove(buildername)
- self.rebuildTable()
-
- def remote_builderChangedState(self, name, state, eta):
- self.builders[name].gotState((state, eta, None))
- def remote_buildStarted(self, name, build):
- self.builders[name].buildStarted(build)
- def remote_buildFinished(self, name, build, results):
- self.builders[name].buildFinished(build, results)
-
-
-class GtkClient(TextClient):
- ClientClass = TwoRowClient
-
- def __init__(self, master):
- self.master = master
-
- w = gtk.Window()
- self.w = w
- #w.set_size_request(64,64)
- w.connect('destroy', lambda win: gtk.main_quit())
- self.vb = gtk.VBox(False, 2)
- self.status = gtk.Label("unconnected")
- self.vb.add(self.status)
- self.listener = self.ClientClass(self)
- w.add(self.vb)
- w.show_all()
-
- def connected(self, ref):
- self.status.set_text("connected")
- TextClient.connected(self, ref)
-
-"""
- def addBuilder(self, name, builder):
- Client.addBuilder(self, name, builder)
- self.pane.addBuilder(builder)
- def removeBuilder(self, name):
- self.pane.removeBuilder(name, self.builders[name])
- Client.removeBuilder(self, name)
-
- def startConnecting(self, master):
- self.master = master
- Client.startConnecting(self, master)
- self.status.set_text("connecting to %s.." % master)
- def connected(self, remote):
- Client.connected(self, remote)
- self.status.set_text(self.master)
- remote.notifyOnDisconnect(self.disconnected)
- def disconnected(self, remote):
- self.status.set_text("disconnected, will retry")
-"""
-
-def main():
- master = "localhost:8007"
- if len(sys.argv) > 1:
- master = sys.argv[1]
- c = GtkClient(master)
- c.run()
-
-if __name__ == '__main__':
- main()
-
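A minimal sketch of the removed GTK status display, based on the main() function above; GtkClient reuses TextClient.run() and startConnecting() from clients/base.py, and TwoRowClient.connected() subscribes with events="builds":

    from buildbot.clients.gtkPanes import GtkClient

    # Assumes the same localhost:8007 status port as the other clients.
    client = GtkClient("localhost:8007")
    client.run()   # opens the two-row builder table and tracks build state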
diff --git a/buildbot/buildbot-source/buildbot/clients/sendchange.py b/buildbot/buildbot-source/buildbot/clients/sendchange.py
deleted file mode 100644
index 3887505e5..000000000
--- a/buildbot/buildbot-source/buildbot/clients/sendchange.py
+++ /dev/null
@@ -1,39 +0,0 @@
-
-from twisted.spread import pb
-from twisted.cred import credentials
-from twisted.internet import reactor
-from twisted.python import log
-
-class Sender:
- def __init__(self, master, user):
- self.user = user
- self.host, self.port = master.split(":")
- self.port = int(self.port)
-
- def send(self, branch, revision, comments, files):
- change = {'who': self.user, 'files': files, 'comments': comments,
- 'branch': branch, 'revision': revision}
-
- f = pb.PBClientFactory()
- d = f.login(credentials.UsernamePassword("change", "changepw"))
- reactor.connectTCP(self.host, self.port, f)
- d.addCallback(self.addChange, change)
- return d
-
- def addChange(self, remote, change):
- d = remote.callRemote('addChange', change)
- d.addCallback(lambda res: remote.broker.transport.loseConnection())
- return d
-
- def printSuccess(self, res):
- print "change sent successfully"
- def printFailure(self, why):
- print "change NOT sent"
- print why
-
- def stop(self, res):
- reactor.stop()
- return res
-
- def run(self):
- reactor.run()
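The removed Sender returns a Deferred from send() and leaves the success/failure/stop plumbing to its caller (buildbot's sendchange command). A hedged sketch of that chaining, with an illustrative master address and change data; the "change"/"changepw" credentials are the ones hard-coded above:

    from buildbot.clients.sendchange import Sender

    s = Sender("localhost:9989", "alice")          # master host:port and user are illustrative
    d = s.send(branch=None, revision="1234",
               comments="fix the frobnicator",
               files=["src/frob.c"])
    d.addCallbacks(s.printSuccess, s.printFailure)
    d.addBoth(s.stop)                              # stop the reactor either way
    s.run()                                        # blocks in reactor.run() until stop()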
diff --git a/buildbot/buildbot-source/buildbot/dnotify.py b/buildbot/buildbot-source/buildbot/dnotify.py
deleted file mode 100644
index d4c5eda34..000000000
--- a/buildbot/buildbot-source/buildbot/dnotify.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#! /usr/bin/python
-
-# spiv wants this
-
-import fcntl, signal
-
-class DNotify_Handler:
- def __init__(self):
- self.watchers = {}
- self.installed = 0
- def install(self):
- if self.installed:
- return
- signal.signal(signal.SIGIO, self.fire)
- self.installed = 1
- def uninstall(self):
- if not self.installed:
- return
- signal.signal(signal.SIGIO, signal.SIG_DFL)
- self.installed = 0
- def add(self, watcher):
- self.watchers[watcher.fd.fileno()] = watcher
- self.install()
- def remove(self, watcher):
- if self.watchers.has_key(watcher.fd.fileno()):
- del(self.watchers[watcher.fd.fileno()])
- if not self.watchers:
- self.uninstall()
- def fire(self, signum, frame):
- # this is the signal handler
- # without siginfo_t, we must fire them all
- for watcher in self.watchers.values():
- watcher.callback()
-
-class DNotify:
- DN_ACCESS = fcntl.DN_ACCESS # a file in the directory was read
- DN_MODIFY = fcntl.DN_MODIFY # a file was modified (write,truncate)
- DN_CREATE = fcntl.DN_CREATE # a file was created
- DN_DELETE = fcntl.DN_DELETE # a file was unlinked
- DN_RENAME = fcntl.DN_RENAME # a file was renamed
- DN_ATTRIB = fcntl.DN_ATTRIB # a file had attributes changed (chmod,chown)
-
- handler = [None]
-
- def __init__(self, dirname, callback=None,
- flags=[DN_MODIFY,DN_CREATE,DN_DELETE,DN_RENAME]):
-
- """This object watches a directory for changes. The .callback
- attribute should be set to a function to be run every time something
- happens to it. Be aware that it will be called more times than you
- expect."""
-
- if callback:
- self.callback = callback
- else:
- self.callback = self.fire
- self.dirname = dirname
- self.flags = reduce(lambda x, y: x | y, flags) | fcntl.DN_MULTISHOT
- self.fd = open(dirname, "r")
- # ideally we would move the notification to something like SIGRTMIN,
- # (to free up SIGIO) and use sigaction to have the signal handler
- # receive a structure with the fd number. But python doesn't offer
- # either.
- if not self.handler[0]:
- self.handler[0] = DNotify_Handler()
- self.handler[0].add(self)
- fcntl.fcntl(self.fd, fcntl.F_NOTIFY, self.flags)
- def remove(self):
- self.handler[0].remove(self)
- self.fd.close()
- def fire(self):
- print self.dirname, "changed!"
-
-def test_dnotify1():
- d = DNotify(".")
- import time
- while 1:
- signal.pause()
-
-def test_dnotify2():
- # create ./foo/, create/delete files in ./ and ./foo/ while this is
- # running. Notice how both notifiers are fired when anything changes;
- # this is an unfortunate side-effect of the lack of extended sigaction
- # support in Python.
- count = [0]
- d1 = DNotify(".")
- def fire1(count=count, d1=d1):
- print "./ changed!", count[0]
- count[0] += 1
- if count[0] > 5:
- d1.remove()
- del(d1)
- # change the callback, since we can't define it until after we have the
- # dnotify object. Hmm, unless we give the dnotify to the callback.
- d1.callback = fire1
- def fire2(): print "foo/ changed!"
- d2 = DNotify("foo", fire2)
- import time
- while 1:
- signal.pause()
-
-
-if __name__ == '__main__':
- test_dnotify2()
-
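A brief sketch of the removed DNotify helper in use, following the test functions above; the directory path is hypothetical, and the callback fires on any create/delete/rename/modify in that directory (more often than you might expect, as the docstring warns):

    import signal
    from buildbot.dnotify import DNotify

    def changed():
        print "maildir changed"                   # Python 2, matching the module above

    watcher = DNotify("/home/bob/Maildir/new", changed)   # hypothetical path
    while 1:
        signal.pause()                            # SIGIO delivery drives the callback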
diff --git a/buildbot/buildbot-source/buildbot/interfaces.py b/buildbot/buildbot-source/buildbot/interfaces.py
deleted file mode 100644
index e3986317b..000000000
--- a/buildbot/buildbot-source/buildbot/interfaces.py
+++ /dev/null
@@ -1,890 +0,0 @@
-#! /usr/bin/python
-
-"""Interface documentation.
-
-Define the interfaces that are implemented by various buildbot classes.
-"""
-
-from twisted.python.components import Interface
-
-# exceptions that can be raised while trying to start a build
-class NoSlaveError(Exception):
- pass
-class BuilderInUseError(Exception):
- pass
-class BuildSlaveTooOldError(Exception):
- pass
-
-class IChangeSource(Interface):
- """Object which feeds Change objects to the changemaster. When files or
- directories are changed and the version control system provides some
- kind of notification, this object should turn it into a Change object
- and pass it through::
-
- self.changemaster.addChange(change)
- """
-
- def start():
- """Called when the buildmaster starts. Can be used to establish
- connections to VC daemons or begin polling."""
-
- def stop():
- """Called when the buildmaster shuts down. Connections should be
- terminated, polling timers should be canceled."""
-
- def describe():
- """Should return a string which briefly describes this source. This
- string will be displayed in an HTML status page."""
-
-class IScheduler(Interface):
- """I watch for Changes in the source tree and decide when to trigger
- Builds. I create BuildSet objects and submit them to the BuildMaster. I
- am a service, and the BuildMaster is always my parent."""
-
- def addChange(change):
- """A Change has just been dispatched by one of the ChangeSources.
- Each Scheduler will receive this Change. I may decide to start a
- build as a result, or I might choose to ignore it."""
-
- def listBuilderNames():
- """Return a list of strings indicating the Builders that this
- Scheduler might feed."""
-
- def getPendingBuildTimes():
- """Return a list of timestamps for any builds that are waiting in the
- tree-stable-timer queue. This is only relevant for Change-based
- schedulers, all others can just return an empty list."""
- # TODO: it might be nice to make this into getPendingBuildSets, which
- # would let someone subscribe to the buildset being finished.
- # However, the Scheduler doesn't actually create the buildset until
- # it gets submitted, so doing this would require some major rework.
-
-class IUpstreamScheduler(Interface):
- """This marks an IScheduler as being eligible for use as the 'upstream='
- argument to a buildbot.scheduler.Dependent instance."""
-
- def subscribeToSuccessfulBuilds(target):
- """Request that the target callbable be invoked after every
- successful buildset. The target will be called with a single
- argument: the SourceStamp used by the successful builds."""
-
- def listBuilderNames():
- """Return a list of strings indicating the Builders that this
- Scheduler might feed."""
-
-class ISourceStamp(Interface):
- pass
-
-class IEmailSender(Interface):
- """I know how to send email, and can be used by other parts of the
- Buildbot to contact developers."""
- pass
-
-class IEmailLookup(Interface):
- def getAddress(user):
- """Turn a User-name string into a valid email address. Either return
- a string (with an @ in it), None (to indicate that the user cannot
- be reached by email), or a Deferred which will fire with the same."""
-
-class IStatus(Interface):
- """I am an object, obtainable from the buildmaster, which can provide
- status information."""
-
- def getProjectName():
- """Return the name of the project that this Buildbot is working
- for."""
- def getProjectURL():
- """Return the URL of this Buildbot's project."""
- def getBuildbotURL():
- """Return the URL of the top-most Buildbot status page, or None if
- this Buildbot does not provide a web status page."""
- def getURLFor(thing):
- """Return the URL of a page which provides information on 'thing',
- which should be an object that implements one of the status
- interfaces defined in L{buildbot.interfaces}. Returns None if no
- suitable page is available (or if no Waterfall is running)."""
-
- def getSchedulers():
- """Return a list of ISchedulerStatus objects for all
- currently-registered Schedulers."""
-
- def getBuilderNames(categories=None):
- """Return a list of the names of all current Builders."""
- def getBuilder(name):
- """Return the IBuilderStatus object for a given named Builder."""
- def getSlave(name):
- """Return the ISlaveStatus object for a given named buildslave."""
-
- def getBuildSets():
- """Return a list of active (non-finished) IBuildSetStatus objects."""
-
- def subscribe(receiver):
- """Register an IStatusReceiver to receive new status events. The
- receiver will immediately be sent a set of 'builderAdded' messages
- for all current builders. It will receive further 'builderAdded' and
- 'builderRemoved' messages as the config file is reloaded and builders
- come and go. It will also receive 'buildsetSubmitted' messages for
- all outstanding BuildSets (and each new BuildSet that gets
- submitted). No additional messages will be sent unless the receiver
- asks for them by calling .subscribe on the IBuilderStatus objects
- which accompany the addedBuilder message."""
-
- def unsubscribe(receiver):
- """Unregister an IStatusReceiver. No further status messgaes will be
- delivered."""
-
-class IBuildSetStatus(Interface):
- """I represent a set of Builds, each run on a separate Builder but all
- using the same source tree."""
-
- def getSourceStamp():
- pass
- def getReason():
- pass
- def getID():
- """Return the BuildSet's ID string, if any. The 'try' feature uses a
- random string as a BuildSetID to relate submitted jobs with the
- resulting BuildSet."""
- def getResponsibleUsers():
- pass # not implemented
- def getInterestedUsers():
- pass # not implemented
- def getBuilderNames():
- """Return a list of the names of all Builders on which this set will
- do builds."""
- def getBuildRequests():
- """Return a list of IBuildRequestStatus objects that represent my
- component Builds. This list might correspond to the Builders named by
- getBuilderNames(), but if builder categories are used, or 'Builder
- Aliases' are implemented, then they may not."""
- def isFinished():
- pass
- def waitUntilSuccess():
- """Return a Deferred that fires (with this IBuildSetStatus object)
- when the outcome of the BuildSet is known, i.e., upon the first
- failure, or after all builds complete successfully."""
- def waitUntilFinished():
- """Return a Deferred that fires (with this IBuildSetStatus object)
- when all builds have finished."""
- def getResults():
- pass
-
-class IBuildRequestStatus(Interface):
- """I represent a request to build a particular set of source code on a
- particular Builder. These requests may be merged by the time they are
- finally turned into a Build."""
-
- def getSourceStamp():
- pass
- def getBuilderName():
- pass
- def getBuilds():
- """Return a list of IBuildStatus objects for each Build that has been
- started in an attempt to satisfy this BuildRequest."""
-
- def subscribe(observer):
- """Register a callable that will be invoked (with a single
- IBuildStatus object) for each Build that is created to satisfy this
- request. There may be multiple Builds created in an attempt to handle
- the request: they may be interrupted by the user or abandoned due to
- a lost slave. The last Build (the one which actually gets to run to
- completion) is said to 'satisfy' the BuildRequest. The observer will
- be called once for each of these Builds, both old and new."""
- def unsubscribe(observer):
- """Unregister the callable that was registered with subscribe()."""
-
-
-class ISlaveStatus(Interface):
- def getName():
- """Return the name of the build slave."""
-
- def getAdmin():
- """Return a string with the slave admin's contact data."""
-
- def getHost():
- """Return a string with the slave host info."""
-
- def isConnected():
- """Return True if the slave is currently online, False if not."""
-
-class ISchedulerStatus(Interface):
- def getName():
- """Return the name of this Scheduler (a string)."""
-
- def getPendingBuildsets():
- """Return an IBuildSet for all BuildSets that are pending. These
- BuildSets are waiting for their tree-stable-timers to expire."""
- # TODO: this is not implemented anywhere
-
-
-class IBuilderStatus(Interface):
- def getName():
- """Return the name of this Builder (a string)."""
-
- def getState():
- # TODO: this isn't nearly as meaningful as it used to be
- """Return a tuple (state, builds) for this Builder. 'state' is the
- so-called 'big-status', indicating overall status (as opposed to
- which step is currently running). It is a string, one of 'offline',
- 'idle', or 'building'. 'builds' is a list of IBuildStatus objects
- (possibly empty) representing the currently active builds."""
-
- def getSlaves():
- """Return a list of ISlaveStatus objects for the buildslaves that are
- used by this builder."""
-
- def getPendingBuilds():
- """Return an IBuildRequestStatus object for all upcoming builds
- (those which are ready to go but which are waiting for a buildslave
- to be available)."""
-
- def getCurrentBuilds():
- """Return a list containing an IBuildStatus object for each build
- currently in progress."""
- # again, we could probably provide an object for 'waiting' and
- # 'interlocked' too, but things like the Change list might still be
- # subject to change
-
- def getLastFinishedBuild():
- """Return the IBuildStatus object representing the last finished
- build, which may be None if the builder has not yet finished any
- builds."""
-
- def getBuild(number):
- """Return an IBuildStatus object for a historical build. Each build
- is numbered (starting at 0 when the Builder is first added),
- getBuild(n) will retrieve the Nth such build. getBuild(-n) will
- retrieve a recent build, with -1 being the most recent build
- started. If the Builder is idle, this will be the same as
- getLastFinishedBuild(). If the Builder is active, it will be an
- unfinished build. This method will return None if the build is no
- longer available. Older builds are likely to have less information
- stored: Logs are the first to go, then Steps."""
-
- def getEvent(number):
- """Return an IStatusEvent object for a recent Event. Builders
- connecting and disconnecting are events, as are ping attempts.
- getEvent(-1) will return the most recent event. Events are numbered,
- but it probably doesn't make sense to ever do getEvent(+n)."""
-
- def subscribe(receiver):
- """Register an IStatusReceiver to receive new status events. The
- receiver will be given builderChangedState, buildStarted, and
- buildFinished messages."""
-
- def unsubscribe(receiver):
- """Unregister an IStatusReceiver. No further status messgaes will be
- delivered."""
-
-class IBuildStatus(Interface):
- """I represent the status of a single Build/BuildRequest. It could be
- in-progress or finished."""
-
- def getBuilder():
- """
- Return the BuilderStatus that owns this build.
-
- @rtype: implementor of L{IBuilderStatus}
- """
-
- def isFinished():
- """Return a boolean. True means the build has finished, False means
- it is still running."""
-
- def waitUntilFinished():
- """Return a Deferred that will fire when the build finishes. If the
- build has already finished, this deferred will fire right away. The
- callback is given this IBuildStatus instance as an argument."""
-
- def getProperty(propname):
- """Return the value of the build property with the given name."""
-
- def getReason():
- """Return a string that indicates why the build was run. 'changes',
- 'forced', and 'periodic' are the most likely values. 'try' will be
- added in the future."""
-
- def getSourceStamp():
- """Return a tuple of (branch, revision, patch) which can be used to
- re-create the source tree that this build used. 'branch' is a string
- with a VC-specific meaning, or None to indicate that the checkout
- step used its default branch. 'revision' is a string, the sort you
- would pass to 'cvs co -r REVISION'. 'patch' is either None, or a
- (level, diff) tuple which represents a patch that should be applied
- with 'patch -pLEVEL < DIFF' from the directory created by the
- checkout operation.
-
- This method will return None if the source information is no longer
- available."""
- # TODO: it should be possible to expire the patch but still remember
- # that the build was r123+something.
-
- # TODO: change this to return the actual SourceStamp instance, and
- # remove getChanges()
-
- def getChanges():
- """Return a list of Change objects which represent which source
- changes went into the build."""
-
- def getResponsibleUsers():
- """Return a list of Users who are to blame for the changes that went
- into this build. If anything breaks (at least anything that wasn't
- already broken), blame them. Specifically, this is the set of users
- who were responsible for the Changes that went into this build. Each
- User is a string, corresponding to their name as known by the VC
- repository."""
-
- def getInterestedUsers():
- """Return a list of Users who will want to know about the results of
- this build. This is a superset of getResponsibleUsers(): it adds
- people who are interested in this build but who did not actually
- make the Changes that went into it (build sheriffs, code-domain
- owners)."""
-
- def getNumber():
- """Within each builder, each Build has a number. Return it."""
-
- def getPreviousBuild():
- """Convenience method. Returns None if the previous build is
- unavailable."""
-
- def getSteps():
- """Return a list of IBuildStepStatus objects. For invariant builds
- (those which always use the same set of Steps), this should always
- return the complete list, however some of the steps may not have
- started yet (step.getTimes()[0] will be None). For variant builds,
- this may not be complete (asking again later may give you more of
- them)."""
-
- def getTimes():
- """Returns a tuple of (start, end). 'start' and 'end' are the times
- (seconds since the epoch) when the Build started and finished. If
- the build is still running, 'end' will be None."""
-
- # while the build is running, the following methods make sense.
- # Afterwards they return None
-
- def getETA():
- """Returns the number of seconds from now in which the build is
- expected to finish, or None if we can't make a guess. This guess will
- be refined over time."""
-
- def getCurrentStep():
- """Return an IBuildStepStatus object representing the currently
- active step."""
-
- # Once you know the build has finished, the following methods are legal.
- # Before the build has finished, they all return None.
-
- def getSlavename():
- """Return the name of the buildslave which handled this build."""
-
- def getText():
- """Returns a list of strings to describe the build. These are
- intended to be displayed in a narrow column. If more space is
- available, the caller should join them together with spaces before
- presenting them to the user."""
-
- def getColor():
- """Returns a single string with the color that should be used to
- display the build. 'green', 'orange', or 'red' are the most likely
- ones."""
-
- def getResults():
- """Return a constant describing the results of the build: one of the
- constants in buildbot.status.builder: SUCCESS, WARNINGS, or
- FAILURE."""
-
- def getLogs():
- """Return a list of logs that describe the build as a whole. Some
- steps will contribute their logs, while others are less important
- and will only be accessible through the IBuildStepStatus objects.
- Each log is an object which implements the IStatusLog interface."""
-
- def getTestResults():
- """Return a dictionary that maps test-name tuples to ITestResult
- objects. This may return an empty or partially-filled dictionary
- until the build has completed."""
-
- # subscription interface
-
- def subscribe(receiver, updateInterval=None):
- """Register an IStatusReceiver to receive new status events. The
- receiver will be given stepStarted and stepFinished messages. If
- 'updateInterval' is non-None, buildETAUpdate messages will be sent
- every 'updateInterval' seconds."""
-
- def unsubscribe(receiver):
- """Unregister an IStatusReceiver. No further status messgaes will be
- delivered."""
-
-class ITestResult(Interface):
- """I describe the results of a single unit test."""
-
- def getName():
- """Returns a tuple of strings which make up the test name. Tests may
- be arranged in a hierarchy, so looking for common prefixes may be
- useful."""
-
- def getResults():
- """Returns a constant describing the results of the test: SUCCESS,
- WARNINGS, FAILURE."""
-
- def getText():
- """Returns a list of short strings which describe the results of the
- test in slightly more detail. Suggested components include
- 'failure', 'error', 'passed', 'timeout'."""
-
- def getLogs():
- # in flux, it may be possible to provide more structured information
- # like python Failure instances
- """Returns a dictionary of test logs. The keys are strings like
- 'stdout', 'log', 'exceptions'. The values are strings."""
-
-
-class IBuildStepStatus(Interface):
- """I hold status for a single BuildStep."""
-
- def getName():
- """Returns a short string with the name of this step. This string
- may have spaces in it."""
-
- def getBuild():
- """Returns the IBuildStatus object which contains this step."""
-
- def getTimes():
- """Returns a tuple of (start, end). 'start' and 'end' are the times
- (seconds since the epoch) when the Step started and finished. If the
- step has not yet started, 'start' will be None. If the step is still
- running, 'end' will be None."""
-
- def getExpectations():
- """Returns a list of tuples (name, current, target). Each tuple
- describes a single axis along which the step's progress can be
- measured. 'name' is a string which describes the axis itself, like
- 'filesCompiled' or 'tests run' or 'bytes of output'. 'current' is a
- number with the progress made so far, while 'target' is the value
- that we expect (based upon past experience) to get to when the build
- is finished.
-
- 'current' will change over time until the step is finished. It is
- 'None' until the step starts. When the build is finished, 'current'
- may or may not equal 'target' (which is merely the expectation based
- upon previous builds)."""
-
- def getLogs():
- """Returns a list of IStatusLog objects. If the step has not yet
- finished, this list may be incomplete (asking again later may give
- you more of them)."""
-
-
- def isFinished():
- """Return a boolean. True means the step has finished, False means it
- is still running."""
-
- def waitUntilFinished():
- """Return a Deferred that will fire when the step finishes. If the
- step has already finished, this deferred will fire right away. The
- callback is given this IBuildStepStatus instance as an argument."""
-
- # while the step is running, the following methods make sense.
- # Afterwards they return None
-
- def getETA():
- """Returns the number of seconds from now in which the step is
- expected to finish, or None if we can't make a guess. This guess will
- be refined over time."""
-
- # Once you know the step has finished, the following methods are legal.
- # Before the step has finished, they all return None.
-
- def getText():
- """Returns a list of strings which describe the step. These are
- intended to be displayed in a narrow column. If more space is
- available, the caller should join them together with spaces before
- presenting them to the user."""
-
- def getColor():
- """Returns a single string with the color that should be used to
- display this step. 'green', 'orange', 'red' and 'yellow' are the
- most likely ones."""
-
- def getResults():
- """Return a tuple describing the results of the step: (result,
- strings). 'result' is one of the constants in
- buildbot.status.builder: SUCCESS, WARNINGS, FAILURE, or SKIPPED.
- 'strings' is an optional list of strings that the step wants to
- append to the overall build's results. These strings are usually
- more terse than the ones returned by getText(): in particular,
- successful Steps do not usually contribute any text to the overall
- build."""
-
- # subscription interface
-
- def subscribe(receiver, updateInterval=10):
- """Register an IStatusReceiver to receive new status events. The
- receiver will be given logStarted and logFinished messages. It will
- also be given a ETAUpdate message every 'updateInterval' seconds."""
-
- def unsubscribe(receiver):
- """Unregister an IStatusReceiver. No further status messgaes will be
- delivered."""
-
-class IStatusEvent(Interface):
- """I represent a Builder Event, something non-Build related that can
- happen to a Builder."""
-
- def getTimes():
- """Returns a tuple of (start, end) like IBuildStepStatus, but end==0
- indicates that this is a 'point event', which has no duration.
- SlaveConnect/Disconnect are point events. Ping is not: it starts
- when requested and ends when the response (positive or negative) is
- returned"""
-
- def getText():
- """Returns a list of strings which describe the event. These are
- intended to be displayed in a narrow column. If more space is
- available, the caller should join them together with spaces before
- presenting them to the user."""
-
- def getColor():
- """Returns a single string with the color that should be used to
- display this event. 'red' and 'yellow' are the most likely ones."""
-
-class IStatusLog(Interface):
- """I represent a single Log, which is a growing list of text items that
- contains some kind of output for a single BuildStep. I might be finished,
- in which case this list has stopped growing.
-
- Each Log has a name, usually something boring like 'log' or 'output'.
- These names are not guaranteed to be unique, however they are usually
- chosen to be useful within the scope of a single step (i.e. the Compile
- step might produce both 'log' and 'warnings'). The name may also have
- spaces. If you want something more globally meaningful, at least within a
- given Build, try::
-
- '%s.%s' % (log.getStep().getName(), log.getName())
-
- The Log can be presented as plain text, or it can be accessed as a list
- of items, each of which has a channel indicator (header, stdout, stderr)
- and a text chunk. An HTML display might represent the interleaved
- channels with different styles, while a straight download-the-text
- interface would just want to retrieve a big string.
-
- The 'header' channel is used by ShellCommands to prepend a note about
- which command is about to be run ('running command FOO in directory
- DIR'), and append another note giving the exit code of the process.
-
- Logs can be streaming: if the Log has not yet finished, you can
- subscribe to receive new chunks as they are added.
-
- A ShellCommand will have a Log associated with it that gathers stdout
- and stderr. Logs may also be created by parsing command output or
- through other synthetic means (grepping for all the warnings in a
- compile log, or listing all the test cases that are going to be run).
- Such synthetic Logs are usually finished as soon as they are created."""
-
-
- def getName():
- """Returns a short string with the name of this log, probably 'log'.
- """
-
- def getStep():
- """Returns the IBuildStepStatus which owns this log."""
- # TODO: can there be non-Step logs?
-
- def isFinished():
- """Return a boolean. True means the log has finished and is closed,
- False means it is still open and new chunks may be added to it."""
-
- def waitUntilFinished():
- """Return a Deferred that will fire when the log is closed. If the
- log has already finished, this deferred will fire right away. The
- callback is given this IStatusLog instance as an argument."""
-
- def subscribe(receiver, catchup):
- """Register an IStatusReceiver to receive chunks (with logChunk) as
- data is added to the Log. If you use this, you will also want to use
- waitUntilFinished to find out when the listener can be retired.
- Subscribing to a closed Log is a no-op.
-
- If 'catchup' is True, the receiver will immediately be sent a series
- of logChunk messages to bring it up to date with the partially-filled
- log. This allows a status client to join a Log already in progress
- without missing any data. If the Log has already finished, it is too
- late to catch up: just do getText() instead.
-
- If the Log is very large, the receiver will be called many times with
- a lot of data. There is no way to throttle this data. If the receiver
- is planning on sending the data on to somewhere else, over a narrow
- connection, you can get a throttleable subscription by using
- C{subscribeConsumer} instead."""
-
- def unsubscribe(receiver):
- """Remove a receiver previously registered with subscribe(). Attempts
- to remove a receiver which was not previously registered is a no-op.
- """
-
- def subscribeConsumer(consumer):
- """Register an L{IStatusLogConsumer} to receive all chunks of the
- logfile, including all the old entries and any that will arrive in
- the future. The consumer will first have their C{registerProducer}
- method invoked with a reference to an object that can be told
- C{pauseProducing}, C{resumeProducing}, and C{stopProducing}. Then the
- consumer's C{writeChunk} method will be called repeatedly with each
- (channel, text) tuple in the log, starting with the very first. The
- consumer will be notified with C{finish} when the log has been
- exhausted (which can only happen when the log is finished). Note that
- a small amount of data could be written via C{writeChunk} even after
- C{pauseProducing} has been called.
-
- To unsubscribe the consumer, use C{producer.stopProducing}."""
-
- # once the log has finished, the following methods make sense. They can
- # be called earlier, but they will only return the contents of the log up
- # to the point at which they were called. You will lose items that are
- # added later. Use C{subscribe} or C{subscribeConsumer} to avoid missing
- # anything.
-
- def hasContents():
- """Returns True if the LogFile still has contents available. Returns
- False for logs that have been pruned. Clients should test this before
- offering to show the contents of any log."""
-
- def getText():
- """Return one big string with the contents of the Log. This merges
- all non-header chunks together."""
-
- def getTextWithHeaders():
- """Return one big string with the contents of the Log. This merges
- all chunks (including headers) together."""
-
- def getChunks():
- """Generate a list of (channel, text) tuples. 'channel' is a number,
- 0 for stdout, 1 for stderr, 2 for header. (note that stderr is merged
- into stdout if PTYs are in use)."""
-
-class IStatusLogConsumer(Interface):
- """I am an object which can be passed to IStatusLog.subscribeConsumer().
- I represent a target for writing the contents of an IStatusLog. This
- differs from a regular IStatusReceiver in that it can pause the producer.
- This makes it more suitable for use in streaming data over network
- sockets, such as an HTTP request. Note that the consumer can only pause
- the producer until it has caught up with all the old data. After that
- point, C{pauseProducing} is ignored and all new output from the log is
- sent directly to the consumer."""
-
- def registerProducer(producer, streaming):
- """A producer is being hooked up to this consumer. The consumer only
- has to handle a single producer. It should send .pauseProducing and
- .resumeProducing messages to the producer when it wants to stop or
- resume the flow of data. 'streaming' will be set to True because the
- producer is always a PushProducer.
- """
-
- def unregisterProducer():
- """The previously-registered producer has been removed. No further
- pauseProducing or resumeProducing calls should be made. The consumer
- should delete its reference to the Producer so it can be released."""
-
- def writeChunk(chunk):
- """A chunk (i.e. a tuple of (channel, text)) is being written to the
- consumer."""
-
- def finish():
- """The log has finished sending chunks to the consumer."""
-
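A minimal sketch of a consumer satisfying this interface; it never asks the producer to pause, and the 'log' object passed to subscribeConsumer() is assumed to be an IStatusLog provider obtained elsewhere.

import sys

class PrintingLogConsumer:
    """Writes every chunk to stdout and never pauses the producer."""
    def registerProducer(self, producer, streaming):
        self.producer = producer
    def unregisterProducer(self):
        self.producer = None
    def writeChunk(self, chunk):
        channel, text = chunk   # channel: 0=stdout, 1=stderr, 2=header
        sys.stdout.write(text)
    def finish(self):
        print "-- log complete --"

log.subscribeConsumer(PrintingLogConsumer())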
-class IStatusReceiver(Interface):
- """I am an object which can receive build status updates. I may be
- subscribed to an IStatus, an IBuilderStatus, or an IBuildStatus."""
-
- def buildsetSubmitted(buildset):
- """A new BuildSet has been submitted to the buildmaster.
-
- @type buildset: implementor of L{IBuildSetStatus}
- """
-
- def builderAdded(builderName, builder):
- """
- A new Builder has just been added. This method may return an
- IStatusReceiver (probably 'self') which will be subscribed to receive
- builderChangedState and buildStarted/Finished events.
-
- @type builderName: string
- @type builder: L{buildbot.status.builder.BuilderStatus}
- @rtype: implementor of L{IStatusReceiver}
- """
-
- def builderChangedState(builderName, state):
- """Builder 'builderName' has changed state. The possible values for
- 'state' are 'offline', 'idle', and 'building'."""
-
- def buildStarted(builderName, build):
- """Builder 'builderName' has just started a build. The build is an
- object which implements IBuildStatus, and can be queried for more
- information.
-
- This method may return an IStatusReceiver (it could even return
- 'self'). If it does so, stepStarted and stepFinished methods will be
- invoked on the object for the steps of this one build. This is a
- convenient way to subscribe to all build steps without missing any.
- This receiver will automatically be unsubscribed when the build
- finishes.
-
- It can also return a tuple of (IStatusReceiver, interval), in which
- case buildETAUpdate messages are sent every 'interval' seconds, in
- addition to the stepStarted and stepFinished messages."""
-
- def buildETAUpdate(build, ETA):
- """This is a periodic update on the progress this Build has made
- towards completion."""
-
- def stepStarted(build, step):
- """A step has just started. 'step' is the IBuildStepStatus which
- represents the step: it can be queried for more information.
-
- This method may return an IStatusReceiver (it could even return
- 'self'). If it does so, logStarted and logFinished methods will be
- invoked on the object for logs created by this one step. This
- receiver will be automatically unsubscribed when the step finishes.
-
- Alternatively, the method may return a tuple of an IStatusReceiver
- and an integer named 'updateInterval'. In addition to
- logStarted/logFinished messages, it will also receive stepETAUpdate
- messages about every updateInterval seconds."""
-
- def stepETAUpdate(build, step, ETA, expectations):
- """This is a periodic update on the progress this Step has made
- towards completion. It gets an ETA (in seconds from the present) of
- when the step ought to be complete, and a list of expectation tuples
- (as returned by IBuildStepStatus.getExpectations) with more detailed
- information."""
-
- def logStarted(build, step, log):
- """A new Log has been started, probably because a step has just
- started running a shell command. 'log' is the IStatusLog object
- which can be queried for more information.
-
- This method may return an IStatusReceiver (such as 'self'), in which
- case the target's logChunk method will be invoked as text is added to
- the logfile. This receiver will automatically be unsubscribed when the
- log finishes."""
-
- def logChunk(build, step, log, channel, text):
- """Some text has been added to this log. 'channel' is 0, 1, or 2, as
- defined in IStatusLog.getChunks."""
-
- def logFinished(build, step, log):
- """A Log has been closed."""
-
- def stepFinished(build, step, results):
- """A step has just finished. 'results' is the result tuple described
- in IBuildStepStatus.getResults."""
-
- def buildFinished(builderName, build, results):
- """
- A build has just finished. 'results' is the result tuple described
- in L{IBuildStatus.getResults}.
-
- @type builderName: string
- @type build: L{buildbot.status.builder.BuildStatus}
- @type results: tuple
- """
-
- def builderRemoved(builderName):
- """The Builder has been removed."""
-
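An illustrative receiver implementing just the messages it cares about; returning 'self' from buildStarted() subscribes it to the step-level messages for that build, as described above. The final comment assumes the Status object's subscribe() method.

class ConsoleStatusReceiver:
    def builderChangedState(self, builderName, state):
        print builderName, "is now", state
    def buildStarted(self, builderName, build):
        print builderName, "started build #%d" % build.getNumber()
        return self   # also receive stepStarted/stepFinished for this build
    def stepStarted(self, build, step):
        print "  step '%s' started" % step.getName()
    def stepFinished(self, build, step, results):
        print "  step '%s' finished" % step.getName()
    def buildFinished(self, builderName, build, results):
        print builderName, "finished build #%d" % build.getNumber()

# e.g. master.getStatus().subscribe(ConsoleStatusReceiver())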
-class IControl(Interface):
- def addChange(change):
- """Add a change to all builders. Each Builder will decide for
- themselves whether the change is interesting or not, and may initiate
- a build as a result."""
-
- def submitBuildSet(buildset):
- """Submit a BuildSet object, which will eventually be run on all of
- the builders listed therein."""
-
- def getBuilder(name):
- """Retrieve the IBuilderControl object for the given Builder."""
-
-class IBuilderControl(Interface):
- def forceBuild(who, reason):
- """DEPRECATED, please use L{requestBuild} instead.
-
- Start a build of the latest sources. If 'who' is not None, it is a
- string with the name of the user who is responsible for starting the
- build: they will be added to the 'interested users' list (so they may
- be notified via email or another Status object when it finishes).
- 'reason' is a string describing why this user requested the build.
-
- The results of forced builds are always sent to the Interested Users,
- even if the Status object would normally only send results upon
- failures.
-
- forceBuild() may raise L{NoSlaveError} or L{BuilderInUseError} if it
- cannot start the build.
-
- forceBuild() returns a Deferred which fires with an L{IBuildControl}
- object that can be used to further control the new build, or from
- which an L{IBuildStatus} object can be obtained."""
-
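A sketch of driving the (deprecated, but fully documented) forceBuild() path described above; 'master' is assumed to be a BuildMaster instance (see the IControl adapter registered in master.py), and the builder name 'full' is purely illustrative.

from buildbot import interfaces

def build_started(build_control):
    return build_control.getStatus().waitUntilFinished()

builder_control = interfaces.IControl(master).getBuilder("full")
d = builder_control.forceBuild("me", "manual test of the waterfall")
d.addCallback(build_started)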
- def requestBuild(request):
- """Queue a L{buildbot.process.base.BuildRequest} object for later
- building."""
-
- def requestBuildSoon(request):
- """Submit a BuildRequest like requestBuild, but raise a
- L{buildbot.interfaces.NoSlaveError} if no slaves are currently
- available, so it cannot be used to queue a BuildRequest in the hopes
- that a slave will eventually connect. This method is appropriate for
- use by things like the web-page 'Force Build' button."""
-
- def resubmitBuild(buildStatus, reason="<rebuild, no reason given>"):
- """Rebuild something we've already built before. This submits a
- BuildRequest to our Builder using the same SourceStamp as the earlier
- build. This has no effect (but may eventually raise an exception) if
- this Build has not yet finished."""
-
- def getPendingBuilds():
- """Return a list of L{IBuildRequestControl} objects for this Builder.
- Each one corresponds to a pending build that has not yet started (due
- to a scarcity of build slaves). These upcoming builds can be canceled
- through the control object."""
-
- def getBuild(number):
- """Attempt to return an IBuildControl object for the given build.
- Returns None if no such object is available. This will only work for
- the build that is currently in progress: once the build finishes,
- there is nothing to control anymore."""
-
- def ping(timeout=30):
- """Attempt to contact the slave and see if it is still alive. This
- returns a Deferred which fires with either True (the slave is still
- alive) or False (the slave did not respond). As a side effect, adds
- an event to this builder's column in the waterfall display
- containing the results of the ping."""
- # TODO: this ought to live in ISlaveControl, maybe with disconnect()
- # or something. However the event that is emitted is most useful in
- # the Builder column, so it kinda fits here too.
-
-class IBuildRequestControl(Interface):
- def subscribe(observer):
- """Register a callable that will be invoked (with a single
- IBuildControl object) for each Build that is created to satisfy this
- request. There may be multiple Builds created in an attempt to handle
- the request: they may be interrupted by the user or abandoned due to
- a lost slave. The last Build (the one which actually gets to run to
- completion) is said to 'satisfy' the BuildRequest. The observer will
- be called once for each of these Builds, both old and new."""
- def unsubscribe(observer):
- """Unregister the callable that was registered with subscribe()."""
- def cancel():
- """Remove the build from the pending queue. Has no effect if the
- build has already been started."""
-
-class IBuildControl(Interface):
- def getStatus():
- """Return an IBuildStatus object for the Build that I control."""
- def stopBuild(reason="<no reason given>"):
- """Halt the build. This has no effect if the build has already
- finished."""
diff --git a/buildbot/buildbot-source/buildbot/locks.py b/buildbot/buildbot-source/buildbot/locks.py
deleted file mode 100644
index a5ae40b93..000000000
--- a/buildbot/buildbot-source/buildbot/locks.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# -*- test-case-name: buildbot.test.test_locks -*-
-
-from twisted.python import log
-from twisted.internet import reactor, defer
-from buildbot import util
-
-class BaseLock:
- owner = None
- description = "<BaseLock>"
-
- def __init__(self, name):
- self.name = name
- self.waiting = []
-
- def __repr__(self):
- return self.description
-
- def isAvailable(self):
- log.msg("%s isAvailable: self.owner=%s" % (self, self.owner))
- return not self.owner
-
- def claim(self, owner):
- log.msg("%s claim(%s)" % (self, owner))
- assert owner is not None
- self.owner = owner
- log.msg(" %s is claimed" % (self,))
-
- def release(self, owner):
- log.msg("%s release(%s)" % (self, owner))
- assert owner is self.owner
- self.owner = None
- reactor.callLater(0, self.nowAvailable)
-
- def waitUntilAvailable(self, owner):
- log.msg("%s waitUntilAvailable(%s)" % (self, owner))
- assert self.owner, "You aren't supposed to call this on a free Lock"
- d = defer.Deferred()
- self.waiting.append((d, owner))
- return d
-
- def nowAvailable(self):
- log.msg("%s nowAvailable" % self)
- assert not self.owner
- if not self.waiting:
- return
- d,owner = self.waiting.pop(0)
- d.callback(self)
-
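An illustrative walk through the claim/release protocol above, assuming a running reactor so the callLater(0) in release() can fire.

lock = BaseLock("compile")
lock.claim("builder-A")

def claim_when_free(free_lock, owner):
    free_lock.claim(owner)
    print owner, "now owns the lock"

d = lock.waitUntilAvailable("builder-B")
d.addCallback(claim_when_free, "builder-B")
lock.release("builder-A")   # the waiter is woken on the next reactor turn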
-class RealMasterLock(BaseLock):
- def __init__(self, name):
- BaseLock.__init__(self, name)
- self.description = "<MasterLock(%s)>" % (name,)
-
- def getLock(self, slave):
- return self
-
-class RealSlaveLock(BaseLock):
- def __init__(self, name):
- BaseLock.__init__(self, name)
- self.description = "<SlaveLock(%s)>" % (name,)
- self.locks = {}
-
- def getLock(self, slavebuilder):
- slavename = slavebuilder.slave.slavename
- if not self.locks.has_key(slavename):
- lock = self.locks[slavename] = BaseLock(self.name)
- lock.description = "<SlaveLock(%s)[%s] %d>" % (self.name,
- slavename,
- id(lock))
- self.locks[slavename] = lock
- return self.locks[slavename]
-
-
-# master.cfg should only reference the following MasterLock and SlaveLock
-# classes. They are identifiers that will be turned into real Locks later,
-# via the BotMaster.getLockByID method.
-
-class MasterLock(util.ComparableMixin):
- compare_attrs = ['name']
- lockClass = RealMasterLock
- def __init__(self, name):
- self.name = name
-
-class SlaveLock(util.ComparableMixin):
- compare_attrs = ['name']
- lockClass = RealSlaveLock
- def __init__(self, name):
- self.name = name
-
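A hypothetical master.cfg fragment showing the intended usage: builders reference a MasterLock identifier through the 'locks' key that BuildMaster.loadConfig() validates. The factory objects (f_linux, f_win32) and the config dict 'c' are placeholders.

from buildbot.locks import MasterLock

upload_lock = MasterLock("upload")
c['builders'] = [
    {'name': 'linux', 'slavename': 'bot1', 'builddir': 'linux',
     'factory': f_linux, 'locks': [upload_lock]},
    {'name': 'win32', 'slavename': 'bot2', 'builddir': 'win32',
     'factory': f_win32, 'locks': [upload_lock]},
]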
diff --git a/buildbot/buildbot-source/buildbot/master.py b/buildbot/buildbot-source/buildbot/master.py
deleted file mode 100644
index 784807bd9..000000000
--- a/buildbot/buildbot-source/buildbot/master.py
+++ /dev/null
@@ -1,1066 +0,0 @@
-# -*- test-case-name: buildbot.test.test_run -*-
-
-from __future__ import generators
-import string, sys, os, time, warnings
-try:
- import signal
-except ImportError:
- signal = None
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-from twisted.python import log, usage, components
-from twisted.internet import defer, reactor
-from twisted.spread import pb
-from twisted.cred import portal, checkers
-from twisted.application import service, strports
-from twisted.persisted import styles
-from twisted.manhole import telnet
-
-# sibling imports
-from buildbot import util
-from buildbot.twcompat import implements
-from buildbot.util import now
-from buildbot.pbutil import NewCredPerspective
-from buildbot.process.builder import Builder, IDLE
-from buildbot.status.builder import BuilderStatus, SlaveStatus, Status
-from buildbot.changes.changes import Change, ChangeMaster
-from buildbot import interfaces
-
-########################################
-
-
-
-
-class BotPerspective(NewCredPerspective):
- """This is the master-side representative for a remote buildbot slave.
- There is exactly one for each slave described in the config file (the
- c['bots'] list). When buildbots connect in (.attach), they get a
- reference to this instance. The BotMaster object is stashed as the
- .service attribute."""
-
- slave_commands = None
-
- def __init__(self, name):
- self.slavename = name
- self.slave_status = SlaveStatus(name)
- self.builders = [] # list of b.p.builder.Builder instances
- self.slave = None # a RemoteReference to the Bot, when connected
-
- def addBuilder(self, builder):
- """Called to add a builder after the slave has connected.
-
- @return: a Deferred that indicates when an attached slave has
- accepted the new builder."""
-
- self.builders.append(builder)
- if self.slave:
- return self.sendBuilderList()
- return defer.succeed(None)
-
- def removeBuilder(self, builder):
- """Tell the slave that the given builder has been removed, allowing
- it to discard the associated L{buildbot.slave.bot.SlaveBuilder}
- object.
-
- @return: a Deferred that fires when the slave has finished removing
- the SlaveBuilder
- """
- self.builders.remove(builder)
- if self.slave:
- builder.detached(self)
- return self.sendBuilderList()
- return defer.succeed(None)
-
- def __repr__(self):
- return "<BotPerspective '%s', builders: %s>" % \
- (self.slavename,
- string.join(map(lambda b: b.name, self.builders), ','))
-
- def attached(self, mind):
- """This is called when the slave connects.
-
- @return: a Deferred that fires with a suitable pb.IPerspective to
- give to the slave (i.e. 'self')"""
-
- if self.slave:
- # uh-oh, we've got a duplicate slave. The most likely
- # explanation is that the slave is behind a slow link, thinks we
- # went away, and has attempted to reconnect, so we've got two
- # "connections" from the same slave, but the previous one is
- # stale. Give the new one precedence.
- log.msg("duplicate slave %s replacing old one" % self.slavename)
-
- # just in case we've got two identically-configured slaves,
- # report the IP addresses of both so someone can resolve the
- # squabble
- tport = self.slave.broker.transport
- log.msg("old slave was connected from", tport.getPeer())
- log.msg("new slave is from", mind.broker.transport.getPeer())
- d = self.disconnect()
- d.addCallback(lambda res: self._attached(mind))
- return d
-
- return self._attached(mind)
-
- def disconnect(self):
- if not self.slave:
- return defer.succeed(None)
- log.msg("disconnecting old slave %s now" % self.slavename)
-
- # all kinds of teardown will happen as a result of
- # loseConnection(), but it happens after a reactor iteration or
- # two. Hook the actual disconnect so we can know when it is safe
- # to connect the new slave. We have to wait one additional
- # iteration (with callLater(0)) to make sure the *other*
- # notifyOnDisconnect handlers have had a chance to run.
- d = defer.Deferred()
-
- self.slave.notifyOnDisconnect(lambda res: # TODO: d=d ?
- reactor.callLater(0, d.callback, None))
- tport = self.slave.broker.transport
- # this is the polite way to request that a socket be closed
- tport.loseConnection()
- try:
- # but really we don't want to wait for the transmit queue to
- # drain. The remote end is unlikely to ACK the data, so we'd
- # probably have to wait for a (20-minute) TCP timeout.
- #tport._closeSocket()
- # however, doing _closeSocket (whether before or after
- # loseConnection) somehow prevents the notifyOnDisconnect
- # handlers from being run. Bummer.
- tport.offset = 0
- tport.dataBuffer = ""
- pass
- except:
- # however, these hacks are pretty internal, so don't blow up if
- # they fail or are unavailable
- log.msg("failed to accelerate the shutdown process")
- pass
- log.msg("waiting for slave to finish disconnecting")
-
- # When this Deferred fires, we'll be ready to accept the new slave
- return d
-
- def _attached(self, mind):
- """We go through a sequence of calls, gathering information, then
- tell our Builders that they have a slave to work with.
-
- @return: a Deferred that fires (with 'self') when our Builders are
- prepared to deal with the slave.
- """
- self.slave = mind
- d = self.slave.callRemote("print", "attached")
- d.addErrback(lambda why: 0)
- self.slave_status.connected = True
- log.msg("bot attached")
-
- # TODO: there is a window here (while we're retrieving slaveinfo)
- # during which a disconnect or a duplicate-slave will be confusing
- d.addCallback(lambda res: self.slave.callRemote("getSlaveInfo"))
- d.addCallbacks(self.got_info, self.infoUnavailable)
- d.addCallback(self._attached2)
- d.addCallback(lambda res: self)
- return d
-
- def got_info(self, info):
- log.msg("Got slaveinfo from '%s'" % self.slavename)
- # TODO: info{} might have other keys
- self.slave_status.admin = info.get("admin")
- self.slave_status.host = info.get("host")
-
- def infoUnavailable(self, why):
- # maybe an old slave, doesn't implement remote_getSlaveInfo
- log.msg("BotPerspective.infoUnavailable")
- log.err(why)
-
- def _attached2(self, res):
- d = self.slave.callRemote("getCommands")
- d.addCallback(self.got_commands)
- d.addErrback(self._commandsUnavailable)
- d.addCallback(self._attached3)
- return d
-
- def got_commands(self, commands):
- self.slave_commands = commands
-
- def _commandsUnavailable(self, why):
- # probably an old slave
- log.msg("BotPerspective._commandsUnavailable")
- if why.check(AttributeError):
- return
- log.err(why)
-
- def _attached3(self, res):
- d = self.slave.callRemote("getDirs")
- d.addCallback(self.got_dirs)
- d.addErrback(self._dirsFailed)
- d.addCallback(self._attached4)
- return d
-
- def got_dirs(self, dirs):
- wanted = map(lambda b: b.builddir, self.builders)
- unwanted = []
- for d in dirs:
- if d not in wanted and d != "info":
- unwanted.append(d)
- if unwanted:
- log.msg("slave %s has leftover directories (%s): " % \
- (self.slavename, string.join(unwanted, ',')) + \
- "you can delete them now")
-
- def _dirsFailed(self, why):
- log.msg("BotPerspective._dirsFailed")
- log.err(why)
-
- def _attached4(self, res):
- return self.sendBuilderList()
-
- def sendBuilderList(self):
- # now make sure their list of Builders matches ours
- blist = []
- for b in self.builders:
- blist.append((b.name, b.builddir))
- d = self.slave.callRemote("setBuilderList", blist)
- d.addCallback(self.list_done)
- d.addErrback(self._listFailed)
- return d
-
- def list_done(self, blist):
- # this could come back at weird times. be prepared to handle oddness
- dl = []
- for name, remote in blist.items():
- for b in self.builders:
- if b.name == name:
- # if we sent the builders list because of a config
- # change, the Builder might already be attached.
- # Builder.attached will ignore us if this happens.
- d = b.attached(self, remote, self.slave_commands)
- dl.append(d)
- continue
- return defer.DeferredList(dl)
-
- def _listFailed(self, why):
- log.msg("BotPerspective._listFailed")
- log.err(why)
- # TODO: hang up on them, without setBuilderList we can't use them
-
- def perspective_forceBuild(self, name, who=None):
- # slave admins are allowed to force any of their own builds
- for b in self.builders:
- if name == b.name:
- try:
- b.forceBuild(who, "slave requested build")
- return "ok, starting build"
- except interfaces.BuilderInUseError:
- return "sorry, builder was in use"
- except interfaces.NoSlaveError:
- return "sorry, there is no slave to run the build"
- else:
- log.msg("slave requested build for unknown builder '%s'" % name)
- return "sorry, invalid builder name"
-
- def perspective_keepalive(self):
- pass
-
- def detached(self, mind):
- self.slave = None
- self.slave_status.connected = False
- for b in self.builders:
- b.detached(self)
- log.msg("Botmaster.detached(%s)" % self.slavename)
-
-
-class BotMaster(service.Service):
-
- """This is the master-side service which manages remote buildbot slaves.
- It provides them with BotPerspectives, and distributes file change
- notification messages to them.
- """
-
- debug = 0
-
- def __init__(self):
- self.builders = {}
- self.builderNames = []
- # builders maps Builder names to instances of bb.p.builder.Builder,
- # which is the master-side object that defines and controls a build.
- # They are added by calling botmaster.addBuilder() from the startup
- # code.
-
- # self.slaves contains a ready BotPerspective instance for each
- # potential buildslave, i.e. all the ones listed in the config file.
- # If the slave is connected, self.slaves[slavename].slave will
- # contain a RemoteReference to their Bot instance. If it is not
- # connected, that attribute will hold None.
- self.slaves = {} # maps slavename to BotPerspective
- self.statusClientService = None
- self.watchers = {}
-
- # self.locks holds the real Lock instances
- self.locks = {}
-
- # these four are convenience functions for testing
-
- def waitUntilBuilderAttached(self, name):
- b = self.builders[name]
- #if b.slaves:
- # return defer.succeed(None)
- d = defer.Deferred()
- b.watchers['attach'].append(d)
- return d
-
- def waitUntilBuilderDetached(self, name):
- b = self.builders.get(name)
- if not b or not b.slaves:
- return defer.succeed(None)
- d = defer.Deferred()
- b.watchers['detach'].append(d)
- return d
-
- def waitUntilBuilderFullyDetached(self, name):
- b = self.builders.get(name)
- # TODO: this looks too deeply inside the Builder object
- if not b or not b.slaves:
- return defer.succeed(None)
- d = defer.Deferred()
- b.watchers['detach_all'].append(d)
- return d
-
- def waitUntilBuilderIdle(self, name):
- b = self.builders[name]
- # TODO: this looks way too deeply inside the Builder object
- for sb in b.slaves:
- if sb.state != IDLE:
- d = defer.Deferred()
- b.watchers['idle'].append(d)
- return d
- return defer.succeed(None)
-
-
- def addSlave(self, slavename):
- slave = BotPerspective(slavename)
- self.slaves[slavename] = slave
-
- def removeSlave(self, slavename):
- d = self.slaves[slavename].disconnect()
- del self.slaves[slavename]
- return d
-
- def getBuildernames(self):
- return self.builderNames
-
- def addBuilder(self, builder):
- """This is called by the setup code to define what builds should be
- performed. Each Builder object has a build slave that should host
- that build: the builds cannot be done until the right slave
- connects.
-
- @return: a Deferred that fires when an attached slave has accepted
- the new builder.
- """
-
- if self.debug: print "addBuilder", builder
- log.msg("Botmaster.addBuilder(%s)" % builder.name)
-
- if builder.name in self.builderNames:
- raise KeyError("muliply defined builder '%s'" % builder.name)
- for slavename in builder.slavenames:
- if not self.slaves.has_key(slavename):
- raise KeyError("builder %s uses undefined slave %s" % \
- (builder.name, slavename))
-
- self.builders[builder.name] = builder
- self.builderNames.append(builder.name)
- builder.setBotmaster(self)
-
- dl = [self.slaves[slavename].addBuilder(builder)
- for slavename in builder.slavenames]
- return defer.DeferredList(dl)
-
- def removeBuilder(self, builder):
- """Stop using a Builder.
- This removes the Builder from the list of active Builders.
-
- @return: a Deferred that fires when an attached slave has finished
- removing the SlaveBuilder
- """
- if self.debug: print "removeBuilder", builder
- log.msg("Botmaster.removeBuilder(%s)" % builder.name)
- b = self.builders[builder.name]
- del self.builders[builder.name]
- self.builderNames.remove(builder.name)
- for slavename in builder.slavenames:
- slave = self.slaves.get(slavename)
- if slave:
- return slave.removeBuilder(builder)
- return defer.succeed(None)
-
- def getPerspective(self, slavename):
- return self.slaves[slavename]
-
- def shutdownSlaves(self):
- # TODO: make this into a bot method rather than a builder method
- for b in self.slaves.values():
- b.shutdownSlave()
-
- def stopService(self):
- for b in self.builders.values():
- b.builder_status.addPointEvent(["master", "shutdown"])
- b.builder_status.saveYourself()
- return service.Service.stopService(self)
-
- def getLockByID(self, lockid):
- """Convert a Lock identifier into an actual Lock instance.
- @param lockid: a locks.MasterLock or locks.SlaveLock instance
- @return: a locks.RealMasterLock or locks.RealSlaveLock instance
- """
- k = (lockid.__class__, lockid.name)
- if not k in self.locks:
- self.locks[k] = lockid.lockClass(lockid.name)
- return self.locks[k]
-
-########################################
-
-class Manhole(service.MultiService, util.ComparableMixin):
- compare_attrs = ["port", "username", "password"]
-
- def __init__(self, port, username, password):
- service.MultiService.__init__(self)
- if type(port) is int:
- port = "tcp:%d" % port
- self.port = port
- self.username = username
- self.password = password
- self.f = f = telnet.ShellFactory()
- f.username = username
- f.password = password
- s = strports.service(port, f)
- s.setServiceParent(self)
-
- def startService(self):
- log.msg("Manhole listening on port %s" % self.port)
- service.MultiService.startService(self)
- master = self.parent
- self.f.namespace['master'] = master
- self.f.namespace['status'] = master.getStatus()
-
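A hypothetical master.cfg fragment enabling this Manhole; the port and credentials are placeholders. Once connected via telnet, the namespace exposes 'master' and 'status', as set up in startService() above.

from buildbot.master import Manhole
c['manhole'] = Manhole(9999, "admin", "sekrit")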
-class DebugPerspective(NewCredPerspective):
- def attached(self, mind):
- return self
- def detached(self, mind):
- pass
-
- def perspective_forceBuild(self, buildername, who=None):
- c = interfaces.IControl(self.master)
- bc = c.getBuilder(buildername)
- bc.forceBuild(who, "debug tool 'Force Build' button pushed")
-
- def perspective_fakeChange(self, file, revision=None, who="fakeUser",
- branch=None):
- change = Change(who, [file], "some fake comments\n",
- branch=branch, revision=revision)
- c = interfaces.IControl(self.master)
- c.addChange(change)
-
- def perspective_setCurrentState(self, buildername, state):
- builder = self.botmaster.builders.get(buildername)
- if not builder: return
- if state == "offline":
- builder.statusbag.currentlyOffline()
- if state == "idle":
- builder.statusbag.currentlyIdle()
- if state == "waiting":
- builder.statusbag.currentlyWaiting(now()+10)
- if state == "building":
- builder.statusbag.currentlyBuilding(None)
- def perspective_reload(self):
- print "doing reload of the config file"
- self.master.loadTheConfigFile()
- def perspective_pokeIRC(self):
- print "saying something on IRC"
- from buildbot.status import words
- for s in self.master:
- if isinstance(s, words.IRC):
- bot = s.f
- for channel in bot.channels:
- print " channel", channel
- bot.p.msg(channel, "Ow, quit it")
-
- def perspective_print(self, msg):
- print "debug", msg
-
-class Dispatcher(styles.Versioned):
- if implements:
- implements(portal.IRealm)
- else:
- __implements__ = portal.IRealm,
- persistenceVersion = 2
-
- def __init__(self):
- self.names = {}
-
- def upgradeToVersion1(self):
- self.master = self.botmaster.parent
- def upgradeToVersion2(self):
- self.names = {}
-
- def register(self, name, afactory):
- self.names[name] = afactory
- def unregister(self, name):
- del self.names[name]
-
- def requestAvatar(self, avatarID, mind, interface):
- assert interface == pb.IPerspective
- afactory = self.names.get(avatarID)
- if afactory:
- p = afactory.getPerspective()
- elif avatarID == "debug":
- p = DebugPerspective()
- p.master = self.master
- p.botmaster = self.botmaster
- elif avatarID == "statusClient":
- p = self.statusClientService.getPerspective()
- else:
- # it must be one of the buildslaves: no other names will make it
- # past the checker
- p = self.botmaster.getPerspective(avatarID)
-
- if not p:
- raise ValueError("no perspective for '%s'" % avatarID)
-
- d = defer.maybeDeferred(p.attached, mind)
- d.addCallback(self._avatarAttached, mind)
- return d
-
- def _avatarAttached(self, p, mind):
- return (pb.IPerspective, p, lambda p=p,mind=mind: p.detached(mind))
-
-########################################
-
-# service hierarchy:
-# BuildMaster
-# BotMaster
-# ChangeMaster
-# all IChangeSource objects
-# StatusClientService
-# TCPClient(self.ircFactory)
-# TCPServer(self.slaveFactory) -> dispatcher.requestAvatar
-# TCPServer(self.site)
-# UNIXServer(ResourcePublisher(self.site))
-
-
-class BuildMaster(service.MultiService, styles.Versioned):
- debug = 0
- persistenceVersion = 3
- manhole = None
- debugPassword = None
- projectName = "(unspecified)"
- projectURL = None
- buildbotURL = None
- change_svc = None
-
- def __init__(self, basedir, configFileName="master.cfg"):
- service.MultiService.__init__(self)
- self.setName("buildmaster")
- self.basedir = basedir
- self.configFileName = configFileName
-
- # the dispatcher is the realm in which all inbound connections are
- # looked up: slave builders, change notifications, status clients, and
- # the debug port
- dispatcher = Dispatcher()
- dispatcher.master = self
- self.dispatcher = dispatcher
- self.checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
- # the checker starts with no user/passwd pairs: they are added later
- p = portal.Portal(dispatcher)
- p.registerChecker(self.checker)
- self.slaveFactory = pb.PBServerFactory(p)
- self.slaveFactory.unsafeTracebacks = True # let them see exceptions
-
- self.slavePortnum = None
- self.slavePort = None
-
- self.botmaster = BotMaster()
- self.botmaster.setName("botmaster")
- self.botmaster.setServiceParent(self)
- dispatcher.botmaster = self.botmaster
-
- self.status = Status(self.botmaster, self.basedir)
-
- self.statusTargets = []
-
- self.bots = []
- # this ChangeMaster is a dummy, only used by tests. In the real
- # buildmaster, where the BuildMaster instance is activated
- # (startService is called) by twistd, this attribute is overwritten.
- self.useChanges(ChangeMaster())
-
- self.readConfig = False
-
- def upgradeToVersion1(self):
- self.dispatcher = self.slaveFactory.root.portal.realm
-
- def upgradeToVersion2(self): # post-0.4.3
- self.webServer = self.webTCPPort
- del self.webTCPPort
- self.webDistribServer = self.webUNIXPort
- del self.webUNIXPort
- self.configFileName = "master.cfg"
-
- def upgradeToVersion3(self):
- # post 0.6.3, solely to deal with the 0.6.3 breakage. Starting with
- # 0.6.5 I intend to do away with .tap files altogether
- self.services = []
- self.namedServices = {}
- del self.change_svc
-
- def startService(self):
- service.MultiService.startService(self)
- self.loadChanges() # must be done before loading the config file
- if not self.readConfig:
- # TODO: consider catching exceptions during this call to
- # loadTheConfigFile and bailing (reactor.stop) if it fails,
- # since without a config file we can't do anything except reload
- # the config file, and it would be nice for the user to discover
- # this quickly.
- self.loadTheConfigFile()
- if signal and hasattr(signal, "SIGHUP"):
- signal.signal(signal.SIGHUP, self._handleSIGHUP)
- for b in self.botmaster.builders.values():
- b.builder_status.addPointEvent(["master", "started"])
- b.builder_status.saveYourself()
-
- def useChanges(self, changes):
- if self.change_svc:
- # TODO: can return a Deferred
- self.change_svc.disownServiceParent()
- self.change_svc = changes
- self.change_svc.basedir = self.basedir
- self.change_svc.setName("changemaster")
- self.dispatcher.changemaster = self.change_svc
- self.change_svc.setServiceParent(self)
-
- def loadChanges(self):
- filename = os.path.join(self.basedir, "changes.pck")
- try:
- changes = pickle.load(open(filename, "rb"))
- styles.doUpgrade()
- except IOError:
- log.msg("changes.pck missing, using new one")
- changes = ChangeMaster()
- except EOFError:
- log.msg("corrupted changes.pck, using new one")
- changes = ChangeMaster()
- self.useChanges(changes)
-
- def _handleSIGHUP(self, *args):
- reactor.callLater(0, self.loadTheConfigFile)
-
- def getStatus(self):
- """
- @rtype: L{buildbot.status.builder.Status}
- """
- return self.status
-
- def loadTheConfigFile(self, configFile=None):
- if not configFile:
- configFile = os.path.join(self.basedir, self.configFileName)
-
- log.msg("loading configuration from %s" % configFile)
- configFile = os.path.expanduser(configFile)
-
- try:
- f = open(configFile, "r")
- except IOError, e:
- log.msg("unable to open config file '%s'" % configFile)
- log.msg("leaving old configuration in place")
- log.err(e)
- return
-
- try:
- self.loadConfig(f)
- except:
- log.msg("error during loadConfig")
- log.err()
- f.close()
-
- def loadConfig(self, f):
- """Internal function to load a specific configuration file. Any
- errors in the file will be signalled by raising an exception.
-
- @return: a Deferred that will fire (with None) when the configuration
- changes have been completed. This may involve a round-trip to each
- buildslave that was involved."""
-
- localDict = {'basedir': os.path.expanduser(self.basedir)}
- try:
- exec f in localDict
- except:
- log.msg("error while parsing config file")
- raise
-
- try:
- config = localDict['BuildmasterConfig']
- except KeyError:
- log.err("missing config dictionary")
- log.err("config file must define BuildmasterConfig")
- raise
-
- known_keys = "bots sources schedulers builders slavePortnum " + \
- "debugPassword manhole " + \
- "status projectName projectURL buildbotURL"
- known_keys = known_keys.split()
- for k in config.keys():
- if k not in known_keys:
- log.msg("unknown key '%s' defined in config dictionary" % k)
-
- try:
- # required
- bots = config['bots']
- sources = config['sources']
- schedulers = config['schedulers']
- builders = config['builders']
- slavePortnum = config['slavePortnum']
-
- # optional
- debugPassword = config.get('debugPassword')
- manhole = config.get('manhole')
- status = config.get('status', [])
- projectName = config.get('projectName')
- projectURL = config.get('projectURL')
- buildbotURL = config.get('buildbotURL')
-
- except KeyError, e:
- log.msg("config dictionary is missing a required parameter")
- log.msg("leaving old configuration in place")
- raise
-
- # do some validation first
- for name, passwd in bots:
- if name in ("debug", "change", "status"):
- raise KeyError, "reserved name '%s' used for a bot" % name
- if config.has_key('interlocks'):
- raise KeyError("c['interlocks'] is no longer accepted")
-
- assert isinstance(sources, (list, tuple))
- for s in sources:
- assert interfaces.IChangeSource(s, None)
- # this assertion catches c['schedulers'] = Scheduler(), since
- # Schedulers are service.MultiServices and thus iterable.
- assert isinstance(schedulers, (list, tuple))
- for s in schedulers:
- assert interfaces.IScheduler(s, None)
- assert isinstance(status, (list, tuple))
- for s in status:
- assert interfaces.IStatusReceiver(s, None)
-
- slavenames = [name for name,pw in bots]
- buildernames = []
- dirnames = []
- for b in builders:
- if type(b) is tuple:
- raise ValueError("builder %s must be defined with a dict, "
- "not a tuple" % b[0])
- if b.has_key('slavename') and b['slavename'] not in slavenames:
- raise ValueError("builder %s uses undefined slave %s" \
- % (b['name'], b['slavename']))
- for n in b.get('slavenames', []):
- if n not in slavenames:
- raise ValueError("builder %s uses undefined slave %s" \
- % (b['name'], n))
- if b['name'] in buildernames:
- raise ValueError("duplicate builder name %s"
- % b['name'])
- buildernames.append(b['name'])
- if b['builddir'] in dirnames:
- raise ValueError("builder %s reuses builddir %s"
- % (b['name'], b['builddir']))
- dirnames.append(b['builddir'])
-
- for s in schedulers:
- for b in s.listBuilderNames():
- assert b in buildernames, \
- "%s uses unknown builder %s" % (s, b)
-
- # assert that all locks used by the Builds and their Steps are
- # uniquely named.
- locks = {}
- for b in builders:
- for l in b.get('locks', []):
- if locks.has_key(l.name):
- if locks[l.name] is not l:
- raise ValueError("Two different locks (%s and %s) "
- "share the name %s"
- % (l, locks[l.name], l.name))
- else:
- locks[l.name] = l
- # TODO: this will break with any BuildFactory that doesn't use a
- # .steps list, but I think the verification step is more
- # important.
- for s in b['factory'].steps:
- for l in s[1].get('locks', []):
- if locks.has_key(l.name):
- if locks[l.name] is not l:
- raise ValueError("Two different locks (%s and %s)"
- " share the name %s"
- % (l, locks[l.name], l.name))
- else:
- locks[l.name] = l
-
- # slavePortnum is supposed to be a strports specification
- if type(slavePortnum) is int:
- slavePortnum = "tcp:%d" % slavePortnum
-
- # now we're committed to implementing the new configuration, so do
- # it atomically
- # TODO: actually, this is spread across a couple of Deferreds, so it
- # really isn't atomic.
-
- d = defer.succeed(None)
-
- self.projectName = projectName
- self.projectURL = projectURL
- self.buildbotURL = buildbotURL
-
- # self.bots: Disconnect any that were attached and removed from the
- # list. Update self.checker with the new list of passwords,
- # including debug/change/status.
- d.addCallback(lambda res: self.loadConfig_Slaves(bots))
-
- # self.debugPassword
- if debugPassword:
- self.checker.addUser("debug", debugPassword)
- self.debugPassword = debugPassword
-
- # self.manhole
- if manhole != self.manhole:
- # changing
- if self.manhole:
- # disownServiceParent may return a Deferred
- d.addCallback(lambda res: self.manhole.disownServiceParent())
- self.manhole = None
- if manhole:
- self.manhole = manhole
- manhole.setServiceParent(self)
-
- # add/remove self.botmaster.builders to match builders. The
- # botmaster will handle startup/shutdown issues.
- d.addCallback(lambda res: self.loadConfig_Builders(builders))
-
- d.addCallback(lambda res: self.loadConfig_status(status))
-
- # Schedulers are added after Builders in case they start right away
- d.addCallback(lambda res: self.loadConfig_Schedulers(schedulers))
- # and Sources go after Schedulers for the same reason
- d.addCallback(lambda res: self.loadConfig_Sources(sources))
-
- # self.slavePort
- if self.slavePortnum != slavePortnum:
- if self.slavePort:
- def closeSlavePort(res):
- d1 = self.slavePort.disownServiceParent()
- self.slavePort = None
- return d1
- d.addCallback(closeSlavePort)
- if slavePortnum is not None:
- def openSlavePort(res):
- self.slavePort = strports.service(slavePortnum,
- self.slaveFactory)
- self.slavePort.setServiceParent(self)
- d.addCallback(openSlavePort)
- log.msg("BuildMaster listening on port %s" % slavePortnum)
- self.slavePortnum = slavePortnum
-
- log.msg("configuration update started")
- d.addCallback(lambda res: log.msg("configuration update complete"))
- self.readConfig = True # TODO: consider not setting this until the
- # Deferred fires.
- return d
-
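A minimal, hypothetical master.cfg covering only the required keys checked above (bots, sources, schedulers, builders, slavePortnum). The constructor arguments for PBChangeSource, Scheduler, BuildFactory, and ShellCommand follow this tree's 0.7-era APIs but are illustrative assumptions rather than authoritative.

from buildbot.changes.pb import PBChangeSource
from buildbot.scheduler import Scheduler
from buildbot.process import factory, step

# steps are (StepClass, kwargs) tuples, as assumed by the lock check above
f = factory.BuildFactory([(step.ShellCommand, {'command': ["make", "test"]})])

BuildmasterConfig = c = {}
c['bots'] = [("bot1", "bot1passwd")]
c['sources'] = [PBChangeSource()]
c['schedulers'] = [Scheduler("quick", None, 60, ["linux"])]
c['builders'] = [{'name': 'linux', 'slavename': 'bot1',
                  'builddir': 'linux', 'factory': f}]
c['slavePortnum'] = 9989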
- def loadConfig_Slaves(self, bots):
- # set up the Checker with the names and passwords of all valid bots
- self.checker.users = {} # violates abstraction, oh well
- for user, passwd in bots:
- self.checker.addUser(user, passwd)
- self.checker.addUser("change", "changepw")
-
- # identify new/old bots
- old = self.bots; oldnames = [name for name,pw in old]
- new = bots; newnames = [name for name,pw in new]
- # removeSlave will hang up on the old bot
- dl = [self.botmaster.removeSlave(name)
- for name in oldnames if name not in newnames]
- [self.botmaster.addSlave(name)
- for name in newnames if name not in oldnames]
-
- # all done
- self.bots = bots
- return defer.DeferredList(dl, fireOnOneErrback=1, consumeErrors=0)
-
- def loadConfig_Sources(self, sources):
- log.msg("loadConfig_Sources, change_svc is", self.change_svc,
- self.change_svc.parent)
- # shut down any that were removed, start any that were added
- deleted_sources = [s for s in self.change_svc if s not in sources]
- added_sources = [s for s in sources if s not in self.change_svc]
- dl = [self.change_svc.removeSource(s) for s in deleted_sources]
- def addNewOnes(res):
- [self.change_svc.addSource(s) for s in added_sources]
- d = defer.DeferredList(dl, fireOnOneErrback=1, consumeErrors=0)
- d.addCallback(addNewOnes)
- return d
-
- def allSchedulers(self):
- # TODO: when twisted-1.3 compatibility is dropped, switch to the
- # providedBy form, because it's faster (no actual adapter lookup)
- return [child for child in self
- #if interfaces.IScheduler.providedBy(child)]
- if interfaces.IScheduler(child, None)]
-
-
- def loadConfig_Schedulers(self, newschedulers):
- oldschedulers = self.allSchedulers()
- removed = [s for s in oldschedulers if s not in newschedulers]
- added = [s for s in newschedulers if s not in oldschedulers]
- dl = [defer.maybeDeferred(s.disownServiceParent) for s in removed]
- def addNewOnes(res):
- for s in added:
- s.setServiceParent(self)
- d = defer.DeferredList(dl, fireOnOneErrback=1)
- d.addCallback(addNewOnes)
- return d
-
- def loadConfig_Builders(self, newBuilders):
- dl = []
- old = self.botmaster.getBuildernames()
- newNames = []
- newList = {}
- for data in newBuilders:
- name = data['name']
- newList[name] = data
- newNames.append(name)
-
- # identify all that were removed
- for old in self.botmaster.builders.values()[:]:
- if old.name not in newList.keys():
- log.msg("removing old builder %s" % old.name)
- d = self.botmaster.removeBuilder(old)
- dl.append(d)
- # announce the change
- self.status.builderRemoved(old.name)
-
- # everything in newList is either unchanged, changed, or new
- for newName, data in newList.items():
- old = self.botmaster.builders.get(newName)
- name = data['name']
- basedir = data['builddir'] # used on both master and slave
- #name, slave, builddir, factory = data
- if not old: # new
- # category added after 0.6.2
- category = data.get('category', None)
- log.msg("adding new builder %s for category %s" %
- (name, category))
- statusbag = self.status.builderAdded(name, basedir, category)
- builder = Builder(data, statusbag)
- d = self.botmaster.addBuilder(builder)
- dl.append(d)
- else:
- diffs = old.compareToSetup(data)
- if not diffs: # unchanged: leave it alone
- log.msg("builder %s is unchanged" % name)
- pass
- else:
- # changed: remove and re-add. Don't touch the statusbag
- # object: the clients won't see a remove/add cycle
- log.msg("updating builder %s: %s" % (name,
- "\n".join(diffs)))
- # TODO: if the basedir was changed, we probably need to
- # make a new statusbag
- # TODO: if a slave is connected and we're re-using the
- # same slave, try to avoid a disconnect/reconnect cycle.
- statusbag = old.builder_status
- statusbag.saveYourself() # seems like a good idea
- d = self.botmaster.removeBuilder(old)
- dl.append(d)
- builder = Builder(data, statusbag)
- # point out that the builder was updated
- statusbag.addPointEvent(["config", "updated"])
- d = self.botmaster.addBuilder(builder)
- dl.append(d)
- # now that everything is up-to-date, make sure the names are in the
- # desired order
- self.botmaster.builderNames = newNames
- return defer.DeferredList(dl, fireOnOneErrback=1, consumeErrors=0)
-
- def loadConfig_status(self, status):
- dl = []
-
- # remove old ones
- for s in self.statusTargets[:]:
- if not s in status:
- log.msg("removing IStatusReceiver", s)
- d = defer.maybeDeferred(s.disownServiceParent)
- dl.append(d)
- self.statusTargets.remove(s)
- # after those are finished going away, add new ones
- def addNewOnes(res):
- for s in status:
- if not s in self.statusTargets:
- log.msg("adding IStatusReceiver", s)
- s.setServiceParent(self)
- self.statusTargets.append(s)
- d = defer.DeferredList(dl, fireOnOneErrback=1)
- d.addCallback(addNewOnes)
- return d
-
-
- def addChange(self, change):
- for s in self.allSchedulers():
- s.addChange(change)
-
- def submitBuildSet(self, bs):
- # determine the set of Builders to use
- builders = []
- for name in bs.builderNames:
- b = self.botmaster.builders.get(name)
- if b:
- if b not in builders:
- builders.append(b)
- continue
- # TODO: add aliases like 'all'
- raise KeyError("no such builder named '%s'" % name)
-
- # now tell the BuildSet to create BuildRequests for all those
- # Builders and submit them
- bs.start(builders)
- self.status.buildsetSubmitted(bs.status)
-
-
-class Control:
- if implements:
- implements(interfaces.IControl)
- else:
- __implements__ = interfaces.IControl,
-
- def __init__(self, master):
- self.master = master
-
- def addChange(self, change):
- self.master.change_svc.addChange(change)
-
- def submitBuildSet(self, bs):
- self.master.submitBuildSet(bs)
-
- def getBuilder(self, name):
- b = self.master.botmaster.builders[name]
- return interfaces.IBuilderControl(b)
-
-components.registerAdapter(Control, BuildMaster, interfaces.IControl)
-
-# so anybody who can get a handle on the BuildMaster can force a build with:
-# IControl(master).getBuilder("full-2.3").forceBuild("me", "boredom")
-
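
The deleted master.py ends by registering Control as the IControl adapter for BuildMaster, so the one-liner in the closing comment above is the intended entry point for forcing a build. Below is a minimal sketch of that usage (not part of the deleted file); 'master' is assumed to be a live BuildMaster instance, and "full-2.3" is simply the builder name taken from the comment.

from buildbot import interfaces

def force_one_build(master):
    # adapter lookup: returns Control(master) via the registerAdapter() call above
    control = interfaces.IControl(master)
    # getBuilder() wraps the named Builder in an IBuilderControl
    builder_control = control.getBuilder("full-2.3")
    # forceBuild(who, reason), exactly as in the closing comment
    builder_control.forceBuild("me", "boredom")
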
diff --git a/buildbot/buildbot-source/buildbot/pbutil.py b/buildbot/buildbot-source/buildbot/pbutil.py
deleted file mode 100644
index bc85a016d..000000000
--- a/buildbot/buildbot-source/buildbot/pbutil.py
+++ /dev/null
@@ -1,147 +0,0 @@
-
-"""Base classes handy for use with PB clients.
-"""
-
-from twisted.spread import pb
-
-from twisted.spread.pb import PBClientFactory
-from twisted.internet import protocol
-from twisted.python import log
-
-class NewCredPerspective(pb.Avatar):
- def attached(self, mind):
- return self
- def detached(self, mind):
- pass
-
-class ReconnectingPBClientFactory(PBClientFactory,
- protocol.ReconnectingClientFactory):
- """Reconnecting client factory for PB brokers.
-
- Like PBClientFactory, but if the connection fails or is lost, the factory
- will attempt to reconnect.
-
- Instead of using f.getRootObject (which gives a Deferred that can only
- be fired once), override the gotRootObject method.
-
- Instead of using the newcred f.login (which is also one-shot), call
- f.startLogin() with the credentials and client, and override the
- gotPerspective method.
-
- Instead of using the oldcred f.getPerspective (also one-shot), call
- f.startGettingPerspective() with the same arguments, and override
- gotPerspective.
-
- gotRootObject and gotPerspective will be called each time the object is
- received (once per successful connection attempt). You will probably want
- to use obj.notifyOnDisconnect to find out when the connection is lost.
-
- If an authorization error occurs, failedToGetPerspective() will be
- invoked.
-
- To use me, subclass, then hand an instance to a connector (like
- TCPClient).
- """
-
- def __init__(self):
- PBClientFactory.__init__(self)
- self._doingLogin = False
- self._doingGetPerspective = False
-
- def clientConnectionFailed(self, connector, reason):
- PBClientFactory.clientConnectionFailed(self, connector, reason)
- # Twisted-1.3 erroneously abandons the connection on non-UserErrors.
- # To avoid this bug, don't upcall, and implement the correct version
- # of the method here.
- if self.continueTrying:
- self.connector = connector
- self.retry()
-
- def clientConnectionLost(self, connector, reason):
- PBClientFactory.clientConnectionLost(self, connector, reason,
- reconnecting=True)
- RCF = protocol.ReconnectingClientFactory
- RCF.clientConnectionLost(self, connector, reason)
-
- def clientConnectionMade(self, broker):
- self.resetDelay()
- PBClientFactory.clientConnectionMade(self, broker)
- if self._doingLogin:
- self.doLogin(self._root)
- if self._doingGetPerspective:
- self.doGetPerspective(self._root)
- self.gotRootObject(self._root)
-
- def __getstate__(self):
- # this should get folded into ReconnectingClientFactory
- d = self.__dict__.copy()
- d['connector'] = None
- d['_callID'] = None
- return d
-
- # oldcred methods
-
- def getPerspective(self, *args):
- raise RuntimeError, "getPerspective is one-shot: use startGettingPerspective instead"
-
- def startGettingPerspective(self, username, password, serviceName,
- perspectiveName=None, client=None):
- self._doingGetPerspective = True
- if perspectiveName == None:
- perspectiveName = username
- self._oldcredArgs = (username, password, serviceName,
- perspectiveName, client)
-
- def doGetPerspective(self, root):
- # oldcred getPerspective()
- (username, password,
- serviceName, perspectiveName, client) = self._oldcredArgs
- d = self._cbAuthIdentity(root, username, password)
- d.addCallback(self._cbGetPerspective,
- serviceName, perspectiveName, client)
- d.addCallbacks(self.gotPerspective, self.failedToGetPerspective)
-
-
- # newcred methods
-
- def login(self, *args):
- raise RuntimeError, "login is one-shot: use startLogin instead"
-
- def startLogin(self, credentials, client=None):
- self._credentials = credentials
- self._client = client
- self._doingLogin = True
-
- def doLogin(self, root):
- # newcred login()
- d = self._cbSendUsername(root, self._credentials.username,
- self._credentials.password, self._client)
- d.addCallbacks(self.gotPerspective, self.failedToGetPerspective)
-
-
- # methods to override
-
- def gotPerspective(self, perspective):
- """The remote avatar or perspective (obtained each time this factory
- connects) is now available."""
- pass
-
- def gotRootObject(self, root):
- """The remote root object (obtained each time this factory connects)
- is now available. This method will be called each time the connection
- is established and the object reference is retrieved."""
- pass
-
- def failedToGetPerspective(self, why):
- """The login process failed, most likely because of an authorization
- failure (bad password), but it is also possible that we lost the new
- connection before we managed to send our credentials.
- """
- log.msg("ReconnectingPBClientFactory.failedToGetPerspective")
- if why.check(pb.PBConnectionLost):
- log.msg("we lost the brand-new connection")
- # retrying might help here, let clientConnectionLost decide
- return
- # probably authorization
- self.stopTrying() # logging in harder won't help
- log.err(why)
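
Per the class docstring above, ReconnectingPBClientFactory is meant to be subclassed, given credentials through startLogin(), and handed to a connector. The following is a rough usage sketch, not part of the deleted file; the host, port, and credential values are illustrative assumptions.

from twisted.cred import credentials
from twisted.internet import reactor
from twisted.python import log

from buildbot.pbutil import ReconnectingPBClientFactory  # the class defined above

class ExampleClientFactory(ReconnectingPBClientFactory):
    def gotPerspective(self, perspective):
        # called on every successful (re)connection attempt
        log.msg("connected, perspective=%r" % (perspective,))

f = ExampleClientFactory()
f.startLogin(credentials.UsernamePassword("user", "passwd"))
reactor.connectTCP("localhost", 9989, f)   # hand the factory to a connector
reactor.run()
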
diff --git a/buildbot/buildbot-source/buildbot/process/__init__.py b/buildbot/buildbot-source/buildbot/process/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/buildbot/process/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/buildbot/process/base.py b/buildbot/buildbot-source/buildbot/process/base.py
deleted file mode 100644
index 82412564d..000000000
--- a/buildbot/buildbot-source/buildbot/process/base.py
+++ /dev/null
@@ -1,608 +0,0 @@
-# -*- test-case-name: buildbot.test.test_step -*-
-
-import types, time
-from StringIO import StringIO
-
-from twisted.python import log, components
-from twisted.python.failure import Failure
-from twisted.internet import reactor, defer, error
-from twisted.spread import pb
-
-from buildbot import interfaces
-from buildbot.twcompat import implements
-from buildbot.util import now
-from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, EXCEPTION
-from buildbot.status.builder import Results, BuildRequestStatus
-from buildbot.status.progress import BuildProgress
-
-class BuildRequest:
- """I represent a request to a specific Builder to run a single build.
-
- I have a SourceStamp which specifies what sources I will build. This may
- specify a specific revision of the source tree (so source.branch,
- source.revision, and source.patch are used). The .patch attribute is
- either None or a tuple of (patchlevel, diff), consisting of a number to
- use in 'patch -pN', and a unified-format context diff.
-
- Alternatively, the SourceStamp may specify a set of Changes to be built,
- contained in source.changes. In this case, I may be mergeable with other
- BuildRequests on the same branch.
-
- I may be part of a BuildSet, in which case I will report status results
- to it.
-
- I am paired with a BuildRequestStatus object, to which I feed status
- information.
-
- @type source: a L{buildbot.sourcestamp.SourceStamp} instance.
-    @ivar source: the source code that this BuildRequest uses
-
- @type reason: string
- @ivar reason: the reason this Build is being requested. Schedulers
- provide this, but for forced builds the user requesting the
- build will provide a string.
-
- @ivar status: the IBuildStatus object which tracks our status
-
- @ivar submittedAt: a timestamp (seconds since epoch) when this request
- was submitted to the Builder. This is used by the CVS
- step to compute a checkout timestamp.
- """
-
- source = None
- builder = None
- startCount = 0 # how many times we have tried to start this build
-
- if implements:
- implements(interfaces.IBuildRequestControl)
- else:
- __implements__ = interfaces.IBuildRequestControl,
-
- def __init__(self, reason, source, builderName=None, username=None, config=None, installsetcheck=None):
- # TODO: remove the =None on builderName, it is there so I don't have
- # to change a lot of tests that create BuildRequest objects
- assert interfaces.ISourceStamp(source, None)
- self.username = username
- self.config = config
- self.installsetcheck = installsetcheck
- self.reason = reason
- self.source = source
- self.start_watchers = []
- self.finish_watchers = []
- self.status = BuildRequestStatus(source, builderName)
-
- def canBeMergedWith(self, other):
- return self.source.canBeMergedWith(other.source)
-
- def mergeWith(self, others):
- return self.source.mergeWith([o.source for o in others])
-
- def mergeReasons(self, others):
- """Return a reason for the merged build request."""
- reasons = []
- for req in [self] + others:
- if req.reason and req.reason not in reasons:
- reasons.append(req.reason)
- return ", ".join(reasons)
-
- def mergeConfig(self, others):
- """Return a config for the merged build request."""
- configs = []
- for con in [self] + others:
- if con.config and con.config not in configs:
- configs.append(con.config)
- return ", ".join(configs)
-
- def mergeInstallSet(self, others):
-        """Return an installsetcheck for the merged build request."""
- installsetchecks = []
- for isc in [self] + others:
- if isc.installsetcheck and isc.installsetcheck not in installsetchecks:
- installsetchecks.append(isc.installsetcheck)
- return ", ".join(installsetchecks)
-
- def mergeUsername(self, others):
- """Return a username for the merged build request."""
- usernames = []
- for isc in [self] + others:
- if isc.username and isc.username not in usernames:
- usernames.append(isc.username)
- return ", ".join(usernames)
-
- def waitUntilFinished(self):
- """Get a Deferred that will fire (with a
-        L{buildbot.interfaces.IBuildStatus} instance) when the build
- finishes."""
- d = defer.Deferred()
- self.finish_watchers.append(d)
- return d
-
- # these are called by the Builder
-
- def requestSubmitted(self, builder):
- # the request has been placed on the queue
- self.builder = builder
-
- def buildStarted(self, build, buildstatus):
- """This is called by the Builder when a Build has been started in the
-        hopes of satisfying this BuildRequest. It may be called multiple
- times, since interrupted builds and lost buildslaves may force
- multiple Builds to be run until the fate of the BuildRequest is known
- for certain."""
- for o in self.start_watchers[:]:
- # these observers get the IBuildControl
- o(build)
- # while these get the IBuildStatus
- self.status.buildStarted(buildstatus)
-
- def finished(self, buildstatus):
- """This is called by the Builder when the BuildRequest has been
- retired. This happens when its Build has either succeeded (yay!) or
- failed (boo!). TODO: If it is halted due to an exception (oops!), or
- some other retryable error, C{finished} will not be called yet."""
-
- for w in self.finish_watchers:
- w.callback(buildstatus)
- self.finish_watchers = []
-
- # IBuildRequestControl
-
- def subscribe(self, observer):
- self.start_watchers.append(observer)
- def unsubscribe(self, observer):
- self.start_watchers.remove(observer)
-
- def cancel(self):
- """Cancel this request. This can only be successful if the Build has
- not yet been started.
-
- @return: a boolean indicating if the cancel was successful."""
- if self.builder:
- return self.builder.cancelBuildRequest(self)
- return False
-
-
-class Build:
- """I represent a single build by a single bot. Specialized Builders can
- use subclasses of Build to hold status information unique to those build
- processes.
-
- I control B{how} the build proceeds. The actual build is broken up into a
- series of steps, saved in the .buildSteps[] array as a list of
- L{buildbot.process.step.BuildStep} objects. Each step is a single remote
- command, possibly a shell command.
-
- During the build, I put status information into my C{BuildStatus}
- gatherer.
-
- After the build, I go away.
-
- I can be used by a factory by setting buildClass on
- L{buildbot.process.factory.BuildFactory}
-
- @ivar request: the L{BuildRequest} that triggered me
- @ivar build_status: the L{buildbot.status.builder.BuildStatus} that
- collects our status
- """
-
- if implements:
- implements(interfaces.IBuildControl)
- else:
- __implements__ = interfaces.IBuildControl,
-
- workdir = "build"
- build_status = None
- reason = "changes"
- finished = False
- results = None
- config = None
- installsetcheck = None
- username = None
-
- def __init__(self, requests):
- self.requests = requests
- for req in self.requests:
- req.startCount += 1
- self.locks = []
- # build a source stamp
- self.source = requests[0].mergeWith(requests[1:])
- self.reason = requests[0].mergeReasons(requests[1:])
- self.config = requests[0].mergeConfig(requests[1:])
- self.installsetcheck = requests[0].mergeInstallSet(requests[1:])
- self.username = requests[0].mergeUsername(requests[1:])
- #self.abandoned = False
-
- self.progress = None
- self.currentStep = None
- self.slaveEnvironment = {}
-
- def setBuilder(self, builder):
- """
- Set the given builder as our builder.
-
- @type builder: L{buildbot.process.builder.Builder}
- """
- self.builder = builder
-
- def setLocks(self, locks):
- self.locks = locks
-
- def getSourceStamp(self):
- return self.source
-
- def setProperty(self, propname, value):
- """Set a property on this build. This may only be called after the
- build has started, so that it has a BuildStatus object where the
- properties can live."""
- self.build_status.setProperty(propname, value)
-
- def getProperty(self, propname):
- return self.build_status.properties[propname]
-
-
- def allChanges(self):
- return self.source.changes
-
- def allFiles(self):
- # return a list of all source files that were changed
- files = []
- havedirs = 0
- for c in self.allChanges():
- for f in c.files:
- files.append(f)
- if c.isdir:
- havedirs = 1
- return files
-
- def __repr__(self):
- return "<Build %s>" % (self.builder.name,)
-
- def __getstate__(self):
- d = self.__dict__.copy()
- if d.has_key('remote'):
- del d['remote']
- return d
-
- def blamelist(self):
- blamelist = []
- for c in self.allChanges():
- if c.who not in blamelist:
- blamelist.append(c.who)
- blamelist.sort()
- return blamelist
-
- def changesText(self):
- changetext = ""
- for c in self.allChanges():
- changetext += "-" * 60 + "\n\n" + c.asText() + "\n"
- # consider sorting these by number
- return changetext
-
- def setSteps(self, steps):
- """Set a list of StepFactories, which are generally just class
- objects which derive from step.BuildStep . These are used to create
- the Steps themselves when the Build starts (as opposed to when it is
- first created). By creating the steps later, their __init__ method
- will have access to things like build.allFiles() ."""
- self.stepFactories = steps # tuples of (factory, kwargs)
- for s in steps:
- pass
-
-
-
-
- useProgress = True
-
- def getSlaveCommandVersion(self, command, oldversion=None):
- return self.slavebuilder.getSlaveCommandVersion(command, oldversion)
-
- def setupStatus(self, build_status):
- self.build_status = build_status
- self.setProperty("buildername", self.builder.name)
- self.setProperty("buildnumber", self.build_status.number)
- self.setProperty("branch", self.source.branch)
- self.setProperty("revision", self.source.revision)
- self.setProperty("config", self.config)
- self.setProperty("installsetcheck", self.installsetcheck)
- self.setProperty("username", self.username)
-
- def setupSlaveBuilder(self, slavebuilder):
- self.slavebuilder = slavebuilder
- self.slavename = slavebuilder.slave.slavename
- self.setProperty("slavename", self.slavename)
-
- def startBuild(self, build_status, expectations, slavebuilder):
- """This method sets up the build, then starts it by invoking the
- first Step. It returns a Deferred which will fire when the build
- finishes. This Deferred is guaranteed to never errback."""
-
- # we are taking responsibility for watching the connection to the
- # remote. This responsibility was held by the Builder until our
- # startBuild was called, and will not return to them until we fire
- # the Deferred returned by this method.
-
- log.msg("%s.startBuild" % self)
- self.setupStatus(build_status)
- # now that we have a build_status, we can set properties
- self.setupSlaveBuilder(slavebuilder)
-
- # convert all locks into their real forms
- self.locks = [self.builder.botmaster.getLockByID(l)
- for l in self.locks]
- # then narrow SlaveLocks down to the right slave
- self.locks = [l.getLock(self.slavebuilder) for l in self.locks]
- self.remote = slavebuilder.remote
- self.remote.notifyOnDisconnect(self.lostRemote)
- d = self.deferred = defer.Deferred()
-
- try:
- self.setupBuild(expectations) # create .steps
- except:
- # the build hasn't started yet, so log the exception as a point
- # event instead of flunking the build. TODO: associate this
- # failure with the build instead. this involves doing
- # self.build_status.buildStarted() from within the exception
- # handler
- log.msg("Build.setupBuild failed")
- log.err(Failure())
- self.builder.builder_status.addPointEvent(["setupBuild",
- "exception"],
- color="purple")
- self.finished = True
- self.results = FAILURE
- self.deferred = None
- d.callback(self)
- return d
-
- self.build_status.buildStarted(self)
- self.acquireLocks().addCallback(self._startBuild_2)
- return d
-
- def acquireLocks(self, res=None):
- log.msg("acquireLocks(step %s, locks %s)" % (self, self.locks))
- if not self.locks:
- return defer.succeed(None)
- for lock in self.locks:
- if not lock.isAvailable():
- log.msg("Build %s waiting for lock %s" % (self, lock))
- d = lock.waitUntilAvailable(self)
- d.addCallback(self.acquireLocks)
- return d
- # all locks are available, claim them all
- for lock in self.locks:
- lock.claim(self)
- return defer.succeed(None)
-
- def _startBuild_2(self, res):
- self.startNextStep()
-
- def setupBuild(self, expectations):
- # create the actual BuildSteps. If there are any name collisions, we
- # add a count to the loser until it is unique.
- self.steps = []
- self.stepStatuses = {}
- stepnames = []
- sps = []
-
- for factory, args in self.stepFactories:
- args = args.copy()
- if not args.has_key("workdir"):
- args['workdir'] = self.workdir
- try:
- step = factory(build=self, **args)
- except:
- log.msg("error while creating step, factory=%s, args=%s"
- % (factory, args))
- raise
- name = step.name
- count = 1
- while name in stepnames and count < 100:
- count += 1
- name = step.name + "_%d" % count
- if name in stepnames:
- raise RuntimeError("duplicate step '%s'" % step.name)
- if name != "Install_Set" or (self.installsetcheck and name == "Install_Set") :
- #continue
- step.name = name
- stepnames.append(name)
- self.steps.append(step)
-
- # tell the BuildStatus about the step. This will create a
- # BuildStepStatus and bind it to the Step.
- self.build_status.addStep(step)
-
- sp = None
- if self.useProgress:
- # XXX: maybe bail if step.progressMetrics is empty? or skip
- # progress for that one step (i.e. "it is fast"), or have a
- # separate "variable" flag that makes us bail on progress
- # tracking
- sp = step.setupProgress()
- if sp:
- sps.append(sp)
-
- # Create a buildbot.status.progress.BuildProgress object. This is
- # called once at startup to figure out how to build the long-term
- # Expectations object, and again at the start of each build to get a
- # fresh BuildProgress object to track progress for that individual
- # build. TODO: revisit at-startup call
-
- if self.useProgress:
- self.progress = BuildProgress(sps)
- if self.progress and expectations:
- self.progress.setExpectationsFrom(expectations)
-
- # we are now ready to set up our BuildStatus.
- self.build_status.setSourceStamp(self.source)
- self.build_status.setUsername(self.username)
- self.build_status.setReason(self.reason)
- self.build_status.setBlamelist(self.blamelist())
- self.build_status.setProgress(self.progress)
-
- self.results = [] # list of FAILURE, SUCCESS, WARNINGS, SKIPPED
- self.result = SUCCESS # overall result, may downgrade after each step
- self.text = [] # list of text string lists (text2)
-
- def getNextStep(self):
- """This method is called to obtain the next BuildStep for this build.
- When it returns None (or raises a StopIteration exception), the build
- is complete."""
- if not self.steps:
- return None
- return self.steps.pop(0)
-
- def startNextStep(self):
- try:
- s = self.getNextStep()
- except StopIteration:
- s = None
- if not s:
- return self.allStepsDone()
- self.currentStep = s
- d = defer.maybeDeferred(s.startStep, self.remote)
- d.addCallback(self._stepDone, s)
- d.addErrback(self.buildException)
-
- def _stepDone(self, results, step):
- self.currentStep = None
- if self.finished:
- return # build was interrupted, don't keep building
- terminate = self.stepDone(results, step) # interpret/merge results
- if terminate:
- return self.allStepsDone()
- self.startNextStep()
-
- def stepDone(self, result, step):
- """This method is called when the BuildStep completes. It is passed a
- status object from the BuildStep and is responsible for merging the
- Step's results into those of the overall Build."""
-
- terminate = False
- text = None
- if type(result) == types.TupleType:
- result, text = result
- assert type(result) == type(SUCCESS)
- log.msg(" step '%s' complete: %s" % (step.name, Results[result]))
- self.results.append(result)
- if text:
- self.text.extend(text)
- if not self.remote:
- terminate = True
- if result == FAILURE:
- if step.warnOnFailure:
- if self.result != FAILURE:
- self.result = WARNINGS
- if step.flunkOnFailure:
- self.result = FAILURE
- if step.haltOnFailure:
- self.result = FAILURE
- terminate = True
- elif result == WARNINGS:
- if step.warnOnWarnings:
- if self.result != FAILURE:
- self.result = WARNINGS
- if step.flunkOnWarnings:
- self.result = FAILURE
- elif result == EXCEPTION:
- self.result = EXCEPTION
- terminate = True
- return terminate
-
- def lostRemote(self, remote=None):
- # the slave went away. There are several possible reasons for this,
- # and they aren't necessarily fatal. For now, kill the build, but
- # TODO: see if we can resume the build when it reconnects.
- log.msg("%s.lostRemote" % self)
- self.remote = None
- if self.currentStep:
- # this should cause the step to finish.
- log.msg(" stopping currentStep", self.currentStep)
- self.currentStep.interrupt(Failure(error.ConnectionLost()))
-
- def stopBuild(self, reason="<no reason given>"):
- # the idea here is to let the user cancel a build because, e.g.,
- # they realized they committed a bug and they don't want to waste
- # the time building something that they know will fail. Another
- # reason might be to abandon a stuck build. We want to mark the
- # build as failed quickly rather than waiting for the slave's
- # timeout to kill it on its own.
-
- log.msg(" %s: stopping build: %s" % (self, reason))
- if self.finished:
- return
- # TODO: include 'reason' in this point event
- self.builder.builder_status.addPointEvent(['interrupt'])
- self.currentStep.interrupt(reason)
- if 0:
- # TODO: maybe let its deferred do buildFinished
- if self.currentStep and self.currentStep.progress:
- # XXX: really .fail or something
- self.currentStep.progress.finish()
- text = ["stopped", reason]
- self.buildFinished(text, "red", FAILURE)
-
- def allStepsDone(self):
- if self.result == FAILURE:
- color = "red"
- text = ["failed"]
- elif self.result == WARNINGS:
- color = "orange"
- text = ["warnings"]
- elif self.result == EXCEPTION:
- color = "purple"
- text = ["exception"]
- else:
- color = "green"
- text = ["build", "successful"]
- text.extend(self.text)
- return self.buildFinished(text, color, self.result)
-
- def buildException(self, why):
- log.msg("%s.buildException" % self)
- log.err(why)
- self.buildFinished(["build", "exception"], "purple", FAILURE)
-
- def buildFinished(self, text, color, results):
- """This method must be called when the last Step has completed. It
- marks the Build as complete and returns the Builder to the 'idle'
- state.
-
- It takes three arguments which describe the overall build status:
- text, color, results. 'results' is one of SUCCESS, WARNINGS, or
- FAILURE.
-
-        If 'results' is SUCCESS or WARNINGS, we will permit any dependent
- builds to start. If it is 'FAILURE', those builds will be
- abandoned."""
-
- self.finished = True
- if self.remote:
- self.remote.dontNotifyOnDisconnect(self.lostRemote)
- self.results = results
-
- log.msg(" %s: build finished" % self)
- self.build_status.setSlavename(self.slavename)
- self.build_status.setText(text)
- self.build_status.setColor(color)
- self.build_status.setResults(results)
- self.build_status.buildFinished()
- if self.progress:
- # XXX: also test a 'timing consistent' flag?
- log.msg(" setting expectations for next time")
- self.builder.setExpectations(self.progress)
- reactor.callLater(0, self.releaseLocks)
- self.deferred.callback(self)
- self.deferred = None
-
- def releaseLocks(self):
- log.msg("releaseLocks(%s): %s" % (self, self.locks))
- for lock in self.locks:
- lock.release(self)
-
- # IBuildControl
-
- def getStatus(self):
- return self.build_status
-
- # stopBuild is defined earlier
-
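
The BuildRequest defined above is normally constructed by a Scheduler and handed to a Builder. A short sketch of that flow follows (not part of the deleted file); the branch, builder name, and the Builder instance doing the submitting are assumptions for illustration.

from twisted.python import log

from buildbot.sourcestamp import SourceStamp
from buildbot.process.base import BuildRequest   # the class defined above

ss = SourceStamp(branch="trunk", revision=None)             # build the tip of 'trunk'
req = BuildRequest("forced by example", ss, builderName="full-2.3")
d = req.waitUntilFinished()                                  # Deferred firing with an IBuildStatus
d.addCallback(lambda build_status: log.msg("build finished: %s" % build_status))
# a Builder then accepts the request with: builder.submitBuildRequest(req)
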
diff --git a/buildbot/buildbot-source/buildbot/process/base.py.newbak3aug b/buildbot/buildbot-source/buildbot/process/base.py.newbak3aug
deleted file mode 100644
index 86c7b4b9a..000000000
--- a/buildbot/buildbot-source/buildbot/process/base.py.newbak3aug
+++ /dev/null
@@ -1,596 +0,0 @@
-# -*- test-case-name: buildbot.test.test_step -*-
-
-import types, time
-from StringIO import StringIO
-
-from twisted.python import log, components
-from twisted.python.failure import Failure
-from twisted.internet import reactor, defer, error
-from twisted.spread import pb
-
-from buildbot import interfaces
-from buildbot.twcompat import implements
-from buildbot.util import now
-from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, EXCEPTION
-from buildbot.status.builder import Results, BuildRequestStatus
-from buildbot.status.progress import BuildProgress
-
-class BuildRequest:
- """I represent a request to a specific Builder to run a single build.
-
- I have a SourceStamp which specifies what sources I will build. This may
- specify a specific revision of the source tree (so source.branch,
- source.revision, and source.patch are used). The .patch attribute is
- either None or a tuple of (patchlevel, diff), consisting of a number to
- use in 'patch -pN', and a unified-format context diff.
-
- Alternatively, the SourceStamp may specify a set of Changes to be built,
- contained in source.changes. In this case, I may be mergeable with other
- BuildRequests on the same branch.
-
- I may be part of a BuildSet, in which case I will report status results
- to it.
-
- I am paired with a BuildRequestStatus object, to which I feed status
- information.
-
- @type source: a L{buildbot.sourcestamp.SourceStamp} instance.
-    @ivar source: the source code that this BuildRequest uses
-
- @type reason: string
- @ivar reason: the reason this Build is being requested. Schedulers
- provide this, but for forced builds the user requesting the
- build will provide a string.
-
- @ivar status: the IBuildStatus object which tracks our status
-
- @ivar submittedAt: a timestamp (seconds since epoch) when this request
- was submitted to the Builder. This is used by the CVS
- step to compute a checkout timestamp.
- """
-
- source = None
- builder = None
- startCount = 0 # how many times we have tried to start this build
-
- if implements:
- implements(interfaces.IBuildRequestControl)
- else:
- __implements__ = interfaces.IBuildRequestControl,
-
- def __init__(self, config, installsetcheck, reason, source, builderName=None):
- # TODO: remove the =None on builderName, it is there so I don't have
- # to change a lot of tests that create BuildRequest objects
- assert interfaces.ISourceStamp(source, None)
- self.config = config
- self.installsetcheck = installsetcheck
- self.reason = reason
- self.source = source
- self.start_watchers = []
- self.finish_watchers = []
- self.status = BuildRequestStatus(source, builderName)
-
- def canBeMergedWith(self, other):
- return self.source.canBeMergedWith(other.source)
-
- def mergeWith(self, others):
- return self.source.mergeWith([o.source for o in others])
-
- def mergeReasons(self, others):
- """Return a reason for the merged build request."""
- reasons = []
- for req in [self] + others:
- if req.reason and req.reason not in reasons:
- reasons.append(req.reason)
- return ", ".join(reasons)
-
- def mergeConfig(self, others):
- """Return a config for the merged build request."""
- configs = []
- for con in [self] + others:
- if con.config and con.config not in configs:
- configs.append(con.config)
- return ", ".join(configs)
-
- def mergeInstallSet(self, others):
-        """Return an installsetcheck for the merged build request."""
- installsetchecks = []
- for isc in [self] + others:
- if isc.installsetcheck and isc.installsetcheck not in installsetchecks:
- installsetchecks.append(isc.installsetcheck)
- return ", ".join(installsetchecks)
-
-
- def waitUntilFinished(self):
- """Get a Deferred that will fire (with a
-        L{buildbot.interfaces.IBuildStatus} instance) when the build
- finishes."""
- d = defer.Deferred()
- self.finish_watchers.append(d)
- return d
-
- # these are called by the Builder
-
- def requestSubmitted(self, builder):
- # the request has been placed on the queue
- self.builder = builder
-
- def buildStarted(self, build, buildstatus):
- """This is called by the Builder when a Build has been started in the
-        hopes of satisfying this BuildRequest. It may be called multiple
- times, since interrupted builds and lost buildslaves may force
- multiple Builds to be run until the fate of the BuildRequest is known
- for certain."""
- for o in self.start_watchers[:]:
- # these observers get the IBuildControl
- o(build)
- # while these get the IBuildStatus
- self.status.buildStarted(buildstatus)
-
- def finished(self, buildstatus):
- """This is called by the Builder when the BuildRequest has been
- retired. This happens when its Build has either succeeded (yay!) or
- failed (boo!). TODO: If it is halted due to an exception (oops!), or
- some other retryable error, C{finished} will not be called yet."""
-
- for w in self.finish_watchers:
- w.callback(buildstatus)
- self.finish_watchers = []
-
- # IBuildRequestControl
-
- def subscribe(self, observer):
- self.start_watchers.append(observer)
- def unsubscribe(self, observer):
- self.start_watchers.remove(observer)
-
- def cancel(self):
- """Cancel this request. This can only be successful if the Build has
- not yet been started.
-
- @return: a boolean indicating if the cancel was successful."""
- if self.builder:
- return self.builder.cancelBuildRequest(self)
- return False
-
-
-class Build:
- """I represent a single build by a single bot. Specialized Builders can
- use subclasses of Build to hold status information unique to those build
- processes.
-
- I control B{how} the build proceeds. The actual build is broken up into a
- series of steps, saved in the .buildSteps[] array as a list of
- L{buildbot.process.step.BuildStep} objects. Each step is a single remote
- command, possibly a shell command.
-
- During the build, I put status information into my C{BuildStatus}
- gatherer.
-
- After the build, I go away.
-
- I can be used by a factory by setting buildClass on
- L{buildbot.process.factory.BuildFactory}
-
- @ivar request: the L{BuildRequest} that triggered me
- @ivar build_status: the L{buildbot.status.builder.BuildStatus} that
- collects our status
- """
-
- if implements:
- implements(interfaces.IBuildControl)
- else:
- __implements__ = interfaces.IBuildControl,
-
- workdir = "build"
- build_status = None
- reason = "changes"
- finished = False
- results = None
- config = None
- installsetcheck = None
-
- def __init__(self, requests):
- self.requests = requests
- for req in self.requests:
- req.startCount += 1
- self.locks = []
- # build a source stamp
- self.source = requests[0].mergeWith(requests[1:])
- self.reason = requests[0].mergeReasons(requests[1:])
- self.config = requests[0].mergeConfig(requests[1:])
- self.installsetcheck = requests[0].mergeInstallSet(requests[1:])
- #self.abandoned = False
-
- self.progress = None
- self.currentStep = None
- self.slaveEnvironment = {}
-
- def setBuilder(self, builder):
- """
- Set the given builder as our builder.
-
- @type builder: L{buildbot.process.builder.Builder}
- """
- self.builder = builder
-
- def setLocks(self, locks):
- self.locks = locks
-
- def getSourceStamp(self):
- return self.source
-
- def setProperty(self, propname, value):
- """Set a property on this build. This may only be called after the
- build has started, so that it has a BuildStatus object where the
- properties can live."""
- self.build_status.setProperty(propname, value)
-
- def getProperty(self, propname):
- return self.build_status.properties[propname]
-
-
- def allChanges(self):
- return self.source.changes
-
- def allFiles(self):
- # return a list of all source files that were changed
- files = []
- havedirs = 0
- for c in self.allChanges():
- for f in c.files:
- files.append(f)
- if c.isdir:
- havedirs = 1
- return files
-
- def __repr__(self):
- return "<Build %s>" % (self.builder.name,)
-
- def __getstate__(self):
- d = self.__dict__.copy()
- if d.has_key('remote'):
- del d['remote']
- return d
-
- def blamelist(self):
- blamelist = []
- for c in self.allChanges():
- if c.who not in blamelist:
- blamelist.append(c.who)
- blamelist.sort()
- return blamelist
-
- def changesText(self):
- changetext = ""
- for c in self.allChanges():
- changetext += "-" * 60 + "\n\n" + c.asText() + "\n"
- # consider sorting these by number
- return changetext
-
- def setSteps(self, steps):
- """Set a list of StepFactories, which are generally just class
- objects which derive from step.BuildStep . These are used to create
- the Steps themselves when the Build starts (as opposed to when it is
- first created). By creating the steps later, their __init__ method
- will have access to things like build.allFiles() ."""
- self.stepFactories = steps # tuples of (factory, kwargs)
- for s in steps:
- pass
-
-
-
-
- useProgress = True
-
- def getSlaveCommandVersion(self, command, oldversion=None):
- return self.slavebuilder.getSlaveCommandVersion(command, oldversion)
-
- def setupStatus(self, build_status):
- self.build_status = build_status
- self.setProperty("buildername", self.builder.name)
- self.setProperty("buildnumber", self.build_status.number)
- self.setProperty("branch", self.source.branch)
- self.setProperty("revision", self.source.revision)
- self.setProperty("config", self.config)
- self.setProperty("installsetcheck", self.installsetcheck)
-
- def setupSlaveBuilder(self, slavebuilder):
- self.slavebuilder = slavebuilder
- self.slavename = slavebuilder.slave.slavename
- self.setProperty("slavename", self.slavename)
-
- def startBuild(self, build_status, expectations, slavebuilder):
- """This method sets up the build, then starts it by invoking the
- first Step. It returns a Deferred which will fire when the build
- finishes. This Deferred is guaranteed to never errback."""
-
- # we are taking responsibility for watching the connection to the
- # remote. This responsibility was held by the Builder until our
- # startBuild was called, and will not return to them until we fire
- # the Deferred returned by this method.
-
- log.msg("%s.startBuild" % self)
- self.setupStatus(build_status)
- # now that we have a build_status, we can set properties
- self.setupSlaveBuilder(slavebuilder)
-
- # convert all locks into their real forms
- self.locks = [self.builder.botmaster.getLockByID(l)
- for l in self.locks]
- # then narrow SlaveLocks down to the right slave
- self.locks = [l.getLock(self.slavebuilder) for l in self.locks]
- self.remote = slavebuilder.remote
- self.remote.notifyOnDisconnect(self.lostRemote)
- d = self.deferred = defer.Deferred()
-
- try:
- self.setupBuild(expectations) # create .steps
- except:
- # the build hasn't started yet, so log the exception as a point
- # event instead of flunking the build. TODO: associate this
- # failure with the build instead. this involves doing
- # self.build_status.buildStarted() from within the exception
- # handler
- log.msg("Build.setupBuild failed")
- log.err(Failure())
- self.builder.builder_status.addPointEvent(["setupBuild",
- "exception"],
- color="purple")
- self.finished = True
- self.results = FAILURE
- self.deferred = None
- d.callback(self)
- return d
-
- self.build_status.buildStarted(self)
- self.acquireLocks().addCallback(self._startBuild_2)
- return d
-
- def acquireLocks(self, res=None):
- log.msg("acquireLocks(step %s, locks %s)" % (self, self.locks))
- if not self.locks:
- return defer.succeed(None)
- for lock in self.locks:
- if not lock.isAvailable():
- log.msg("Build %s waiting for lock %s" % (self, lock))
- d = lock.waitUntilAvailable(self)
- d.addCallback(self.acquireLocks)
- return d
- # all locks are available, claim them all
- for lock in self.locks:
- lock.claim(self)
- return defer.succeed(None)
-
- def _startBuild_2(self, res):
- self.startNextStep()
-
- def setupBuild(self, expectations):
- # create the actual BuildSteps. If there are any name collisions, we
- # add a count to the loser until it is unique.
- self.steps = []
- self.stepStatuses = {}
- stepnames = []
- sps = []
-
- for factory, args in self.stepFactories:
- args = args.copy()
- if not args.has_key("workdir"):
- args['workdir'] = self.workdir
- try:
- step = factory(build=self, **args)
- except:
- log.msg("error while creating step, factory=%s, args=%s"
- % (factory, args))
- raise
- name = step.name
- count = 1
- while name in stepnames and count < 100:
- count += 1
- name = step.name + "_%d" % count
- if name in stepnames:
- raise RuntimeError("duplicate step '%s'" % step.name)
- if not self.installsetcheck and name == "Install_Set" :
- continue
- step.name = name
- stepnames.append(name)
- self.steps.append(step)
-
- # tell the BuildStatus about the step. This will create a
- # BuildStepStatus and bind it to the Step.
- self.build_status.addStep(step)
-
- sp = None
- if self.useProgress:
- # XXX: maybe bail if step.progressMetrics is empty? or skip
- # progress for that one step (i.e. "it is fast"), or have a
- # separate "variable" flag that makes us bail on progress
- # tracking
- sp = step.setupProgress()
- if sp:
- sps.append(sp)
-
- # Create a buildbot.status.progress.BuildProgress object. This is
- # called once at startup to figure out how to build the long-term
- # Expectations object, and again at the start of each build to get a
- # fresh BuildProgress object to track progress for that individual
- # build. TODO: revisit at-startup call
-
- if self.useProgress:
- self.progress = BuildProgress(sps)
- if self.progress and expectations:
- self.progress.setExpectationsFrom(expectations)
-
- # we are now ready to set up our BuildStatus.
- self.build_status.setSourceStamp(self.source)
- self.build_status.setReason(self.reason)
- self.build_status.setBlamelist(self.blamelist())
- self.build_status.setProgress(self.progress)
-
- self.results = [] # list of FAILURE, SUCCESS, WARNINGS, SKIPPED
- self.result = SUCCESS # overall result, may downgrade after each step
- self.text = [] # list of text string lists (text2)
-
- def getNextStep(self):
- """This method is called to obtain the next BuildStep for this build.
- When it returns None (or raises a StopIteration exception), the build
- is complete."""
- if not self.steps:
- return None
- return self.steps.pop(0)
-
- def startNextStep(self):
- try:
- s = self.getNextStep()
- except StopIteration:
- s = None
- if not s:
- return self.allStepsDone()
- self.currentStep = s
- d = defer.maybeDeferred(s.startStep, self.remote)
- d.addCallback(self._stepDone, s)
- d.addErrback(self.buildException)
-
- def _stepDone(self, results, step):
- self.currentStep = None
- if self.finished:
- return # build was interrupted, don't keep building
- terminate = self.stepDone(results, step) # interpret/merge results
- if terminate:
- return self.allStepsDone()
- self.startNextStep()
-
- def stepDone(self, result, step):
- """This method is called when the BuildStep completes. It is passed a
- status object from the BuildStep and is responsible for merging the
- Step's results into those of the overall Build."""
-
- terminate = False
- text = None
- if type(result) == types.TupleType:
- result, text = result
- assert type(result) == type(SUCCESS)
- log.msg(" step '%s' complete: %s" % (step.name, Results[result]))
- self.results.append(result)
- if text:
- self.text.extend(text)
- if not self.remote:
- terminate = True
- if result == FAILURE:
- if step.warnOnFailure:
- if self.result != FAILURE:
- self.result = WARNINGS
- if step.flunkOnFailure:
- self.result = FAILURE
- if step.haltOnFailure:
- self.result = FAILURE
- terminate = True
- elif result == WARNINGS:
- if step.warnOnWarnings:
- if self.result != FAILURE:
- self.result = WARNINGS
- if step.flunkOnWarnings:
- self.result = FAILURE
- elif result == EXCEPTION:
- self.result = EXCEPTION
- terminate = True
- return terminate
-
- def lostRemote(self, remote=None):
- # the slave went away. There are several possible reasons for this,
- # and they aren't necessarily fatal. For now, kill the build, but
- # TODO: see if we can resume the build when it reconnects.
- log.msg("%s.lostRemote" % self)
- self.remote = None
- if self.currentStep:
- # this should cause the step to finish.
- log.msg(" stopping currentStep", self.currentStep)
- self.currentStep.interrupt(Failure(error.ConnectionLost()))
-
- def stopBuild(self, reason="<no reason given>"):
- # the idea here is to let the user cancel a build because, e.g.,
- # they realized they committed a bug and they don't want to waste
- # the time building something that they know will fail. Another
- # reason might be to abandon a stuck build. We want to mark the
- # build as failed quickly rather than waiting for the slave's
- # timeout to kill it on its own.
-
- log.msg(" %s: stopping build: %s" % (self, reason))
- if self.finished:
- return
- # TODO: include 'reason' in this point event
- self.builder.builder_status.addPointEvent(['interrupt'])
- self.currentStep.interrupt(reason)
- if 0:
- # TODO: maybe let its deferred do buildFinished
- if self.currentStep and self.currentStep.progress:
- # XXX: really .fail or something
- self.currentStep.progress.finish()
- text = ["stopped", reason]
- self.buildFinished(text, "red", FAILURE)
-
- def allStepsDone(self):
- if self.result == FAILURE:
- color = "red"
- text = ["failed"]
- elif self.result == WARNINGS:
- color = "orange"
- text = ["warnings"]
- elif self.result == EXCEPTION:
- color = "purple"
- text = ["exception"]
- else:
- color = "green"
- text = ["build", "successful"]
- text.extend(self.text)
- return self.buildFinished(text, color, self.result)
-
- def buildException(self, why):
- log.msg("%s.buildException" % self)
- log.err(why)
- self.buildFinished(["build", "exception"], "purple", FAILURE)
-
- def buildFinished(self, text, color, results):
- """This method must be called when the last Step has completed. It
- marks the Build as complete and returns the Builder to the 'idle'
- state.
-
- It takes three arguments which describe the overall build status:
- text, color, results. 'results' is one of SUCCESS, WARNINGS, or
- FAILURE.
-
-        If 'results' is SUCCESS or WARNINGS, we will permit any dependent
- builds to start. If it is 'FAILURE', those builds will be
- abandoned."""
-
- self.finished = True
- if self.remote:
- self.remote.dontNotifyOnDisconnect(self.lostRemote)
- self.results = results
-
- log.msg(" %s: build finished" % self)
- self.build_status.setSlavename(self.slavename)
- self.build_status.setText(text)
- self.build_status.setColor(color)
- self.build_status.setResults(results)
- self.build_status.buildFinished()
- if self.progress:
- # XXX: also test a 'timing consistent' flag?
- log.msg(" setting expectations for next time")
- self.builder.setExpectations(self.progress)
- reactor.callLater(0, self.releaseLocks)
- self.deferred.callback(self)
- self.deferred = None
-
- def releaseLocks(self):
- log.msg("releaseLocks(%s): %s" % (self, self.locks))
- for lock in self.locks:
- lock.release(self)
-
- # IBuildControl
-
- def getStatus(self):
- return self.build_status
-
- # stopBuild is defined earlier
-
diff --git a/buildbot/buildbot-source/buildbot/process/builder.py b/buildbot/buildbot-source/buildbot/process/builder.py
deleted file mode 100644
index 59f3c3cd2..000000000
--- a/buildbot/buildbot-source/buildbot/process/builder.py
+++ /dev/null
@@ -1,689 +0,0 @@
-#! /usr/bin/python
-
-import warnings
-
-from twisted.python import log, components, failure
-from twisted.spread import pb
-from twisted.internet import reactor, defer
-
-from buildbot import interfaces, sourcestamp
-from buildbot.twcompat import implements
-from buildbot.status.progress import Expectations
-from buildbot.status import builder
-from buildbot.util import now
-from buildbot.process import base
-
-(ATTACHING, # slave attached, still checking hostinfo/etc
- IDLE, # idle, available for use
- PINGING, # build about to start, making sure it is still alive
- BUILDING, # build is running
- ) = range(4)
-
-class SlaveBuilder(pb.Referenceable):
- """I am the master-side representative for one of the
- L{buildbot.slave.bot.SlaveBuilder} objects that lives in a remote
- buildbot. When a remote builder connects, I query it for command versions
- and then make it available to any Builds that are ready to run. """
-
- state = ATTACHING
- remote = None
- build = None
-
- def __init__(self, builder):
- self.builder = builder
- self.ping_watchers = []
-
- def getSlaveCommandVersion(self, command, oldversion=None):
- if self.remoteCommands is None:
- # the slave is 0.5.0 or earlier
- return oldversion
- return self.remoteCommands.get(command)
-
- def attached(self, slave, remote, commands):
- self.slave = slave
- self.remote = remote
- self.remoteCommands = commands # maps command name to version
- log.msg("Buildslave %s attached to %s" % (slave.slavename,
- self.builder.name))
- d = self.remote.callRemote("setMaster", self)
- d.addErrback(self._attachFailure, "Builder.setMaster")
- d.addCallback(self._attached2)
- return d
-
- def _attached2(self, res):
- d = self.remote.callRemote("print", "attached")
- d.addErrback(self._attachFailure, "Builder.print 'attached'")
- d.addCallback(self._attached3)
- return d
-
- def _attached3(self, res):
- # now we say they're really attached
- return self
-
- def _attachFailure(self, why, where):
- assert isinstance(where, str)
- log.msg(where)
- log.err(why)
- return why
-
- def detached(self):
- log.msg("Buildslave %s detached from %s" % (self.slave.slavename,
- self.builder.name))
- self.slave = None
- self.remote = None
- self.remoteCommands = None
-
- def startBuild(self, build):
- self.build = build
-
- def finishBuild(self):
- self.build = None
-
-
- def ping(self, timeout, status=None):
- """Ping the slave to make sure it is still there. Returns a Deferred
- that fires with True if it is.
-
- @param status: if you point this at a BuilderStatus, a 'pinging'
- event will be pushed.
- """
-
- newping = not self.ping_watchers
- d = defer.Deferred()
- self.ping_watchers.append(d)
- if newping:
- if status:
- event = status.addEvent(["pinging"], "yellow")
- d2 = defer.Deferred()
- d2.addCallback(self._pong_status, event)
- self.ping_watchers.insert(0, d2)
- # I think it will make the tests run smoother if the status
- # is updated before the ping completes
- Ping().ping(self.remote, timeout).addCallback(self._pong)
-
- return d
-
- def _pong(self, res):
- watchers, self.ping_watchers = self.ping_watchers, []
- for d in watchers:
- d.callback(res)
-
- def _pong_status(self, res, event):
- if res:
- event.text = ["ping", "success"]
- event.color = "green"
- else:
- event.text = ["ping", "failed"]
- event.color = "red"
- event.finish()
-
-class Ping:
- running = False
- timer = None
-
- def ping(self, remote, timeout):
- assert not self.running
- self.running = True
- log.msg("sending ping")
- self.d = defer.Deferred()
- # TODO: add a distinct 'ping' command on the slave.. using 'print'
- # for this purpose is kind of silly.
- remote.callRemote("print", "ping").addCallbacks(self._pong,
- self._ping_failed,
- errbackArgs=(remote,))
-
- # We use either our own timeout or the (long) TCP timeout to detect
- # silently-missing slaves. This might happen because of a NAT
- # timeout or a routing loop. If the slave just shuts down (and we
- # somehow missed the FIN), we should get a "connection refused"
- # message.
- self.timer = reactor.callLater(timeout, self._ping_timeout, remote)
- return self.d
-
- def _ping_timeout(self, remote):
- log.msg("ping timeout")
- # force the BotPerspective to disconnect, since this indicates that
- # the bot is unreachable.
- del self.timer
- remote.broker.transport.loseConnection()
- # the forcibly-lost connection will now cause the ping to fail
-
- def _stopTimer(self):
- if not self.running:
- return
- self.running = False
-
- if self.timer:
- self.timer.cancel()
- del self.timer
-
- def _pong(self, res):
- log.msg("ping finished: success")
- self._stopTimer()
- self.d.callback(True)
-
- def _ping_failed(self, res, remote):
- log.msg("ping finished: failure")
- self._stopTimer()
- # the slave has some sort of internal error, disconnect them. If we
- # don't, we'll requeue a build and ping them again right away,
- # creating a nasty loop.
- remote.broker.transport.loseConnection()
- # TODO: except, if they actually did manage to get this far, they'll
- # probably reconnect right away, and we'll do this game again. Maybe
- # it would be better to leave them in the PINGING state.
- self.d.callback(False)
-
-
-class Builder(pb.Referenceable):
- """I manage all Builds of a given type.
-
- Each Builder is created by an entry in the config file (the c['builders']
- list), with a number of parameters.
-
- One of these parameters is the L{buildbot.process.factory.BuildFactory}
- object that is associated with this Builder. The factory is responsible
- for creating new L{Build<buildbot.process.base.Build>} objects. Each
- Build object defines when and how the build is performed, so a new
- Factory or Builder should be defined to control this behavior.
-
- The Builder holds on to a number of L{base.BuildRequest} objects in a
- list named C{.buildable}. Incoming BuildRequest objects will be added to
- this list, or (if possible) merged into an existing request. When a slave
- becomes available, I will use my C{BuildFactory} to turn the request into
- a new C{Build} object. The C{BuildRequest} is forgotten, the C{Build}
- goes into C{.building} while it runs. Once the build finishes, I will
- discard it.
-
- I maintain a list of available SlaveBuilders, one for each connected
- slave that the C{slavenames} parameter says we can use. Some of these
- will be idle, some of them will be busy running builds for me. If there
- are multiple slaves, I can run multiple builds at once.
-
- I also manage forced builds, progress expectation (ETA) management, and
- some status delivery chores.
-
- I am persisted in C{BASEDIR/BUILDERNAME/builder}, so I can remember how
- long a build usually takes to run (in my C{expectations} attribute). This
- pickle also includes the L{buildbot.status.builder.BuilderStatus} object,
- which remembers the set of historic builds.
-
- @type buildable: list of L{buildbot.process.base.BuildRequest}
- @ivar buildable: BuildRequests that are ready to build, but which are
- waiting for a buildslave to be available.
-
- @type building: list of L{buildbot.process.base.Build}
- @ivar building: Builds that are actively running
-
- """
-
- expectations = None # this is created the first time we get a good build
- START_BUILD_TIMEOUT = 10
-
- def __init__(self, setup, builder_status):
- """
- @type setup: dict
- @param setup: builder setup data, as stored in
- BuildmasterConfig['builders']. Contains name,
- slavename(s), builddir, factory, locks.
- @type builder_status: L{buildbot.status.builder.BuilderStatus}
- """
- self.name = setup['name']
- self.slavenames = []
- if setup.has_key('slavename'):
- self.slavenames.append(setup['slavename'])
- if setup.has_key('slavenames'):
- self.slavenames.extend(setup['slavenames'])
- self.builddir = setup['builddir']
- self.buildFactory = setup['factory']
- self.locks = setup.get("locks", [])
- if setup.has_key('periodicBuildTime'):
- raise ValueError("periodicBuildTime can no longer be defined as"
- " part of the Builder: use scheduler.Periodic"
- " instead")
-
- # build/wannabuild slots: Build objects move along this sequence
- self.buildable = []
- self.building = []
-
- # buildslaves which have connected but which are not yet available.
- # These are always in the ATTACHING state.
- self.attaching_slaves = []
-
- # buildslaves at our disposal. Each SlaveBuilder instance has a
- # .state that is IDLE, PINGING, or BUILDING. "PINGING" is used when a
- # Build is about to start, to make sure that they're still alive.
- self.slaves = []
-
- self.builder_status = builder_status
- self.builder_status.setSlavenames(self.slavenames)
-
- # for testing, to help synchronize tests
- self.watchers = {'attach': [], 'detach': [], 'detach_all': [],
- 'idle': []}
-
- def setBotmaster(self, botmaster):
- self.botmaster = botmaster
-
- def compareToSetup(self, setup):
- diffs = []
- setup_slavenames = []
- if setup.has_key('slavename'):
- setup_slavenames.append(setup['slavename'])
- setup_slavenames.extend(setup.get('slavenames', []))
- if setup_slavenames != self.slavenames:
- diffs.append('slavenames changed from %s to %s' \
- % (self.slavenames, setup_slavenames))
- if setup['builddir'] != self.builddir:
- diffs.append('builddir changed from %s to %s' \
- % (self.builddir, setup['builddir']))
- if setup['factory'] != self.buildFactory: # compare objects
- diffs.append('factory changed')
-        # compare the locks we are currently using (old) against the ones
-        # named in the new setup
-        oldlocks = [(lock.__class__, lock.name)
-                    for lock in self.locks]
-        newlocks = [(lock.__class__, lock.name)
-                    for lock in setup.get('locks', [])]
-        if oldlocks != newlocks:
-            diffs.append('locks changed from %s to %s' % (oldlocks, newlocks))
- return diffs
-
- def __repr__(self):
- return "<Builder '%s'>" % self.name
-
-
- def submitBuildRequest(self, req):
- req.submittedAt = now()
- self.buildable.append(req)
- req.requestSubmitted(self)
- self.builder_status.addBuildRequest(req.status)
- self.maybeStartBuild()
-
- def cancelBuildRequest(self, req):
- if req in self.buildable:
- self.buildable.remove(req)
- self.builder_status.removeBuildRequest(req.status)
- return True
- return False
-
- def __getstate__(self):
- d = self.__dict__.copy()
- # TODO: note that d['buildable'] can contain Deferreds
- del d['building'] # TODO: move these back to .buildable?
- del d['slaves']
- return d
-
- def __setstate__(self, d):
- self.__dict__ = d
- self.building = []
- self.slaves = []
-
-    def fireTestEvent(self, name, fire_with=None):
-        # 'with' became a reserved word in later Pythons, so the payload
-        # argument is named 'fire_with'
-        if fire_with is None:
-            fire_with = self
-        watchers = self.watchers[name]
-        self.watchers[name] = []
-        for w in watchers:
-            reactor.callLater(0, w.callback, fire_with)
-
- def attached(self, slave, remote, commands):
- """This is invoked by the BotPerspective when the self.slavename bot
- registers their builder.
-
- @type slave: L{buildbot.master.BotPerspective}
- @param slave: the BotPerspective that represents the buildslave as a
- whole
- @type remote: L{twisted.spread.pb.RemoteReference}
- @param remote: a reference to the L{buildbot.slave.bot.SlaveBuilder}
- @type commands: dict: string -> string, or None
- @param commands: provides the slave's version of each RemoteCommand
-
- @rtype: L{twisted.internet.defer.Deferred}
- @return: a Deferred that fires (with 'self') when the slave-side
- builder is fully attached and ready to accept commands.
- """
- for s in self.attaching_slaves + self.slaves:
- if s.slave == slave:
- # already attached to them. This is fairly common, since
- # attached() gets called each time we receive the builder
- # list from the slave, and we ask for it each time we add or
- # remove a builder. So if the slave is hosting builders
- # A,B,C, and the config file changes A, we'll remove A and
- # re-add it, triggering two builder-list requests, getting
- # two redundant calls to attached() for B, and another two
- # for C.
- #
- # Therefore, when we see that we're already attached, we can
- # just ignore it. TODO: build a diagram of the state
- # transitions here, I'm concerned about sb.attached() failing
- # and leaving sb.state stuck at 'ATTACHING', and about
- # the detached() message arriving while there's some
- # transition pending such that the response to the transition
- # re-vivifies sb
- return defer.succeed(self)
-
- sb = SlaveBuilder(self)
- self.attaching_slaves.append(sb)
- d = sb.attached(slave, remote, commands)
- d.addCallback(self._attached)
- d.addErrback(self._not_attached, slave)
- return d
-
- def _attached(self, sb):
- # TODO: make this .addSlaveEvent(slave.slavename, ['connect']) ?
- self.builder_status.addPointEvent(['connect', sb.slave.slavename])
- sb.state = IDLE
- self.attaching_slaves.remove(sb)
- self.slaves.append(sb)
- self.maybeStartBuild()
-
- self.fireTestEvent('attach')
- return self
-
- def _not_attached(self, why, slave):
- # already log.err'ed by SlaveBuilder._attachFailure
- # TODO: make this .addSlaveEvent?
- # TODO: remove from self.slaves (except that detached() should get
- # run first, right?)
- self.builder_status.addPointEvent(['failed', 'connect',
- slave.slave.slavename])
- # TODO: add an HTMLLogFile of the exception
- self.fireTestEvent('attach', why)
-
- def detached(self, slave):
- """This is called when the connection to the bot is lost."""
- log.msg("%s.detached" % self, slave.slavename)
- for sb in self.attaching_slaves + self.slaves:
- if sb.slave == slave:
- break
- else:
- log.msg("WEIRD: Builder.detached(%s) (%s)"
- " not in attaching_slaves(%s)"
- " or slaves(%s)" % (slave, slave.slavename,
- self.attaching_slaves,
- self.slaves))
- return
- if sb.state == BUILDING:
- # the Build's .lostRemote method (invoked by a notifyOnDisconnect
- # handler) will cause the Build to be stopped, probably right
- # after the notifyOnDisconnect that invoked us finishes running.
-
- # TODO: should failover to a new Build
- #self.retryBuild(sb.build)
- pass
-
- if sb in self.attaching_slaves:
- self.attaching_slaves.remove(sb)
- if sb in self.slaves:
- self.slaves.remove(sb)
-
- # TODO: make this .addSlaveEvent?
- self.builder_status.addPointEvent(['disconnect', slave.slavename])
- sb.detached() # inform the SlaveBuilder that their slave went away
- self.updateBigStatus()
- self.fireTestEvent('detach')
- if not self.slaves:
- self.fireTestEvent('detach_all')
-
- def updateBigStatus(self):
- if not self.slaves:
- self.builder_status.setBigState("offline")
- elif self.building:
- self.builder_status.setBigState("building")
- else:
- self.builder_status.setBigState("idle")
- self.fireTestEvent('idle')
-
- def maybeStartBuild(self):
- log.msg("maybeStartBuild: %s %s" % (self.buildable, self.slaves))
- if not self.buildable:
- self.updateBigStatus()
- return # nothing to do
- # find the first idle slave
- for sb in self.slaves:
- if sb.state == IDLE:
- break
- else:
- log.msg("%s: want to start build, but we don't have a remote"
- % self)
- self.updateBigStatus()
- return
-
- # there is something to build, and there is a slave on which to build
- # it. Grab the oldest request, see if we can merge it with anything
- # else.
- req = self.buildable.pop(0)
- self.builder_status.removeBuildRequest(req.status)
- mergers = []
- for br in self.buildable[:]:
- if req.canBeMergedWith(br):
- self.buildable.remove(br)
- self.builder_status.removeBuildRequest(br.status)
- mergers.append(br)
- requests = [req] + mergers
-
- # Create a new build from our build factory and set ourself as the
- # builder.
- build = self.buildFactory.newBuild(requests)
- build.setBuilder(self)
- build.setLocks(self.locks)
-
- # start it
- self.startBuild(build, sb)
-
- def startBuild(self, build, sb):
- """Start a build on the given slave.
- @param build: the L{base.Build} to start
- @param sb: the L{SlaveBuilder} which will host this build
-
- @return: a Deferred which fires with a
- L{buildbot.interfaces.IBuildControl} that can be used to stop the
- Build, or to access a L{buildbot.interfaces.IBuildStatus} which will
- watch the Build as it runs. """
-
- self.building.append(build)
-
- # claim the slave. TODO: consider moving changes to sb.state inside
- # SlaveBuilder.. that would be cleaner.
- sb.state = PINGING
- sb.startBuild(build)
-
- self.updateBigStatus()
-
- log.msg("starting build %s.. pinging the slave" % build)
-        # ping the slave to make sure they're still there. If they've fallen
- # off the map (due to a NAT timeout or something), this will fail in
- # a couple of minutes, depending upon the TCP timeout. TODO: consider
- # making this time out faster, or at least characterize the likely
- # duration.
- d = sb.ping(self.START_BUILD_TIMEOUT)
- d.addCallback(self._startBuild_1, build, sb)
- return d
-
- def _startBuild_1(self, res, build, sb):
- if not res:
- return self._startBuildFailed("slave ping failed", build, sb)
- # The buildslave is ready to go.
- sb.state = BUILDING
- d = sb.remote.callRemote("startBuild")
- d.addCallbacks(self._startBuild_2, self._startBuildFailed,
- callbackArgs=(build,sb), errbackArgs=(build,sb))
- return d
-
- def _startBuild_2(self, res, build, sb):
- # create the BuildStatus object that goes with the Build
- bs = self.builder_status.newBuild()
-
- # start the build. This will first set up the steps, then tell the
- # BuildStatus that it has started, which will announce it to the
- # world (through our BuilderStatus object, which is its parent).
- # Finally it will start the actual build process.
- d = build.startBuild(bs, self.expectations, sb)
- d.addCallback(self.buildFinished, sb)
- d.addErrback(log.err) # this shouldn't happen. if it does, the slave
- # will be wedged
- for req in build.requests:
- req.buildStarted(build, bs)
- return build # this is the IBuildControl
-
- def _startBuildFailed(self, why, build, sb):
- # put the build back on the buildable list
- log.msg("I tried to tell the slave that the build %s started, but "
- "remote_startBuild failed: %s" % (build, why))
- # release the slave
- sb.finishBuild()
- sb.state = IDLE
-
- log.msg("re-queueing the BuildRequest")
- self.building.remove(build)
- for req in build.requests:
- self.buildable.insert(0, req) # they get first priority
- self.builder_status.addBuildRequest(req.status)
-
- # other notifyOnDisconnect calls will mark the slave as disconnected.
- # Re-try after they have fired, maybe there's another slave
- # available. TODO: I don't like these un-synchronizable callLaters..
- # a better solution is to mark the SlaveBuilder as disconnected
- # ourselves, but we'll need to make sure that they can tolerate
- # multiple disconnects first.
- reactor.callLater(0, self.maybeStartBuild)
-
- def buildFinished(self, build, sb):
- """This is called when the Build has finished (either success or
- failure). Any exceptions during the build are reported with
- results=FAILURE, not with an errback."""
-
- # release the slave
- sb.finishBuild()
- sb.state = IDLE
- # otherwise the slave probably got removed in detach()
-
- self.building.remove(build)
- for req in build.requests:
- req.finished(build.build_status)
- self.maybeStartBuild()
-
- def setExpectations(self, progress):
- """Mark the build as successful and update expectations for the next
- build. Only call this when the build did not fail in any way that
- would invalidate the time expectations generated by it. (if the
- compile failed and thus terminated early, we can't use the last
- build to predict how long the next one will take).
- """
- if self.expectations:
- self.expectations.update(progress)
- else:
- # the first time we get a good build, create our Expectations
- # based upon its results
- self.expectations = Expectations(progress)
- log.msg("new expectations: %s seconds" % \
- self.expectations.expectedBuildTime())
-
- def shutdownSlave(self):
- if self.remote:
- self.remote.callRemote("shutdown")
-
-
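-# A minimal illustrative sketch (all names hypothetical): the 'setup' dict
-# consumed by Builder.__init__ above has the same shape as an entry in
-# BuildmasterConfig['builders'] from master.cfg.
-if __name__ == '__main__':
-    from buildbot.process.factory import BuildFactory
-    example_factory = BuildFactory()
-    example_setup = {
-        'name': 'example-builder',        # becomes Builder.name
-        'slavenames': ['example-slave'],  # becomes Builder.slavenames
-        'builddir': 'example-builder',    # becomes Builder.builddir
-        'factory': example_factory,       # becomes Builder.buildFactory
-        'locks': [],                      # optional, becomes Builder.locks
-    }
-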
-class BuilderControl(components.Adapter):
- if implements:
- implements(interfaces.IBuilderControl)
- else:
- __implements__ = interfaces.IBuilderControl,
-
- def forceBuild(self, who, reason):
- """This is a shortcut for building the current HEAD.
-
- (false: You get back a BuildRequest, just as if you'd asked politely.
- To get control of the resulting build, you'll need use
- req.subscribe() .)
-
- (true: You get back a Deferred that fires with an IBuildControl)
-
- This shortcut peeks into the Builder and raises an exception if there
- is no slave available, to make backwards-compatibility a little
- easier.
- """
-
- warnings.warn("Please use BuilderControl.requestBuildSoon instead",
- category=DeprecationWarning, stacklevel=1)
-
- # see if there is an idle slave, so we can emit an appropriate error
- # message
- for sb in self.original.slaves:
- if sb.state == IDLE:
- break
- else:
- if self.original.building:
- raise interfaces.BuilderInUseError("All slaves are in use")
- raise interfaces.NoSlaveError("There are no slaves connected")
-
- req = base.BuildRequest(reason, sourcestamp.SourceStamp())
- self.requestBuild(req)
- # this is a hack that fires the Deferred for the first build and
- # ignores any others
- class Watcher:
- def __init__(self, req):
- self.req = req
- def wait(self):
- self.d = d = defer.Deferred()
- req.subscribe(self.started)
- return d
- def started(self, bs):
- if self.d:
- self.req.unsubscribe(self.started)
- self.d.callback(bs)
- self.d = None
- w = Watcher(req)
- return w.wait()
-
- def requestBuild(self, req):
- """Submit a BuildRequest to this Builder."""
- self.original.submitBuildRequest(req)
-
- def requestBuildSoon(self, req):
- """Submit a BuildRequest like requestBuild, but raise a
- L{buildbot.interfaces.NoSlaveError} if no slaves are currently
- available, so it cannot be used to queue a BuildRequest in the hopes
- that a slave will eventually connect. This method is appropriate for
- use by things like the web-page 'Force Build' button."""
- if not self.original.slaves:
- raise interfaces.NoSlaveError
- self.requestBuild(req)
-
- def resubmitBuild(self, bs, reason="<rebuild, no reason given>"):
- if not bs.isFinished():
- return
- branch, revision, patch = bs.getSourceStamp()
- changes = bs.getChanges()
- ss = sourcestamp.SourceStamp(branch, revision, patch, changes)
- req = base.BuildRequest(reason, ss, self.original.name)
- self.requestBuild(req)
-
- def getPendingBuilds(self):
- # return IBuildRequestControl objects
- raise NotImplementedError
-
- def getBuild(self, number):
- for b in self.original.building:
- if b.build_status.number == number:
- return b
- return None
-
- def ping(self, timeout=30):
- if not self.original.slaves:
- self.original.builder_status.addPointEvent(["ping", "no slave"],
- "red")
- return defer.succeed(False) # interfaces.NoSlaveError
- dl = []
- for s in self.original.slaves:
- dl.append(s.ping(timeout, self.original.builder_status))
- d = defer.DeferredList(dl)
- d.addCallback(self._gatherPingResults)
- return d
-
- def _gatherPingResults(self, res):
- for ignored,success in res:
- if not success:
- return False
- return True
-
-components.registerAdapter(BuilderControl, Builder, interfaces.IBuilderControl)
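-
-# A minimal illustrative sketch (hypothetical helper, not a published API):
-# the non-deprecated way to force a build is to hand a BuildRequest to an
-# IBuilderControl, much as requestBuildSoon() is used by the web 'Force
-# Build' button.
-def _example_force_build(control, reason="forced by an administrator"):
-    req = base.BuildRequest(reason, sourcestamp.SourceStamp())
-    # raises NoSlaveError if no buildslave is currently connected
-    control.requestBuildSoon(req)
-    return req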
diff --git a/buildbot/buildbot-source/buildbot/process/factory.py b/buildbot/buildbot-source/buildbot/process/factory.py
deleted file mode 100644
index 295aee9ec..000000000
--- a/buildbot/buildbot-source/buildbot/process/factory.py
+++ /dev/null
@@ -1,177 +0,0 @@
-# -*- test-case-name: buildbot.test.test_step -*-
-
-from buildbot import util
-from buildbot.process.base import Build
-from buildbot.process import step
-
-# deprecated, use BuildFactory.addStep
-def s(steptype, **kwargs):
- # convenience function for master.cfg files, to create step
- # specification tuples
- return (steptype, kwargs)
-
-class BuildFactory(util.ComparableMixin):
- """
- @cvar buildClass: class to use when creating builds
- @type buildClass: L{buildbot.process.base.Build}
- """
- buildClass = Build
- useProgress = 1
- compare_attrs = ['buildClass', 'steps', 'useProgress']
-
- def __init__(self, steps=None):
- if steps is None:
- steps = []
- self.steps = steps
-
- def newBuild(self, request):
- """Create a new Build instance.
- @param request: a L{base.BuildRequest} describing what is to be built
- """
- b = self.buildClass(request)
- b.useProgress = self.useProgress
- b.setSteps(self.steps)
- return b
-
- def addStep(self, steptype, **kwargs):
- self.steps.append((steptype, kwargs))
-
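-# A minimal illustrative sketch (hypothetical CVS location): a master.cfg
-# typically builds up a factory either from a list of s() tuples or by
-# calling addStep(); both just accumulate (steptype, kwargs) pairs.
-def _example_build_factory():
-    f = BuildFactory([s(step.CVS,
-                        cvsroot=":pserver:anon@cvs.example.org:/cvs",
-                        cvsmodule="project", mode="update")])
-    f.addStep(step.Compile, command=["make", "all"])
-    f.addStep(step.Test, command=["make", "check"])
-    return f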
-
-# BuildFactory subclasses for common build tools
-
-class GNUAutoconf(BuildFactory):
- def __init__(self, source, configure="./configure",
- configureEnv={},
- configureFlags=[],
- compile=["make", "all"],
- test=["make", "check"]):
- assert isinstance(source, tuple)
- assert issubclass(source[0], step.BuildStep)
- BuildFactory.__init__(self, [source])
- if configure is not None:
- # we either need to wind up with a string (which will be
- # space-split), or with a list of strings (which will not). The
- # list of strings is the preferred form.
- if type(configure) is str:
- if configureFlags:
- assert not " " in configure # please use list instead
- command = [configure] + configureFlags
- else:
- command = configure
- else:
- assert isinstance(configure, (list, tuple))
- command = configure + configureFlags
- self.addStep(step.Configure, command=command, env=configureEnv)
- if compile is not None:
- self.addStep(step.Compile, command=compile)
- if test is not None:
- self.addStep(step.Test, command=test)
-
-class CPAN(BuildFactory):
- def __init__(self, source, perl="perl"):
- assert isinstance(source, tuple)
- assert issubclass(source[0], step.BuildStep)
- BuildFactory.__init__(self, [source])
- self.addStep(step.Configure, command=[perl, "Makefile.PL"])
- self.addStep(step.Compile, command=["make"])
- self.addStep(step.Test, command=["make", "test"])
-
-class Distutils(BuildFactory):
- def __init__(self, source, python="python", test=None):
- assert isinstance(source, tuple)
- assert issubclass(source[0], step.BuildStep)
- BuildFactory.__init__(self, [source])
- self.addStep(step.Compile, command=[python, "./setup.py", "build"])
- if test is not None:
- self.addStep(step.Test, command=test)
-
-class Trial(BuildFactory):
- """Build a python module that uses distutils and trial. Set 'tests' to
- the module in which the tests can be found, or set useTestCaseNames=True
- to always have trial figure out which tests to run (based upon which
- files have been changed).
-
- See docs/factories.xhtml for usage samples. Not all of the Trial
- BuildStep options are available here, only the most commonly used ones.
- To get complete access, you will need to create a custom
- BuildFactory."""
-
- trial = "trial"
- randomly = False
- recurse = False
-
- def __init__(self, source,
- buildpython=["python"], trialpython=[], trial=None,
- testpath=".", randomly=None, recurse=None,
- tests=None, useTestCaseNames=False, env=None):
- BuildFactory.__init__(self, [source])
- assert isinstance(source, tuple)
- assert issubclass(source[0], step.BuildStep)
- assert tests or useTestCaseNames, "must use one or the other"
- if trial is not None:
- self.trial = trial
- if randomly is not None:
- self.randomly = randomly
- if recurse is not None:
- self.recurse = recurse
-
- from buildbot.process import step_twisted
- buildcommand = buildpython + ["./setup.py", "build"]
- self.addStep(step.Compile, command=buildcommand, env=env)
- self.addStep(step_twisted.Trial,
- python=trialpython, trial=self.trial,
- testpath=testpath,
- tests=tests, testChanges=useTestCaseNames,
- randomly=self.randomly,
- recurse=self.recurse,
- env=env,
- )
-
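-# A minimal illustrative sketch (hypothetical SVN URL and test module name):
-# Trial needs either tests= or useTestCaseNames=True, plus a source step
-# tuple, as described in the docstring above.
-def _example_trial_factory():
-    source = s(step.SVN, svnurl="http://svn.example.org/project/trunk",
-               mode="update")
-    return Trial(source, tests="project.test", testpath=".")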
-
-# compatibility classes, will go away. Note that these only offer
-# compatibility at the constructor level: if you have subclassed these
-# factories, your subclasses are unlikely to still work correctly.
-
-ConfigurableBuildFactory = BuildFactory
-
-class BasicBuildFactory(GNUAutoconf):
- # really a "GNU Autoconf-created tarball -in-CVS tree" builder
-
- def __init__(self, cvsroot, cvsmodule,
- configure=None, configureEnv={},
- compile="make all",
- test="make check", cvsCopy=False):
- mode = "clobber"
- if cvsCopy:
- mode = "copy"
- source = s(step.CVS, cvsroot=cvsroot, cvsmodule=cvsmodule, mode=mode)
- GNUAutoconf.__init__(self, source,
- configure=configure, configureEnv=configureEnv,
- compile=compile,
- test=test)
-
-class QuickBuildFactory(BasicBuildFactory):
- useProgress = False
-
- def __init__(self, cvsroot, cvsmodule,
- configure=None, configureEnv={},
- compile="make all",
- test="make check", cvsCopy=False):
- mode = "update"
- source = s(step.CVS, cvsroot=cvsroot, cvsmodule=cvsmodule, mode=mode)
- GNUAutoconf.__init__(self, source,
- configure=configure, configureEnv=configureEnv,
- compile=compile,
- test=test)
-
-class BasicSVN(GNUAutoconf):
-
- def __init__(self, svnurl,
- configure=None, configureEnv={},
- compile="make all",
- test="make check"):
- source = s(step.SVN, svnurl=svnurl, mode="update")
- GNUAutoconf.__init__(self, source,
- configure=configure, configureEnv=configureEnv,
- compile=compile,
- test=test)
diff --git a/buildbot/buildbot-source/buildbot/process/maxq.py b/buildbot/buildbot-source/buildbot/process/maxq.py
deleted file mode 100644
index 9ea0ddd30..000000000
--- a/buildbot/buildbot-source/buildbot/process/maxq.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from buildbot.process import step
-from buildbot.status import event, builder
-
-class MaxQ(step.ShellCommand):
- flunkOnFailure = True
- name = "maxq"
-
- def __init__(self, testdir=None, **kwargs):
- if not testdir:
- raise TypeError("please pass testdir")
- command = 'run_maxq.py %s' % (testdir,)
- step.ShellCommand.__init__(self, command=command, **kwargs)
-
- def startStatus(self):
- evt = event.Event("yellow", ['running', 'maxq', 'tests'],
- files={'log': self.log})
- self.setCurrentActivity(evt)
-
-
- def finished(self, rc):
- self.failures = 0
- if rc:
- self.failures = 1
- output = self.log.getAll()
- self.failures += output.count('\nTEST FAILURE:')
-
- result = (builder.SUCCESS, ['maxq'])
-
- if self.failures:
- result = (builder.FAILURE,
- [str(self.failures), 'maxq', 'failures'])
-
- return self.stepComplete(result)
-
- def finishStatus(self, result):
- if self.failures:
- color = "red"
- text = ["maxq", "failed"]
- else:
- color = "green"
- text = ['maxq', 'tests']
- self.updateCurrentActivity(color=color, text=text)
- self.finishStatusSummary()
- self.finishCurrentActivity()
-
-
diff --git a/buildbot/buildbot-source/buildbot/process/process_twisted.py b/buildbot/buildbot-source/buildbot/process/process_twisted.py
deleted file mode 100644
index 34052679f..000000000
--- a/buildbot/buildbot-source/buildbot/process/process_twisted.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#! /usr/bin/python
-
-# Build classes specific to the Twisted codebase
-
-from buildbot.process.base import Build
-from buildbot.process.factory import BuildFactory
-from buildbot.process import step
-from buildbot.process.step_twisted import HLint, ProcessDocs, BuildDebs, \
- Trial, RemovePYCs
-
-class TwistedBuild(Build):
- workdir = "Twisted" # twisted's bin/trial expects to live in here
- def isFileImportant(self, filename):
- if filename.startswith("doc/fun/"):
- return 0
- if filename.startswith("sandbox/"):
- return 0
- return 1
-
-class TwistedTrial(Trial):
- tests = "twisted"
-    # the Trial in Twisted >=2.1.0 has --recurse on by default, and the old
-    # '-to' option turned into --reporter=bwverbose .
- recurse = False
- trialMode = ["--reporter=bwverbose"]
- testpath = None
- trial = "./bin/trial"
-
-class TwistedBaseFactory(BuildFactory):
- buildClass = TwistedBuild
- # bin/trial expects its parent directory to be named "Twisted": it uses
- # this to add the local tree to PYTHONPATH during tests
- workdir = "Twisted"
-
- def __init__(self, source):
- BuildFactory.__init__(self, [source])
-
-class QuickTwistedBuildFactory(TwistedBaseFactory):
- treeStableTimer = 30
- useProgress = 0
-
- def __init__(self, source, python="python"):
- TwistedBaseFactory.__init__(self, source)
- if type(python) is str:
- python = [python]
- self.addStep(HLint, python=python[0])
- self.addStep(RemovePYCs)
- for p in python:
- cmd = [p, "setup.py", "build_ext", "-i"]
- self.addStep(step.Compile, command=cmd, flunkOnFailure=True)
- self.addStep(TwistedTrial, python=p, testChanges=True)
-
-class FullTwistedBuildFactory(TwistedBaseFactory):
- treeStableTimer = 5*60
-
- def __init__(self, source, python="python",
- processDocs=False, runTestsRandomly=False,
- compileOpts=[], compileOpts2=[]):
- TwistedBaseFactory.__init__(self, source)
- if processDocs:
- self.addStep(ProcessDocs)
-
- if type(python) == str:
- python = [python]
- assert isinstance(compileOpts, list)
- assert isinstance(compileOpts2, list)
- cmd = (python + compileOpts + ["setup.py", "build_ext"]
- + compileOpts2 + ["-i"])
-
- self.addStep(step.Compile, command=cmd, flunkOnFailure=True)
- self.addStep(RemovePYCs)
- self.addStep(TwistedTrial, python=python, randomly=runTestsRandomly)
-
-class TwistedDebsBuildFactory(TwistedBaseFactory):
- treeStableTimer = 10*60
-
- def __init__(self, source, python="python"):
- TwistedBaseFactory.__init__(self, source)
- self.addStep(ProcessDocs, haltOnFailure=True)
- self.addStep(BuildDebs, warnOnWarnings=True)
-
-class TwistedReactorsBuildFactory(TwistedBaseFactory):
- treeStableTimer = 5*60
-
- def __init__(self, source,
- python="python", compileOpts=[], compileOpts2=[],
- reactors=None):
- TwistedBaseFactory.__init__(self, source)
-
- if type(python) == str:
- python = [python]
- assert isinstance(compileOpts, list)
- assert isinstance(compileOpts2, list)
- cmd = (python + compileOpts + ["setup.py", "build_ext"]
- + compileOpts2 + ["-i"])
-
- self.addStep(step.Compile, command=cmd, warnOnFailure=True)
-
-        if reactors is None:
- reactors = [
- 'gtk2',
- 'gtk',
- #'kqueue',
- 'poll',
- 'c',
- 'qt',
- #'win32',
- ]
- for reactor in reactors:
- flunkOnFailure = 1
- warnOnFailure = 0
- #if reactor in ['c', 'qt', 'win32']:
- # # these are buggy, so tolerate failures for now
- # flunkOnFailure = 0
- # warnOnFailure = 1
- self.addStep(RemovePYCs) # TODO: why?
- self.addStep(TwistedTrial, name=reactor, python=python,
- reactor=reactor, flunkOnFailure=flunkOnFailure,
- warnOnFailure=warnOnFailure)
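-
-
-# A minimal illustrative sketch: each of the factories above is constructed
-# from a single (steptype, kwargs) source tuple. The Python versions listed
-# here are hypothetical.
-def _example_quick_twisted_factory():
-    source = (step.SVN,
-              {'svnurl': "svn://svn.twistedmatrix.com/svn/Twisted/trunk",
-               'mode': "update"})
-    return QuickTwistedBuildFactory(source, python=["python2.3", "python2.4"])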
diff --git a/buildbot/buildbot-source/buildbot/process/step.py b/buildbot/buildbot-source/buildbot/process/step.py
deleted file mode 100644
index c723ab8c5..000000000
--- a/buildbot/buildbot-source/buildbot/process/step.py
+++ /dev/null
@@ -1,2359 +0,0 @@
-# -*- test-case-name: buildbot.test.test_steps -*-
-
-import time, random, types, re, warnings, os
-from email.Utils import formatdate
-
-from twisted.internet import reactor, defer, error
-from twisted.spread import pb
-from twisted.python import log
-from twisted.python.failure import Failure
-from twisted.web.util import formatFailure
-
-from buildbot.interfaces import BuildSlaveTooOldError
-from buildbot.util import now
-from buildbot.status import progress, builder
-from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, SKIPPED, \
- EXCEPTION
-
-"""
-BuildStep and RemoteCommand classes for master-side representation of the
-build process
-"""
-
-class RemoteCommand(pb.Referenceable):
- """
- I represent a single command to be run on the slave. I handle the details
- of reliably gathering status updates from the slave (acknowledging each),
- and (eventually, in a future release) recovering from interrupted builds.
- This is the master-side object that is known to the slave-side
-    L{buildbot.slave.bot.SlaveBuilder}, to which status updates are sent.
-
- My command should be started by calling .run(), which returns a
- Deferred that will fire when the command has finished, or will
- errback if an exception is raised.
-
- Typically __init__ or run() will set up self.remote_command to be a
- string which corresponds to one of the SlaveCommands registered in
- the buildslave, and self.args to a dictionary of arguments that will
- be passed to the SlaveCommand instance.
-
- start, remoteUpdate, and remoteComplete are available to be overridden
-
- @type commandCounter: list of one int
- @cvar commandCounter: provides a unique value for each
- RemoteCommand executed across all slaves
- @type active: boolean
- @cvar active: whether the command is currently running
- """
- commandCounter = [0] # we use a list as a poor man's singleton
- active = False
-
- def __init__(self, remote_command, args):
- """
- @type remote_command: string
- @param remote_command: remote command to start. This will be
- passed to
- L{buildbot.slave.bot.SlaveBuilder.remote_startCommand}
- and needs to have been registered
- slave-side by
- L{buildbot.slave.registry.registerSlaveCommand}
- @type args: dict
- @param args: arguments to send to the remote command
- """
-
- self.remote_command = remote_command
- self.args = args
-
-    def __getstate__(self):
-        d = self.__dict__.copy()
-        # Remove the remote ref: if necessary (only for resumed builds), it
-        # will be reattached at resume time
-        if d.has_key("remote"):
-            del d["remote"]
-        return d
-
- def run(self, step, remote):
- self.active = True
- self.step = step
- self.remote = remote
- c = self.commandCounter[0]
- self.commandCounter[0] += 1
- #self.commandID = "%d %d" % (c, random.randint(0, 1000000))
- self.commandID = "%d" % c
- log.msg("%s: RemoteCommand.run [%s]" % (self, self.commandID))
- self.deferred = defer.Deferred()
-
- d = defer.maybeDeferred(self.start)
-
- # _finished is called with an error for unknown commands, errors
- # that occur while the command is starting (including OSErrors in
- # exec()), StaleBroker (when the connection was lost before we
- # started), and pb.PBConnectionLost (when the slave isn't responding
- # over this connection, perhaps it had a power failure, or NAT
- # weirdness). If this happens, self.deferred is fired right away.
- d.addErrback(self._finished)
-
- # Connections which are lost while the command is running are caught
- # when our parent Step calls our .lostRemote() method.
- return self.deferred
-
- def start(self):
- """
- Tell the slave to start executing the remote command.
-
- @rtype: L{twisted.internet.defer.Deferred}
- @returns: a deferred that will fire when the remote command is
- done (with None as the result)
- """
- # This method only initiates the remote command.
- # We will receive remote_update messages as the command runs.
- # We will get a single remote_complete when it finishes.
- # We should fire self.deferred when the command is done.
- d = self.remote.callRemote("startCommand", self, self.commandID,
- self.remote_command, self.args)
- return d
-
- def interrupt(self, why):
- # TODO: consider separating this into interrupt() and stop(), where
- # stop() unconditionally calls _finished, but interrupt() merely
- # asks politely for the command to stop soon.
-
- log.msg("RemoteCommand.interrupt", self, why)
- if not self.active:
- log.msg(" but this RemoteCommand is already inactive")
- return
- if not self.remote:
- log.msg(" but our .remote went away")
- return
- if isinstance(why, Failure) and why.check(error.ConnectionLost):
- log.msg("RemoteCommand.disconnect: lost slave")
- self.remote = None
- self._finished(why)
- return
-
- # tell the remote command to halt. Returns a Deferred that will fire
- # when the interrupt command has been delivered.
-
- d = defer.maybeDeferred(self.remote.callRemote, "interruptCommand",
- self.commandID, str(why))
- # the slave may not have remote_interruptCommand
- d.addErrback(self._interruptFailed)
- return d
-
- def _interruptFailed(self, why):
- log.msg("RemoteCommand._interruptFailed", self)
- # TODO: forcibly stop the Command now, since we can't stop it
- # cleanly
- return None
-
- def remote_update(self, updates):
- """
- I am called by the slave's L{buildbot.slave.bot.SlaveBuilder} so
- I can receive updates from the running remote command.
-
- @type updates: list of [object, int]
- @param updates: list of updates from the remote command
- """
- max_updatenum = 0
- for (update, num) in updates:
- #log.msg("update[%d]:" % num)
- try:
- if self.active: # ignore late updates
- self.remoteUpdate(update)
- except:
- # log failure, terminate build, let slave retire the update
- self._finished(Failure())
- # TODO: what if multiple updates arrive? should
- # skip the rest but ack them all
- if num > max_updatenum:
- max_updatenum = num
- return max_updatenum
-
- def remoteUpdate(self, update):
- raise NotImplementedError("You must implement this in a subclass")
-
- def remote_complete(self, failure=None):
- """
- Called by the slave's L{buildbot.slave.bot.SlaveBuilder} to
- notify me the remote command has finished.
-
- @type failure: L{twisted.python.failure.Failure} or None
-
- @rtype: None
- """
- # call the real remoteComplete a moment later, but first return an
- # acknowledgement so the slave can retire the completion message.
- if self.active:
- reactor.callLater(0, self._finished, failure)
- return None
-
- def _finished(self, failure=None):
- self.active = False
- # call .remoteComplete. If it raises an exception, or returns the
- # Failure that we gave it, our self.deferred will be errbacked. If
-        # it does not (either it ate the Failure or the step finished
-        # normally without raising a new exception), self.deferred will
- # be callbacked.
- d = defer.maybeDeferred(self.remoteComplete, failure)
- # arrange for the callback to get this RemoteCommand instance
- # instead of just None
- d.addCallback(lambda r: self)
- # this fires the original deferred we returned from .run(),
- # with self as the result, or a failure
- d.addBoth(self.deferred.callback)
-
- def remoteComplete(self, maybeFailure):
- """Subclasses can override this.
-
- This is called when the RemoteCommand has finished. 'maybeFailure'
- will be None if the command completed normally, or a Failure
- instance in one of the following situations:
-
- - the slave was lost before the command was started
- - the slave didn't respond to the startCommand message
- - the slave raised an exception while starting the command
- (bad command name, bad args, OSError from missing executable)
- - the slave raised an exception while finishing the command
- (they send back a remote_complete message with a Failure payload)
-
- and also (for now):
- - slave disconnected while the command was running
-
- This method should do cleanup, like closing log files. It should
- normally return the 'failure' argument, so that any exceptions will
- be propagated to the Step. If it wants to consume them, return None
- instead."""
-
- return maybeFailure
-
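-# A minimal illustrative sketch of the subclassing pattern described in the
-# RemoteCommand docstring above. The slave-side command name 'mkdir' and its
-# arguments are hypothetical: they must match something registered with
-# buildbot.slave.registry.registerSlaveCommand.
-class _ExampleRemoteCommand(RemoteCommand):
-    def __init__(self, workdir):
-        RemoteCommand.__init__(self, "mkdir", {'dir': workdir})
-        self.updates = []
-    def remoteUpdate(self, update):
-        # just remember everything the slave sends us
-        self.updates.append(update)
-    def remoteComplete(self, maybeFailure):
-        # returning the failure (if any) propagates it to the Step
-        return maybeFailure
-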
-class LoggedRemoteCommand(RemoteCommand):
- """
- I am a L{RemoteCommand} which expects the slave to send back
- stdout/stderr/rc updates. I gather these updates into a
- L{buildbot.status.builder.LogFile} named C{self.log}. You can give me a
- LogFile to use by calling useLog(), or I will create my own when the
- command is started. Unless you tell me otherwise, I will close the log
- when the command is complete.
- """
-
- log = None
- closeWhenFinished = False
- rc = None
- debug = False
-
- def __repr__(self):
- return "<RemoteCommand '%s' at %d>" % (self.remote_command, id(self))
-
- def useLog(self, loog, closeWhenFinished=False):
- self.log = loog
- self.closeWhenFinished = closeWhenFinished
-
- def start(self):
- if self.log is None:
- # orphan LogFile, cannot be subscribed to
- self.log = builder.LogFile(None)
- self.closeWhenFinished = True
- self.updates = {}
- log.msg("LoggedRemoteCommand.start", self.log)
- return RemoteCommand.start(self)
-
- def addStdout(self, data):
- self.log.addStdout(data)
- def addStderr(self, data):
- self.log.addStderr(data)
- def addHeader(self, data):
- self.log.addHeader(data)
- def remoteUpdate(self, update):
- if self.debug:
- for k,v in update.items():
- log.msg("Update[%s]: %s" % (k,v))
- if update.has_key('stdout'):
- self.addStdout(update['stdout'])
- if update.has_key('stderr'):
- self.addStderr(update['stderr'])
- if update.has_key('header'):
- self.addHeader(update['header'])
- if update.has_key('rc'):
- rc = self.rc = update['rc']
- log.msg("%s rc=%s" % (self, rc))
- self.addHeader("program finished with exit code %d\n" % rc)
- for k in update:
- if k not in ('stdout', 'stderr', 'header', 'rc'):
- if k not in self.updates:
- self.updates[k] = []
- self.updates[k].append(update[k])
-
- def remoteComplete(self, maybeFailure):
- if self.closeWhenFinished:
- if maybeFailure:
- self.addHeader("\nremoteFailed: %s" % maybeFailure)
- else:
- log.msg("closing log")
- self.log.finish()
- return maybeFailure
-
-class RemoteShellCommand(LoggedRemoteCommand):
- """This class helps you run a shell command on the build slave. It will
- accumulate all the command's output into a Log. When the command is
- finished, it will fire a Deferred. You can then check the results of the
- command and parse the output however you like."""
-
- def __init__(self, workdir, command, env=None,
- want_stdout=1, want_stderr=1,
- timeout=20*60, **kwargs):
- """
- @type workdir: string
- @param workdir: directory where the command ought to run,
- relative to the Builder's home directory. Defaults to
- '.': the same as the Builder's homedir. This should
- probably be '.' for the initial 'cvs checkout'
- command (which creates a workdir), and the Build-wide
- workdir for all subsequent commands (including
- compiles and 'cvs update').
-
- @type command: list of strings (or string)
- @param command: the shell command to run, like 'make all' or
- 'cvs update'. This should be a list or tuple
- which can be used directly as the argv array.
- For backwards compatibility, if this is a
- string, the text will be given to '/bin/sh -c
- %s'.
-
- @type env: dict of string->string
- @param env: environment variables to add or change for the
- slave. Each command gets a separate
- environment; all inherit the slave's initial
- one. TODO: make it possible to delete some or
- all of the slave's environment.
-
- @type want_stdout: bool
- @param want_stdout: defaults to True. Set to False if stdout should
- be thrown away. Do this to avoid storing or
- sending large amounts of useless data.
-
- @type want_stderr: bool
- @param want_stderr: False if stderr should be thrown away
-
- @type timeout: int
- @param timeout: tell the remote that if the command fails to
- produce any output for this number of seconds,
- the command is hung and should be killed. Use
- None to disable the timeout.
- """
- self.command = command # stash .command, set it later
- if env is not None:
- # avoid mutating the original master.cfg dictionary. Each
- # ShellCommand gets its own copy, any start() methods won't be
- # able to modify the original.
- env = env.copy()
- args = {'workdir': workdir,
- 'env': env,
- 'want_stdout': want_stdout,
- 'want_stderr': want_stderr,
- 'timeout': timeout,
- }
- LoggedRemoteCommand.__init__(self, "shell", args)
-
- def start(self):
- self.args['command'] = self.command
- if self.remote_command == "shell":
- # non-ShellCommand slavecommands are responsible for doing this
- # fixup themselves
- if self.step.slaveVersion("shell", "old") == "old":
- self.args['dir'] = self.args['workdir']
- what = "command '%s' in dir '%s'" % (self.args['command'],
- self.args['workdir'])
- log.msg(what)
- return LoggedRemoteCommand.start(self)
-
- def __repr__(self):
- return "<RemoteShellCommand '%s'>" % self.command
-
-
-class RemoteTCSHCommand(LoggedRemoteCommand):
- """This class helps you run a shell command on the build slave. It will
- accumulate all the command's output into a Log. When the command is
- finished, it will fire a Deferred. You can then check the results of the
- command and parse the output however you like."""
-
- def __init__(self, workdir, command, env=None,
- want_stdout=1, want_stderr=1,
- timeout=240*60, **kwargs):
- """
- @type workdir: string
- @param workdir: directory where the command ought to run,
- relative to the Builder's home directory. Defaults to
- '.': the same as the Builder's homedir. This should
- probably be '.' for the initial 'cvs checkout'
- command (which creates a workdir), and the Build-wide
- workdir for all subsequent commands (including
- compiles and 'cvs update').
-
- @type command: list of strings (or string)
- @param command: the shell command to run, like 'make all' or
- 'cvs update'. This should be a list or tuple
- which can be used directly as the argv array.
- For backwards compatibility, if this is a
- string, the text will be given to '/bin/sh -c
- %s'.
-
- @type env: dict of string->string
- @param env: environment variables to add or change for the
- slave. Each command gets a separate
- environment; all inherit the slave's initial
- one. TODO: make it possible to delete some or
- all of the slave's environment.
-
- @type want_stdout: bool
- @param want_stdout: defaults to True. Set to False if stdout should
- be thrown away. Do this to avoid storing or
- sending large amounts of useless data.
-
- @type want_stderr: bool
- @param want_stderr: False if stderr should be thrown away
-
- @type timeout: int
- @param timeout: tell the remote that if the command fails to
- produce any output for this number of seconds,
- the command is hung and should be killed. Use
- None to disable the timeout.
- """
- self.command = command # stash .command, set it later
- if env is not None:
- # avoid mutating the original master.cfg dictionary. Each
- # ShellCommand gets its own copy, any start() methods won't be
- # able to modify the original.
- env = env.copy()
- args = {'workdir': workdir,
- 'env': env,
- 'want_stdout': want_stdout,
- 'want_stderr': want_stderr,
- 'timeout': timeout,
- }
- LoggedRemoteCommand.__init__(self, "tcsh", args)
-
- def start(self):
- self.args['command'] = self.command
- if self.remote_command == "tcsh":
- # non-ShellCommand slavecommands are responsible for doing this
- # fixup themselves
- if self.step.slaveVersion("tcsh", "old") == "old":
- self.args['dir'] = self.args['workdir']
- what = "command '%s' in dir '%s'" % (self.args['command'],
- self.args['workdir'])
- log.msg(what)
- return LoggedRemoteCommand.start(self)
-
- def __repr__(self):
- return "<RemoteShellCommand '%s'>" % self.command
-
-
-class BuildStep:
- """
- I represent a single step of the build process. This step may involve
- zero or more commands to be run in the build slave, as well as arbitrary
- processing on the master side. Regardless of how many slave commands are
- run, the BuildStep will result in a single status value.
-
- The step is started by calling startStep(), which returns a Deferred that
- fires when the step finishes. See C{startStep} for a description of the
- results provided by that Deferred.
-
- __init__ and start are good methods to override. Don't forget to upcall
- BuildStep.__init__ or bad things will happen.
-
- To launch a RemoteCommand, pass it to .runCommand and wait on the
- Deferred it returns.
-
- Each BuildStep generates status as it runs. This status data is fed to
- the L{buildbot.status.builder.BuildStepStatus} listener that sits in
- C{self.step_status}. It can also feed progress data (like how much text
- is output by a shell command) to the
- L{buildbot.status.progress.StepProgress} object that lives in
- C{self.progress}, by calling C{progress.setProgress(metric, value)} as it
- runs.
-
- @type build: L{buildbot.process.base.Build}
- @ivar build: the parent Build which is executing this step
-
- @type progress: L{buildbot.status.progress.StepProgress}
- @ivar progress: tracks ETA for the step
-
- @type step_status: L{buildbot.status.builder.BuildStepStatus}
- @ivar step_status: collects output status
- """
-
- # these parameters are used by the parent Build object to decide how to
- # interpret our results. haltOnFailure will affect the build process
- # immediately, the others will be taken into consideration when
- # determining the overall build status.
- #
- haltOnFailure = False
- flunkOnWarnings = False
- flunkOnFailure = False
- warnOnWarnings = False
- warnOnFailure = False
-
- # 'parms' holds a list of all the parameters we care about, to allow
- # users to instantiate a subclass of BuildStep with a mixture of
- # arguments, some of which are for us, some of which are for the subclass
- # (or a delegate of the subclass, like how ShellCommand delivers many
- # arguments to the RemoteShellCommand that it creates). Such delegating
- # subclasses will use this list to figure out which arguments are meant
- # for us and which should be given to someone else.
- parms = ['build', 'name', 'locks',
- 'haltOnFailure',
- 'flunkOnWarnings',
- 'flunkOnFailure',
- 'warnOnWarnings',
- 'warnOnFailure',
- 'progressMetrics',
- ]
-
- name = "generic"
- locks = []
- progressMetrics = [] # 'time' is implicit
- useProgress = True # set to False if step is really unpredictable
- build = None
- step_status = None
- progress = None
-
- def __init__(self, build, **kwargs):
- self.build = build
- for p in self.__class__.parms:
- if kwargs.has_key(p):
- setattr(self, p, kwargs[p])
- del kwargs[p]
- # we want to encourage all steps to get a workdir, so tolerate its
- # presence here. It really only matters for non-ShellCommand steps
- # like Dummy
- if kwargs.has_key('workdir'):
- del kwargs['workdir']
- if kwargs:
- why = "%s.__init__ got unexpected keyword argument(s) %s" \
- % (self, kwargs.keys())
- raise TypeError(why)
-
- def setupProgress(self):
- if self.useProgress:
- sp = progress.StepProgress(self.name, self.progressMetrics)
- self.progress = sp
- self.step_status.setProgress(sp)
- return sp
- return None
-
- def getProperty(self, propname):
- return self.build.getProperty(propname)
-
- def setProperty(self, propname, value):
- self.build.setProperty(propname, value)
-
- def startStep(self, remote):
- """Begin the step. This returns a Deferred that will fire when the
- step finishes.
-
- This deferred fires with a tuple of (result, [extra text]), although
- older steps used to return just the 'result' value, so the receiving
- L{base.Build} needs to be prepared to handle that too. C{result} is
- one of the SUCCESS/WARNINGS/FAILURE/SKIPPED constants from
- L{buildbot.status.builder}, and the extra text is a list of short
- strings which should be appended to the Build's text results. This
- text allows a test-case step which fails to append B{17 tests} to the
- Build's status, in addition to marking the build as failing.
-
- The deferred will errback if the step encounters an exception,
- including an exception on the slave side (or if the slave goes away
- altogether). Failures in shell commands (rc!=0) will B{not} cause an
- errback, in general the BuildStep will evaluate the results and
- decide whether to treat it as a WARNING or FAILURE.
-
- @type remote: L{twisted.spread.pb.RemoteReference}
- @param remote: a reference to the slave's
- L{buildbot.slave.bot.SlaveBuilder} instance where any
- RemoteCommands may be run
- """
-
- self.remote = remote
- self.deferred = defer.Deferred()
- # convert all locks into their real form
- self.locks = [self.build.builder.botmaster.getLockByID(l)
- for l in self.locks]
- # then narrow SlaveLocks down to the slave that this build is being
- # run on
- self.locks = [l.getLock(self.build.slavebuilder) for l in self.locks]
- for l in self.locks:
- if l in self.build.locks:
- log.msg("Hey, lock %s is claimed by both a Step (%s) and the"
- " parent Build (%s)" % (l, self, self.build))
- raise RuntimeError("lock claimed by both Step and Build")
- d = self.acquireLocks()
- d.addCallback(self._startStep_2)
- return self.deferred
-
- def acquireLocks(self, res=None):
- log.msg("acquireLocks(step %s, locks %s)" % (self, self.locks))
- if not self.locks:
- return defer.succeed(None)
- for lock in self.locks:
- if not lock.isAvailable():
- log.msg("step %s waiting for lock %s" % (self, lock))
- d = lock.waitUntilAvailable(self)
- d.addCallback(self.acquireLocks)
- return d
- # all locks are available, claim them all
- for lock in self.locks:
- lock.claim(self)
- return defer.succeed(None)
-
- def _startStep_2(self, res):
- if self.progress:
- self.progress.start()
- self.step_status.stepStarted()
- try:
- skip = self.start()
- if skip == SKIPPED:
- reactor.callLater(0, self.releaseLocks)
- reactor.callLater(0, self.deferred.callback, SKIPPED)
- except:
- log.msg("BuildStep.startStep exception in .start")
- self.failed(Failure())
-
- def start(self):
- """Begin the step. Override this method and add code to do local
- processing, fire off remote commands, etc.
-
- To spawn a command in the buildslave, create a RemoteCommand instance
- and run it with self.runCommand::
-
- c = RemoteCommandFoo(args)
- d = self.runCommand(c)
- d.addCallback(self.fooDone).addErrback(self.failed)
-
- As the step runs, it should send status information to the
- BuildStepStatus::
-
- self.step_status.setColor('red')
- self.step_status.setText(['compile', 'failed'])
- self.step_status.setText2(['4', 'warnings'])
-
- To add a LogFile, use self.addLog. Make sure it gets closed when it
- finishes. When giving a Logfile to a RemoteShellCommand, just ask it
- to close the log when the command completes::
-
- log = self.addLog('output')
- cmd = RemoteShellCommand(args)
- cmd.useLog(log, closeWhenFinished=True)
-
- You can also create complete Logfiles with generated text in a single
- step::
-
- self.addCompleteLog('warnings', text)
-
- When the step is done, it should call self.finished(result). 'result'
- will be provided to the L{buildbot.process.base.Build}, and should be
- one of the constants defined above: SUCCESS, WARNINGS, FAILURE, or
- SKIPPED.
-
- If the step encounters an exception, it should call self.failed(why).
- 'why' should be a Failure object. This automatically fails the whole
- build with an exception. It is a good idea to add self.failed as an
- errback to any Deferreds you might obtain.
-
- If the step decides it does not need to be run, start() can return
- the constant SKIPPED. This fires the callback immediately: it is not
- necessary to call .finished yourself. This can also indicate to the
- status-reporting mechanism that this step should not be displayed."""
-
- raise NotImplementedError("your subclass must implement this method")
-
- def interrupt(self, reason):
- """Halt the command, either because the user has decided to cancel
- the build ('reason' is a string), or because the slave has
- disconnected ('reason' is a ConnectionLost Failure). Any further
- local processing should be skipped, and the Step completed with an
- error status. The results text should say something useful like
- ['step', 'interrupted'] or ['remote', 'lost']"""
- pass
-
- def releaseLocks(self):
- log.msg("releaseLocks(%s): %s" % (self, self.locks))
- for lock in self.locks:
- lock.release(self)
-
- def finished(self, results):
- if self.progress:
- self.progress.finish()
- self.step_status.stepFinished(results)
- self.releaseLocks()
- self.deferred.callback(results)
-
- def failed(self, why):
- # if isinstance(why, pb.CopiedFailure): # a remote exception might
- # only have short traceback, so formatFailure is not as useful as
- # you'd like (no .frames, so no traceback is displayed)
- log.msg("BuildStep.failed, traceback follows")
- log.err(why)
- try:
- if self.progress:
- self.progress.finish()
- self.addHTMLLog("err.html", formatFailure(why))
- self.addCompleteLog("err.text", why.getTraceback())
- # could use why.getDetailedTraceback() for more information
- self.step_status.setColor("purple")
- self.step_status.setText([self.name, "exception"])
- self.step_status.setText2([self.name])
- self.step_status.stepFinished(EXCEPTION)
- except:
- log.msg("exception during failure processing")
- log.err()
- # the progress stuff may still be whacked (the StepStatus may
- # think that it is still running), but the build overall will now
- # finish
- try:
- self.releaseLocks()
- except:
- log.msg("exception while releasing locks")
- log.err()
-
- log.msg("BuildStep.failed now firing callback")
- self.deferred.callback(EXCEPTION)
-
- # utility methods that BuildSteps may find useful
-
- def slaveVersion(self, command, oldversion=None):
- """Return the version number of the given slave command. For the
- commands defined in buildbot.slave.commands, this is the value of
- 'cvs_ver' at the top of that file. Non-existent commands will return
- a value of None. Buildslaves running buildbot-0.5.0 or earlier did
-        not respond to the version query: commands on those slaves will
-        return the value you pass as C{oldversion}, so you can distinguish
-        between old buildslaves and missing commands.
-
- If you know that <=0.5.0 buildslaves have the command you want (CVS
- and SVN existed back then, but none of the other VC systems), then it
- makes sense to call this with oldversion='old'. If the command you
- want is newer than that, just leave oldversion= unspecified, and the
- command will return None for a buildslave that does not implement the
- command.
- """
- return self.build.getSlaveCommandVersion(command, oldversion)
-
- def slaveVersionIsOlderThan(self, command, minversion):
- sv = self.build.getSlaveCommandVersion(command, None)
- if sv is None:
- return True
- # the version we get back is a string form of the CVS version number
- # of the slave's buildbot/slave/commands.py, something like 1.39 .
- # This might change in the future (I might move away from CVS), but
- # if so I'll keep updating that string with suitably-comparable
- # values.
-        # compare numerically rather than as strings, so that e.g. "1.10"
-        # is correctly considered newer than "1.9"
-        if [int(x) for x in sv.split(".")] < [int(x) for x in minversion.split(".")]:
- return True
- return False
-
- def addLog(self, name):
- loog = self.step_status.addLog(name)
- return loog
-
- def addCompleteLog(self, name, text):
- log.msg("addCompleteLog(%s)" % name)
- loog = self.step_status.addLog(name)
- size = loog.chunkSize
- for start in range(0, len(text), size):
- loog.addStdout(text[start:start+size])
- loog.finish()
-
- def addHTMLLog(self, name, html):
- log.msg("addHTMLLog(%s)" % name)
- self.step_status.addHTMLLog(name, html)
-
- def runCommand(self, c):
- d = c.run(self, self.remote)
- return d
-
-
-
-class LoggingBuildStep(BuildStep):
- # This is an abstract base class, suitable for inheritance by all
- # BuildSteps that invoke RemoteCommands which emit stdout/stderr messages
-
- progressMetrics = ['output']
-
- def describe(self, done=False):
- raise NotImplementedError("implement this in a subclass")
-
- def startCommand(self, cmd, errorMessages=[]):
- """
- @param cmd: a suitable RemoteCommand which will be launched, with
- all output being put into a LogFile named 'log'
- """
- self.cmd = cmd # so we can interrupt it
- self.step_status.setColor("yellow")
- self.step_status.setText(self.describe(False))
- loog = self.addLog("log")
- for em in errorMessages:
- loog.addHeader(em)
- log.msg("ShellCommand.start using log", loog)
- log.msg(" for cmd", cmd)
- cmd.useLog(loog, True)
- loog.logProgressTo(self.progress, "output")
- d = self.runCommand(cmd)
- d.addCallbacks(self._commandComplete, self.checkDisconnect)
- d.addErrback(self.failed)
-
- def interrupt(self, reason):
- # TODO: consider adding an INTERRUPTED or STOPPED status to use
- # instead of FAILURE, might make the text a bit more clear.
- # 'reason' can be a Failure, or text
- self.addCompleteLog('interrupt', str(reason))
- d = self.cmd.interrupt(reason)
- return d
-
- def checkDisconnect(self, f):
- f.trap(error.ConnectionLost)
- self.step_status.setColor("red")
- self.step_status.setText(self.describe(True) +
- ["failed", "slave", "lost"])
- self.step_status.setText2(["failed", "slave", "lost"])
- return self.finished(FAILURE)
-
- def _commandComplete(self, cmd):
- self.commandComplete(cmd)
- self.createSummary(cmd.log)
- results = self.evaluateCommand(cmd)
- self.setStatus(cmd, results)
- return self.finished(results)
-
- # to refine the status output, override one or more of the following
- # methods. Change as little as possible: start with the first ones on
- # this list and only proceed further if you have to
- #
- # createSummary: add additional Logfiles with summarized results
- # evaluateCommand: decides whether the step was successful or not
- #
-    # getText: create the final per-step text strings
-    # getText2: create the strings added to the overall build status
-    #
-    # maybeGetText2: only adds getText2() when the step affects build status
- #
- # setStatus: handles all status updating
-
- # commandComplete is available for general-purpose post-completion work.
- # It is a good place to do one-time parsing of logfiles, counting
- # warnings and errors. It should probably stash such counts in places
- # like self.warnings so they can be picked up later by your getText
- # method.
-
- # TODO: most of this stuff should really be on BuildStep rather than
- # ShellCommand. That involves putting the status-setup stuff in
- # .finished, which would make it hard to turn off.
-
- def commandComplete(self, cmd):
- """This is a general-purpose hook method for subclasses. It will be
- called after the remote command has finished, but before any of the
- other hook functions are called."""
- pass
-
-
-    def createSummary(self, log):
-        """Create summary logs from the command's output. This default
-        implementation writes the log text to a scratch file, greps it for
-        warning/error lines, and adds 'warnings', 'errors' and 'tail' logs.
-        Subclasses can override this to do their own summarizing, e.g.:
-        warnings = grep('^Warning:', log.getText())
-        self.addCompleteLog('warnings', warnings)
-        """
-        f = open('process_log', 'w')
-        f.write(log.getText())
-        f.close()
-        warnings = os.popen("grep warning: process_log").read()
-        errors = os.popen("grep error: process_log").read()
-        tail = os.popen("tail -50 process_log").read()
-        if warnings != "":
-            self.addCompleteLog('warnings', warnings)
-        if errors != "":
-            self.addCompleteLog('errors', errors)
-        self.addCompleteLog('tail', tail)
-
-
-
- def evaluateCommand(self, cmd):
- """Decide whether the command was SUCCESS, WARNINGS, or FAILURE.
- Override this to, say, declare WARNINGS if there is any stderr
- activity, or to say that rc!=0 is not actually an error."""
-
- if cmd.rc != 0:
- return FAILURE
- # if cmd.log.getStderr(): return WARNINGS
- return SUCCESS
-
- def getText(self, cmd, results):
- if results == SUCCESS:
- return self.describe(True)
- elif results == WARNINGS:
- return self.describe(True) + ["warnings"]
- else:
- return self.describe(True) + ["failed"]
-
- def getText2(self, cmd, results):
- """We have decided to add a short note about ourselves to the overall
- build description, probably because something went wrong. Return a
- short list of short strings. If your subclass counts test failures or
- warnings of some sort, this is a good place to announce the count."""
- # return ["%d warnings" % warningcount]
- # return ["%d tests" % len(failedTests)]
- return [self.name]
-
- def maybeGetText2(self, cmd, results):
- if results == SUCCESS:
- # successful steps do not add anything to the build's text
- pass
- elif results == WARNINGS:
- if (self.flunkOnWarnings or self.warnOnWarnings):
- # we're affecting the overall build, so tell them why
- return self.getText2(cmd, results)
- else:
- if (self.haltOnFailure or self.flunkOnFailure
- or self.warnOnFailure):
- # we're affecting the overall build, so tell them why
- return self.getText2(cmd, results)
- return []
-
- def getColor(self, cmd, results):
- assert results in (SUCCESS, WARNINGS, FAILURE)
- if results == SUCCESS:
- return "green"
- elif results == WARNINGS:
- return "orange"
- else:
- return "red"
-
- def setStatus(self, cmd, results):
- # this is good enough for most steps, but it can be overridden to
- # get more control over the displayed text
- self.step_status.setColor(self.getColor(cmd, results))
- self.step_status.setText(self.getText(cmd, results))
- self.step_status.setText2(self.maybeGetText2(cmd, results))
-
-
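# Editorial sketch (not part of the original diff): the hook methods listed
# above -- commandComplete, createSummary, evaluateCommand, getText2 -- are
# meant to be overridden in subclasses. A minimal, hypothetical
# warning-counting step; the regex and step name are illustrative
# assumptions, and it builds on the ShellCommand class defined further down.

import re
from buildbot.process.step import ShellCommand
from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE

class CountWarnings(ShellCommand):
    name = "count-warnings"
    warnOnWarnings = 1

    def commandComplete(self, cmd):
        # one-time parse of the log; stash the count for the later hooks
        self.warnings = len(re.findall(r'(?m)^.*warning:', cmd.log.getText()))

    def evaluateCommand(self, cmd):
        if cmd.rc != 0:
            return FAILURE
        if self.warnings:
            return WARNINGS
        return SUCCESS

    def getText2(self, cmd, results):
        # short note added to the overall build description
        return ["%d warnings" % self.warnings]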
-# -*- test-case-name: buildbot.test.test_properties -*-
-
-class _BuildPropertyDictionary:
- def __init__(self, build):
- self.build = build
- def __getitem__(self, name):
- p = self.build.getProperty(name)
- if p is None:
- p = ""
- return p
-
-class WithProperties:
- """This is a marker class, used in ShellCommand's command= argument to
- indicate that we want to interpolate a build property.
- """
-
- def __init__(self, fmtstring, *args):
- self.fmtstring = fmtstring
- self.args = args
-
- def render(self, build):
- if self.args:
- strings = []
- for name in self.args:
- p = build.getProperty(name)
- if p is None:
- p = ""
- strings.append(p)
- s = self.fmtstring % tuple(strings)
- else:
- s = self.fmtstring % _BuildPropertyDictionary(build)
- return s
-
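# Editorial sketch (not part of the original diff): WithProperties belongs
# inside a ShellCommand's command= list; _interpolateProperties() renders it
# against the build when the step starts. A hypothetical master.cfg fragment
# using the s() helper mentioned in the Darcs docstring further down; the
# archive path is an illustrative assumption, and 'got_revision' is the
# property set by Source.commandComplete().

from buildbot.process.factory import s
from buildbot.process.step import ShellCommand, WithProperties

upload = s(ShellCommand,
           workdir="build",
           command=["cp", "result.tar.gz",
                    WithProperties("/archive/build-%s.tar.gz",
                                   "got_revision")])

# dictionary-style interpolation is also supported:
#   WithProperties("build-%(got_revision)s.tar.gz")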
-
-class TCSHShellCommand(LoggingBuildStep):
- """I run a single shell command on the buildslave. I return FAILURE if
- the exit code of that command is non-zero, SUCCESS otherwise. To change
- this behavior, override my .evaluateCommand method.
-
- I create a single Log named 'log' which contains the output of the
- command. To create additional summary Logs, override my .createSummary
- method.
-
- The shell command I run (a list of argv strings) can be provided in
- several ways:
- - a class-level .command attribute
- - a command= parameter to my constructor (overrides .command)
- - set explicitly with my .setCommand() method (overrides both)
-
- @ivar command: a list of argv strings (or WithProperties instances).
- This will be used by start() to create a
- RemoteShellCommand instance.
-
- """
-
- name = "shell"
- description = None # set this to a list of short strings to override
- descriptionDone = None # alternate description when the step is complete
- command = None # set this to a command, or set in kwargs
-
- def __init__(self, workdir,
- description=None, descriptionDone=None,
- command=None,
- **kwargs):
- # most of our arguments get passed through to the RemoteShellCommand
- # that we create, but first strip out the ones that we pass to
- # BuildStep (like haltOnFailure and friends), and a couple that we
- # consume ourselves.
- self.workdir = workdir # required by RemoteShellCommand
- if description:
- self.description = description
- if descriptionDone:
- self.descriptionDone = descriptionDone
- if command:
- self.command = command
-
- # pull out the ones that BuildStep wants, then upcall
- buildstep_kwargs = {}
- for k in kwargs.keys()[:]:
- if k in self.__class__.parms:
- buildstep_kwargs[k] = kwargs[k]
- del kwargs[k]
- LoggingBuildStep.__init__(self, **buildstep_kwargs)
-
- # everything left over goes to the RemoteShellCommand
- kwargs['workdir'] = workdir # including a copy of 'workdir'
- self.remote_kwargs = kwargs
-
-
- def setCommand(self, command):
- self.command = command
-
- def describe(self, done=False):
- """Return a list of short strings to describe this step, for the
- status display. This uses the first few words of the shell command.
- You can replace this by setting .description in your subclass, or by
- overriding this method to describe the step better.
-
- @type done: boolean
- @param done: whether the command is complete or not, to improve the
- way the command is described. C{done=False} is used
- while the command is still running, so a single
- imperfect-tense verb is appropriate ('compiling',
- 'testing', ...) C{done=True} is used when the command
- has finished, and the default getText() method adds some
- text, so a simple noun is appropriate ('compile',
- 'tests' ...)
- """
-
- if done and self.descriptionDone is not None:
- return self.descriptionDone
- if self.description is not None:
- return self.description
-
- words = self.command
- # TODO: handle WithProperties here
- if isinstance(words, types.StringTypes):
- words = words.split()
- if len(words) < 1:
- return ["???"]
- if len(words) == 1:
- return ["'%s'" % words[0]]
- if len(words) == 2:
- return ["'%s" % words[0], "%s'" % words[1]]
- return ["'%s" % words[0], "%s" % words[1], "...'"]
-
- def _interpolateProperties(self, command):
- # interpolate any build properties into our command
- if not isinstance(command, (list, tuple)):
- return command
- command_argv = []
- for argv in command:
- if isinstance(argv, WithProperties):
- command_argv.append(argv.render(self.build))
- else:
- command_argv.append(argv)
- return command_argv
-
- def setupEnvironment(self, cmd):
- # merge in anything from Build.slaveEnvironment . Earlier steps
- # (perhaps ones which compile libraries or sub-projects that need to
- # be referenced by later steps) can add keys to
- # self.build.slaveEnvironment to affect later steps.
- slaveEnv = self.build.slaveEnvironment
- if slaveEnv:
- if cmd.args['env'] is None:
- cmd.args['env'] = {}
- cmd.args['env'].update(slaveEnv)
- # note that each RemoteShellCommand gets its own copy of the
- # dictionary, so we shouldn't be affecting anyone but ourselves.
-
- def start(self):
- command = self._interpolateProperties(self.command)
- # create the actual RemoteShellCommand instance now
- kwargs = self.remote_kwargs
- kwargs['command'] = command
- cmd = RemoteTCSHCommand(**kwargs)
- self.setupEnvironment(cmd)
- self.startCommand(cmd)
-
-
-
-class ShellCommand(LoggingBuildStep):
- """I run a single shell command on the buildslave. I return FAILURE if
- the exit code of that command is non-zero, SUCCESS otherwise. To change
- this behavior, override my .evaluateCommand method.
-
- I create a single Log named 'log' which contains the output of the
- command. To create additional summary Logs, override my .createSummary
- method.
-
- The shell command I run (a list of argv strings) can be provided in
- several ways:
- - a class-level .command attribute
- - a command= parameter to my constructor (overrides .command)
- - set explicitly with my .setCommand() method (overrides both)
-
- @ivar command: a list of argv strings (or WithProperties instances).
- This will be used by start() to create a
- RemoteShellCommand instance.
-
- """
-
- name = "shell"
- description = None # set this to a list of short strings to override
- descriptionDone = None # alternate description when the step is complete
- command = None # set this to a command, or set in kwargs
-
- def __init__(self, workdir,
- description=None, descriptionDone=None,
- command=None,
- **kwargs):
- # most of our arguments get passed through to the RemoteShellCommand
- # that we create, but first strip out the ones that we pass to
- # BuildStep (like haltOnFailure and friends), and a couple that we
- # consume ourselves.
- self.workdir = workdir # required by RemoteShellCommand
- if description:
- self.description = description
- if descriptionDone:
- self.descriptionDone = descriptionDone
- if command:
- self.command = command
-
- # pull out the ones that BuildStep wants, then upcall
- buildstep_kwargs = {}
- for k in kwargs.keys()[:]:
- if k in self.__class__.parms:
- buildstep_kwargs[k] = kwargs[k]
- del kwargs[k]
- LoggingBuildStep.__init__(self, **buildstep_kwargs)
-
- # everything left over goes to the RemoteShellCommand
- kwargs['workdir'] = workdir # including a copy of 'workdir'
- self.remote_kwargs = kwargs
-
-
- def setCommand(self, command):
- self.command = command
-
- def describe(self, done=False):
- """Return a list of short strings to describe this step, for the
- status display. This uses the first few words of the shell command.
- You can replace this by setting .description in your subclass, or by
- overriding this method to describe the step better.
-
- @type done: boolean
- @param done: whether the command is complete or not, to improve the
- way the command is described. C{done=False} is used
- while the command is still running, so a single
- imperfect-tense verb is appropriate ('compiling',
- 'testing', ...) C{done=True} is used when the command
- has finished, and the default getText() method adds some
- text, so a simple noun is appropriate ('compile',
- 'tests' ...)
- """
-
- if done and self.descriptionDone is not None:
- return self.descriptionDone
- if self.description is not None:
- return self.description
-
- words = self.command
- # TODO: handle WithProperties here
- if isinstance(words, types.StringTypes):
- words = words.split()
- if len(words) < 1:
- return ["???"]
- if len(words) == 1:
- return ["'%s'" % words[0]]
- if len(words) == 2:
- return ["'%s" % words[0], "%s'" % words[1]]
- return ["'%s" % words[0], "%s" % words[1], "...'"]
-
- def _interpolateProperties(self, command):
- # interpolate any build properties into our command
- if not isinstance(command, (list, tuple)):
- return command
- command_argv = []
- for argv in command:
- if isinstance(argv, WithProperties):
- command_argv.append(argv.render(self.build))
- else:
- command_argv.append(argv)
- return command_argv
-
- def setupEnvironment(self, cmd):
- # merge in anything from Build.slaveEnvironment . Earlier steps
- # (perhaps ones which compile libraries or sub-projects that need to
- # be referenced by later steps) can add keys to
- # self.build.slaveEnvironment to affect later steps.
- slaveEnv = self.build.slaveEnvironment
- if slaveEnv:
- if cmd.args['env'] is None:
- cmd.args['env'] = {}
- cmd.args['env'].update(slaveEnv)
- # note that each RemoteShellCommand gets its own copy of the
- # dictionary, so we shouldn't be affecting anyone but ourselves.
-
- def start(self):
- command = self._interpolateProperties(self.command)
- # create the actual RemoteShellCommand instance now
- kwargs = self.remote_kwargs
- kwargs['command'] = command
- cmd = RemoteShellCommand(**kwargs)
- self.setupEnvironment(cmd)
- self.startCommand(cmd)
-
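# Editorial sketch (not part of the original diff): the three ways of
# supplying the command that the ShellCommand docstring describes. Step
# names and commands here are illustrative assumptions.

from buildbot.process.step import ShellCommand

# 1. class-level .command attribute
class RunLint(ShellCommand):
    name = "lint"
    description = ["linting"]
    descriptionDone = ["lint"]
    command = ["make", "lint"]

# 2. command= constructor argument, e.g. in a master.cfg:
#      s(ShellCommand, workdir="build", command=["make", "docs"])

# 3. setCommand(), for subclasses that compute the argv at runtime
class RunLintVerbose(RunLint):
    def start(self):
        self.setCommand(self.command + ["VERBOSE=1"])
        RunLint.start(self)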
-
-
-
-class TreeSize(ShellCommand):
- name = "treesize"
- command = ["du", "-s", "."]
- kb = None
-
- def commandComplete(self, cmd):
- out = cmd.log.getText()
- m = re.search(r'^(\d+)', out)
- if m:
- self.kb = int(m.group(1))
-
- def evaluateCommand(self, cmd):
- if cmd.rc != 0:
- return FAILURE
- if self.kb is None:
- return WARNINGS # not sure how 'du' could fail, but whatever
- return SUCCESS
-
- def getText(self, cmd, results):
- if self.kb is not None:
- return ["treesize", "%d kb" % self.kb]
- return ["treesize", "unknown"]
-
-
-class Source(LoggingBuildStep):
- """This is a base class to generate a source tree in the buildslave.
- Each version control system has a specialized subclass, and is expected
- to override __init__ and implement computeSourceRevision() and
- startVC(). The class as a whole builds up the self.args dictionary, then
- starts a LoggedRemoteCommand with those arguments.
- """
-
- # if the checkout fails, there's no point in doing anything else
- haltOnFailure = True
- notReally = False
-
- branch = None # the default branch, should be set in __init__
-
- def __init__(self, workdir, mode='update', alwaysUseLatest=False,
- timeout=20*60, retry=None, **kwargs):
- """
- @type workdir: string
- @param workdir: local directory (relative to the Builder's root)
- where the tree should be placed
-
- @type mode: string
- @param mode: the kind of VC operation that is desired:
- - 'update': specifies that the checkout/update should be
- performed directly into the workdir. Each build is performed
- in the same directory, allowing for incremental builds. This
- minimizes disk space, bandwidth, and CPU time. However, it
- may encounter problems if the build process does not handle
- dependencies properly (if you must sometimes do a 'clean
- build' to make sure everything gets compiled), or if source
- files are deleted but generated files can influence test
- behavior (e.g. python's .pyc files), or when source
- directories are deleted but generated files prevent CVS from
- removing them.
-
- - 'copy': specifies that the source-controlled workspace
- should be maintained in a separate directory (called the
- 'copydir'), using checkout or update as necessary. For each
- build, a new workdir is created with a copy of the source
- tree (rm -rf workdir; cp -r copydir workdir). This doubles
- the disk space required, but keeps the bandwidth low
- (update instead of a full checkout). A full 'clean' build
- is performed each time. This avoids any generated-file
- build problems, but is still occasionally vulnerable to
- problems such as a CVS repository being manually rearranged
- (causing CVS errors on update) which are not an issue with
- a full checkout.
-
- - 'clobber': specifies that the working directory should be
- deleted each time, necessitating a full checkout for each
- build. This ensures a clean build from a complete checkout,
- avoiding any of the problems described above, but is
- bandwidth intensive, as the whole source tree must be
- pulled down for each build.
-
- - 'export': is like 'clobber', except that e.g. the 'cvs
- export' command is used to create the working directory.
- This command removes all VC metadata files (the
- CVS/.svn/{arch} directories) from the tree, which is
- sometimes useful for creating source tarballs (to avoid
- including the metadata in the tar file). Not all VC systems
- support export.
-
- @type alwaysUseLatest: boolean
- @param alwaysUseLatest: whether to always update to the most
- recent available sources for this build.
-
- Normally the Source step asks its Build for a list of all
- Changes that are supposed to go into the build, then computes a
- 'source stamp' (revision number or timestamp) that will cause
- exactly that set of changes to be present in the checked out
- tree. This is turned into, e.g., 'cvs update -D timestamp', or
- 'svn update -r revnum'. If alwaysUseLatest=True, bypass this
- computation and always update to the latest available sources
- for each build.
-
- The source stamp helps avoid a race condition in which someone
- commits a change after the master has decided to start a build
- but before the slave finishes checking out the sources. At best
- this results in a build which contains more changes than the
- buildmaster thinks it has (possibly resulting in the wrong
- person taking the blame for any problems that result), at worst
- it can result in an incoherent set of sources (splitting a
- non-atomic commit) which may not build at all.
-
- @type retry: tuple of ints (delay, repeats) (or None)
- @param retry: if provided, VC update failures are re-attempted up
- to REPEATS times, with DELAY seconds between each
- attempt. Some users have slaves with poor connectivity
- to their VC repository, and they say that up to 80% of
- their build failures are due to transient network
- failures that could be handled by simply retrying a
- couple times.
-
- """
-
- LoggingBuildStep.__init__(self, **kwargs)
-
- assert mode in ("update", "copy", "clobber", "export")
- if retry:
- delay, repeats = retry
- assert isinstance(repeats, int)
- assert repeats > 0
- self.args = {'mode': mode,
- 'workdir': workdir,
- 'timeout': timeout,
- 'retry': retry,
- 'patch': None, # set during .start
- }
- self.alwaysUseLatest = alwaysUseLatest
-
- # Compute defaults for descriptions:
- description = ["updating"]
- descriptionDone = ["update"]
- if mode == "clobber":
- description = ["checkout"]
- # ('checking out' takes too much space in the status display)
- descriptionDone = ["checkout"]
- elif mode == "export":
- description = ["exporting"]
- descriptionDone = ["export"]
- self.description = description
- self.descriptionDone = descriptionDone
-
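# Editorial sketch (not part of the original diff): mode=, alwaysUseLatest=
# and retry= are consumed here in Source.__init__, so every VC subclass
# (CVS, SVN, Darcs, ...) accepts them. A hypothetical master.cfg fragment
# using the SVN step defined further down; the repository URL is an
# illustrative assumption.

from buildbot.process.factory import s
from buildbot.process.step import SVN

checkout = s(SVN,
             svnurl="http://svn.example.org/repo/trunk",  # assumed URL
             workdir="build",
             mode="copy",       # keep a pristine copydir, fresh workdir per build
             retry=(30, 3))     # on VC failure, retry up to 3 times, 30s apart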
- def describe(self, done=False):
- if done:
- return self.descriptionDone
- return self.description
-
- def computeSourceRevision(self, changes):
- """Each subclass must implement this method to do something more
- precise than -rHEAD every time. For version control systems that use
- repository-wide change numbers (SVN, P4), this can simply take the
- maximum such number from all the changes involved in this build. For
- systems that do not (CVS), it needs to create a timestamp based upon
- the latest Change, the Build's treeStableTimer, and an optional
- self.checkoutDelay value."""
- return None
-
- def start(self):
- if self.notReally:
- log.msg("faking %s checkout/update" % self.name)
- self.step_status.setColor("green")
- self.step_status.setText(["fake", self.name, "successful"])
- self.addCompleteLog("log",
- "Faked %s checkout/update 'successful'\n" \
- % self.name)
- return SKIPPED
-
- # what source stamp would this build like to use?
- s = self.build.getSourceStamp()
- # if branch is None, then use the Step's "default" branch
- branch = s.branch or self.branch
- # if revision is None, use the latest sources (-rHEAD)
- revision = s.revision
- if not revision and not self.alwaysUseLatest:
- revision = self.computeSourceRevision(s.changes)
- # if patch is None, then do not patch the tree after checkout
-
- # 'patch' is None or a tuple of (patchlevel, diff)
- patch = s.patch
-
- self.startVC(branch, revision, patch)
-
- def commandComplete(self, cmd):
- got_revision = None
- if cmd.updates.has_key("got_revision"):
- got_revision = cmd.updates["got_revision"][-1]
- self.setProperty("got_revision", got_revision)
-
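# Editorial sketch (not part of the original diff): the minimum a new
# version-control subclass has to provide, per the Source docstring above --
# override __init__, implement computeSourceRevision() and startVC(). The
# "fossil-example" command name is purely an illustrative assumption; no such
# slave-side command is registered in this tree.

from buildbot.process.step import Source, LoggedRemoteCommand

class FossilExample(Source):
    name = "fossil-example"

    def __init__(self, repourl, **kwargs):
        self.branch = None
        Source.__init__(self, **kwargs)
        self.args['repourl'] = repourl

    def computeSourceRevision(self, changes):
        # repository-wide revision numbers: use the highest one in the build
        if not changes:
            return None
        return max([int(c.revision) for c in changes])

    def startVC(self, branch, revision, patch):
        self.args['branch'] = branch
        self.args['revision'] = revision
        self.args['patch'] = patch
        cmd = LoggedRemoteCommand("fossil-example", self.args)
        self.startCommand(cmd)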
-
-
-class CVS(Source):
- """I do CVS checkout/update operations.
-
- Note: if you are doing anonymous/pserver CVS operations, you will need
- to manually do a 'cvs login' on each buildslave before the slave has any
- hope of success. XXX: fix this: take a cvs password as an argument and
- figure out how to do a 'cvs login' on each build
- """
-
- name = "cvs"
-
- #progressMetrics = ['output']
- #
- # additional things to track: update gives one stderr line per directory
- # (starting with 'cvs server: Updating ') (and is fairly stable if files
- # is empty), export gives one line per directory (starting with 'cvs
- # export: Updating ') and another line per file (starting with U). Would
- # be nice to track these, requires grepping LogFile data for lines,
- # parsing each line. Might be handy to have a hook in LogFile that gets
- # called with each complete line.
-
- def __init__(self, cvsroot, cvsmodule, slavedir, filename="buildbotget.pl",
- global_options=[], branch=None, checkoutDelay=None,
- login=None,
- clobber=0, export=0, copydir=None,
- **kwargs):
-
- """
- @type cvsroot: string
- @param cvsroot: CVS Repository from which the source tree should
- be obtained. '/home/warner/Repository' for local
- or NFS-reachable repositories,
- ':pserver:anon@foo.com:/cvs' for anonymous CVS,
- 'user@host.com:/cvs' for non-anonymous CVS or
- CVS over ssh. Lots of possibilities, check the
- CVS documentation for more.
-
- @type cvsmodule: string
- @param cvsmodule: subdirectory of CVS repository that should be
- retrieved
-
- @type login: string or None
- @param login: if not None, a string which will be provided as a
- password to the 'cvs login' command, used when a
- :pserver: method is used to access the repository.
- This login is only needed once, but must be run
- each time (just before the CVS operation) because
- there is no way for the buildslave to tell whether
- it was previously performed or not.
-
- @type branch: string
- @param branch: the default branch name, will be used in a '-r'
- argument to specify which branch of the source tree
- should be used for this checkout. Defaults to None,
- which means to use 'HEAD'.
-
- @type checkoutDelay: int or None
- @param checkoutDelay: if not None, the number of seconds to put
- between the last known Change and the
- timestamp given to the -D argument. This
- defaults to exactly half of the parent
- Build's .treeStableTimer, but it could be
- set to something else if your CVS change
- notification has particularly weird
- latency characteristics.
-
- @type global_options: list of strings
- @param global_options: these arguments are inserted in the cvs
- command line, before the
- 'checkout'/'update' command word. See
- 'cvs --help-options' for a list of what
- may be accepted here. ['-r'] will make
- the checked out files read only. ['-r',
- '-R'] will also assume the repository is
- read-only (I assume this means it won't
- use locks to ensure atomic access to the
- ,v files)."""
-
- self.checkoutDelay = checkoutDelay
- self.branch = branch
- self.workdir = kwargs['workdir']
- self.slavedir = slavedir
- self.filename = filename
-
- if not kwargs.has_key('mode') and (clobber or export or copydir):
- # deal with old configs
- warnings.warn("Please use mode=, not clobber/export/copydir",
- DeprecationWarning)
- if export:
- kwargs['mode'] = "export"
- elif clobber:
- kwargs['mode'] = "clobber"
- elif copydir:
- kwargs['mode'] = "copy"
- else:
- kwargs['mode'] = "update"
-
- Source.__init__(self, **kwargs)
-
- self.args.update({'cvsroot': cvsroot,
- 'cvsmodule': cvsmodule,
- 'filename':filename,
- 'slavedir':slavedir,
- 'global_options': global_options,
- 'login': login,
- })
-
- def computeSourceRevision(self, changes):
- if not changes:
- return None
- lastChange = max([c.when for c in changes])
- if self.checkoutDelay is not None:
- when = lastChange + self.checkoutDelay
- else:
- lastSubmit = max([r.submittedAt for r in self.build.requests])
- when = (lastChange + lastSubmit) / 2
- return formatdate(when)
-
- def startVC(self, branch, revision, patch):
- #if self.slaveVersionIsOlderThan("cvs", "1.39"):
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- # if (branch != self.branch
- # and self.args['mode'] in ("update", "copy")):
- # m = ("This buildslave (%s) does not know about multiple "
- # "branches, and using mode=%s would probably build the "
- # "wrong tree. "
- # "Refusing to build. Please upgrade the buildslave to "
- # "buildbot-0.7.0 or newer." % (self.build.slavename,
- # self.args['mode']))
- # log.msg(m)
- # raise BuildSlaveTooOldError(m)
-
- if branch is None:
- branch = "HEAD"
- self.args['branch'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- if self.args['branch'] == "HEAD" and self.args['revision']:
- # special case. 'cvs update -r HEAD -D today' gives no files
- # TODO: figure out why, see if it applies to -r BRANCH
- self.args['branch'] = None
-
- # deal with old slaves
- warnings = []
- slavever = self.slaveVersion("cvs", "old")
-
- if slavever == "old":
- # 0.5.0
- if self.args['mode'] == "export":
- self.args['export'] = 1
- elif self.args['mode'] == "clobber":
- self.args['clobber'] = 1
- elif self.args['mode'] == "copy":
- self.args['copydir'] = "source"
- self.args['tag'] = self.args['branch']
- assert not self.args['patch'] # 0.5.0 slave can't do patch
-
- #cmd = LoggedRemoteCommand("cvs", self.args)
- self.args['command'] = "./" + self.args['filename'] + " " + self.args['branch'] + " " + self.args['workdir'] + " " + self.args['slavedir'] + " "+"up"
- cmd = LoggedRemoteCommand("shell", self.args)
- self.startCommand(cmd, warnings)
-
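# Editorial sketch (not part of the original diff): a hypothetical master.cfg
# fragment for the (locally patched) CVS step above, which additionally takes
# slavedir= and filename= and drives a helper script on the slave. The
# repository, module, and paths are illustrative assumptions.

from buildbot.process.factory import s
from buildbot.process.step import CVS

checkout = s(CVS,
             cvsroot=":pserver:anon@cvs.example.org:/cvs",  # assumed repository
             cvsmodule="myproject",                         # assumed module
             slavedir="/home/buildslave/slave",             # assumed slave path
             filename="buildbotget.pl",   # helper script invoked on the slave
             login="anonymous",           # password handed to 'cvs login'
             mode="update",
             workdir="build")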
-
-class SVN(Source):
- """I perform Subversion checkout/update operations."""
-
- name = 'svn'
-
- def __init__(self, svnurl=None, baseURL=None, defaultBranch=None,
- directory=None, **kwargs):
- """
- @type svnurl: string
- @param svnurl: the URL which points to the Subversion server,
- combining the access method (HTTP, ssh, local file),
- the repository host/port, the repository path, the
- sub-tree within the repository, and the branch to
- check out. Using C{svnurl} does not enable builds of
- alternate branches: use C{baseURL} to enable this.
- Use exactly one of C{svnurl} and C{baseURL}.
-
- @param baseURL: if branches are enabled, this is the base URL to
- which a branch name will be appended. It should
- probably end in a slash. Use exactly one of
- C{svnurl} and C{baseURL}.
-
- @param defaultBranch: if branches are enabled, this is the branch
- to use if the Build does not specify one
- explicitly. It will simply be appended
- to C{baseURL} and the result handed to
- the SVN command.
- """
-
- if not kwargs.has_key('workdir') and directory is not None:
- # deal with old configs
- warnings.warn("Please use workdir=, not directory=",
- DeprecationWarning)
- kwargs['workdir'] = directory
-
- self.svnurl = svnurl
- self.baseURL = baseURL
- self.branch = defaultBranch
-
- Source.__init__(self, **kwargs)
-
- if not svnurl and not baseURL:
- raise ValueError("you must use exactly one of svnurl and baseURL")
-
-
- def computeSourceRevision(self, changes):
- if not changes:
- return None
- lastChange = max([int(c.revision) for c in changes])
- return lastChange
-
- def startVC(self, branch, revision, patch):
-
- # handle old slaves
- warnings = []
- slavever = self.slaveVersion("svn", "old")
- if not slavever:
- m = "slave does not have the 'svn' command"
- raise BuildSlaveTooOldError(m)
-
- if self.slaveVersionIsOlderThan("svn", "1.39"):
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- if (branch != self.branch
- and self.args['mode'] in ("update", "copy")):
- m = ("This buildslave (%s) does not know about multiple "
- "branches, and using mode=%s would probably build the "
- "wrong tree. "
- "Refusing to build. Please upgrade the buildslave to "
- "buildbot-0.7.0 or newer." % (self.build.slavename,
- self.args['mode']))
- raise BuildSlaveTooOldError(m)
-
- if slavever == "old":
- # 0.5.0 compatibility
- if self.args['mode'] in ("clobber", "copy"):
- # TODO: use some shell commands to make up for the
- # deficiency, by blowing away the old directory first (thus
- # forcing a full checkout)
- warnings.append("WARNING: this slave can only do SVN updates"
- ", not mode=%s\n" % self.args['mode'])
- log.msg("WARNING: this slave only does mode=update")
- if self.args['mode'] == "export":
- raise BuildSlaveTooOldError("old slave does not have "
- "mode=export")
- self.args['directory'] = self.args['workdir']
- if revision is not None:
- # 0.5.0 can only do HEAD. We have no way of knowing whether
- # the requested revision is HEAD or not, and for
- # slowly-changing trees this will probably do the right
- # thing, so let it pass with a warning
- m = ("WARNING: old slave can only update to HEAD, not "
- "revision=%s" % revision)
- log.msg(m)
- warnings.append(m + "\n")
- revision = "HEAD" # interprets this key differently
- if patch:
- raise BuildSlaveTooOldError("old slave can't do patch")
-
- if self.svnurl:
- assert not branch # we need baseURL= to use branches
- self.args['svnurl'] = self.svnurl
- else:
- self.args['svnurl'] = self.baseURL + branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- if revision is not None:
- revstuff.append("r%s" % revision)
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("svn", self.args)
- self.startCommand(cmd, warnings)
-
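# Editorial sketch (not part of the original diff): the branch-aware form of
# the SVN step uses baseURL= plus defaultBranch= instead of svnurl=, as the
# docstring above explains. The repository URL is an illustrative assumption.

from buildbot.process.factory import s
from buildbot.process.step import SVN

checkout = s(SVN,
             baseURL="http://svn.example.org/repo/",  # branch name gets appended
             defaultBranch="trunk",        # used when the build names no branch
             workdir="build",
             mode="update")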
-
-class Darcs(Source):
- """Check out a source tree from a Darcs repository at 'repourl'.
-
- To the best of my knowledge, Darcs has no concept of file modes. This
- means the eXecute-bit will be cleared on all source files. As a result,
- you may need to invoke configuration scripts with something like:
-
- C{s(step.Configure, command=['/bin/sh', './configure'])}
- """
-
- name = "darcs"
-
- def __init__(self, repourl=None, baseURL=None, defaultBranch=None,
- **kwargs):
- """
- @type repourl: string
- @param repourl: the URL which points at the Darcs repository. This
- is used as the default branch. Using C{repourl} does
- not enable builds of alternate branches: use
- C{baseURL} to enable this. Use either C{repourl} or
- C{baseURL}, not both.
-
- @param baseURL: if branches are enabled, this is the base URL to
- which a branch name will be appended. It should
- probably end in a slash. Use exactly one of
- C{repourl} and C{baseURL}.
-
- @param defaultBranch: if branches are enabled, this is the branch
- to use if the Build does not specify one
- explicitly. It will simply be appended to
- C{baseURL} and the result handed to the
- 'darcs pull' command.
- """
- self.repourl = repourl
- self.baseURL = baseURL
- self.branch = defaultBranch
- Source.__init__(self, **kwargs)
- assert kwargs['mode'] != "export", \
- "Darcs does not have an 'export' mode"
- if (not repourl and not baseURL) or (repourl and baseURL):
- raise ValueError("you must provide exactly one of repourl and"
- " baseURL")
-
- def startVC(self, branch, revision, patch):
- slavever = self.slaveVersion("darcs")
- if not slavever:
- m = "slave is too old, does not know about darcs"
- raise BuildSlaveTooOldError(m)
-
- if self.slaveVersionIsOlderThan("darcs", "1.39"):
- if revision:
- # TODO: revisit this once we implement computeSourceRevision
- m = "0.6.6 slaves can't handle args['revision']"
- raise BuildSlaveTooOldError(m)
-
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- if (branch != self.branch
- and self.args['mode'] in ("update", "copy")):
- m = ("This buildslave (%s) does not know about multiple "
- "branches, and using mode=%s would probably build the "
- "wrong tree. "
- "Refusing to build. Please upgrade the buildslave to "
- "buildbot-0.7.0 or newer." % (self.build.slavename,
- self.args['mode']))
- raise BuildSlaveTooOldError(m)
-
- if self.repourl:
- assert not branch # we need baseURL= to use branches
- self.args['repourl'] = self.repourl
- else:
- self.args['repourl'] = self.baseURL + branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("darcs", self.args)
- self.startCommand(cmd)
-
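# Editorial sketch (not part of the original diff): a hypothetical master.cfg
# fragment for the Darcs step, including the workaround the docstring above
# suggests for the lost eXecute bit on ./configure. The repository URL is an
# illustrative assumption.

from buildbot.process.factory import s
from buildbot.process.step import Darcs, Configure

steps = [
    s(Darcs,
      repourl="http://darcs.example.org/myproject",  # assumed repository
      workdir="build",
      mode="copy"),
    s(Configure, command=["/bin/sh", "./configure"]),
]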
-
-class Git(Source):
- """Check out a source tree from a git repository 'repourl'."""
-
- name = "git"
-
- def __init__(self, repourl, **kwargs):
- """
- @type repourl: string
- @param repourl: the URL which points at the git repository
- """
- self.branch = None # TODO
- Source.__init__(self, **kwargs)
- self.args['repourl'] = repourl
-
- def startVC(self, branch, revision, patch):
- self.args['branch'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
- slavever = self.slaveVersion("git")
- if not slavever:
- raise BuildSlaveTooOldError("slave is too old, does not know "
- "about git")
- cmd = LoggedRemoteCommand("git", self.args)
- self.startCommand(cmd)
-
-
-class Arch(Source):
- """Check out a source tree from an Arch repository named 'archive'
- available at 'url'. 'version' specifies which version number (development
- line) will be used for the checkout: this is mostly equivalent to a
- branch name. This version uses the 'tla' tool to do the checkout; to use
- 'baz', see L{Bazaar} instead.
- """
-
- name = "arch"
- # TODO: slaves >0.6.6 will accept args['build-config'], so use it
-
- def __init__(self, url, version, archive=None, **kwargs):
- """
- @type url: string
- @param url: the Arch coordinates of the repository. This is
- typically an http:// URL, but could also be the absolute
- pathname of a local directory instead.
-
- @type version: string
- @param version: the category--branch--version to check out. This is
- the default branch. If a build specifies a different
- branch, it will be used instead of this.
-
- @type archive: string
- @param archive: The archive name. If provided, it must match the one
- that comes from the repository. If not, the
- repository's default will be used.
- """
- self.branch = version
- Source.__init__(self, **kwargs)
- self.args.update({'url': url,
- 'archive': archive,
- })
-
- def computeSourceRevision(self, changes):
- # in Arch, fully-qualified revision numbers look like:
- # arch@buildbot.sourceforge.net--2004/buildbot--dev--0--patch-104
- # For any given builder, all of this is fixed except the patch-104.
- # The Change might have any part of the fully-qualified string, so we
- # just look for the last part. We return the "patch-NN" string.
- if not changes:
- return None
- lastChange = None
- for c in changes:
- if not c.revision:
- continue
- if c.revision.endswith("--base-0"):
- rev = 0
- else:
- i = c.revision.rindex("patch")
- rev = int(c.revision[i+len("patch-"):])
- lastChange = max(lastChange, rev)
- if lastChange is None:
- return None
- if lastChange == 0:
- return "base-0"
- return "patch-%d" % lastChange
-
- def checkSlaveVersion(self, cmd, branch):
- warnings = []
- slavever = self.slaveVersion(cmd)
- if not slavever:
- m = "slave is too old, does not know about %s" % cmd
- raise BuildSlaveTooOldError(m)
-
- # slave 1.28 and later understand 'revision'
- if self.slaveVersionIsOlderThan(cmd, "1.28"):
- if not self.alwaysUseLatest:
- # we don't know whether our requested revision is the latest
- # or not. If the tree does not change very quickly, this will
- # probably build the right thing, so emit a warning rather
- # than refuse to build at all
- m = "WARNING, buildslave is too old to use a revision"
- log.msg(m)
- warnings.append(m + "\n")
-
- if self.slaveVersionIsOlderThan(cmd, "1.39"):
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- if (branch != self.branch
- and self.args['mode'] in ("update", "copy")):
- m = ("This buildslave (%s) does not know about multiple "
- "branches, and using mode=%s would probably build the "
- "wrong tree. "
- "Refusing to build. Please upgrade the buildslave to "
- "buildbot-0.7.0 or newer." % (self.build.slavename,
- self.args['mode']))
- log.msg(m)
- raise BuildSlaveTooOldError(m)
-
- return warnings
-
- def startVC(self, branch, revision, patch):
- self.args['version'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
- warnings = self.checkSlaveVersion("arch", branch)
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- if revision is not None:
- revstuff.append("patch%s" % revision)
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("arch", self.args)
- self.startCommand(cmd, warnings)
-
-
-class Bazaar(Arch):
- """Bazaar is an alternative client for Arch repositories. baz is mostly
- compatible with tla, but archive registration is slightly different."""
-
- # TODO: slaves >0.6.6 will accept args['build-config'], so use it
-
- def __init__(self, url, version, archive, **kwargs):
- """
- @type url: string
- @param url: the Arch coordinates of the repository. This is
- typically an http:// URL, but could also be the absolute
- pathname of a local directory instead.
-
- @type version: string
- @param version: the category--branch--version to check out
-
- @type archive: string
- @param archive: The archive name (required). This must always match
- the one that comes from the repository, otherwise the
- buildslave will attempt to get sources from the wrong
- archive.
- """
- self.branch = version
- Source.__init__(self, **kwargs)
- self.args.update({'url': url,
- 'archive': archive,
- })
-
- def startVC(self, branch, revision, patch):
- self.args['version'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
- warnings = self.checkSlaveVersion("bazaar", branch)
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- if revision is not None:
- revstuff.append("patch%s" % revision)
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("bazaar", self.args)
- self.startCommand(cmd, warnings)
-
-class Mercurial(Source):
- """Check out a source tree from a mercurial repository 'repourl'."""
-
- name = "hg"
-
- def __init__(self, repourl=None, baseURL=None, defaultBranch=None,
- **kwargs):
- """
- @type repourl: string
- @param repourl: the URL which points at the Mercurial repository.
- This is used as the default branch. Using C{repourl}
- does not enable builds of alternate branches: use
- C{baseURL} to enable this. Use either C{repourl} or
- C{baseURL}, not both.
-
- @param baseURL: if branches are enabled, this is the base URL to
- which a branch name will be appended. It should
- probably end in a slash. Use exactly one of
- C{repourl} and C{baseURL}.
-
- @param defaultBranch: if branches are enabled, this is the branch
- to use if the Build does not specify one
- explicitly. It will simply be appended to
- C{baseURL} and the result handed to the
- 'hg clone' command.
- """
- self.repourl = repourl
- self.baseURL = baseURL
- self.branch = defaultBranch
- Source.__init__(self, **kwargs)
- if (not repourl and not baseURL) or (repourl and baseURL):
- raise ValueError("you must provide exactly one of repourl and"
- " baseURL")
-
- def startVC(self, branch, revision, patch):
- slavever = self.slaveVersion("hg")
- if not slavever:
- raise BuildSlaveTooOldError("slave is too old, does not know "
- "about hg")
-
- if self.repourl:
- assert not branch # we need baseURL= to use branches
- self.args['repourl'] = self.repourl
- else:
- self.args['repourl'] = self.baseURL + branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("hg", self.args)
- self.startCommand(cmd)
-
-
-class todo_P4(Source):
- name = "p4"
-
- # to create the working directory for the first time:
- # need to create a View. The 'Root' parameter will have to be filled
- # in by the buildslave with the abspath of the basedir. Then the
- # setup process involves 'p4 client' to set up the view. After
- # that, 'p4 sync' does all the necessary updating.
- # P4PORT=P4PORT P4CLIENT=name p4 client
-
- def __init__(self, p4port, view, **kwargs):
- Source.__init__(self, **kwargs)
- self.args.update({'p4port': p4port,
- 'view': view,
- })
-
- def startVC(self, branch, revision, patch):
- cmd = LoggedRemoteCommand("p4", self.args)
- self.startCommand(cmd)
-
-class P4Sync(Source):
- """This is a partial solution for using a P4 source repository. You are
- required to manually set up each build slave with a useful P4
- environment, which means setting various per-slave environment variables,
- and creating a P4 client specification which maps the right files into
- the slave's working directory. Once you have done that, this step merely
- performs a 'p4 sync' to update that workspace with the newest files.
-
- Each slave needs the following environment:
-
- - PATH: the 'p4' binary must be on the slave's PATH
- - P4USER: each slave needs a distinct user account
- - P4CLIENT: each slave needs a distinct client specification
-
- You should use 'p4 client' (?) to set up a client view spec which maps
- the desired files into $SLAVEBASE/$BUILDERBASE/source .
- """
-
- name = "p4sync"
-
- def __init__(self, p4port, p4user, p4passwd, p4client, **kwargs):
- assert kwargs['mode'] == "copy", "P4Sync can only be used in mode=copy"
- self.branch = None
- Source.__init__(self, **kwargs)
- self.args['p4port'] = p4port
- self.args['p4user'] = p4user
- self.args['p4passwd'] = p4passwd
- self.args['p4client'] = p4client
-
- def computeSourceRevision(self, changes):
- if not changes:
- return None
- lastChange = max([int(c.revision) for c in changes])
- return lastChange
-
- def startVC(self, branch, revision, patch):
- slavever = self.slaveVersion("p4sync")
- assert slavever, "slave is too old, does not know about p4"
- cmd = LoggedRemoteCommand("p4sync", self.args)
- self.startCommand(cmd)
-
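# Editorial sketch (not part of the original diff): P4Sync assumes each slave
# already has a hand-configured Perforce client (PATH, P4USER, P4CLIENT), as
# the docstring above explains, and only runs 'p4 sync'. The connection
# details are illustrative assumptions; note that mode="copy" is required.

from buildbot.process.factory import s
from buildbot.process.step import P4Sync

sync = s(P4Sync,
         p4port="perforce.example.org:1666",  # assumed P4PORT
         p4user="buildslave",
         p4passwd="secret",                   # assumed; keep real values private
         p4client="slave1-client",
         mode="copy",
         workdir="build")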
-
-class Dummy(BuildStep):
- """I am a dummy no-op step, which runs entirely on the master, and simply
- waits 5 seconds before finishing with SUCCESS
- """
-
- haltOnFailure = True
- name = "dummy"
-
- def __init__(self, timeout=5, **kwargs):
- """
- @type timeout: int
- @param timeout: the number of seconds to delay before completing
- """
- BuildStep.__init__(self, **kwargs)
- self.timeout = timeout
- self.timer = None
-
- def start(self):
- self.step_status.setColor("yellow")
- self.step_status.setText(["delay", "%s secs" % self.timeout])
- self.timer = reactor.callLater(self.timeout, self.done)
-
- def interrupt(self, reason):
- if self.timer:
- self.timer.cancel()
- self.timer = None
- self.step_status.setColor("red")
- self.step_status.setText(["delay", "interrupted"])
- self.finished(FAILURE)
-
- def done(self):
- self.step_status.setColor("green")
- self.finished(SUCCESS)
-
-class FailingDummy(Dummy):
- """I am a dummy no-op step that 'runs' master-side and finishes (with a
- FAILURE status) after 5 seconds."""
-
- name = "failing dummy"
-
- def start(self):
- self.step_status.setColor("yellow")
- self.step_status.setText(["boom", "%s secs" % self.timeout])
- self.timer = reactor.callLater(self.timeout, self.done)
-
- def done(self):
- self.step_status.setColor("red")
- self.finished(FAILURE)
-
-class RemoteDummy(LoggingBuildStep):
- """I am a dummy no-op step that runs on the remote side and
- simply waits 5 seconds before completing with success.
- See L{buildbot.slave.commands.DummyCommand}
- """
-
- haltOnFailure = True
- name = "remote dummy"
-
- def __init__(self, timeout=5, **kwargs):
- """
- @type timeout: int
- @param timeout: the number of seconds to delay
- """
- LoggingBuildStep.__init__(self, **kwargs)
- self.timeout = timeout
- self.description = ["remote", "delay", "%s secs" % timeout]
-
- def describe(self, done=False):
- return self.description
-
- def start(self):
- args = {'timeout': self.timeout}
- cmd = LoggedRemoteCommand("dummy", args)
- self.startCommand(cmd)
-
-class Configure(ShellCommand):
-
- name = "configure"
- haltOnFailure = 1
- description = ["configuring"]
- descriptionDone = ["configure"]
- command = ["./configure"]
-
-class OOConfigure(ShellCommand):
-
- name = "configure"
- haltOnFailure = 1
- description = ["configuring"]
- descriptionDone = ["configure"]
- command = ["./configure"]
- config = None
-
- def __init__(self, config, **kwargs):
- self.config = config
- ShellCommand.__init__(self, **kwargs)
-
- def start(self):
- command = self._interpolateProperties(self.command)
- config = self.build.config + " " + self.config
- # create the actual RemoteShellCommand instance now
- kwargs = self.remote_kwargs
- kwargs['command'] = command + " " + config
- cmd = RemoteShellCommand(**kwargs)
- self.setupEnvironment(cmd)
- self.startCommand(cmd)
-
-
-class OOBootstrap(TCSHShellCommand):
-
- name = "bootstrap"
- haltOnFailure = 1
- description = ["bootstraping"]
- descriptionDone = ["bootstrap"]
- command = ["./bootstrap"]
-
-class OOEnvSet(TCSHShellCommand):
-
- name = "source"
- haltOnFailure = 1
- description = ["environment_setting"]
- descriptionDone = ["environment_set"]
- command = ["source"]
-
-class OORehash(TCSHShellCommand):
-
- name = "rehash"
- haltOnFailure = 1
- description = ["rehashing"]
- descriptionDone = ["rehash"]
- command = ["rehash"]
-
-
-
-class OOCompile(ShellCommand):
-
- name = "compile"
- haltOnFailure = 1
- description = ["compiling"]
- descriptionDone = ["compile"]
- command = ["dmake"]
-
- OFFprogressMetrics = ['output']
- # things to track: number of files compiled, number of directories
- # traversed (assuming 'make' is being used)
-
- #def createSummary(self, cmd):
- # command = "grep warning: " + log.getText()
- # self.addCompleteLog('warnings',os.popen(command).read())
- def createSummary(self, log):
- # TODO: grep for the characteristic GCC warning/error lines and
- # assemble them into a pair of buffers
- try:
- logFileName = self.step_status.logs[0].getFilename()
- print '%s' %logFileName
-
- command = "./create_logs.pl " + logFileName
- result = os.popen(command).read()
-
- summary_log_file_name = logFileName + "_brief.html"
- summary_log_file = open(summary_log_file_name)
- self.addHTMLLog('summary log', summary_log_file.read())
-
- command = "grep warning: "+ logFileName
- warnings = os.popen(command).read()
-
- command = "grep error: "+ logFileName
- errors = os.popen(command).read()
-
- command = "tail -50 "+logFileName
- tail = os.popen(command).read()
-
- if warnings != "" :
- self.addCompleteLog('warnings',warnings)
-
- if errors != "":
- self.addCompleteLog('errors',errors)
-
- if tail != "":
- self.addCompleteLog('tail',tail)
-
- except:
- #log.msg("Exception: Cannot open logFile")
- print "cannot execute createSummary after OOCompile"
-
-
-class OOSmokeTest(ShellCommand):
-
- name = "smokeTest"
- #haltOnFailure = 1
- description = ["smoke_testing"]
- descriptionDone = ["Smoke Test"]
- command = ["build"]
-
-class OOInstallSet(ShellCommand):
-
- name = "Install_Set"
- #haltOnFailure = 1
- description = ["generating install set"]
- descriptionDone = ["install set"]
- command = ["echo"]
-
- def start(self):
- buildstatus = self.build.build_status
- installset_filename = buildstatus.getBuilder().getName() +"_build" + `buildstatus.getNumber()` + "_installset.tar.gz"
- installset_filename = installset_filename.replace(" ","_")
- branch, revision, patch = buildstatus.getSourceStamp()
- #command = "cd instsetoo_native && find -wholename '*/OpenOffice/*install*/*download' -exec tar -zcvf "+ installset_filename +" {} \; && ../../../dav2 --dir=" + branch + " --file="+ installset_filename +" --user=" + self.user + " --pass=" + self.password
-
- command = "cd instsetoo_native && find -path '*/OpenOffice/*install*/*download' -exec tar -zcvf "+ installset_filename +" {} \; && scp "+ installset_filename + " buildmaster@ooo-staging.osuosl.org:/home/buildmaster/buildmaster/installsets/"
-
-
- kwargs = self.remote_kwargs
- kwargs['command'] = command
- cmd = RemoteShellCommand(timeout=120*60, **kwargs)
- self.setupEnvironment(cmd)
- self.startCommand(cmd)
-
-
- def createSummary(self, log):
- buildstatus = self.build.build_status
- installset_filename = buildstatus.getBuilder().getName() +"_build" + `buildstatus.getNumber()` + "_installset.tar.gz"
- installset_filename = installset_filename.replace(" ","_")
- #branch, revision, patch = buildstatus.getSourceStamp()
- #url = "http://ooo-staging.osuosl.org/DAV/" +branch+ "/" + installset_filename
- result = "To download installset click <a href='"+installset_filename+"'> here </a>"
- #if buildstatus.getResults() == builder.SUCCESS:
- #if log.getText().find("exit code 0") != -1:
- self.addHTMLLog('download', result)
-
-
-class Compile(ShellCommand):
-
- name = "compile"
- haltOnFailure = 1
- description = ["compiling"]
- descriptionDone = ["compile"]
- command = ["make", "all"]
-
- OFFprogressMetrics = ['output']
- # things to track: number of files compiled, number of directories
- # traversed (assuming 'make' is being used)
-
- def createSummary(self, cmd):
- # TODO: grep for the characteristic GCC warning/error lines and
- # assemble them into a pair of buffers
- pass
-
-class Test(ShellCommand):
-
- name = "test"
- warnOnFailure = 1
- description = ["testing"]
- descriptionDone = ["test"]
- command = ["make", "test"]
diff --git a/buildbot/buildbot-source/buildbot/process/step.py.bak b/buildbot/buildbot-source/buildbot/process/step.py.bak
deleted file mode 100644
index 090c7f58b..000000000
--- a/buildbot/buildbot-source/buildbot/process/step.py.bak
+++ /dev/null
@@ -1,1983 +0,0 @@
-# -*- test-case-name: buildbot.test.test_steps -*-
-
-import time, random, types, re, warnings
-from email.Utils import formatdate
-
-from twisted.internet import reactor, defer, error
-from twisted.spread import pb
-from twisted.python import log
-from twisted.python.failure import Failure
-from twisted.web.util import formatFailure
-
-from buildbot.interfaces import BuildSlaveTooOldError
-from buildbot.util import now
-from buildbot.status import progress, builder
-from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, SKIPPED, \
- EXCEPTION
-
-"""
-BuildStep and RemoteCommand classes for master-side representation of the
-build process
-"""
-
-class RemoteCommand(pb.Referenceable):
- """
- I represent a single command to be run on the slave. I handle the details
- of reliably gathering status updates from the slave (acknowledging each),
- and (eventually, in a future release) recovering from interrupted builds.
- This is the master-side object that is known to the slave-side
- L{buildbot.slave.bot.SlaveBuilder}, to which status updates are sent.
-
- My command should be started by calling .run(), which returns a
- Deferred that will fire when the command has finished, or will
- errback if an exception is raised.
-
- Typically __init__ or run() will set up self.remote_command to be a
- string which corresponds to one of the SlaveCommands registered in
- the buildslave, and self.args to a dictionary of arguments that will
- be passed to the SlaveCommand instance.
-
- start, remoteUpdate, and remoteComplete are available to be overridden
-
- @type commandCounter: list of one int
- @cvar commandCounter: provides a unique value for each
- RemoteCommand executed across all slaves
- @type active: boolean
- @cvar active: whether the command is currently running
- """
- commandCounter = [0] # we use a list as a poor man's singleton
- active = False
-
- def __init__(self, remote_command, args):
- """
- @type remote_command: string
- @param remote_command: remote command to start. This will be
- passed to
- L{buildbot.slave.bot.SlaveBuilder.remote_startCommand}
- and needs to have been registered
- slave-side by
- L{buildbot.slave.registry.registerSlaveCommand}
- @type args: dict
- @param args: arguments to send to the remote command
- """
-
- self.remote_command = remote_command
- self.args = args
-
- def __getstate__(self):
- dict = self.__dict__.copy()
- # Remove the remote ref; if necessary (only for resumed builds), it
- # will be reattached at resume time
- if dict.has_key("remote"):
- del dict["remote"]
- return dict
-
- def run(self, step, remote):
- self.active = True
- self.step = step
- self.remote = remote
- c = self.commandCounter[0]
- self.commandCounter[0] += 1
- #self.commandID = "%d %d" % (c, random.randint(0, 1000000))
- self.commandID = "%d" % c
- log.msg("%s: RemoteCommand.run [%s]" % (self, self.commandID))
- self.deferred = defer.Deferred()
-
- d = defer.maybeDeferred(self.start)
-
- # _finished is called with an error for unknown commands, errors
- # that occur while the command is starting (including OSErrors in
- # exec()), StaleBroker (when the connection was lost before we
- # started), and pb.PBConnectionLost (when the slave isn't responding
- # over this connection, perhaps it had a power failure, or NAT
- # weirdness). If this happens, self.deferred is fired right away.
- d.addErrback(self._finished)
-
- # Connections which are lost while the command is running are caught
- # when our parent Step calls our .lostRemote() method.
- return self.deferred
-
- def start(self):
- """
- Tell the slave to start executing the remote command.
-
- @rtype: L{twisted.internet.defer.Deferred}
- @returns: a deferred that will fire when the remote command is
- done (with None as the result)
- """
- # This method only initiates the remote command.
- # We will receive remote_update messages as the command runs.
- # We will get a single remote_complete when it finishes.
- # We should fire self.deferred when the command is done.
- d = self.remote.callRemote("startCommand", self, self.commandID,
- self.remote_command, self.args)
- return d
-
- def interrupt(self, why):
- # TODO: consider separating this into interrupt() and stop(), where
- # stop() unconditionally calls _finished, but interrupt() merely
- # asks politely for the command to stop soon.
-
- log.msg("RemoteCommand.interrupt", self, why)
- if not self.active:
- log.msg(" but this RemoteCommand is already inactive")
- return
- if not self.remote:
- log.msg(" but our .remote went away")
- return
- if isinstance(why, Failure) and why.check(error.ConnectionLost):
- log.msg("RemoteCommand.disconnect: lost slave")
- self.remote = None
- self._finished(why)
- return
-
- # tell the remote command to halt. Returns a Deferred that will fire
- # when the interrupt command has been delivered.
-
- d = defer.maybeDeferred(self.remote.callRemote, "interruptCommand",
- self.commandID, str(why))
- # the slave may not have remote_interruptCommand
- d.addErrback(self._interruptFailed)
- return d
-
- def _interruptFailed(self, why):
- log.msg("RemoteCommand._interruptFailed", self)
- # TODO: forcibly stop the Command now, since we can't stop it
- # cleanly
- return None
-
- def remote_update(self, updates):
- """
- I am called by the slave's L{buildbot.slave.bot.SlaveBuilder} so
- I can receive updates from the running remote command.
-
- @type updates: list of [object, int]
- @param updates: list of updates from the remote command
- """
- max_updatenum = 0
- for (update, num) in updates:
- #log.msg("update[%d]:" % num)
- try:
- if self.active: # ignore late updates
- self.remoteUpdate(update)
- except:
- # log failure, terminate build, let slave retire the update
- self._finished(Failure())
- # TODO: what if multiple updates arrive? should
- # skip the rest but ack them all
- if num > max_updatenum:
- max_updatenum = num
- return max_updatenum
-
- def remoteUpdate(self, update):
- raise NotImplementedError("You must implement this in a subclass")
-
- def remote_complete(self, failure=None):
- """
- Called by the slave's L{buildbot.slave.bot.SlaveBuilder} to
- notify me the remote command has finished.
-
- @type failure: L{twisted.python.failure.Failure} or None
-
- @rtype: None
- """
- # call the real remoteComplete a moment later, but first return an
- # acknowledgement so the slave can retire the completion message.
- if self.active:
- reactor.callLater(0, self._finished, failure)
- return None
-
- def _finished(self, failure=None):
- self.active = False
- # call .remoteComplete. If it raises an exception, or returns the
- # Failure that we gave it, our self.deferred will be errbacked. If
- it does not (either it ate the Failure or the step finished
- # normally and it didn't raise a new exception), self.deferred will
- # be callbacked.
- d = defer.maybeDeferred(self.remoteComplete, failure)
- # arrange for the callback to get this RemoteCommand instance
- # instead of just None
- d.addCallback(lambda r: self)
- # this fires the original deferred we returned from .run(),
- # with self as the result, or a failure
- d.addBoth(self.deferred.callback)
-
- def remoteComplete(self, maybeFailure):
- """Subclasses can override this.
-
- This is called when the RemoteCommand has finished. 'maybeFailure'
- will be None if the command completed normally, or a Failure
- instance in one of the following situations:
-
- - the slave was lost before the command was started
- - the slave didn't respond to the startCommand message
- - the slave raised an exception while starting the command
- (bad command name, bad args, OSError from missing executable)
- - the slave raised an exception while finishing the command
- (they send back a remote_complete message with a Failure payload)
-
- and also (for now):
- - slave disconnected while the command was running
-
- This method should do cleanup, like closing log files. It should
- normally return the 'failure' argument, so that any exceptions will
- be propagated to the Step. If it wants to consume them, return None
- instead."""
-
- return maybeFailure
-
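To make the lifecycle above concrete, here is a minimal, hypothetical RemoteCommand subclass. It assumes RemoteCommand.__init__ takes the slave-side command name and an args dictionary (as LoggedRemoteCommand's call further down suggests); the "dummy" command name and the "stdout" update key are placeholders for illustration, not something this file defines.

class CollectingCommand(RemoteCommand):
    """Sketch only: gather every stdout update the slave sends, then let
    any Failure propagate to the Step (see remoteComplete above)."""

    def __init__(self):
        # "dummy" and its args dict are illustrative placeholders
        RemoteCommand.__init__(self, "dummy", {"timeout": 5})
        self.chunks = []

    def remoteUpdate(self, update):
        # called once per update sent by the slave's SlaveBuilder
        if "stdout" in update:
            self.chunks.append(update["stdout"])

    def remoteComplete(self, maybeFailure):
        # return the Failure (if any) so the Step sees it; return None
        # here instead if the failure should be swallowed
        return maybeFailure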
-class LoggedRemoteCommand(RemoteCommand):
- """
- I am a L{RemoteCommand} which expects the slave to send back
- stdout/stderr/rc updates. I gather these updates into a
- L{buildbot.status.builder.LogFile} named C{self.log}. You can give me a
- LogFile to use by calling useLog(), or I will create my own when the
- command is started. Unless you tell me otherwise, I will close the log
- when the command is complete.
- """
-
- log = None
- closeWhenFinished = False
- rc = None
- debug = False
-
- def __repr__(self):
- return "<RemoteCommand '%s' at %d>" % (self.remote_command, id(self))
-
- def useLog(self, loog, closeWhenFinished=False):
- self.log = loog
- self.closeWhenFinished = closeWhenFinished
-
- def start(self):
- if self.log is None:
- # orphan LogFile, cannot be subscribed to
- self.log = builder.LogFile(None)
- self.closeWhenFinished = True
- self.updates = {}
- log.msg("LoggedRemoteCommand.start", self.log)
- return RemoteCommand.start(self)
-
- def addStdout(self, data):
- self.log.addStdout(data)
- def addStderr(self, data):
- self.log.addStderr(data)
- def addHeader(self, data):
- self.log.addHeader(data)
- def remoteUpdate(self, update):
- if self.debug:
- for k,v in update.items():
- log.msg("Update[%s]: %s" % (k,v))
- if update.has_key('stdout'):
- self.addStdout(update['stdout'])
- if update.has_key('stderr'):
- self.addStderr(update['stderr'])
- if update.has_key('header'):
- self.addHeader(update['header'])
- if update.has_key('rc'):
- rc = self.rc = update['rc']
- log.msg("%s rc=%s" % (self, rc))
- self.addHeader("program finished with exit code %d\n" % rc)
- for k in update:
- if k not in ('stdout', 'stderr', 'header', 'rc'):
- if k not in self.updates:
- self.updates[k] = []
- self.updates[k].append(update[k])
-
- def remoteComplete(self, maybeFailure):
- if self.closeWhenFinished:
- if maybeFailure:
- self.addHeader("\nremoteFailed: %s" % maybeFailure)
- else:
- log.msg("closing log")
- self.log.finish()
- return maybeFailure
-
-class RemoteShellCommand(LoggedRemoteCommand):
- """This class helps you run a shell command on the build slave. It will
- accumulate all the command's output into a Log. When the command is
- finished, it will fire a Deferred. You can then check the results of the
- command and parse the output however you like."""
-
- def __init__(self, workdir, command, env=None,
- want_stdout=1, want_stderr=1,
- timeout=20*60, **kwargs):
- """
- @type workdir: string
- @param workdir: directory where the command ought to run,
- relative to the Builder's home directory. Defaults to
- '.': the same as the Builder's homedir. This should
- probably be '.' for the initial 'cvs checkout'
- command (which creates a workdir), and the Build-wide
- workdir for all subsequent commands (including
- compiles and 'cvs update').
-
- @type command: list of strings (or string)
- @param command: the shell command to run, like 'make all' or
- 'cvs update'. This should be a list or tuple
- which can be used directly as the argv array.
- For backwards compatibility, if this is a
- string, the text will be given to '/bin/sh -c
- %s'.
-
- @type env: dict of string->string
- @param env: environment variables to add or change for the
- slave. Each command gets a separate
- environment; all inherit the slave's initial
- one. TODO: make it possible to delete some or
- all of the slave's environment.
-
- @type want_stdout: bool
- @param want_stdout: defaults to True. Set to False if stdout should
- be thrown away. Do this to avoid storing or
- sending large amounts of useless data.
-
- @type want_stderr: bool
- @param want_stderr: False if stderr should be thrown away
-
- @type timeout: int
- @param timeout: tell the remote that if the command fails to
- produce any output for this number of seconds,
- the command is hung and should be killed. Use
- None to disable the timeout.
- """
- self.command = command # stash .command, set it later
- if env is not None:
- # avoid mutating the original master.cfg dictionary. Each
- # ShellCommand gets its own copy, so any start() methods won't
- # be able to modify the original.
- env = env.copy()
- args = {'workdir': workdir,
- 'env': env,
- 'want_stdout': want_stdout,
- 'want_stderr': want_stderr,
- 'timeout': timeout,
- }
- LoggedRemoteCommand.__init__(self, "shell", args)
-
- def start(self):
- self.args['command'] = self.command
- if self.remote_command == "shell":
- # non-ShellCommand slavecommands are responsible for doing this
- # fixup themselves
- if self.step.slaveVersion("shell", "old") == "old":
- self.args['dir'] = self.args['workdir']
- what = "command '%s' in dir '%s'" % (self.args['command'],
- self.args['workdir'])
- log.msg(what)
- return LoggedRemoteCommand.start(self)
-
- def __repr__(self):
- return "<RemoteShellCommand '%s'>" % self.command
-
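A short sketch of how a step might create and launch the command defined above. The MakeCheck class and its parameters are assumptions for illustration; BuildStep and the addLog/runCommand/finished helpers it relies on are defined just below in this file.

class MakeCheck(BuildStep):
    """Sketch only: run 'make check' on the slave via RemoteShellCommand."""

    name = "make-check"

    def start(self):
        cmd = RemoteShellCommand(workdir="build",
                                 command=["make", "check"],
                                 env={"CFLAGS": "-O2"},
                                 timeout=30*60)
        # create a LogFile and ask the command to close it when done
        loog = self.addLog("make-check")
        cmd.useLog(loog, closeWhenFinished=True)
        d = self.runCommand(cmd)
        d.addCallback(lambda res: self.finished(SUCCESS))
        d.addErrback(self.failed)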
-class BuildStep:
- """
- I represent a single step of the build process. This step may involve
- zero or more commands to be run in the build slave, as well as arbitrary
- processing on the master side. Regardless of how many slave commands are
- run, the BuildStep will result in a single status value.
-
- The step is started by calling startStep(), which returns a Deferred that
- fires when the step finishes. See C{startStep} for a description of the
- results provided by that Deferred.
-
- __init__ and start are good methods to override. Don't forget to upcall
- BuildStep.__init__ or bad things will happen.
-
- To launch a RemoteCommand, pass it to .runCommand and wait on the
- Deferred it returns.
-
- Each BuildStep generates status as it runs. This status data is fed to
- the L{buildbot.status.builder.BuildStepStatus} listener that sits in
- C{self.step_status}. It can also feed progress data (like how much text
- is output by a shell command) to the
- L{buildbot.status.progress.StepProgress} object that lives in
- C{self.progress}, by calling C{progress.setProgress(metric, value)} as it
- runs.
-
- @type build: L{buildbot.process.base.Build}
- @ivar build: the parent Build which is executing this step
-
- @type progress: L{buildbot.status.progress.StepProgress}
- @ivar progress: tracks ETA for the step
-
- @type step_status: L{buildbot.status.builder.BuildStepStatus}
- @ivar step_status: collects output status
- """
-
- # these parameters are used by the parent Build object to decide how to
- # interpret our results. haltOnFailure will affect the build process
- # immediately, the others will be taken into consideration when
- # determining the overall build status.
- #
- haltOnFailure = False
- flunkOnWarnings = False
- flunkOnFailure = False
- warnOnWarnings = False
- warnOnFailure = False
-
- # 'parms' holds a list of all the parameters we care about, to allow
- # users to instantiate a subclass of BuildStep with a mixture of
- # arguments, some of which are for us, some of which are for the subclass
- # (or a delegate of the subclass, like how ShellCommand delivers many
- # arguments to the RemoteShellCommand that it creates). Such delegating
- # subclasses will use this list to figure out which arguments are meant
- # for us and which should be given to someone else.
- parms = ['build', 'name', 'locks',
- 'haltOnFailure',
- 'flunkOnWarnings',
- 'flunkOnFailure',
- 'warnOnWarnings',
- 'warnOnFailure',
- 'progressMetrics',
- ]
-
- name = "generic"
- locks = []
- progressMetrics = [] # 'time' is implicit
- useProgress = True # set to False if step is really unpredictable
- build = None
- step_status = None
- progress = None
-
- def __init__(self, build, **kwargs):
- self.build = build
- for p in self.__class__.parms:
- if kwargs.has_key(p):
- setattr(self, p, kwargs[p])
- del kwargs[p]
- # we want to encourage all steps to get a workdir, so tolerate its
- # presence here. It really only matters for non-ShellCommand steps
- # like Dummy
- if kwargs.has_key('workdir'):
- del kwargs['workdir']
- if kwargs:
- why = "%s.__init__ got unexpected keyword argument(s) %s" \
- % (self, kwargs.keys())
- raise TypeError(why)
-
- def setupProgress(self):
- if self.useProgress:
- sp = progress.StepProgress(self.name, self.progressMetrics)
- self.progress = sp
- self.step_status.setProgress(sp)
- return sp
- return None
-
- def getProperty(self, propname):
- return self.build.getProperty(propname)
-
- def setProperty(self, propname, value):
- self.build.setProperty(propname, value)
-
- def startStep(self, remote):
- """Begin the step. This returns a Deferred that will fire when the
- step finishes.
-
- This deferred fires with a tuple of (result, [extra text]), although
- older steps used to return just the 'result' value, so the receiving
- L{base.Build} needs to be prepared to handle that too. C{result} is
- one of the SUCCESS/WARNINGS/FAILURE/SKIPPED constants from
- L{buildbot.status.builder}, and the extra text is a list of short
- strings which should be appended to the Build's text results. This
- text allows a failing test-case step to append B{17 tests} to the
- Build's status text, in addition to marking the build as failing.
-
- The deferred will errback if the step encounters an exception,
- including an exception on the slave side (or if the slave goes away
- altogether). Failures in shell commands (rc!=0) will B{not} cause an
- errback; in general the BuildStep will evaluate the results and
- decide whether to treat them as a WARNING or FAILURE.
-
- @type remote: L{twisted.spread.pb.RemoteReference}
- @param remote: a reference to the slave's
- L{buildbot.slave.bot.SlaveBuilder} instance where any
- RemoteCommands may be run
- """
-
- self.remote = remote
- self.deferred = defer.Deferred()
- # convert all locks into their real form
- self.locks = [self.build.builder.botmaster.getLockByID(l)
- for l in self.locks]
- # then narrow SlaveLocks down to the slave that this build is being
- # run on
- self.locks = [l.getLock(self.build.slavebuilder) for l in self.locks]
- for l in self.locks:
- if l in self.build.locks:
- log.msg("Hey, lock %s is claimed by both a Step (%s) and the"
- " parent Build (%s)" % (l, self, self.build))
- raise RuntimeError("lock claimed by both Step and Build")
- d = self.acquireLocks()
- d.addCallback(self._startStep_2)
- return self.deferred
-
- def acquireLocks(self, res=None):
- log.msg("acquireLocks(step %s, locks %s)" % (self, self.locks))
- if not self.locks:
- return defer.succeed(None)
- for lock in self.locks:
- if not lock.isAvailable():
- log.msg("step %s waiting for lock %s" % (self, lock))
- d = lock.waitUntilAvailable(self)
- d.addCallback(self.acquireLocks)
- return d
- # all locks are available, claim them all
- for lock in self.locks:
- lock.claim(self)
- return defer.succeed(None)
-
- def _startStep_2(self, res):
- if self.progress:
- self.progress.start()
- self.step_status.stepStarted()
- try:
- skip = self.start()
- if skip == SKIPPED:
- reactor.callLater(0, self.releaseLocks)
- reactor.callLater(0, self.deferred.callback, SKIPPED)
- except:
- log.msg("BuildStep.startStep exception in .start")
- self.failed(Failure())
-
- def start(self):
- """Begin the step. Override this method and add code to do local
- processing, fire off remote commands, etc.
-
- To spawn a command in the buildslave, create a RemoteCommand instance
- and run it with self.runCommand::
-
- c = RemoteCommandFoo(args)
- d = self.runCommand(c)
- d.addCallback(self.fooDone).addErrback(self.failed)
-
- As the step runs, it should send status information to the
- BuildStepStatus::
-
- self.step_status.setColor('red')
- self.step_status.setText(['compile', 'failed'])
- self.step_status.setText2(['4', 'warnings'])
-
- To add a LogFile, use self.addLog. Make sure it gets closed when it
- finishes. When giving a Logfile to a RemoteShellCommand, just ask it
- to close the log when the command completes::
-
- log = self.addLog('output')
- cmd = RemoteShellCommand(args)
- cmd.useLog(log, closeWhenFinished=True)
-
- You can also create complete Logfiles with generated text in a single
- step::
-
- self.addCompleteLog('warnings', text)
-
- When the step is done, it should call self.finished(result). 'result'
- will be provided to the L{buildbot.process.base.Build}, and should be
- one of the constants defined above: SUCCESS, WARNINGS, FAILURE, or
- SKIPPED.
-
- If the step encounters an exception, it should call self.failed(why).
- 'why' should be a Failure object. This automatically fails the whole
- build with an exception. It is a good idea to add self.failed as an
- errback to any Deferreds you might obtain.
-
- If the step decides it does not need to be run, start() can return
- the constant SKIPPED. This fires the callback immediately: it is not
- necessary to call .finished yourself. This can also indicate to the
- status-reporting mechanism that this step should not be displayed."""
-
- raise NotImplementedError("your subclass must implement this method")
-
- def interrupt(self, reason):
- """Halt the command, either because the user has decided to cancel
- the build ('reason' is a string), or because the slave has
- disconnected ('reason' is a ConnectionLost Failure). Any further
- local processing should be skipped, and the Step completed with an
- error status. The results text should say something useful like
- ['step', 'interrupted'] or ['remote', 'lost']"""
- pass
-
- def releaseLocks(self):
- log.msg("releaseLocks(%s): %s" % (self, self.locks))
- for lock in self.locks:
- lock.release(self)
-
- def finished(self, results):
- if self.progress:
- self.progress.finish()
- self.step_status.stepFinished(results)
- self.releaseLocks()
- self.deferred.callback(results)
-
- def failed(self, why):
- # if isinstance(why, pb.CopiedFailure): # a remote exception might
- # only have short traceback, so formatFailure is not as useful as
- # you'd like (no .frames, so no traceback is displayed)
- log.msg("BuildStep.failed, traceback follows")
- log.err(why)
- try:
- if self.progress:
- self.progress.finish()
- self.addHTMLLog("err.html", formatFailure(why))
- self.addCompleteLog("err.text", why.getTraceback())
- # could use why.getDetailedTraceback() for more information
- self.step_status.setColor("purple")
- self.step_status.setText([self.name, "exception"])
- self.step_status.setText2([self.name])
- self.step_status.stepFinished(EXCEPTION)
- except:
- log.msg("exception during failure processing")
- log.err()
- # the progress stuff may still be whacked (the StepStatus may
- # think that it is still running), but the build overall will now
- # finish
- try:
- self.releaseLocks()
- except:
- log.msg("exception while releasing locks")
- log.err()
-
- log.msg("BuildStep.failed now firing callback")
- self.deferred.callback(EXCEPTION)
-
- # utility methods that BuildSteps may find useful
-
- def slaveVersion(self, command, oldversion=None):
- """Return the version number of the given slave command. For the
- commands defined in buildbot.slave.commands, this is the value of
- 'cvs_ver' at the top of that file. Non-existent commands will return
- a value of None. Buildslaves running buildbot-0.5.0 or earlier did
- not respond to the version query: commands on those slaves will
- return a value of OLDVERSION, so you can distinguish between old
- buildslaves and missing commands.
-
- If you know that <=0.5.0 buildslaves have the command you want (CVS
- and SVN existed back then, but none of the other VC systems), then it
- makes sense to call this with oldversion='old'. If the command you
- want is newer than that, just leave oldversion= unspecified, and this
- method will return None for a buildslave that does not implement the
- command.
- """
- return self.build.getSlaveCommandVersion(command, oldversion)
-
- def slaveVersionIsOlderThan(self, command, minversion):
- sv = self.build.getSlaveCommandVersion(command, None)
- if sv is None:
- return True
- # the version we get back is a string form of the CVS version number
- # of the slave's buildbot/slave/commands.py, something like 1.39 .
- # This might change in the future (I might move away from CVS), but
- # if so I'll keep updating that string with suitably-comparable
- # values.
- if sv.split(".") < minversion.split("."):
- return True
- return False
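One caveat worth noting about the comparison above: sv.split(".") compares the version components as strings, so it only behaves numerically while each component stays single-digit (for example "1.9" sorts after "1.10"). A hedged sketch of a numeric comparison, should that ever matter; this helper is not part of the original file.

# Sketch only: compare version strings component-by-component as ints.
def _numeric_version(vstring):
    return [int(part) for part in vstring.split(".")]

# _numeric_version("1.9") < _numeric_version("1.10")  -->  True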
-
- def addLog(self, name):
- loog = self.step_status.addLog(name)
- return loog
-
- def addCompleteLog(self, name, text):
- log.msg("addCompleteLog(%s)" % name)
- loog = self.step_status.addLog(name)
- size = loog.chunkSize
- for start in range(0, len(text), size):
- loog.addStdout(text[start:start+size])
- loog.finish()
-
- def addHTMLLog(self, name, html):
- log.msg("addHTMLLog(%s)" % name)
- self.step_status.addHTMLLog(name, html)
-
- def runCommand(self, c):
- d = c.run(self, self.remote)
- return d
-
-
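The start() docstring above describes the whole BuildStep contract; the following hypothetical subclass pulls those pieces together in one place. The step name, property check, and command are illustrative assumptions, although 'got_revision' itself is the property this file sets in Source.commandComplete below.

class UploadDocs(BuildStep):
    """Sketch only: skip unless a 'got_revision' property is set,
    otherwise run a command and report the result."""

    name = "upload-docs"

    def start(self):
        if self.getProperty("got_revision") is None:
            # nothing to upload; returning SKIPPED fires the deferred for us
            return SKIPPED
        self.step_status.setColor("yellow")
        self.step_status.setText(["uploading", "docs"])
        cmd = RemoteShellCommand(workdir="build",
                                 command=["make", "upload-docs"])
        cmd.useLog(self.addLog("upload"), closeWhenFinished=True)
        d = self.runCommand(cmd)
        d.addCallback(self.uploadDone)
        d.addErrback(self.failed)

    def uploadDone(self, cmd):
        self.step_status.setColor("green")
        self.step_status.setText(["upload", "done"])
        self.finished(SUCCESS)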
-
-class LoggingBuildStep(BuildStep):
- # This is an abstract base class, suitable for inheritance by all
- # BuildSteps that invoke RemoteCommands which emit stdout/stderr messages
-
- progressMetrics = ['output']
-
- def describe(self, done=False):
- raise NotImplementedError("implement this in a subclass")
-
- def startCommand(self, cmd, errorMessages=[]):
- """
- @param cmd: a suitable RemoteCommand which will be launched, with
- all output being put into a LogFile named 'log'
- """
- self.cmd = cmd # so we can interrupt it
- self.step_status.setColor("yellow")
- self.step_status.setText(self.describe(False))
- loog = self.addLog("log")
- for em in errorMessages:
- loog.addHeader(em)
- log.msg("ShellCommand.start using log", loog)
- log.msg(" for cmd", cmd)
- cmd.useLog(loog, True)
- loog.logProgressTo(self.progress, "output")
- d = self.runCommand(cmd)
- d.addCallbacks(self._commandComplete, self.checkDisconnect)
- d.addErrback(self.failed)
-
- def interrupt(self, reason):
- # TODO: consider adding an INTERRUPTED or STOPPED status to use
- # instead of FAILURE, might make the text a bit more clear.
- # 'reason' can be a Failure, or text
- self.addCompleteLog('interrupt', str(reason))
- d = self.cmd.interrupt(reason)
- return d
-
- def checkDisconnect(self, f):
- f.trap(error.ConnectionLost)
- self.step_status.setColor("red")
- self.step_status.setText(self.describe(True) +
- ["failed", "slave", "lost"])
- self.step_status.setText2(["failed", "slave", "lost"])
- return self.finished(FAILURE)
-
- def _commandComplete(self, cmd):
- self.commandComplete(cmd)
- self.createSummary(cmd.log)
- results = self.evaluateCommand(cmd)
- self.setStatus(cmd, results)
- return self.finished(results)
-
- # to refine the status output, override one or more of the following
- # methods. Change as little as possible: start with the first ones on
- # this list and only proceed further if you have to
- #
- # createSummary: add additional Logfiles with summarized results
- # evaluateCommand: decides whether the step was successful or not
- #
- # getText: create the final per-step text strings
- # getText2: create the strings added to the overall build status
- #
- # maybeGetText2: only adds getText2() when the step affects build status
- #
- # setStatus: handles all status updating
-
- # commandComplete is available for general-purpose post-completion work.
- # It is a good place to do one-time parsing of logfiles, counting
- # warnings and errors. It should probably stash such counts in places
- # like self.warnings so they can be picked up later by your getText
- # method.
-
- # TODO: most of this stuff should really be on BuildStep rather than
- # ShellCommand. That involves putting the status-setup stuff in
- # .finished, which would make it hard to turn off.
-
- def commandComplete(self, cmd):
- """This is a general-purpose hook method for subclasses. It will be
- called after the remote command has finished, but before any of the
- other hook functions are called."""
- pass
-
- def createSummary(self, log):
- """To create summary logs, do something like this:
- warnings = grep('^Warning:', log.getText())
- self.addCompleteLog('warnings', warnings)
- """
- pass
-
- def evaluateCommand(self, cmd):
- """Decide whether the command was SUCCESS, WARNINGS, or FAILURE.
- Override this to, say, declare WARNINGS if there is any stderr
- activity, or to say that rc!=0 is not actually an error."""
-
- if cmd.rc != 0:
- return FAILURE
- # if cmd.log.getStderr(): return WARNINGS
- return SUCCESS
-
- def getText(self, cmd, results):
- if results == SUCCESS:
- return self.describe(True)
- elif results == WARNINGS:
- return self.describe(True) + ["warnings"]
- else:
- return self.describe(True) + ["failed"]
-
- def getText2(self, cmd, results):
- """We have decided to add a short note about ourselves to the overall
- build description, probably because something went wrong. Return a
- short list of short strings. If your subclass counts test failures or
- warnings of some sort, this is a good place to announce the count."""
- # return ["%d warnings" % warningcount]
- # return ["%d tests" % len(failedTests)]
- return [self.name]
-
- def maybeGetText2(self, cmd, results):
- if results == SUCCESS:
- # successful steps do not add anything to the build's text
- pass
- elif results == WARNINGS:
- if (self.flunkOnWarnings or self.warnOnWarnings):
- # we're affecting the overall build, so tell them why
- return self.getText2(cmd, results)
- else:
- if (self.haltOnFailure or self.flunkOnFailure
- or self.warnOnFailure):
- # we're affecting the overall build, so tell them why
- return self.getText2(cmd, results)
- return []
-
- def getColor(self, cmd, results):
- assert results in (SUCCESS, WARNINGS, FAILURE)
- if results == SUCCESS:
- return "green"
- elif results == WARNINGS:
- return "orange"
- else:
- return "red"
-
- def setStatus(self, cmd, results):
- # this is good enough for most steps, but it can be overridden to
- # get more control over the displayed text
- self.step_status.setColor(self.getColor(cmd, results))
- self.step_status.setText(self.getText(cmd, results))
- self.step_status.setText2(self.maybeGetText2(cmd, results))
-
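The hook list above is easier to follow with a concrete (hypothetical) subclass in view; this sketch overrides only the first two hooks, exactly as the comments recommend, plus getText2 to feed the warning count into the overall build text.

class CountWarnings(LoggingBuildStep):
    """Sketch only: count 'warning:' lines in the main log, summarize
    them in a separate log, and downgrade rc==0 builds to WARNINGS."""

    name = "count-warnings"
    description = ["compiling"]
    warnOnWarnings = True

    # NOTE: a real step would also define start(), building a
    # RemoteShellCommand and handing it to self.startCommand(cmd),
    # as ShellCommand does below.

    def describe(self, done=False):
        return self.description

    def commandComplete(self, cmd):
        # one-time parse of the main log, stashed for the later hooks
        text = cmd.log.getText()
        self.warnings = [l for l in text.splitlines() if "warning:" in l]

    def createSummary(self, log):
        if self.warnings:
            self.addCompleteLog("warnings", "\n".join(self.warnings) + "\n")

    def evaluateCommand(self, cmd):
        if cmd.rc != 0:
            return FAILURE
        if self.warnings:
            return WARNINGS
        return SUCCESS

    def getText2(self, cmd, results):
        return ["%d warnings" % len(self.warnings)]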
-
-# -*- test-case-name: buildbot.test.test_properties -*-
-
-class _BuildPropertyDictionary:
- def __init__(self, build):
- self.build = build
- def __getitem__(self, name):
- p = self.build.getProperty(name)
- if p is None:
- p = ""
- return p
-
-class WithProperties:
- """This is a marker class, used in ShellCommand's command= argument to
- indicate that we want to interpolate a build property.
- """
-
- def __init__(self, fmtstring, *args):
- self.fmtstring = fmtstring
- self.args = args
-
- def render(self, build):
- if self.args:
- strings = []
- for name in self.args:
- p = build.getProperty(name)
- if p is None:
- p = ""
- strings.append(p)
- s = self.fmtstring % tuple(strings)
- else:
- s = self.fmtstring % _BuildPropertyDictionary(build)
- return s
-
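A brief usage sketch for WithProperties, showing the positional-argument style and the dict-interpolation style that render() supports above; the tarball name is an assumption, while 'got_revision' is the property this file itself sets in Source.commandComplete.

# Sketch only: two equivalent ways to interpolate a build property
# into a shell command (rendered per-build at start() time).
command_a = ["tar", "czf",
             WithProperties("build-%s.tar.gz", "got_revision"),
             "source"]
command_b = ["tar", "czf",
             WithProperties("build-%(got_revision)s.tar.gz"),
             "source"]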
-class ShellCommand(LoggingBuildStep):
- """I run a single shell command on the buildslave. I return FAILURE if
- the exit code of that command is non-zero, SUCCESS otherwise. To change
- this behavior, override my .evaluateCommand method.
-
- I create a single Log named 'log' which contains the output of the
- command. To create additional summary Logs, override my .createSummary
- method.
-
- The shell command I run (a list of argv strings) can be provided in
- several ways:
- - a class-level .command attribute
- - a command= parameter to my constructor (overrides .command)
- - set explicitly with my .setCommand() method (overrides both)
-
- @ivar command: a list of argv strings (or WithProperties instances).
- This will be used by start() to create a
- RemoteShellCommand instance.
-
- """
-
- name = "shell"
- description = None # set this to a list of short strings to override
- descriptionDone = None # alternate description when the step is complete
- command = None # set this to a command, or set in kwargs
-
- def __init__(self, workdir,
- description=None, descriptionDone=None,
- command=None,
- **kwargs):
- # most of our arguments get passed through to the RemoteShellCommand
- # that we create, but first strip out the ones that we pass to
- # BuildStep (like haltOnFailure and friends), and a couple that we
- # consume ourselves.
- self.workdir = workdir # required by RemoteShellCommand
- if description:
- self.description = description
- if descriptionDone:
- self.descriptionDone = descriptionDone
- if command:
- self.command = command
-
- # pull out the ones that BuildStep wants, then upcall
- buildstep_kwargs = {}
- for k in kwargs.keys()[:]:
- if k in self.__class__.parms:
- buildstep_kwargs[k] = kwargs[k]
- del kwargs[k]
- LoggingBuildStep.__init__(self, **buildstep_kwargs)
-
- # everything left over goes to the RemoteShellCommand
- kwargs['workdir'] = workdir # including a copy of 'workdir'
- self.remote_kwargs = kwargs
-
-
- def setCommand(self, command):
- self.command = command
-
- def describe(self, done=False):
- """Return a list of short strings to describe this step, for the
- status display. This uses the first few words of the shell command.
- You can replace this by setting .description in your subclass, or by
- overriding this method to describe the step better.
-
- @type done: boolean
- @param done: whether the command is complete or not, to improve the
- way the command is described. C{done=False} is used
- while the command is still running, so a single
- imperfect-tense verb is appropriate ('compiling',
- 'testing', ...) C{done=True} is used when the command
- has finished, and the default getText() method adds some
- text, so a simple noun is appropriate ('compile',
- 'tests' ...)
- """
-
- if done and self.descriptionDone is not None:
- return self.descriptionDone
- if self.description is not None:
- return self.description
-
- words = self.command
- # TODO: handle WithProperties here
- if isinstance(words, types.StringTypes):
- words = words.split()
- if len(words) < 1:
- return ["???"]
- if len(words) == 1:
- return ["'%s'" % words[0]]
- if len(words) == 2:
- return ["'%s" % words[0], "%s'" % words[1]]
- return ["'%s" % words[0], "%s" % words[1], "...'"]
-
- def _interpolateProperties(self, command):
- # interpolate any build properties into our command
- if not isinstance(command, (list, tuple)):
- return command
- command_argv = []
- for argv in command:
- if isinstance(argv, WithProperties):
- command_argv.append(argv.render(self.build))
- else:
- command_argv.append(argv)
- return command_argv
-
- def setupEnvironment(self, cmd):
- # merge in anything from Build.slaveEnvironment . Earlier steps
- # (perhaps ones which compile libraries or sub-projects that need to
- # be referenced by later steps) can add keys to
- # self.build.slaveEnvironment to affect later steps.
- slaveEnv = self.build.slaveEnvironment
- if slaveEnv:
- if cmd.args['env'] is None:
- cmd.args['env'] = {}
- cmd.args['env'].update(slaveEnv)
- # note that each RemoteShellCommand gets its own copy of the
- # dictionary, so we shouldn't be affecting anyone but ourselves.
-
- def start(self):
- command = self._interpolateProperties(self.command)
- # create the actual RemoteShellCommand instance now
- kwargs = self.remote_kwargs
- kwargs['command'] = command
- cmd = RemoteShellCommand(**kwargs)
- self.setupEnvironment(cmd)
- self.startCommand(cmd)
-
-
-
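A configuration-side sketch: how a ShellCommand like this is typically listed in a build factory of this era. The factory.s helper and BuildFactory come from buildbot.process.factory; treat the exact wiring (and the 'make docs' target) as assumptions rather than something this file defines.

# Sketch only: using ShellCommand from a master.cfg build factory.
from buildbot.process import factory, step

f = factory.BuildFactory([
    factory.s(step.ShellCommand,
              workdir="build",
              command=["make", "docs"],
              description=["building", "docs"],
              descriptionDone=["docs"],
              haltOnFailure=True),
])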
-
-class TreeSize(ShellCommand):
- name = "treesize"
- command = ["du", "-s", "."]
- kb = None
-
- def commandComplete(self, cmd):
- out = cmd.log.getText()
- m = re.search(r'^(\d+)', out)
- if m:
- self.kb = int(m.group(1))
-
- def evaluateCommand(self, cmd):
- if cmd.rc != 0:
- return FAILURE
- if self.kb is None:
- return WARNINGS # not sure how 'du' could fail, but whatever
- return SUCCESS
-
- def getText(self, cmd, results):
- if self.kb is not None:
- return ["treesize", "%d kb" % self.kb]
- return ["treesize", "unknown"]
-
-
-class Source(LoggingBuildStep):
- """This is a base class to generate a source tree in the buildslave.
- Each version control system has a specialized subclass, and is expected
- to override __init__ and implement computeSourceRevision() and
- startVC(). The class as a whole builds up the self.args dictionary, then
- starts a LoggedRemoteCommand with those arguments.
- """
-
- # if the checkout fails, there's no point in doing anything else
- haltOnFailure = True
- notReally = False
-
- branch = None # the default branch, should be set in __init__
-
- def __init__(self, workdir, mode='update', alwaysUseLatest=False,
- timeout=20*60, retry=None, **kwargs):
- """
- @type workdir: string
- @param workdir: local directory (relative to the Builder's root)
- where the tree should be placed
-
- @type mode: string
- @param mode: the kind of VC operation that is desired:
- - 'update': specifies that the checkout/update should be
- performed directly into the workdir. Each build is performed
- in the same directory, allowing for incremental builds. This
- minimizes disk space, bandwidth, and CPU time. However, it
- may encounter problems if the build process does not handle
- dependencies properly (if you must sometimes do a 'clean
- build' to make sure everything gets compiled), or if source
- files are deleted but generated files can influence test
- behavior (e.g. python's .pyc files), or when source
- directories are deleted but generated files prevent CVS from
- removing them.
-
- - 'copy': specifies that the source-controlled workspace
- should be maintained in a separate directory (called the
- 'copydir'), using checkout or update as necessary. For each
- build, a new workdir is created with a copy of the source
- tree (rm -rf workdir; cp -r copydir workdir). This doubles
- the disk space required, but keeps the bandwidth low
- (update instead of a full checkout). A full 'clean' build
- is performed each time. This avoids any generated-file
- build problems, but is still occasionally vulnerable to
- problems such as a CVS repository being manually rearranged
- (causing CVS errors on update) which are not an issue with
- a full checkout.
-
- - 'clobber': specifies that the working directory should be
- deleted each time, necessitating a full checkout for each
- build. This ensures a clean build off a complete checkout,
- avoiding any of the problems described above, but is
- bandwidth intensive, as the whole source tree must be
- pulled down for each build.
-
- - 'export': is like 'clobber', except that e.g. the 'cvs
- export' command is used to create the working directory.
- This command removes all VC metadata files (the
- CVS/.svn/{arch} directories) from the tree, which is
- sometimes useful for creating source tarballs (to avoid
- including the metadata in the tar file). Not all VC systems
- support export.
-
- @type alwaysUseLatest: boolean
- @param alwaysUseLatest: whether to always update to the most
- recent available sources for this build.
-
- Normally the Source step asks its Build for a list of all
- Changes that are supposed to go into the build, then computes a
- 'source stamp' (revision number or timestamp) that will cause
- exactly that set of changes to be present in the checked out
- tree. This is turned into, e.g., 'cvs update -D timestamp', or
- 'svn update -r revnum'. If alwaysUseLatest=True, bypass this
- computation and always update to the latest available sources
- for each build.
-
- The source stamp helps avoid a race condition in which someone
- commits a change after the master has decided to start a build
- but before the slave finishes checking out the sources. At best
- this results in a build which contains more changes than the
- buildmaster thinks it has (possibly resulting in the wrong
- person taking the blame for any problems that result), at worst
- it can result in an incoherent set of sources (splitting a
- non-atomic commit) which may not build at all.
-
- @type retry: tuple of ints (delay, repeats) (or None)
- @param retry: if provided, VC update failures are re-attempted up
- to REPEATS times, with DELAY seconds between each
- attempt. Some users have slaves with poor connectivity
- to their VC repository, and they say that up to 80% of
- their build failures are due to transient network
- failures that could be handled by simply retrying a
- couple times.
-
- """
-
- LoggingBuildStep.__init__(self, **kwargs)
-
- assert mode in ("update", "copy", "clobber", "export")
- if retry:
- delay, repeats = retry
- assert isinstance(repeats, int)
- assert repeats > 0
- self.args = {'mode': mode,
- 'workdir': workdir,
- 'timeout': timeout,
- 'retry': retry,
- 'patch': None, # set during .start
- }
- self.alwaysUseLatest = alwaysUseLatest
-
- # Compute defaults for descriptions:
- description = ["updating"]
- descriptionDone = ["update"]
- if mode == "clobber":
- description = ["checkout"]
- # because 'checking out' takes too much space
- descriptionDone = ["checkout"]
- elif mode == "export":
- description = ["exporting"]
- descriptionDone = ["export"]
- self.description = description
- self.descriptionDone = descriptionDone
-
- def describe(self, done=False):
- if done:
- return self.descriptionDone
- return self.description
-
- def computeSourceRevision(self, changes):
- """Each subclass must implement this method to do something more
- precise than -rHEAD every time. For version control systems that use
- repository-wide change numbers (SVN, P4), this can simply take the
- maximum such number from all the changes involved in this build. For
- systems that do not (CVS), it needs to create a timestamp based upon
- the latest Change, the Build's treeStableTimer, and an optional
- self.checkoutDelay value."""
- return None
-
- def start(self):
- if self.notReally:
- log.msg("faking %s checkout/update" % self.name)
- self.step_status.setColor("green")
- self.step_status.setText(["fake", self.name, "successful"])
- self.addCompleteLog("log",
- "Faked %s checkout/update 'successful'\n" \
- % self.name)
- return SKIPPED
-
- # what source stamp would this build like to use?
- s = self.build.getSourceStamp()
- # if branch is None, then use the Step's "default" branch
- branch = s.branch or self.branch
- # if revision is None, use the latest sources (-rHEAD)
- revision = s.revision
- if not revision and not self.alwaysUseLatest:
- revision = self.computeSourceRevision(s.changes)
- # if patch is None, then do not patch the tree after checkout
-
- # 'patch' is None or a tuple of (patchlevel, diff)
- patch = s.patch
-
- self.startVC(branch, revision, patch)
-
- def commandComplete(self, cmd):
- got_revision = None
- if cmd.updates.has_key("got_revision"):
- got_revision = cmd.updates["got_revision"][-1]
- self.setProperty("got_revision", got_revision)
-
-
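Because Source only builds up self.args and leaves the VC specifics to subclasses, a new backend needs little code. This hypothetical backend ("fossil", with a matching slave-side command assumed to exist) sketches the two methods a subclass is expected to provide, modeled on the real subclasses that follow.

class Fossil(Source):
    """Sketch only: a hypothetical Source subclass. Assumes a slave-side
    'fossil' command exists that understands repourl/revision/patch."""

    name = "fossil"

    def __init__(self, repourl, **kwargs):
        self.branch = None
        Source.__init__(self, **kwargs)
        self.args['repourl'] = repourl

    def computeSourceRevision(self, changes):
        # like SVN/P4: use the highest change number in this build
        if not changes:
            return None
        return max([int(c.revision) for c in changes])

    def startVC(self, branch, revision, patch):
        self.args['revision'] = revision
        self.args['patch'] = patch
        cmd = LoggedRemoteCommand("fossil", self.args)
        self.startCommand(cmd)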
-
-class CVS(Source):
- """I do CVS checkout/update operations.
-
- Note: if you are doing anonymous/pserver CVS operations, you will need
- to manually do a 'cvs login' on each buildslave before the slave has any
- hope of success. XXX: fix this; take a cvs password as an argument and
- figure out how to do a 'cvs login' on each build
- """
-
- name = "cvs"
-
- #progressMetrics = ['output']
- #
- # additional things to track: update gives one stderr line per directory
- # (starting with 'cvs server: Updating ') (and is fairly stable if files
- # is empty), export gives one line per directory (starting with 'cvs
- # export: Updating ') and another line per file (starting with U). Would
- # be nice to track these, requires grepping LogFile data for lines,
- # parsing each line. Might be handy to have a hook in LogFile that gets
- # called with each complete line.
-
- def __init__(self, cvsroot, cvsmodule,
- global_options=[], branch=None, checkoutDelay=None,
- login=None,
- clobber=0, export=0, copydir=None,
- **kwargs):
-
- """
- @type cvsroot: string
- @param cvsroot: CVS Repository from which the source tree should
- be obtained. '/home/warner/Repository' for local
- or NFS-reachable repositories,
- ':pserver:anon@foo.com:/cvs' for anonymous CVS,
- 'user@host.com:/cvs' for non-anonymous CVS or
- CVS over ssh. Lots of possibilities, check the
- CVS documentation for more.
-
- @type cvsmodule: string
- @param cvsmodule: subdirectory of CVS repository that should be
- retrieved
-
- @type login: string or None
- @param login: if not None, a string which will be provided as a
- password to the 'cvs login' command, used when a
- :pserver: method is used to access the repository.
- This login is only needed once, but must be run
- each time (just before the CVS operation) because
- there is no way for the buildslave to tell whether
- it was previously performed or not.
-
- @type branch: string
- @param branch: the default branch name, will be used in a '-r'
- argument to specify which branch of the source tree
- should be used for this checkout. Defaults to None,
- which means to use 'HEAD'.
-
- @type checkoutDelay: int or None
- @param checkoutDelay: if not None, the number of seconds to put
- between the last known Change and the
- timestamp given to the -D argument. This
- defaults to exactly half of the parent
- Build's .treeStableTimer, but it could be
- set to something else if your CVS change
- notification has particularly weird
- latency characteristics.
-
- @type global_options: list of strings
- @param global_options: these arguments are inserted in the cvs
- command line, before the
- 'checkout'/'update' command word. See
- 'cvs --help-options' for a list of what
- may be accepted here. ['-r'] will make
- the checked out files read-only. ['-r',
- '-R'] will also assume the repository is
- read-only (I assume this means it won't
- use locks to ensure atomic access to the
- ,v files)."""
-
- self.checkoutDelay = checkoutDelay
- self.branch = branch
-
- if not kwargs.has_key('mode') and (clobber or export or copydir):
- # deal with old configs
- warnings.warn("Please use mode=, not clobber/export/copydir",
- DeprecationWarning)
- if export:
- kwargs['mode'] = "export"
- elif clobber:
- kwargs['mode'] = "clobber"
- elif copydir:
- kwargs['mode'] = "copy"
- else:
- kwargs['mode'] = "update"
-
- Source.__init__(self, **kwargs)
-
- self.args.update({'cvsroot': cvsroot,
- 'cvsmodule': cvsmodule,
- 'global_options': global_options,
- 'login': login,
- })
-
- def computeSourceRevision(self, changes):
- if not changes:
- return None
- lastChange = max([c.when for c in changes])
- if self.checkoutDelay is not None:
- when = lastChange + self.checkoutDelay
- else:
- lastSubmit = max([r.submittedAt for r in self.build.requests])
- when = (lastChange + lastSubmit) / 2
- return formatdate(when)
-
- def startVC(self, branch, revision, patch):
- if self.slaveVersionIsOlderThan("cvs", "1.39"):
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- if (branch != self.branch
- and self.args['mode'] in ("update", "copy")):
- m = ("This buildslave (%s) does not know about multiple "
- "branches, and using mode=%s would probably build the "
- "wrong tree. "
- "Refusing to build. Please upgrade the buildslave to "
- "buildbot-0.7.0 or newer." % (self.build.slavename,
- self.args['mode']))
- log.msg(m)
- raise BuildSlaveTooOldError(m)
-
- if branch is None:
- branch = "HEAD"
- self.args['branch'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- if self.args['branch'] == "HEAD" and self.args['revision']:
- # special case. 'cvs update -r HEAD -D today' gives no files
- # TODO: figure out why, see if it applies to -r BRANCH
- self.args['branch'] = None
-
- # deal with old slaves
- warnings = []
- slavever = self.slaveVersion("cvs", "old")
-
- if slavever == "old":
- # 0.5.0
- if self.args['mode'] == "export":
- self.args['export'] = 1
- elif self.args['mode'] == "clobber":
- self.args['clobber'] = 1
- elif self.args['mode'] == "copy":
- self.args['copydir'] = "source"
- self.args['tag'] = self.args['branch']
- assert not self.args['patch'] # 0.5.0 slave can't do patch
-
- cmd = LoggedRemoteCommand("cvs", self.args)
- self.startCommand(cmd, warnings)
-
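A configuration sketch for the CVS step above, exercising the mode= and retry= knobs documented in Source.__init__; the repository coordinates are placeholders, and the factory.s wiring is the same assumption as in the ShellCommand example earlier.

# Sketch only: a CVS checkout step in a master.cfg build factory.
from buildbot.process import factory, step

f = factory.BuildFactory([
    factory.s(step.CVS,
              cvsroot=":pserver:anon@cvs.example.org:/cvs",
              cvsmodule="myproject",
              workdir="build",
              mode="copy",        # keep a pristine copydir, copy per build
              retry=(10, 2),      # wait 10s, retry up to 2 times
              login="anonymous"),
])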
-
-class SVN(Source):
- """I perform Subversion checkout/update operations."""
-
- name = 'svn'
-
- def __init__(self, svnurl=None, baseURL=None, defaultBranch=None,
- directory=None, **kwargs):
- """
- @type svnurl: string
- @param svnurl: the URL which points to the Subversion server,
- combining the access method (HTTP, ssh, local file),
- the repository host/port, the repository path, the
- sub-tree within the repository, and the branch to
- check out. Using C{svnurl} does not enable builds of
- alternate branches: use C{baseURL} to enable this.
- Use exactly one of C{svnurl} and C{baseURL}.
-
- @param baseURL: if branches are enabled, this is the base URL to
- which a branch name will be appended. It should
- probably end in a slash. Use exactly one of
- C{svnurl} and C{baseURL}.
-
- @param defaultBranch: if branches are enabled, this is the branch
- to use if the Build does not specify one
- explicitly. It will simply be appended
- to C{baseURL} and the result handed to
- the SVN command.
- """
-
- if not kwargs.has_key('workdir') and directory is not None:
- # deal with old configs
- warnings.warn("Please use workdir=, not directory=",
- DeprecationWarning)
- kwargs['workdir'] = directory
-
- self.svnurl = svnurl
- self.baseURL = baseURL
- self.branch = defaultBranch
-
- Source.__init__(self, **kwargs)
-
- if not svnurl and not baseURL:
- raise ValueError("you must use exactly one of svnurl and baseURL")
-
-
- def computeSourceRevision(self, changes):
- if not changes:
- return None
- lastChange = max([int(c.revision) for c in changes])
- return lastChange
-
- def startVC(self, branch, revision, patch):
-
- # handle old slaves
- warnings = []
- slavever = self.slaveVersion("svn", "old")
- if not slavever:
- m = "slave does not have the 'svn' command"
- raise BuildSlaveTooOldError(m)
-
- if self.slaveVersionIsOlderThan("svn", "1.39"):
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- if (branch != self.branch
- and self.args['mode'] in ("update", "copy")):
- m = ("This buildslave (%s) does not know about multiple "
- "branches, and using mode=%s would probably build the "
- "wrong tree. "
- "Refusing to build. Please upgrade the buildslave to "
- "buildbot-0.7.0 or newer." % (self.build.slavename,
- self.args['mode']))
- raise BuildSlaveTooOldError(m)
-
- if slavever == "old":
- # 0.5.0 compatibility
- if self.args['mode'] in ("clobber", "copy"):
- # TODO: use some shell commands to make up for the
- # deficiency, by blowing away the old directory first (thus
- # forcing a full checkout)
- warnings.append("WARNING: this slave can only do SVN updates"
- ", not mode=%s\n" % self.args['mode'])
- log.msg("WARNING: this slave only does mode=update")
- if self.args['mode'] == "export":
- raise BuildSlaveTooOldError("old slave does not have "
- "mode=export")
- self.args['directory'] = self.args['workdir']
- if revision is not None:
- # 0.5.0 can only do HEAD. We have no way of knowing whether
- # the requested revision is HEAD or not, and for
- # slowly-changing trees this will probably do the right
- # thing, so let it pass with a warning
- m = ("WARNING: old slave can only update to HEAD, not "
- "revision=%s" % revision)
- log.msg(m)
- warnings.append(m + "\n")
- revision = "HEAD" # interprets this key differently
- if patch:
- raise BuildSlaveTooOldError("old slave can't do patch")
-
- if self.svnurl:
- assert not branch # we need baseURL= to use branches
- self.args['svnurl'] = self.svnurl
- else:
- self.args['svnurl'] = self.baseURL + branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- if revision is not None:
- revstuff.append("r%s" % revision)
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("svn", self.args)
- self.startCommand(cmd, warnings)
-
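And the branch-aware counterpart for SVN, using baseURL/defaultBranch instead of svnurl so that a branch named by the build request is simply appended to the base URL; the URL and branch names are placeholders, and the factory.s wiring is again an assumption.

# Sketch only: an SVN step configured for multiple branches.
from buildbot.process import factory, step

f = factory.BuildFactory([
    factory.s(step.SVN,
              baseURL="http://svn.example.org/repo/",  # branch name appended
              defaultBranch="trunk",
              workdir="build",
              mode="update"),
])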
-
-class Darcs(Source):
- """Check out a source tree from a Darcs repository at 'repourl'.
-
- To the best of my knowledge, Darcs has no concept of file modes. This
- means the eXecute-bit will be cleared on all source files. As a result,
- you may need to invoke configuration scripts with something like:
-
- C{s(step.Configure, command=['/bin/sh', './configure'])}
- """
-
- name = "darcs"
-
- def __init__(self, repourl=None, baseURL=None, defaultBranch=None,
- **kwargs):
- """
- @type repourl: string
- @param repourl: the URL which points at the Darcs repository. This
- is used as the default branch. Using C{repourl} does
- not enable builds of alternate branches: use
- C{baseURL} to enable this. Use either C{repourl} or
- C{baseURL}, not both.
-
- @param baseURL: if branches are enabled, this is the base URL to
- which a branch name will be appended. It should
- probably end in a slash. Use exactly one of
- C{repourl} and C{baseURL}.
-
- @param defaultBranch: if branches are enabled, this is the branch
- to use if the Build does not specify one
- explicitly. It will simply be appended to
- C{baseURL} and the result handed to the
- 'darcs pull' command.
- """
- self.repourl = repourl
- self.baseURL = baseURL
- self.branch = defaultBranch
- Source.__init__(self, **kwargs)
- assert kwargs['mode'] != "export", \
- "Darcs does not have an 'export' mode"
- if (not repourl and not baseURL) or (repourl and baseURL):
- raise ValueError("you must provide exactly one of repourl and"
- " baseURL")
-
- def startVC(self, branch, revision, patch):
- slavever = self.slaveVersion("darcs")
- if not slavever:
- m = "slave is too old, does not know about darcs"
- raise BuildSlaveTooOldError(m)
-
- if self.slaveVersionIsOlderThan("darcs", "1.39"):
- if revision:
- # TODO: revisit this once we implement computeSourceRevision
- m = "0.6.6 slaves can't handle args['revision']"
- raise BuildSlaveTooOldError(m)
-
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- if (branch != self.branch
- and self.args['mode'] in ("update", "copy")):
- m = ("This buildslave (%s) does not know about multiple "
- "branches, and using mode=%s would probably build the "
- "wrong tree. "
- "Refusing to build. Please upgrade the buildslave to "
- "buildbot-0.7.0 or newer." % (self.build.slavename,
- self.args['mode']))
- raise BuildSlaveTooOldError(m)
-
- if self.repourl:
- assert not branch # we need baseURL= to use branches
- self.args['repourl'] = self.repourl
- else:
- self.args['repourl'] = self.baseURL + branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("darcs", self.args)
- self.startCommand(cmd)
-
-
-class Git(Source):
- """Check out a source tree from a git repository 'repourl'."""
-
- name = "git"
-
- def __init__(self, repourl, **kwargs):
- """
- @type repourl: string
- @param repourl: the URL which points at the git repository
- """
- self.branch = None # TODO
- Source.__init__(self, **kwargs)
- self.args['repourl'] = repourl
-
- def startVC(self, branch, revision, patch):
- self.args['branch'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
- slavever = self.slaveVersion("git")
- if not slavever:
- raise BuildSlaveTooOldError("slave is too old, does not know "
- "about git")
- cmd = LoggedRemoteCommand("git", self.args)
- self.startCommand(cmd)
-
-
-class Arch(Source):
- """Check out a source tree from an Arch repository named 'archive'
- available at 'url'. 'version' specifies which version number (development
- line) will be used for the checkout: this is mostly equivalent to a
- branch name. This class uses the 'tla' tool to do the checkout; to use
- 'baz', see L{Bazaar} instead.
- """
-
- name = "arch"
- # TODO: slaves >0.6.6 will accept args['build-config'], so use it
-
- def __init__(self, url, version, archive=None, **kwargs):
- """
- @type url: string
- @param url: the Arch coordinates of the repository. This is
- typically an http:// URL, but could also be the absolute
- pathname of a local directory instead.
-
- @type version: string
- @param version: the category--branch--version to check out. This is
- the default branch. If a build specifies a different
- branch, it will be used instead of this.
-
- @type archive: string
- @param archive: The archive name. If provided, it must match the one
- that comes from the repository. If not, the
- repository's default will be used.
- """
- self.branch = version
- Source.__init__(self, **kwargs)
- self.args.update({'url': url,
- 'archive': archive,
- })
-
- def computeSourceRevision(self, changes):
- # in Arch, fully-qualified revision numbers look like:
- # arch@buildbot.sourceforge.net--2004/buildbot--dev--0--patch-104
- # For any given builder, all of this is fixed except the patch-104.
- # The Change might have any part of the fully-qualified string, so we
- # just look for the last part. We return the "patch-NN" string.
- if not changes:
- return None
- lastChange = None
- for c in changes:
- if not c.revision:
- continue
- if c.revision.endswith("--base-0"):
- rev = 0
- else:
- i = c.revision.rindex("patch")
- rev = int(c.revision[i+len("patch-"):])
- lastChange = max(lastChange, rev)
- if lastChange is None:
- return None
- if lastChange == 0:
- return "base-0"
- return "patch-%d" % lastChange
-
- def checkSlaveVersion(self, cmd, branch):
- warnings = []
- slavever = self.slaveVersion(cmd)
- if not slavever:
- m = "slave is too old, does not know about %s" % cmd
- raise BuildSlaveTooOldError(m)
-
- # slave 1.28 and later understand 'revision'
- if self.slaveVersionIsOlderThan(cmd, "1.28"):
- if not self.alwaysUseLatest:
- # we don't know whether our requested revision is the latest
- # or not. If the tree does not change very quickly, this will
- # probably build the right thing, so emit a warning rather
- # than refuse to build at all
- m = "WARNING, buildslave is too old to use a revision"
- log.msg(m)
- warnings.append(m + "\n")
-
- if self.slaveVersionIsOlderThan(cmd, "1.39"):
- # the slave doesn't know to avoid re-using the same sourcedir
- # when the branch changes. We have no way of knowing which branch
- # the last build used, so if we're using a non-default branch and
- # either 'update' or 'copy' modes, it is safer to refuse to
- # build, and tell the user they need to upgrade the buildslave.
- if (branch != self.branch
- and self.args['mode'] in ("update", "copy")):
- m = ("This buildslave (%s) does not know about multiple "
- "branches, and using mode=%s would probably build the "
- "wrong tree. "
- "Refusing to build. Please upgrade the buildslave to "
- "buildbot-0.7.0 or newer." % (self.build.slavename,
- self.args['mode']))
- log.msg(m)
- raise BuildSlaveTooOldError(m)
-
- return warnings
-
- def startVC(self, branch, revision, patch):
- self.args['version'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
- warnings = self.checkSlaveVersion("arch", branch)
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- if revision is not None:
- revstuff.append("patch%s" % revision)
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("arch", self.args)
- self.startCommand(cmd, warnings)
-
-
-class Bazaar(Arch):
- """Bazaar is an alternative client for Arch repositories. baz is mostly
- compatible with tla, but archive registration is slightly different."""
-
- # TODO: slaves >0.6.6 will accept args['build-config'], so use it
-
- def __init__(self, url, version, archive, **kwargs):
- """
- @type url: string
- @param url: the Arch coordinates of the repository. This is
- typically an http:// URL, but could also be the absolute
- pathname of a local directory instead.
-
- @type version: string
- @param version: the category--branch--version to check out
-
- @type archive: string
- @param archive: The archive name (required). This must always match
- the one that comes from the repository, otherwise the
- buildslave will attempt to get sources from the wrong
- archive.
- """
- self.branch = version
- Source.__init__(self, **kwargs)
- self.args.update({'url': url,
- 'archive': archive,
- })
-
- def startVC(self, branch, revision, patch):
- self.args['version'] = branch
- self.args['revision'] = revision
- self.args['patch'] = patch
- warnings = self.checkSlaveVersion("bazaar", branch)
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- if revision is not None:
- revstuff.append("patch%s" % revision)
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("bazaar", self.args)
- self.startCommand(cmd, warnings)
-
-class Mercurial(Source):
- """Check out a source tree from a mercurial repository 'repourl'."""
-
- name = "hg"
-
- def __init__(self, repourl=None, baseURL=None, defaultBranch=None,
- **kwargs):
- """
- @type repourl: string
- @param repourl: the URL which points at the Mercurial repository.
- This is used as the default branch. Using C{repourl}
- does not enable builds of alternate branches: use
- C{baseURL} to enable this. Use either C{repourl} or
- C{baseURL}, not both.
-
- @param baseURL: if branches are enabled, this is the base URL to
- which a branch name will be appended. It should
- probably end in a slash. Use exactly one of
- C{repourl} and C{baseURL}.
-
- @param defaultBranch: if branches are enabled, this is the branch
- to use if the Build does not specify one
- explicitly. It will simply be appended to
- C{baseURL} and the result handed to the
- 'hg clone' command.
- """
- self.repourl = repourl
- self.baseURL = baseURL
- self.branch = defaultBranch
- Source.__init__(self, **kwargs)
- if (not repourl and not baseURL) or (repourl and baseURL):
- raise ValueError("you must provide exactly one of repourl and"
- " baseURL")
-
- def startVC(self, branch, revision, patch):
- slavever = self.slaveVersion("hg")
- if not slavever:
- raise BuildSlaveTooOldError("slave is too old, does not know "
- "about hg")
-
- if self.repourl:
- assert not branch # we need baseURL= to use branches
- self.args['repourl'] = self.repourl
- else:
- self.args['repourl'] = self.baseURL + branch
- self.args['revision'] = revision
- self.args['patch'] = patch
-
- revstuff = []
- if branch is not None and branch != self.branch:
- revstuff.append("[branch]")
- self.description.extend(revstuff)
- self.descriptionDone.extend(revstuff)
-
- cmd = LoggedRemoteCommand("hg", self.args)
- self.startCommand(cmd)
-
-
-class todo_P4(Source):
- name = "p4"
-
- # to create the working directory for the first time:
- # need to create a View. The 'Root' parameter will have to be filled
- # in by the buildslave with the abspath of the basedir. Then the
- # setup process involves 'p4 client' to set up the view. After
- # that, 'p4 sync' does all the necessary updating.
- # P4PORT=P4PORT P4CLIENT=name p4 client
-
- def __init__(self, p4port, view, **kwargs):
- Source.__init__(self, **kwargs)
- self.args.update({'p4port': p4port,
- 'view': view,
- })
-
- def startVC(self, branch, revision, patch):
- cmd = LoggedRemoteCommand("p4", self.args)
- self.startCommand(cmd)
-
-class P4Sync(Source):
- """This is a partial solution for using a P4 source repository. You are
- required to manually set up each build slave with a useful P4
- environment, which means setting various per-slave environment variables,
- and creating a P4 client specification which maps the right files into
- the slave's working directory. Once you have done that, this step merely
- performs a 'p4 sync' to update that workspace with the newest files.
-
- Each slave needs the following environment:
-
- - PATH: the 'p4' binary must be on the slave's PATH
- - P4USER: each slave needs a distinct user account
- - P4CLIENT: each slave needs a distinct client specification
-
- You should use 'p4 client' (?) to set up a client view spec which maps
- the desired files into $SLAVEBASE/$BUILDERBASE/source .
- """
-
- name = "p4sync"
-
- def __init__(self, p4port, p4user, p4passwd, p4client, **kwargs):
- assert kwargs['mode'] == "copy", "P4Sync can only be used in mode=copy"
- self.branch = None
- Source.__init__(self, **kwargs)
- self.args['p4port'] = p4port
- self.args['p4user'] = p4user
- self.args['p4passwd'] = p4passwd
- self.args['p4client'] = p4client
-
- def computeSourceRevision(self, changes):
- if not changes:
- return None
- lastChange = max([int(c.revision) for c in changes])
- return lastChange
-
- def startVC(self, branch, revision, patch):
- slavever = self.slaveVersion("p4sync")
- assert slavever, "slave is too old, does not know about p4"
- cmd = LoggedRemoteCommand("p4sync", self.args)
- self.startCommand(cmd)
-
-
-class Dummy(BuildStep):
- """I am a dummy no-op step, which runs entirely on the master, and simply
- waits 5 seconds before finishing with SUCCESS
- """
-
- haltOnFailure = True
- name = "dummy"
-
- def __init__(self, timeout=5, **kwargs):
- """
- @type timeout: int
- @param timeout: the number of seconds to delay before completing
- """
- BuildStep.__init__(self, **kwargs)
- self.timeout = timeout
- self.timer = None
-
- def start(self):
- self.step_status.setColor("yellow")
- self.step_status.setText(["delay", "%s secs" % self.timeout])
- self.timer = reactor.callLater(self.timeout, self.done)
-
- def interrupt(self, reason):
- if self.timer:
- self.timer.cancel()
- self.timer = None
- self.step_status.setColor("red")
- self.step_status.setText(["delay", "interrupted"])
- self.finished(FAILURE)
-
- def done(self):
- self.step_status.setColor("green")
- self.finished(SUCCESS)
-
-class FailingDummy(Dummy):
- """I am a dummy no-op step that 'runs' master-side and finishes (with a
- FAILURE status) after 5 seconds."""
-
- name = "failing dummy"
-
- def start(self):
- self.step_status.setColor("yellow")
- self.step_status.setText(["boom", "%s secs" % self.timeout])
- self.timer = reactor.callLater(self.timeout, self.done)
-
- def done(self):
- self.step_status.setColor("red")
- self.finished(FAILURE)
-
-class RemoteDummy(LoggingBuildStep):
- """I am a dummy no-op step that runs on the remote side and
- simply waits 5 seconds before completing with success.
- See L{buildbot.slave.commands.DummyCommand}
- """
-
- haltOnFailure = True
- name = "remote dummy"
-
- def __init__(self, timeout=5, **kwargs):
- """
- @type timeout: int
- @param timeout: the number of seconds to delay
- """
- LoggingBuildStep.__init__(self, **kwargs)
- self.timeout = timeout
- self.description = ["remote", "delay", "%s secs" % timeout]
-
- def describe(self, done=False):
- return self.description
-
- def start(self):
- args = {'timeout': self.timeout}
- cmd = LoggedRemoteCommand("dummy", args)
- self.startCommand(cmd)
-
-class Configure(ShellCommand):
-
- name = "configure"
- haltOnFailure = 1
- description = ["configuring"]
- descriptionDone = ["configure"]
- command = ["./configure"]
-
-class Compile(ShellCommand):
-
- name = "compile"
- haltOnFailure = 1
- description = ["compiling"]
- descriptionDone = ["compile"]
- command = ["make", "all"]
-
- OFFprogressMetrics = ['output']
- # things to track: number of files compiled, number of directories
- # traversed (assuming 'make' is being used)
-
- def createSummary(self, cmd):
- # TODO: grep for the characteristic GCC warning/error lines and
- # assemble them into a pair of buffers
- pass
-
-class Test(ShellCommand):
-
- name = "test"
- warnOnFailure = 1
- description = ["testing"]
- descriptionDone = ["test"]
- command = ["make", "test"]
diff --git a/buildbot/buildbot-source/buildbot/process/step_twisted.py b/buildbot/buildbot-source/buildbot/process/step_twisted.py
deleted file mode 100644
index 36d8632bf..000000000
--- a/buildbot/buildbot-source/buildbot/process/step_twisted.py
+++ /dev/null
@@ -1,754 +0,0 @@
-# -*- test-case-name: buildbot.test.test_twisted -*-
-
-from twisted.python import log, failure
-
-from buildbot.status import tests, builder
-from buildbot.status.builder import SUCCESS, FAILURE, WARNINGS, SKIPPED
-from buildbot.process import step
-from buildbot.process.step import BuildStep, ShellCommand
-
-try:
- import cStringIO as StringIO
-except ImportError:
- import StringIO
-import os, re, types
-
-# BuildSteps that are specific to the Twisted source tree
-
-class HLint(ShellCommand):
- """I run a 'lint' checker over a set of .xhtml files. Any deviations
-    from recommended style are flagged and put in the output log.
-
-    This step looks at the Changes in the parent Build to extract a list of
- Lore XHTML files to check."""
-
- name = "hlint"
- description = ["running", "hlint"]
- descriptionDone = ["hlint"]
- warnOnWarnings = True
- warnOnFailure = True
- # TODO: track time, but not output
- warnings = 0
-
- def __init__(self, python=None, **kwargs):
- ShellCommand.__init__(self, **kwargs)
- self.python = python
-
- def start(self):
- # create the command
- htmlFiles = {}
- for f in self.build.allFiles():
- if f.endswith(".xhtml") and not f.startswith("sandbox/"):
- htmlFiles[f] = 1
- # remove duplicates
- hlintTargets = htmlFiles.keys()
- hlintTargets.sort()
- if not hlintTargets:
- return SKIPPED
- self.hlintFiles = hlintTargets
- c = []
- if self.python:
- c.append(self.python)
- c += ["bin/lore", "-p", "--output", "lint"] + self.hlintFiles
- self.setCommand(c)
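-        # For example (hypothetical file names), python="python2.3" and two
-        # changed files would produce a command like:
-        #   ['python2.3', 'bin/lore', '-p', '--output', 'lint',
-        #    'doc/a.xhtml', 'doc/b.xhtml']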
-
-        # add an extra log file to show the .xhtml files we're checking
- self.addCompleteLog("files", "\n".join(self.hlintFiles)+"\n")
-
- ShellCommand.start(self)
-
- def commandComplete(self, cmd):
- # TODO: remove the 'files' file (a list of .xhtml files that were
- # submitted to hlint) because it is available in the logfile and
- # mostly exists to give the user an idea of how long the step will
-        # take anyway.
- lines = cmd.log.getText().split("\n")
- warningLines = filter(lambda line:':' in line, lines)
- if warningLines:
-            self.addCompleteLog("warnings", "\n".join(warningLines) + "\n")
- warnings = len(warningLines)
- self.warnings = warnings
-
- def evaluateCommand(self, cmd):
- # warnings are in stdout, rc is always 0, unless the tools break
- if cmd.rc != 0:
- return FAILURE
- if self.warnings:
- return WARNINGS
- return SUCCESS
-
- def getText2(self, cmd, results):
- if cmd.rc != 0:
- return ["hlint"]
- return ["%d hlin%s" % (self.warnings,
- self.warnings == 1 and 't' or 'ts')]
-
-def countFailedTests(output):
- # start scanning 10kb from the end, because there might be a few kb of
- # import exception tracebacks between the total/time line and the errors
- # line
- chunk = output[-10000:]
- lines = chunk.split("\n")
- lines.pop() # blank line at end
- # lines[-3] is "Ran NN tests in 0.242s"
- # lines[-2] is blank
- # lines[-1] is 'OK' or 'FAILED (failures=1, errors=12)'
- # or 'FAILED (failures=1)'
- # or "PASSED (skips=N, successes=N)" (for Twisted-2.0)
- # there might be other lines dumped here. Scan all the lines.
- res = {'total': None,
- 'failures': 0,
- 'errors': 0,
- 'skips': 0,
- 'expectedFailures': 0,
- 'unexpectedSuccesses': 0,
- }
- for l in lines:
- out = re.search(r'Ran (\d+) tests', l)
- if out:
- res['total'] = int(out.group(1))
- if (l.startswith("OK") or
- l.startswith("FAILED ") or
- l.startswith("PASSED")):
- # the extra space on FAILED_ is to distinguish the overall
- # status from an individual test which failed. The lack of a
- # space on the OK is because it may be printed without any
-            # additional text (if there are no skips, etc.)
- out = re.search(r'failures=(\d+)', l)
- if out: res['failures'] = int(out.group(1))
- out = re.search(r'errors=(\d+)', l)
- if out: res['errors'] = int(out.group(1))
- out = re.search(r'skips=(\d+)', l)
- if out: res['skips'] = int(out.group(1))
- out = re.search(r'expectedFailures=(\d+)', l)
- if out: res['expectedFailures'] = int(out.group(1))
- out = re.search(r'unexpectedSuccesses=(\d+)', l)
- if out: res['unexpectedSuccesses'] = int(out.group(1))
- # successes= is a Twisted-2.0 addition, and is not currently used
- out = re.search(r'successes=(\d+)', l)
- if out: res['successes'] = int(out.group(1))
-
- return res
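-# Illustrative result (a hedged example; the line formats are described in
-# the comments above): for output ending in
-#   "Ran 7 tests in 0.2s\n\nFAILED (failures=1, errors=2)\n"
-# countFailedTests() returns total=7, failures=1, errors=2, and zero for the
-# remaining counters.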
-
-UNSPECIFIED=() # since None is a valid choice
-
-class Trial(ShellCommand):
- """I run a unit test suite using 'trial', a unittest-like testing
- framework that comes with Twisted. Trial is used to implement Twisted's
- own unit tests, and is the unittest-framework of choice for many projects
- that use Twisted internally.
-
- Projects that use trial typically have all their test cases in a 'test'
- subdirectory of their top-level library directory. I.e. for my package
- 'petmail', the tests are in 'petmail/test/test_*.py'. More complicated
- packages (like Twisted itself) may have multiple test directories, like
- 'twisted/test/test_*.py' for the core functionality and
- 'twisted/mail/test/test_*.py' for the email-specific tests.
-
- To run trial tests, you run the 'trial' executable and tell it where the
- test cases are located. The most common way of doing this is with a
- module name. For petmail, I would run 'trial petmail.test' and it would
- locate all the test_*.py files under petmail/test/, running every test
- case it could find in them. Unlike the unittest.py that comes with
- Python, you do not run the test_foo.py as a script; you always let trial
- do the importing and running. The 'tests' parameter controls which tests
- trial will run: it can be a string or a list of strings.
-
- You can also use a higher-level module name and pass the --recursive flag
- to trial: this will search recursively within the named module to find
- all test cases. For large multiple-test-directory projects like Twisted,
- this means you can avoid specifying all the test directories explicitly.
- Something like 'trial --recursive twisted' will pick up everything.
-
- To find these test cases, you must set a PYTHONPATH that allows something
- like 'import petmail.test' to work. For packages that don't use a
- separate top-level 'lib' directory, PYTHONPATH=. will work, and will use
- the test cases (and the code they are testing) in-place.
- PYTHONPATH=build/lib or PYTHONPATH=build/lib.$ARCH are also useful when
-    you do a 'setup.py build' step first. The 'testpath' attribute of this
- class controls what PYTHONPATH= is set to.
-
- Trial has the ability (through the --testmodule flag) to run only the set
- of test cases named by special 'test-case-name' tags in source files. We
- can get the list of changed source files from our parent Build and
- provide them to trial, thus running the minimal set of test cases needed
- to cover the Changes. This is useful for quick builds, especially in
- trees with a lot of test cases. The 'testChanges' parameter controls this
- feature: if set, it will override 'tests'.
-
- The trial executable itself is typically just 'trial' (which is usually
- found on your $PATH as /usr/bin/trial), but it can be overridden with the
- 'trial' parameter. This is useful for Twisted's own unittests, which want
- to use the copy of bin/trial that comes with the sources. (when bin/trial
- discovers that it is living in a subdirectory named 'Twisted', it assumes
- it is being run from the source tree and adds that parent directory to
- PYTHONPATH. Therefore the canonical way to run Twisted's own unittest
- suite is './bin/trial twisted.test' rather than 'PYTHONPATH=.
- /usr/bin/trial twisted.test', especially handy when /usr/bin/trial has
- not yet been installed).
-
- To influence the version of python being used for the tests, or to add
- flags to the command, set the 'python' parameter. This can be a string
- (like 'python2.2') or a list (like ['python2.3', '-Wall']).
-
- Trial creates and switches into a directory named _trial_temp/ before
- running the tests, and sends the twisted log (which includes all
- exceptions) to a file named test.log . This file will be pulled up to
- the master where it can be seen as part of the status output.
-
- There are some class attributes which may be usefully overridden
- by subclasses. 'trialMode' and 'trialArgs' can influence the trial
- command line.
- """
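-    # Illustrative step configurations (module names reuse the hypothetical
-    # 'petmail' example from the docstring above):
-    #   Trial(testpath=".", tests=["petmail.test"])
-    #   Trial(testpath="build/lib", testChanges=True)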
-
- flunkOnFailure = True
- python = None
- trial = "trial"
- trialMode = ["-to"]
- trialArgs = []
- testpath = UNSPECIFIED # required (but can be None)
- testChanges = False # TODO: needs better name
- recurse = False
- reactor = None
- randomly = False
- tests = None # required
-
- def __init__(self, reactor=UNSPECIFIED, python=None, trial=None,
- testpath=UNSPECIFIED,
- tests=None, testChanges=None,
- recurse=None, randomly=None,
- trialMode=None, trialArgs=None,
- **kwargs):
- """
- @type testpath: string
- @param testpath: use in PYTHONPATH when running the tests. If
- None, do not set PYTHONPATH. Setting this to '.' will
- cause the source files to be used in-place.
-
- @type python: string (without spaces) or list
- @param python: which python executable to use. Will form the start of
- the argv array that will launch trial. If you use this,
- you should set 'trial' to an explicit path (like
- /usr/bin/trial or ./bin/trial). Defaults to None, which
- leaves it out entirely (running 'trial args' instead of
- 'python ./bin/trial args'). Likely values are 'python',
- ['python2.2'], ['python', '-Wall'], etc.
-
- @type trial: string
- @param trial: which 'trial' executable to run.
- Defaults to 'trial', which will cause $PATH to be
- searched and probably find /usr/bin/trial . If you set
- 'python', this should be set to an explicit path (because
- 'python2.3 trial' will not work).
-
- @type trialMode: list of strings
- @param trialMode: a list of arguments to pass to trial, specifically
- to set the reporting mode. This defaults to ['-to']
- which means 'verbose colorless output' to the trial
- that comes with Twisted-2.0.x and at least -2.1.0 .
- Newer versions of Twisted may come with a trial
- that prefers ['--reporter=bwverbose'].
-
- @type trialArgs: list of strings
- @param trialArgs: a list of arguments to pass to trial, available to
- turn on any extra flags you like. Defaults to [].
-
- @type tests: list of strings
- @param tests: a list of test modules to run, like
- ['twisted.test.test_defer', 'twisted.test.test_process'].
- If this is a string, it will be converted into a one-item
- list.
-
- @type testChanges: boolean
- @param testChanges: if True, ignore the 'tests' parameter and instead
- ask the Build for all the files that make up the
- Changes going into this build. Pass these filenames
- to trial and ask it to look for test-case-name
- tags, running just the tests necessary to cover the
- changes.
-
- @type recurse: boolean
- @param recurse: If True, pass the --recurse option to trial, allowing
- test cases to be found in deeper subdirectories of the
- modules listed in 'tests'. This does not appear to be
- necessary when using testChanges.
-
- @type reactor: string
- @param reactor: which reactor to use, like 'gtk' or 'java'. If not
- provided, the Twisted's usual platform-dependent
- default is used.
-
- @type randomly: boolean
- @param randomly: if True, add the --random=0 argument, which instructs
- trial to run the unit tests in a random order each
- time. This occasionally catches problems that might be
- masked when one module always runs before another
- (like failing to make registerAdapter calls before
- lookups are done).
-
- @type kwargs: dict
- @param kwargs: parameters. The following parameters are inherited from
- L{ShellCommand} and may be useful to set: workdir,
- haltOnFailure, flunkOnWarnings, flunkOnFailure,
- warnOnWarnings, warnOnFailure, want_stdout, want_stderr,
- timeout.
- """
- ShellCommand.__init__(self, **kwargs)
-
- if python:
- self.python = python
- if self.python is not None:
- if type(self.python) is str:
- self.python = [self.python]
- for s in self.python:
- if " " in s:
- # this is not strictly an error, but I suspect more
- # people will accidentally try to use python="python2.3
- # -Wall" than will use embedded spaces in a python flag
-                    log.msg("python= component '%s' has spaces" % s)
- log.msg("To add -Wall, use python=['python', '-Wall']")
- why = "python= value has spaces, probably an error"
- raise ValueError(why)
-
- if trial:
- self.trial = trial
- if " " in self.trial:
- raise ValueError("trial= value has spaces")
- if trialMode is not None:
- self.trialMode = trialMode
- if trialArgs is not None:
- self.trialArgs = trialArgs
-
- if testpath is not UNSPECIFIED:
- self.testpath = testpath
- if self.testpath is UNSPECIFIED:
- raise ValueError("You must specify testpath= (it can be None)")
- assert isinstance(self.testpath, str) or self.testpath is None
-
- if reactor is not UNSPECIFIED:
- self.reactor = reactor
-
- if tests is not None:
- self.tests = tests
- if type(self.tests) is str:
- self.tests = [self.tests]
- if testChanges is not None:
- self.testChanges = testChanges
- #self.recurse = True # not sure this is necessary
-
- if not self.testChanges and self.tests is None:
- raise ValueError("Must either set testChanges= or provide tests=")
-
- if recurse is not None:
- self.recurse = recurse
- if randomly is not None:
- self.randomly = randomly
-
- # build up most of the command, then stash it until start()
- command = []
- if self.python:
- command.extend(self.python)
- command.append(self.trial)
- command.extend(self.trialMode)
- if self.recurse:
- command.append("--recurse")
- if self.reactor:
- command.append("--reactor=%s" % reactor)
- if self.randomly:
- command.append("--random=0")
- command.extend(self.trialArgs)
- self.command = command
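-        # e.g. with the defaults plus recurse=True, this stash would hold
-        # ['trial', '-to', '--recurse']; start() later appends either the
-        # --testmodule= arguments or the 'tests' list.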
-
- if self.reactor:
- self.description = ["testing", "(%s)" % self.reactor]
- self.descriptionDone = ["tests"]
- # commandComplete adds (reactorname) to self.text
- else:
- self.description = ["testing"]
- self.descriptionDone = ["tests"]
-
- def setupEnvironment(self, cmd):
- ShellCommand.setupEnvironment(self, cmd)
- if self.testpath != None:
- e = cmd.args['env']
- if e is None:
- cmd.args['env'] = {'PYTHONPATH': self.testpath}
- else:
- # TODO: somehow, each build causes another copy of
- # self.testpath to get prepended
- if e.get('PYTHONPATH', "") == "":
- e['PYTHONPATH'] = self.testpath
- else:
- e['PYTHONPATH'] = self.testpath + ":" + e['PYTHONPATH']
- try:
- p = cmd.args['env']['PYTHONPATH']
- if type(p) is not str:
- log.msg("hey, not a string:", p)
- assert False
- except (KeyError, TypeError):
- # KeyError if args doesn't have ['env']
- # KeyError if args['env'] doesn't have ['PYTHONPATH']
- # TypeError if args is None
- pass
-
- def start(self):
- # now that self.build.allFiles() is nailed down, finish building the
- # command
- if self.testChanges:
- for f in self.build.allFiles():
- if f.endswith(".py"):
- self.command.append("--testmodule=%s" % f)
- else:
- self.command.extend(self.tests)
- log.msg("Trial.start: command is", self.command)
- ShellCommand.start(self)
-
- def _commandComplete(self, cmd):
- # before doing the summary, etc, fetch _trial_temp/test.log
- # TODO: refactor ShellCommand so I don't have to override such
- # an internal method
- catcmd = ["cat", "_trial_temp/test.log"]
- c2 = step.RemoteShellCommand(command=catcmd,
- workdir=self.workdir,
- )
- self.cmd = c2
- loog = self.addLog("test.log")
- c2.useLog(loog, True)
- d = c2.run(self, self.remote)
- d.addCallback(self._commandComplete2, cmd)
- return d
-
- def _commandComplete2(self, c2, cmd):
- # pass the original RemoteShellCommand to the summarizer
- return ShellCommand._commandComplete(self, cmd)
-
- def rtext(self, fmt='%s'):
- if self.reactor:
- rtext = fmt % self.reactor
- return rtext.replace("reactor", "")
- return ""
-
-
- def commandComplete(self, cmd):
- # figure out all status, then let the various hook functions return
- # different pieces of it
-
- output = cmd.log.getText()
- counts = countFailedTests(output)
-
- total = counts['total']
- failures, errors = counts['failures'], counts['errors']
- parsed = (total != None)
- text = []
- text2 = ""
-
- if cmd.rc == 0:
- if parsed:
- results = SUCCESS
- if total:
- text += ["%d %s" % \
- (total,
- total == 1 and "test" or "tests"),
- "passed"]
- else:
- text += ["no tests", "run"]
- else:
- results = FAILURE
- text += ["testlog", "unparseable"]
- text2 = "tests"
- else:
- # something failed
- results = FAILURE
- if parsed:
- text.append("tests")
- if failures:
- text.append("%d %s" % \
- (failures,
- failures == 1 and "failure" or "failures"))
- if errors:
- text.append("%d %s" % \
- (errors,
- errors == 1 and "error" or "errors"))
- count = failures + errors
- text2 = "%d tes%s" % (count, (count == 1 and 't' or 'ts'))
- else:
- text += ["tests", "failed"]
- text2 = "tests"
-
- if counts['skips']:
- text.append("%d %s" % \
- (counts['skips'],
- counts['skips'] == 1 and "skip" or "skips"))
- if counts['expectedFailures']:
- text.append("%d %s" % \
- (counts['expectedFailures'],
- counts['expectedFailures'] == 1 and "todo"
- or "todos"))
- if 0: # TODO
- results = WARNINGS
- if not text2:
- text2 = "todo"
-
- if 0:
- # ignore unexpectedSuccesses for now, but it should really mark
- # the build WARNING
- if counts['unexpectedSuccesses']:
- text.append("%d surprises" % counts['unexpectedSuccesses'])
- results = WARNINGS
- if not text2:
- text2 = "tests"
-
- if self.reactor:
- text.append(self.rtext('(%s)'))
- if text2:
- text2 = "%s %s" % (text2, self.rtext('(%s)'))
-
- self.results = results
- self.text = text
- self.text2 = [text2]
-
- def addTestResult(self, testname, results, text, tlog):
- if self.reactor is not None:
- testname = (self.reactor,) + testname
- tr = builder.TestResult(testname, results, text, logs={'log': tlog})
- #self.step_status.build.addTestResult(tr)
- self.build.build_status.addTestResult(tr)
-
- def createSummary(self, loog):
- output = loog.getText()
- problems = ""
- sio = StringIO.StringIO(output)
- warnings = {}
- while 1:
- line = sio.readline()
- if line == "":
- break
- if line.find(" exceptions.DeprecationWarning: ") != -1:
- # no source
- warning = line # TODO: consider stripping basedir prefix here
- warnings[warning] = warnings.get(warning, 0) + 1
- elif (line.find(" DeprecationWarning: ") != -1 or
- line.find(" UserWarning: ") != -1):
- # next line is the source
- warning = line + sio.readline()
- warnings[warning] = warnings.get(warning, 0) + 1
- elif line.find("Warning: ") != -1:
- warning = line
- warnings[warning] = warnings.get(warning, 0) + 1
-
- if line.find("=" * 60) == 0 or line.find("-" * 60) == 0:
- problems += line
- problems += sio.read()
- break
-
- if problems:
- self.addCompleteLog("problems", problems)
- # now parse the problems for per-test results
- pio = StringIO.StringIO(problems)
- pio.readline() # eat the first separator line
- testname = None
- done = False
- while not done:
- while 1:
- line = pio.readline()
- if line == "":
- done = True
- break
- if line.find("=" * 60) == 0:
- break
- if line.find("-" * 60) == 0:
- # the last case has --- as a separator before the
- # summary counts are printed
- done = True
- break
- if testname is None:
- # the first line after the === is like:
-# EXPECTED FAILURE: testLackOfTB (twisted.test.test_failure.FailureTestCase)
-# SKIPPED: testRETR (twisted.test.test_ftp.TestFTPServer)
-# FAILURE: testBatchFile (twisted.conch.test.test_sftp.TestOurServerBatchFile)
- r = re.search(r'^([^:]+): (\w+) \(([\w\.]+)\)', line)
- if not r:
- # TODO: cleanup, if there are no problems,
- # we hit here
- continue
- result, name, case = r.groups()
- testname = tuple(case.split(".") + [name])
- results = {'SKIPPED': SKIPPED,
- 'EXPECTED FAILURE': SUCCESS,
- 'UNEXPECTED SUCCESS': WARNINGS,
- 'FAILURE': FAILURE,
- 'ERROR': FAILURE,
- 'SUCCESS': SUCCESS, # not reported
- }.get(result, WARNINGS)
- text = result.lower().split()
- loog = line
- # the next line is all dashes
- loog += pio.readline()
- else:
- # the rest goes into the log
- loog += line
- if testname:
- self.addTestResult(testname, results, text, loog)
- testname = None
-
- if warnings:
- lines = warnings.keys()
- lines.sort()
- self.addCompleteLog("warnings", "".join(lines))
-
- def evaluateCommand(self, cmd):
- return self.results
-
- def getText(self, cmd, results):
- return self.text
- def getText2(self, cmd, results):
- return self.text2
-
-
-class ProcessDocs(ShellCommand):
- """I build all docs. This requires some LaTeX packages to be installed.
- It will result in the full documentation book (dvi, pdf, etc).
-
- """
-
- name = "process-docs"
- warnOnWarnings = 1
- command = ["admin/process-docs"]
- description = ["processing", "docs"]
- descriptionDone = ["docs"]
- # TODO: track output and time
-
- def __init__(self, **kwargs):
- """
- @type workdir: string
- @keyword workdir: the workdir to start from: must be the base of the
- Twisted tree
-
- @type results: triple of (int, int, string)
- @keyword results: [rc, warnings, output]
- - rc==0 if all files were converted successfully.
- - warnings is a count of hlint warnings.
- - output is the verbose output of the command.
- """
- ShellCommand.__init__(self, **kwargs)
-
- def createSummary(self, log):
- output = log.getText()
-        # hlint warnings are of the format: 'WARNING: file:line:col: stuff'
- # latex warnings start with "WARNING: LaTeX Warning: stuff", but
- # sometimes wrap around to a second line.
- lines = output.split("\n")
- warningLines = []
- wantNext = False
- for line in lines:
- wantThis = wantNext
- wantNext = False
- if line.startswith("WARNING: "):
- wantThis = True
- wantNext = True
- if wantThis:
- warningLines.append(line)
-
- if warningLines:
- self.addCompleteLog("warnings", "\n".join(warningLines) + "\n")
- self.warnings = len(warningLines)
-
- def evaluateCommand(self, cmd):
- if cmd.rc != 0:
- return FAILURE
- if self.warnings:
- return WARNINGS
- return SUCCESS
-
- def getText(self, cmd, results):
- if results == SUCCESS:
- return ["docs", "successful"]
- if results == WARNINGS:
- return ["docs",
- "%d warnin%s" % (self.warnings,
- self.warnings == 1 and 'g' or 'gs')]
- if results == FAILURE:
- return ["docs", "failed"]
-
- def getText2(self, cmd, results):
- if results == WARNINGS:
- return ["%d do%s" % (self.warnings,
- self.warnings == 1 and 'c' or 'cs')]
- return ["docs"]
-
-
-
-class BuildDebs(ShellCommand):
- """I build the .deb packages."""
-
- name = "debuild"
- flunkOnFailure = 1
- command = ["debuild", "-uc", "-us"]
- description = ["building", "debs"]
- descriptionDone = ["debs"]
-
- def __init__(self, **kwargs):
- """
- @type workdir: string
- @keyword workdir: the workdir to start from (must be the base of the
- Twisted tree)
- @type results: double of [int, string]
- @keyword results: [rc, output].
- - rc == 0 if all .debs were created successfully
- - output: string with any errors or warnings
- """
- ShellCommand.__init__(self, **kwargs)
-
- def commandComplete(self, cmd):
- errors, warnings = 0, 0
- output = cmd.log.getText()
- summary = ""
- sio = StringIO.StringIO(output)
- for line in sio.readlines():
- if line.find("E: ") == 0:
- summary += line
- errors += 1
- if line.find("W: ") == 0:
- summary += line
- warnings += 1
- if summary:
- self.addCompleteLog("problems", summary)
- self.errors = errors
- self.warnings = warnings
-
- def evaluateCommand(self, cmd):
- if cmd.rc != 0:
- return FAILURE
- if self.errors:
- return FAILURE
- if self.warnings:
- return WARNINGS
- return SUCCESS
-
- def getText(self, cmd, results):
- text = ["debuild"]
- if cmd.rc != 0:
- text.append("failed")
- errors, warnings = self.errors, self.warnings
- if warnings or errors:
- text.append("lintian:")
- if warnings:
- text.append("%d warnin%s" % (warnings,
- warnings == 1 and 'g' or 'gs'))
- if errors:
- text.append("%d erro%s" % (errors,
- errors == 1 and 'r' or 'rs'))
- return text
-
- def getText2(self, cmd, results):
- if cmd.rc != 0:
- return ["debuild"]
- if self.errors or self.warnings:
- return ["%d lintian" % (self.errors + self.warnings)]
- return []
-
-class RemovePYCs(ShellCommand):
- name = "remove-.pyc"
- command = 'find . -name "*.pyc" | xargs rm'
- description = ["removing", ".pyc", "files"]
- descriptionDone = ["remove", ".pycs"]
diff --git a/buildbot/buildbot-source/buildbot/process/step_twisted2.py b/buildbot/buildbot-source/buildbot/process/step_twisted2.py
deleted file mode 100644
index b684b60d4..000000000
--- a/buildbot/buildbot-source/buildbot/process/step_twisted2.py
+++ /dev/null
@@ -1,164 +0,0 @@
-#! /usr/bin/python
-
-from buildbot.status import tests
-from buildbot.process.step import SUCCESS, FAILURE, WARNINGS, SKIPPED, \
- BuildStep
-from buildbot.process.step_twisted import RunUnitTests
-
-from zope.interface import implements
-from twisted.python import log, failure
-from twisted.spread import jelly
-from twisted.pb.tokens import BananaError
-from twisted.web.util import formatFailure
-from twisted.web.html import PRE
-from twisted.web.error import NoResource
-
-class Null: pass
-ResultTypes = Null()
-ResultTypeNames = ["SKIP",
- "EXPECTED_FAILURE", "FAILURE", "ERROR",
- "UNEXPECTED_SUCCESS", "SUCCESS"]
-try:
- from twisted.trial import reporter # introduced in Twisted-1.0.5
- # extract the individual result types
- for name in ResultTypeNames:
- setattr(ResultTypes, name, getattr(reporter, name))
-except ImportError:
- from twisted.trial import unittest # Twisted-1.0.4 has them here
- for name in ResultTypeNames:
- setattr(ResultTypes, name, getattr(unittest, name))
-
-log._keepErrors = 0
-from twisted.trial import remote # for trial/jelly parsing
-
-import StringIO
-
-class OneJellyTest(tests.OneTest):
- def html(self, request):
- tpl = "<HTML><BODY>\n\n%s\n\n</body></html>\n"
- pptpl = "<HTML><BODY>\n\n<pre>%s</pre>\n\n</body></html>\n"
- t = request.postpath[0] # one of 'short', 'long' #, or 'html'
- if isinstance(self.results, failure.Failure):
- # it would be nice to remove unittest functions from the
- # traceback like unittest.format_exception() does.
- if t == 'short':
- s = StringIO.StringIO()
- self.results.printTraceback(s)
- return pptpl % PRE(s.getvalue())
- elif t == 'long':
- s = StringIO.StringIO()
- self.results.printDetailedTraceback(s)
- return pptpl % PRE(s.getvalue())
- #elif t == 'html':
- # return tpl % formatFailure(self.results)
- # ACK! source lines aren't stored in the Failure, rather,
- # formatFailure pulls them (by filename) from the local
- # disk. Feh. Even printTraceback() won't work. Double feh.
- return NoResource("No such mode '%s'" % t)
- if self.results == None:
- return tpl % "No results to show: test probably passed."
- # maybe results are plain text?
- return pptpl % PRE(self.results)
-
-class TwistedJellyTestResults(tests.TestResults):
- oneTestClass = OneJellyTest
- def describeOneTest(self, testname):
- return "%s: %s\n" % (testname, self.tests[testname][0])
-
-class RunUnitTestsJelly(RunUnitTests):
- """I run the unit tests with the --jelly option, which generates
- machine-parseable results as the tests are run.
- """
- trialMode = "--jelly"
- implements(remote.IRemoteReporter)
-
- ourtypes = { ResultTypes.SKIP: tests.SKIP,
- ResultTypes.EXPECTED_FAILURE: tests.EXPECTED_FAILURE,
- ResultTypes.FAILURE: tests.FAILURE,
- ResultTypes.ERROR: tests.ERROR,
- ResultTypes.UNEXPECTED_SUCCESS: tests.UNEXPECTED_SUCCESS,
- ResultTypes.SUCCESS: tests.SUCCESS,
- }
-
- def __getstate__(self):
- #d = RunUnitTests.__getstate__(self)
- d = self.__dict__.copy()
- # Banana subclasses are Ephemeral
- if d.has_key("decoder"):
- del d['decoder']
- return d
- def start(self):
- self.decoder = remote.DecodeReport(self)
- # don't accept anything unpleasant from the (untrusted) build slave
- # The jellied stream may have Failures, but everything inside should
- # be a string
- security = jelly.SecurityOptions()
- security.allowBasicTypes()
- security.allowInstancesOf(failure.Failure)
- self.decoder.taster = security
- self.results = TwistedJellyTestResults()
- RunUnitTests.start(self)
-
- def logProgress(self, progress):
- # XXX: track number of tests
- BuildStep.logProgress(self, progress)
-
- def addStdout(self, data):
- if not self.decoder:
- return
- try:
- self.decoder.dataReceived(data)
- except BananaError:
- self.decoder = None
- log.msg("trial --jelly output unparseable, traceback follows")
- log.deferr()
-
- def remote_start(self, expectedTests, times=None):
- print "remote_start", expectedTests
- def remote_reportImportError(self, name, aFailure, times=None):
- pass
- def remote_reportStart(self, testClass, method, times=None):
- print "reportStart", testClass, method
-
- def remote_reportResults(self, testClass, method, resultType, results,
- times=None):
- print "reportResults", testClass, method, resultType
- which = testClass + "." + method
- self.results.addTest(which,
- self.ourtypes.get(resultType, tests.UNKNOWN),
- results)
-
- def finished(self, rc):
- # give self.results to our Build object
- self.build.testsFinished(self.results)
- total = self.results.countTests()
- count = self.results.countFailures()
- result = SUCCESS
- if total == None:
- result = (FAILURE, ['tests%s' % self.rtext(' (%s)')])
- if count:
- result = (FAILURE, ["%d tes%s%s" % (count,
- (count == 1 and 't' or 'ts'),
- self.rtext(' (%s)'))])
- return self.stepComplete(result)
- def finishStatus(self, result):
- total = self.results.countTests()
- count = self.results.countFailures()
- color = "green"
- text = []
- if count == 0:
- text.extend(["%d %s" % \
- (total,
- total == 1 and "test" or "tests"),
- "passed"])
- else:
- text.append("tests")
- text.append("%d %s" % \
- (count,
- count == 1 and "failure" or "failures"))
- color = "red"
- self.updateCurrentActivity(color=color, text=text)
- self.addFileToCurrentActivity("tests", self.results)
- #self.finishStatusSummary()
- self.finishCurrentActivity()
-
diff --git a/buildbot/buildbot-source/buildbot/scheduler.py b/buildbot/buildbot-source/buildbot/scheduler.py
deleted file mode 100644
index 5a9a3a39e..000000000
--- a/buildbot/buildbot-source/buildbot/scheduler.py
+++ /dev/null
@@ -1,688 +0,0 @@
-# -*- test-case-name: buildbot.test.test_dependencies -*-
-
-import time, os.path
-
-from twisted.internet import reactor
-from twisted.application import service, internet, strports
-from twisted.python import log, runtime
-from twisted.protocols import basic
-from twisted.cred import portal, checkers
-from twisted.spread import pb
-
-from buildbot import interfaces, buildset, util, pbutil
-from buildbot.util import now
-from buildbot.status import builder
-from buildbot.twcompat import implements, providedBy
-from buildbot.sourcestamp import SourceStamp
-from buildbot.changes import maildirtwisted
-
-
-class BaseScheduler(service.MultiService, util.ComparableMixin):
- if implements:
- implements(interfaces.IScheduler)
- else:
- __implements__ = (interfaces.IScheduler,
- service.MultiService.__implements__)
-
- def __init__(self, name):
- service.MultiService.__init__(self)
- self.name = name
-
- def __repr__(self):
- # TODO: why can't id() return a positive number? %d is ugly.
- return "<Scheduler '%s' at %d>" % (self.name, id(self))
-
- def submit(self, bs):
- self.parent.submitBuildSet(bs)
-
- def addChange(self, change):
- pass
-
-class BaseUpstreamScheduler(BaseScheduler):
- if implements:
- implements(interfaces.IUpstreamScheduler)
- else:
- __implements__ = (interfaces.IUpstreamScheduler,
- BaseScheduler.__implements__)
-
- def __init__(self, name):
- BaseScheduler.__init__(self, name)
- self.successWatchers = []
-
- def subscribeToSuccessfulBuilds(self, watcher):
- self.successWatchers.append(watcher)
- def unsubscribeToSuccessfulBuilds(self, watcher):
- self.successWatchers.remove(watcher)
-
- def submit(self, bs):
- d = bs.waitUntilFinished()
- d.addCallback(self.buildSetFinished)
- self.parent.submitBuildSet(bs)
-
- def buildSetFinished(self, bss):
- if not self.running:
- return
- if bss.getResults() == builder.SUCCESS:
- ss = bss.getSourceStamp()
- for w in self.successWatchers:
- w(ss)
-
-
-class Scheduler(BaseUpstreamScheduler):
- """The default Scheduler class will run a build after some period of time
- called the C{treeStableTimer}, on a given set of Builders. It only pays
-    attention to a single branch. You can provide a C{fileIsImportant}
- function which will evaluate each Change to decide whether or not it
- should trigger a new build.
- """
-
- fileIsImportant = None
- compare_attrs = ('name', 'treeStableTimer', 'builderNames', 'branch',
- 'fileIsImportant')
-
- def __init__(self, name, branch, treeStableTimer, builderNames,
- fileIsImportant=None):
- """
- @param name: the name of this Scheduler
- @param branch: The branch name that the Scheduler should pay
- attention to. Any Change that is not on this branch
- will be ignored. It can be set to None to only pay
- attention to the default branch.
- @param treeStableTimer: the duration, in seconds, for which the tree
- must remain unchanged before a build will be
- triggered. This is intended to avoid builds
- of partially-committed fixes.
- @param builderNames: a list of Builder names. When this Scheduler
- decides to start a set of builds, they will be
- run on the Builders named by this list.
-
- @param fileIsImportant: A callable which takes one argument (a Change
- instance) and returns True if the change is
- worth building, and False if it is not.
- Unimportant Changes are accumulated until the
- build is triggered by an important change.
- The default value of None means that all
- Changes are important.
- """
-
- BaseUpstreamScheduler.__init__(self, name)
- self.treeStableTimer = treeStableTimer
- for b in builderNames:
- assert isinstance(b, str)
- self.builderNames = builderNames
- self.branch = branch
- if fileIsImportant:
- assert callable(fileIsImportant)
- self.fileIsImportant = fileIsImportant
-
- self.importantChanges = []
- self.unimportantChanges = []
- self.nextBuildTime = None
- self.timer = None
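-        # Illustrative master.cfg usage (hypothetical builder name):
-        #   s = Scheduler("main", branch=None, treeStableTimer=2*60,
-        #                 builderNames=["full-linux"])
-        #   c['schedulers'].append(s)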
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- if self.nextBuildTime is not None:
- return [self.nextBuildTime]
- return []
-
- def addChange(self, change):
- if change.branch != self.branch:
- log.msg("%s ignoring off-branch %s" % (self, change))
- return
- if not self.fileIsImportant:
- self.addImportantChange(change)
- elif self.fileIsImportant(change):
- self.addImportantChange(change)
- else:
- self.addUnimportantChange(change)
-
- def addImportantChange(self, change):
- log.msg("%s: change is important, adding %s" % (self, change))
- self.importantChanges.append(change)
- self.nextBuildTime = max(self.nextBuildTime,
- change.when + self.treeStableTimer)
- self.setTimer(self.nextBuildTime)
-
- def addUnimportantChange(self, change):
- log.msg("%s: change is not important, adding %s" % (self, change))
- self.unimportantChanges.append(change)
-
- def setTimer(self, when):
- log.msg("%s: setting timer to %s" %
- (self, time.strftime("%H:%M:%S", time.localtime(when))))
- now = util.now()
- if when < now:
- when = now + 1
- if self.timer:
- self.timer.cancel()
- self.timer = reactor.callLater(when - now, self.fireTimer)
-
- def stopTimer(self):
- if self.timer:
- self.timer.cancel()
- self.timer = None
-
- def fireTimer(self):
- # clear out our state
- self.timer = None
- self.nextBuildTime = None
- changes = self.importantChanges + self.unimportantChanges
- self.importantChanges = []
- self.unimportantChanges = []
-
- # create a BuildSet, submit it to the BuildMaster
- bs = buildset.BuildSet(self.builderNames,
- SourceStamp(changes=changes))
- self.submit(bs)
-
- def stopService(self):
- self.stopTimer()
- return service.MultiService.stopService(self)
-
-
-class AnyBranchScheduler(BaseUpstreamScheduler):
- """This Scheduler will handle changes on a variety of branches. It will
- accumulate Changes for each branch separately. It works by creating a
- separate Scheduler for each new branch it sees."""
-
- schedulerFactory = Scheduler
- fileIsImportant = None
-
- compare_attrs = ('name', 'branches', 'treeStableTimer', 'builderNames',
- 'fileIsImportant')
-
- def __init__(self, name, branches, treeStableTimer, builderNames,
- fileIsImportant=None):
- """
- @param name: the name of this Scheduler
- @param branches: The branch names that the Scheduler should pay
- attention to. Any Change that is not on one of these
- branches will be ignored. It can be set to None to
- accept changes from any branch. Don't use [] (an
- empty list), because that means we don't pay
- attention to *any* branches, so we'll never build
- anything.
- @param treeStableTimer: the duration, in seconds, for which the tree
- must remain unchanged before a build will be
- triggered. This is intended to avoid builds
- of partially-committed fixes.
- @param builderNames: a list of Builder names. When this Scheduler
- decides to start a set of builds, they will be
- run on the Builders named by this list.
-
- @param fileIsImportant: A callable which takes one argument (a Change
- instance) and returns True if the change is
- worth building, and False if it is not.
- Unimportant Changes are accumulated until the
- build is triggered by an important change.
- The default value of None means that all
- Changes are important.
- """
-
- BaseUpstreamScheduler.__init__(self, name)
- self.treeStableTimer = treeStableTimer
- for b in builderNames:
- assert isinstance(b, str)
- self.builderNames = builderNames
- self.branches = branches
- if self.branches == []:
- log.msg("AnyBranchScheduler %s: branches=[], so we will ignore "
- "all branches, and never trigger any builds. Please set "
- "branches=None to mean 'all branches'" % self)
- # consider raising an exception here, to make this warning more
- # prominent, but I can vaguely imagine situations where you might
- # want to comment out branches temporarily and wouldn't
- # appreciate it being treated as an error.
- if fileIsImportant:
- assert callable(fileIsImportant)
- self.fileIsImportant = fileIsImportant
- self.schedulers = {} # one per branch
-
- def __repr__(self):
- return "<AnyBranchScheduler '%s'>" % self.name
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- bts = []
- for s in self.schedulers.values():
- if s.nextBuildTime is not None:
- bts.append(s.nextBuildTime)
- return bts
-
- def addChange(self, change):
- branch = change.branch
- if self.branches is not None and branch not in self.branches:
- log.msg("%s ignoring off-branch %s" % (self, change))
- return
- s = self.schedulers.get(branch)
- if not s:
- if branch:
- name = self.name + "." + branch
- else:
- name = self.name + ".<default>"
- s = self.schedulerFactory(name, branch,
- self.treeStableTimer,
- self.builderNames,
- self.fileIsImportant)
- s.successWatchers = self.successWatchers
- s.setServiceParent(self)
- # TODO: does this result in schedulers that stack up forever?
- # When I make the persistify-pass, think about this some more.
- self.schedulers[branch] = s
- s.addChange(change)
-
- def submitBuildSet(self, bs):
- self.parent.submitBuildSet(bs)
-
-
-class Dependent(BaseUpstreamScheduler):
- """This scheduler runs some set of 'downstream' builds when the
- 'upstream' scheduler has completed successfully."""
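-    # Illustrative chaining (hypothetical names): 'full' runs only after a
-    # buildset from the 'quick' scheduler finishes successfully.
-    #   quick = Scheduler("quick", None, 60, ["quick-linux"])
-    #   full = Dependent("full", upstream=quick, builderNames=["full-linux"])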
-
- compare_attrs = ('name', 'upstream', 'builders')
-
- def __init__(self, name, upstream, builderNames):
- assert providedBy(upstream, interfaces.IUpstreamScheduler)
- BaseUpstreamScheduler.__init__(self, name)
- self.upstream = upstream
- self.builderNames = builderNames
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- # report the upstream's value
- return self.upstream.getPendingBuildTimes()
-
- def startService(self):
- service.MultiService.startService(self)
- self.upstream.subscribeToSuccessfulBuilds(self.upstreamBuilt)
-
- def stopService(self):
- d = service.MultiService.stopService(self)
- self.upstream.unsubscribeToSuccessfulBuilds(self.upstreamBuilt)
- return d
-
- def upstreamBuilt(self, ss):
- bs = buildset.BuildSet(self.builderNames, ss)
- self.submit(bs)
-
-
-
-class Periodic(BaseUpstreamScheduler):
- """Instead of watching for Changes, this Scheduler can just start a build
- at fixed intervals. The C{periodicBuildTimer} parameter sets the number
- of seconds to wait between such periodic builds. The first build will be
- run immediately."""
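-    # Illustrative usage (hypothetical builder name): one build per hour.
-    #   Periodic("hourly", builderNames=["full-linux"],
-    #            periodicBuildTimer=60*60)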
-
- # TODO: consider having this watch another (changed-based) scheduler and
- # merely enforce a minimum time between builds.
-
- compare_attrs = ('name', 'builderNames', 'periodicBuildTimer', 'branch')
-
- def __init__(self, name, builderNames, periodicBuildTimer,
- branch=None):
- BaseUpstreamScheduler.__init__(self, name)
- self.builderNames = builderNames
- self.periodicBuildTimer = periodicBuildTimer
- self.branch = branch
- self.timer = internet.TimerService(self.periodicBuildTimer,
- self.doPeriodicBuild)
- self.timer.setServiceParent(self)
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- # TODO: figure out when self.timer is going to fire next and report
- # that
- return []
-
- def doPeriodicBuild(self):
- bs = buildset.BuildSet(self.builderNames,
- SourceStamp(branch=self.branch))
- self.submit(bs)
-
-
-
-class Nightly(BaseUpstreamScheduler):
- """Imitate 'cron' scheduling. This can be used to schedule a nightly
-    build, or one which runs at certain times of the day, week, or month.
-
- Pass some subset of minute, hour, dayOfMonth, month, and dayOfWeek; each
- may be a single number or a list of valid values. The builds will be
- triggered whenever the current time matches these values. Wildcards are
- represented by a '*' string. All fields default to a wildcard except
- 'minute', so with no fields this defaults to a build every hour, on the
- hour.
-
- For example, the following master.cfg clause will cause a build to be
- started every night at 3:00am::
-
- s = Nightly('nightly', ['builder1', 'builder2'], hour=3, minute=0)
-        c['schedulers'].append(s)
-
-    This scheduler will perform a build each Monday morning at 6:23am and
- again at 8:23am::
-
- s = Nightly('BeforeWork', ['builder1'],
- dayOfWeek=0, hour=[6,8], minute=23)
-
- The following runs a build every two hours::
-
- s = Nightly('every2hours', ['builder1'], hour=range(0, 24, 2))
-
- And this one will run only on December 24th::
-
- s = Nightly('SleighPreflightCheck', ['flying_circuits', 'radar'],
- month=12, dayOfMonth=24, hour=12, minute=0)
-
- For dayOfWeek and dayOfMonth, builds are triggered if the date matches
- either of them. Month and day numbers start at 1, not zero.
- """
-
- compare_attrs = ('name', 'builderNames',
- 'minute', 'hour', 'dayOfMonth', 'month',
- 'dayOfWeek', 'branch')
-
- def __init__(self, name, builderNames, minute=0, hour='*',
- dayOfMonth='*', month='*', dayOfWeek='*',
- branch=None):
- # Setting minute=0 really makes this an 'Hourly' scheduler. This
- # seemed like a better default than minute='*', which would result in
- # a build every 60 seconds.
- BaseUpstreamScheduler.__init__(self, name)
- self.builderNames = builderNames
- self.minute = minute
- self.hour = hour
- self.dayOfMonth = dayOfMonth
- self.month = month
- self.dayOfWeek = dayOfWeek
- self.branch = branch
- self.delayedRun = None
- self.nextRunTime = None
-
- def addTime(self, timetuple, secs):
- return time.localtime(time.mktime(timetuple)+secs)
- def findFirstValueAtLeast(self, values, value, default=None):
- for v in values:
- if v >= value: return v
- return default
-
- def setTimer(self):
- self.nextRunTime = self.calculateNextRunTime()
- self.delayedRun = reactor.callLater(self.nextRunTime - time.time(),
- self.doPeriodicBuild)
-
- def startService(self):
- BaseUpstreamScheduler.startService(self)
- self.setTimer()
-
- def stopService(self):
- BaseUpstreamScheduler.stopService(self)
- self.delayedRun.cancel()
-
- def isRunTime(self, timetuple):
- def check(ourvalue, value):
- if ourvalue == '*': return True
- if isinstance(ourvalue, int): return value == ourvalue
- return (value in ourvalue)
-
- if not check(self.minute, timetuple[4]):
- #print 'bad minute', timetuple[4], self.minute
- return False
-
- if not check(self.hour, timetuple[3]):
- #print 'bad hour', timetuple[3], self.hour
- return False
-
- if not check(self.month, timetuple[1]):
- #print 'bad month', timetuple[1], self.month
- return False
-
- if self.dayOfMonth != '*' and self.dayOfWeek != '*':
- # They specified both day(s) of month AND day(s) of week.
- # This means that we only have to match one of the two. If
- # neither one matches, this time is not the right time.
- if not (check(self.dayOfMonth, timetuple[2]) or
- check(self.dayOfWeek, timetuple[6])):
- #print 'bad day'
- return False
- else:
- if not check(self.dayOfMonth, timetuple[2]):
- #print 'bad day of month'
- return False
-
- if not check(self.dayOfWeek, timetuple[6]):
- #print 'bad day of week'
- return False
-
- return True
-
- def calculateNextRunTime(self):
- return self.calculateNextRunTimeFrom(time.time())
-
- def calculateNextRunTimeFrom(self, now):
- dateTime = time.localtime(now)
-
-        # Remove seconds by advancing to at least the next minute
- dateTime = self.addTime(dateTime, 60-dateTime[5])
-
- # Now we just keep adding minutes until we find something that matches
-
-        # It's not an efficient algorithm, but it'll *work* for now
- yearLimit = dateTime[0]+2
- while not self.isRunTime(dateTime):
- dateTime = self.addTime(dateTime, 60)
- #print 'Trying', time.asctime(dateTime)
- assert dateTime[0] < yearLimit, 'Something is wrong with this code'
- return time.mktime(dateTime)
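-        # Hedged example: with hour=3, minute=0 and a 'now' of 02:59 local
-        # time, the loop above stops at 03:00 on the same day and that
-        # timestamp is returned.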
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- # TODO: figure out when self.timer is going to fire next and report
- # that
- if self.nextRunTime is None: return []
- return [self.nextRunTime]
-
- def doPeriodicBuild(self):
- # Schedule the next run
- self.setTimer()
-
- # And trigger a build
- bs = buildset.BuildSet(self.builderNames,
- SourceStamp(branch=self.branch))
- self.submit(bs)
-
- def addChange(self, change):
- pass
-
-
-
-class TryBase(service.MultiService, util.ComparableMixin):
- if implements:
- implements(interfaces.IScheduler)
- else:
- __implements__ = (interfaces.IScheduler,
- service.MultiService.__implements__)
-
- def __init__(self, name, builderNames):
- service.MultiService.__init__(self)
- self.name = name
- self.builderNames = builderNames
-
- def listBuilderNames(self):
- return self.builderNames
-
- def getPendingBuildTimes(self):
- # we can't predict what the developers are going to do in the future
- return []
-
- def addChange(self, change):
- # Try schedulers ignore Changes
- pass
-
-
-class BadJobfile(Exception):
- pass
-
-class JobFileScanner(basic.NetstringReceiver):
- def __init__(self):
- self.strings = []
- self.transport = self # so transport.loseConnection works
- self.error = False
-
- def stringReceived(self, s):
- self.strings.append(s)
-
- def loseConnection(self):
- self.error = True
-
-class Try_Jobdir(TryBase):
- compare_attrs = ["name", "builderNames", "jobdir"]
-
- def __init__(self, name, builderNames, jobdir):
- TryBase.__init__(self, name, builderNames)
- self.jobdir = jobdir
- self.watcher = maildirtwisted.MaildirService()
- self.watcher.setServiceParent(self)
-
- def setServiceParent(self, parent):
- self.watcher.setBasedir(os.path.join(parent.basedir, self.jobdir))
- TryBase.setServiceParent(self, parent)
-
- def parseJob(self, f):
- # jobfiles are serialized build requests. Each is a list of
- # serialized netstrings, in the following order:
- # "1", the version number of this format
- # buildsetID, arbitrary string, used to find the buildSet later
- # branch name, "" for default-branch
- # base revision
- # patchlevel, usually "1"
- # patch
- # builderNames...
- p = JobFileScanner()
- p.dataReceived(f.read())
- if p.error:
- raise BadJobfile("unable to parse netstrings")
- s = p.strings
- ver = s.pop(0)
- if ver != "1":
- raise BadJobfile("unknown version '%s'" % ver)
- buildsetID, branch, baserev, patchlevel, diff = s[:5]
- builderNames = s[5:]
- if branch == "":
- branch = None
- patchlevel = int(patchlevel)
- patch = (patchlevel, diff)
- ss = SourceStamp(branch, baserev, patch)
- return builderNames, ss, buildsetID
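-        # Illustrative decoded job (hypothetical values): the netstrings
-        #   ["1", "job-123", "trunk", "4277", "1", "<diff text>", "b1", "b2"]
-        # would yield (["b1", "b2"],
-        #              SourceStamp("trunk", "4277", (1, "<diff text>")),
-        #              "job-123")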
-
- def messageReceived(self, filename):
- md = os.path.join(self.parent.basedir, self.jobdir)
- if runtime.platformType == "posix":
- # open the file before moving it, because I'm afraid that once
- # it's in cur/, someone might delete it at any moment
- path = os.path.join(md, "new", filename)
- f = open(path, "r")
- os.rename(os.path.join(md, "new", filename),
- os.path.join(md, "cur", filename))
- else:
- # do this backwards under windows, because you can't move a file
- # that somebody is holding open. This was causing a Permission
- # Denied error on bear's win32-twisted1.3 buildslave.
- os.rename(os.path.join(md, "new", filename),
- os.path.join(md, "cur", filename))
- path = os.path.join(md, "cur", filename)
- f = open(path, "r")
-
- try:
- builderNames, ss, bsid = self.parseJob(f)
- except BadJobfile:
- log.msg("%s reports a bad jobfile in %s" % (self, filename))
- log.err()
- return
- # compare builderNames against self.builderNames
- # TODO: think about this some more.. why bother restricting it?
- # perhaps self.builderNames should be used as the default list
- # instead of being used as a restriction?
- for b in builderNames:
- if not b in self.builderNames:
- log.msg("%s got jobfile %s with builder %s" % (self,
- filename, b))
- log.msg(" but that wasn't in our list: %s"
- % (self.builderNames,))
- return
-
- reason = "'try' job"
- bs = buildset.BuildSet(builderNames, ss, reason=reason, bsid=bsid)
- self.parent.submitBuildSet(bs)
-
-class Try_Userpass(TryBase):
- compare_attrs = ["name", "builderNames", "port", "userpass"]
-
- if implements:
- implements(portal.IRealm)
- else:
- __implements__ = (portal.IRealm,
- TryBase.__implements__)
-
- def __init__(self, name, builderNames, port, userpass):
- TryBase.__init__(self, name, builderNames)
- if type(port) is int:
- port = "tcp:%d" % port
- self.port = port
- self.userpass = userpass
- c = checkers.InMemoryUsernamePasswordDatabaseDontUse()
- for user,passwd in self.userpass:
- c.addUser(user, passwd)
-
- p = portal.Portal(self)
- p.registerChecker(c)
- f = pb.PBServerFactory(p)
- s = strports.service(port, f)
- s.setServiceParent(self)
-
- def getPort(self):
- # utility method for tests: figure out which TCP port we just opened.
- return self.services[0]._port.getHost().port
-
- def requestAvatar(self, avatarID, mind, interface):
- log.msg("%s got connection from user %s" % (self, avatarID))
- assert interface == pb.IPerspective
- p = Try_Userpass_Perspective(self, avatarID)
- return (pb.IPerspective, p, lambda: None)
-
- def submitBuildSet(self, bs):
- return self.parent.submitBuildSet(bs)
-
-class Try_Userpass_Perspective(pbutil.NewCredPerspective):
- def __init__(self, parent, username):
- self.parent = parent
- self.username = username
-
- def perspective_try(self, branch, revision, patch, builderNames):
- log.msg("user %s requesting build on builders %s" % (self.username,
- builderNames))
- for b in builderNames:
- if not b in self.parent.builderNames:
- log.msg("%s got job with builder %s" % (self, b))
- log.msg(" but that wasn't in our list: %s"
- % (self.parent.builderNames,))
- return
- ss = SourceStamp(branch, revision, patch)
- reason = "'try' job from user %s" % self.username
- bs = buildset.BuildSet(builderNames, ss, reason=reason)
- self.parent.submitBuildSet(bs)
-
- # return a remotely-usable BuildSetStatus object
- from buildbot.status.client import makeRemote
- return makeRemote(bs.status)
-
diff --git a/buildbot/buildbot-source/buildbot/scripts/__init__.py b/buildbot/buildbot-source/buildbot/scripts/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/buildbot/scripts/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/buildbot/scripts/runner.py b/buildbot/buildbot-source/buildbot/scripts/runner.py
deleted file mode 100644
index 7d11a8225..000000000
--- a/buildbot/buildbot-source/buildbot/scripts/runner.py
+++ /dev/null
@@ -1,749 +0,0 @@
-# -*- test-case-name: buildbot.test.test_runner -*-
-
-# N.B.: don't import anything that might pull in a reactor yet. Some of our
-# subcommands want to load modules that need the gtk reactor.
-import os, os.path, sys, shutil, stat, re, time
-from twisted.python import usage, util, runtime
-
-# this is mostly just a front-end for mktap, twistd, and kill(1), but in the
-# future it will also provide an interface to some developer tools that talk
-# directly to a remote buildmaster (like 'try' and a status client)
-
-# the create/start/stop commands should all be run as the same user,
-# preferably a separate 'buildbot' account.
-
-class MakerBase(usage.Options):
- optFlags = [
- ['help', 'h', "Display this message"],
- ["quiet", "q", "Do not emit the commands being run"],
- ]
-
- #["basedir", "d", None, "Base directory for the buildmaster"],
- opt_h = usage.Options.opt_help
-
- def parseArgs(self, *args):
- if len(args) > 0:
- self['basedir'] = args[0]
- else:
- self['basedir'] = None
- if len(args) > 1:
- raise usage.UsageError("I wasn't expecting so many arguments")
-
- def postOptions(self):
- if self['basedir'] is None:
- raise usage.UsageError("<basedir> parameter is required")
- self['basedir'] = os.path.abspath(self['basedir'])
-
-makefile_sample = """# -*- makefile -*-
-
-# This is a simple makefile which lives in a buildmaster/buildslave
-# directory (next to the buildbot.tac file). It allows you to start/stop the
-# master or slave by doing 'make start' or 'make stop'.
-
-# The 'reconfig' target will tell a buildmaster to reload its config file.
-
-start:
- twistd --no_save -y buildbot.tac
-
-stop:
- kill `cat twistd.pid`
-
-reconfig:
- kill -HUP `cat twistd.pid`
-
-log:
- tail -f twistd.log
-"""
-
-class Maker:
- def __init__(self, config):
- self.config = config
- self.basedir = config['basedir']
- self.force = config['force']
- self.quiet = config['quiet']
-
- def mkdir(self):
- if os.path.exists(self.basedir):
- if not self.quiet:
- print "updating existing installation"
- return
- if not self.quiet: print "mkdir", self.basedir
- os.mkdir(self.basedir)
-
- def mkinfo(self):
- path = os.path.join(self.basedir, "info")
- if not os.path.exists(path):
- if not self.quiet: print "mkdir", path
- os.mkdir(path)
- created = False
- admin = os.path.join(path, "admin")
- if not os.path.exists(admin):
- if not self.quiet:
- print "Creating info/admin, you need to edit it appropriately"
- f = open(admin, "wt")
- f.write("Your Name Here <admin@youraddress.invalid>\n")
- f.close()
- created = True
- host = os.path.join(path, "host")
- if not os.path.exists(host):
- if not self.quiet:
- print "Creating info/host, you need to edit it appropriately"
- f = open(host, "wt")
- f.write("Please put a description of this build host here\n")
- f.close()
- created = True
- if created and not self.quiet:
- print "Please edit the files in %s appropriately." % path
-
- def chdir(self):
- if not self.quiet: print "chdir", self.basedir
- os.chdir(self.basedir)
-
- def makeTAC(self, contents, secret=False):
- tacfile = "buildbot.tac"
- if os.path.exists(tacfile):
- oldcontents = open(tacfile, "rt").read()
- if oldcontents == contents:
- if not self.quiet:
- print "buildbot.tac already exists and is correct"
- return
- if not self.quiet:
- print "not touching existing buildbot.tac"
- print "creating buildbot.tac.new instead"
- tacfile = "buildbot.tac.new"
- f = open(tacfile, "wt")
- f.write(contents)
- f.close()
- if secret:
- os.chmod(tacfile, 0600)
-
- def makefile(self):
- target = "Makefile.sample"
- if os.path.exists(target):
- oldcontents = open(target, "rt").read()
- if oldcontents == makefile_sample:
- if not self.quiet:
- print "Makefile.sample already exists and is correct"
- return
- if not self.quiet:
- print "replacing Makefile.sample"
- else:
- if not self.quiet:
- print "creating Makefile.sample"
- f = open(target, "wt")
- f.write(makefile_sample)
- f.close()
-
- def sampleconfig(self, source):
- target = "master.cfg.sample"
- config_sample = open(source, "rt").read()
- if os.path.exists(target):
- oldcontents = open(target, "rt").read()
- if oldcontents == config_sample:
- if not self.quiet:
- print "master.cfg.sample already exists and is up-to-date"
- return
- if not self.quiet:
- print "replacing master.cfg.sample"
- else:
- if not self.quiet:
- print "creating master.cfg.sample"
- f = open(target, "wt")
- f.write(config_sample)
- f.close()
- os.chmod(target, 0600)
-
-class MasterOptions(MakerBase):
- optFlags = [
- ["force", "f",
- "Re-use an existing directory (will not overwrite master.cfg file)"],
- ]
- optParameters = [
- ["config", "c", "master.cfg", "name of the buildmaster config file"],
- ]
- def getSynopsis(self):
- return "Usage: buildbot master [options] <basedir>"
-
- longdesc = """
- This command creates a buildmaster working directory and buildbot.tac
- file. The master will live in <dir> and create various files there.
-
- At runtime, the master will read a configuration file (named
- 'master.cfg' by default) in its basedir. This file should contain python
- code which eventually defines a dictionary named 'BuildmasterConfig'.
- The elements of this dictionary are used to configure the Buildmaster.
- See doc/config.xhtml for details about what can be controlled through
- this interface."""
-
-masterTAC = """
-from twisted.application import service
-from buildbot.master import BuildMaster
-
-basedir = r'%(basedir)s'
-configfile = r'%(config)s'
-
-application = service.Application('buildmaster')
-BuildMaster(basedir, configfile).setServiceParent(application)
-
-"""
-
-def createMaster(config):
- m = Maker(config)
- m.mkdir()
- m.chdir()
- contents = masterTAC % config
- m.makeTAC(contents)
- m.sampleconfig(util.sibpath(__file__, "sample.cfg"))
- m.makefile()
-
- if not m.quiet: print "buildmaster configured in %s" % m.basedir
-
-class SlaveOptions(MakerBase):
- optFlags = [
- ["force", "f", "Re-use an existing directory"],
- ]
- optParameters = [
-# ["name", "n", None, "Name for this build slave"],
-# ["passwd", "p", None, "Password for this build slave"],
-# ["basedir", "d", ".", "Base directory to use"],
-# ["master", "m", "localhost:8007",
-# "Location of the buildmaster (host:port)"],
-
- ["keepalive", "k", 600,
- "Interval at which keepalives should be sent (in seconds)"],
- ["usepty", None, 1,
- "(1 or 0) child processes should be run in a pty"],
- ["umask", None, "None",
- "controls permissions of generated files. Use --umask=022 to be world-readable"],
- ]
-
- longdesc = """
- This command creates a buildslave working directory and buildbot.tac
- file. The bot will use the <name> and <passwd> arguments to authenticate
- itself when connecting to the master. All commands are run in a
- build-specific subdirectory of <basedir>, which defaults to the working
- directory that mktap was run from. <master> is a string of the form
- 'hostname:port', and specifies where the buildmaster can be reached.
-
- <name>, <passwd>, and <master> will be provided by the buildmaster
- administrator for your bot.
- """
-
- def getSynopsis(self):
- return "Usage: buildbot slave [options] <basedir> <master> <name> <passwd>"
-
- def parseArgs(self, *args):
- if len(args) < 4:
- raise usage.UsageError("command needs more arguments")
- basedir, master, name, passwd = args
- self['basedir'] = basedir
- self['master'] = master
- self['name'] = name
- self['passwd'] = passwd
-
- def postOptions(self):
- MakerBase.postOptions(self)
- self['usepty'] = int(self['usepty'])
- self['keepalive'] = int(self['keepalive'])
- if self['master'].find(":") == -1:
- raise usage.UsageError("--master must be in the form host:portnum")
-
-slaveTAC = """
-from twisted.application import service
-from buildbot.slave.bot import BuildSlave
-
-basedir = r'%(basedir)s'
-host = '%(host)s'
-port = %(port)d
-slavename = '%(name)s'
-passwd = '%(passwd)s'
-keepalive = %(keepalive)d
-usepty = %(usepty)d
-umask = %(umask)s
-
-application = service.Application('buildslave')
-s = BuildSlave(host, port, slavename, passwd, basedir, keepalive, usepty,
- umask=umask)
-s.setServiceParent(application)
-
-"""
-
-def createSlave(config):
- m = Maker(config)
- m.mkdir()
- m.chdir()
- try:
- master = config['master']
- host, port = re.search(r'(.+):(\d+)', master).groups()
- config['host'] = host
- config['port'] = int(port)
- except:
- print "unparseable master location '%s'" % master
- print " expecting something more like localhost:8007"
- raise
- contents = slaveTAC % config
-
- m.makeTAC(contents, secret=True)
-
- m.makefile()
- m.mkinfo()
-
- if not m.quiet: print "buildslave configured in %s" % m.basedir
-
-
-def start(config):
- basedir = config['basedir']
- quiet = config['quiet']
- os.chdir(basedir)
- sys.path.insert(0, os.path.abspath(os.getcwd()))
- if os.path.exists("/usr/bin/make") and os.path.exists("Makefile.buildbot"):
- # Preferring the Makefile lets slave admins do useful things like set
- # up environment variables for the buildslave.
- cmd = "make -f Makefile.buildbot start"
- if not quiet: print cmd
- os.system(cmd)
- else:
- # see if we can launch the application without actually having to
- # spawn twistd, since spawning processes correctly is a real hassle
- # on windows.
- from twisted.python.runtime import platformType
- argv = ["twistd",
- "--no_save",
- "--logfile=twistd.log", # windows doesn't use the same default
- "--python=buildbot.tac"]
- if platformType == "win32":
- argv.append("--reactor=win32")
- sys.argv = argv
-
- # this is copied from bin/twistd. twisted-1.3.0 uses twistw, while
- # twisted-2.0.0 uses _twistw.
- if platformType == "win32":
- try:
- from twisted.scripts._twistw import run
- except ImportError:
- from twisted.scripts.twistw import run
- else:
- from twisted.scripts.twistd import run
- run()
-
-
-def stop(config, signame="TERM", wait=False):
- import signal
- basedir = config['basedir']
- quiet = config['quiet']
- os.chdir(basedir)
- f = open("twistd.pid", "rt")
- pid = int(f.read().strip())
- signum = getattr(signal, "SIG"+signame)
- timer = 0
- os.kill(pid, signum)
- if not wait:
- print "sent SIG%s to process" % signame
- return
- time.sleep(0.1)
- while timer < 5:
- # poll once per second until twistd.pid goes away, up to 5 seconds
- try:
- os.kill(pid, 0)
- except OSError:
- print "buildbot process %d is dead" % pid
- return
- timer += 1
- time.sleep(1)
- print "never saw process go away"
-
-def restart(config):
- stop(config, wait=True)
- print "now restarting buildbot process.."
- start(config)
- # this next line might not be printed, if start() ended up running twistd
- # inline
- print "buildbot process has been restarted"
-
-
-def loadOptions(filename="options", here=None, home=None):
- """Find the .buildbot/FILENAME file. Crawl from the current directory up
- towards the root, and also look in ~/.buildbot . The first directory
- that's owned by the user and has the file we're looking for wins. Windows
- skips the owned-by-user test.
-
- @rtype: dict
- @return: a dictionary of names defined in the options file. If no options
- file was found, return an empty dict.
- """
-
- if here is None:
- here = os.getcwd()
- here = os.path.abspath(here)
-
- if home is None:
- if runtime.platformType == 'win32':
- home = os.path.join(os.environ['APPDATA'], "buildbot")
- else:
- home = os.path.expanduser("~/.buildbot")
-
- searchpath = []
- toomany = 20
- while True:
- searchpath.append(os.path.join(here, ".buildbot"))
- next = os.path.dirname(here)
- if next == here:
- break # we've hit the root
- here = next
- toomany -= 1 # just in case
- if toomany == 0:
- raise ValueError("Hey, I seem to have wandered up into the "
- "infinite glories of the heavens. Oops.")
- searchpath.append(home)
-
- localDict = {}
-
- for d in searchpath:
- if os.path.isdir(d):
- if runtime.platformType != 'win32':
- if os.stat(d)[stat.ST_UID] != os.getuid():
- print "skipping %s because you don't own it" % d
- continue # security, skip other people's directories
- optfile = os.path.join(d, filename)
- if os.path.exists(optfile):
- try:
- f = open(optfile, "r")
- options = f.read()
- exec options in localDict
- except:
- print "error while reading %s" % optfile
- raise
- break
-
- for k in localDict.keys():
- if k.startswith("__"):
- del localDict[k]
- return localDict
-
-class StartOptions(MakerBase):
- def getSynopsis(self):
- return "Usage: buildbot start <basedir>"
-
-class StopOptions(MakerBase):
- def getSynopsis(self):
- return "Usage: buildbot stop <basedir>"
-
-class RestartOptions(MakerBase):
- def getSynopsis(self):
- return "Usage: buildbot restart <basedir>"
-
-class DebugClientOptions(usage.Options):
- optFlags = [
- ['help', 'h', "Display this message"],
- ]
- optParameters = [
- ["master", "m", None,
- "Location of the buildmaster's slaveport (host:port)"],
- ["passwd", "p", None, "Debug password to use"],
- ]
-
- def parseArgs(self, *args):
- if len(args) > 0:
- self['master'] = args[0]
- if len(args) > 1:
- self['passwd'] = args[1]
- if len(args) > 2:
- raise usage.UsageError("I wasn't expecting so many arguments")
-
-def debugclient(config):
- from buildbot.clients import debug
- opts = loadOptions()
-
- master = config.get('master')
- if not master:
- master = opts.get('master')
- if master is None:
- raise usage.UsageError("master must be specified: on the command "
- "line or in ~/.buildbot/options")
-
- passwd = config.get('passwd')
- if not passwd:
- passwd = opts.get('debugPassword')
- if passwd is None:
- raise usage.UsageError("passwd must be specified: on the command "
- "line or in ~/.buildbot/options")
-
- d = debug.DebugWidget(master, passwd)
- d.run()
-
-class StatusClientOptions(usage.Options):
- optFlags = [
- ['help', 'h', "Display this message"],
- ]
- optParameters = [
- ["master", "m", None,
- "Location of the buildmaster's status port (host:port)"],
- ]
-
- def parseArgs(self, *args):
- if len(args) > 0:
- self['master'] = args[0]
- if len(args) > 1:
- raise usage.UsageError("I wasn't expecting so many arguments")
-
-def statuslog(config):
- from buildbot.clients import base
- opts = loadOptions()
- master = config.get('master')
- if not master:
- master = opts.get('masterstatus')
- if master is None:
- raise usage.UsageError("master must be specified: on the command "
- "line or in ~/.buildbot/options")
- c = base.TextClient(master)
- c.run()
-
-def statusgui(config):
- from buildbot.clients import gtkPanes
- opts = loadOptions()
- master = config.get('master')
- if not master:
- master = opts.get('masterstatus')
- if master is None:
- raise usage.UsageError("master must be specified: on the command "
- "line or in ~/.buildbot/options")
- c = gtkPanes.GtkClient(master)
- c.run()
-
-class SendChangeOptions(usage.Options):
- optParameters = [
- ("master", "m", None,
- "Location of the buildmaster's PBListener (host:port)"),
- ("username", "u", None, "Username performing the commit"),
- ("branch", "b", None, "Branch specifier"),
- ("revision", "r", None, "Revision specifier (string)"),
- ("revision_number", "n", None, "Revision specifier (integer)"),
- ("revision_file", None, None, "Filename containing revision spec"),
- ("comments", "m", None, "log message"),
- ("logfile", "F", None,
- "Read the log messages from this file (- for stdin)"),
- ]
- def getSynopsis(self):
- return "Usage: buildbot sendchange [options] filenames.."
- def parseArgs(self, *args):
- self['files'] = args
-
-
-def sendchange(config, runReactor=False):
- """Send a single change to the buildmaster's PBChangeSource. The
-    connection will be dropped as soon as the Change has been sent."""
- from buildbot.clients.sendchange import Sender
-
- opts = loadOptions()
- user = config.get('username', opts.get('username'))
- master = config.get('master', opts.get('master'))
- branch = config.get('branch', opts.get('branch'))
- revision = config.get('revision')
- # SVN and P4 use numeric revisions
- if config.get("revision_number"):
- revision = int(config['revision_number'])
- if config.get("revision_file"):
- revision = open(config["revision_file"],"r").read()
-
- comments = config.get('comments')
- if not comments and config.get('logfile'):
- if config['logfile'] == "-":
- f = sys.stdin
- else:
- f = open(config['logfile'], "rt")
- comments = f.read()
- if comments is None:
- comments = ""
-
- files = config.get('files', [])
-
- assert user, "you must provide a username"
- assert master, "you must provide the master location"
-
- s = Sender(master, user)
- d = s.send(branch, revision, comments, files)
- if runReactor:
- d.addCallbacks(s.printSuccess, s.printFailure)
- d.addCallback(s.stop)
- s.run()
- return d
-
-
-class ForceOptions(usage.Options):
- optParameters = [
- ["builder", None, None, "which Builder to start"],
- ["branch", None, None, "which branch to build"],
- ["revision", None, None, "which revision to build"],
- ["reason", None, None, "the reason for starting the build"],
- ]
-
- def parseArgs(self, *args):
- args = list(args)
- if len(args) > 0:
- if self['builder'] is not None:
- raise usage.UsageError("--builder provided in two ways")
- self['builder'] = args.pop(0)
- if len(args) > 0:
- if self['reason'] is not None:
- raise usage.UsageError("--reason provided in two ways")
- self['reason'] = " ".join(args)
-
-
-class TryOptions(usage.Options):
- optParameters = [
- ["connect", "c", None,
- "how to reach the buildmaster, either 'ssh' or 'pb'"],
- # for ssh, use --tryhost, --username, and --trydir
- ["tryhost", None, None,
- "the hostname (used by ssh) for the buildmaster"],
- ["trydir", None, None,
- "the directory (on the tryhost) where tryjobs are deposited"],
- ["username", "u", None, "Username performing the trial build"],
- # for PB, use --master, --username, and --passwd
- ["master", "m", None,
- "Location of the buildmaster's PBListener (host:port)"],
- ["passwd", None, None, "password for PB authentication"],
-
- ["vc", None, None,
- "The VC system in use, one of: cvs,svn,tla,baz,darcs"],
- ["branch", None, None,
- "The branch in use, for VC systems that can't figure it out"
- " themselves"],
-
- ["builder", "b", None,
- "Run the trial build on this Builder. Can be used multiple times."],
- ]
-
- optFlags = [
- ["wait", None, "wait until the builds have finished"],
- ]
-
- def __init__(self):
- super(TryOptions, self).__init__()
- self['builders'] = []
-
- def opt_builder(self, option):
- self['builders'].append(option)
-
- def getSynopsis(self):
- return "Usage: buildbot try [options]"
-
-def doTry(config):
- from buildbot.scripts import tryclient
- t = tryclient.Try(config)
- t.run()
-
-class TryServerOptions(usage.Options):
- optParameters = [
- ["jobdir", None, None, "the jobdir (maildir) for submitting jobs"],
- ]
-
-def doTryServer(config):
- import md5
- jobdir = os.path.expanduser(config["jobdir"])
- job = sys.stdin.read()
- # now do a 'safecat'-style write to jobdir/tmp, then move atomically to
- # jobdir/new . Rather than come up with a unique name randomly, I'm just
- # going to MD5 the contents and prepend a timestamp.
- timestring = "%d" % time.time()
- jobhash = md5.new(job).hexdigest()
- fn = "%s-%s" % (timestring, jobhash)
- tmpfile = os.path.join(jobdir, "tmp", fn)
- newfile = os.path.join(jobdir, "new", fn)
- f = open(tmpfile, "w")
- f.write(job)
- f.close()
- os.rename(tmpfile, newfile)
-
-
-class Options(usage.Options):
- synopsis = "Usage: buildbot <command> [command options]"
-
- subCommands = [
- # the following are all admin commands
- ['master', None, MasterOptions,
- "Create and populate a directory for a new buildmaster"],
- ['slave', None, SlaveOptions,
- "Create and populate a directory for a new buildslave"],
- ['start', None, StartOptions, "Start a buildmaster or buildslave"],
- ['stop', None, StopOptions, "Stop a buildmaster or buildslave"],
- ['restart', None, RestartOptions,
- "Restart a buildmaster or buildslave"],
-
- ['sighup', None, StopOptions,
- "SIGHUP a buildmaster to make it re-read the config file"],
-
- ['sendchange', None, SendChangeOptions,
- "Send a change to the buildmaster"],
-
- ['debugclient', None, DebugClientOptions,
- "Launch a small debug panel GUI"],
-
- ['statuslog', None, StatusClientOptions,
- "Emit current builder status to stdout"],
- ['statusgui', None, StatusClientOptions,
- "Display a small window showing current builder status"],
-
- #['force', None, ForceOptions, "Run a build"],
- ['try', None, TryOptions, "Run a build with your local changes"],
-
- ['tryserver', None, TryServerOptions,
- "buildmaster-side 'try' support function, not for users"],
-
- # TODO: 'watch'
- ]
-
- def opt_version(self):
- import buildbot
- print "Buildbot version: %s" % buildbot.version
- usage.Options.opt_version(self)
-
- def opt_verbose(self):
- from twisted.python import log
- log.startLogging(sys.stderr)
-
- def postOptions(self):
- if not hasattr(self, 'subOptions'):
- raise usage.UsageError("must specify a command")
-
-
-def run():
- config = Options()
- try:
- config.parseOptions()
- except usage.error, e:
- print "%s: %s" % (sys.argv[0], e)
- print
- c = getattr(config, 'subOptions', config)
- print str(c)
- sys.exit(1)
-
- command = config.subCommand
- so = config.subOptions
-
- if command == "master":
- createMaster(so)
- elif command == "slave":
- createSlave(so)
- elif command == "start":
- start(so)
- elif command == "stop":
- stop(so, wait=True)
- elif command == "restart":
- restart(so)
- elif command == "sighup":
- stop(so, "HUP")
- elif command == "sendchange":
- sendchange(so, True)
- elif command == "debugclient":
- debugclient(so)
- elif command == "statuslog":
- statuslog(so)
- elif command == "statusgui":
- statusgui(so)
- elif command == "try":
- doTry(so)
- elif command == "tryserver":
- doTryServer(so)
-
-
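A hypothetical ~/.buildbot/options file of the kind loadOptions() above looks for; every value is a placeholder, and only keys actually read by the commands in this file (and by tryclient.py below) are shown:

# hypothetical ~/.buildbot/options -- exec'd as Python by loadOptions();
# names starting with '__' are stripped afterwards.
master = "buildmaster.example.org:9989"        # debugclient, sendchange
masterstatus = "buildmaster.example.org:9988"  # statuslog, statusgui
username = "alice"
debugPassword = "debugpassword"

# defaults for 'buildbot try' (read by tryclient.py, deleted below)
try_connect = "pb"
try_master = "buildmaster.example.org:8031"
try_username = "alice"
try_password = "secret"
try_builders = ["buildbot-full"]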
diff --git a/buildbot/buildbot-source/buildbot/scripts/sample.cfg b/buildbot/buildbot-source/buildbot/scripts/sample.cfg
deleted file mode 100644
index a8385064a..000000000
--- a/buildbot/buildbot-source/buildbot/scripts/sample.cfg
+++ /dev/null
@@ -1,150 +0,0 @@
-# -*- python -*-
-# ex: set syntax=python:
-
-# This is a sample buildmaster config file. It must be installed as
-# 'master.cfg' in your buildmaster's base directory (although the filename
-# can be changed with the --config option to 'buildbot master').
-
-# It has one job: define a dictionary named BuildmasterConfig. This
-# dictionary has a variety of keys to control different aspects of the
-# buildmaster. They are documented in docs/config.xhtml .
-
-import os.path
-from buildbot.changes.freshcvs import FreshCVSSource
-from buildbot.scheduler import Scheduler
-from buildbot.process import step, factory
-from buildbot.status import html
-s = factory.s
-
-# This is the dictionary that the buildmaster pays attention to. We also use
-# a shorter alias to save typing.
-c = BuildmasterConfig = {}
-
-# the 'bots' list defines the set of allowable buildslaves. Each element is a
-# tuple of bot-name and bot-password. These correspond to values given to the
-# buildslave's mktap invocation.
-c['bots'] = [("bot1name", "bot1passwd")]
-
-
-# the 'sources' list tells the buildmaster how it should find out about
-# source code changes. Any class which implements IChangeSource can be added
-# to this list: there are several in buildbot/changes/*.py to choose from.
-
-c['sources'] = []
-
-# For example, if you had CVSToys installed on your repository, and your
-# CVSROOT/freshcfg file had an entry like this:
-#pb = ConfigurationSet([
-# (None, None, None, PBService(userpass=('foo', 'bar'), port=4519)),
-# ])
-
-# then you could use the following buildmaster Change Source to subscribe to
-# the FreshCVS daemon and be notified on every commit:
-#
-#fc_source = FreshCVSSource("cvs.example.com", 4519, "foo", "bar")
-#c['sources'].append(fc_source)
-
-# or, use a PBChangeSource, and then have your repository's commit script run
-# 'buildbot sendchange', or contrib/svn_buildbot.py, or
-# contrib/arch_buildbot.py :
-#
-#from buildbot.changes.pb import PBChangeSource
-#c['sources'].append(PBChangeSource())
-
-
-## configure the Schedulers
-
-c['schedulers'] = []
-c['schedulers'].append(Scheduler(name="all", branch=None,
- treeStableTimer=2*60,
- builderNames=["buildbot-full"]))
-
-
-
-# the 'builders' list defines the Builders. Each one is configured with a
-# dictionary, using the following keys:
-# name (required): the name used to describe this builder
-# slavename (required): which slave to use, must appear in c['bots']
-# builddir (required): which subdirectory to run the builder in
-# factory (required): a BuildFactory to define how the build is run
-# periodicBuildTime (optional): if set, force a build every N seconds
-
-# buildbot/process/factory.py provides several BuildFactory classes you can
-# start with, which implement build processes for common targets (GNU
-# autoconf projects, CPAN perl modules, etc). The factory.BuildFactory is the
-# base class, and is configured with a series of BuildSteps. When the build
-# is run, the appropriate buildslave is told to execute each Step in turn.
-
-# the first BuildStep is typically responsible for obtaining a copy of the
-# sources. There are source-obtaining Steps in buildbot/process/step.py for
-# CVS, SVN, and others.
-
-cvsroot = ":pserver:anonymous@cvs.sourceforge.net:/cvsroot/buildbot"
-cvsmodule = "buildbot"
-
-builders = []
-
-source = s(step.CVS, cvsroot=cvsroot, cvsmodule=cvsmodule, login="",
- mode="copy")
-f1 = factory.Trial(source, tests="buildbot.test",
- # enable this if you've installed buildbot-test-vc-1.tar.gz
- #env={'BUILDBOT_TEST_VC': "~/incoming"},
- )
-b1 = {'name': "buildbot-full",
- 'slavename': "bot1name",
- 'builddir': "full",
- 'factory': f1,
- }
-c['builders'] = [b1]
-
-# 'slavePortnum' defines the TCP port to listen on. This must match the value
-# configured into the buildslaves (with their --master option)
-
-c['slavePortnum'] = 9989
-
-# 'status' is a list of Status Targets. The results of each build will be
-# pushed to these targets. buildbot/status/*.py has a variety to choose from,
-# including web pages, email senders, and IRC bots.
-
-c['status'] = []
-c['status'].append(html.Waterfall(http_port=8010))
-
-# from buildbot.status import mail
-# c['status'].append(mail.MailNotifier(fromaddr="buildbot@localhost",
-# extraRecipients=["builds@example.com"],
-# sendToInterestedUsers=False))
-# from buildbot.status import words
-# c['status'].append(words.IRC(host="irc.example.com", nick="bb",
-# channels=["#example"]))
-
-
-# if you set 'debugPassword', then you can connect to the buildmaster with
-# the diagnostic tool in contrib/debugclient.py . From this tool, you can
-# manually force builds and inject changes, which may be useful for testing
-# your buildmaster without actually committing changes to your repository (or
-# before you have a functioning 'sources' set up). The debug tool uses the
-# same port number as the slaves do: 'slavePortnum'.
-
-c['debugPassword'] = "debugpassword"
-
-# if you set 'manhole', you can telnet into the buildmaster and get an
-# interactive python shell, which may be useful for debugging buildbot
-# internals. It is probably only useful for buildbot developers.
-#from buildbot.master import Manhole
-#c['manhole'] = Manhole(9999, "admin", "password")
-
-# the 'projectName' string will be used to describe the project that this
-# buildbot is working on. For example, it is used as the title of the
-# waterfall HTML page. The 'projectURL' string will be used to provide a link
-# from buildbot HTML pages to your project's home page.
-
-c['projectName'] = "Buildbot"
-c['projectURL'] = "http://buildbot.sourceforge.net/"
-
-# the 'buildbotURL' string should point to the location where the buildbot's
-# internal web server (usually the html.Waterfall page) is visible. This
-# typically uses the port number set in the Waterfall 'status' entry, but
-# with an externally-visible host name which the buildbot cannot figure out
-# without some help.
-
-c['buildbotURL'] = "http://localhost:8010/"
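A rough smoke-test sketch (not in the patch, and assuming buildbot and its dependencies are importable) showing the only contract the comments above impose on master.cfg -- that exec'ing it leaves a dict named BuildmasterConfig behind:

# exec the config by hand, the way the comments describe; 'basedir' is
# supplied here only in case the config file refers to it.
g = {"basedir": "."}
execfile("master.cfg.sample", g)
cfg = g["BuildmasterConfig"]
print "slave port:", cfg["slavePortnum"]
print "builders:", [b["name"] for b in cfg["builders"]]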
diff --git a/buildbot/buildbot-source/buildbot/scripts/tryclient.py b/buildbot/buildbot-source/buildbot/scripts/tryclient.py
deleted file mode 100644
index 796634468..000000000
--- a/buildbot/buildbot-source/buildbot/scripts/tryclient.py
+++ /dev/null
@@ -1,580 +0,0 @@
-# -*- test-case-name: buildbot.test.test_scheduler,buildbot.test.test_vc -*-
-
-import sys, os, re, time, random
-from twisted.internet import utils, protocol, defer, reactor, task
-from twisted.spread import pb
-from twisted.cred import credentials
-from twisted.python import log
-
-from buildbot.sourcestamp import SourceStamp
-from buildbot.scripts import runner
-from buildbot.util import now
-from buildbot.status import builder
-from buildbot.twcompat import which
-
-class SourceStampExtractor:
-
- def __init__(self, treetop, branch):
- self.treetop = treetop
- self.branch = branch
- self.exe = which(self.vcexe)[0]
-
- def dovc(self, cmd):
- """This accepts the arguments of a command, without the actual
- command itself."""
- env = os.environ.copy()
- env['LC_ALL'] = "C"
- return utils.getProcessOutput(self.exe, cmd, env=env,
- path=self.treetop)
-
- def get(self):
- """Return a Deferred that fires with a SourceStamp instance."""
- d = self.getBaseRevision()
- d.addCallback(self.getPatch)
- d.addCallback(self.done)
- return d
- def readPatch(self, res, patchlevel):
- self.patch = (patchlevel, res)
- def done(self, res):
- # TODO: figure out the branch too
- ss = SourceStamp(self.branch, self.baserev, self.patch)
- return ss
-
-class CVSExtractor(SourceStampExtractor):
- patchlevel = 0
- vcexe = "cvs"
- def getBaseRevision(self):
- # this depends upon our local clock and the repository's clock being
- # reasonably synchronized with each other. We express everything in
- # UTC because the '%z' format specifier for strftime doesn't always
- # work.
- self.baserev = time.strftime("%Y-%m-%d %H:%M:%S +0000",
- time.gmtime(now()))
- return defer.succeed(None)
-
- def getPatch(self, res):
- # the -q tells CVS to not announce each directory as it works
- if self.branch is not None:
- # 'cvs diff' won't take both -r and -D at the same time (it
- # ignores the -r). As best I can tell, there is no way to make
- # cvs give you a diff relative to a timestamp on the non-trunk
- # branch. A bare 'cvs diff' will tell you about the changes
- # relative to your checked-out versions, but I know of no way to
- # find out what those checked-out versions are.
- raise RuntimeError("Sorry, CVS 'try' builds don't work with "
- "branches")
- args = ['-q', 'diff', '-u', '-D', self.baserev]
- d = self.dovc(args)
- d.addCallback(self.readPatch, self.patchlevel)
- return d
-
-class SVNExtractor(SourceStampExtractor):
- patchlevel = 0
- vcexe = "svn"
-
- def getBaseRevision(self):
- d = self.dovc(["status", "-u"])
- d.addCallback(self.parseStatus)
- return d
- def parseStatus(self, res):
- # svn shows the base revision for each file that has been modified or
- # which needs an update. You can update each file to a different
- # version, so each file is displayed with its individual base
- # revision. It also shows the repository-wide latest revision number
- # on the last line ("Status against revision: \d+").
-
- # for our purposes, we use the latest revision number as the "base"
- # revision, and get a diff against that. This means we will get
- # reverse-diffs for local files that need updating, but the resulting
- # tree will still be correct. The only weirdness is that the baserev
- # that we emit may be different than the version of the tree that we
- # first checked out.
-
- # to do this differently would probably involve scanning the revision
- # numbers to find the max (or perhaps the min) revision, and then
- # using that as a base.
-
- for line in res.split("\n"):
- m = re.search(r'^Status against revision:\s+(\d+)', line)
- if m:
- self.baserev = int(m.group(1))
- return
- raise IndexError("Could not find 'Status against revision' in "
- "SVN output: %s" % res)
- def getPatch(self, res):
- d = self.dovc(["diff", "-r%d" % self.baserev])
- d.addCallback(self.readPatch, self.patchlevel)
- return d
-
-class BazExtractor(SourceStampExtractor):
- vcexe = "baz"
- def getBaseRevision(self):
- d = self.dovc(["tree-id"])
- d.addCallback(self.parseStatus)
- return d
- def parseStatus(self, res):
- tid = res.strip()
- slash = tid.index("/")
- dd = tid.rindex("--")
- self.branch = tid[slash+1:dd]
- self.baserev = tid[dd+2:]
- def getPatch(self, res):
- d = self.dovc(["diff"])
- d.addCallback(self.readPatch, 1)
- return d
-
-class TlaExtractor(SourceStampExtractor):
- vcexe = "tla"
- def getBaseRevision(self):
- # 'tla logs --full' gives us ARCHIVE/BRANCH--REVISION
- # 'tla logs' gives us REVISION
- d = self.dovc(["logs", "--full", "--reverse"])
- d.addCallback(self.parseStatus)
- return d
- def parseStatus(self, res):
- tid = res.split("\n")[0].strip()
- slash = tid.index("/")
- dd = tid.rindex("--")
- self.branch = tid[slash+1:dd]
- self.baserev = tid[dd+2:]
-
- def getPatch(self, res):
- d = self.dovc(["changes", "--diffs"])
- d.addCallback(self.readPatch, 1)
- return d
-
-class MercurialExtractor(SourceStampExtractor):
- patchlevel = 1
- vcexe = "hg"
- def getBaseRevision(self):
- d = self.dovc(["identify"])
- d.addCallback(self.parseStatus)
- return d
- def parseStatus(self, output):
- m = re.search(r'^(\w+)', output)
- self.baserev = m.group(0)
- def getPatch(self, res):
- d = self.dovc(["diff"])
- d.addCallback(self.readPatch, self.patchlevel)
- return d
-
-class DarcsExtractor(SourceStampExtractor):
- patchlevel = 1
- vcexe = "darcs"
- def getBaseRevision(self):
- d = self.dovc(["changes", "--context"])
- d.addCallback(self.parseStatus)
- return d
- def parseStatus(self, res):
- self.baserev = res # the whole context file
- def getPatch(self, res):
- d = self.dovc(["diff", "-u"])
- d.addCallback(self.readPatch, self.patchlevel)
- return d
-
-def getSourceStamp(vctype, treetop, branch=None):
- if vctype == "cvs":
- e = CVSExtractor(treetop, branch)
- elif vctype == "svn":
- e = SVNExtractor(treetop, branch)
- elif vctype == "baz":
- e = BazExtractor(treetop, branch)
- elif vctype == "tla":
- e = TlaExtractor(treetop, branch)
- elif vctype == "hg":
- e = MercurialExtractor(treetop, branch)
- elif vctype == "darcs":
- e = DarcsExtractor(treetop, branch)
- else:
- raise KeyError("unknown vctype '%s'" % vctype)
- return e.get()
-
-
-def ns(s):
- return "%d:%s," % (len(s), s)
-
-def createJobfile(bsid, branch, baserev, patchlevel, diff, builderNames):
- job = ""
- job += ns("1")
- job += ns(bsid)
- job += ns(branch)
- job += ns(str(baserev))
- job += ns("%d" % patchlevel)
- job += ns(diff)
- for bn in builderNames:
- job += ns(bn)
- return job
-
-def getTopdir(topfile, start=None):
- """walk upwards from the current directory until we find this topfile"""
- if not start:
- start = os.getcwd()
- here = start
- toomany = 20
- while toomany > 0:
- if os.path.exists(os.path.join(here, topfile)):
- return here
- next = os.path.dirname(here)
- if next == here:
- break # we've hit the root
- here = next
- toomany -= 1
- raise ValueError("Unable to find topfile '%s' anywhere from %s upwards"
- % (topfile, start))
-
-class RemoteTryPP(protocol.ProcessProtocol):
- def __init__(self, job):
- self.job = job
- self.d = defer.Deferred()
- def connectionMade(self):
- self.transport.write(self.job)
- self.transport.closeStdin()
- def outReceived(self, data):
- sys.stdout.write(data)
- def errReceived(self, data):
- sys.stderr.write(data)
- def processEnded(self, status_object):
- sig = status_object.value.signal
- rc = status_object.value.exitCode
- if sig != None or rc != 0:
- self.d.errback(RuntimeError("remote 'buildbot tryserver' failed"
- ": sig=%s, rc=%s" % (sig, rc)))
- return
- self.d.callback((sig, rc))
-
-class BuildSetStatusGrabber:
-    retryCount = 5 # how many times do we try to grab the BuildSetStatus?
- retryDelay = 3 # seconds to wait between attempts
-
- def __init__(self, status, bsid):
- self.status = status
- self.bsid = bsid
-
- def grab(self):
- # return a Deferred that either fires with the BuildSetStatus
- # reference or errbacks because we were unable to grab it
- self.d = defer.Deferred()
- # wait a second before querying to give the master's maildir watcher
- # a chance to see the job
- reactor.callLater(1, self.go)
- return self.d
-
- def go(self, dummy=None):
- if self.retryCount == 0:
- raise RuntimeError("couldn't find matching buildset")
- self.retryCount -= 1
- d = self.status.callRemote("getBuildSets")
- d.addCallback(self._gotSets)
-
- def _gotSets(self, buildsets):
- for bs,bsid in buildsets:
- if bsid == self.bsid:
- # got it
- self.d.callback(bs)
- return
- d = defer.Deferred()
- d.addCallback(self.go)
- reactor.callLater(self.retryDelay, d.callback, None)
-
-
-class Try(pb.Referenceable):
- buildsetStatus = None
- quiet = False
-
- def __init__(self, config):
- self.config = config
- self.opts = runner.loadOptions()
- self.connect = self.getopt('connect', 'try_connect')
- assert self.connect, "you must specify a connect style: ssh or pb"
- self.builderNames = self.getopt('builders', 'try_builders')
- assert self.builderNames, "no builders! use --builder or " \
- "try_builders=[names..] in .buildbot/options"
-
- def getopt(self, config_name, options_name, default=None):
- value = self.config.get(config_name)
- if value is None or value == []:
- value = self.opts.get(options_name)
- if value is None or value == []:
- value = default
- return value
-
- def createJob(self):
- # returns a Deferred which fires when the job parameters have been
- # created
- config = self.config
- opts = self.opts
- # generate a random (unique) string. It would make sense to add a
- # hostname and process ID here, but a) I suspect that would cause
- # windows portability problems, and b) really this is good enough
- self.bsid = "%d-%s" % (time.time(), random.randint(0, 1000000))
-
- # common options
- vc = self.getopt("vc", "try_vc")
- branch = self.getopt("branch", "try_branch")
-
- if vc in ("cvs", "svn"):
- # we need to find the tree-top
- topdir = self.getopt("try_topdir", "try_topdir")
- if topdir:
- treedir = os.path.expanduser(topdir)
- else:
- topfile = self.getopt("try-topfile", "try_topfile")
- treedir = getTopdir(topfile)
- else:
- treedir = os.getcwd()
- d = getSourceStamp(vc, treedir, branch)
- d.addCallback(self._createJob_1)
- return d
- def _createJob_1(self, ss):
- self.sourcestamp = ss
- if self.connect == "ssh":
- patchlevel, diff = ss.patch
- self.jobfile = createJobfile(self.bsid,
- ss.branch or "", ss.revision,
- patchlevel, diff,
- self.builderNames)
-
- def deliverJob(self):
- # returns a Deferred that fires when the job has been delivered
- config = self.config
- opts = self.opts
-
- if self.connect == "ssh":
- tryhost = self.getopt("tryhost", "try_host")
- tryuser = self.getopt("username", "try_username")
- trydir = self.getopt("trydir", "try_dir")
-
- argv = ["ssh", "-l", tryuser, tryhost,
- "buildbot", "tryserver", "--jobdir", trydir]
- # now run this command and feed the contents of 'job' into stdin
-
- pp = RemoteTryPP(self.jobfile)
- p = reactor.spawnProcess(pp, argv[0], argv, os.environ)
- d = pp.d
- return d
- if self.connect == "pb":
- user = self.getopt("username", "try_username")
- passwd = self.getopt("passwd", "try_password")
- master = self.getopt("master", "try_master")
- tryhost, tryport = master.split(":")
- tryport = int(tryport)
- f = pb.PBClientFactory()
- d = f.login(credentials.UsernamePassword(user, passwd))
- reactor.connectTCP(tryhost, tryport, f)
- d.addCallback(self._deliverJob_pb)
- return d
- raise RuntimeError("unknown connecttype '%s', should be 'ssh' or 'pb'"
- % self.connect)
-
- def _deliverJob_pb(self, remote):
- ss = self.sourcestamp
- d = remote.callRemote("try",
- ss.branch, ss.revision, ss.patch,
- self.builderNames)
- d.addCallback(self._deliverJob_pb2)
- return d
- def _deliverJob_pb2(self, status):
- self.buildsetStatus = status
- return status
-
- def getStatus(self):
- # returns a Deferred that fires when the builds have finished, and
- # may emit status messages while we wait
- wait = bool(self.getopt("wait", "try_wait", False))
- if not wait:
- # TODO: emit the URL where they can follow the builds. This
- # requires contacting the Status server over PB and doing
- # getURLForThing() on the BuildSetStatus. To get URLs for
- # individual builds would require we wait for the builds to
- # start.
- print "not waiting for builds to finish"
- return
- d = self.running = defer.Deferred()
- if self.buildsetStatus:
- self._getStatus_1()
- # contact the status port
- # we're probably using the ssh style
- master = self.getopt("master", "masterstatus")
- host, port = master.split(":")
- port = int(port)
- self.announce("contacting the status port at %s:%d" % (host, port))
- f = pb.PBClientFactory()
- creds = credentials.UsernamePassword("statusClient", "clientpw")
- d = f.login(creds)
- reactor.connectTCP(host, port, f)
- d.addCallback(self._getStatus_ssh_1)
- return self.running
-
- def _getStatus_ssh_1(self, remote):
-        # find a remote reference to the corresponding BuildSetStatus object
- self.announce("waiting for job to be accepted")
- g = BuildSetStatusGrabber(remote, self.bsid)
- d = g.grab()
- d.addCallback(self._getStatus_1)
- return d
-
- def _getStatus_1(self, res=None):
- if res:
- self.buildsetStatus = res
- # gather the set of BuildRequests
- d = self.buildsetStatus.callRemote("getBuildRequests")
- d.addCallback(self._getStatus_2)
-
- def _getStatus_2(self, brs):
- self.builderNames = []
- self.buildRequests = {}
-
- # self.builds holds the current BuildStatus object for each one
- self.builds = {}
-
- # self.outstanding holds the list of builderNames which haven't
- # finished yet
- self.outstanding = []
-
- # self.results holds the list of build results. It holds a tuple of
- # (result, text)
- self.results = {}
-
- # self.currentStep holds the name of the Step that each build is
- # currently running
- self.currentStep = {}
-
- # self.ETA holds the expected finishing time (absolute time since
- # epoch)
- self.ETA = {}
-
- for n,br in brs:
- self.builderNames.append(n)
- self.buildRequests[n] = br
- self.builds[n] = None
- self.outstanding.append(n)
- self.results[n] = [None,None]
- self.currentStep[n] = None
- self.ETA[n] = None
- # get new Builds for this buildrequest. We follow each one until
- # it finishes or is interrupted.
- br.callRemote("subscribe", self)
-
- # now that those queries are in transit, we can start the
- # display-status-every-30-seconds loop
- self.printloop = task.LoopingCall(self.printStatus)
- self.printloop.start(3, now=False)
-
-
- # these methods are invoked by the status objects we've subscribed to
-
- def remote_newbuild(self, bs, builderName):
- if self.builds[builderName]:
- self.builds[builderName].callRemote("unsubscribe", self)
- self.builds[builderName] = bs
- bs.callRemote("subscribe", self, 20)
- d = bs.callRemote("waitUntilFinished")
- d.addCallback(self._build_finished, builderName)
-
- def remote_stepStarted(self, buildername, build, stepname, step):
- self.currentStep[buildername] = stepname
-
- def remote_stepFinished(self, buildername, build, stepname, step, results):
- pass
-
- def remote_buildETAUpdate(self, buildername, build, eta):
- self.ETA[buildername] = now() + eta
-
- def _build_finished(self, bs, builderName):
- # we need to collect status from the newly-finished build. We don't
- # remove the build from self.outstanding until we've collected
- # everything we want.
- self.builds[builderName] = None
- self.ETA[builderName] = None
- self.currentStep[builderName] = "finished"
- d = bs.callRemote("getResults")
- d.addCallback(self._build_finished_2, bs, builderName)
- return d
- def _build_finished_2(self, results, bs, builderName):
- self.results[builderName][0] = results
- d = bs.callRemote("getText")
- d.addCallback(self._build_finished_3, builderName)
- return d
- def _build_finished_3(self, text, builderName):
- self.results[builderName][1] = text
-
- self.outstanding.remove(builderName)
- if not self.outstanding:
- # all done
- return self.statusDone()
-
- def printStatus(self):
- names = self.buildRequests.keys()
- names.sort()
- for n in names:
- if n not in self.outstanding:
- # the build is finished, and we have results
- code,text = self.results[n]
- t = builder.Results[code]
- if text:
- t += " (%s)" % " ".join(text)
- elif self.builds[n]:
- t = self.currentStep[n] or "building"
- if self.ETA[n]:
- t += " [ETA %ds]" % (self.ETA[n] - now())
- else:
- t = "no build"
- self.announce("%s: %s" % (n, t))
- self.announce("")
-
- def statusDone(self):
- self.printloop.stop()
- print "All Builds Complete"
- # TODO: include a URL for all failing builds
- names = self.buildRequests.keys()
- names.sort()
- happy = True
- for n in names:
- code,text = self.results[n]
- t = "%s: %s" % (n, builder.Results[code])
- if text:
- t += " (%s)" % " ".join(text)
- print t
-            if code != builder.SUCCESS:
- happy = False
-
- if happy:
- self.exitcode = 0
- else:
- self.exitcode = 1
- self.running.callback(self.exitcode)
-
- def announce(self, message):
- if not self.quiet:
- print message
-
- def run(self):
- # we can't do spawnProcess until we're inside reactor.run(), so get
- # funky
- print "using '%s' connect method" % self.connect
- self.exitcode = 0
- d = defer.Deferred()
- d.addCallback(lambda res: self.createJob())
- d.addCallback(lambda res: self.announce("job created"))
- d.addCallback(lambda res: self.deliverJob())
- d.addCallback(lambda res: self.announce("job has been delivered"))
- d.addCallback(lambda res: self.getStatus())
- d.addErrback(log.err)
- d.addCallback(self.cleanup)
- d.addCallback(lambda res: reactor.stop())
-
- reactor.callLater(0, d.callback, None)
- reactor.run()
- sys.exit(self.exitcode)
-
- def logErr(self, why):
- log.err(why)
- print "error during 'try' processing"
- print why
-
- def cleanup(self, res=None):
- if self.buildsetStatus:
- self.buildsetStatus.broker.transport.loseConnection()
-
-
-
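A small decoding sketch (not part of the patch) for the jobfile format that ns()/createJobfile() above produce and that doTryServer() in runner.py writes into the jobdir -- a flat run of netstrings ("<len>:<bytes>,") holding version, bsid, branch, baserev, patchlevel, patch body, then one entry per builder name:

def read_netstrings(data):
    # split "1:1,4:1234,..." back into its component strings
    items = []
    while data:
        length, rest = data.split(":", 1)
        n = int(length)
        items.append(rest[:n])
        assert rest[n] == ","
        data = rest[n + 1:]
    return items

sample = "1:1,4:1234,5:trunk,3:100,1:0,9:some diff,13:buildbot-full,"
print read_netstrings(sample)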
diff --git a/buildbot/buildbot-source/buildbot/slave/__init__.py b/buildbot/buildbot-source/buildbot/slave/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/buildbot/slave/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/buildbot/slave/bot.py b/buildbot/buildbot-source/buildbot/slave/bot.py
deleted file mode 100644
index 40b9b4798..000000000
--- a/buildbot/buildbot-source/buildbot/slave/bot.py
+++ /dev/null
@@ -1,495 +0,0 @@
-#! /usr/bin/python
-
-import time, os, os.path, re, sys
-
-from twisted.spread import pb
-from twisted.python import log, usage, failure
-from twisted.internet import reactor, defer
-from twisted.application import service, internet
-from twisted.cred import credentials
-
-from buildbot.util import now
-from buildbot.pbutil import ReconnectingPBClientFactory
-from buildbot.slave import registry
-# make sure the standard commands get registered
-from buildbot.slave import commands
-
-class NoCommandRunning(pb.Error):
- pass
-class WrongCommandRunning(pb.Error):
- pass
-class UnknownCommand(pb.Error):
- pass
-
-class Master:
- def __init__(self, host, port, username, password):
- self.host = host
- self.port = port
- self.username = username
- self.password = password
-
-class SlaveBuild:
-
- """This is an object that can hold state from one step to another in the
- same build. All SlaveCommands have access to it.
- """
- def __init__(self, builder):
- self.builder = builder
-
-class SlaveBuilder(pb.Referenceable, service.Service):
-
- """This is the local representation of a single Builder: it handles a
- single kind of build (like an all-warnings build). It has a name and a
- home directory. The rest of its behavior is determined by the master.
- """
-
- stopCommandOnShutdown = True
-
- # remote is a ref to the Builder object on the master side, and is set
- # when they attach. We use it to detect when the connection to the master
- # is severed.
- remote = None
-
- # .build points to a SlaveBuild object, a new one for each build
- build = None
-
- # .command points to a SlaveCommand instance, and is set while the step
- # is running. We use it to implement the stopBuild method.
- command = None
-
- # .remoteStep is a ref to the master-side BuildStep object, and is set
- # when the step is started
- remoteStep = None
-
- def __init__(self, name, not_really):
- #service.Service.__init__(self) # Service has no __init__ method
- self.setName(name)
- self.not_really = not_really
-
- def __repr__(self):
- return "<SlaveBuilder '%s'>" % self.name
-
- def setServiceParent(self, parent):
- service.Service.setServiceParent(self, parent)
- self.bot = self.parent
- # note that self.parent will go away when the buildmaster's config
- # file changes and this Builder is removed (possibly because it has
- # been changed, so the Builder will be re-added again in a moment).
- # This may occur during a build, while a step is running.
-
- def setBuilddir(self, builddir):
- assert self.parent
- self.builddir = builddir
- self.basedir = os.path.join(self.bot.basedir, self.builddir)
- if not os.path.isdir(self.basedir):
- os.mkdir(self.basedir)
-
- def stopService(self):
- service.Service.stopService(self)
- if self.stopCommandOnShutdown:
- self.stopCommand()
-
- def activity(self):
- bot = self.parent
- if bot:
- buildslave = bot.parent
- if buildslave:
- bf = buildslave.bf
- bf.activity()
-
- def remote_setMaster(self, remote):
- self.remote = remote
- self.remote.notifyOnDisconnect(self.lostRemote)
- def remote_print(self, message):
- log.msg("SlaveBuilder.remote_print(%s): message from master: %s" %
- (self.name, message))
- if message == "ping":
- return self.remote_ping()
-
- def remote_ping(self):
- log.msg("SlaveBuilder.remote_ping(%s)" % self)
- if self.bot and self.bot.parent:
- debugOpts = self.bot.parent.debugOpts
- if debugOpts.get("stallPings"):
- log.msg(" debug_stallPings")
- timeout, timers = debugOpts["stallPings"]
- d = defer.Deferred()
- t = reactor.callLater(timeout, d.callback, None)
- timers.append(t)
- return d
- if debugOpts.get("failPingOnce"):
- log.msg(" debug_failPingOnce")
- class FailPingError(pb.Error): pass
- del debugOpts['failPingOnce']
- raise FailPingError("debug_failPingOnce means we should fail")
-
- def lostRemote(self, remote):
- log.msg("lost remote")
- self.remote = None
-
- def lostRemoteStep(self, remotestep):
- log.msg("lost remote step")
- self.remoteStep = None
- if self.stopCommandOnShutdown:
- self.stopCommand()
-
- # the following are Commands that can be invoked by the master-side
- # Builder
- def remote_startBuild(self):
- """This is invoked before the first step of any new build is run. It
- creates a new SlaveBuild object, which holds slave-side state from
- one step to the next."""
- self.build = SlaveBuild(self)
- log.msg("%s.startBuild" % self)
-
- def remote_startCommand(self, stepref, stepId, command, args):
- """
- This gets invoked by L{buildbot.process.step.RemoteCommand.start}, as
- part of various master-side BuildSteps, to start various commands
- that actually do the build. I return nothing. Eventually I will call
- .commandComplete() to notify the master-side RemoteCommand that I'm
- done.
- """
-
- self.activity()
-
- if self.command:
- log.msg("leftover command, dropping it")
- self.stopCommand()
-
- try:
- factory, version = registry.commandRegistry[command]
- except KeyError:
- raise UnknownCommand, "unrecognized SlaveCommand '%s'" % command
- self.command = factory(self, stepId, args)
-
- log.msg(" startCommand:%s [id %s]" % (command,stepId))
- self.remoteStep = stepref
- self.remoteStep.notifyOnDisconnect(self.lostRemoteStep)
- self.command.running = True
- d = defer.maybeDeferred(self.command.start)
- d.addCallback(lambda res: None)
- d.addBoth(self.commandComplete)
- return None
-
- def remote_interruptCommand(self, stepId, why):
- """Halt the current step."""
- log.msg("asked to interrupt current command: %s" % why)
- self.activity()
- if not self.command:
- # TODO: just log it, a race could result in their interrupting a
- # command that wasn't actually running
- log.msg(" .. but none was running")
- return
- self.command.interrupt()
-
-
- def stopCommand(self):
- """Make any currently-running command die, with no further status
- output. This is used when the buildslave is shutting down or the
- connection to the master has been lost. Interrupt the command,
- silence it, and then forget about it."""
- if not self.command:
- return
- log.msg("stopCommand: halting current command %s" % self.command)
- self.command.running = False # shut up!
- self.command.interrupt() # die!
- self.command = None # forget you!
-
- # sendUpdate is invoked by the Commands we spawn
- def sendUpdate(self, data):
- """This sends the status update to the master-side
- L{buildbot.process.step.RemoteCommand} object, giving it a sequence
- number in the process. It adds the update to a queue, and asks the
- master to acknowledge the update so it can be removed from that
- queue."""
-
- if not self.running:
- # .running comes from service.Service, and says whether the
- # service is running or not. If we aren't running, don't send any
- # status messages.
- return
- # the update[1]=0 comes from the leftover 'updateNum', which the
- # master still expects to receive. Provide it to avoid significant
- # interoperability issues between new slaves and old masters.
- if self.remoteStep:
- update = [data, 0]
- updates = [update]
- d = self.remoteStep.callRemote("update", updates)
- d.addCallback(self.ackUpdate)
- d.addErrback(self._ackFailed, "SlaveBuilder.sendUpdate")
-
- def ackUpdate(self, acknum):
- self.activity() # update the "last activity" timer
-
- def ackComplete(self, dummy):
- self.activity() # update the "last activity" timer
-
- def _ackFailed(self, why, where):
- log.msg("SlaveBuilder._ackFailed:", where)
- #log.err(why) # we don't really care
-
-
- # this is fired by the Deferred attached to each Command
- def commandComplete(self, failure):
- if failure:
- log.msg("SlaveBuilder.commandFailed", self.command)
- log.err(failure)
- # failure, if present, is a failure.Failure. To send it across
- # the wire, we must turn it into a pb.CopyableFailure.
- failure = pb.CopyableFailure(failure)
- failure.unsafeTracebacks = True
- else:
- # failure is None
- log.msg("SlaveBuilder.commandComplete", self.command)
- self.command = None
- if not self.running:
- return
- if self.remoteStep:
- self.remoteStep.dontNotifyOnDisconnect(self.lostRemoteStep)
- d = self.remoteStep.callRemote("complete", failure)
- d.addCallback(self.ackComplete)
- d.addErrback(self._ackFailed, "sendComplete")
- self.remoteStep = None
-
-
- def remote_shutdown(self):
- print "slave shutting down on command from master"
- reactor.stop()
-
-
-class Bot(pb.Referenceable, service.MultiService):
- """I represent the slave-side bot."""
- usePTY = None
- name = "bot"
-
- def __init__(self, basedir, usePTY, not_really=0):
- service.MultiService.__init__(self)
- self.basedir = basedir
- self.usePTY = usePTY
- self.not_really = not_really
- self.builders = {}
-
- def startService(self):
- assert os.path.isdir(self.basedir)
- service.MultiService.startService(self)
-
- def remote_getDirs(self):
- return filter(lambda d: os.path.isdir(d), os.listdir(self.basedir))
-
- def remote_getCommands(self):
- commands = {}
- for name, (factory, version) in registry.commandRegistry.items():
- commands[name] = version
- return commands
-
- def remote_setBuilderList(self, wanted):
- retval = {}
- for (name, builddir) in wanted:
- b = self.builders.get(name, None)
- if b:
- if b.builddir != builddir:
- log.msg("changing builddir for builder %s from %s to %s" \
- % (name, b.builddir, builddir))
- b.setBuilddir(builddir)
- else:
- b = SlaveBuilder(name, self.not_really)
- b.usePTY = self.usePTY
- b.setServiceParent(self)
- b.setBuilddir(builddir)
- self.builders[name] = b
- retval[name] = b
- for name in self.builders.keys():
- if not name in map(lambda a: a[0], wanted):
- log.msg("removing old builder %s" % name)
- self.builders[name].disownServiceParent()
- del(self.builders[name])
- return retval
-
- def remote_print(self, message):
- log.msg("message from master:", message)
-
- def remote_getSlaveInfo(self):
- """This command retrieves data from the files in SLAVEDIR/info/* and
- sends the contents to the buildmaster. These are used to describe
- the slave and its configuration, and should be created and
- maintained by the slave administrator. They will be retrieved each
- time the master-slave connection is established.
- """
-
- files = {}
- basedir = os.path.join(self.basedir, "info")
- if not os.path.isdir(basedir):
- return files
- for f in os.listdir(basedir):
- filename = os.path.join(basedir, f)
- if os.path.isfile(filename):
- files[f] = open(filename, "r").read()
- return files
-
- def debug_forceBuild(self, name):
- d = self.perspective.callRemote("forceBuild", name)
- d.addCallbacks(log.msg, log.err)
-
-class BotFactory(ReconnectingPBClientFactory):
- # 'keepaliveInterval' serves two purposes. The first is to keep the
- # connection alive: it guarantees that there will be at least some
- # traffic once every 'keepaliveInterval' seconds, which may help keep an
- # interposed NAT gateway from dropping the address mapping because it
- # thinks the connection has been abandoned. The second is to put an upper
- # limit on how long the buildmaster might have gone away before we notice
- # it. For this second purpose, we insist upon seeing *some* evidence of
- # the buildmaster at least once every 'keepaliveInterval' seconds.
- keepaliveInterval = None # None = do not use keepalives
-
- # 'keepaliveTimeout' seconds before the interval expires, we will send a
- # keepalive request, both to add some traffic to the connection, and to
- # prompt a response from the master in case all our builders are idle. We
- # don't insist upon receiving a timely response from this message: a slow
- # link might put the request at the wrong end of a large build message.
- keepaliveTimeout = 30 # how long we will go without a response
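-
-    # Worked example (illustrative, not from the original source): with the
-    # default keepaliveInterval of 10*60 seconds (applied in gotPerspective
-    # when none is configured) and keepaliveTimeout of 30 seconds,
-    # startTimers() schedules doKeepalive at t=570s and checkActivity at
-    # t=600s; any master traffic (including the keepalive response) before
-    # the 600-second mark counts as activity, and the timers are restarted.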
-
- keepaliveTimer = None
- activityTimer = None
- lastActivity = 0
- unsafeTracebacks = 1
- perspective = None
-
- def __init__(self, keepaliveInterval, keepaliveTimeout):
- ReconnectingPBClientFactory.__init__(self)
- self.keepaliveInterval = keepaliveInterval
- self.keepaliveTimeout = keepaliveTimeout
-
- def startedConnecting(self, connector):
- ReconnectingPBClientFactory.startedConnecting(self, connector)
- self.connector = connector
-
- def gotPerspective(self, perspective):
- ReconnectingPBClientFactory.gotPerspective(self, perspective)
- self.perspective = perspective
- try:
- perspective.broker.transport.setTcpKeepAlive(1)
- except:
- log.msg("unable to set SO_KEEPALIVE")
- if not self.keepaliveInterval:
- self.keepaliveInterval = 10*60
- self.activity()
- if self.keepaliveInterval:
- log.msg("sending application-level keepalives every %d seconds" \
- % self.keepaliveInterval)
- self.startTimers()
-
- def clientConnectionFailed(self, connector, reason):
- self.connector = None
- ReconnectingPBClientFactory.clientConnectionFailed(self,
- connector, reason)
-
- def clientConnectionLost(self, connector, reason):
- self.connector = None
- self.stopTimers()
- self.perspective = None
- ReconnectingPBClientFactory.clientConnectionLost(self,
- connector, reason)
-
- def startTimers(self):
- assert self.keepaliveInterval
- assert not self.keepaliveTimer
- assert not self.activityTimer
- # Insist that doKeepalive fires before checkActivity. Really, it
- # needs to happen at least one RTT beforehand.
- assert self.keepaliveInterval > self.keepaliveTimeout
-
- # arrange to send a keepalive a little while before our deadline
- when = self.keepaliveInterval - self.keepaliveTimeout
- self.keepaliveTimer = reactor.callLater(when, self.doKeepalive)
- # and check for activity too
- self.activityTimer = reactor.callLater(self.keepaliveInterval,
- self.checkActivity)
-
- def stopTimers(self):
- if self.keepaliveTimer:
- self.keepaliveTimer.cancel()
- self.keepaliveTimer = None
- if self.activityTimer:
- self.activityTimer.cancel()
- self.activityTimer = None
-
- def activity(self, res=None):
- self.lastActivity = now()
-
- def doKeepalive(self):
- # send the keepalive request. If it fails outright, the connection
- # was already dropped, so just log and ignore.
- self.keepaliveTimer = None
- log.msg("sending app-level keepalive")
- d = self.perspective.callRemote("keepalive")
- d.addCallback(self.activity)
- d.addErrback(self.keepaliveLost)
-
- def keepaliveLost(self, f):
- log.msg("BotFactory.keepaliveLost")
-
- def checkActivity(self):
- self.activityTimer = None
- if self.lastActivity + self.keepaliveInterval < now():
- log.msg("BotFactory.checkActivity: nothing from master for "
- "%d secs" % (now() - self.lastActivity))
- self.perspective.broker.transport.loseConnection()
- return
- self.startTimers()
-
- def stopFactory(self):
- ReconnectingPBClientFactory.stopFactory(self)
- self.stopTimers()
-
-
-class BuildSlave(service.MultiService):
- botClass = Bot
-
- # debugOpts is a dictionary used during unit tests.
-
- # debugOpts['stallPings'] can be set to a tuple of (timeout, []). Any
- # calls to remote_print will stall for 'timeout' seconds before
- # returning. The DelayedCalls used to implement this are stashed in the
- # list so they can be cancelled later.
-
- # debugOpts['failPingOnce'] can be set to True to make the slaveping fail
- # exactly once.
-
- def __init__(self, host, port, name, passwd, basedir, keepalive,
- usePTY, keepaliveTimeout=30, umask=None, debugOpts={}):
- service.MultiService.__init__(self)
- self.debugOpts = debugOpts.copy()
- bot = self.botClass(basedir, usePTY)
- bot.setServiceParent(self)
- self.bot = bot
- if keepalive == 0:
- keepalive = None
- self.umask = umask
- bf = self.bf = BotFactory(keepalive, keepaliveTimeout)
- bf.startLogin(credentials.UsernamePassword(name, passwd), client=bot)
- self.connection = c = internet.TCPClient(host, port, bf)
- c.setServiceParent(self)
-
- def waitUntilDisconnected(self):
- # utility method for testing. Returns a Deferred that will fire when
- # we lose the connection to the master.
- if not self.bf.perspective:
- return defer.succeed(None)
- d = defer.Deferred()
- self.bf.perspective.notifyOnDisconnect(lambda res: d.callback(None))
- return d
-
- def startService(self):
- if self.umask is not None:
- os.umask(self.umask)
- service.MultiService.startService(self)
-
- def stopService(self):
- self.bf.continueTrying = 0
- self.bf.stopTrying()
- service.MultiService.stopService(self)
- # now kill the TCP connection
- # twisted >2.0.1 does this for us, and leaves _connection=None
- if self.connection._connection:
- self.connection._connection.disconnect()
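-
-# Illustrative sketch (not part of the original file): how a slave's
-# buildbot.tac typically wires a BuildSlave into a twisted Application.
-# The host, port, credentials and paths shown are hypothetical.
-#
-#   from twisted.application import service
-#   application = service.Application('buildslave')
-#   s = BuildSlave("master.example.com", 9989, "slave1", "sekrit",
-#                  "/home/buildslave/basedir", keepalive=600, usePTY=1)
-#   s.setServiceParent(application)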
diff --git a/buildbot/buildbot-source/buildbot/slave/commands.py b/buildbot/buildbot-source/buildbot/slave/commands.py
deleted file mode 100644
index bda6ab305..000000000
--- a/buildbot/buildbot-source/buildbot/slave/commands.py
+++ /dev/null
@@ -1,1824 +0,0 @@
-# -*- test-case-name: buildbot.test.test_slavecommand -*-
-
-import os, os.path, re, signal, shutil, types, time
-
-from twisted.internet.protocol import ProcessProtocol
-from twisted.internet import reactor, defer
-from twisted.python import log, failure, runtime
-
-from buildbot.twcompat import implements, which
-from buildbot.slave.interfaces import ISlaveCommand
-from buildbot.slave.registry import registerSlaveCommand
-
-cvs_ver = '$Revision$'[1+len("Revision: "):-2]
-
-# version history:
-# >=1.17: commands are interruptable
-# >=1.28: Arch understands 'revision', added Bazaar
-# >=1.33: Source classes understand 'retry'
-# >=1.39: Source classes correctly handle changes in branch (except Git)
-# Darcs accepts 'revision' (now all do but Git) (well, and P4Sync)
-# Arch/Baz should accept 'build-config'
-
-class CommandInterrupted(Exception):
- pass
-class TimeoutError(Exception):
- pass
-
-class AbandonChain(Exception):
- """A series of chained steps can raise this exception to indicate that
- one of the intermediate ShellCommands has failed, such that there is no
- point in running the remainder. 'rc' should be the non-zero exit code of
- the failing ShellCommand."""
-
- def __repr__(self):
- return "<AbandonChain rc=%s>" % self.args[0]
-
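-# Illustrative sketch (not part of the original module) of the pattern the
-# AbandonChain docstring describes; Command._abandonOnFailure and
-# Command._checkAbandoned, defined further down, are the real versions.
-# Names prefixed with '_example' are hypothetical.
-#
-#   def _example_abandon_on_failure(rc):
-#       if rc != 0:
-#           raise AbandonChain(rc)   # skip the remaining chained steps
-#       return rc
-#
-#   def _example_check_abandoned(why):
-#       why.trap(AbandonChain)       # any other Failure keeps propagating
-#       return why.value.args[0]     # rc of the command that failed
-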
-def getCommand(name):
- possibles = which(name)
- if not possibles:
- raise RuntimeError("Couldn't find executable for '%s'" % name)
- return possibles[0]
-
-def rmdirRecursive(dir):
- """This is a replacement for shutil.rmtree that works better under
- windows. Thanks to Bear at the OSAF for the code."""
- if not os.path.exists(dir):
- return
-
- if os.path.islink(dir):
- os.remove(dir)
- return
-
- for name in os.listdir(dir):
- full_name = os.path.join(dir, name)
- # on Windows, if we don't have write permission we can't remove
- # the file/directory either, so turn that on
- if os.name == 'nt':
- if not os.access(full_name, os.W_OK):
- os.chmod(full_name, 0600)
- if os.path.isdir(full_name):
- rmdirRecursive(full_name)
- else:
- # print "removing file", full_name
- os.remove(full_name)
- os.rmdir(dir)
-
-class ShellCommandPP(ProcessProtocol):
- debug = False
-
- def __init__(self, command):
- self.command = command
-
- def connectionMade(self):
- if self.debug:
- log.msg("ShellCommandPP.connectionMade")
- if not self.command.process:
- if self.debug:
- log.msg(" assigning self.command.process: %s" %
- (self.transport,))
- self.command.process = self.transport
-
- if self.command.stdin:
- if self.debug: log.msg(" writing to stdin")
- self.transport.write(self.command.stdin)
-
- # TODO: maybe we shouldn't close stdin when using a PTY. I can't test
- # this yet, recent debian glibc has a bug which causes thread-using
- # test cases to SIGHUP trial, and the workaround is to either run
- # the whole test with /bin/sh -c " ".join(argv) (way gross) or to
- # not use a PTY. Once the bug is fixed, I'll be able to test what
- # happens when you close stdin on a pty. My concern is that it will
- # SIGHUP the child (since we are, in a sense, hanging up on them).
- # But it may well be that keeping stdout open prevents the SIGHUP
- # from being sent.
- #if not self.command.usePTY:
-
- if self.debug: log.msg(" closing stdin")
- self.transport.closeStdin()
-
- def outReceived(self, data):
- if self.debug:
- log.msg("ShellCommandPP.outReceived")
- self.command.addStdout(data)
-
- def errReceived(self, data):
- if self.debug:
- log.msg("ShellCommandPP.errReceived")
- self.command.addStderr(data)
-
- def processEnded(self, status_object):
- if self.debug:
- log.msg("ShellCommandPP.processEnded", status_object)
- # status_object is a Failure wrapped around an
-        # error.ProcessTerminated or an error.ProcessDone.
- # requires twisted >= 1.0.4 to overcome a bug in process.py
- sig = status_object.value.signal
- rc = status_object.value.exitCode
- self.command.finished(sig, rc)
-
-
-class ShellCommand:
- # This is a helper class, used by SlaveCommands to run programs in a
- # child shell.
-
- notreally = False
- BACKUP_TIMEOUT = 5
- KILL = "KILL"
-
- def __init__(self, builder, command,
- workdir, environ=None,
- sendStdout=True, sendStderr=True, sendRC=True,
- timeout=None, stdin=None, keepStdout=False):
- """
-
- @param keepStdout: if True, we keep a copy of all the stdout text
- that we've seen. This copy is available in
- self.stdout, which can be read after the command
- has finished.
- """
-
- self.builder = builder
- self.command = command
- self.sendStdout = sendStdout
- self.sendStderr = sendStderr
- self.sendRC = sendRC
- self.workdir = workdir
- self.environ = os.environ.copy()
- if environ:
- if (self.environ.has_key('PYTHONPATH')
- and environ.has_key('PYTHONPATH')):
- # special case, prepend the builder's items to the existing
- # ones. This will break if you send over empty strings, so
- # don't do that.
- environ['PYTHONPATH'] = (environ['PYTHONPATH']
- + os.pathsep
- + self.environ['PYTHONPATH'])
- # this will proceed to replace the old one
- self.environ.update(environ)
- self.stdin = stdin
- self.timeout = timeout
- self.timer = None
- self.keepStdout = keepStdout
-
- # usePTY=True is a convenience for cleaning up all children and
- # grandchildren of a hung command. Fall back to usePTY=False on
- # systems where ptys cause problems.
-
- self.usePTY = self.builder.usePTY
- if runtime.platformType != "posix":
- self.usePTY = False # PTYs are posix-only
- if stdin is not None:
- # for .closeStdin to matter, we must use a pipe, not a PTY
- self.usePTY = False
-
- def __repr__(self):
- return "<slavecommand.ShellCommand '%s'>" % self.command
-
- def sendStatus(self, status):
- self.builder.sendUpdate(status)
-
- def start(self):
- # return a Deferred which fires (with the exit code) when the command
- # completes
- if self.keepStdout:
- self.stdout = ""
- self.deferred = defer.Deferred()
- try:
- self._startCommand()
- except:
- log.msg("error in ShellCommand._startCommand")
- log.err()
- # pretend it was a shell error
- self.deferred.errback(AbandonChain(-1))
- return self.deferred
-
- def _startCommand(self):
- log.msg("ShellCommand._startCommand")
- if self.notreally:
- self.sendStatus({'header': "command '%s' in dir %s" % \
- (self.command, self.workdir)})
- self.sendStatus({'header': "(not really)\n"})
- self.finished(None, 0)
- return
-
- self.pp = ShellCommandPP(self)
-
- if type(self.command) in types.StringTypes:
- if runtime.platformType == 'win32':
- argv = ['/bin/bash', '-c', self.command]
- else:
-                # this build uses /bin/bash on posix. for other non-posix
-                # platforms, well, it doesn't hurt to try
- argv = ['/bin/bash', '-c', self.command]
- else:
- if runtime.platformType == 'win32':
- #argv = [os.environ['COMSPEC'], '/c'] + list(self.command)
-                # self.command is a list here; bash -c expects a single string
-                argv = ['/bin/bash', '-c', " ".join(self.command)]
-
- else:
- argv = self.command
-
- # self.stdin is handled in ShellCommandPP.connectionMade
-
- # first header line is the command in plain text, argv joined with
- # spaces. You should be able to cut-and-paste this into a shell to
- # obtain the same results. If there are spaces in the arguments, too
- # bad.
- msg = " ".join(argv)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # then comes the secondary information
- msg = " in dir %s" % (self.workdir,)
- if self.timeout:
- msg += " (timeout %d secs)" % (self.timeout,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
-        # then the argv array, to resolve any ambiguity
- msg = " argv: %s" % (argv,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # then the environment, since it sometimes causes problems
- msg = " environment: %s" % (self.environ,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # win32eventreactor's spawnProcess (under twisted <= 2.0.1) returns
- # None, as opposed to all the posixbase-derived reactors (which
- # return the new Process object). This is a nuisance. We can make up
- # for it by having the ProcessProtocol give us their .transport
- # attribute after they get one. I'd prefer to get it from
- # spawnProcess because I'm concerned about returning from this method
- # without having a valid self.process to work with. (if kill() were
- # called right after we return, but somehow before connectionMade
- # were called, then kill() would blow up).
- self.process = None
- p = reactor.spawnProcess(self.pp, argv[0], argv,
- self.environ,
- self.workdir,
- usePTY=self.usePTY)
- # connectionMade might have been called during spawnProcess
- if not self.process:
- self.process = p
-
- # connectionMade also closes stdin as long as we're not using a PTY.
- # This is intended to kill off inappropriately interactive commands
- # better than the (long) hung-command timeout. ProcessPTY should be
- # enhanced to allow the same childFDs argument that Process takes,
- # which would let us connect stdin to /dev/null .
-
- if self.timeout:
- self.timer = reactor.callLater(self.timeout, self.doTimeout)
-
- def addStdout(self, data):
- if self.sendStdout: self.sendStatus({'stdout': data})
- if self.keepStdout: self.stdout += data
- if self.timer: self.timer.reset(self.timeout)
-
- def addStderr(self, data):
- if self.sendStderr: self.sendStatus({'stderr': data})
- if self.timer: self.timer.reset(self.timeout)
-
- def finished(self, sig, rc):
- log.msg("command finished with signal %s, exit code %s" % (sig,rc))
- if sig is not None:
- rc = -1
- if self.sendRC:
- if sig is not None:
- self.sendStatus(
- {'header': "process killed by signal %d\n" % sig})
- self.sendStatus({'rc': rc})
- if self.timer:
- self.timer.cancel()
- self.timer = None
- d = self.deferred
- self.deferred = None
- if d:
- d.callback(rc)
- else:
- log.msg("Hey, command %s finished twice" % self)
-
- def failed(self, why):
- log.msg("ShellCommand.failed: command failed: %s" % (why,))
- if self.timer:
- self.timer.cancel()
- self.timer = None
- d = self.deferred
- self.deferred = None
- if d:
- d.errback(why)
- else:
- log.msg("Hey, command %s finished twice" % self)
-
- def doTimeout(self):
- self.timer = None
- msg = "command timed out: %d seconds without output" % self.timeout
- self.kill(msg)
-
- def kill(self, msg):
- # This may be called by the timeout, or when the user has decided to
- # abort this build.
- if self.timer:
- self.timer.cancel()
- self.timer = None
- if hasattr(self.process, "pid"):
- msg += ", killing pid %d" % self.process.pid
- log.msg(msg)
- self.sendStatus({'header': "\n" + msg + "\n"})
-
- hit = 0
- if runtime.platformType == "posix":
- try:
- # really want to kill off all child processes too. Process
- # Groups are ideal for this, but that requires
- # spawnProcess(usePTY=1). Try both ways in case process was
- # not started that way.
-
- # the test suite sets self.KILL=None to tell us we should
- # only pretend to kill the child. This lets us test the
- # backup timer.
-
- sig = None
- if self.KILL is not None:
- sig = getattr(signal, "SIG"+ self.KILL, None)
-
-                if self.KILL is None:
- log.msg("self.KILL==None, only pretending to kill child")
- elif sig is None:
- log.msg("signal module is missing SIG%s" % self.KILL)
- elif not hasattr(os, "kill"):
- log.msg("os module is missing the 'kill' function")
- else:
- log.msg("trying os.kill(-pid, %d)" % (sig,))
- os.kill(-self.process.pid, sig)
- log.msg(" signal %s sent successfully" % sig)
- hit = 1
- except OSError:
- # probably no-such-process, maybe because there is no process
- # group
- pass
- if not hit:
- try:
- if self.KILL is None:
- log.msg("self.KILL==None, only pretending to kill child")
- else:
- log.msg("trying process.signalProcess('KILL')")
- self.process.signalProcess(self.KILL)
- log.msg(" signal %s sent successfully" % (self.KILL,))
- hit = 1
- except OSError:
- # could be no-such-process, because they finished very recently
- pass
- if not hit:
- log.msg("signalProcess/os.kill failed both times")
-
- if runtime.platformType == "posix":
- # we only do this under posix because the win32eventreactor
- # blocks here until the process has terminated, while closing
- # stderr. This is weird.
- self.pp.transport.loseConnection()
-
- # finished ought to be called momentarily. Just in case it doesn't,
- # set a timer which will abandon the command.
- self.timer = reactor.callLater(self.BACKUP_TIMEOUT,
- self.doBackupTimeout)
-
- def doBackupTimeout(self):
- log.msg("we tried to kill the process, and it wouldn't die.."
- " finish anyway")
- self.timer = None
- self.sendStatus({'header': "SIGKILL failed to kill process\n"})
- if self.sendRC:
- self.sendStatus({'header': "using fake rc=-1\n"})
- self.sendStatus({'rc': -1})
- self.failed(TimeoutError("SIGKILL failed to kill process"))
-
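-# Illustrative sketch (not part of the original module): using ShellCommand
-# with keepStdout=True to capture output, the same pattern the VC classes
-# below use in their parseGotRevision() methods.  The 'builder' object and
-# the command shown are hypothetical.
-#
-#   c = ShellCommand(builder, ["uname", "-r"], builder.basedir,
-#                    sendStdout=False, sendStderr=False, sendRC=False,
-#                    keepStdout=True)
-#   d = c.start()                         # fires with the exit code
-#   d.addCallback(lambda rc: c.stdout)    # stdout was kept because keepStdout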
-
-class TCSHShellCommand:
-    # This is a helper class, used by SlaveCommands to run programs in a
-    # child tcsh shell.
-
- notreally = False
- BACKUP_TIMEOUT = 5
- KILL = "KILL"
-
- def __init__(self, builder, command,
- workdir, environ=None,
- sendStdout=True, sendStderr=True, sendRC=True,
- timeout=None, stdin=None, keepStdout=False):
- """
-
- @param keepStdout: if True, we keep a copy of all the stdout text
- that we've seen. This copy is available in
- self.stdout, which can be read after the command
- has finished.
- """
-
- self.builder = builder
- self.command = command
- self.sendStdout = sendStdout
- self.sendStderr = sendStderr
- self.sendRC = sendRC
- self.workdir = workdir
- self.environ = os.environ.copy()
- if environ:
- if (self.environ.has_key('PYTHONPATH')
- and environ.has_key('PYTHONPATH')):
- # special case, prepend the builder's items to the existing
- # ones. This will break if you send over empty strings, so
- # don't do that.
- environ['PYTHONPATH'] = (environ['PYTHONPATH']
- + os.pathsep
- + self.environ['PYTHONPATH'])
- # this will proceed to replace the old one
- self.environ.update(environ)
- self.stdin = stdin
- self.timeout = timeout
- self.timer = None
- self.keepStdout = keepStdout
-
- # usePTY=True is a convenience for cleaning up all children and
- # grandchildren of a hung command. Fall back to usePTY=False on
- # systems where ptys cause problems.
-
- self.usePTY = self.builder.usePTY
- if runtime.platformType != "posix":
- self.usePTY = False # PTYs are posix-only
- if stdin is not None:
- # for .closeStdin to matter, we must use a pipe, not a PTY
- self.usePTY = False
-
- def __repr__(self):
- return "<slavecommand.ShellCommand '%s'>" % self.command
-
- def sendStatus(self, status):
- self.builder.sendUpdate(status)
-
- def start(self):
- # return a Deferred which fires (with the exit code) when the command
- # completes
- if self.keepStdout:
- self.stdout = ""
- self.deferred = defer.Deferred()
- try:
- self._startCommand()
- except:
- log.msg("error in ShellCommand._startCommand")
- log.err()
- # pretend it was a shell error
- self.deferred.errback(AbandonChain(-1))
- return self.deferred
-
- def _startCommand(self):
- log.msg("ShellCommand._startCommand")
- if self.notreally:
- self.sendStatus({'header': "command '%s' in dir %s" % \
- (self.command, self.workdir)})
- self.sendStatus({'header': "(not really)\n"})
- self.finished(None, 0)
- return
-
- self.pp = ShellCommandPP(self)
-
- if type(self.command) in types.StringTypes:
- if runtime.platformType == 'win32':
- argv = ['/usr/bin/tcsh', '-c', self.command]
- else:
-                # this build uses /usr/bin/tcsh on posix. for other non-posix
-                # platforms, well, it doesn't hurt to try
- argv = ['/usr/bin/tcsh', '-c', self.command]
- else:
- if runtime.platformType == 'win32':
- argv = [os.environ['COMSPEC'], '/c'] + list(self.command)
- else:
- argv = self.command
-
- # self.stdin is handled in ShellCommandPP.connectionMade
-
- # first header line is the command in plain text, argv joined with
- # spaces. You should be able to cut-and-paste this into a shell to
- # obtain the same results. If there are spaces in the arguments, too
- # bad.
- msg = " ".join(argv)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # then comes the secondary information
- msg = " in dir %s" % (self.workdir,)
- if self.timeout:
- msg += " (timeout %d secs)" % (self.timeout,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
-        # then the argv array, to resolve any ambiguity
- msg = " argv: %s" % (argv,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # then the environment, since it sometimes causes problems
- msg = " environment: %s" % (self.environ,)
- log.msg(" " + msg)
- self.sendStatus({'header': msg+"\n"})
-
- # win32eventreactor's spawnProcess (under twisted <= 2.0.1) returns
- # None, as opposed to all the posixbase-derived reactors (which
- # return the new Process object). This is a nuisance. We can make up
- # for it by having the ProcessProtocol give us their .transport
- # attribute after they get one. I'd prefer to get it from
- # spawnProcess because I'm concerned about returning from this method
- # without having a valid self.process to work with. (if kill() were
- # called right after we return, but somehow before connectionMade
- # were called, then kill() would blow up).
- self.process = None
- p = reactor.spawnProcess(self.pp, argv[0], argv,
- self.environ,
- self.workdir,
- usePTY=self.usePTY)
- # connectionMade might have been called during spawnProcess
- if not self.process:
- self.process = p
-
- # connectionMade also closes stdin as long as we're not using a PTY.
- # This is intended to kill off inappropriately interactive commands
- # better than the (long) hung-command timeout. ProcessPTY should be
- # enhanced to allow the same childFDs argument that Process takes,
- # which would let us connect stdin to /dev/null .
-
- if self.timeout:
- self.timer = reactor.callLater(self.timeout, self.doTimeout)
-
- def addStdout(self, data):
- if self.sendStdout: self.sendStatus({'stdout': data})
- if self.keepStdout: self.stdout += data
- if self.timer: self.timer.reset(self.timeout)
-
- def addStderr(self, data):
- if self.sendStderr: self.sendStatus({'stderr': data})
- if self.timer: self.timer.reset(self.timeout)
-
- def finished(self, sig, rc):
- log.msg("command finished with signal %s, exit code %s" % (sig,rc))
- if sig is not None:
- rc = -1
- if self.sendRC:
- if sig is not None:
- self.sendStatus(
- {'header': "process killed by signal %d\n" % sig})
- self.sendStatus({'rc': rc})
- if self.timer:
- self.timer.cancel()
- self.timer = None
- d = self.deferred
- self.deferred = None
- if d:
- d.callback(rc)
- else:
- log.msg("Hey, command %s finished twice" % self)
-
- def failed(self, why):
- log.msg("ShellCommand.failed: command failed: %s" % (why,))
- if self.timer:
- self.timer.cancel()
- self.timer = None
- d = self.deferred
- self.deferred = None
- if d:
- d.errback(why)
- else:
- log.msg("Hey, command %s finished twice" % self)
-
- def doTimeout(self):
- self.timer = None
- msg = "command timed out: %d seconds without output" % self.timeout
- self.kill(msg)
-
- def kill(self, msg):
- # This may be called by the timeout, or when the user has decided to
- # abort this build.
- if self.timer:
- self.timer.cancel()
- self.timer = None
- if hasattr(self.process, "pid"):
- msg += ", killing pid %d" % self.process.pid
- log.msg(msg)
- self.sendStatus({'header': "\n" + msg + "\n"})
-
- hit = 0
- if runtime.platformType == "posix":
- try:
- # really want to kill off all child processes too. Process
- # Groups are ideal for this, but that requires
- # spawnProcess(usePTY=1). Try both ways in case process was
- # not started that way.
-
- # the test suite sets self.KILL=None to tell us we should
- # only pretend to kill the child. This lets us test the
- # backup timer.
-
- sig = None
- if self.KILL is not None:
- sig = getattr(signal, "SIG"+ self.KILL, None)
-
-                if self.KILL is None:
- log.msg("self.KILL==None, only pretending to kill child")
- elif sig is None:
- log.msg("signal module is missing SIG%s" % self.KILL)
- elif not hasattr(os, "kill"):
- log.msg("os module is missing the 'kill' function")
- else:
- log.msg("trying os.kill(-pid, %d)" % (sig,))
- os.kill(-self.process.pid, sig)
- log.msg(" signal %s sent successfully" % sig)
- hit = 1
- except OSError:
- # probably no-such-process, maybe because there is no process
- # group
- pass
- if not hit:
- try:
- if self.KILL is None:
- log.msg("self.KILL==None, only pretending to kill child")
- else:
- log.msg("trying process.signalProcess('KILL')")
- self.process.signalProcess(self.KILL)
- log.msg(" signal %s sent successfully" % (self.KILL,))
- hit = 1
- except OSError:
- # could be no-such-process, because they finished very recently
- pass
- if not hit:
- log.msg("signalProcess/os.kill failed both times")
-
- if runtime.platformType == "posix":
- # we only do this under posix because the win32eventreactor
- # blocks here until the process has terminated, while closing
- # stderr. This is weird.
- self.pp.transport.loseConnection()
-
- # finished ought to be called momentarily. Just in case it doesn't,
- # set a timer which will abandon the command.
- self.timer = reactor.callLater(self.BACKUP_TIMEOUT,
- self.doBackupTimeout)
-
- def doBackupTimeout(self):
- log.msg("we tried to kill the process, and it wouldn't die.."
- " finish anyway")
- self.timer = None
- self.sendStatus({'header': "SIGKILL failed to kill process\n"})
- if self.sendRC:
- self.sendStatus({'header': "using fake rc=-1\n"})
- self.sendStatus({'rc': -1})
- self.failed(TimeoutError("SIGKILL failed to kill process"))
-
-
-class Command:
- if implements:
- implements(ISlaveCommand)
- else:
- __implements__ = ISlaveCommand
-
- """This class defines one command that can be invoked by the build master.
- The command is executed on the slave side, and always sends back a
- completion message when it finishes. It may also send intermediate status
- as it runs (by calling builder.sendStatus). Some commands can be
- interrupted (either by the build master or a local timeout), in which
- case the step is expected to complete normally with a status message that
- indicates an error occurred.
-
- These commands are used by BuildSteps on the master side. Each kind of
- BuildStep uses a single Command. The slave must implement all the
- Commands required by the set of BuildSteps used for any given build:
- this is checked at startup time.
-
- All Commands are constructed with the same signature:
- c = CommandClass(builder, args)
- where 'builder' is the parent SlaveBuilder object, and 'args' is a
- dict that is interpreted per-command.
-
- The setup(args) method is available for setup, and is run from __init__.
-
- The Command is started with start(). This method must be implemented in a
- subclass, and it should return a Deferred. When your step is done, you
- should fire the Deferred (the results are not used). If the command is
- interrupted, it should fire the Deferred anyway.
-
-    While the command runs, it may send status messages back to the
- buildmaster by calling self.sendStatus(statusdict). The statusdict is
- interpreted by the master-side BuildStep however it likes.
-
- A separate completion message is sent when the deferred fires, which
- indicates that the Command has finished, but does not carry any status
- data. If the Command needs to return an exit code of some sort, that
-    should be sent as a regular status message before the deferred is fired.
- Once builder.commandComplete has been run, no more status messages may be
- sent.
-
- If interrupt() is called, the Command should attempt to shut down as
- quickly as possible. Child processes should be killed, new ones should
- not be started. The Command should send some kind of error status update,
- then complete as usual by firing the Deferred.
-
- .interrupted should be set by interrupt(), and can be tested to avoid
- sending multiple error status messages.
-
- If .running is False, the bot is shutting down (or has otherwise lost the
- connection to the master), and should not send any status messages. This
-    is checked in Command.sendStatus.
-
- """
-
- # builder methods:
- # sendStatus(dict) (zero or more)
- # commandComplete() or commandInterrupted() (one, at end)
-
- debug = False
- interrupted = False
- running = False # set by Builder, cleared on shutdown or when the
- # Deferred fires
-
- def __init__(self, builder, stepId, args):
- self.builder = builder
- self.stepId = stepId # just for logging
- self.args = args
- self.setup(args)
-
- def setup(self, args):
- """Override this in a subclass to extract items from the args dict."""
- pass
-
- def start(self):
- """Start the command. self.running will be set just before this is
- called. This method should return a Deferred that will fire when the
- command has completed. The Deferred's argument will be ignored.
-
- This method should be overridden by subclasses."""
- raise NotImplementedError, "You must implement this in a subclass"
-
- def sendStatus(self, status):
- """Send a status update to the master."""
- if self.debug:
- log.msg("sendStatus", status)
- if not self.running:
- log.msg("would sendStatus but not .running")
- return
- self.builder.sendUpdate(status)
-
- def interrupt(self):
- """Override this in a subclass to allow commands to be interrupted.
- May be called multiple times, test and set self.interrupted=True if
- this matters."""
- pass
-
- # utility methods, mostly used by SlaveShellCommand and the like
-
- def _abandonOnFailure(self, rc):
- if type(rc) is not int:
- log.msg("weird, _abandonOnFailure was given rc=%s (%s)" % \
- (rc, type(rc)))
- assert isinstance(rc, int)
- if rc != 0:
- raise AbandonChain(rc)
- return rc
-
- def _sendRC(self, res):
- self.sendStatus({'rc': 0})
-
- def _checkAbandoned(self, why):
- log.msg("_checkAbandoned", why)
- why.trap(AbandonChain)
- log.msg(" abandoning chain", why.value)
- self.sendStatus({'rc': why.value.args[0]})
- return None
-
-
-class SlaveShellCommand(Command):
- """This is a Command which runs a shell command. The args dict contains
- the following keys:
-
- - ['command'] (required): a shell command to run. If this is a string,
-      it will be run with bash (['/bin/bash', '-c', command]). If it is a
- list (preferred), it will be used directly.
- - ['workdir'] (required): subdirectory in which the command will be run,
- relative to the builder dir
- - ['env']: a dict of environment variables to augment/replace os.environ
- - ['want_stdout']: 0 if stdout should be thrown away
- - ['want_stderr']: 0 if stderr should be thrown away
- - ['not_really']: 1 to skip execution and return rc=0
- - ['timeout']: seconds of silence to tolerate before killing command
-
- ShellCommand creates the following status messages:
- - {'stdout': data} : when stdout data is available
- - {'stderr': data} : when stderr data is available
- - {'header': data} : when headers (command start/stop) are available
- - {'rc': rc} : when the process has terminated
- """
-
- def start(self):
- args = self.args
- sendStdout = args.get('want_stdout', True)
- sendStderr = args.get('want_stderr', True)
- # args['workdir'] is relative to Builder directory, and is required.
- assert args['workdir'] is not None
- workdir = os.path.join(self.builder.basedir, args['workdir'])
- timeout = args.get('timeout', None)
-
- c = ShellCommand(self.builder, args['command'],
- workdir, environ=args.get('env'),
- timeout=timeout,
- sendStdout=sendStdout, sendStderr=sendStderr,
- sendRC=True)
- self.command = c
- d = self.command.start()
- return d
-
- def interrupt(self):
- self.interrupted = True
- self.command.kill("command interrupted")
-
-
-registerSlaveCommand("shell", SlaveShellCommand, cvs_ver)
-
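-# Illustrative example (not part of the original module): the args dict a
-# master-side shell BuildStep might send for the "shell" command registered
-# above, and how the parent SlaveBuilder would drive it.  The builder object
-# and the values shown are hypothetical.
-#
-#   args = {'command': ["make", "check"],     # list form is preferred
-#           'workdir': "build",               # relative to the builder dir
-#           'env': {'CFLAGS': "-O2"},
-#           'timeout': 1200}
-#   cmd = SlaveShellCommand(builder, "step0", args)
-#   cmd.running = True              # normally set by the SlaveBuilder
-#   d = cmd.start()                 # fires when done (the result is ignored);
-#                                   # the exit code reaches the master as a
-#                                   # {'rc': rc} status update
-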
-class SlaveTCSHShellCommand(Command):
- """This is a Command which runs a shell command. The args dict contains
- the following keys:
-
- - ['command'] (required): a shell command to run. If this is a string,
-      it will be run with tcsh (['/usr/bin/tcsh', '-c', command]). If it is a
- list (preferred), it will be used directly.
- - ['workdir'] (required): subdirectory in which the command will be run,
- relative to the builder dir
- - ['env']: a dict of environment variables to augment/replace os.environ
- - ['want_stdout']: 0 if stdout should be thrown away
- - ['want_stderr']: 0 if stderr should be thrown away
- - ['not_really']: 1 to skip execution and return rc=0
- - ['timeout']: seconds of silence to tolerate before killing command
-
-    TCSHShellCommand creates the following status messages:
- - {'stdout': data} : when stdout data is available
- - {'stderr': data} : when stderr data is available
- - {'header': data} : when headers (command start/stop) are available
- - {'rc': rc} : when the process has terminated
- """
-
- def start(self):
- args = self.args
- sendStdout = args.get('want_stdout', True)
- sendStderr = args.get('want_stderr', True)
- # args['workdir'] is relative to Builder directory, and is required.
- assert args['workdir'] is not None
- workdir = os.path.join(self.builder.basedir, args['workdir'])
- timeout = args.get('timeout', None)
-
- c = TCSHShellCommand(self.builder, args['command'],
- workdir, environ=args.get('env'),
- timeout=timeout,
- sendStdout=sendStdout, sendStderr=sendStderr,
- sendRC=True)
- self.command = c
- d = self.command.start()
- return d
-
- def interrupt(self):
- self.interrupted = True
- self.command.kill("command interrupted")
-
-
-registerSlaveCommand("tcsh", SlaveTCSHShellCommand, cvs_ver)
-
-
-class DummyCommand(Command):
- """
- I am a dummy no-op command that by default takes 5 seconds to complete.
- See L{buildbot.process.step.RemoteDummy}
- """
-
- def start(self):
- self.d = defer.Deferred()
- log.msg(" starting dummy command [%s]" % self.stepId)
- self.timer = reactor.callLater(1, self.doStatus)
- return self.d
-
- def interrupt(self):
- if self.interrupted:
- return
- self.timer.cancel()
- self.timer = None
- self.interrupted = True
- self.finished()
-
- def doStatus(self):
- log.msg(" sending intermediate status")
- self.sendStatus({'stdout': 'data'})
- timeout = self.args.get('timeout', 5) + 1
- self.timer = reactor.callLater(timeout - 1, self.finished)
-
- def finished(self):
- log.msg(" dummy command finished [%s]" % self.stepId)
- if self.interrupted:
- self.sendStatus({'rc': 1})
- else:
- self.sendStatus({'rc': 0})
- self.d.callback(0)
-
-registerSlaveCommand("dummy", DummyCommand, cvs_ver)
-
-
-class SourceBase(Command):
- """Abstract base class for Version Control System operations (checkout
- and update). This class extracts the following arguments from the
- dictionary received from the master:
-
- - ['workdir']: (required) the subdirectory where the buildable sources
- should be placed
-
- - ['mode']: one of update/copy/clobber/export, defaults to 'update'
-
- - ['revision']: If not None, this is an int or string which indicates
- which sources (along a time-like axis) should be used.
- It is the thing you provide as the CVS -r or -D
- argument.
-
- - ['patch']: If not None, this is a tuple of (striplevel, patch)
- which contains a patch that should be applied after the
- checkout has occurred. Once applied, the tree is no
- longer eligible for use with mode='update', and it only
- makes sense to use this in conjunction with a
- ['revision'] argument. striplevel is an int, and patch
- is a string in standard unified diff format. The patch
- will be applied with 'patch -p%d <PATCH', with
- STRIPLEVEL substituted as %d. The command will fail if
- the patch process fails (rejected hunks).
-
- - ['timeout']: seconds of silence tolerated before we kill off the
- command
-
- - ['retry']: If not None, this is a tuple of (delay, repeats)
- which means that any failed VC updates should be
- reattempted, up to REPEATS times, after a delay of
- DELAY seconds. This is intended to deal with slaves
- that experience transient network failures.
- """
-
- sourcedata = ""
-
- def setup(self, args):
- # if we need to parse the output, use this environment. Otherwise
- # command output will be in whatever the buildslave's native language
- # has been set to.
- self.env = os.environ.copy()
- self.env['LC_ALL'] = "C"
-
- self.workdir = args['workdir']
- self.mode = args.get('mode', "update")
- self.revision = args.get('revision')
- self.patch = args.get('patch')
- self.timeout = args.get('timeout', 120)
- self.retry = args.get('retry')
- # VC-specific subclasses should override this to extract more args.
- # Make sure to upcall!
-
- def start(self):
- self.sendStatus({'header': "starting " + self.header + "\n"})
- self.command = None
-
- # self.srcdir is where the VC system should put the sources
- if self.mode == "copy":
- self.srcdir = "source" # hardwired directory name, sorry
- else:
- self.srcdir = self.workdir
- self.sourcedatafile = os.path.join(self.builder.basedir,
- self.srcdir,
- ".buildbot-sourcedata")
-
- d = defer.succeed(None)
- # do we need to clobber anything?
- if self.mode in ("copy", "clobber", "export"):
- d.addCallback(self.doClobber, self.workdir)
- if not (self.sourcedirIsUpdateable() and self.sourcedataMatches()):
- # the directory cannot be updated, so we have to clobber it.
- # Perhaps the master just changed modes from 'export' to
- # 'update'.
- d.addCallback(self.doClobber, self.srcdir)
-
- d.addCallback(self.doVC)
-
- if self.mode == "copy":
- d.addCallback(self.doCopy)
- if self.patch:
- d.addCallback(self.doPatch)
- d.addCallbacks(self._sendRC, self._checkAbandoned)
- return d
-
- def interrupt(self):
- self.interrupted = True
- if self.command:
- self.command.kill("command interrupted")
-
- def doVC(self, res):
- if self.interrupted:
- raise AbandonChain(1)
- if self.sourcedirIsUpdateable() and self.sourcedataMatches():
- d = self.doVCUpdate()
- d.addCallback(self.maybeDoVCFallback)
- else:
- d = self.doVCFull()
- d.addBoth(self.maybeDoVCRetry)
- d.addCallback(self._abandonOnFailure)
- d.addCallback(self._handleGotRevision)
- d.addCallback(self.writeSourcedata)
- return d
-
- def sourcedataMatches(self):
- try:
- olddata = open(self.sourcedatafile, "r").read()
- if olddata != self.sourcedata:
- return False
- except IOError:
- return False
- return True
-
- def _handleGotRevision(self, res):
- d = defer.maybeDeferred(self.parseGotRevision)
- d.addCallback(lambda got_revision:
- self.sendStatus({'got_revision': got_revision}))
- return d
-
- def parseGotRevision(self):
- """Override this in a subclass. It should return a string that
- represents which revision was actually checked out, or a Deferred
- that will fire with such a string. If, in a future build, you were to
- pass this 'got_revision' string in as the 'revision' component of a
- SourceStamp, you should wind up with the same source code as this
- checkout just obtained.
-
- It is probably most useful to scan self.command.stdout for a string
- of some sort. Be sure to set keepStdout=True on the VC command that
- you run, so that you'll have something available to look at.
-
- If this information is unavailable, just return None."""
-
- return None
-
- def writeSourcedata(self, res):
- open(self.sourcedatafile, "w").write(self.sourcedata)
- return res
-
- def sourcedirIsUpdateable(self):
- raise NotImplementedError("this must be implemented in a subclass")
-
- def doVCUpdate(self):
- raise NotImplementedError("this must be implemented in a subclass")
-
- def doVCFull(self):
- raise NotImplementedError("this must be implemented in a subclass")
-
- def maybeDoVCFallback(self, rc):
- if type(rc) is int and rc == 0:
- return rc
- if self.interrupted:
- raise AbandonChain(1)
- msg = "update failed, clobbering and trying again"
- self.sendStatus({'header': msg + "\n"})
- log.msg(msg)
- d = self.doClobber(None, self.srcdir)
- d.addCallback(self.doVCFallback2)
- return d
-
- def doVCFallback2(self, res):
- msg = "now retrying VC operation"
- self.sendStatus({'header': msg + "\n"})
- log.msg(msg)
- d = self.doVCFull()
- d.addBoth(self.maybeDoVCRetry)
- d.addCallback(self._abandonOnFailure)
- return d
-
- def maybeDoVCRetry(self, res):
- """We get here somewhere after a VC chain has finished. res could
- be::
-
- - 0: the operation was successful
- - nonzero: the operation failed. retry if possible
- - AbandonChain: the operation failed, someone else noticed. retry.
- - Failure: some other exception, re-raise
- """
-
- if isinstance(res, failure.Failure):
- if self.interrupted:
- return res # don't re-try interrupted builds
- res.trap(AbandonChain)
- else:
- if type(res) is int and res == 0:
- return res
- if self.interrupted:
- raise AbandonChain(1)
- # if we get here, we should retry, if possible
- if self.retry:
- delay, repeats = self.retry
- if repeats >= 0:
- self.retry = (delay, repeats-1)
- msg = ("update failed, trying %d more times after %d seconds"
- % (repeats, delay))
- self.sendStatus({'header': msg + "\n"})
- log.msg(msg)
- d = defer.Deferred()
- d.addCallback(lambda res: self.doVCFull())
- d.addBoth(self.maybeDoVCRetry)
- reactor.callLater(delay, d.callback, None)
- return d
- return res
-
- def doClobber(self, dummy, dirname):
- # TODO: remove the old tree in the background
-## workdir = os.path.join(self.builder.basedir, self.workdir)
-## deaddir = self.workdir + ".deleting"
-## if os.path.isdir(workdir):
-## try:
-## os.rename(workdir, deaddir)
-## # might fail if deaddir already exists: previous deletion
-## # hasn't finished yet
-## # start the deletion in the background
-## # TODO: there was a solaris/NetApp/NFS problem where a
-## # process that was still running out of the directory we're
-## # trying to delete could prevent the rm-rf from working. I
-## # think it stalled the rm, but maybe it just died with
-## # permission issues. Try to detect this.
-## os.commands("rm -rf %s &" % deaddir)
-## except:
-## # fall back to sequential delete-then-checkout
-## pass
- d = os.path.join(self.builder.basedir, dirname)
- if runtime.platformType != "posix":
- # if we're running on w32, use rmtree instead. It will block,
- # but hopefully it won't take too long.
- rmdirRecursive(d)
- return defer.succeed(0)
- command = ["rm", "-rf", d]
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=0, timeout=self.timeout)
- self.command = c
- # sendRC=0 means the rm command will send stdout/stderr to the
- # master, but not the rc=0 when it finishes. That job is left to
- # _sendRC
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- return d
-
- def doCopy(self, res):
- # now copy tree to workdir
- fromdir = os.path.join(self.builder.basedir, self.srcdir)
- todir = os.path.join(self.builder.basedir, self.workdir)
- if runtime.platformType != "posix":
- shutil.copytree(fromdir, todir)
- return defer.succeed(0)
- command = ['cp', '-r', '-p', fromdir, todir]
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=False, timeout=self.timeout)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- return d
-
- def doPatch(self, res):
- patchlevel, diff = self.patch
- command = [getCommand("patch"), '-p%d' % patchlevel]
- dir = os.path.join(self.builder.basedir, self.workdir)
- # mark the directory so we don't try to update it later
- open(os.path.join(dir, ".buildbot-patched"), "w").write("patched\n")
- # now apply the patch
- c = ShellCommand(self.builder, command, dir,
- sendRC=False, timeout=self.timeout,
- stdin=diff)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- return d
-
-
-class CVS(SourceBase):
- """CVS-specific VC operation. In addition to the arguments handled by
- SourceBase, this command reads the following keys:
-
- ['cvsroot'] (required): the CVSROOT repository string
- ['cvsmodule'] (required): the module to be retrieved
- ['branch']: a '-r' tag or branch name to use for the checkout/update
- ['login']: a string for use as a password to 'cvs login'
- ['global_options']: a list of strings to use before the CVS verb
- """
-
- header = "cvs operation"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("cvs")
- self.vcexeoo = "./tinget.pl"
- self.cvsroot = args['cvsroot']
- self.cvsmodule = args['cvsmodule']
- self.global_options = args.get('global_options', [])
- self.branch = args.get('branch')
- self.login = args.get('login')
- self.sourcedata = "%s\n%s\n%s\n" % (self.cvsroot, self.cvsmodule,
- self.branch)
-
- def sourcedirIsUpdateable(self):
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, "CVS"))
-
- def start(self):
- if self.login is not None:
- # need to do a 'cvs login' command first
- d = self.builder.basedir
- command = ([self.vcexe, '-d', self.cvsroot] + self.global_options
- + ['login'])
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout,
- stdin=self.login+"\n")
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- d.addCallback(self._didLogin)
- return d
- else:
- return self._didLogin(None)
-
- def _didLogin(self, res):
- # now we really start
- return SourceBase.start(self)
-
- def doVCUpdate(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- #command = [self.vcexe, '-z3'] + self.global_options + ['update', '-dP']
- command = [self.vcexeoo]
- if self.branch:
- # command += ['-r', self.branch]
- command += [self.branch]
- #if self.revision:
- # command += ['-D', self.revision]
- command += [self.cvsmodule]
- command += ['up']
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- d = self.builder.basedir
- if self.mode == "export":
- verb = "export"
- else:
- verb = "checkout"
- #command = ([self.vcexe, '-d', self.cvsroot, '-z3'] +
- # self.global_options +
- # [verb, '-N', '-d', self.srcdir])
- command = [self.vcexeoo]
- if self.branch:
- # command += ['-r', self.branch]
- command += [self.branch]
- #if self.revision:
- # command += ['-D', self.revision]
- command += [self.cvsmodule]
- command += ['co']
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def parseGotRevision(self):
- # CVS does not have any kind of revision stamp to speak of. We return
- # the current timestamp as a best-effort guess, but this depends upon
- # the local system having a clock that is
- # reasonably-well-synchronized with the repository.
- return time.strftime("%Y-%m-%d %H:%M:%S +0000", time.gmtime())
-
-registerSlaveCommand("cvs", CVS, cvs_ver)
-
-class SVN(SourceBase):
- """Subversion-specific VC operation. In addition to the arguments
- handled by SourceBase, this command reads the following keys:
-
- ['svnurl'] (required): the SVN repository string
- """
-
- header = "svn operation"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("svn")
- self.svnurl = args['svnurl']
- self.sourcedata = "%s\n" % self.svnurl
-
- def sourcedirIsUpdateable(self):
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, ".svn"))
-
- def doVCUpdate(self):
- revision = self.args['revision'] or 'HEAD'
- # update: possible for mode in ('copy', 'update')
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'update', '--revision', str(revision)]
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout,
- keepStdout=True)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- revision = self.args['revision'] or 'HEAD'
- d = self.builder.basedir
- if self.mode == "export":
- command = [self.vcexe, 'export', '--revision', str(revision),
- self.svnurl, self.srcdir]
- else:
- # mode=='clobber', or copy/update on a broken workspace
- command = [self.vcexe, 'checkout', '--revision', str(revision),
- self.svnurl, self.srcdir]
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout,
- keepStdout=True)
- self.command = c
- return c.start()
-
- def parseGotRevision(self):
- # svn checkout operations finish with 'Checked out revision 16657.'
- # svn update operations finish the line 'At revision 16654.'
- # But we don't use those. Instead, run 'svnversion'.
- svnversion_command = getCommand("svnversion")
- # older versions of 'svnversion' (1.1.4) require the WC_PATH
- # argument, newer ones (1.3.1) do not.
- command = [svnversion_command, "."]
- c = ShellCommand(self.builder, command,
- os.path.join(self.builder.basedir, self.srcdir),
- environ=self.env,
- sendStdout=False, sendStderr=False, sendRC=False,
- keepStdout=True)
- c.usePTY = False
- d = c.start()
- def _parse(res):
- r = c.stdout.strip()
- got_version = None
- try:
- got_version = int(r)
- except ValueError:
- msg =("SVN.parseGotRevision unable to parse output "
- "of svnversion: '%s'" % r)
- log.msg(msg)
- self.sendStatus({'header': msg + "\n"})
- return got_version
- d.addCallback(_parse)
- return d
-
-
-registerSlaveCommand("svn", SVN, cvs_ver)
-
-class Darcs(SourceBase):
- """Darcs-specific VC operation. In addition to the arguments
- handled by SourceBase, this command reads the following keys:
-
- ['repourl'] (required): the Darcs repository string
- """
-
- header = "darcs operation"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("darcs")
- self.repourl = args['repourl']
- self.sourcedata = "%s\n" % self.repourl
- self.revision = self.args.get('revision')
-
- def sourcedirIsUpdateable(self):
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- if self.revision:
- # checking out a specific revision requires a full 'darcs get'
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, "_darcs"))
-
- def doVCUpdate(self):
- assert not self.revision
- # update: possible for mode in ('copy', 'update')
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'pull', '--all', '--verbose']
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- # checkout or export
- d = self.builder.basedir
- command = [self.vcexe, 'get', '--verbose', '--partial',
- '--repo-name', self.srcdir]
- if self.revision:
- # write the context to a file
- n = os.path.join(self.builder.basedir, ".darcs-context")
- f = open(n, "wb")
- f.write(self.revision)
- f.close()
- # tell Darcs to use that context
- command.append('--context')
- command.append(n)
- command.append(self.repourl)
-
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- d = c.start()
- if self.revision:
- d.addCallback(self.removeContextFile, n)
- return d
-
- def removeContextFile(self, res, n):
- os.unlink(n)
- return res
-
- def parseGotRevision(self):
- # we use 'darcs context' to find out what we wound up with
- command = [self.vcexe, "changes", "--context"]
- c = ShellCommand(self.builder, command,
- os.path.join(self.builder.basedir, self.srcdir),
- environ=self.env,
- sendStdout=False, sendStderr=False, sendRC=False,
- keepStdout=True)
- c.usePTY = False
- d = c.start()
- d.addCallback(lambda res: c.stdout)
- return d
-
-registerSlaveCommand("darcs", Darcs, cvs_ver)
-
-class Git(SourceBase):
- """Git specific VC operation. In addition to the arguments
- handled by SourceBase, this command reads the following keys:
-
- ['repourl'] (required): the Cogito repository string
- """
-
- header = "git operation"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.repourl = args['repourl']
- #self.sourcedata = "" # TODO
-
- def sourcedirIsUpdateable(self):
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, ".git"))
-
- def doVCUpdate(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = ['cg-update']
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- os.mkdir(d)
- command = ['cg-clone', '-s', self.repourl]
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
-registerSlaveCommand("git", Git, cvs_ver)
-
-class Arch(SourceBase):
- """Arch-specific (tla-specific) VC operation. In addition to the
- arguments handled by SourceBase, this command reads the following keys:
-
- ['url'] (required): the repository string
- ['version'] (required): which version (i.e. branch) to retrieve
- ['revision'] (optional): the 'patch-NN' argument to check out
- ['archive']: the archive name to use. If None, use the archive's default
- ['build-config']: if present, give to 'tla build-config' after checkout
- """
-
- header = "arch operation"
- buildconfig = None
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("tla")
- self.archive = args.get('archive')
- self.url = args['url']
- self.version = args['version']
- self.revision = args.get('revision')
- self.buildconfig = args.get('build-config')
- self.sourcedata = "%s\n%s\n%s\n" % (self.url, self.version,
- self.buildconfig)
-
- def sourcedirIsUpdateable(self):
- if self.revision:
- # Arch cannot roll a directory backwards, so if they ask for a
- # specific revision, clobber the directory. Technically this
- # could be limited to the cases where the requested revision is
- # later than our current one, but it's too hard to extract the
- # current revision from the tree.
- return False
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, "{arch}"))
-
- def doVCUpdate(self):
- # update: possible for mode in ('copy', 'update')
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'replay']
- if self.revision:
- command.append(self.revision)
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- # to do a checkout, we must first "register" the archive by giving
- # the URL to tla, which will go to the repository at that URL and
- # figure out the archive name. tla will tell you the archive name
- # when it is done, and all further actions must refer to this name.
-
- command = [self.vcexe, 'register-archive', '--force', self.url]
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=False, keepStdout=True,
- timeout=self.timeout)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- d.addCallback(self._didRegister, c)
- return d
-
- def _didRegister(self, res, c):
- # find out what tla thinks the archive name is. If the user told us
- # to use something specific, make sure it matches.
- r = re.search(r'Registering archive: (\S+)\s*$', c.stdout)
- if r:
- msg = "tla reports archive name is '%s'" % r.group(1)
- log.msg(msg)
- self.builder.sendUpdate({'header': msg+"\n"})
- if self.archive and r.group(1) != self.archive:
- msg = (" mismatch, we wanted an archive named '%s'"
- % self.archive)
- log.msg(msg)
- self.builder.sendUpdate({'header': msg+"\n"})
- raise AbandonChain(-1)
- self.archive = r.group(1)
- assert self.archive, "need archive name to continue"
- return self._doGet()
-
- def _doGet(self):
- ver = self.version
- if self.revision:
- ver += "--%s" % self.revision
- command = [self.vcexe, 'get', '--archive', self.archive,
- '--no-pristine',
- ver, self.srcdir]
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=False, timeout=self.timeout)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- if self.buildconfig:
- d.addCallback(self._didGet)
- return d
-
- def _didGet(self, res):
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'build-config', self.buildconfig]
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- return d
-
- def parseGotRevision(self):
- # using code from tryclient.TlaExtractor
- # 'tla logs --full' gives us ARCHIVE/BRANCH--REVISION
- # 'tla logs' gives us REVISION
- command = [self.vcexe, "logs", "--full", "--reverse"]
- c = ShellCommand(self.builder, command,
- os.path.join(self.builder.basedir, self.srcdir),
- environ=self.env,
- sendStdout=False, sendStderr=False, sendRC=False,
- keepStdout=True)
- c.usePTY = False
- d = c.start()
- def _parse(res):
- tid = c.stdout.split("\n")[0].strip()
- slash = tid.index("/")
- dd = tid.rindex("--")
- #branch = tid[slash+1:dd]
- baserev = tid[dd+2:]
- return baserev
- d.addCallback(_parse)
- return d
-
-registerSlaveCommand("arch", Arch, cvs_ver)
-
-class Bazaar(Arch):
- """Bazaar (/usr/bin/baz) is an alternative client for Arch repositories.
- It is mostly option-compatible, but archive registration is different
- enough to warrant a separate Command.
-
- ['archive'] (required): the name of the archive being used
- """
-
- def setup(self, args):
- Arch.setup(self, args)
- self.vcexe = getCommand("baz")
- # baz doesn't emit the repository name after registration (and
- # grepping through the output of 'baz archives' is too hard), so we
- # require the buildmaster configuration to provide both the
- # archive name and the URL.
- self.archive = args['archive'] # required for Baz
- self.sourcedata = "%s\n%s\n%s\n" % (self.url, self.version,
- self.buildconfig)
-
- # in _didRegister, the regexp won't match, so we'll stick with the name
- # in self.archive
-
- def _doGet(self):
- # baz prefers ARCHIVE/VERSION. This will work even if
- # my-default-archive is not set.
- ver = self.archive + "/" + self.version
- if self.revision:
- ver += "--%s" % self.revision
- command = [self.vcexe, 'get', '--no-pristine',
- ver, self.srcdir]
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=False, timeout=self.timeout)
- self.command = c
- d = c.start()
- d.addCallback(self._abandonOnFailure)
- if self.buildconfig:
- d.addCallback(self._didGet)
- return d
-
- def parseGotRevision(self):
- # using code from tryclient.BazExtractor
- command = [self.vcexe, "tree-id"]
- c = ShellCommand(self.builder, command,
- os.path.join(self.builder.basedir, self.srcdir),
- environ=self.env,
- sendStdout=False, sendStderr=False, sendRC=False,
- keepStdout=True)
- c.usePTY = False
- d = c.start()
- def _parse(res):
- tid = c.stdout.strip()
- slash = tid.index("/")
- dd = tid.rindex("--")
- #branch = tid[slash+1:dd]
- baserev = tid[dd+2:]
- return baserev
- d.addCallback(_parse)
- return d
-
-registerSlaveCommand("bazaar", Bazaar, cvs_ver)
-
-
-class Mercurial(SourceBase):
- """Mercurial specific VC operation. In addition to the arguments
- handled by SourceBase, this command reads the following keys:
-
- ['repourl'] (required): the Mercurial repository string
- """
-
- header = "mercurial operation"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("hg")
- self.repourl = args['repourl']
- self.sourcedata = "%s\n" % self.repourl
- self.stdout = ""
- self.stderr = ""
-
- def sourcedirIsUpdateable(self):
- if os.path.exists(os.path.join(self.builder.basedir,
- self.srcdir, ".buildbot-patched")):
- return False
- # like Darcs, to check out a specific (old) revision, we have to do a
- # full checkout. TODO: I think 'hg pull' plus 'hg update' might work
- if self.revision:
- return False
- return os.path.isdir(os.path.join(self.builder.basedir,
- self.srcdir, ".hg"))
-
- def doVCUpdate(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'pull', '--update', '--verbose']
- if self.args['revision']:
- command.extend(['--rev', self.args['revision']])
- c = ShellCommand(self.builder, command, d,
- sendRC=False, timeout=self.timeout,
- keepStdout=True)
- self.command = c
- d = c.start()
- d.addCallback(self._handleEmptyUpdate)
- return d
-
- def _handleEmptyUpdate(self, res):
- if type(res) is int and res == 1:
- if self.command.stdout.find("no changes found") != -1:
- # 'hg pull', when it doesn't have anything to do, exits with
- # rc=1, and there appears to be no way to shut this off. It
- # emits a distinctive message to stdout, though. So catch
- # this and pretend that it completed successfully.
- return 0
- return res
-
- def doVCFull(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe, 'clone']
- if self.args['revision']:
- command.extend(['--rev', self.args['revision']])
- command.extend([self.repourl, d])
- c = ShellCommand(self.builder, command, self.builder.basedir,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def parseGotRevision(self):
- # we use 'hg identify' to find out what we wound up with
- command = [self.vcexe, "identify"]
- c = ShellCommand(self.builder, command,
- os.path.join(self.builder.basedir, self.srcdir),
- environ=self.env,
- sendStdout=False, sendStderr=False, sendRC=False,
- keepStdout=True)
- d = c.start()
- def _parse(res):
- m = re.search(r'^(\w+)', c.stdout)
- return m.group(1)
- d.addCallback(_parse)
- return d
-
-registerSlaveCommand("hg", Mercurial, cvs_ver)
-
-
-class P4Sync(SourceBase):
- """A partial P4 source-updater. Requires manual setup of a per-slave P4
- environment. The only thing which comes from the master is P4PORT.
- 'mode' is required to be 'copy'.
-
- ['p4port'] (required): host:port for server to access
- ['p4user'] (optional): user to use for access
- ['p4passwd'] (optional): passwd to try for the user
- ['p4client'] (optional): client spec to use
- """
-
- header = "p4 sync"
-
- def setup(self, args):
- SourceBase.setup(self, args)
- self.vcexe = getCommand("p4")
- self.p4port = args['p4port']
- self.p4user = args['p4user']
- self.p4passwd = args['p4passwd']
- self.p4client = args['p4client']
-
- def sourcedirIsUpdateable(self):
- return True
-
- def doVCUpdate(self):
- d = os.path.join(self.builder.basedir, self.srcdir)
- command = [self.vcexe]
- if self.p4port:
- command.extend(['-p', self.p4port])
- if self.p4user:
- command.extend(['-u', self.p4user])
- if self.p4passwd:
- command.extend(['-P', self.p4passwd])
- if self.p4client:
- command.extend(['-c', self.p4client])
- command.extend(['sync'])
- if self.revision:
- command.extend(['@' + self.revision])
- env = {}
- c = ShellCommand(self.builder, command, d, environ=env,
- sendRC=False, timeout=self.timeout)
- self.command = c
- return c.start()
-
- def doVCFull(self):
- return self.doVCUpdate()
-
-registerSlaveCommand("p4sync", P4Sync, cvs_ver)
diff --git a/buildbot/buildbot-source/buildbot/slave/interfaces.py b/buildbot/buildbot-source/buildbot/slave/interfaces.py
deleted file mode 100644
index 45096147e..000000000
--- a/buildbot/buildbot-source/buildbot/slave/interfaces.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#! /usr/bin/python
-
-from twisted.python.components import Interface
-
-class ISlaveCommand(Interface):
- """This interface is implemented by all of the buildslave's Command
- subclasses. It specifies how the buildslave can start, interrupt, and
- query the various Commands running on behalf of the buildmaster."""
-
- def __init__(builder, stepId, args):
- """Create the Command. 'builder' is a reference to the parent
- buildbot.bot.SlaveBuilder instance, which will be used to send status
- updates (by calling builder.sendStatus). 'stepId' is a random string
- which helps correlate slave logs with the master. 'args' is a dict of
- arguments that comes from the master-side BuildStep, with contents
- that are specific to the individual Command subclass.
-
- This method is not intended to be subclassed."""
-
- def setup(args):
- """This method is provided for subclasses to override, to extract
- parameters from the 'args' dictionary. The default implementation does
- nothing. It will be called from __init__."""
-
- def start():
- """Begin the command, and return a Deferred.
-
- While the command runs, it should send status updates to the
- master-side BuildStep by calling self.sendStatus(status). The
- 'status' argument is typically a dict with keys like 'stdout',
- 'stderr', and 'rc'.
-
- When the step completes, it should fire the Deferred (the results are
- not used). If an exception occurs during execution, it may also
- errback the Deferred; however, any reasonable errors should be trapped
- and indicated with a non-zero 'rc' status rather than raising an
- exception. Exceptions should indicate problems within the buildbot
- itself, not problems in the project being tested.
-
- """
-
- def interrupt():
- """This is called to tell the Command that the build is being stopped
- and therefore the command should be terminated as quickly as
- possible. The command may continue to send status updates, up to and
- including an 'rc' end-of-command update (which should indicate an
- error condition). The Command's deferred should still be fired when
- the command has finally completed.
-
- If the build is being stopped because the slave is shutting down or
- because the connection to the buildmaster has been lost, the status
- updates will simply be discarded. The Command does not need to be
- aware of this.
-
- Child shell processes should be killed. Simple ShellCommand classes
- can just insert a header line indicating that the process will be
- killed, then os.kill() the child."""
diff --git a/buildbot/buildbot-source/buildbot/slave/registry.py b/buildbot/buildbot-source/buildbot/slave/registry.py
deleted file mode 100644
index b4497d4fe..000000000
--- a/buildbot/buildbot-source/buildbot/slave/registry.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#! /usr/bin/python
-
-commandRegistry = {}
-
-def registerSlaveCommand(name, factory, version):
- """
- Register a slave command with the registry, making it available to slaves.
-
- @type name: string
- @param name: name under which the slave command will be registered; used
- for L{buildbot.slave.bot.SlaveBuilder.remote_startCommand}
-
- @type factory: L{buildbot.slave.commands.Command}
- @type version: string
- @param version: version string of the factory code
- """
- assert not commandRegistry.has_key(name)
- commandRegistry[name] = (factory, version)
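The registry above is just a module-level dict keyed by command name. A minimal self-contained sketch of the register-then-look-up flow follows; FooCommand and the lookup code are illustrative and not taken from the patch:

    commandRegistry = {}

    def registerSlaveCommand(name, factory, version):
        assert name not in commandRegistry
        commandRegistry[name] = (factory, version)

    class FooCommand:
        """Stand-in for a buildbot.slave.commands.Command subclass."""

    registerSlaveCommand("foo", FooCommand, "1.0")

    # roughly what the slave side would do when the master asks for "foo":
    factory, version = commandRegistry["foo"]
    assert factory is FooCommand and version == "1.0"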
diff --git a/buildbot/buildbot-source/buildbot/slave/trial.py b/buildbot/buildbot-source/buildbot/slave/trial.py
deleted file mode 100644
index 9d1fa6f69..000000000
--- a/buildbot/buildbot-source/buildbot/slave/trial.py
+++ /dev/null
@@ -1,175 +0,0 @@
-# -*- test-case-name: buildbot.test.test_trial.TestRemoteReporter -*-
-
-import types, time
-import zope.interface as zi
-
-from twisted.spread import pb
-from twisted.internet import reactor, defer
-from twisted.python import reflect, failure, log, usage, util
-from twisted.trial import registerAdapter, adaptWithDefault, reporter, runner
-from twisted.trial.interfaces import ITestMethod, ITestSuite, ITestRunner, \
- IJellied, IUnjellied, IRemoteReporter
-from twisted.application import strports
-
-
-class RemoteTestAny(object, util.FancyStrMixin):
- def __init__(self, original):
- self.original = original
-
- def __getattr__(self, attr):
- if attr not in self.original:
- raise AttributeError, "%s has no attribute %s" % (self.__str__(), attr)
- return self.original[attr]
-
-
-class RemoteTestMethod(RemoteTestAny):
- zi.implements(ITestMethod)
-
-class RemoteTestSuite(RemoteTestAny):
- zi.implements(ITestSuite)
-
-
-class RemoteReporter(reporter.Reporter):
- zi.implements(IRemoteReporter)
- pbroot = None
-
- def __init__(self, stream=None, tbformat=None, args=None):
- super(RemoteReporter, self).__init__(stream, tbformat, args)
-
- def setUpReporter(self):
- factory = pb.PBClientFactory()
-
- self.pbcnx = reactor.connectTCP("localhost", self.args, factory)
- assert self.pbcnx is not None
-
- def _cb(root):
- self.pbroot = root
- return root
-
- return factory.getRootObject().addCallback(_cb
- ).addErrback(log.err)
-
- def tearDownReporter(self):
- def _disconnected(passthru):
- log.msg(sekritHQ='_disconnected, passthru: %r' % (passthru,))
- return passthru
-
- d = defer.Deferred().addCallback(_disconnected
- ).addErrback(log.err)
-
- self.pbroot.notifyOnDisconnect(d.callback)
- self.pbcnx.transport.loseConnection()
- return d
-
- def reportImportError(self, name, fail):
- pass
-
- def startTest(self, method):
- return self.pbroot.callRemote('startTest', IJellied(method))
-
- def endTest(self, method):
- return self.pbroot.callRemote('endTest', IJellied(method))
-
- def startSuite(self, arg):
- return self.pbroot.callRemote('startSuite', IJellied(arg))
-
- def endSuite(self, suite):
- return self.pbroot.callRemote('endSuite', IJellied(suite))
-
-
-# -- Adapters --
-
-def jellyList(L):
- return [IJellied(i) for i in L]
-
-def jellyTuple(T):
- return tuple(IJellied(list(T)))
-
-def jellyDict(D):
- def _clean(*a):
- return tuple(map(lambda x: adaptWithDefault(IJellied, x, None), a))
- return dict([_clean(k, v) for k, v in D.iteritems()])
-
-def jellyTimingInfo(d, timed):
- for attr in ('startTime', 'endTime'):
- d[attr] = getattr(timed, attr, 0.0)
- return d
-
-def _logFormatter(eventDict):
- #XXX: this is pretty weak, it's basically the guts of
- # t.p.log.FileLogObserver.emit, but then again, that's been pretty
- # stable over the past few releases....
- edm = eventDict['message']
- if not edm:
- if eventDict['isError'] and eventDict.has_key('failure'):
- text = eventDict['failure'].getTraceback()
- elif eventDict.has_key('format'):
- try:
- text = eventDict['format'] % eventDict
- except:
- try:
- text = ('Invalid format string in log message: %s'
- % eventDict)
- except:
- text = 'UNFORMATTABLE OBJECT WRITTEN TO LOG, MESSAGE LOST'
- else:
- # we don't know how to log this
- return
- else:
- text = ' '.join(map(str, edm))
-
- timeStr = time.strftime("%Y/%m/%d %H:%M %Z", time.localtime(eventDict['time']))
- fmtDict = {'system': eventDict['system'], 'text': text.replace("\n", "\n\t")}
- msgStr = " [%(system)s] %(text)s\n" % fmtDict
- return "%s%s" % (timeStr, msgStr)
-
-def jellyTestMethod(testMethod):
- """@param testMethod: an object that implements L{twisted.trial.interfaces.ITestMethod}"""
- d = {}
- for attr in ('status', 'todo', 'skip', 'stdout', 'stderr',
- 'name', 'fullName', 'runs', 'errors', 'failures', 'module'):
- d[attr] = getattr(testMethod, attr)
-
- q = None
- try:
- q = reflect.qual(testMethod.klass)
- except TypeError:
- # XXX: This may be incorrect somehow
- q = "%s.%s" % (testMethod.module, testMethod.klass.__name__)
- d['klass'] = q
-
- d['logevents'] = [_logFormatter(event) for event in testMethod.logevents]
-
- jellyTimingInfo(d, testMethod)
-
- return d
-
-def jellyTestRunner(testRunner):
- """@param testRunner: an object that implements L{twisted.trial.interfaces.ITestRunner}"""
- d = dict(testMethods=[IJellied(m) for m in testRunner.testMethods])
- jellyTimingInfo(d, testRunner)
- return d
-
-def jellyTestSuite(testSuite):
- d = {}
- for attr in ('tests', 'runners', 'couldNotImport'):
- d[attr] = IJellied(getattr(testSuite, attr))
-
- jellyTimingInfo(d, testSuite)
- return d
-
-
-
-for a, o, i in [(jellyTuple, types.TupleType, IJellied),
- (jellyTestMethod, ITestMethod, IJellied),
- (jellyList, types.ListType, IJellied),
- (jellyTestSuite, ITestSuite, IJellied),
- (jellyTestRunner, ITestRunner, IJellied),
- (jellyDict, types.DictType, IJellied),
- (RemoteTestMethod, types.DictType, ITestMethod),
- (RemoteTestSuite, types.DictType, ITestSuite)]:
- registerAdapter(a, o, i)
-
-for t in [types.StringType, types.IntType, types.FloatType, failure.Failure]:
- zi.classImplements(t, IJellied)
-
diff --git a/buildbot/buildbot-source/buildbot/sourcestamp.py b/buildbot/buildbot-source/buildbot/sourcestamp.py
deleted file mode 100644
index 2c9e1ab6e..000000000
--- a/buildbot/buildbot-source/buildbot/sourcestamp.py
+++ /dev/null
@@ -1,85 +0,0 @@
-
-from buildbot import util, interfaces
-from buildbot.twcompat import implements
-
-class SourceStamp(util.ComparableMixin):
- """This is a tuple of (branch, revision, patchspec, changes).
-
- C{branch} is always valid, although it may be None to let the Source
- step use its default branch. There are four possibilities for the
- remaining elements:
- - (revision=REV, patchspec=None, changes=None): build REV
- - (revision=REV, patchspec=(LEVEL, DIFF), changes=None): checkout REV,
- then apply a patch to the source, with C{patch -pPATCHLEVEL <DIFF}.
- - (revision=None, patchspec=None, changes=[CHANGES]): let the Source
- step check out the latest revision indicated by the given Changes.
- CHANGES is a list of L{buildbot.changes.changes.Change} instances,
- and all must be on the same branch.
- - (revision=None, patchspec=None, changes=None): build the latest code
- from the given branch.
- """
-
- # all four of these are publicly visible attributes
- branch = None
- revision = None
- patch = None
- changes = []
-
- compare_attrs = ('branch', 'revision', 'patch', 'changes')
-
- if implements:
- implements(interfaces.ISourceStamp)
- else:
- __implements__ = interfaces.ISourceStamp,
-
- def __init__(self, branch=None, revision=None, patch=None,
- changes=None):
- self.branch = branch
- self.revision = revision
- self.patch = patch
- if changes:
- self.changes = changes
- self.branch = changes[0].branch
-
- def canBeMergedWith(self, other):
- if other.branch != self.branch:
- return False # the builds are completely unrelated
-
- if self.changes and other.changes:
- # TODO: consider not merging these. It's a tradeoff between
- # minimizing the number of builds and obtaining finer-grained
- # results.
- return True
- elif self.changes and not other.changes:
- return False # we're using changes, they aren't
- elif not self.changes and other.changes:
- return False # they're using changes, we aren't
-
- if self.patch or other.patch:
- return False # you can't merge patched builds with anything
- if self.revision == other.revision:
- # both builds are using the same specific revision, so they can
- # be merged. It might be the case that revision==None, so they're
- # both building HEAD.
- return True
-
- return False
-
- def mergeWith(self, others):
- """Generate a SourceStamp for the merger of me and all the other
- BuildRequests. This is called by a Build when it starts, to figure
- out what its sourceStamp should be."""
-
- # either we're all building the same thing (changes==None), or we're
- # all building changes (which can be merged)
- changes = []
- changes.extend(self.changes)
- for req in others:
- assert self.canBeMergedWith(req) # should have been checked already
- changes.extend(req.changes)
- newsource = SourceStamp(branch=self.branch,
- revision=self.revision,
- patch=self.patch,
- changes=changes)
- return newsource
-
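To make the merge rules above concrete: change-based stamps on the same branch can be merged into one stamp carrying all of the changes, while patched or cross-branch stamps cannot. A small sketch of the changes-only case; FakeChange and merge_changes are illustrative stand-ins, not buildbot code:

    class FakeChange:
        # hypothetical stand-in for buildbot.changes.changes.Change
        def __init__(self, branch):
            self.branch = branch

    def merge_changes(stamps):
        # mirrors SourceStamp.mergeWith for the changes-based, patch-free
        # case: all stamps must share a branch; change lists are concatenated
        branches = set([s['branch'] for s in stamps])
        assert len(branches) == 1
        merged = []
        for s in stamps:
            merged.extend(s['changes'])
        return {'branch': branches.pop(), 'changes': merged}

    a = {'branch': 'trunk', 'changes': [FakeChange('trunk')]}
    b = {'branch': 'trunk', 'changes': [FakeChange('trunk')]}
    assert len(merge_changes([a, b])['changes']) == 2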
diff --git a/buildbot/buildbot-source/buildbot/status/__init__.py b/buildbot/buildbot-source/buildbot/status/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/buildbot/status/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/buildbot/status/base.py b/buildbot/buildbot-source/buildbot/status/base.py
deleted file mode 100644
index 92bace5f8..000000000
--- a/buildbot/buildbot-source/buildbot/status/base.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#! /usr/bin/python
-
-from twisted.application import service
-from twisted.python import components
-
-try:
- from zope.interface import implements
-except ImportError:
- implements = None
-if not hasattr(components, "interface"):
- implements = None # nope
-
-from buildbot.interfaces import IStatusReceiver
-from buildbot import util, pbutil
-
-class StatusReceiver:
- if implements:
- implements(IStatusReceiver)
- else:
- __implements__ = IStatusReceiver,
-
- def buildsetSubmitted(self, buildset):
- pass
-
- def builderAdded(self, builderName, builder):
- pass
-
- def builderChangedState(self, builderName, state):
- pass
-
- def buildStarted(self, builderName, build):
- pass
-
- def buildETAUpdate(self, build, ETA):
- pass
-
- def stepStarted(self, build, step):
- pass
-
- def stepETAUpdate(self, build, step, ETA, expectations):
- pass
-
- def logStarted(self, build, step, log):
- pass
-
- def logChunk(self, build, step, log, channel, text):
- pass
-
- def logFinished(self, build, step, log):
- pass
-
- def stepFinished(self, build, step, results):
- pass
-
- def buildFinished(self, builderName, build, results):
- pass
-
- def builderRemoved(self, builderName):
- pass
-
-class StatusReceiverMultiService(StatusReceiver, service.MultiService,
- util.ComparableMixin):
- if implements:
- implements(IStatusReceiver)
- else:
- __implements__ = IStatusReceiver, service.MultiService.__implements__
-
- def __init__(self):
- service.MultiService.__init__(self)
-
-
-class StatusReceiverPerspective(StatusReceiver, pbutil.NewCredPerspective):
- if implements:
- implements(IStatusReceiver)
- else:
- __implements__ = (IStatusReceiver,
- pbutil.NewCredPerspective.__implements__)
diff --git a/buildbot/buildbot-source/buildbot/status/builder.py b/buildbot/buildbot-source/buildbot/status/builder.py
deleted file mode 100644
index 900287a7c..000000000
--- a/buildbot/buildbot-source/buildbot/status/builder.py
+++ /dev/null
@@ -1,1927 +0,0 @@
-# -*- test-case-name: buildbot.test.test_status -*-
-
-from __future__ import generators
-
-from twisted.python import log
-from twisted.persisted import styles
-from twisted.internet import reactor, defer
-from twisted.protocols import basic
-
-import time, os, os.path, shutil, sys, re, urllib
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-# sibling imports
-from buildbot import interfaces, util, sourcestamp
-from buildbot.twcompat import implements, providedBy
-
-SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION = range(5)
-Results = ["success", "warnings", "failure", "skipped", "exception"]
-
-
-# build processes call the following methods:
-#
-# setDefaults
-#
-# currentlyBuilding
-# currentlyIdle
-# currentlyInterlocked
-# currentlyOffline
-# currentlyWaiting
-#
-# setCurrentActivity
-# updateCurrentActivity
-# addFileToCurrentActivity
-# finishCurrentActivity
-#
-# startBuild
-# finishBuild
-
-STDOUT = 0
-STDERR = 1
-HEADER = 2
-ChunkTypes = ["stdout", "stderr", "header"]
-
-class LogFileScanner(basic.NetstringReceiver):
- def __init__(self, chunk_cb, channels=[]):
- self.chunk_cb = chunk_cb
- self.channels = channels
-
- def stringReceived(self, line):
- channel = int(line[0])
- if not self.channels or (channel in self.channels):
- self.chunk_cb((channel, line[1:]))
-
-class LogFileProducer:
- """What's the plan?
-
- the LogFile has just one FD, used for both reading and writing.
- Each time you add an entry, fd.seek to the end and then write.
-
- Each reader (i.e. Producer) keeps track of their own offset. The reader
- starts by seeking to the start of the logfile, and reading forwards.
- Between each hunk of file they yield chunks, so they must remember their
- offset before yielding and re-seek back to that offset before reading
- more data. When their read() returns EOF, they're finished with the first
- phase of the reading (everything that's already been written to disk).
-
- After EOF, the remaining data is entirely in the current entries list.
- These entries are all of the same channel, so we can do one "".join and
- obtain a single chunk to be sent to the listener. But since that involves
- a yield, and more data might arrive after we give up control, we have to
- subscribe them before yielding. We can't subscribe them any earlier,
- otherwise they'd get data out of order.
-
- We're using a generator in the first place so that the listener can
- throttle us, which means they're pulling. But the subscription means
- we're pushing. Really we're a Producer. In the first phase we can be
- either a PullProducer or a PushProducer. In the second phase we're only a
- PushProducer.
-
- So the client gives a LogFileConsumer to File.subscribeConsumer. This
- Consumer must have registerProducer(), unregisterProducer(), and
- writeChunk(), and is just like a regular twisted.interfaces.IConsumer,
- except that writeChunk() takes chunks (tuples of (channel,text)) instead
- of the normal write() which takes just text. The LogFileConsumer is
- allowed to call stopProducing, pauseProducing, and resumeProducing on the
- producer instance it is given. """
-
- paused = False
- subscribed = False
- BUFFERSIZE = 2048
-
- def __init__(self, logfile, consumer):
- self.logfile = logfile
- self.consumer = consumer
- self.chunkGenerator = self.getChunks()
- consumer.registerProducer(self, True)
-
- def getChunks(self):
- f = self.logfile.getFile()
- offset = 0
- chunks = []
- p = LogFileScanner(chunks.append)
- f.seek(offset)
- data = f.read(self.BUFFERSIZE)
- offset = f.tell()
- while data:
- p.dataReceived(data)
- while chunks:
- c = chunks.pop(0)
- yield c
- f.seek(offset)
- data = f.read(self.BUFFERSIZE)
- offset = f.tell()
- del f
-
- # now subscribe them to receive new entries
- self.subscribed = True
- self.logfile.watchers.append(self)
- d = self.logfile.waitUntilFinished()
-
- # then give them the not-yet-merged data
- if self.logfile.runEntries:
- channel = self.logfile.runEntries[0][0]
- text = "".join([c[1] for c in self.logfile.runEntries])
- yield (channel, text)
-
- # now we've caught up to the present. Anything further will come from
- # the logfile subscription. We add the callback *after* yielding the
- # data from runEntries, because the logfile might have finished
- # during the yield.
- d.addCallback(self.logfileFinished)
-
- def stopProducing(self):
- # TODO: should we still call consumer.finish? probably not.
- self.paused = True
- self.consumer = None
- self.done()
-
- def done(self):
- if self.chunkGenerator:
- self.chunkGenerator = None # stop making chunks
- if self.subscribed:
- self.logfile.watchers.remove(self)
- self.subscribed = False
-
- def pauseProducing(self):
- self.paused = True
-
- def resumeProducing(self):
- # Twisted-1.3.0 has a bug which causes hangs when resumeProducing
- # calls transport.write (there is a recursive loop, fixed in 2.0 in
- # t.i.abstract.FileDescriptor.doWrite by setting the producerPaused
- # flag *before* calling resumeProducing). To work around this, we
- # just put off the real resumeProducing for a moment. This probably
- # has a performance hit, but I'm going to assume that the log files
- # are not retrieved frequently enough for it to be an issue.
-
- reactor.callLater(0, self._resumeProducing)
-
- def _resumeProducing(self):
- self.paused = False
- if not self.chunkGenerator:
- return
- try:
- while not self.paused:
- chunk = self.chunkGenerator.next()
- self.consumer.writeChunk(chunk)
- # we exit this when the consumer says to stop, or we run out
- # of chunks
- except StopIteration:
- # if the generator finished, it will have done releaseFile
- self.chunkGenerator = None
- # now everything goes through the subscription, and they don't get to
- # pause anymore
-
- def logChunk(self, build, step, logfile, channel, chunk):
- if self.consumer:
- self.consumer.writeChunk((channel, chunk))
-
- def logfileFinished(self, logfile):
- self.done()
- if self.consumer:
- self.consumer.unregisterProducer()
- self.consumer.finish()
- self.consumer = None
-
-class LogFile:
- """A LogFile keeps all of its contents on disk, in a non-pickle format to
- which new entries can easily be appended. The file on disk has a name
- like 12-log-compile-output, under the Builder's directory. The actual
- filename is generated (before the LogFile is created) by
- L{BuildStatus.generateLogfileName}.
-
- Old LogFile pickles (which kept their contents in .entries) must be
- upgraded. The L{BuilderStatus} is responsible for doing this, when it
- loads the L{BuildStatus} into memory. The Build pickle is not modified,
- so users who go from 0.6.5 back to 0.6.4 don't have to lose their
- logs."""
-
- if implements:
- implements(interfaces.IStatusLog)
- else:
- __implements__ = interfaces.IStatusLog,
-
- finished = False
- length = 0
- progress = None
- chunkSize = 10*1000
- runLength = 0
- runEntries = [] # provided so old pickled builds will getChunks() ok
- entries = None
- BUFFERSIZE = 2048
- filename = None # relative to the Builder's basedir
- openfile = None
-
- def __init__(self, parent, name, logfilename):
- """
- @type parent: L{BuildStepStatus}
- @param parent: the Step that this log is a part of
- @type name: string
- @param name: the name of this log, typically 'output'
- @type logfilename: string
- @param logfilename: the Builder-relative pathname for the saved entries
- """
- self.step = parent
- self.name = name
- self.filename = logfilename
- fn = self.getFilename()
- if os.path.exists(fn):
- # the buildmaster was probably stopped abruptly, before the
- # BuilderStatus could be saved, so BuilderStatus.nextBuildNumber
- # is out of date, and we're overlapping with earlier builds now.
- # Warn about it, but then overwrite the old pickle file
- log.msg("Warning: Overwriting old serialized Build at %s" % fn)
- self.openfile = open(fn, "w+")
- self.runEntries = []
- self.watchers = []
- self.finishedWatchers = []
-
- def getFilename(self):
- return os.path.join(self.step.build.builder.basedir, self.filename)
-
- def hasContents(self):
- return os.path.exists(self.getFilename())
-
- def getName(self):
- return self.name
-
- def getStep(self):
- return self.step
-
- def isFinished(self):
- return self.finished
- def waitUntilFinished(self):
- if self.finished:
- d = defer.succeed(self)
- else:
- d = defer.Deferred()
- self.finishedWatchers.append(d)
- return d
-
- def getFile(self):
- if self.openfile:
- # this is the filehandle we're using to write to the log, so
- # don't close it!
- return self.openfile
- # otherwise they get their own read-only handle
- return open(self.getFilename(), "r")
-
- def getText(self):
- # this produces one ginormous string
- return "".join(self.getChunks([STDOUT, STDERR], onlyText=True))
-
- def getTextWithHeaders(self):
- return "".join(self.getChunks(onlyText=True))
-
- def getChunks(self, channels=[], onlyText=False):
- # generate chunks for everything that was logged at the time we were
- # first called, so remember how long the file was when we started.
- # Don't read beyond that point. The current contents of
- # self.runEntries will follow.
-
- # this returns an iterator, which means arbitrary things could happen
- # while we're yielding. This will faithfully deliver the log as it
- # existed when it was started, and not return anything after that
- # point. To use this in subscribe(catchup=True) without missing any
- # data, you must ensure that nothing will be added to the log during
- # yield() calls.
-
- f = self.getFile()
- offset = 0
- f.seek(0, 2)
- remaining = f.tell()
-
- leftover = None
- if self.runEntries and (not channels or
- (self.runEntries[0][0] in channels)):
- leftover = (self.runEntries[0][0],
- "".join([c[1] for c in self.runEntries]))
-
- # freeze the state of the LogFile by passing a lot of parameters into
- # a generator
- return self._generateChunks(f, offset, remaining, leftover,
- channels, onlyText)
-
- def _generateChunks(self, f, offset, remaining, leftover,
- channels, onlyText):
- chunks = []
- p = LogFileScanner(chunks.append, channels)
- f.seek(offset)
- data = f.read(min(remaining, self.BUFFERSIZE))
- remaining -= len(data)
- offset = f.tell()
- while data:
- p.dataReceived(data)
- while chunks:
- channel, text = chunks.pop(0)
- if onlyText:
- yield text
- else:
- yield (channel, text)
- f.seek(offset)
- data = f.read(min(remaining, self.BUFFERSIZE))
- remaining -= len(data)
- offset = f.tell()
- del f
-
- if leftover:
- if onlyText:
- yield leftover[1]
- else:
- yield leftover
-
- def subscribe(self, receiver, catchup):
- if self.finished:
- return
- self.watchers.append(receiver)
- if catchup:
- for channel, text in self.getChunks():
- # TODO: add logChunks(), to send over everything at once?
- receiver.logChunk(self.step.build, self.step, self,
- channel, text)
-
- def unsubscribe(self, receiver):
- if receiver in self.watchers:
- self.watchers.remove(receiver)
-
- def subscribeConsumer(self, consumer):
- p = LogFileProducer(self, consumer)
- p.resumeProducing()
-
- # interface used by the build steps to add things to the log
- def logProgressTo(self, progress, name):
- self.progress = progress
- self.progressName = name
-
- def merge(self):
- # merge all .runEntries (which are all of the same type) into a
- # single chunk for .entries
- if not self.runEntries:
- return
- channel = self.runEntries[0][0]
- text = "".join([c[1] for c in self.runEntries])
- assert channel < 10
- f = self.openfile
- f.seek(0, 2)
- offset = 0
- while offset < len(text):
- size = min(len(text)-offset, self.chunkSize)
- f.write("%d:%d" % (1 + size, channel))
- f.write(text[offset:offset+size])
- f.write(",")
- offset += size
- self.runEntries = []
- self.runLength = 0
-
- def addEntry(self, channel, text):
- assert not self.finished
- # we only add to .runEntries here. merge() is responsible for adding
- # merged chunks to .entries
- if self.runEntries and channel != self.runEntries[0][0]:
- self.merge()
- self.runEntries.append((channel, text))
- self.runLength += len(text)
- if self.runLength >= self.chunkSize:
- self.merge()
-
- for w in self.watchers:
- w.logChunk(self.step.build, self.step, self, channel, text)
- self.length += len(text)
- if self.progress:
- self.progress.setProgress(self.progressName, self.length)
-
- def addStdout(self, text):
- self.addEntry(STDOUT, text)
- def addStderr(self, text):
- self.addEntry(STDERR, text)
- def addHeader(self, text):
- self.addEntry(HEADER, text)
-
- def finish(self):
- self.merge()
- if self.openfile:
- # we don't do an explicit close, because there might be readers
- # sharing the filehandle. As soon as they stop reading, the
- # filehandle will be released and automatically closed. We will
- # do a sync, however, to make sure the log gets saved in case of
- # a crash.
- os.fsync(self.openfile.fileno())
- del self.openfile
- self.finished = True
- watchers = self.finishedWatchers
- self.finishedWatchers = []
- for w in watchers:
- w.callback(self)
- if self.progress:
- self.progress.setProgress(self.progressName, self.length)
- del self.progress
- del self.progressName
-
- # persistence stuff
- def __getstate__(self):
- d = self.__dict__.copy()
- del d['step'] # filled in upon unpickling
- del d['watchers']
- del d['finishedWatchers']
- d['entries'] = [] # let 0.6.4 tolerate the saved log. TODO: really?
- if d.has_key('finished'):
- del d['finished']
- if d.has_key('progress'):
- del d['progress']
- del d['progressName']
- if d.has_key('openfile'):
- del d['openfile']
- return d
-
- def __setstate__(self, d):
- self.__dict__ = d
- self.watchers = [] # probably not necessary
- self.finishedWatchers = [] # same
- # self.step must be filled in by our parent
- self.finished = True
-
- def upgrade(self, logfilename):
- """Save our .entries to a new-style offline log file (if necessary),
- and modify our in-memory representation to use it. The original
- pickled LogFile (inside the pickled Build) won't be modified."""
- self.filename = logfilename
- if not os.path.exists(self.getFilename()):
- self.openfile = open(self.getFilename(), "w")
- self.finished = False
- for channel,text in self.entries:
- self.addEntry(channel, text)
- self.finish() # releases self.openfile, which will be closed
- del self.entries
-
-
-class HTMLLogFile:
- if implements:
- implements(interfaces.IStatusLog)
- else:
- __implements__ = interfaces.IStatusLog,
-
- filename = None
-
- def __init__(self, parent, name, logfilename, html):
- self.step = parent
- self.name = name
- self.filename = logfilename
- self.html = html
-
- def getName(self):
- return self.name # set in BuildStepStatus.addLog
- def getStep(self):
- return self.step
-
- def isFinished(self):
- return True
- def waitUntilFinished(self):
- return defer.succeed(self)
-
- def hasContents(self):
- return True
- def getText(self):
- return self.html # looks kinda like text
- def getTextWithHeaders(self):
- return self.html
- def getChunks(self):
- return [(STDERR, self.html)]
-
- def subscribe(self, receiver, catchup):
- pass
- def unsubscribe(self, receiver):
- pass
-
- def finish(self):
- pass
-
- def __getstate__(self):
- d = self.__dict__.copy()
- del d['step']
- return d
-
- def upgrade(self, logfilename):
- pass
-
-
-class Event:
- if implements:
- implements(interfaces.IStatusEvent)
- else:
- __implements__ = interfaces.IStatusEvent,
-
- started = None
- finished = None
- text = []
- color = None
-
- # IStatusEvent methods
- def getTimes(self):
- return (self.started, self.finished)
- def getText(self):
- return self.text
- def getColor(self):
- return self.color
- def getLogs(self):
- return []
-
- def finish(self):
- self.finished = util.now()
-
-class TestResult:
- if implements:
- implements(interfaces.ITestResult)
- else:
- __implements__ = interfaces.ITestResult,
-
- def __init__(self, name, results, text, logs):
- assert isinstance(name, tuple)
- self.name = name
- self.results = results
- self.text = text
- self.logs = logs
-
- def getName(self):
- return self.name
-
- def getResults(self):
- return self.results
-
- def getText(self):
- return self.text
-
- def getLogs(self):
- return self.logs
-
-
-class BuildSetStatus:
- if implements:
- implements(interfaces.IBuildSetStatus)
- else:
- __implements__ = interfaces.IBuildSetStatus,
-
- def __init__(self, source, reason, builderNames, bsid=None):
- self.source = source
- self.reason = reason
- self.builderNames = builderNames
- self.id = bsid
- self.successWatchers = []
- self.finishedWatchers = []
- self.stillHopeful = True
- self.finished = False
-
- def setBuildRequestStatuses(self, buildRequestStatuses):
- self.buildRequests = buildRequestStatuses
- def setResults(self, results):
- # the build set succeeds only if all its component builds succeed
- self.results = results
- def giveUpHope(self):
- self.stillHopeful = False
-
-
- def notifySuccessWatchers(self):
- for d in self.successWatchers:
- d.callback(self)
- self.successWatchers = []
-
- def notifyFinishedWatchers(self):
- self.finished = True
- for d in self.finishedWatchers:
- d.callback(self)
- self.finishedWatchers = []
-
- # methods for our clients
-
- def getSourceStamp(self):
- return self.source
- def getReason(self):
- return self.reason
- def getResults(self):
- return self.results
- def getID(self):
- return self.id
-
- def getBuilderNames(self):
- return self.builderNames
- def getBuildRequests(self):
- return self.buildRequests
- def isFinished(self):
- return self.finished
-
- def waitUntilSuccess(self):
- if self.finished or not self.stillHopeful:
- # the deferreds have already fired
- return defer.succeed(self)
- d = defer.Deferred()
- self.successWatchers.append(d)
- return d
-
- def waitUntilFinished(self):
- if self.finished:
- return defer.succeed(self)
- d = defer.Deferred()
- self.finishedWatchers.append(d)
- return d
-
-class BuildRequestStatus:
- if implements:
- implements(interfaces.IBuildRequestStatus)
- else:
- __implements__ = interfaces.IBuildRequestStatus,
-
- def __init__(self, source, builderName):
- self.source = source
- self.builderName = builderName
- self.builds = [] # list of BuildStatus objects
- self.observers = []
-
- def buildStarted(self, build):
- self.builds.append(build)
- for o in self.observers[:]:
- o(build)
-
- # methods called by our clients
- def getSourceStamp(self):
- return self.source
- def getBuilderName(self):
- return self.builderName
- def getBuilds(self):
- return self.builds
-
- def subscribe(self, observer):
- self.observers.append(observer)
- for b in self.builds:
- observer(b)
- def unsubscribe(self, observer):
- self.observers.remove(observer)
-
-
-class BuildStepStatus:
- """
- I represent a collection of output status for a
- L{buildbot.process.step.BuildStep}.
-
- @type color: string
- @cvar color: color that this step feels best represents its
- current mood. yellow,green,red,orange are the
- most likely choices, although purple indicates
- an exception
- @type progress: L{buildbot.status.progress.StepProgress}
- @cvar progress: tracks ETA for the step
- @type text: list of strings
- @cvar text: list of short texts that describe the command and its status
- @type text2: list of strings
- @cvar text2: list of short texts added to the overall build description
- @type logs: dict of string -> L{buildbot.status.builder.LogFile}
- @ivar logs: logs of steps
- """
- # note that these are created when the Build is set up, before each
- # corresponding BuildStep has started.
- if implements:
- implements(interfaces.IBuildStepStatus, interfaces.IStatusEvent)
- else:
- __implements__ = interfaces.IBuildStepStatus, interfaces.IStatusEvent
-
- started = None
- finished = None
- progress = None
- text = []
- color = None
- results = (None, [])
- text2 = []
- watchers = []
- updates = {}
- finishedWatchers = []
-
- def __init__(self, parent):
- assert interfaces.IBuildStatus(parent)
- self.build = parent
- self.logs = []
- self.watchers = []
- self.updates = {}
- self.finishedWatchers = []
-
- def getName(self):
- """Returns a short string with the name of this step. This string
- may have spaces in it."""
- return self.name
-
- def getBuild(self):
- return self.build
-
- def getTimes(self):
- return (self.started, self.finished)
-
- def getExpectations(self):
- """Returns a list of tuples (name, current, target)."""
- if not self.progress:
- return []
- ret = []
- metrics = self.progress.progress.keys()
- metrics.sort()
- for m in metrics:
- t = (m, self.progress.progress[m], self.progress.expectations[m])
- ret.append(t)
- return ret
-
- def getLogs(self):
- return self.logs
-
-
- def isFinished(self):
- return (self.finished is not None)
-
- def waitUntilFinished(self):
- if self.finished:
- d = defer.succeed(self)
- else:
- d = defer.Deferred()
- self.finishedWatchers.append(d)
- return d
-
- # while the step is running, the following methods make sense.
- # Afterwards they return None
-
- def getETA(self):
- if self.started is None:
- return None # not started yet
- if self.finished is not None:
- return None # already finished
- if not self.progress:
- return None # no way to predict
- return self.progress.remaining()
-
- # Once you know the step has finished, the following methods are legal.
- # Before this step has finished, they all return None.
-
- def getText(self):
- """Returns a list of strings which describe the step. These are
- intended to be displayed in a narrow column. If more space is
- available, the caller should join them together with spaces before
- presenting them to the user."""
- return self.text
-
- def getColor(self):
- """Returns a single string with the color that should be used to
- display this step. 'green', 'orange', 'red', 'yellow' and 'purple'
- are the most likely ones."""
- return self.color
-
- def getResults(self):
- """Return a tuple describing the results of the step.
- 'result' is one of the constants in L{buildbot.status.builder}:
- SUCCESS, WARNINGS, FAILURE, or SKIPPED.
- 'strings' is an optional list of strings that the step wants to
- append to the overall build's results. These strings are usually
- more terse than the ones returned by getText(): in particular,
- successful Steps do not usually contribute any text to the
- overall build.
-
- @rtype: tuple of int, list of strings
- @returns: (result, strings)
- """
- return (self.results, self.text2)
-
- # subscription interface
-
- def subscribe(self, receiver, updateInterval=10):
- # will get logStarted, logFinished, stepETAUpdate
- assert receiver not in self.watchers
- self.watchers.append(receiver)
- self.sendETAUpdate(receiver, updateInterval)
-
- def sendETAUpdate(self, receiver, updateInterval):
- self.updates[receiver] = None
- # they might unsubscribe during stepETAUpdate
- receiver.stepETAUpdate(self.build, self,
- self.getETA(), self.getExpectations())
- if receiver in self.watchers:
- self.updates[receiver] = reactor.callLater(updateInterval,
- self.sendETAUpdate,
- receiver,
- updateInterval)
-
- def unsubscribe(self, receiver):
- if receiver in self.watchers:
- self.watchers.remove(receiver)
- if receiver in self.updates:
- if self.updates[receiver] is not None:
- self.updates[receiver].cancel()
- del self.updates[receiver]
-
-
- # methods to be invoked by the BuildStep
-
- def setName(self, stepname):
- self.name = stepname
-
- def setProgress(self, stepprogress):
- self.progress = stepprogress
-
- def stepStarted(self):
- self.started = util.now()
- if self.build:
- self.build.stepStarted(self)
-
- def addLog(self, name):
- assert self.started # addLog before stepStarted won't notify watchers
- logfilename = self.build.generateLogfileName(self.name, name)
- log = LogFile(self, name, logfilename)
- self.logs.append(log)
- for w in self.watchers:
- receiver = w.logStarted(self.build, self, log)
- if receiver:
- log.subscribe(receiver, True)
- d = log.waitUntilFinished()
- d.addCallback(lambda log: log.unsubscribe(receiver))
- d = log.waitUntilFinished()
- d.addCallback(self.logFinished)
- return log
-
- def addHTMLLog(self, name, html):
- assert self.started # addLog before stepStarted won't notify watchers
- logfilename = self.build.generateLogfileName(self.name, name)
- log = HTMLLogFile(self, name, logfilename, html)
- self.logs.append(log)
- for w in self.watchers:
- receiver = w.logStarted(self.build, self, log)
- # TODO: think about this: there isn't much point in letting
- # them subscribe
- #if receiver:
- # log.subscribe(receiver, True)
- w.logFinished(self.build, self, log)
-
- def logFinished(self, log):
- for w in self.watchers:
- w.logFinished(self.build, self, log)
-
- def setColor(self, color):
- self.color = color
- def setText(self, text):
- self.text = text
- def setText2(self, text):
- self.text2 = text
-
- def stepFinished(self, results):
- self.finished = util.now()
- self.results = results
- for loog in self.logs:
- if not loog.isFinished():
- loog.finish()
-
- for r in self.updates.keys():
- if self.updates[r] is not None:
- self.updates[r].cancel()
- del self.updates[r]
-
- watchers = self.finishedWatchers
- self.finishedWatchers = []
- for w in watchers:
- w.callback(self)
-
- # persistence
-
- def __getstate__(self):
- d = self.__dict__.copy()
- del d['build'] # filled in when loading
- if d.has_key('progress'):
- del d['progress']
- del d['watchers']
- del d['finishedWatchers']
- del d['updates']
- return d
-
- def __setstate__(self, d):
- self.__dict__ = d
- # self.build must be filled in by our parent
- for loog in self.logs:
- loog.step = self
-
-
-class BuildStatus(styles.Versioned):
- if implements:
- implements(interfaces.IBuildStatus, interfaces.IStatusEvent)
- else:
- __implements__ = interfaces.IBuildStatus, interfaces.IStatusEvent
- persistenceVersion = 2
-
- source = None
- username = None
- reason = None
- changes = []
- blamelist = []
- progress = None
- started = None
- finished = None
- currentStep = None
- text = []
- color = None
- results = None
- slavename = "???"
-
- # these lists/dicts are defined here so that unserialized instances have
- # (empty) values. They are set in __init__ to new objects to make sure
- # each instance gets its own copy.
- watchers = []
- updates = {}
- finishedWatchers = []
- testResults = {}
-
- def __init__(self, parent, number):
- """
- @type parent: L{BuilderStatus}
- @type number: int
- """
- assert interfaces.IBuilderStatus(parent)
- self.builder = parent
- self.number = number
- self.watchers = []
- self.updates = {}
- self.finishedWatchers = []
- self.steps = []
- self.testResults = {}
- self.properties = {}
-
- # IBuildStatus
-
- def getBuilder(self):
- """
- @rtype: L{BuilderStatus}
- """
- return self.builder
-
- def getProperty(self, propname):
- return self.properties[propname]
-
- def getNumber(self):
- return self.number
-
- def getPreviousBuild(self):
- if self.number == 0:
- return None
- return self.builder.getBuild(self.number-1)
-
- def getSourceStamp(self):
- return (self.source.branch, self.source.revision, self.source.patch)
-
- def getUsername(self):
- return self.username
-
- def getReason(self):
- return self.reason
-
- def getChanges(self):
- return self.changes
-
- def getResponsibleUsers(self):
- return self.blamelist
-
- def getInterestedUsers(self):
- # TODO: the Builder should add others: sheriffs, domain-owners
- return self.blamelist
-
- def getSteps(self):
- """Return a list of IBuildStepStatus objects. For invariant builds
- (those which always use the same set of Steps), this should be the
- complete list; however, some of the steps may not have started yet
- (step.getTimes()[0] will be None). For variant builds, this may not
- be complete (asking again later may give you more of them)."""
- return self.steps
-
- def getTimes(self):
- return (self.started, self.finished)
-
- def isFinished(self):
- return (self.finished is not None)
-
- def waitUntilFinished(self):
- if self.finished:
- d = defer.succeed(self)
- else:
- d = defer.Deferred()
- self.finishedWatchers.append(d)
- return d
-
- # while the build is running, the following methods make sense.
- # Afterwards they return None
-
- def getETA(self):
- if self.finished is not None:
- return None
- if not self.progress:
- return None
- eta = self.progress.eta()
- if eta is None:
- return None
- return eta - util.now()
-
- def getCurrentStep(self):
- return self.currentStep
-
- # Once you know the build has finished, the following methods are legal.
- # Before this build has finished, they all return None.
-
- def getText(self):
- text = []
- text.extend(self.text)
- for s in self.steps:
- text.extend(s.text2)
- return text
-
- def getColor(self):
- return self.color
-
- def getResults(self):
- return self.results
-
- def getSlavename(self):
- return self.slavename
-
- def getTestResults(self):
- return self.testResults
-
- def getLogs(self):
- # TODO: steps should contribute significant logs instead of this
- # hack, which returns every log from every step. The logs should get
- # names like "compile" and "test" instead of "compile.output"
- logs = []
- for s in self.steps:
- for log in s.getLogs():
- logs.append(log)
- return logs
-
- # subscription interface
-
- def subscribe(self, receiver, updateInterval=None):
- # will receive stepStarted and stepFinished messages
- # and maybe buildETAUpdate
- self.watchers.append(receiver)
- if updateInterval is not None:
- self.sendETAUpdate(receiver, updateInterval)
-
- def sendETAUpdate(self, receiver, updateInterval):
- self.updates[receiver] = None
- ETA = self.getETA()
- if ETA is not None:
- receiver.buildETAUpdate(self, self.getETA())
- # they might have unsubscribed during buildETAUpdate
- if receiver in self.watchers:
- self.updates[receiver] = reactor.callLater(updateInterval,
- self.sendETAUpdate,
- receiver,
- updateInterval)
-
- def unsubscribe(self, receiver):
- if receiver in self.watchers:
- self.watchers.remove(receiver)
- if receiver in self.updates:
- if self.updates[receiver] is not None:
- self.updates[receiver].cancel()
- del self.updates[receiver]
-
- # methods for the base.Build to invoke
-
- def addStep(self, step):
- """The Build is setting up, and has added a new BuildStep to its
- list. The BuildStep object is ready for static queries (everything
- except ETA). Give it a BuildStepStatus object to which it can send
- status updates."""
-
- s = BuildStepStatus(self)
- s.setName(step.name)
- step.step_status = s
- self.steps.append(s)
-
- def setProperty(self, propname, value):
- self.properties[propname] = value
-
- def addTestResult(self, result):
- self.testResults[result.getName()] = result
-
- def setSourceStamp(self, sourceStamp):
- self.source = sourceStamp
- self.changes = self.source.changes
-
- def setUsername(self, username):
- self.username = username
- def setReason(self, reason):
- self.reason = reason
- def setBlamelist(self, blamelist):
- self.blamelist = blamelist
- def setProgress(self, progress):
- self.progress = progress
-
- def buildStarted(self, build):
- """The Build has been set up and is about to be started. It can now
- be safely queried, so it is time to announce the new build."""
-
- self.started = util.now()
- # now that we're ready to report status, let the BuilderStatus tell
- # the world about us
- self.builder.buildStarted(self)
-
- def setSlavename(self, slavename):
- self.slavename = slavename
-
- def setText(self, text):
- assert isinstance(text, (list, tuple))
- self.text = text
- def setColor(self, color):
- self.color = color
- def setResults(self, results):
- self.results = results
-
- def buildFinished(self):
- self.currentStep = None
- self.finished = util.now()
-
- for r in self.updates.keys():
- if self.updates[r] is not None:
- self.updates[r].cancel()
- del self.updates[r]
-
- watchers = self.finishedWatchers
- self.finishedWatchers = []
- for w in watchers:
- w.callback(self)
-
- # methods called by our BuildStepStatus children
-
- def stepStarted(self, step):
- self.currentStep = step
- name = self.getBuilder().getName()
- for w in self.watchers:
- receiver = w.stepStarted(self, step)
- if receiver:
- if type(receiver) == type(()):
- step.subscribe(receiver[0], receiver[1])
- else:
- step.subscribe(receiver)
- d = step.waitUntilFinished()
- d.addCallback(lambda step: step.unsubscribe(receiver))
-
- step.waitUntilFinished().addCallback(self._stepFinished)
-
- def _stepFinished(self, step):
- results = step.getResults()
- for w in self.watchers:
- w.stepFinished(self, step, results)
-
- # methods called by our BuilderStatus parent
-
- def pruneLogs(self):
- # this build is somewhat old: remove the build logs to save space
- # TODO: delete logs visible through IBuildStatus.getLogs
- for s in self.steps:
- s.pruneLogs()
-
- def pruneSteps(self):
- # this build is very old: remove the build steps too
- self.steps = []
-
- # persistence stuff
-
- def generateLogfileName(self, stepname, logname):
- """Return a filename (relative to the Builder's base directory) where
- the logfile's contents can be stored uniquely.
-
- The base filename is made by combining our build number, the Step's
- name, and the log's name, then removing unsuitable characters. The
- filename is then made unique by appending _0, _1, etc, until it does
- not collide with any other logfile.
-
- These files are kept in the Builder's basedir (rather than a
- per-Build subdirectory) because that makes cleanup easier: cron and
- find will help get rid of the old logs, but the empty directories are
- more of a hassle to remove."""
-
- starting_filename = "%d-log-%s-%s" % (self.number, stepname, logname)
- starting_filename = re.sub(r'[^\w\.\-]', '_', starting_filename)
- # now make it unique
- unique_counter = 0
- filename = starting_filename
- while filename in [l.filename
- for step in self.steps
- for l in step.getLogs()
- if l.filename]:
- filename = "%s_%d" % (starting_filename, unique_counter)
- unique_counter += 1
- return filename
-
- def __getstate__(self):
- d = styles.Versioned.__getstate__(self)
- # for now, a serialized Build is always "finished". We will never
- # save unfinished builds.
- if not self.finished:
- d['finished'] = True
- # TODO: push an "interrupted" step so it is clear that the build
- # was interrupted. The builder will have a 'shutdown' event, but
- # someone looking at just this build will be confused as to why
- # the last log is truncated.
- del d['builder'] # filled in by our parent when loading
- del d['watchers']
- del d['updates']
- del d['finishedWatchers']
- return d
-
- def __setstate__(self, d):
- styles.Versioned.__setstate__(self, d)
- # self.builder must be filled in by our parent when loading
- for step in self.steps:
- step.build = self
- self.watchers = []
- self.updates = {}
- self.finishedWatchers = []
-
- def upgradeToVersion1(self):
- if hasattr(self, "sourceStamp"):
- # the old .sourceStamp attribute wasn't actually very useful
- maxChangeNumber, patch = self.sourceStamp
- changes = getattr(self, 'changes', [])
- source = sourcestamp.SourceStamp(branch=None,
- revision=None,
- patch=patch,
- changes=changes)
- self.source = source
- self.changes = source.changes
- del self.sourceStamp
-
- def upgradeToVersion2(self):
- self.properties = {}
-
- def upgradeLogfiles(self):
- # upgrade any LogFiles that need it. This must occur after we've been
- # attached to our Builder, and after we know about all LogFiles of
- # all Steps (to get the filenames right).
- assert self.builder
- for s in self.steps:
- for l in s.getLogs():
- if l.filename:
- pass # new-style, log contents are on disk
- else:
- logfilename = self.generateLogfileName(s.name, l.name)
- # let the logfile update its .filename pointer,
- # transferring its contents onto disk if necessary
- l.upgrade(logfilename)
-
- def saveYourself(self):
- filename = os.path.join(self.builder.basedir, "%d" % self.number)
- if os.path.isdir(filename):
- # leftover from 0.5.0, which stored builds in directories
- shutil.rmtree(filename, ignore_errors=True)
- tmpfilename = filename + ".tmp"
- try:
- pickle.dump(self, open(tmpfilename, "wb"), -1)
- if sys.platform == 'win32':
- # windows cannot rename a file on top of an existing one, so
- # fall back to delete-first. There are ways this can fail and
- # lose the builder's history, so we avoid using it in the
- # general (non-windows) case
- if os.path.exists(filename):
- os.unlink(filename)
- os.rename(tmpfilename, filename)
- except:
- log.msg("unable to save build %s-#%d" % (self.builder.name,
- self.number))
- log.err()
-
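saveYourself() above writes the pickle to a ".tmp" file and then renames it over the old copy, so a crash mid-write cannot clobber the previous pickle; on win32 the destination must be unlinked first because rename() there will not replace an existing file. A minimal sketch of that pattern, assuming any picklable object:

    import os, pickle, sys

    def save_pickle(obj, filename):
        tmp = filename + ".tmp"
        with open(tmp, "wb") as f:
            pickle.dump(obj, f, -1)      # write the new copy next to the old one
        if sys.platform == 'win32' and os.path.exists(filename):
            os.unlink(filename)          # win32 rename cannot replace an existing file
        os.rename(tmp, filename)         # atomic replace on POSIX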
-
-
-class BuilderStatus(styles.Versioned):
- """I handle status information for a single process.base.Builder object.
- That object sends status changes to me (frequently as Events), and I
- provide them on demand to the various status recipients, like the HTML
- waterfall display and the live status clients. It also sends build
- summaries to me, which I log and provide to status clients who aren't
- interested in seeing details of the individual build steps.
-
- I am responsible for maintaining the list of historic Events and Builds,
- pruning old ones, and loading them from / saving them to disk.
-
- I live in the buildbot.process.base.Builder object, in the .statusbag
- attribute.
-
- @type category: string
- @ivar category: user-defined category this builder belongs to; can be
- used to filter on in status clients
- """
-
- if implements:
- implements(interfaces.IBuilderStatus)
- else:
- __implements__ = interfaces.IBuilderStatus,
- persistenceVersion = 1
-
- # these limit the amount of memory we consume, as well as the size of the
- # main Builder pickle. The Build and LogFile pickles on disk must be
- # handled separately.
- buildCacheSize = 30
- buildHorizon = 100 # forget builds beyond this
- stepHorizon = 50 # forget steps in builds beyond this
-
- category = None
- currentBigState = "offline" # or idle/waiting/interlocked/building
- basedir = None # filled in by our parent
-
- def __init__(self, buildername, category=None):
- self.name = buildername
- self.category = category
-
- self.slavenames = []
- self.events = []
- # these three hold Events, and are used to retrieve the current
- # state of the boxes.
- self.lastBuildStatus = None
- #self.currentBig = None
- #self.currentSmall = None
- self.currentBuilds = []
- self.pendingBuilds = []
- self.nextBuild = None
- self.watchers = []
- self.buildCache = [] # TODO: age builds out of the cache
-
- # persistence
-
- def __getstate__(self):
- # when saving, don't record transient stuff like what builds are
- # currently running, because they won't be there when we start back
- # up. Nor do we save self.watchers, nor anything that gets set by our
- # parent like .basedir and .status
- d = styles.Versioned.__getstate__(self)
- d['watchers'] = []
- del d['buildCache']
- for b in self.currentBuilds:
- b.saveYourself()
- # TODO: push a 'hey, build was interrupted' event
- del d['currentBuilds']
- del d['pendingBuilds']
- del d['currentBigState']
- del d['basedir']
- del d['status']
- del d['nextBuildNumber']
- return d
-
- def __setstate__(self, d):
- # when loading, re-initialize the transient stuff. Remember that
- # upgradeToVersion1 and such will be called after this finishes.
- styles.Versioned.__setstate__(self, d)
- self.buildCache = []
- self.currentBuilds = []
- self.pendingBuilds = []
- self.watchers = []
- self.slavenames = []
- # self.basedir must be filled in by our parent
- # self.status must be filled in by our parent
-
- def upgradeToVersion1(self):
- if hasattr(self, 'slavename'):
- self.slavenames = [self.slavename]
- del self.slavename
- if hasattr(self, 'nextBuildNumber'):
- del self.nextBuildNumber # determineNextBuildNumber chooses this
-
- def determineNextBuildNumber(self):
- """Scan our directory of saved BuildStatus instances to determine
- what our self.nextBuildNumber should be. Set it one larger than the
- highest-numbered build we discover. This is called by the top-level
- Status object shortly after we are created or loaded from disk.
- """
- existing_builds = [int(f)
- for f in os.listdir(self.basedir)
- if re.match("^\d+$", f)]
- if existing_builds:
- self.nextBuildNumber = max(existing_builds) + 1
- else:
- self.nextBuildNumber = 0
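determineNextBuildNumber() recovers the counter purely from the filenames on disk: any directory entry whose name is all digits is taken to be a saved build. A standalone sketch of the same scan (basedir is a hypothetical path):

    import os, re

    def next_build_number(basedir):
        builds = [int(f) for f in os.listdir(basedir) if re.match(r"^\d+$", f)]
        return max(builds) + 1 if builds else 0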
-
- def saveYourself(self):
- for b in self.buildCache:
- if not b.isFinished():
- # interrupted build, need to save it anyway.
- # BuildStatus.saveYourself will mark it as interrupted.
- b.saveYourself()
- filename = os.path.join(self.basedir, "builder")
- tmpfilename = filename + ".tmp"
- try:
- pickle.dump(self, open(tmpfilename, "wb"), -1)
- if sys.platform == 'win32':
- # windows cannot rename a file on top of an existing one
- if os.path.exists(filename):
- os.unlink(filename)
- os.rename(tmpfilename, filename)
- except:
- log.msg("unable to save builder %s" % self.name)
- log.err()
-
-
- # build cache management
-
- def addBuildToCache(self, build):
- if build in self.buildCache:
- return
- self.buildCache.append(build)
- while len(self.buildCache) > self.buildCacheSize:
- self.buildCache.pop(0)
-
- def getBuildByNumber(self, number):
- for b in self.currentBuilds:
- if b.number == number:
- return b
- for build in self.buildCache:
- if build.number == number:
- return build
- filename = os.path.join(self.basedir, "%d" % number)
- try:
- build = pickle.load(open(filename, "rb"))
- styles.doUpgrade()
- build.builder = self
- # handle LogFiles from after 0.5.0 and before 0.6.5
- build.upgradeLogfiles()
- self.addBuildToCache(build)
- return build
- except IOError:
- raise IndexError("no such build %d" % number)
- except EOFError:
- raise IndexError("corrupted build pickle %d" % number)
-
- def prune(self):
- return # TODO: change this to walk through the filesystem
- # first, blow away all builds beyond our build horizon
- self.builds = self.builds[-self.buildHorizon:]
- # then prune steps in builds past the step horizon
- for b in self.builds[0:-self.stepHorizon]:
- b.pruneSteps()
-
- # IBuilderStatus methods
- def getName(self):
- return self.name
-
- def getState(self):
- return (self.currentBigState, self.currentBuilds)
-
- def getSlaves(self):
- return [self.status.getSlave(name) for name in self.slavenames]
-
- def getPendingBuilds(self):
- return self.pendingBuilds
-
- def getCurrentBuilds(self):
- return self.currentBuilds
-
- def getLastFinishedBuild(self):
- b = self.getBuild(-1)
- if not (b and b.isFinished()):
- b = self.getBuild(-2)
- return b
-
- def getBuild(self, number):
- if number < 0:
- number = self.nextBuildNumber + number
- if number < 0 or number >= self.nextBuildNumber:
- return None
-
- try:
- return self.getBuildByNumber(number)
- except IndexError:
- return None
-
- def getEvent(self, number):
- try:
- return self.events[number]
- except IndexError:
- return None
-
- def eventGenerator(self):
- """This function creates a generator which will provide all of this
- Builder's status events, starting with the most recent and
- progressing backwards in time. """
-
- # remember the newest-to-oldest flow here. "next" means earlier.
-
- # TODO: interleave build steps and self.events by timestamp
-
- eventIndex = -1
- e = self.getEvent(eventIndex)
- for Nb in range(1, self.nextBuildNumber+1):
- b = self.getBuild(-Nb)
- if not b:
- break
- steps = b.getSteps()
- for Ns in range(1, len(steps)+1):
- if steps[-Ns].started:
- step_start = steps[-Ns].getTimes()[0]
- while e is not None and e.getTimes()[0] > step_start:
- yield e
- eventIndex -= 1
- e = self.getEvent(eventIndex)
- yield steps[-Ns]
- yield b
- while e is not None:
- yield e
- eventIndex -= 1
- e = self.getEvent(eventIndex)
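eventGenerator() is essentially a merge of two already-newest-first streams (the builder's own Events and the steps of each build), ordered by start time. A sketch of that merge idea on plain (start_time, item) pairs, independent of the status classes:

    def merge_newest_first(a, b):
        # a and b are lists of (start_time, item) pairs, each sorted newest-first
        ia = ib = 0
        while ia < len(a) and ib < len(b):
            if a[ia][0] >= b[ib][0]:
                yield a[ia][1]; ia += 1
            else:
                yield b[ib][1]; ib += 1
        for _, item in a[ia:] + b[ib:]:
            yield item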
-
- def subscribe(self, receiver):
- # will get builderChangedState, buildStarted, and buildFinished
- self.watchers.append(receiver)
- self.publishState(receiver)
-
- def unsubscribe(self, receiver):
- self.watchers.remove(receiver)
-
- ## Builder interface (methods called by the Builder which feeds us)
-
- def setSlavenames(self, names):
- self.slavenames = names
-
- def addEvent(self, text=[], color=None):
- # this adds a duration event. When it is done, the user should call
- # e.finish(). They can also mangle it by modifying .text and .color
- e = Event()
- e.started = util.now()
- e.text = text
- e.color = color
- self.events.append(e)
- return e # they are free to mangle it further
-
- def addPointEvent(self, text=[], color=None):
- # this adds a point event, one which occurs as a single atomic
- # instant of time.
- e = Event()
- e.started = util.now()
- e.finished = 0
- e.text = text
- e.color = color
- self.events.append(e)
- return e # for consistency, but they really shouldn't touch it
-
- def setBigState(self, state):
- needToUpdate = state != self.currentBigState
- self.currentBigState = state
- if needToUpdate:
- self.publishState()
-
- def publishState(self, target=None):
- state = self.currentBigState
-
- if target is not None:
- # unicast
- target.builderChangedState(self.name, state)
- return
- for w in self.watchers:
- w.builderChangedState(self.name, state)
-
- def newBuild(self):
- """The Builder has decided to start a build, but the Build object is
- not yet ready to report status (it has not finished creating the
- Steps). Create a BuildStatus object that it can use."""
- number = self.nextBuildNumber
- self.nextBuildNumber += 1
- # TODO: self.saveYourself(), to make sure we don't forget about the
- # build number we've just allocated. This is not quite as important
- # as it was before we switched to determineNextBuildNumber, but I think
- # it may still be useful to have the new build save itself.
- s = BuildStatus(self, number)
- s.waitUntilFinished().addCallback(self._buildFinished)
- return s
-
- def addBuildRequest(self, brstatus):
- self.pendingBuilds.append(brstatus)
- def removeBuildRequest(self, brstatus):
- self.pendingBuilds.remove(brstatus)
-
- # buildStarted is called by our child BuildStatus instances
- def buildStarted(self, s):
- """Now the BuildStatus object is ready to go (it knows all of its
- Steps, its ETA, etc), so it is safe to notify our watchers."""
-
- assert s.builder is self # paranoia
- assert s.number == self.nextBuildNumber - 1
- assert s not in self.currentBuilds
- self.currentBuilds.append(s)
- self.addBuildToCache(s)
-
- # now that the BuildStatus is prepared to answer queries, we can
- # announce the new build to all our watchers
-
- for w in self.watchers: # TODO: maybe do this later? callLater(0)?
- receiver = w.buildStarted(self.getName(), s)
- if receiver:
- if type(receiver) == type(()):
- s.subscribe(receiver[0], receiver[1])
- else:
- s.subscribe(receiver)
- d = s.waitUntilFinished()
- d.addCallback(lambda s: s.unsubscribe(receiver))
-
-
- def _buildFinished(self, s):
- assert s in self.currentBuilds
- s.saveYourself()
- self.currentBuilds.remove(s)
-
- name = self.getName()
- results = s.getResults()
- for w in self.watchers:
- w.buildFinished(name, s, results)
-
- self.prune() # conserve disk
-
-
- # waterfall display (history)
-
- # I want some kind of build event that holds everything about the build:
- # why, what changes went into it, the results of the build, itemized
- # test results, etc. But, I do kind of need something to be inserted in
- # the event log first, because intermixing step events and the larger
- # build event is fraught with peril. Maybe an Event-like-thing that
- # doesn't have a file in it but does have links. Hmm, that's exactly
- # what it does now. The only difference would be that this event isn't
- # pushed to the clients.
-
- # publish to clients
- def sendLastBuildStatus(self, client):
- #client.newLastBuildStatus(self.lastBuildStatus)
- pass
- def sendCurrentActivityBigToEveryone(self):
- for s in self.subscribers:
- self.sendCurrentActivityBig(s)
- def sendCurrentActivityBig(self, client):
- state = self.currentBigState
- if state == "offline":
- client.currentlyOffline()
- elif state == "idle":
- client.currentlyIdle()
- elif state == "building":
- client.currentlyBuilding()
- else:
- log.msg("Hey, self.currentBigState is weird:", state)
-
-
- ## HTML display interface
-
- def getEventNumbered(self, num):
- # deal with dropped events, pruned events
- first = self.events[0].number
- if first + len(self.events)-1 != self.events[-1].number:
- log.msg(self,
- "lost an event somewhere: [0] is %d, [%d] is %d" % \
- (self.events[0].number,
- len(self.events) - 1,
- self.events[-1].number))
- for e in self.events:
- log.msg("e[%d]: " % e.number, e)
- return None
- offset = num - first
- log.msg(self, "offset", offset)
- try:
- return self.events[offset]
- except IndexError:
- return None
-
- ## Persistence of Status
- def loadYourOldEvents(self):
- if hasattr(self, "allEvents"):
- # first time, nothing to get from file. Note that this is only if
- # the Application gets .run() . If it gets .save()'ed, then the
- # .allEvents attribute goes away in the initial __getstate__ and
- # we try to load a non-existent file.
- return
- self.allEvents = self.loadFile("events", [])
- if self.allEvents:
- self.nextEventNumber = self.allEvents[-1].number + 1
- else:
- self.nextEventNumber = 0
- def saveYourOldEvents(self):
- self.saveFile("events", self.allEvents)
-
- ## clients
-
- def addClient(self, client):
- if client not in self.subscribers:
- self.subscribers.append(client)
- self.sendLastBuildStatus(client)
- self.sendCurrentActivityBig(client)
- client.newEvent(self.currentSmall)
- def removeClient(self, client):
- if client in self.subscribers:
- self.subscribers.remove(client)
-
-class SlaveStatus:
- if implements:
- implements(interfaces.ISlaveStatus)
- else:
- __implements__ = interfaces.ISlaveStatus,
-
- admin = None
- host = None
- connected = False
-
- def __init__(self, name):
- self.name = name
-
- def getName(self):
- return self.name
- def getAdmin(self):
- return self.admin
- def getHost(self):
- return self.host
- def isConnected(self):
- return self.connected
-
-class Status:
- """
- I represent the status of the buildmaster.
- """
- if implements:
- implements(interfaces.IStatus)
- else:
- __implements__ = interfaces.IStatus,
-
- def __init__(self, botmaster, basedir):
- """
- @type botmaster: L{buildbot.master.BotMaster}
- @param botmaster: the Status object uses C{.botmaster} to get at
- both the L{buildbot.master.BuildMaster} (for
- various buildbot-wide parameters) and the
- actual Builders (to get at their L{BuilderStatus}
- objects). It is not allowed to change or influence
- anything through this reference.
- @type basedir: string
- @param basedir: this provides a base directory in which saved status
- information (changes.pck, saved Build status
- pickles) can be stored
- """
- self.botmaster = botmaster
- self.basedir = basedir
- self.watchers = []
- self.activeBuildSets = []
- assert os.path.isdir(basedir)
-
-
- # methods called by our clients
-
- def getProjectName(self):
- return self.botmaster.parent.projectName
- def getProjectURL(self):
- return self.botmaster.parent.projectURL
- def getBuildbotURL(self):
- return self.botmaster.parent.buildbotURL
-
- def getURLForThing(self, thing):
- prefix = self.getBuildbotURL()
- if not prefix:
- return None
- if providedBy(thing, interfaces.IStatus):
- return prefix
- if providedBy(thing, interfaces.ISchedulerStatus):
- pass
- if providedBy(thing, interfaces.IBuilderStatus):
- builder = thing
- return prefix + urllib.quote(builder.getName(), safe='')
- if providedBy(thing, interfaces.IBuildStatus):
- build = thing
- builder = build.getBuilder()
- return "%s%s/builds/%d" % (
- prefix,
- urllib.quote(builder.getName(), safe=''),
- build.getNumber())
- if providedBy(thing, interfaces.IBuildStepStatus):
- step = thing
- build = step.getBuild()
- builder = build.getBuilder()
- return "%s%s/builds/%d/%s" % (
- prefix,
- urllib.quote(builder.getName(), safe=''),
- build.getNumber(),
- "step-" + urllib.quote(step.getName(), safe=''))
- # IBuildSetStatus
- # IBuildRequestStatus
- # ISlaveStatus
-
- # IStatusEvent
- if providedBy(thing, interfaces.IStatusEvent):
- from buildbot.changes import changes
- # TODO: this is goofy, create IChange or something
- if isinstance(thing, changes.Change):
- change = thing
- return "%schanges/%d" % (prefix, change.number)
-
- if providedBy(thing, interfaces.IStatusLog):
- log = thing
- step = log.getStep()
- build = step.getBuild()
- builder = build.getBuilder()
-
- logs = step.getLogs()
- for i in range(len(logs)):
- if log is logs[i]:
- lognum = i
- break
- else:
- return None
- return "%s%s/builds/%d/%s/%d" % (
- prefix,
- urllib.quote(builder.getName(), safe=''),
- build.getNumber(),
- "step-" + urllib.quote(step.getName(), safe=''),
- lognum)
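For a hypothetical buildbotURL of http://buildbot.example.org/, the format strings above produce URLs of the following shapes (builder name 'quick', build 5, step 'compile', log index 0 and change 42 are made-up values; builder and step names are URL-quoted):

    http://buildbot.example.org/                                (IStatus)
    http://buildbot.example.org/quick                           (IBuilderStatus)
    http://buildbot.example.org/quick/builds/5                  (IBuildStatus)
    http://buildbot.example.org/quick/builds/5/step-compile     (IBuildStepStatus)
    http://buildbot.example.org/quick/builds/5/step-compile/0   (IStatusLog)
    http://buildbot.example.org/changes/42                      (changes.Change)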
-
-
- def getSchedulers(self):
- return self.botmaster.parent.allSchedulers()
-
- def getBuilderNames(self, categories=None):
- if categories == None:
- return self.botmaster.builderNames[:] # don't let them break it
-
- l = []
- # respect addition order
- for name in self.botmaster.builderNames:
- builder = self.botmaster.builders[name]
- if builder.builder_status.category in categories:
- l.append(name)
- return l
-
- def getBuilder(self, name):
- """
- @rtype: L{BuilderStatus}
- """
- return self.botmaster.builders[name].builder_status
-
- def getSlave(self, slavename):
- return self.botmaster.slaves[slavename].slave_status
-
- def getBuildSets(self):
- return self.activeBuildSets[:]
-
- def subscribe(self, target):
- self.watchers.append(target)
- for name in self.botmaster.builderNames:
- self.announceNewBuilder(target, name, self.getBuilder(name))
- def unsubscribe(self, target):
- self.watchers.remove(target)
-
-
- # methods called by upstream objects
-
- def announceNewBuilder(self, target, name, builder_status):
- t = target.builderAdded(name, builder_status)
- if t:
- builder_status.subscribe(t)
-
- def builderAdded(self, name, basedir, category=None):
- """
- @rtype: L{BuilderStatus}
- """
- filename = os.path.join(self.basedir, basedir, "builder")
- log.msg("trying to load status pickle from %s" % filename)
- builder_status = None
- try:
- builder_status = pickle.load(open(filename, "rb"))
- styles.doUpgrade()
- except IOError:
- log.msg("no saved status pickle, creating a new one")
- except:
- log.msg("error while loading status pickle, creating a new one")
- log.msg("error follows:")
- log.err()
- if not builder_status:
- builder_status = BuilderStatus(name, category)
- builder_status.addPointEvent(["builder", "created"])
- log.msg("added builder %s in category %s" % (name, category))
- # an unpickled object might not have category set from before,
- # so set it here to make sure
- builder_status.category = category
- builder_status.basedir = os.path.join(self.basedir, basedir)
- builder_status.name = name # it might have been updated
- builder_status.status = self
-
- if not os.path.isdir(builder_status.basedir):
- os.mkdir(builder_status.basedir)
- builder_status.determineNextBuildNumber()
-
- builder_status.setBigState("offline")
-
- for t in self.watchers:
- self.announceNewBuilder(t, name, builder_status)
-
- return builder_status
-
- def builderRemoved(self, name):
- for t in self.watchers:
- t.builderRemoved(name)
-
- def prune(self):
- for b in self.botmaster.builders.values():
- b.builder_status.prune()
-
- def buildsetSubmitted(self, bss):
- self.activeBuildSets.append(bss)
- bss.waitUntilFinished().addCallback(self.activeBuildSets.remove)
- for t in self.watchers:
- t.buildsetSubmitted(bss)
diff --git a/buildbot/buildbot-source/buildbot/status/classic.css b/buildbot/buildbot-source/buildbot/status/classic.css
deleted file mode 100644
index 4f56a8a56..000000000
--- a/buildbot/buildbot-source/buildbot/status/classic.css
+++ /dev/null
@@ -1,39 +0,0 @@
-a:visited {
- color: #800080;
-}
-
-td.Event, td.BuildStep, td.Activity, td.Change, td.Time, td.Builder {
- border-top: 1px solid;
- border-right: 1px solid;
-}
-
-/* Activity states */
-.offline {
- background-color: red;
-}
-.idle {
- background-color: white;
-}
-.waiting {
- background-color: yellow;
-}
-.building {
- background-color: yellow;
-}
-
-/* LastBuild, BuildStep states */
-.success {
- background-color: #72ff75;
-}
-.failure {
- background-color: red;
-}
-.warnings {
- background-color: #ff8000;
-}
-.exception {
- background-color: #c000c0;
-}
-.start,.running {
- background-color: yellow;
-}
diff --git a/buildbot/buildbot-source/buildbot/status/client.py b/buildbot/buildbot-source/buildbot/status/client.py
deleted file mode 100644
index 7e2b17c12..000000000
--- a/buildbot/buildbot-source/buildbot/status/client.py
+++ /dev/null
@@ -1,573 +0,0 @@
-# -*- test-case-name: buildbot.test.test_status -*-
-
-from twisted.spread import pb
-from twisted.python import log, components
-from twisted.python.failure import Failure
-from twisted.internet import defer, reactor
-from twisted.application import service, strports
-from twisted.cred import portal, checkers
-
-from buildbot import util, interfaces
-from buildbot.twcompat import Interface, implements
-from buildbot.status import builder, base
-from buildbot.changes import changes
-
-class IRemote(Interface):
- pass
-
-def makeRemote(obj):
- # we want IRemote(None) to be None, but you can't really do that with
- # adapters, so we fake it
- if obj is None:
- return None
- return IRemote(obj)
-
-
-class RemoteBuildSet(pb.Referenceable):
- def __init__(self, buildset):
- self.b = buildset
-
- def remote_getSourceStamp(self):
- return self.b.getSourceStamp()
-
- def remote_getReason(self):
- return self.b.getReason()
-
- def remote_getID(self):
- return self.b.getID()
-
- def remote_getBuilderNames(self):
- return self.b.getBuilderNames()
-
- def remote_getBuildRequests(self):
- """Returns a list of (builderName, BuildRequest) tuples."""
- return [(br.getBuilderName(), IRemote(br))
- for br in self.b.getBuildRequests()]
-
- def remote_isFinished(self):
- return self.b.isFinished()
-
- def remote_waitUntilSuccess(self):
- d = self.b.waitUntilSuccess()
- d.addCallback(lambda res: self)
- return d
-
- def remote_waitUntilFinished(self):
- d = self.b.waitUntilFinished()
- d.addCallback(lambda res: self)
- return d
-
- def remote_getResults(self):
- return self.b.getResults()
-
-components.registerAdapter(RemoteBuildSet,
- interfaces.IBuildSetStatus, IRemote)
-
-
-class RemoteBuilder(pb.Referenceable):
- def __init__(self, builder):
- self.b = builder
-
- def remote_getName(self):
- return self.b.getName()
-
- def remote_getState(self):
- state, builds = self.b.getState()
- return (state,
- None, # TODO: remove leftover ETA
- [makeRemote(b) for b in builds])
-
- def remote_getSlaves(self):
- return [IRemote(s) for s in self.b.getSlaves()]
-
- def remote_getLastFinishedBuild(self):
- return makeRemote(self.b.getLastFinishedBuild())
-
- def remote_getCurrentBuilds(self):
- return makeRemote(self.b.getCurrentBuilds())
-
- def remote_getBuild(self, number):
- return makeRemote(self.b.getBuild(number))
-
- def remote_getEvent(self, number):
- return IRemote(self.b.getEvent(number))
-
-components.registerAdapter(RemoteBuilder,
- interfaces.IBuilderStatus, IRemote)
-
-
-class RemoteBuildRequest(pb.Referenceable):
- def __init__(self, buildreq):
- self.b = buildreq
- self.observers = []
-
- def remote_getSourceStamp(self):
- return self.b.getSourceStamp()
-
- def remote_getBuilderName(self):
- return self.b.getBuilderName()
-
- def remote_subscribe(self, observer):
- """The observer's remote_newbuild method will be called (with two
- arguments: the RemoteBuild object, and our builderName) for each new
- Build that is created to handle this BuildRequest."""
- self.observers.append(observer)
- def send(bs):
- d = observer.callRemote("newbuild",
- IRemote(bs), self.b.getBuilderName())
- d.addErrback(lambda err: None)
- reactor.callLater(0, self.b.subscribe, send)
-
- def remote_unsubscribe(self, observer):
- # PB (well, at least oldpb) doesn't re-use RemoteReference instances,
- # so sending the same object across the wire twice will result in two
- # separate objects that compare as equal ('a is not b' and 'a == b').
- # That means we can't use a simple 'self.observers.remove(observer)'
- # here.
- for o in self.observers:
- if o == observer:
- self.observers.remove(o)
-
-components.registerAdapter(RemoteBuildRequest,
- interfaces.IBuildRequestStatus, IRemote)
-
-class RemoteBuild(pb.Referenceable):
- def __init__(self, build):
- self.b = build
- self.observers = []
-
- def remote_getBuilderName(self):
- return self.b.getBuilder().getName()
-
- def remote_getNumber(self):
- return self.b.getNumber()
-
- def remote_getReason(self):
- return self.b.getReason()
-
- def remote_getChanges(self):
- return [IRemote(c) for c in self.b.getChanges()]
-
- def remote_getResponsibleUsers(self):
- return self.b.getResponsibleUsers()
-
- def remote_getSteps(self):
- return [IRemote(s) for s in self.b.getSteps()]
-
- def remote_getTimes(self):
- return self.b.getTimes()
-
- def remote_isFinished(self):
- return self.b.isFinished()
-
- def remote_waitUntilFinished(self):
- # the Deferred returned by callRemote() will fire when this build is
- # finished
- d = self.b.waitUntilFinished()
- d.addCallback(lambda res: self)
- return d
-
- def remote_getETA(self):
- return self.b.getETA()
-
- def remote_getCurrentStep(self):
- return makeRemote(self.b.getCurrentStep())
-
- def remote_getText(self):
- return self.b.getText()
-
- def remote_getColor(self):
- return self.b.getColor()
-
- def remote_getResults(self):
- return self.b.getResults()
-
- def remote_getLogs(self):
- logs = {}
- for name,log in self.b.getLogs().items():
- logs[name] = IRemote(log)
- return logs
-
- def remote_subscribe(self, observer, updateInterval=None):
- """The observer will have remote_stepStarted(buildername, build,
- stepname, step), remote_stepFinished(buildername, build, stepname,
- step, results), and maybe remote_buildETAUpdate(buildername, build,
- eta)) messages sent to it."""
- self.observers.append(observer)
- s = BuildSubscriber(observer)
- self.b.subscribe(s, updateInterval)
-
- def remote_unsubscribe(self, observer):
- # TODO: is the observer automatically unsubscribed when the build
- # finishes? Or are they responsible for unsubscribing themselves
- # anyway? How do we avoid a race condition here?
- for o in self.observers:
- if o == observer:
- self.observers.remove(o)
-
-
-components.registerAdapter(RemoteBuild,
- interfaces.IBuildStatus, IRemote)
-
-class BuildSubscriber:
- def __init__(self, observer):
- self.observer = observer
-
- def buildETAUpdate(self, build, eta):
- self.observer.callRemote("buildETAUpdate",
- build.getBuilder().getName(),
- IRemote(build),
- eta)
-
- def stepStarted(self, build, step):
- self.observer.callRemote("stepStarted",
- build.getBuilder().getName(),
- IRemote(build),
- step.getName(), IRemote(step))
- return None
-
- def stepFinished(self, build, step, results):
- self.observer.callRemote("stepFinished",
- build.getBuilder().getName(),
- IRemote(build),
- step.getName(), IRemote(step),
- results)
-
-
-class RemoteBuildStep(pb.Referenceable):
- def __init__(self, step):
- self.s = step
-
- def remote_getName(self):
- return self.s.getName()
-
- def remote_getBuild(self):
- return IRemote(self.s.getBuild())
-
- def remote_getTimes(self):
- return self.s.getTimes()
-
- def remote_getExpectations(self):
- return self.s.getExpectations()
-
- def remote_getLogs(self):
- logs = {}
- for name,log in self.s.getLogs().items():
- logs[name] = IRemote(log)
- return logs
-
- def remote_isFinished(self):
- return self.s.isFinished()
-
- def remote_waitUntilFinished(self):
- return self.s.waitUntilFinished() # returns a Deferred
-
- def remote_getETA(self):
- return self.s.getETA()
-
- def remote_getText(self):
- return self.s.getText()
-
- def remote_getColor(self):
- return self.s.getColor()
-
- def remote_getResults(self):
- return self.s.getResults()
-
-components.registerAdapter(RemoteBuildStep,
- interfaces.IBuildStepStatus, IRemote)
-
-class RemoteSlave:
- def __init__(self, slave):
- self.s = slave
-
- def remote_getName(self):
- return self.s.getName()
- def remote_getAdmin(self):
- return self.s.getAdmin()
- def remote_getHost(self):
- return self.s.getHost()
- def remote_isConnected(self):
- return self.s.isConnected()
-
-components.registerAdapter(RemoteSlave,
- interfaces.ISlaveStatus, IRemote)
-
-class RemoteEvent:
- def __init__(self, event):
- self.e = event
-
- def remote_getTimes(self):
- return self.e.getTimes()
- def remote_getText(self):
- return self.e.getText()
- def remote_getColor(self):
- return self.e.getColor()
-
-components.registerAdapter(RemoteEvent,
- interfaces.IStatusEvent, IRemote)
-
-class RemoteLog(pb.Referenceable):
- def __init__(self, log):
- self.l = log
-
- def remote_getName(self):
- return self.l.getName()
-
- def remote_isFinished(self):
- return self.l.isFinished()
- def remote_waitUntilFinished(self):
- d = self.l.waitUntilFinished()
- d.addCallback(lambda res: self)
- return d
-
- def remote_getText(self):
- return self.l.getText()
- def remote_getTextWithHeaders(self):
- return self.l.getTextWithHeaders()
- def remote_getChunks(self):
- return self.l.getChunks()
- # TODO: subscription interface
-
-components.registerAdapter(RemoteLog, builder.LogFile, IRemote)
-# TODO: something similar for builder.HTMLLogfile ?
-
-class RemoteChange:
- def __init__(self, change):
- self.c = change
-
- def getWho(self):
- return self.c.who
- def getFiles(self):
- return self.c.files
- def getComments(self):
- return self.c.comments
-
-components.registerAdapter(RemoteChange, changes.Change, IRemote)
-
-
-class StatusClientPerspective(base.StatusReceiverPerspective):
-
- subscribed = None
- client = None
-
- def __init__(self, status):
- self.status = status # the IStatus
- self.subscribed_to_builders = [] # Builders to which we're subscribed
- self.subscribed_to = [] # everything else we're subscribed to
-
- def __getstate__(self):
- d = self.__dict__.copy()
- d['client'] = None
- return d
-
- def attached(self, mind):
- #log.msg("StatusClientPerspective.attached")
- return self
-
- def detached(self, mind):
- log.msg("PB client detached")
- self.client = None
- for name in self.subscribed_to_builders:
- log.msg(" unsubscribing from Builder(%s)" % name)
- self.status.getBuilder(name).unsubscribe(self)
- for s in self.subscribed_to:
- log.msg(" unsubscribe from %s" % s)
- s.unsubscribe(self)
- self.subscribed = None
-
- def perspective_subscribe(self, mode, interval, target):
- """The remote client wishes to subscribe to some set of events.
- 'target' will be sent remote messages when these events happen.
- 'mode' indicates which events are desired: it is a string with one
- of the following values:
-
- 'builders': builderAdded, builderRemoved
- 'builds': those plus builderChangedState, buildStarted, buildFinished
- 'steps': all those plus buildETAUpdate, stepStarted, stepFinished
- 'logs': all those plus stepETAUpdate, logStarted, logFinished
- 'full': all those plus logChunk (with the log contents)
-
-
- Messages are defined by buildbot.interfaces.IStatusReceiver .
- 'interval' is used to specify how frequently ETAUpdate messages
- should be sent.
-
- Raising or lowering the subscription level will take effect starting
- with the next build or step."""
-
- assert mode in ("builders", "builds", "steps", "logs", "full")
- assert target
- log.msg("PB subscribe(%s)" % mode)
-
- self.client = target
- self.subscribed = mode
- self.interval = interval
- self.subscribed_to.append(self.status)
- # wait a moment before subscribing, so the new-builder messages
- # won't appear before this remote method finishes
- reactor.callLater(0, self.status.subscribe, self)
- return None
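On the wire, a perspective_subscribe method is reached with callRemote("subscribe", ...). A sketch of the client side, assuming an already-connected reference to this perspective; the Observer method names mirror the callRemote() calls made further down in this class, everything else is illustrative:

    from twisted.spread import pb

    class Observer(pb.Referenceable):
        # names mirror the callRemote() calls in StatusClientPerspective below
        def remote_builderAdded(self, name, builder):
            pass
        def remote_buildStarted(self, name, build):
            pass
        def remote_buildFinished(self, name, build, results):
            pass

    # with a connected reference 'persp' to this perspective (obtaining it is
    # outside this sketch):
    #     persp.callRemote("subscribe", "builds", 60, Observer())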
-
- def perspective_unsubscribe(self):
- log.msg("PB unsubscribe")
- self.status.unsubscribe(self)
- self.subscribed_to.remove(self.status)
- self.client = None
-
- def perspective_getBuildSets(self):
- """This returns tuples of (buildset, bsid), because that is much more
- convenient for tryclient."""
- return [(IRemote(s), s.getID()) for s in self.status.getBuildSets()]
-
- def perspective_getBuilderNames(self):
- return self.status.getBuilderNames()
-
- def perspective_getBuilder(self, name):
- b = self.status.getBuilder(name)
- return IRemote(b)
-
- def perspective_getSlave(self, name):
- s = self.status.getSlave(name)
- return IRemote(s)
-
- # IStatusReceiver methods, invoked if we've subscribed
-
- # mode >= builder
- def builderAdded(self, name, builder):
- self.client.callRemote("builderAdded", name, IRemote(builder))
- if self.subscribed in ("builds", "steps", "logs", "full"):
- self.subscribed_to_builders.append(name)
- return self
- return None
-
- def builderChangedState(self, name, state):
- self.client.callRemote("builderChangedState", name, state, None)
- # TODO: remove leftover ETA argument
-
- def builderRemoved(self, name):
- if name in self.subscribed_to_builders:
- self.subscribed_to_builders.remove(name)
- self.client.callRemote("builderRemoved", name)
-
- def buildsetSubmitted(self, buildset):
- # TODO: deliver to client, somehow
- pass
-
- # mode >= builds
- def buildStarted(self, name, build):
- self.client.callRemote("buildStarted", name, IRemote(build))
- if self.subscribed in ("steps", "logs", "full"):
- self.subscribed_to.append(build)
- return (self, self.interval)
- return None
-
- def buildFinished(self, name, build, results):
- if build in self.subscribed_to:
- # we might have joined during the build
- self.subscribed_to.remove(build)
- self.client.callRemote("buildFinished",
- name, IRemote(build), results)
-
- # mode >= steps
- def buildETAUpdate(self, build, eta):
- self.client.callRemote("buildETAUpdate",
- build.getBuilder().getName(), IRemote(build),
- eta)
-
- def stepStarted(self, build, step):
- # we add some information here so the client doesn't have to do an
- # extra round-trip
- self.client.callRemote("stepStarted",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step))
- if self.subscribed in ("logs", "full"):
- self.subscribed_to.append(step)
- return (self, self.interval)
- return None
-
- def stepFinished(self, build, step, results):
- self.client.callRemote("stepFinished",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step),
- results)
- if step in self.subscribed_to:
- # eventually (through some new subscription method) we could
- # join in the middle of the step
- self.subscribed_to.remove(step)
-
- # mode >= logs
- def stepETAUpdate(self, build, step, ETA, expectations):
- self.client.callRemote("stepETAUpdate",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step),
- ETA, expectations)
-
- def logStarted(self, build, step, log):
- # TODO: make the HTMLLog adapter
- rlog = IRemote(log, None)
- if not rlog:
- print "hey, couldn't adapt %s to IRemote" % log
- self.client.callRemote("logStarted",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step),
- log.getName(), IRemote(log, None))
- if self.subscribed in ("full",):
- self.subscribed_to.append(log)
- return self
- return None
-
- def logFinished(self, build, step, log):
- self.client.callRemote("logFinished",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step),
- log.getName(), IRemote(log, None))
- if log in self.subscribed_to:
- self.subscribed_to.remove(log)
-
- # mode >= full
- def logChunk(self, build, step, log, channel, text):
- self.client.callRemote("logChunk",
- build.getBuilder().getName(), IRemote(build),
- step.getName(), IRemote(step),
- log.getName(), IRemote(log),
- channel, text)
-
-
-class PBListener(base.StatusReceiverMultiService):
- """I am a listener for PB-based status clients."""
-
- compare_attrs = ["port", "cred"]
- if implements:
- implements(portal.IRealm)
- else:
- __implements__ = (portal.IRealm,
- base.StatusReceiverMultiService.__implements__)
-
- def __init__(self, port, user="statusClient", passwd="clientpw"):
- base.StatusReceiverMultiService.__init__(self)
- if type(port) is int:
- port = "tcp:%d" % port
- self.port = port
- self.cred = (user, passwd)
- p = portal.Portal(self)
- c = checkers.InMemoryUsernamePasswordDatabaseDontUse()
- c.addUser(user, passwd)
- p.registerChecker(c)
- f = pb.PBServerFactory(p)
- s = strports.service(port, f)
- s.setServiceParent(self)
-
- def setServiceParent(self, parent):
- base.StatusReceiverMultiService.setServiceParent(self, parent)
- self.setup()
-
- def setup(self):
- self.status = self.parent.getStatus()
-
- def requestAvatar(self, avatarID, mind, interface):
- assert interface == pb.IPerspective
- p = StatusClientPerspective(self.status)
- p.attached(mind) # perhaps .callLater(0) ?
- return (pb.IPerspective, p,
- lambda p=p,mind=mind: p.detached(mind))
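In a master.cfg this listener is attached like any other status target; a sketch of such a fragment, with a made-up port and the default credentials from the constructor above:

    # fragment of a master.cfg (sketch)
    from buildbot.status import client
    c['status'].append(client.PBListener(port=9988,
                                          user="statusClient",
                                          passwd="clientpw"))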
diff --git a/buildbot/buildbot-source/buildbot/status/getcws.py b/buildbot/buildbot-source/buildbot/status/getcws.py
deleted file mode 100644
index c545b83c8..000000000
--- a/buildbot/buildbot-source/buildbot/status/getcws.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# Original thanks to David Fraser <davidf@sjsoft.com> and Caolan McNamara <caolanm@redhat.com>
-
-import urllib2, cookielib, cgi
-import os, sys
-
-from HTMLParser import HTMLParser
-
-class cws:
- def __init__(self, cwss):
- self.cwss = cwss
-
-
-class EISScraper(HTMLParser):
- def __init__(self):
- HTMLParser.__init__(self)
- self.state = 0;
- self.cwss = []
-
- def handle_starttag(self, tag, attrs):
- if tag == 'td' and self.state < 3:
- self.state += 1
-
- def handle_data(self, data):
- if self.state == 3:
- self.cwss.append(data.strip())
- self.state = 4
-
-
- def handle_endtag(self, tag):
- if tag == 'tr' and self.state == 4:
- self.state = 0
-
-class EIS:
- def __init__(self, cookiefile="eis.lwp"):
- self.cookiefile = cookiefile
- self.cookiejar = cookielib.LWPCookieJar()
- if os.path.isfile(self.cookiefile):
- self.cookiejar.load(self.cookiefile)
- opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar))
- urllib2.install_opener(opener)
- self.login()
- self.cache = {}
-
- def login(self):
- urllib2.urlopen("http://eis.services.openoffice.org/EIS2/GuestLogon").read()
- self.cookiejar.save(self.cookiefile)
-
- def cacheurl(self, url):
- if url in self.cache:
- return self.cache[url]
- else:
- try:
- contents = urllib2.urlopen(url).read()
- except urllib2.HTTPError, e:
- if e.code == 401:
- self.login()
- contents = urllib2.urlopen(url).read()
- else:
- raise
- self.cache[url] = contents
- return contents
- def findcws(self, cws,):
- thiscwsid = None
- milestoneresults = self.cacheurl("http://eis.services.openoffice.org/EIS2/cws.SearchCWS?DATE_NULL_Integrated_After=&DATE_NULL_DueDateBefore=&INT_NULL_Priority=&Name=" + cws + "&SRC_Step=Search&INT_NULL_IsHelpRelevant=&RSV_NoWait=true&DATE_NULL_DueDateAfter=&TaskId=&DATE_NULL_Integrated_Before=&INT_NULL_IsUIRelevant=")
- for line in milestoneresults.replace("\r", "").split("\n"):
- # cws.ShowCWS?Path=SRC680%2Fm54%2Fdba15&Id=1431
- startmark, endmark = "'cws.ShowCWS?", "'"
- if startmark in line:
- cwsargs = line[line.find(startmark) + len(startmark):]
- cwsargs = cwsargs[:cwsargs.find(endmark)]
- cwsargs = cgi.parse_qs(cwsargs)
- thiscwsid = int(cwsargs["Id"][0])
-
- return thiscwsid
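The Id extraction above works on link fragments like the one quoted in the comment ('cws.ShowCWS?Path=SRC680%2Fm54%2Fdba15&Id=1431'); a self-contained sketch of the same slicing and query parsing:

    import cgi   # cgi.parse_qs, as used above; urlparse.parse_qs in later Pythons

    line = "href='cws.ShowCWS?Path=SRC680%2Fm54%2Fdba15&Id=1431'"   # example from the comment
    startmark, endmark = "'cws.ShowCWS?", "'"
    args = line[line.find(startmark) + len(startmark):]
    args = args[:args.find(endmark)]
    assert int(cgi.parse_qs(args)["Id"][0]) == 1431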
-
-
- def getCWSs(self, query):
- status = -1
- if query == "new":
- status = 1
- elif query == "nominated":
- status = 2
- elif query == "integrated":
- status = 3
- elif query == "cancelled":
- status = 4
- elif query == "deleted":
- status = 5
- elif query == "ready":
- status = 6
- elif query == "planned":
- status = 7
- elif query == "approved":
- status = 8
- elif query == "pre-nominated":
- status = 9
- elif query == "fixed":
- status = 10
- elif query == "finished":
- status = 11
- elif query == "cloned":
- status = 12
-
- cwsresults = self.cacheurl("http://eis.services.openoffice.org/EIS2/cws.SearchCWS?Status=" + `status` +"&MWS=3&RSV_NoWait=true&SRC_Step=Search")
-
- foo = EISScraper()
- foo.feed(cwsresults)
- foo.cwss = foo.cwss[1:]
- foo.cwss.sort(lambda x, y: cmp(x.lower(), y.lower()))
- return cws(foo.cwss)
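The if/elif chain above is a fixed mapping from EIS status name to the numeric Status code used in the search URL; written as a table it is simply (a sketch equivalent to the chain, with -1 meaning "unknown query" as above):

    STATUS_CODES = {
        "new": 1, "nominated": 2, "integrated": 3, "cancelled": 4,
        "deleted": 5, "ready": 6, "planned": 7, "approved": 8,
        "pre-nominated": 9, "fixed": 10, "finished": 11, "cloned": 12,
    }
    status = STATUS_CODES.get(query, -1)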
-
- def getcwsid(self, cwsname):
- somecwsid = self.findcws(cwsname)
- if somecwsid != None:
- return somecwsid
- raise ValueError("no id found for cws %s" % cwsname)
-
- def getcwsurl(self, cwsname):
- cwsid = self.getcwsid(cwsname)
- return self.cacheurl("http://eis.services.openoffice.org/EIS2/cws.ShowCWS?Id=%d" % cwsid)
-
-
-
-class GetCWS:
- def __init__(self, query):
- self.query = query
-
- def getCWSs(self):
- eis = EIS()
- info = eis.getCWSs(self.query)
- return info.cwss
-
-
diff --git a/buildbot/buildbot-source/buildbot/status/html.py b/buildbot/buildbot-source/buildbot/status/html.py
deleted file mode 100644
index efed7509e..000000000
--- a/buildbot/buildbot-source/buildbot/status/html.py
+++ /dev/null
@@ -1,2385 +0,0 @@
-# -*- test-case-name: buildbot.test.test_web -*-
-
-from __future__ import generators
-
-from twisted.python import log, components
-from twisted.python.util import sibpath
-import urllib, re
-
-from twisted.internet import defer, reactor
-from twisted.web.resource import Resource
-from twisted.web import static, html, server, distrib
-from twisted.web.error import NoResource
-from twisted.web.util import Redirect, DeferredResource
-from twisted.application import strports
-from twisted.spread import pb
-
-from buildbot.twcompat import implements, Interface
-
-import string, types, time, os.path
-
-from buildbot import interfaces, util
-from buildbot import version
-from buildbot.sourcestamp import SourceStamp
-from buildbot.status import builder, base, getcws
-from buildbot.changes import changes
-from buildbot.process.base import BuildRequest
-
-class ITopBox(Interface):
- """I represent a box in the top row of the waterfall display: the one
- which shows the status of the last build for each builder."""
- pass
-
-class ICurrentBox(Interface):
- """I represent the 'current activity' box, just above the builder name."""
- pass
-
-class IBox(Interface):
- """I represent a box in the waterfall display."""
- pass
-
-class IHTMLLog(Interface):
- pass
-
-ROW_TEMPLATE = '''
-<div class="row">
- <span class="label">%(label)s</span>
- <span class="field">%(field)s</span>
-</div>'''
-
-def make_row(label, field):
- """Create a name/value row for the HTML.
-
- `label` is plain text; it will be HTML-encoded.
-
- `field` is a bit of HTML structure; it will not be encoded in
- any way.
- """
- label = html.escape(label)
- return ROW_TEMPLATE % {"label": label, "field": field}
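For example, the stop/rebuild/force forms further down call it like this; the label is HTML-escaped, the field is substituted verbatim into ROW_TEMPLATE:

    row = make_row("Your name:", "<input type='text' name='username' />")
    # row expands ROW_TEMPLATE to roughly:
    #   <div class="row">
    #     <span class="label">Your name:</span>
    #     <span class="field"><input type='text' name='username' /></span>
    #   </div>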
-
-colormap = {
- 'green': '#72ff75',
- }
-def td(text="", parms={}, **props):
- data = ""
- data += " "
- #if not props.has_key("border"):
- # props["border"] = 1
- props.update(parms)
- if props.has_key("bgcolor"):
- props["bgcolor"] = colormap.get(props["bgcolor"], props["bgcolor"])
- comment = props.get("comment", None)
- if comment:
- data += "<!-- %s -->" % comment
- data += "<td"
- class_ = props.get('class_', None)
- if class_:
- props["class"] = class_
- for prop in ("align", "bgcolor", "colspan", "rowspan", "border",
- "valign", "halign", "class"):
- p = props.get(prop, None)
- if p != None:
- data += " %s=\"%s\"" % (prop, p)
- data += ">"
- if not text:
- text = "&nbsp;"
- if type(text) == types.ListType:
- data += string.join(text, "<br />")
- else:
- data += text
- data += "</td>\n"
- return data
-
-def build_get_class(b):
- """
- Return the class to use for a finished build or buildstep,
- based on the result.
- """
- # FIXME: this getResults duplicity might need to be fixed
- result = b.getResults()
- #print "THOMAS: result for b %r: %r" % (b, result)
- if isinstance(b, builder.BuildStatus):
- result = b.getResults()
- elif isinstance(b, builder.BuildStepStatus):
- result = b.getResults()[0]
- # after forcing a build, b.getResults() returns ((None, []), []), ugh
- if isinstance(result, tuple):
- result = result[0]
- else:
- raise TypeError, "%r is not a BuildStatus or BuildStepStatus" % b
-
- if result == None:
- # FIXME: this happens when a buildstep is running ?
- return "running"
- return builder.Results[result]
-
-class Box:
- # a Box wraps an Event. The Box has HTML <td> parameters that Events
- # lack, and it has a base URL to which each File's name is relative.
- # Events don't know about HTML.
- spacer = False
- def __init__(self, text=[], color=None, class_=None, urlbase=None,
- **parms):
- self.text = text
- self.color = color
- self.class_ = class_
- self.urlbase = urlbase
- self.show_idle = 0
- if parms.has_key('show_idle'):
- del parms['show_idle']
- self.show_idle = 1
-
- self.parms = parms
- # parms is a dict of HTML parameters for the <td> element that will
- # represent this Event in the waterfall display.
-
- def td(self, **props):
- props.update(self.parms)
- text = self.text
- if not text and self.show_idle:
- text = ["[idle]"]
- return td(text, props, bgcolor=self.color, class_=self.class_)
-
-
-class HtmlResource(Resource):
- css = None
- contentType = "text/html; charset=UTF-8"
- def render(self, request):
- data = self.content(request)
- request.setHeader("content-type", self.contentType)
- if request.method == "HEAD":
- request.setHeader("content-length", len(data))
- return ''
- return data
- title = "Dummy"
- def content(self, request):
- data = ('<!DOCTYPE html PUBLIC'
- ' "-//W3C//DTD XHTML 1.0 Transitional//EN"\n'
- '"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n'
- '<html'
- ' xmlns="http://www.w3.org/1999/xhtml"'
- ' lang="en"'
- ' xml:lang="en">\n')
- data += "<head>\n"
- data += " <title>" + self.title + "</title>\n"
- if self.css:
- # TODO: use some sort of relative link up to the root page, so
- # this css can be used from child pages too
- data += (' <link href="%s" rel="stylesheet" type="text/css"/>\n'
- % "buildbot.css")
- data += "</head>\n"
- data += '<body vlink="#800080">\n'
- data += self.body(request)
- data += "</body></html>\n"
- return data
- def body(self, request):
- return "Dummy\n"
-
-class StaticHTML(HtmlResource):
- def __init__(self, body, title):
- HtmlResource.__init__(self)
- self.bodyHTML = body
- self.title = title
- def body(self, request):
- return self.bodyHTML
-
-# $builder/builds/NN/stepname
-class StatusResourceBuildStep(HtmlResource):
- title = "Build Step"
-
- def __init__(self, status, step):
- HtmlResource.__init__(self)
- self.status = status
- self.step = step
-
- def body(self, request):
- s = self.step
- b = s.getBuild()
- data = "<h1>BuildStep %s:#%d:%s</h1>\n" % \
- (b.getBuilder().getName(), b.getNumber(), s.getName())
-
- if s.isFinished():
- data += ("<h2>Finished</h2>\n"
- "<p>%s</p>\n" % html.escape("%s" % s.getText()))
- else:
- data += ("<h2>Not Finished</h2>\n"
- "<p>ETA %s seconds</p>\n" % s.getETA())
-
- exp = s.getExpectations()
- if exp:
- data += ("<h2>Expectations</h2>\n"
- "<ul>\n")
- for e in exp:
- data += "<li>%s: current=%s, target=%s</li>\n" % \
- (html.escape(e[0]), e[1], e[2])
- data += "</ul>\n"
- logs = s.getLogs()
- if logs:
- data += ("<h2>Logs</h2>\n"
- "<ul>\n")
- for num in range(len(logs)):
- if logs[num].hasContents():
- # FIXME: If the step name has a / in it, this is broken
- # either way. If we quote it but say '/'s are safe,
- # it chops up the step name. If we quote it and '/'s
- # are not safe, it escapes the / that separates the
- # step name from the log number.
- data += '<li><a href="%s">%s</a></li>\n' % \
- (urllib.quote(request.childLink("%d" % num)),
- html.escape(logs[num].getName()))
- else:
- data += ('<li>%s</li>\n' %
- html.escape(logs[num].getName()))
- data += "</ul>\n"
-
- return data
-
- def getChild(self, path, request):
- logname = path
- if path.endswith("installset.tar.gz"):
- filename = "installsets/" + path
- return static.File(filename)
- try:
- log = self.step.getLogs()[int(logname)]
- if log.hasContents():
- return IHTMLLog(interfaces.IStatusLog(log))
- return NoResource("Empty Log '%s'" % logname)
- except (IndexError, ValueError):
- return NoResource("No such Log '%s'" % logname)
-
-# $builder/builds/NN/tests/TESTNAME
-class StatusResourceTestResult(HtmlResource):
- title = "Test Logs"
-
- def __init__(self, status, name, result):
- HtmlResource.__init__(self)
- self.status = status
- self.name = name
- self.result = result
-
- def body(self, request):
- dotname = ".".join(self.name)
- logs = self.result.getLogs()
- lognames = logs.keys()
- lognames.sort()
- data = "<h1>%s</h1>\n" % html.escape(dotname)
- for name in lognames:
- data += "<h2>%s</h2>\n" % html.escape(name)
- data += "<pre>" + logs[name] + "</pre>\n\n"
-
- return data
-
-
-# $builder/builds/NN/tests
-class StatusResourceTestResults(HtmlResource):
- title = "Test Results"
-
- def __init__(self, status, results):
- HtmlResource.__init__(self)
- self.status = status
- self.results = results
-
- def body(self, request):
- r = self.results
- data = "<h1>Test Results</h1>\n"
- data += "<ul>\n"
- testnames = r.keys()
- testnames.sort()
- for name in testnames:
- res = r[name]
- dotname = ".".join(name)
- data += " <li>%s: " % dotname
- # TODO: this could break on weird test names. At the moment,
- # test names only come from Trial tests, where the name
- # components must be legal python names, but that won't always
- # be a restriction.
- url = request.childLink(dotname)
- data += "<a href=\"%s\">%s</a>" % (url, " ".join(res.getText()))
- data += "</li>\n"
- data += "</ul>\n"
- return data
-
- def getChild(self, path, request):
- try:
- name = tuple(path.split("."))
- result = self.results[name]
- return StatusResourceTestResult(self.status, name, result)
- except KeyError:
- return NoResource("No such test name '%s'" % path)
-
-
-# $builder/builds/NN
-class StatusResourceBuild(HtmlResource):
- title = "Build"
-
- def __init__(self, status, build, builderControl, buildControl):
- HtmlResource.__init__(self)
- self.status = status
- self.build = build
- self.builderControl = builderControl
- self.control = buildControl
-
- def body(self, request):
- b = self.build
- buildbotURL = self.status.getBuildbotURL()
- projectName = self.status.getProjectName()
- data = '<div class="title"><a href="%s">%s</a></div>\n'%(buildbotURL,
- projectName)
- # the color in the following line gives python-mode trouble
- data += ("<h1>Build <a href=\"%s\">%s</a>:#%d</h1>\n"
- "<h2>Reason:</h2>\n%s\n"
- % (self.status.getURLForThing(b.getBuilder()),
- b.getBuilder().getName(), b.getNumber(),
- html.escape(b.getReason())))
-
- branch, revision, patch = b.getSourceStamp()
- data += "<h2>SourceStamp:</h2>\n"
- data += " <ul>\n"
- if branch:
- data += " <li>Branch: %s</li>\n" % html.escape(branch)
- if revision:
- data += " <li>Revision: %s</li>\n" % html.escape(str(revision))
- if patch:
- data += " <li>Patch: YES</li>\n" # TODO: provide link to .diff
- if b.getChanges():
- data += " <li>Changes: see below</li>\n"
- if (branch is None and revision is None and patch is None
- and not b.getChanges()):
- data += " <li>build of most recent revision</li>\n"
- data += " </ul>\n"
- if b.isFinished():
- data += "<h4>Buildslave: %s</h4>\n" % html.escape(b.getSlavename())
- data += "<h2>Results:</h2>\n"
- data += " ".join(b.getText()) + "\n"
- if b.getTestResults():
- url = request.childLink("tests")
- data += "<h3><a href=\"%s\">test results</a></h3>\n" % url
- else:
- data += "<h2>Build In Progress</h2>"
- if self.control is not None:
- stopURL = urllib.quote(request.childLink("stop"))
- data += """
- <form action="%s" class='command stopbuild'>
- <p>To stop this build, fill out the following fields and
- push the 'Stop' button</p>\n""" % stopURL
- data += make_row("Your name:",
- "<input type='text' name='username' />")
- data += make_row("Reason for stopping build:",
- "<input type='text' name='comments' />")
- data += """<input type="submit" value="Stop Builder" />
- </form>
- """
-
- if b.isFinished() and self.builderControl is not None:
- data += "<h3>Resubmit Build:</h3>\n"
- # can we rebuild it exactly?
- exactly = (revision is not None) or b.getChanges()
- if exactly:
- data += ("<p>This tree was built from a specific set of \n"
- "source files, and can be rebuilt exactly</p>\n")
- else:
- data += ("<p>This tree was built from the most recent "
- "revision")
- if branch:
- data += " (along some branch)"
- data += (" and thus it might not be possible to rebuild it \n"
- "exactly. Any changes that have been committed \n"
- "after this build was started <b>will</b> be \n"
- "included in a rebuild.</p>\n")
- rebuildURL = urllib.quote(request.childLink("rebuild"))
- data += ('<form action="%s" class="command rebuild">\n'
- % rebuildURL)
- data += make_row("Your name:",
- "<input type='text' name='username' />")
- data += make_row("Reason for re-running build:",
- "<input type='text' name='comments' />")
- data += '<input type="submit" value="Rebuild" />\n'
-
- data += "<h2>Steps and Logfiles:</h2>\n"
- if b.getLogs():
- data += "<ol>\n"
- for s in b.getSteps():
- data += (" <li><a href=\"%s\">%s</a> [%s]\n"
- % (self.status.getURLForThing(s), s.getName(),
- " ".join(s.getText())))
- if s.getLogs():
- data += " <ol>\n"
- for logfile in s.getLogs():
- data += (" <li><a href=\"%s\">%s</a></li>\n" %
- (self.status.getURLForThing(logfile),
- logfile.getName()))
- data += " </ol>\n"
- data += " </li>\n"
- data += "</ol>\n"
-
- data += ("<h2>Blamelist:</h2>\n"
- " <ol>\n")
- for who in b.getResponsibleUsers():
- data += " <li>%s</li>\n" % html.escape(who)
- data += (" </ol>\n"
- "<h2>All Changes</h2>\n")
- changes = b.getChanges()
- if changes:
- data += "<ol>\n"
- for c in changes:
- data += "<li>" + c.asHTML() + "</li>\n"
- data += "</ol>\n"
- #data += html.PRE(b.changesText()) # TODO
- return data
-
- def stop(self, request):
- log.msg("web stopBuild of build %s:%s" % \
- (self.build.getBuilder().getName(),
- self.build.getNumber()))
- name = request.args.get("username", ["<unknown>"])[0]
- comments = request.args.get("comments", ["<no reason specified>"])[0]
- reason = ("The web-page 'stop build' button was pressed by "
- "'%s': %s\n" % (name, comments))
- self.control.stopBuild(reason)
- # we're at http://localhost:8080/svn-hello/builds/5/stop?[args] and
- # we want to go to: http://localhost:8080/svn-hello/builds/5 or
- # http://localhost:8080/
- #
- #return Redirect("../%d" % self.build.getNumber())
- r = Redirect("../../..")
- d = defer.Deferred()
- reactor.callLater(1, d.callback, r)
- return DeferredResource(d)
-
- def rebuild(self, request):
- log.msg("web rebuild of build %s:%s" % \
- (self.build.getBuilder().getName(),
- self.build.getNumber()))
- name = request.args.get("username", ["<unknown>"])[0]
- comments = request.args.get("comments", ["<no reason specified>"])[0]
- reason = ("The web-page 'rebuild' button was pressed by "
- "'%s': %s\n" % (name, comments))
- if not self.builderControl or not self.build.isFinished():
- log.msg("could not rebuild: bc=%s, isFinished=%s"
- % (self.builderControl, self.build.isFinished()))
- # TODO: indicate an error
- else:
- self.builderControl.resubmitBuild(self.build, reason)
- # we're at http://localhost:8080/svn-hello/builds/5/rebuild?[args] and
- # we want to go to the top, at http://localhost:8080/
- r = Redirect("../../..")
- d = defer.Deferred()
- reactor.callLater(1, d.callback, r)
- return DeferredResource(d)
-
- def getChild(self, path, request):
- if path == "tests":
- return StatusResourceTestResults(self.status,
- self.build.getTestResults())
- if path == "stop":
- return self.stop(request)
- if path == "rebuild":
- return self.rebuild(request)
- if path.startswith("step-"):
- stepname = path[len("step-"):]
- steps = self.build.getSteps()
- for s in steps:
- if s.getName() == stepname:
- return StatusResourceBuildStep(self.status, s)
- return NoResource("No such BuildStep '%s'" % stepname)
- return NoResource("No such resource '%s'" % path)
-
-# $builder
-class StatusResourceBuilder(HtmlResource):
-
- def __init__(self, status, builder, control):
- HtmlResource.__init__(self)
- self.status = status
- self.title = builder.getName() + " Builder"
- self.builder = builder
- self.control = control
-
- def body(self, request):
- b = self.builder
- slaves = b.getSlaves()
- connected_slaves = [s for s in slaves if s.isConnected()]
-
- buildbotURL = self.status.getBuildbotURL()
- projectName = self.status.getProjectName()
- data = "<a href=\"%s\">%s</a>\n" % (buildbotURL, projectName)
- data += make_row("Builder:", html.escape(b.getName()))
- b1 = b.getBuild(-1)
- if b1 is not None:
- data += make_row("Current/last build:", str(b1.getNumber()))
- data += "\n<br />BUILDSLAVES<br />\n"
- data += "<ol>\n"
- for slave in slaves:
- data += "<li><b>%s</b>: " % html.escape(slave.getName())
- if slave.isConnected():
- data += "CONNECTED\n"
- if slave.getAdmin():
- data += make_row("Admin:", html.escape(slave.getAdmin()))
- if slave.getHost():
- data += "<span class='label'>Host info:</span>\n"
- data += html.PRE(slave.getHost())
- else:
- data += ("NOT CONNECTED\n")
- data += "</li>\n"
- data += "</ol>\n"
-
- if self.control is not None and connected_slaves:
- forceURL = urllib.quote(request.childLink("force"))
- data += (
- """
- <form action='%(forceURL)s' class='command forcebuild'>
- <p>To force a build, fill out the following fields and
- push the 'Force Build' button</p>
- <table border='0'>
- <tr>
- <td>
- Your name:
- </td>
- <td>
- <input type='text' name='username' />@openoffice.org (for email notification about build status)
- </td>
- </tr>
- <tr>
- <td>
- Reason for build:
- </td>
- <td>
- <input type='text' name='comments' />
- </td>
- </tr>
- <tr>
- <td>
- CWS to build:
- </td>
- <td>
- <input type='text' name='branch' />(e.g. configdbbe, kaib01, ww8perf02)
- </td>
- </tr>
- <tr>
- <td>
- Config Switches:
- </td>
- <td>
- <input type='text' size='50' name='config' />(if your CWS requires extra config switches)
- </td>
- </tr>
- <tr>
- <td>
- Make Install-Set:
- </td>
- <td>
- <input type='checkbox' name='installsetcheck' />(If you want to download install-sets)
- </td>
- </tr>
- <tr>
- <td colspan='2'>
- <input type='submit' value='Force Build' />
- </td>
- </tr>
- </table>
- </form>
- """) % {"forceURL": forceURL}
- elif self.control is not None:
- data += """
- <p>All buildslaves appear to be offline, so it's not possible
- to force this build to execute at this time.</p>
- """
-
- if self.control is not None:
- pingURL = urllib.quote(request.childLink("ping"))
- data += """
- <form action="%s" class='command pingbuilder'>
- <p>To ping the buildslave(s), push the 'Ping' button</p>
-
- <input type="submit" value="Ping Builder" />
- </form>
- """ % pingURL
-
- return data
-
- def force(self, request):
- name = request.args.get("username", ["<unknown>"])[0]
- reason = request.args.get("comments", ["<no reason specified>"])[0]
- branch = request.args.get("branch", [""])[0]
- revision = request.args.get("revision", [""])[0]
- config = request.args.get("config", [""])[0]
- installsetcheck = request.args.get("installsetcheck", [""])[0]
-
- r = "The web-page 'force build' button was pressed by '%s': %s\n" \
- % (name, reason)
- log.msg("web forcebuild of builder '%s', branch='%s', revision='%s', config='%s', installsetcheck='%s' "
-              % (self.builder.name, branch, revision, config, installsetcheck))
-
- if not self.control:
- # TODO: tell the web user that their request was denied
- log.msg("but builder control is disabled")
- return Redirect("..")
-
- # keep weird stuff out of the branch and revision strings. TODO:
- # centralize this somewhere.
- if not re.match(r'^[\w\.\-\/]*$', branch):
- log.msg("bad branch '%s'" % branch)
- return Redirect("..")
- if not re.match(r'^[\w\.\-\/]*$', revision):
- log.msg("bad revision '%s'" % revision)
- return Redirect("..")
- if name == "":
- name = None
- if branch == "":
- branch = None
- if revision == "":
- revision = None
- if config == "":
- config = None
- if installsetcheck == "":
- installsetcheck = None
-
- # TODO: if we can authenticate that a particular User pushed the
- # button, use their name instead of None, so they'll be informed of
- # the results.
- s = SourceStamp(branch=branch, revision=revision)
-
- req = BuildRequest(r, s, self.builder.getName(), name, config, installsetcheck)
- try:
- self.control.requestBuildSoon(req)
- except interfaces.NoSlaveError:
- # TODO: tell the web user that their request could not be
- # honored
- pass
- return Redirect("..")
-
- def ping(self, request):
- log.msg("web ping of builder '%s'" % self.builder.name)
- self.control.ping() # TODO: there ought to be an ISlaveControl
- return Redirect("..")
-
- def getChild(self, path, request):
- if path == "force":
- return self.force(request)
- if path == "ping":
- return self.ping(request)
- if not path in ("events", "builds"):
- return NoResource("Bad URL '%s'" % path)
- num = request.postpath.pop(0)
- request.prepath.append(num)
- num = int(num)
- if path == "events":
-            # TODO: is this dead code? .statusbag doesn't exist, right?
- log.msg("getChild['path']: %s" % request.uri)
- return NoResource("events are unavailable until code gets fixed")
- filename = request.postpath.pop(0)
- request.prepath.append(filename)
- e = self.builder.statusbag.getEventNumbered(num)
- if not e:
- return NoResource("No such event '%d'" % num)
- file = e.files.get(filename, None)
- if file == None:
- return NoResource("No such file '%s'" % filename)
- if type(file) == type(""):
- if file[:6] in ("<HTML>", "<html>"):
- return static.Data(file, "text/html")
- return static.Data(file, "text/plain")
- return file
- if path == "builds":
- build = self.builder.getBuild(num)
- if build:
- control = None
- if self.control:
- control = self.control.getBuild(num)
- return StatusResourceBuild(self.status, build,
- self.control, control)
- else:
- return NoResource("No such build '%d'" % num)
- return NoResource("really weird URL %s" % path)
-
-# $changes/NN
-class StatusResourceChanges(HtmlResource):
- def __init__(self, status, changemaster):
- HtmlResource.__init__(self)
- self.status = status
- self.changemaster = changemaster
- def body(self, request):
- data = ""
- data += "Change sources:\n"
- sources = list(self.changemaster)
- if sources:
- data += "<ol>\n"
- for s in sources:
- data += "<li>%s</li>\n" % s.describe()
- data += "</ol>\n"
- else:
- data += "none (push only)\n"
- return data
- def getChild(self, path, request):
- num = int(path)
- c = self.changemaster.getChangeNumbered(num)
- if not c:
- return NoResource("No change number '%d'" % num)
- return StaticHTML(c.asHTML(), "Change #%d" % num)
-
-textlog_stylesheet = """
-<style type="text/css">
- div.data {
-    font-family: "Courier New", courier, monospace;
- }
- span.stdout {
-    font-family: "Courier New", courier, monospace;
- }
- span.stderr {
-    font-family: "Courier New", courier, monospace;
- color: red;
- }
- span.header {
-    font-family: "Courier New", courier, monospace;
- color: blue;
- }
-</style>
-"""
-
-class ChunkConsumer:
- if implements:
- implements(interfaces.IStatusLogConsumer)
- else:
- __implements__ = interfaces.IStatusLogConsumer,
-
- def __init__(self, original, textlog):
- self.original = original
- self.textlog = textlog
- def registerProducer(self, producer, streaming):
- self.producer = producer
- self.original.registerProducer(producer, streaming)
- def unregisterProducer(self):
- self.original.unregisterProducer()
- def writeChunk(self, chunk):
- formatted = self.textlog.content([chunk])
- try:
- self.original.write(formatted)
- except pb.DeadReferenceError:
-            self.producer.stopProducing()
- def finish(self):
- self.textlog.finished()
-
-class TextLog(Resource):
- # a new instance of this Resource is created for each client who views
- # it, so we can afford to track the request in the Resource.
- if implements:
- implements(IHTMLLog)
- else:
- __implements__ = IHTMLLog,
-
- asText = False
- subscribed = False
-
- def __init__(self, original):
- Resource.__init__(self)
- self.original = original
-
- def getChild(self, path, request):
- if path == "text":
- self.asText = True
- return self
- return NoResource("bad pathname")
-
- def htmlHeader(self, request):
- title = "Log File contents"
- data = "<html>\n<head><title>" + title + "</title>\n"
- data += textlog_stylesheet
- data += "</head>\n"
- data += "<body vlink=\"#800080\">\n"
- texturl = request.childLink("text")
- data += '<a href="%s">(view as text)</a><br />\n' % texturl
- data += "<pre>\n"
- return data
-
- def content(self, entries):
- spanfmt = '<span class="%s">%s</span>'
- data = ""
- for type, entry in entries:
- if self.asText:
- if type != builder.HEADER:
- data += entry
- else:
- data += spanfmt % (builder.ChunkTypes[type],
- html.escape(entry))
- return data
-
- def htmlFooter(self):
- data = "</pre>\n"
- data += "</body></html>\n"
- return data
-
- def render_HEAD(self, request):
- if self.asText:
- request.setHeader("content-type", "text/plain")
- else:
- request.setHeader("content-type", "text/html")
-
- # vague approximation, ignores markup
- request.setHeader("content-length", self.original.length)
- return ''
-
- def render_GET(self, req):
- self.req = req
-
- if self.asText:
- req.setHeader("content-type", "text/plain")
- else:
- req.setHeader("content-type", "text/html")
-
- if not self.asText:
- req.write(self.htmlHeader(req))
-
- self.original.subscribeConsumer(ChunkConsumer(req, self))
- return server.NOT_DONE_YET
-
- def finished(self):
- if not self.req:
- return
- try:
- if not self.asText:
- self.req.write(self.htmlFooter())
- self.req.finish()
- except pb.DeadReferenceError:
- pass
- # break the cycle, the Request's .notifications list includes the
- # Deferred (from req.notifyFinish) that's pointing at us.
- self.req = None
-
-components.registerAdapter(TextLog, interfaces.IStatusLog, IHTMLLog)
-
-
-class HTMLLog(Resource):
- if implements:
- implements(IHTMLLog)
- else:
- __implements__ = IHTMLLog,
-
-
- def __init__(self, original):
- Resource.__init__(self)
- self.original = original
-
- def render(self, request):
- request.setHeader("content-type", "text/html")
- return self.original.html
-
-components.registerAdapter(HTMLLog, builder.HTMLLogFile, IHTMLLog)
-
-
-class CurrentBox(components.Adapter):
- # this provides the "current activity" box, just above the builder name
- if implements:
- implements(ICurrentBox)
- else:
- __implements__ = ICurrentBox,
-
- def formatETA(self, eta):
- if eta is None:
- return []
- if eta < 0:
- return ["Soon"]
- abstime = time.strftime("%H:%M:%S", time.localtime(util.now()+eta))
- return ["ETA in", "%d secs" % eta, "at %s" % abstime]
-
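-    # Example (times hypothetical): with eta=90 and a local time of 14:32:10,
-    # formatETA() above would yield:
-    #   formatETA(90)   -> ["ETA in", "90 secs", "at 14:33:40"]
-    #   formatETA(-5)   -> ["Soon"]
-    #   formatETA(None) -> []
-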
- def getBox(self, status):
- # getState() returns offline, idle, or building
- state, builds = self.original.getState()
-
- # look for upcoming builds. We say the state is "waiting" if the
- # builder is otherwise idle and there is a scheduler which tells us a
- # build will be performed some time in the near future. TODO: this
- # functionality used to be in BuilderStatus.. maybe this code should
- # be merged back into it.
- upcoming = []
- builderName = self.original.getName()
- for s in status.getSchedulers():
- if builderName in s.listBuilderNames():
- upcoming.extend(s.getPendingBuildTimes())
- if state == "idle" and upcoming:
- state = "waiting"
-
- if state == "building":
- color = "yellow"
- text = ["building"]
- if builds:
- for b in builds:
- eta = b.getETA()
- if eta:
- text.extend(self.formatETA(eta))
- elif state == "offline":
- color = "red"
- text = ["offline"]
- elif state == "idle":
- color = "white"
- text = ["idle"]
- elif state == "waiting":
- color = "yellow"
- text = ["waiting"]
- else:
- # just in case I add a state and forget to update this
- color = "white"
- text = [state]
-
- # TODO: for now, this pending/upcoming stuff is in the "current
- # activity" box, but really it should go into a "next activity" row
- # instead. The only times it should show up in "current activity" is
- # when the builder is otherwise idle.
-
- # are any builds pending? (waiting for a slave to be free)
- pbs = self.original.getPendingBuilds()
- if pbs:
- text.append("%d pending" % len(pbs))
- for t in upcoming:
- text.extend(["next at",
- time.strftime("%H:%M:%S", time.localtime(t)),
- "[%d secs]" % (t - util.now()),
- ])
- # TODO: the upcoming-builds box looks like:
- # ['waiting', 'next at', '22:14:15', '[86 secs]']
- # while the currently-building box is reversed:
- # ['building', 'ETA in', '2 secs', 'at 22:12:50']
- # consider swapping one of these to make them look the same. also
- # consider leaving them reversed to make them look different.
- return Box(text, color=color, class_="Activity " + state)
-
-components.registerAdapter(CurrentBox, builder.BuilderStatus, ICurrentBox)
-
-class ChangeBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- url = "changes/%d" % self.original.number
- text = '<a href="%s">%s</a>' % (url, html.escape(self.original.who))
- return Box([text], color="white", class_="Change")
-components.registerAdapter(ChangeBox, changes.Change, IBox)
-
-class BuildBox(components.Adapter):
- # this provides the yellow "starting line" box for each build
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- b = self.original
- name = b.getBuilder().getName()
- number = b.getNumber()
- url = "%s/builds/%d" % (urllib.quote(name, safe=''), number)
- text = '<a href="%s">Build %d</a>' % (url, number)
- color = "yellow"
- class_ = "start"
- if b.isFinished() and not b.getSteps():
- # the steps have been pruned, so there won't be any indication
- # of whether it succeeded or failed. Color the box red or green
- # to show its status
- color = b.getColor()
- class_ = build_get_class(b)
- return Box([text], color=color, class_="BuildStep " + class_)
-components.registerAdapter(BuildBox, builder.BuildStatus, IBox)
-
-class StepBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- b = self.original.getBuild()
- urlbase = "%s/builds/%d/step-%s" % (
- urllib.quote(b.getBuilder().getName(), safe=''),
- b.getNumber(),
- urllib.quote(self.original.getName(), safe=''))
- text = self.original.getText()
- if text is None:
- log.msg("getText() gave None", urlbase)
- text = []
- text = text[:]
- logs = self.original.getLogs()
- for num in range(len(logs)):
- name = logs[num].getName()
- if logs[num].hasContents():
- url = "%s/%d" % (urlbase, num)
- text.append("<a href=\"%s\">%s</a>" % (url, html.escape(name)))
- else:
- text.append(html.escape(name))
- color = self.original.getColor()
- class_ = "BuildStep " + build_get_class(self.original)
- return Box(text, color, class_=class_)
-components.registerAdapter(StepBox, builder.BuildStepStatus, IBox)
-
-class EventBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- text = self.original.getText()
- color = self.original.getColor()
- class_ = "Event"
- if color:
- class_ += " " + color
- return Box(text, color, class_=class_)
-components.registerAdapter(EventBox, builder.Event, IBox)
-
-
-class BuildTopBox(components.Adapter):
- # this provides a per-builder box at the very top of the display,
- # showing the results of the most recent build
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- assert interfaces.IBuilderStatus(self.original)
- b = self.original.getLastFinishedBuild()
- if not b:
- return Box(["none"], "white", class_="LastBuild")
- name = b.getBuilder().getName()
- number = b.getNumber()
- url = "%s/builds/%d" % (name, number)
- text = b.getText()
- # TODO: add logs?
- # TODO: add link to the per-build page at 'url'
- c = b.getColor()
- class_ = build_get_class(b)
- return Box(text, c, class_="LastBuild %s" % class_)
-components.registerAdapter(BuildTopBox, builder.BuilderStatus, ITopBox)
-
-class Spacer(builder.Event):
- def __init__(self, start, finish):
- self.started = start
- self.finished = finish
-
-class SpacerBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- #b = Box(["spacer"], "white")
- b = Box([])
- b.spacer = True
- return b
-components.registerAdapter(SpacerBox, Spacer, IBox)
-
-def insertGaps(g, lastEventTime, idleGap=2):
- debug = False
-
- e = g.next()
- starts, finishes = e.getTimes()
- if debug: log.msg("E0", starts, finishes)
- if finishes == 0:
- finishes = starts
- if debug: log.msg("E1 finishes=%s, gap=%s, lET=%s" % \
- (finishes, idleGap, lastEventTime))
- if finishes is not None and finishes + idleGap < lastEventTime:
- if debug: log.msg(" spacer0")
- yield Spacer(finishes, lastEventTime)
-
- followingEventStarts = starts
- if debug: log.msg(" fES0", starts)
- yield e
-
- while 1:
- e = g.next()
- starts, finishes = e.getTimes()
- if debug: log.msg("E2", starts, finishes)
- if finishes == 0:
- finishes = starts
- if finishes is not None and finishes + idleGap < followingEventStarts:
- # there is a gap between the end of this event and the beginning
- # of the next one. Insert an idle event so the waterfall display
- # shows a gap here.
- if debug:
- log.msg(" finishes=%s, gap=%s, fES=%s" % \
- (finishes, idleGap, followingEventStarts))
- yield Spacer(finishes, followingEventStarts)
- yield e
- followingEventStarts = starts
- if debug: log.msg(" fES1", starts)
-
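-# A sketch of how buildGrid() below consumes insertGaps() (the variable names
-# here are illustrative, not part of this module): one gap-filling generator
-# is built per event source, e.g.
-#
-#   gen = insertGaps(some_builder_status.eventGenerator(), util.now())
-#   for ev in gen:
-#       ...   # status events interleaved with Spacer instances for idle time
-#
-# so idle periods show up as empty Spacer boxes in the waterfall columns.
-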
-
-class WaterfallStatusResource(HtmlResource):
- """This builds the main status page, with the waterfall display, and
- all child pages."""
- title = "BuildBot"
- def __init__(self, status, changemaster, categories, css=None):
- HtmlResource.__init__(self)
- self.status = status
- self.changemaster = changemaster
- self.categories = categories
- p = self.status.getProjectName()
- if p:
- self.title = "BuildBot: %s" % p
- self.css = css
-
- def body(self, request):
- "This method builds the main waterfall display."
-
- data = ''
-
- projectName = self.status.getProjectName()
- projectURL = self.status.getProjectURL()
-
- phase = request.args.get("phase",["2"])
- phase = int(phase[0])
-
- showBuilders = request.args.get("show", None)
- allBuilders = self.status.getBuilderNames(categories=self.categories)
- if showBuilders:
- builderNames = []
- for b in showBuilders:
- if b not in allBuilders:
- continue
- if b in builderNames:
- continue
- builderNames.append(b)
- else:
- builderNames = allBuilders
- builders = map(lambda name: self.status.getBuilder(name),
- builderNames)
-
- if phase == -1:
- return self.body0(request, builders)
- (changeNames, builderNames, timestamps, eventGrid, sourceEvents) = \
- self.buildGrid(request, builders)
- if phase == 0:
- return self.phase0(request, (changeNames + builderNames),
- timestamps, eventGrid)
- # start the table: top-header material
- data += '<table border="0" cellspacing="0">\n'
-
- if projectName and projectURL:
- # TODO: this is going to look really ugly
- #topleft = "<a href=\"%s\">%s</a><br />last build" % \
- # (projectURL, projectName)
- topleft = "<a href=\"%s\">%s</a><br /><a href=\"cws_view_ready\">Ready For QA</a><br /><a href=\"cws_view_new\">New</a>" % \
- (projectURL, projectName)
- #else:
- topright = "last build"
- data += ' <tr class="LastBuild">\n'
- data += td(topleft, align="right", class_="Project")
- data += td(topright, align="right", class_="Project")
- for b in builders:
- box = ITopBox(b).getBox()
- data += box.td(align="center")
- data += " </tr>\n"
-
- data += ' <tr class="Activity">\n'
- data += td('current activity', align='right', colspan=2)
- for b in builders:
- box = ICurrentBox(b).getBox(self.status)
- data += box.td(align="center")
- data += " </tr>\n"
-
- data += " <tr>\n"
- TZ = time.tzname[time.daylight]
- data += td("time (%s)" % TZ, align="center", class_="Time")
- name = changeNames[0]
- data += td(
- "<a href=\"%s\">%s</a>" % (urllib.quote(name, safe=''), name),
- align="center", class_="Change")
- for name in builderNames:
- data += td(
- #"<a href=\"%s\">%s</a>" % (request.childLink(name), name),
- "<a href=\"%s\">%s</a>" % (urllib.quote(name, safe=''), name),
- align="center", class_="Builder")
- data += " </tr>\n"
-
- if phase == 1:
- f = self.phase1
- else:
- f = self.phase2
- data += f(request, changeNames + builderNames, timestamps, eventGrid,
- sourceEvents)
-
- data += "</table>\n"
-
- data += "<hr />\n"
-
- data += "<a href=\"http://buildbot.sourceforge.net/\">Buildbot</a>"
- data += "-%s " % version
- if projectName:
- data += "working for the "
- if projectURL:
- data += "<a href=\"%s\">%s</a> project." % (projectURL,
- projectName)
- else:
- data += "%s project." % projectName
- data += "<br />\n"
- # TODO: push this to the right edge, if possible
- data += ("Page built: " +
- time.strftime("%a %d %b %Y %H:%M:%S",
- time.localtime(util.now()))
- + "\n")
- return data
-
- def body0(self, request, builders):
- # build the waterfall display
- data = ""
- data += "<h2>Basic display</h2>\n"
- data += "<p>See <a href=\"%s\">here</a>" % \
- urllib.quote(request.childLink("waterfall"))
- data += " for the waterfall display</p>\n"
-
- data += '<table border="0" cellspacing="0">\n'
- names = map(lambda builder: builder.name, builders)
-
- # the top row is two blank spaces, then the top-level status boxes
- data += " <tr>\n"
- data += td("", colspan=2)
- for b in builders:
- text = ""
- color = "#ca88f7"
- state, builds = b.getState()
- if state != "offline":
- text += "%s<br />\n" % state #b.getCurrentBig().text[0]
- else:
- text += "OFFLINE<br />\n"
- color = "#ffe0e0"
- data += td(text, align="center", bgcolor=color)
-
- # the next row has the column headers: time, changes, builder names
- data += " <tr>\n"
- data += td("Time", align="center")
- data += td("Changes", align="center")
- for name in names:
- data += td(
- "<a href=\"%s\">%s</a>" % (urllib.quote(request.childLink(name)), name),
- align="center")
- data += " </tr>\n"
-
- # all further rows involve timestamps, commit events, and build events
- data += " <tr>\n"
- data += td("04:00", align="bottom")
- data += td("fred", align="center")
- for name in names:
- data += td("stuff", align="center", bgcolor="red")
- data += " </tr>\n"
-
- data += "</table>\n"
- return data
-
- def buildGrid(self, request, builders):
- debug = False
-
- # XXX: see if we can use a cached copy
-
- # first step is to walk backwards in time, asking each column
- # (commit, all builders) if they have any events there. Build up the
- # array of events, and stop when we have a reasonable number.
-
- commit_source = self.changemaster
-
- lastEventTime = util.now()
- sources = [commit_source] + builders
- changeNames = ["changes"]
- builderNames = map(lambda builder: builder.getName(), builders)
- sourceNames = changeNames + builderNames
- sourceEvents = []
- sourceGenerators = []
- for s in sources:
- gen = insertGaps(s.eventGenerator(), lastEventTime)
- sourceGenerators.append(gen)
- # get the first event
- try:
- e = gen.next()
- event = interfaces.IStatusEvent(e)
- if debug:
- log.msg("gen %s gave1 %s" % (gen, event.getText()))
- except StopIteration:
- event = None
- sourceEvents.append(event)
- eventGrid = []
- timestamps = []
- spanLength = 10 # ten-second chunks
- tooOld = util.now() - 12*60*60 # never show more than 12 hours
- maxPageLen = 400
-
- lastEventTime = 0
- for e in sourceEvents:
- if e and e.getTimes()[0] > lastEventTime:
- lastEventTime = e.getTimes()[0]
- if lastEventTime == 0:
- lastEventTime = util.now()
-
- spanStart = lastEventTime - spanLength
- debugGather = 0
-
- while 1:
- if debugGather: log.msg("checking (%s,]" % spanStart)
- # the tableau of potential events is in sourceEvents[]. The
- # window crawls backwards, and we examine one source at a time.
-            # If the source's top-most event is in the window, it is pushed
- # onto the events[] array and the tableau is refilled. This
- # continues until the tableau event is not in the window (or is
- # missing).
-
- spanEvents = [] # for all sources, in this span. row of eventGrid
- firstTimestamp = None # timestamp of first event in the span
- lastTimestamp = None # last pre-span event, for next span
-
- for c in range(len(sourceGenerators)):
- events = [] # for this source, in this span. cell of eventGrid
- event = sourceEvents[c]
- while event and spanStart < event.getTimes()[0]:
- # to look at windows that don't end with the present,
- # condition the .append on event.time <= spanFinish
- if not IBox(event, None):
- log.msg("BAD EVENT", event, event.getText())
- assert 0
- if debug:
- log.msg("pushing", event.getText(), event)
- events.append(event)
- starts, finishes = event.getTimes()
- firstTimestamp = util.earlier(firstTimestamp, starts)
- try:
- event = sourceGenerators[c].next()
- #event = interfaces.IStatusEvent(event)
- if debug:
- log.msg("gen[%s] gave2 %s" % (sourceNames[c],
- event.getText()))
- except StopIteration:
- event = None
- if debug:
- log.msg("finished span")
-
- if event:
- # this is the last pre-span event for this source
- lastTimestamp = util.later(lastTimestamp,
- event.getTimes()[0])
- if debugGather:
- log.msg(" got %s from %s" % (events, sourceNames[c]))
- sourceEvents[c] = event # refill the tableau
- spanEvents.append(events)
-
- if firstTimestamp is not None:
- eventGrid.append(spanEvents)
- timestamps.append(firstTimestamp)
-
-
- if lastTimestamp:
- spanStart = lastTimestamp - spanLength
- else:
- # no more events
- break
- if lastTimestamp < tooOld:
- pass
- #break
- if len(timestamps) > maxPageLen:
- break
-
-
- # now loop
-
- # loop is finished. now we have eventGrid[] and timestamps[]
- if debugGather: log.msg("finished loop")
- assert(len(timestamps) == len(eventGrid))
- return (changeNames, builderNames, timestamps, eventGrid, sourceEvents)
-
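-    # Rough shape of what buildGrid() returns (inferred from the loop above):
-    #   timestamps   - span start times, newest span first
-    #   eventGrid    - one row per timestamp; each row holds one cell per
-    #                  source (changes column + builders), and each cell is a
-    #                  list of the events that fall inside that time span
-    #   sourceEvents - per source, the first event older than the grid, used
-    #                  by phase2() to fill an otherwise-empty bottom row
-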
- def phase0(self, request, sourceNames, timestamps, eventGrid):
- # phase0 rendering
- if not timestamps:
- return "no events"
- data = ""
- for r in range(0, len(timestamps)):
- data += "<p>\n"
- data += "[%s]<br />" % timestamps[r]
- row = eventGrid[r]
- assert(len(row) == len(sourceNames))
- for c in range(0, len(row)):
- if row[c]:
- data += "<b>%s</b><br />\n" % sourceNames[c]
- for e in row[c]:
- log.msg("Event", r, c, sourceNames[c], e.getText())
- lognames = [loog.getName() for loog in e.getLogs()]
- data += "%s: %s: %s %s<br />" % (e.getText(),
- e.getTimes()[0],
- e.getColor(),
- lognames)
- else:
- data += "<b>%s</b> [none]<br />\n" % sourceNames[c]
- return data
-
- def phase1(self, request, sourceNames, timestamps, eventGrid,
- sourceEvents):
- # phase1 rendering: table, but boxes do not overlap
- data = ""
- if not timestamps:
- return data
- lastDate = None
- for r in range(0, len(timestamps)):
- chunkstrip = eventGrid[r]
- # chunkstrip is a horizontal strip of event blocks. Each block
- # is a vertical list of events, all for the same source.
- assert(len(chunkstrip) == len(sourceNames))
- maxRows = reduce(lambda x,y: max(x,y),
- map(lambda x: len(x), chunkstrip))
- for i in range(maxRows):
-                data += " <tr>\n"
- if i == 0:
- stuff = []
- # add the date at the beginning, and each time it changes
- today = time.strftime("<b>%d %b %Y</b>",
- time.localtime(timestamps[r]))
- todayday = time.strftime("<b>%a</b>",
- time.localtime(timestamps[r]))
- if today != lastDate:
- stuff.append(todayday)
- stuff.append(today)
- lastDate = today
- stuff.append(
- time.strftime("%H:%M:%S",
- time.localtime(timestamps[r])))
- data += td(stuff, valign="bottom", align="center",
- rowspan=maxRows, class_="Time")
- for c in range(0, len(chunkstrip)):
- block = chunkstrip[c]
- assert(block != None) # should be [] instead
- # bottom-justify
- offset = maxRows - len(block)
- if i < offset:
- data += td("")
- else:
- e = block[i-offset]
- box = IBox(e).getBox()
- box.parms["show_idle"] = 1
- data += box.td(valign="top", align="center")
- data += " </tr>\n"
-
- return data
-
- def phase2(self, request, sourceNames, timestamps, eventGrid,
- sourceEvents):
- data = ""
- if not timestamps:
- return data
- # first pass: figure out the height of the chunks, populate grid
- grid = []
- for i in range(1+len(sourceNames)):
- grid.append([])
- # grid is a list of columns, one for the timestamps, and one per
- # event source. Each column is exactly the same height. Each element
- # of the list is a single <td> box.
- lastDate = time.strftime("<b>%d %b %Y</b>",
- time.localtime(util.now()))
- for r in range(0, len(timestamps)):
- chunkstrip = eventGrid[r]
- # chunkstrip is a horizontal strip of event blocks. Each block
- # is a vertical list of events, all for the same source.
- assert(len(chunkstrip) == len(sourceNames))
- maxRows = reduce(lambda x,y: max(x,y),
- map(lambda x: len(x), chunkstrip))
- for i in range(maxRows):
- if i != maxRows-1:
- grid[0].append(None)
- else:
- # timestamp goes at the bottom of the chunk
- stuff = []
- # add the date at the beginning (if it is not the same as
- # today's date), and each time it changes
- todayday = time.strftime("<b>%a</b>",
- time.localtime(timestamps[r]))
- today = time.strftime("<b>%d %b %Y</b>",
- time.localtime(timestamps[r]))
- if today != lastDate:
- stuff.append(todayday)
- stuff.append(today)
- lastDate = today
- stuff.append(
- time.strftime("%H:%M:%S",
- time.localtime(timestamps[r])))
- grid[0].append(Box(text=stuff, class_="Time",
- valign="bottom", align="center"))
-
- # at this point the timestamp column has been populated with
- # maxRows boxes, most None but the last one has the time string
- for c in range(0, len(chunkstrip)):
- block = chunkstrip[c]
- assert(block != None) # should be [] instead
- for i in range(maxRows - len(block)):
- # fill top of chunk with blank space
- grid[c+1].append(None)
- for i in range(len(block)):
- # so the events are bottom-justified
- b = IBox(block[i]).getBox()
- b.parms['valign'] = "top"
- b.parms['align'] = "center"
- grid[c+1].append(b)
- # now all the other columns have maxRows new boxes too
- # populate the last row, if empty
- gridlen = len(grid[0])
- for i in range(len(grid)):
- strip = grid[i]
- assert(len(strip) == gridlen)
- if strip[-1] == None:
- if sourceEvents[i-1]:
- filler = IBox(sourceEvents[i-1]).getBox()
- else:
- # this can happen if you delete part of the build history
- filler = Box(text=["?"], align="center")
- strip[-1] = filler
- strip[-1].parms['rowspan'] = 1
- # second pass: bubble the events upwards to un-occupied locations
- # Every square of the grid that has a None in it needs to have
- # something else take its place.
- noBubble = request.args.get("nobubble",['0'])
- noBubble = int(noBubble[0])
- if not noBubble:
- for col in range(len(grid)):
- strip = grid[col]
- if col == 1: # changes are handled differently
- for i in range(2, len(strip)+1):
- # only merge empty boxes. Don't bubble commit boxes.
- if strip[-i] == None:
- next = strip[-i+1]
- assert(next)
- if next:
- #if not next.event:
- if next.spacer:
- # bubble the empty box up
- strip[-i] = next
- strip[-i].parms['rowspan'] += 1
- strip[-i+1] = None
- else:
- # we are above a commit box. Leave it
- # be, and turn the current box into an
- # empty one
- strip[-i] = Box([], rowspan=1,
- comment="commit bubble")
- strip[-i].spacer = True
- else:
- # we are above another empty box, which
- # somehow wasn't already converted.
- # Shouldn't happen
- pass
- else:
- for i in range(2, len(strip)+1):
- # strip[-i] will go from next-to-last back to first
- if strip[-i] == None:
- # bubble previous item up
- assert(strip[-i+1] != None)
- strip[-i] = strip[-i+1]
- strip[-i].parms['rowspan'] += 1
- strip[-i+1] = None
- else:
- strip[-i].parms['rowspan'] = 1
- # third pass: render the HTML table
- for i in range(gridlen):
-            data += " <tr>\n"
- for strip in grid:
- b = strip[i]
- if b:
- data += b.td()
- else:
- if noBubble:
- data += td([])
- # Nones are left empty, rowspan should make it all fit
- data += " </tr>\n"
- return data
-
-
-class CWSStatusResource(HtmlResource):
- """This builds the main status page, with the waterfall display, and
- all child pages."""
- title = "BuildBot"
- def __init__(self, status, changemaster, categories, css=None, branches=None, cws_type='new'):
- HtmlResource.__init__(self)
- self.status = status
- self.changemaster = changemaster
- self.categories = categories
- p = self.status.getProjectName()
- if p:
- self.title = "BuildBot: %s" % p
- self.css = css
- self.branches = branches
- self.cws_type = cws_type
-
- def body(self, request):
- "This method builds the main waterfall display."
-
- data = ''
-
- projectName = self.status.getProjectName()
- projectURL = self.status.getProjectURL()
- buildbotURL = self.status.getBuildbotURL()
-
- phase = request.args.get("phase",["2"])
- phase = int(phase[0])
-
- showBuilders = request.args.get("show", None)
- allBuilders = self.status.getBuilderNames(categories=self.categories)
- if showBuilders:
- builderNames = []
- for b in showBuilders:
- if b not in allBuilders:
- continue
- if b in builderNames:
- continue
- builderNames.append(b)
- else:
- builderNames = allBuilders
- builders = map(lambda name: self.status.getBuilder(name),
- builderNames)
-
- if phase == -1:
- return self.body0(request, builders)
- (changeNames, builderNames, timestamps, eventGrid, sourceEvents) = \
- self.buildGrid(request, builders)
- if phase == 0:
- return self.phase0(request, (changeNames + builderNames),
- timestamps, eventGrid)
- # start the table: top-header material
- data += '<table border="0" cellspacing="0">\n'
-
- if projectName and projectURL:
- # TODO: this is going to look really ugly
- topleft = "<a href=\"%s\">%s</a><br /><a href=\"%s\">slave_view</a>" % \
- (projectURL, projectName, buildbotURL)
- #else:
- #topright = "last build"
- data += ' <tr class="LastBuild">\n'
- data += td(topleft, align="left", class_="Project")
- #data += td(topright, align="right", class_="Project")
- #for b in builders:
- # box = ITopBox(b).getBox()
- # data += box.td(align="center")
- #data += " </tr>\n"
-
- #data += ' <tr class="Activity">\n'
- #data += td('current activity', align='right', colspan=2)
- #for b in builders:
- # box = ICurrentBox(b).getBox(self.status)
- # data += box.td(align="center")
- #data += " </tr>\n"
-
- #data += " <tr>\n"
- #TZ = time.tzname[time.daylight]
- #data += td("time (%s)" % TZ, align="center", class_="Time")
- #name = changeNames[0]
- #data += td(
- # "<a href=\"%s\">%s</a>" % (urllib.quote(name, safe=''), name),
- # align="center", class_="Change")
- #for name in builderNames:
- # data += td(
- # #"<a href=\"%s\">%s</a>" % (request.childLink(name), name),
- # "<a href=\"%s\">%s</a>" % (urllib.quote(name, safe=''), name),
- # align="center", class_="Builder")
- #data += " </tr>\n"
-
- blockList = []
-
-        for j in range(0, len(eventGrid)):
-            col = eventGrid[j]
-            for k in range(0, len(col)):
- block = col[k]
-
- for i in range(len(block)):
- blockList.append(block[i])
-
- TZ = time.tzname[time.daylight]
- numBlock = len(blockList)
- data += td("time (%s)" % TZ, align="center", class_="Time", colspan=numBlock)
- data += " </tr>\n"
-
- data += " <tr> \n"
- data += "<td></td>\n"
-
- p = getcws.GetCWS(self.cws_type)
- branchList = p.getCWSs()
-
-
-        for i in range(0, len(blockList)):
- branch, revision, patch = blockList[i].getSourceStamp()
- if branch and branch in branchList:
- start, finish = blockList[i].getTimes()
-
- if start:
- start = time.strftime("%d %b %Y %H:%M",time.localtime(start))
- else:
- start = time.strftime("%d %b %Y %H:%M",time.localtime(util.now()))
- if finish:
- finish = time.strftime("%H:%M",time.localtime(finish))
- else:
- finish = time.strftime("%H:%M",time.localtime(util.now()))
-
-                box1 = Box(text=["%s-%s" % (start, finish)], align="center")
- data += box1.td(valign="top", align="center", class_="Time")
- data += " </tr> \n"
-
-
- if self.branches:
-
- #branch_file = open(self.branches, 'r')
-
- #branchList = branch_file.readlines()
-
- #p = getcws.GetCWS(self.cws_type)
- #branchList = p.getCWSs()
-
- last_time = -1
- trcolor = 1
- #for row_branch in branch_file.readlines():
- for row_branch in branchList:
- row_branch = row_branch.replace("\r","")
- row_branch = row_branch.replace("\n","")
- if trcolor == 1:
- data += " <tr border=\"0\" bgcolor=\"#fffccc\">\n"
- trcolor = 0
- else:
- data += " <tr border=\"0\" bgcolor=\"#fffff0\">\n"
- trcolor = 1
- #data += td("%s" % row_branch, align="center")
- branch_box = Box(text=["%s"%row_branch], align="center")
- data += branch_box.td(class_="branch_box")
- #last_time = timestamps[r]
-
- for i in range(len(blockList)):
- #text = block[i].getBuild()
- branch, revision, patch = blockList[i].getSourceStamp()
- slave = blockList[i].getBuilder().getName()
- boxclass = None
- if branch and (branch in branchList):
- if (row_branch == branch):
- box = IBox(blockList[i]).getBox()
- text = blockList[i].getText()
- if ("failed" in text or "exception" in text):
- boxclass = "failure"
- elif ("successful" in text):
- boxclass = "success"
- else:
- boxclass = "empty"
- #box1 = Box(text=["%s" %text], align="center")
- else:
- box = Box(text=[""], align="center")
- #box1 = Box(text=[""], align="center")
- data += box.td(valign="top", align="center", class_=boxclass)
-
- #data += box1.td(valign="top", align="center", class_=boxclass)
- data += " </tr>\n"
- #row_branch = branch_file.readline()
- #branch_file.close()
- else:
-            data += "<tr><td>No branches listed in branch_file.txt, or no branch_file.txt specified in master.cfg</td></tr>\n"
-
- #if phase == 1:
- # f = self.phase2
- #else:
- # f = self.phase2
- #data += f(request, changeNames + builderNames, timestamps, eventGrid,
- # sourceEvents)
-
- data += "</table>\n"
-
- data += "<hr />\n"
-
- data += "<a href=\"http://buildbot.sourceforge.net/\">Buildbot</a>"
- data += "-%s " % version
- if projectName:
- data += "working for the "
- if projectURL:
- data += "<a href=\"%s\">%s</a> project." % (projectURL,
- projectName)
- else:
- data += "%s project." % projectName
- data += "<br />\n"
- # TODO: push this to the right edge, if possible
- data += ("Page built: " +
- time.strftime("%a %d %b %Y %H:%M:%S",
- time.localtime(util.now()))
- + "\n")
- return data
-
- def body0(self, request, builders):
- # build the waterfall display
- data = ""
- data += "<h2>Basic display</h2>\n"
- data += "<p>See <a href=\"%s\">here</a>" % \
- urllib.quote(request.childLink("waterfall"))
- data += " for the waterfall display</p>\n"
-
- data += '<table border="0" cellspacing="0">\n'
- names = map(lambda builder: builder.name, builders)
-
- # the top row is two blank spaces, then the top-level status boxes
- data += " <tr>\n"
- data += td("", colspan=2)
- for b in builders:
- text = ""
- color = "#ca88f7"
- state, builds = b.getState()
- if state != "offline":
- text += "%s<br />\n" % state #b.getCurrentBig().text[0]
- else:
- text += "OFFLINE<br />\n"
- color = "#ffe0e0"
- data += td(text, align="center", bgcolor=color)
-
- # the next row has the column headers: time, changes, builder names
- data += " <tr>\n"
- data += td("Time", align="center")
- data += td("Changes", align="center")
- for name in names:
- data += td(
- "<a href=\"%s\">%s</a>" % (urllib.quote(request.childLink(name)), name),
- align="center")
- data += " </tr>\n"
-
- # all further rows involve timestamps, commit events, and build events
- data += " <tr>\n"
- data += td("04:00", align="bottom")
- data += td("fred", align="center")
- for name in names:
- data += td("stuff", align="center", bgcolor="red")
- data += " </tr>\n"
-
- data += "</table>\n"
- return data
-
- def buildGrid(self, request, builders):
- debug = False
-
- # XXX: see if we can use a cached copy
-
- # first step is to walk backwards in time, asking each column
- # (commit, all builders) if they have any events there. Build up the
- # array of events, and stop when we have a reasonable number.
-
- commit_source = self.changemaster
-
- lastEventTime = util.now()
- sources = builders
- changeNames = ["changes"]
- builderNames = map(lambda builder: builder.getName(), builders)
- sourceNames = changeNames + builderNames
- sourceEvents = []
- sourceGenerators = []
- for s in sources:
- gen = insertGaps(s.eventGenerator(), lastEventTime)
- sourceGenerators.append(gen)
- # get the first event
- try:
- e = gen.next()
- event = interfaces.IStatusEvent(e)
- if debug:
- log.msg("gen %s gave1 %s" % (gen, event.getText()))
- except StopIteration:
- event = None
- sourceEvents.append(event)
- eventGrid = []
- timestamps = []
- spanLength = 10 # ten-second chunks
- tooOld = util.now() - 12*60*60 # never show more than 12 hours
- maxPageLen = 400
-
- lastEventTime = 0
- for e in sourceEvents:
- if e and e.getTimes()[0] > lastEventTime:
- lastEventTime = e.getTimes()[0]
- if lastEventTime == 0:
- lastEventTime = util.now()
-
- spanStart = lastEventTime - spanLength
- debugGather = 0
-
- while 1:
- if debugGather: log.msg("checking (%s,]" % spanStart)
- # the tableau of potential events is in sourceEvents[]. The
- # window crawls backwards, and we examine one source at a time.
-            # If the source's top-most event is in the window, it is pushed
- # onto the events[] array and the tableau is refilled. This
- # continues until the tableau event is not in the window (or is
- # missing).
-
- spanEvents = [] # for all sources, in this span. row of eventGrid
- firstTimestamp = None # timestamp of first event in the span
- lastTimestamp = None # last pre-span event, for next span
-
- for c in range(len(sourceGenerators)):
- events = [] # for this source, in this span. cell of eventGrid
- event = sourceEvents[c]
- while event and spanStart < event.getTimes()[0]:
- # to look at windows that don't end with the present,
- # condition the .append on event.time <= spanFinish
- if not IBox(event, None):
- log.msg("BAD EVENT", event, event.getText())
- assert 0
- if debug:
- log.msg("pushing", event.getText(), event)
- if isinstance(event, builder.BuildStatus):
- events.append(event)
- starts, finishes = event.getTimes()
- firstTimestamp = util.earlier(firstTimestamp, starts)
- try:
- event = sourceGenerators[c].next()
- #event = interfaces.IStatusEvent(event)
- if debug:
- log.msg("gen[%s] gave2 %s" % (sourceNames[c],
- event.getText()))
- except StopIteration:
- event = None
- if debug:
- log.msg("finished span")
-
- if event:
- # this is the last pre-span event for this source
- lastTimestamp = util.later(lastTimestamp,
- event.getTimes()[0])
- if debugGather:
- log.msg(" got %s from %s" % (events, sourceNames[c]))
- sourceEvents[c] = event # refill the tableau
- spanEvents.append(events)
-
- if firstTimestamp is not None:
- eventGrid.append(spanEvents)
- timestamps.append(firstTimestamp)
-
-
- if lastTimestamp:
- spanStart = lastTimestamp - spanLength
- else:
- # no more events
- break
- if lastTimestamp < tooOld:
- pass
- #break
- if len(timestamps) > maxPageLen:
- break
-
-
- # now loop
-
- # loop is finished. now we have eventGrid[] and timestamps[]
- if debugGather: log.msg("finished loop")
- assert(len(timestamps) == len(eventGrid))
- return (changeNames, builderNames, timestamps, eventGrid, sourceEvents)
-
- def phase0(self, request, sourceNames, timestamps, eventGrid):
- # phase0 rendering
- if not timestamps:
- return "no events"
- data = ""
- for r in range(0, len(timestamps)):
- data += "<p>\n"
- data += "[%s]<br />" % timestamps[r]
- row = eventGrid[r]
- assert(len(row) == len(sourceNames))
- for c in range(0, len(row)):
- if row[c]:
- data += "<b>%s</b><br />\n" % sourceNames[c]
- for e in row[c]:
- log.msg("Event", r, c, sourceNames[c], e.getText())
- lognames = [loog.getName() for loog in e.getLogs()]
- data += "%s: %s: %s %s<br />" % (e.getText(),
- e.getTimes()[0],
- e.getColor(),
- lognames)
- else:
- data += "<b>%s</b> [none]<br />\n" % sourceNames[c]
- return data
-
- def phase1(self, request, sourceNames, timestamps, eventGrid,
- sourceEvents):
- # phase1 rendering: table, but boxes do not overlap
- data = ""
- if not timestamps:
- return data
- lastDate = None
- for r in range(0, len(timestamps)):
- chunkstrip = eventGrid[r]
- # chunkstrip is a horizontal strip of event blocks. Each block
- # is a vertical list of events, all for the same source.
- assert(len(chunkstrip) == len(sourceNames))
- maxRows = reduce(lambda x,y: max(x,y),
- map(lambda x: len(x), chunkstrip))
- for i in range(maxRows):
-                data += " <tr>\n"
- if i == 0:
- stuff = []
- # add the date at the beginning, and each time it changes
- today = time.strftime("<b>%d %b %Y</b>",
- time.localtime(timestamps[r]))
- todayday = time.strftime("<b>%a</b>",
- time.localtime(timestamps[r]))
- if today != lastDate:
- stuff.append(todayday)
- stuff.append(today)
- lastDate = today
- stuff.append(
- time.strftime("%H:%M:%S",
- time.localtime(timestamps[r])))
- data += td(stuff, valign="bottom", align="center",
- rowspan=maxRows, class_="Time")
- for c in range(0, len(chunkstrip)):
- block = chunkstrip[c]
- assert(block != None) # should be [] instead
- # bottom-justify
- offset = maxRows - len(block)
- if i < offset:
- data += td("")
- else:
- e = block[i-offset]
- box = IBox(e).getBox()
- box.parms["show_idle"] = 1
- data += box.td(valign="top", align="center")
- data += " </tr>\n"
-
- return data
-
- def phase2(self, request, sourceNames, timestamps, eventGrid,
- sourceEvents):
- data = ""
- if not timestamps:
- return data
- # first pass: figure out the height of the chunks, populate grid
- grid = []
- for i in range(1+len(sourceNames)):
- grid.append([])
- # grid is a list of columns, one for the timestamps, and one per
- # event source. Each column is exactly the same height. Each element
- # of the list is a single <td> box.
- lastDate = time.strftime("<b>%d %b %Y</b>",
- time.localtime(util.now()))
- for r in range(0, len(timestamps)):
- chunkstrip = eventGrid[r]
- # chunkstrip is a horizontal strip of event blocks. Each block
- # is a vertical list of events, all for the same source.
- assert(len(chunkstrip) == len(sourceNames))
- maxRows = reduce(lambda x,y: max(x,y),
- map(lambda x: len(x), chunkstrip))
- for i in range(maxRows):
- if i != maxRows-1:
- grid[0].append(None)
- else:
- # timestamp goes at the bottom of the chunk
- stuff = []
- # add the date at the beginning (if it is not the same as
- # today's date), and each time it changes
- todayday = time.strftime("<b>%a</b>",
- time.localtime(timestamps[r]))
- today = time.strftime("<b>%d %b %Y</b>",
- time.localtime(timestamps[r]))
- if today != lastDate:
- stuff.append(todayday)
- stuff.append(today)
- lastDate = today
- stuff.append(
- time.strftime("%H:%M:%S",
- time.localtime(timestamps[r])))
- grid[0].append(Box(text=stuff, class_="Time",
- valign="bottom", align="center"))
-
- # at this point the timestamp column has been populated with
- # maxRows boxes, most None but the last one has the time string
- for c in range(0, len(chunkstrip)):
- block = chunkstrip[c]
- assert(block != None) # should be [] instead
- for i in range(maxRows - len(block)):
- # fill top of chunk with blank space
- grid[c+1].append(None)
- for i in range(len(block)):
- # so the events are bottom-justified
- b = IBox(block[i]).getBox()
- b.parms['valign'] = "top"
- b.parms['align'] = "center"
- grid[c+1].append(b)
- # now all the other columns have maxRows new boxes too
- # populate the last row, if empty
- gridlen = len(grid[0])
- for i in range(len(grid)):
- strip = grid[i]
- assert(len(strip) == gridlen)
- if strip[-1] == None:
- if sourceEvents[i-1]:
- filler = IBox(sourceEvents[i-1]).getBox()
- else:
- # this can happen if you delete part of the build history
- filler = Box(text=["?"], align="center")
- strip[-1] = filler
- strip[-1].parms['rowspan'] = 1
- # second pass: bubble the events upwards to un-occupied locations
- # Every square of the grid that has a None in it needs to have
- # something else take its place.
- noBubble = request.args.get("nobubble",['0'])
- noBubble = int(noBubble[0])
- if not noBubble:
- for col in range(len(grid)):
- strip = grid[col]
- if col == 1: # changes are handled differently
- for i in range(2, len(strip)+1):
- # only merge empty boxes. Don't bubble commit boxes.
- if strip[-i] == None:
- next = strip[-i+1]
- assert(next)
- if next:
- #if not next.event:
- if next.spacer:
- # bubble the empty box up
- strip[-i] = next
- strip[-i].parms['rowspan'] += 1
- strip[-i+1] = None
- else:
- # we are above a commit box. Leave it
- # be, and turn the current box into an
- # empty one
- strip[-i] = Box([], rowspan=1,
- comment="commit bubble")
- strip[-i].spacer = True
- else:
- # we are above another empty box, which
- # somehow wasn't already converted.
- # Shouldn't happen
- pass
- else:
- for i in range(2, len(strip)+1):
- # strip[-i] will go from next-to-last back to first
- if strip[-i] == None:
- # bubble previous item up
- assert(strip[-i+1] != None)
- strip[-i] = strip[-i+1]
- strip[-i].parms['rowspan'] += 1
- strip[-i+1] = None
- else:
- strip[-i].parms['rowspan'] = 1
- # third pass: render the HTML table
- for i in range(gridlen):
-            data += " <tr>\n"
- for strip in grid:
- b = strip[i]
- if b:
- data += b.td()
- else:
- if noBubble:
- data += td([])
- # Nones are left empty, rowspan should make it all fit
- data += " </tr>\n"
- return data
-
-
-
-class StatusResource(Resource):
- status = None
- control = None
- favicon = None
- robots_txt = None
-
- def __init__(self, status, control, changemaster, categories, css, branches):
- """
- @type status: L{buildbot.status.builder.Status}
- @type control: L{buildbot.master.Control}
- @type changemaster: L{buildbot.changes.changes.ChangeMaster}
- """
- Resource.__init__(self)
- self.status = status
- self.control = control
- self.changemaster = changemaster
- self.categories = categories
- self.css = css
- self.branches = branches
- waterfall = WaterfallStatusResource(self.status, changemaster,
- categories, css)
- self.putChild("", waterfall)
-
- def render(self, request):
- request.redirect(request.prePathURL() + '/')
- request.finish()
-
- def getChild(self, path, request):
- if path == "robots.txt" and self.robots_txt:
- return static.File(self.robots_txt)
- if path == "buildbot.css" and self.css:
- return static.File(self.css)
- if path == "changes":
- return StatusResourceChanges(self.status, self.changemaster)
- if path == "favicon.ico":
- if self.favicon:
- return static.File(self.favicon)
- return NoResource("No favicon.ico registered")
-
- if path in self.status.getBuilderNames():
- builder = self.status.getBuilder(path)
- control = None
- if self.control:
- control = self.control.getBuilder(path)
- return StatusResourceBuilder(self.status, builder, control)
-
- if path == "cws_view_ready":
- return CWSStatusResource(self.status, [],
- None, self.css, self.branches, 'ready')
-
- if path == "cws_view_new":
- return CWSStatusResource(self.status, [],
- None, self.css, self.branches, 'new')
-
-
- return NoResource("No such Builder '%s'" % path)
-
-# the icon is sibpath(__file__, "../buildbot.png"). This is for portability.
-up = os.path.dirname
-buildbot_icon = os.path.abspath(os.path.join(up(up(__file__)),
- "buildbot.png"))
-buildbot_css = os.path.abspath(os.path.join(up(__file__), "classic.css"))
-
-class Waterfall(base.StatusReceiverMultiService):
- """I implement the primary web-page status interface, called a 'Waterfall
- Display' because builds and steps are presented in a grid of boxes which
- move downwards over time. The top edge is always the present. Each column
- represents a single builder. Each box describes a single Step, which may
- have logfiles or other status information.
-
- All these pages are served via a web server of some sort. The simplest
- approach is to let the buildmaster run its own webserver, on a given TCP
- port, but it can also publish its pages to a L{twisted.web.distrib}
- distributed web server (which lets the buildbot pages be a subset of some
- other web server).
-
- Since 0.6.3, BuildBot defines class attributes on elements so they can be
- styled with CSS stylesheets. Buildbot uses some generic classes to
- identify the type of object, and some more specific classes for the
- various kinds of those types. It does this by specifying both in the
- class attributes where applicable, separated by a space. It is important
- that in your CSS you declare the more generic class styles above the more
- specific ones. For example, first define a style for .Event, and below
- that for .SUCCESS
-
- The following CSS class names are used:
- - Activity, Event, BuildStep, LastBuild: general classes
- - waiting, interlocked, building, offline, idle: Activity states
- - start, running, success, failure, warnings, skipped, exception:
- LastBuild and BuildStep states
- - Change: box with change
- - Builder: box for builder name (at top)
- - Project
- - Time
-
- @type parent: L{buildbot.master.BuildMaster}
- @ivar parent: like all status plugins, this object is a child of the
- BuildMaster, so C{.parent} points to a
- L{buildbot.master.BuildMaster} instance, through which
- the status-reporting object is acquired.
- """
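-    # An illustrative stylesheet fragment following the generic-before-specific
-    # ordering described above (not the shipped classic.css, just an example):
-    #
-    #   td.Event, td.BuildStep, td.LastBuild { border: 1px solid #aaa; }
-    #   td.success { background-color: #72ff75; }
-    #   td.failure { background-color: red; }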
-
- compare_attrs = ["http_port", "distrib_port", "allowForce",
- "categories", "css", "favicon", "robots_txt", "branches"]
-
- def __init__(self, http_port=None, distrib_port=None, allowForce=True,
- categories=None, css=buildbot_css, favicon=buildbot_icon,
- robots_txt=None, branches=None):
-        """To have the buildbot run its own web server, pass a port number to
-        C{http_port}. To have it publish its pages through a web.distrib
-        server instead, pass a socket pathname or port to C{distrib_port}.
-
- @type http_port: int or L{twisted.application.strports} string
- @param http_port: a strports specification describing which port the
- buildbot should use for its web server, with the
- Waterfall display as the root page. For backwards
- compatibility this can also be an int. Use
- 'tcp:8000' to listen on that port, or
- 'tcp:12345:interface=127.0.0.1' if you only want
- local processes to connect to it (perhaps because
- you are using an HTTP reverse proxy to make the
- buildbot available to the outside world, and do not
- want to make the raw port visible).
-
- @type distrib_port: int or L{twisted.application.strports} string
- @param distrib_port: Use this if you want to publish the Waterfall
- page using web.distrib instead. The most common
- case is to provide a string that is an absolute
- pathname to the unix socket on which the
- publisher should listen
- (C{os.path.expanduser(~/.twistd-web-pb)} will
- match the default settings of a standard
- twisted.web 'personal web server'). Another
- possibility is to pass an integer, which means
- the publisher should listen on a TCP socket,
- allowing the web server to be on a different
- machine entirely. Both forms are provided for
- backwards compatibility; the preferred form is a
- strports specification like
- 'unix:/home/buildbot/.twistd-web-pb'. Providing
- a non-absolute pathname will probably confuse
- the strports parser.
-
- @type allowForce: bool
- @param allowForce: if True, present a 'Force Build' button on the
- per-Builder page that allows visitors to the web
- site to initiate a build. If False, don't provide
- this button.
-
- @type favicon: string
- @param favicon: if set, provide the pathname of an image file that
- will be used for the 'favicon.ico' resource. Many
- browsers automatically request this file and use it
- as an icon in any bookmark generated from this site.
- Defaults to the buildbot/buildbot.png image provided
- in the distribution. Can be set to None to avoid
- using a favicon at all.
-
- @type robots_txt: string
- @param robots_txt: if set, provide the pathname of a robots.txt file.
- Many search engines request this file and obey the
- rules in it. E.g. to disallow them to crawl the
- status page, put the following two lines in
- robots.txt:
- User-agent: *
- Disallow: /
- """
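-        # Typical usage from a master.cfg (a sketch; 'c' is the usual name for
-        # the BuildmasterConfig dictionary, adjust to your configuration):
-        #
-        #   from buildbot.status import html
-        #   c['status'].append(html.Waterfall(http_port=8080, allowForce=True))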
-
- base.StatusReceiverMultiService.__init__(self)
- assert allowForce in (True, False) # TODO: implement others
- if type(http_port) is int:
- http_port = "tcp:%d" % http_port
- self.http_port = http_port
- if distrib_port is not None:
- if type(distrib_port) is int:
- distrib_port = "tcp:%d" % distrib_port
- if distrib_port[0] in "/~.": # pathnames
- distrib_port = "unix:%s" % distrib_port
- self.distrib_port = distrib_port
- self.allowForce = allowForce
- self.categories = categories
- self.css = css
- self.favicon = favicon
- self.robots_txt = robots_txt
- self.branches = branches
-
- def __repr__(self):
- if self.http_port is None:
- return "<Waterfall on path %s>" % self.distrib_port
- if self.distrib_port is None:
- return "<Waterfall on port %s>" % self.http_port
- return "<Waterfall on port %s and path %s>" % (self.http_port,
- self.distrib_port)
-
- def setServiceParent(self, parent):
- """
- @type parent: L{buildbot.master.BuildMaster}
- """
- base.StatusReceiverMultiService.setServiceParent(self, parent)
- self.setup()
-
- def setup(self):
- status = self.parent.getStatus()
- if self.allowForce:
- control = interfaces.IControl(self.parent)
- else:
- control = None
- change_svc = self.parent.change_svc
- sr = StatusResource(status, control, change_svc, self.categories,
- self.css, self.branches)
- sr.favicon = self.favicon
- sr.robots_txt = self.robots_txt
- self.site = server.Site(sr)
-
- if self.http_port is not None:
- s = strports.service(self.http_port, self.site)
- s.setServiceParent(self)
- if self.distrib_port is not None:
- f = pb.PBServerFactory(distrib.ResourcePublisher(self.site))
- s = strports.service(self.distrib_port, f)
- s.setServiceParent(self)
diff --git a/buildbot/buildbot-source/buildbot/status/html.py.bakforCWS_View b/buildbot/buildbot-source/buildbot/status/html.py.bakforCWS_View
deleted file mode 100644
index 7d4926b46..000000000
--- a/buildbot/buildbot-source/buildbot/status/html.py.bakforCWS_View
+++ /dev/null
@@ -1,1744 +0,0 @@
-# -*- test-case-name: buildbot.test.test_web -*-
-
-from __future__ import generators
-
-from twisted.python import log, components
-from twisted.python.util import sibpath
-import urllib, re
-
-from twisted.internet import defer, reactor
-from twisted.web.resource import Resource
-from twisted.web import static, html, server, distrib
-from twisted.web.error import NoResource
-from twisted.web.util import Redirect, DeferredResource
-from twisted.application import strports
-from twisted.spread import pb
-
-from buildbot.twcompat import implements, Interface
-
-import string, types, time, os.path
-
-from buildbot import interfaces, util
-from buildbot import version
-from buildbot.sourcestamp import SourceStamp
-from buildbot.status import builder, base
-from buildbot.changes import changes
-from buildbot.process.base import BuildRequest
-
-class ITopBox(Interface):
- """I represent a box in the top row of the waterfall display: the one
- which shows the status of the last build for each builder."""
- pass
-
-class ICurrentBox(Interface):
- """I represent the 'current activity' box, just above the builder name."""
- pass
-
-class IBox(Interface):
- """I represent a box in the waterfall display."""
- pass
-
-class IHTMLLog(Interface):
- pass
-
-ROW_TEMPLATE = '''
-<div class="row">
- <span class="label">%(label)s</span>
- <span class="field">%(field)s</span>
-</div>'''
-
-def make_row(label, field):
- """Create a name/value row for the HTML.
-
- `label` is plain text; it will be HTML-encoded.
-
- `field` is a bit of HTML structure; it will not be encoded in
- any way.
- """
- label = html.escape(label)
- return ROW_TEMPLATE % {"label": label, "field": field}
-
-colormap = {
- 'green': '#72ff75',
- }
-def td(text="", parms={}, **props):
- data = ""
- data += " "
- #if not props.has_key("border"):
- # props["border"] = 1
- props.update(parms)
- if props.has_key("bgcolor"):
- props["bgcolor"] = colormap.get(props["bgcolor"], props["bgcolor"])
- comment = props.get("comment", None)
- if comment:
- data += "<!-- %s -->" % comment
- data += "<td"
- class_ = props.get('class_', None)
- if class_:
- props["class"] = class_
- for prop in ("align", "bgcolor", "colspan", "rowspan", "border",
- "valign", "halign", "class"):
- p = props.get(prop, None)
- if p != None:
- data += " %s=\"%s\"" % (prop, p)
- data += ">"
- if not text:
- text = "&nbsp;"
- if type(text) == types.ListType:
- data += string.join(text, "<br />")
- else:
- data += text
- data += "</td>\n"
- return data
-
-def build_get_class(b):
- """
- Return the class to use for a finished build or buildstep,
- based on the result.
- """
- # FIXME: this duplicated getResults handling might need to be fixed
- result = b.getResults()
- #print "THOMAS: result for b %r: %r" % (b, result)
- if isinstance(b, builder.BuildStatus):
- result = b.getResults()
- elif isinstance(b, builder.BuildStepStatus):
- result = b.getResults()[0]
- # after forcing a build, b.getResults() returns ((None, []), []), ugh
- if isinstance(result, tuple):
- result = result[0]
- else:
- raise TypeError, "%r is not a BuildStatus or BuildStepStatus" % b
-
- if result == None:
- # FIXME: this happens when a buildstep is running ?
- return "running"
- return builder.Results[result]
-
-class Box:
- # a Box wraps an Event. The Box has HTML <td> parameters that Events
- # lack, and it has a base URL to which each File's name is relative.
- # Events don't know about HTML.
- spacer = False
- def __init__(self, text=[], color=None, class_=None, urlbase=None,
- **parms):
- self.text = text
- self.color = color
- self.class_ = class_
- self.urlbase = urlbase
- self.show_idle = 0
- if parms.has_key('show_idle'):
- del parms['show_idle']
- self.show_idle = 1
-
- self.parms = parms
- # parms is a dict of HTML parameters for the <td> element that will
- # represent this Event in the waterfall display.
-
- def td(self, **props):
- props.update(self.parms)
- text = self.text
- if not text and self.show_idle:
- text = ["[idle]"]
- return td(text, props, bgcolor=self.color, class_=self.class_)
-
-
-class HtmlResource(Resource):
- css = None
- contentType = "text/html; charset=UTF-8"
- def render(self, request):
- data = self.content(request)
- request.setHeader("content-type", self.contentType)
- if request.method == "HEAD":
- request.setHeader("content-length", len(data))
- return ''
- return data
- title = "Dummy"
- def content(self, request):
- data = ('<!DOCTYPE html PUBLIC'
- ' "-//W3C//DTD XHTML 1.0 Transitional//EN"\n'
- '"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n'
- '<html'
- ' xmlns="http://www.w3.org/1999/xhtml"'
- ' lang="en"'
- ' xml:lang="en">\n')
- data += "<head>\n"
- data += " <title>" + self.title + "</title>\n"
- if self.css:
- # TODO: use some sort of relative link up to the root page, so
- # this css can be used from child pages too
- data += (' <link href="%s" rel="stylesheet" type="text/css"/>\n'
- % "buildbot.css")
- data += "</head>\n"
- data += '<body vlink="#800080">\n'
- data += self.body(request)
- data += "</body></html>\n"
- return data
- def body(self, request):
- return "Dummy\n"
-
-class StaticHTML(HtmlResource):
- def __init__(self, body, title):
- HtmlResource.__init__(self)
- self.bodyHTML = body
- self.title = title
- def body(self, request):
- return self.bodyHTML
-
-# $builder/builds/NN/stepname
-class StatusResourceBuildStep(HtmlResource):
- title = "Build Step"
-
- def __init__(self, status, step):
- HtmlResource.__init__(self)
- self.status = status
- self.step = step
-
- def body(self, request):
- s = self.step
- b = s.getBuild()
- data = "<h1>BuildStep %s:#%d:%s</h1>\n" % \
- (b.getBuilder().getName(), b.getNumber(), s.getName())
-
- if s.isFinished():
- data += ("<h2>Finished</h2>\n"
- "<p>%s</p>\n" % html.escape("%s" % s.getText()))
- else:
- data += ("<h2>Not Finished</h2>\n"
- "<p>ETA %s seconds</p>\n" % s.getETA())
-
- exp = s.getExpectations()
- if exp:
- data += ("<h2>Expectations</h2>\n"
- "<ul>\n")
- for e in exp:
- data += "<li>%s: current=%s, target=%s</li>\n" % \
- (html.escape(e[0]), e[1], e[2])
- data += "</ul>\n"
- logs = s.getLogs()
- if logs:
- data += ("<h2>Logs</h2>\n"
- "<ul>\n")
- for num in range(len(logs)):
- if logs[num].hasContents():
- # FIXME: If the step name has a / in it, this is broken
- # either way. If we quote it but say '/'s are safe,
- # it chops up the step name. If we quote it and '/'s
- # are not safe, it escapes the / that separates the
- # step name from the log number.
- data += '<li><a href="%s">%s</a></li>\n' % \
- (urllib.quote(request.childLink("%d" % num)),
- html.escape(logs[num].getName()))
- else:
- data += ('<li>%s</li>\n' %
- html.escape(logs[num].getName()))
- data += "</ul>\n"
-
- return data
-
- def getChild(self, path, request):
- logname = path
- try:
- log = self.step.getLogs()[int(logname)]
- if log.hasContents():
- return IHTMLLog(interfaces.IStatusLog(log))
- return NoResource("Empty Log '%s'" % logname)
- except (IndexError, ValueError):
- return NoResource("No such Log '%s'" % logname)
-
-# $builder/builds/NN/tests/TESTNAME
-class StatusResourceTestResult(HtmlResource):
- title = "Test Logs"
-
- def __init__(self, status, name, result):
- HtmlResource.__init__(self)
- self.status = status
- self.name = name
- self.result = result
-
- def body(self, request):
- dotname = ".".join(self.name)
- logs = self.result.getLogs()
- lognames = logs.keys()
- lognames.sort()
- data = "<h1>%s</h1>\n" % html.escape(dotname)
- for name in lognames:
- data += "<h2>%s</h2>\n" % html.escape(name)
- data += "<pre>" + logs[name] + "</pre>\n\n"
-
- return data
-
-
-# $builder/builds/NN/tests
-class StatusResourceTestResults(HtmlResource):
- title = "Test Results"
-
- def __init__(self, status, results):
- HtmlResource.__init__(self)
- self.status = status
- self.results = results
-
- def body(self, request):
- r = self.results
- data = "<h1>Test Results</h1>\n"
- data += "<ul>\n"
- testnames = r.keys()
- testnames.sort()
- for name in testnames:
- res = r[name]
- dotname = ".".join(name)
- data += " <li>%s: " % dotname
- # TODO: this could break on weird test names. At the moment,
- # test names only come from Trial tests, where the name
- # components must be legal python names, but that won't always
- # be a restriction.
- url = request.childLink(dotname)
- data += "<a href=\"%s\">%s</a>" % (url, " ".join(res.getText()))
- data += "</li>\n"
- data += "</ul>\n"
- return data
-
- def getChild(self, path, request):
- try:
- name = tuple(path.split("."))
- result = self.results[name]
- return StatusResourceTestResult(self.status, name, result)
- except KeyError:
- return NoResource("No such test name '%s'" % path)
-
-
-# $builder/builds/NN
-class StatusResourceBuild(HtmlResource):
- title = "Build"
-
- def __init__(self, status, build, builderControl, buildControl):
- HtmlResource.__init__(self)
- self.status = status
- self.build = build
- self.builderControl = builderControl
- self.control = buildControl
-
- def body(self, request):
- b = self.build
- buildbotURL = self.status.getBuildbotURL()
- projectName = self.status.getProjectName()
- data = '<div class="title"><a href="%s">%s</a></div>\n'%(buildbotURL,
- projectName)
- # the color in the following line gives python-mode trouble
- data += ("<h1>Build <a href=\"%s\">%s</a>:#%d</h1>\n"
- "<h2>Reason:</h2>\n%s\n"
- % (self.status.getURLForThing(b.getBuilder()),
- b.getBuilder().getName(), b.getNumber(),
- html.escape(b.getReason())))
-
- branch, revision, patch = b.getSourceStamp()
- data += "<h2>SourceStamp:</h2>\n"
- data += " <ul>\n"
- if branch:
- data += " <li>Branch: %s</li>\n" % html.escape(branch)
- if revision:
- data += " <li>Revision: %s</li>\n" % html.escape(str(revision))
- if patch:
- data += " <li>Patch: YES</li>\n" # TODO: provide link to .diff
- if b.getChanges():
- data += " <li>Changes: see below</li>\n"
- if (branch is None and revision is None and patch is None
- and not b.getChanges()):
- data += " <li>build of most recent revision</li>\n"
- data += " </ul>\n"
- if b.isFinished():
- data += "<h4>Buildslave: %s</h4>\n" % html.escape(b.getSlavename())
- data += "<h2>Results:</h2>\n"
- data += " ".join(b.getText()) + "\n"
- if b.getTestResults():
- url = request.childLink("tests")
- data += "<h3><a href=\"%s\">test results</a></h3>\n" % url
- else:
- data += "<h2>Build In Progress</h2>"
- if self.control is not None:
- stopURL = urllib.quote(request.childLink("stop"))
- data += """
- <form action="%s" class='command stopbuild'>
- <p>To stop this build, fill out the following fields and
- push the 'Stop' button</p>\n""" % stopURL
- data += make_row("Your name:",
- "<input type='text' name='username' />")
- data += make_row("Reason for stopping build:",
- "<input type='text' name='comments' />")
- data += """<input type="submit" value="Stop Builder" />
- </form>
- """
-
- if b.isFinished() and self.builderControl is not None:
- data += "<h3>Resubmit Build:</h3>\n"
- # can we rebuild it exactly?
- exactly = (revision is not None) or b.getChanges()
- if exactly:
- data += ("<p>This tree was built from a specific set of \n"
- "source files, and can be rebuilt exactly</p>\n")
- else:
- data += ("<p>This tree was built from the most recent "
- "revision")
- if branch:
- data += " (along some branch)"
- data += (" and thus it might not be possible to rebuild it \n"
- "exactly. Any changes that have been committed \n"
- "after this build was started <b>will</b> be \n"
- "included in a rebuild.</p>\n")
- rebuildURL = urllib.quote(request.childLink("rebuild"))
- data += ('<form action="%s" class="command rebuild">\n'
- % rebuildURL)
- data += make_row("Your name:",
- "<input type='text' name='username' />")
- data += make_row("Reason for re-running build:",
- "<input type='text' name='comments' />")
- data += '<input type="submit" value="Rebuild" />\n'
-
- data += "<h2>Steps and Logfiles:</h2>\n"
- if b.getLogs():
- data += "<ol>\n"
- for s in b.getSteps():
- data += (" <li><a href=\"%s\">%s</a> [%s]\n"
- % (self.status.getURLForThing(s), s.getName(),
- " ".join(s.getText())))
- if s.getLogs():
- data += " <ol>\n"
- for logfile in s.getLogs():
- data += (" <li><a href=\"%s\">%s</a></li>\n" %
- (self.status.getURLForThing(logfile),
- logfile.getName()))
- data += " </ol>\n"
- data += " </li>\n"
- data += "</ol>\n"
-
- data += ("<h2>Blamelist:</h2>\n"
- " <ol>\n")
- for who in b.getResponsibleUsers():
- data += " <li>%s</li>\n" % html.escape(who)
- data += (" </ol>\n"
- "<h2>All Changes</h2>\n")
- changes = b.getChanges()
- if changes:
- data += "<ol>\n"
- for c in changes:
- data += "<li>" + c.asHTML() + "</li>\n"
- data += "</ol>\n"
- #data += html.PRE(b.changesText()) # TODO
- return data
-
- def stop(self, request):
- log.msg("web stopBuild of build %s:%s" % \
- (self.build.getBuilder().getName(),
- self.build.getNumber()))
- name = request.args.get("username", ["<unknown>"])[0]
- comments = request.args.get("comments", ["<no reason specified>"])[0]
- reason = ("The web-page 'stop build' button was pressed by "
- "'%s': %s\n" % (name, comments))
- self.control.stopBuild(reason)
- # we're at http://localhost:8080/svn-hello/builds/5/stop?[args] and
- # we want to go to: http://localhost:8080/svn-hello/builds/5 or
- # http://localhost:8080/
- #
- #return Redirect("../%d" % self.build.getNumber())
- r = Redirect("../../..")
- d = defer.Deferred()
- reactor.callLater(1, d.callback, r)
- return DeferredResource(d)
-
- def rebuild(self, request):
- log.msg("web rebuild of build %s:%s" % \
- (self.build.getBuilder().getName(),
- self.build.getNumber()))
- name = request.args.get("username", ["<unknown>"])[0]
- comments = request.args.get("comments", ["<no reason specified>"])[0]
- reason = ("The web-page 'rebuild' button was pressed by "
- "'%s': %s\n" % (name, comments))
- if not self.builderControl or not self.build.isFinished():
- log.msg("could not rebuild: bc=%s, isFinished=%s"
- % (self.builderControl, self.build.isFinished()))
- # TODO: indicate an error
- else:
- self.builderControl.resubmitBuild(self.build, reason)
- # we're at http://localhost:8080/svn-hello/builds/5/rebuild?[args] and
- # we want to go to the top, at http://localhost:8080/
- r = Redirect("../../..")
- d = defer.Deferred()
- reactor.callLater(1, d.callback, r)
- return DeferredResource(d)
-
- def getChild(self, path, request):
- if path == "tests":
- return StatusResourceTestResults(self.status,
- self.build.getTestResults())
- if path == "stop":
- return self.stop(request)
- if path == "rebuild":
- return self.rebuild(request)
- if path.startswith("step-"):
- stepname = path[len("step-"):]
- steps = self.build.getSteps()
- for s in steps:
- if s.getName() == stepname:
- return StatusResourceBuildStep(self.status, s)
- return NoResource("No such BuildStep '%s'" % stepname)
- return NoResource("No such resource '%s'" % path)
-
-# $builder
-class StatusResourceBuilder(HtmlResource):
-
- def __init__(self, status, builder, control):
- HtmlResource.__init__(self)
- self.status = status
- self.title = builder.getName() + " Builder"
- self.builder = builder
- self.control = control
-
- def body(self, request):
- b = self.builder
- slaves = b.getSlaves()
- connected_slaves = [s for s in slaves if s.isConnected()]
-
- buildbotURL = self.status.getBuildbotURL()
- projectName = self.status.getProjectName()
- data = "<a href=\"%s\">%s</a>\n" % (buildbotURL, projectName)
- data += make_row("Builder:", html.escape(b.getName()))
- b1 = b.getBuild(-1)
- if b1 is not None:
- data += make_row("Current/last build:", str(b1.getNumber()))
- data += "\n<br />BUILDSLAVES<br />\n"
- data += "<ol>\n"
- for slave in slaves:
- data += "<li><b>%s</b>: " % html.escape(slave.getName())
- if slave.isConnected():
- data += "CONNECTED\n"
- if slave.getAdmin():
- data += make_row("Admin:", html.escape(slave.getAdmin()))
- if slave.getHost():
- data += "<span class='label'>Host info:</span>\n"
- data += html.PRE(slave.getHost())
- else:
- data += ("NOT CONNECTED\n")
- data += "</li>\n"
- data += "</ol>\n"
-
- if self.control is not None and connected_slaves:
- forceURL = urllib.quote(request.childLink("force"))
- data += (
- """
- <form action='%(forceURL)s' class='command forcebuild'>
- <p>To force a build, fill out the following fields and
- push the 'Force Build' button</p>"""
- + make_row("Your name:",
- "<input type='text' name='username' />")
- + make_row("Reason for build:",
- "<input type='text' name='comments' />")
- + make_row("CWS to build:",
- "<input type='text' name='branch' />")
- #+ make_row("Revision to build:",
- # "<input type='text' name='revision' />")
- + """
- <input type='submit' value='Force Build' />
- </form>
- """) % {"forceURL": forceURL}
- elif self.control is not None:
- data += """
- <p>All buildslaves appear to be offline, so it's not possible
- to force this build to execute at this time.</p>
- """
-
- if self.control is not None:
- pingURL = urllib.quote(request.childLink("ping"))
- data += """
- <form action="%s" class='command pingbuilder'>
- <p>To ping the buildslave(s), push the 'Ping' button</p>
-
- <input type="submit" value="Ping Builder" />
- </form>
- """ % pingURL
-
- return data
-
- def force(self, request):
- name = request.args.get("username", ["<unknown>"])[0]
- reason = request.args.get("comments", ["<no reason specified>"])[0]
- branch = request.args.get("branch", [""])[0]
- revision = request.args.get("revision", [""])[0]
-
- r = "The web-page 'force build' button was pressed by '%s': %s\n" \
- % (name, reason)
- log.msg("web forcebuild of builder '%s', branch='%s', revision='%s'"
- % (self.builder.name, branch, revision))
-
- if not self.control:
- # TODO: tell the web user that their request was denied
- log.msg("but builder control is disabled")
- return Redirect("..")
-
- # keep weird stuff out of the branch and revision strings. TODO:
- # centralize this somewhere.
- if not re.match(r'^[\w\.\-\/]*$', branch):
- log.msg("bad branch '%s'" % branch)
- return Redirect("..")
- if not re.match(r'^[\w\.\-\/]*$', revision):
- log.msg("bad revision '%s'" % revision)
- return Redirect("..")
- if branch == "":
- branch = None
- if revision == "":
- revision = None
-
- # TODO: if we can authenticate that a particular User pushed the
- # button, use their name instead of None, so they'll be informed of
- # the results.
- s = SourceStamp(branch=branch, revision=revision)
- req = BuildRequest(r, s, self.builder.getName())
- try:
- self.control.requestBuildSoon(req)
- except interfaces.NoSlaveError:
- # TODO: tell the web user that their request could not be
- # honored
- pass
- return Redirect("..")
-
- def ping(self, request):
- log.msg("web ping of builder '%s'" % self.builder.name)
- self.control.ping() # TODO: there ought to be an ISlaveControl
- return Redirect("..")
-
- def getChild(self, path, request):
- if path == "force":
- return self.force(request)
- if path == "ping":
- return self.ping(request)
- if not path in ("events", "builds"):
- return NoResource("Bad URL '%s'" % path)
- num = request.postpath.pop(0)
- request.prepath.append(num)
- num = int(num)
- if path == "events":
- # TODO: is this dead code? .statusbag doesn't exist, right?
- log.msg("getChild['path']: %s" % request.uri)
- return NoResource("events are unavailable until code gets fixed")
- filename = request.postpath.pop(0)
- request.prepath.append(filename)
- e = self.builder.statusbag.getEventNumbered(num)
- if not e:
- return NoResource("No such event '%d'" % num)
- file = e.files.get(filename, None)
- if file == None:
- return NoResource("No such file '%s'" % filename)
- if type(file) == type(""):
- if file[:6] in ("<HTML>", "<html>"):
- return static.Data(file, "text/html")
- return static.Data(file, "text/plain")
- return file
- if path == "builds":
- build = self.builder.getBuild(num)
- if build:
- control = None
- if self.control:
- control = self.control.getBuild(num)
- return StatusResourceBuild(self.status, build,
- self.control, control)
- else:
- return NoResource("No such build '%d'" % num)
- return NoResource("really weird URL %s" % path)
-
-# $changes/NN
-class StatusResourceChanges(HtmlResource):
- def __init__(self, status, changemaster):
- HtmlResource.__init__(self)
- self.status = status
- self.changemaster = changemaster
- def body(self, request):
- data = ""
- data += "Change sources:\n"
- sources = list(self.changemaster)
- if sources:
- data += "<ol>\n"
- for s in sources:
- data += "<li>%s</li>\n" % s.describe()
- data += "</ol>\n"
- else:
- data += "none (push only)\n"
- return data
- def getChild(self, path, request):
- num = int(path)
- c = self.changemaster.getChangeNumbered(num)
- if not c:
- return NoResource("No change number '%d'" % num)
- return StaticHTML(c.asHTML(), "Change #%d" % num)
-
-textlog_stylesheet = """
-<style type="text/css">
- div.data {
- font-family: "Courier New", courier, monospace;
- }
- span.stdout {
- font-family: "Courier New", courier, monospace;
- }
- span.stderr {
- font-family: "Courier New", courier, monospace;
- color: red;
- }
- span.header {
- font-family: "Courier New", courier, monospace;
- color: blue;
- }
-</style>
-"""
-
-class ChunkConsumer:
- if implements:
- implements(interfaces.IStatusLogConsumer)
- else:
- __implements__ = interfaces.IStatusLogConsumer,
-
- def __init__(self, original, textlog):
- self.original = original
- self.textlog = textlog
- def registerProducer(self, producer, streaming):
- self.producer = producer
- self.original.registerProducer(producer, streaming)
- def unregisterProducer(self):
- self.original.unregisterProducer()
- def writeChunk(self, chunk):
- formatted = self.textlog.content([chunk])
- try:
- self.original.write(formatted)
- except pb.DeadReferenceError:
- self.producer.stopProducing()
- def finish(self):
- self.textlog.finished()
-
-class TextLog(Resource):
- # a new instance of this Resource is created for each client who views
- # it, so we can afford to track the request in the Resource.
- if implements:
- implements(IHTMLLog)
- else:
- __implements__ = IHTMLLog,
-
- asText = False
- subscribed = False
-
- def __init__(self, original):
- Resource.__init__(self)
- self.original = original
-
- def getChild(self, path, request):
- if path == "text":
- self.asText = True
- return self
- return NoResource("bad pathname")
-
- def htmlHeader(self, request):
- title = "Log File contents"
- data = "<html>\n<head><title>" + title + "</title>\n"
- data += textlog_stylesheet
- data += "</head>\n"
- data += "<body vlink=\"#800080\">\n"
- texturl = request.childLink("text")
- data += '<a href="%s">(view as text)</a><br />\n' % texturl
- data += "<pre>\n"
- return data
-
- def content(self, entries):
- spanfmt = '<span class="%s">%s</span>'
- data = ""
- for type, entry in entries:
- if self.asText:
- if type != builder.HEADER:
- data += entry
- else:
- data += spanfmt % (builder.ChunkTypes[type],
- html.escape(entry))
- return data
-
- def htmlFooter(self):
- data = "</pre>\n"
- data += "</body></html>\n"
- return data
-
- def render_HEAD(self, request):
- if self.asText:
- request.setHeader("content-type", "text/plain")
- else:
- request.setHeader("content-type", "text/html")
-
- # vague approximation, ignores markup
- request.setHeader("content-length", self.original.length)
- return ''
-
- def render_GET(self, req):
- self.req = req
-
- if self.asText:
- req.setHeader("content-type", "text/plain")
- else:
- req.setHeader("content-type", "text/html")
-
- if not self.asText:
- req.write(self.htmlHeader(req))
-
- self.original.subscribeConsumer(ChunkConsumer(req, self))
- return server.NOT_DONE_YET
-
- def finished(self):
- if not self.req:
- return
- try:
- if not self.asText:
- self.req.write(self.htmlFooter())
- self.req.finish()
- except pb.DeadReferenceError:
- pass
- # break the cycle, the Request's .notifications list includes the
- # Deferred (from req.notifyFinish) that's pointing at us.
- self.req = None
-
-components.registerAdapter(TextLog, interfaces.IStatusLog, IHTMLLog)
-
-
-class HTMLLog(Resource):
- if implements:
- implements(IHTMLLog)
- else:
- __implements__ = IHTMLLog,
-
-
- def __init__(self, original):
- Resource.__init__(self)
- self.original = original
-
- def render(self, request):
- request.setHeader("content-type", "text/html")
- return self.original.html
-
-components.registerAdapter(HTMLLog, builder.HTMLLogFile, IHTMLLog)
-
-
-class CurrentBox(components.Adapter):
- # this provides the "current activity" box, just above the builder name
- if implements:
- implements(ICurrentBox)
- else:
- __implements__ = ICurrentBox,
-
- def formatETA(self, eta):
- if eta is None:
- return []
- if eta < 0:
- return ["Soon"]
- abstime = time.strftime("%H:%M:%S", time.localtime(util.now()+eta))
- return ["ETA in", "%d secs" % eta, "at %s" % abstime]
-
- def getBox(self, status):
- # getState() returns offline, idle, or building
- state, builds = self.original.getState()
-
- # look for upcoming builds. We say the state is "waiting" if the
- # builder is otherwise idle and there is a scheduler which tells us a
- # build will be performed some time in the near future. TODO: this
- # functionality used to be in BuilderStatus.. maybe this code should
- # be merged back into it.
- upcoming = []
- builderName = self.original.getName()
- for s in status.getSchedulers():
- if builderName in s.listBuilderNames():
- upcoming.extend(s.getPendingBuildTimes())
- if state == "idle" and upcoming:
- state = "waiting"
-
- if state == "building":
- color = "yellow"
- text = ["building"]
- if builds:
- for b in builds:
- eta = b.getETA()
- if eta:
- text.extend(self.formatETA(eta))
- elif state == "offline":
- color = "red"
- text = ["offline"]
- elif state == "idle":
- color = "white"
- text = ["idle"]
- elif state == "waiting":
- color = "yellow"
- text = ["waiting"]
- else:
- # just in case I add a state and forget to update this
- color = "white"
- text = [state]
-
- # TODO: for now, this pending/upcoming stuff is in the "current
- # activity" box, but really it should go into a "next activity" row
- # instead. The only times it should show up in "current activity" is
- # when the builder is otherwise idle.
-
- # are any builds pending? (waiting for a slave to be free)
- pbs = self.original.getPendingBuilds()
- if pbs:
- text.append("%d pending" % len(pbs))
- for t in upcoming:
- text.extend(["next at",
- time.strftime("%H:%M:%S", time.localtime(t)),
- "[%d secs]" % (t - util.now()),
- ])
- # TODO: the upcoming-builds box looks like:
- # ['waiting', 'next at', '22:14:15', '[86 secs]']
- # while the currently-building box is reversed:
- # ['building', 'ETA in', '2 secs', 'at 22:12:50']
- # consider swapping one of these to make them look the same. also
- # consider leaving them reversed to make them look different.
- return Box(text, color=color, class_="Activity " + state)
-
-components.registerAdapter(CurrentBox, builder.BuilderStatus, ICurrentBox)
-
-class ChangeBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- url = "changes/%d" % self.original.number
- text = '<a href="%s">%s</a>' % (url, html.escape(self.original.who))
- return Box([text], color="white", class_="Change")
-components.registerAdapter(ChangeBox, changes.Change, IBox)
-
-class BuildBox(components.Adapter):
- # this provides the yellow "starting line" box for each build
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- b = self.original
- name = b.getBuilder().getName()
- number = b.getNumber()
- url = "%s/builds/%d" % (urllib.quote(name, safe=''), number)
- text = '<a href="%s">Build %d</a>' % (url, number)
- color = "yellow"
- class_ = "start"
- if b.isFinished() and not b.getSteps():
- # the steps have been pruned, so there won't be any indication
- # of whether it succeeded or failed. Color the box red or green
- # to show its status
- color = b.getColor()
- class_ = build_get_class(b)
- return Box([text], color=color, class_="BuildStep " + class_)
-components.registerAdapter(BuildBox, builder.BuildStatus, IBox)
-
-class StepBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- b = self.original.getBuild()
- urlbase = "%s/builds/%d/step-%s" % (
- urllib.quote(b.getBuilder().getName(), safe=''),
- b.getNumber(),
- urllib.quote(self.original.getName(), safe=''))
- text = self.original.getText()
- if text is None:
- log.msg("getText() gave None", urlbase)
- text = []
- text = text[:]
- logs = self.original.getLogs()
- for num in range(len(logs)):
- name = logs[num].getName()
- if logs[num].hasContents():
- url = "%s/%d" % (urlbase, num)
- text.append("<a href=\"%s\">%s</a>" % (url, html.escape(name)))
- else:
- text.append(html.escape(name))
- color = self.original.getColor()
- class_ = "BuildStep " + build_get_class(self.original)
- return Box(text, color, class_=class_)
-components.registerAdapter(StepBox, builder.BuildStepStatus, IBox)
-
-class EventBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- text = self.original.getText()
- color = self.original.getColor()
- class_ = "Event"
- if color:
- class_ += " " + color
- return Box(text, color, class_=class_)
-components.registerAdapter(EventBox, builder.Event, IBox)
-
-
-class BuildTopBox(components.Adapter):
- # this provides a per-builder box at the very top of the display,
- # showing the results of the most recent build
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- assert interfaces.IBuilderStatus(self.original)
- b = self.original.getLastFinishedBuild()
- if not b:
- return Box(["none"], "white", class_="LastBuild")
- name = b.getBuilder().getName()
- number = b.getNumber()
- url = "%s/builds/%d" % (name, number)
- text = b.getText()
- # TODO: add logs?
- # TODO: add link to the per-build page at 'url'
- c = b.getColor()
- class_ = build_get_class(b)
- return Box(text, c, class_="LastBuild %s" % class_)
-components.registerAdapter(BuildTopBox, builder.BuilderStatus, ITopBox)
-
-class Spacer(builder.Event):
- def __init__(self, start, finish):
- self.started = start
- self.finished = finish
-
-class SpacerBox(components.Adapter):
- if implements:
- implements(IBox)
- else:
- __implements__ = IBox,
-
- def getBox(self):
- #b = Box(["spacer"], "white")
- b = Box([])
- b.spacer = True
- return b
-components.registerAdapter(SpacerBox, Spacer, IBox)
-
-def insertGaps(g, lastEventTime, idleGap=2):
- debug = False
-
- e = g.next()
- starts, finishes = e.getTimes()
- if debug: log.msg("E0", starts, finishes)
- if finishes == 0:
- finishes = starts
- if debug: log.msg("E1 finishes=%s, gap=%s, lET=%s" % \
- (finishes, idleGap, lastEventTime))
- if finishes is not None and finishes + idleGap < lastEventTime:
- if debug: log.msg(" spacer0")
- yield Spacer(finishes, lastEventTime)
-
- followingEventStarts = starts
- if debug: log.msg(" fES0", starts)
- yield e
-
- while 1:
- e = g.next()
- starts, finishes = e.getTimes()
- if debug: log.msg("E2", starts, finishes)
- if finishes == 0:
- finishes = starts
- if finishes is not None and finishes + idleGap < followingEventStarts:
- # there is a gap between the end of this event and the beginning
- # of the next one. Insert an idle event so the waterfall display
- # shows a gap here.
- if debug:
- log.msg(" finishes=%s, gap=%s, fES=%s" % \
- (finishes, idleGap, followingEventStarts))
- yield Spacer(finishes, followingEventStarts)
- yield e
- followingEventStarts = starts
- if debug: log.msg(" fES1", starts)
-
-
-class WaterfallStatusResource(HtmlResource):
- """This builds the main status page, with the waterfall display, and
- all child pages."""
- title = "BuildBot"
- def __init__(self, status, changemaster, categories, css=None):
- HtmlResource.__init__(self)
- self.status = status
- self.changemaster = changemaster
- self.categories = categories
- p = self.status.getProjectName()
- if p:
- self.title = "BuildBot: %s" % p
- self.css = css
-
- def body(self, request):
- "This method builds the main waterfall display."
-
- data = ''
-
- projectName = self.status.getProjectName()
- projectURL = self.status.getProjectURL()
-
- phase = request.args.get("phase",["2"])
- phase = int(phase[0])
-
- showBuilders = request.args.get("show", None)
- allBuilders = self.status.getBuilderNames(categories=self.categories)
- if showBuilders:
- builderNames = []
- for b in showBuilders:
- if b not in allBuilders:
- continue
- if b in builderNames:
- continue
- builderNames.append(b)
- else:
- builderNames = allBuilders
- builders = map(lambda name: self.status.getBuilder(name),
- builderNames)
-
- if phase == -1:
- return self.body0(request, builders)
- (changeNames, builderNames, timestamps, eventGrid, sourceEvents) = \
- self.buildGrid(request, builders)
- if phase == 0:
- return self.phase0(request, (changeNames + builderNames),
- timestamps, eventGrid)
- # start the table: top-header material
- data += '<table border="0" cellspacing="0">\n'
-
- if projectName and projectURL:
- # TODO: this is going to look really ugly
- topleft = "<a href=\"%s\">%s</a><br />last build" % \
- (projectURL, projectName)
- else:
- topleft = "last build"
- data += ' <tr class="LastBuild">\n'
- data += td(topleft, align="right", colspan=2, class_="Project")
- for b in builders:
- box = ITopBox(b).getBox()
- data += box.td(align="center")
- data += " </tr>\n"
-
- data += ' <tr class="Activity">\n'
- data += td('current activity', align='right', colspan=2)
- for b in builders:
- box = ICurrentBox(b).getBox(self.status)
- data += box.td(align="center")
- data += " </tr>\n"
-
- data += " <tr>\n"
- TZ = time.tzname[time.daylight]
- data += td("time (%s)" % TZ, align="center", class_="Time")
- name = changeNames[0]
- data += td(
- "<a href=\"%s\">%s</a>" % (urllib.quote(name, safe=''), name),
- align="center", class_="Change")
- for name in builderNames:
- data += td(
- #"<a href=\"%s\">%s</a>" % (request.childLink(name), name),
- "<a href=\"%s\">%s</a>" % (urllib.quote(name, safe=''), name),
- align="center", class_="Builder")
- data += " </tr>\n"
-
- if phase == 1:
- f = self.phase1
- else:
- f = self.phase2
- data += f(request, changeNames + builderNames, timestamps, eventGrid,
- sourceEvents)
-
- data += "</table>\n"
-
- data += "<hr />\n"
-
- data += "<a href=\"http://buildbot.sourceforge.net/\">Buildbot</a>"
- data += "-%s " % version
- if projectName:
- data += "working for the "
- if projectURL:
- data += "<a href=\"%s\">%s</a> project." % (projectURL,
- projectName)
- else:
- data += "%s project." % projectName
- data += "<br />\n"
- # TODO: push this to the right edge, if possible
- data += ("Page built: " +
- time.strftime("%a %d %b %Y %H:%M:%S",
- time.localtime(util.now()))
- + "\n")
- return data
-
- def body0(self, request, builders):
- # build the waterfall display
- data = ""
- data += "<h2>Basic display</h2>\n"
- data += "<p>See <a href=\"%s\">here</a>" % \
- urllib.quote(request.childLink("waterfall"))
- data += " for the waterfall display</p>\n"
-
- data += '<table border="0" cellspacing="0">\n'
- names = map(lambda builder: builder.name, builders)
-
- # the top row is two blank spaces, then the top-level status boxes
- data += " <tr>\n"
- data += td("", colspan=2)
- for b in builders:
- text = ""
- color = "#ca88f7"
- state, builds = b.getState()
- if state != "offline":
- text += "%s<br />\n" % state #b.getCurrentBig().text[0]
- else:
- text += "OFFLINE<br />\n"
- color = "#ffe0e0"
- data += td(text, align="center", bgcolor=color)
-
- # the next row has the column headers: time, changes, builder names
- data += " <tr>\n"
- data += td("Time", align="center")
- data += td("Changes", align="center")
- for name in names:
- data += td(
- "<a href=\"%s\">%s</a>" % (urllib.quote(request.childLink(name)), name),
- align="center")
- data += " </tr>\n"
-
- # all further rows involve timestamps, commit events, and build events
- data += " <tr>\n"
- data += td("04:00", align="bottom")
- data += td("fred", align="center")
- for name in names:
- data += td("stuff", align="center", bgcolor="red")
- data += " </tr>\n"
-
- data += "</table>\n"
- return data
-
- def buildGrid(self, request, builders):
- debug = False
-
- # XXX: see if we can use a cached copy
-
- # first step is to walk backwards in time, asking each column
- # (commit, all builders) if they have any events there. Build up the
- # array of events, and stop when we have a reasonable number.
-
- commit_source = self.changemaster
-
- lastEventTime = util.now()
- sources = [commit_source] + builders
- changeNames = ["changes"]
- builderNames = map(lambda builder: builder.getName(), builders)
- sourceNames = changeNames + builderNames
- sourceEvents = []
- sourceGenerators = []
- for s in sources:
- gen = insertGaps(s.eventGenerator(), lastEventTime)
- sourceGenerators.append(gen)
- # get the first event
- try:
- e = gen.next()
- event = interfaces.IStatusEvent(e)
- if debug:
- log.msg("gen %s gave1 %s" % (gen, event.getText()))
- except StopIteration:
- event = None
- sourceEvents.append(event)
- eventGrid = []
- timestamps = []
- spanLength = 10 # ten-second chunks
- tooOld = util.now() - 12*60*60 # never show more than 12 hours
- maxPageLen = 200
-
- lastEventTime = 0
- for e in sourceEvents:
- if e and e.getTimes()[0] > lastEventTime:
- lastEventTime = e.getTimes()[0]
- if lastEventTime == 0:
- lastEventTime = util.now()
-
- spanStart = lastEventTime - spanLength
- debugGather = 0
-
- while 1:
- if debugGather: log.msg("checking (%s,]" % spanStart)
- # the tableau of potential events is in sourceEvents[]. The
- # window crawls backwards, and we examine one source at a time.
- # If the source's top-most event is in the window, it is pushed
- # onto the events[] array and the tableau is refilled. This
- # continues until the tableau event is not in the window (or is
- # missing).
-
- spanEvents = [] # for all sources, in this span. row of eventGrid
- firstTimestamp = None # timestamp of first event in the span
- lastTimestamp = None # last pre-span event, for next span
-
- for c in range(len(sourceGenerators)):
- events = [] # for this source, in this span. cell of eventGrid
- event = sourceEvents[c]
- while event and spanStart < event.getTimes()[0]:
- # to look at windows that don't end with the present,
- # condition the .append on event.time <= spanFinish
- if not IBox(event, None):
- log.msg("BAD EVENT", event, event.getText())
- assert 0
- if debug:
- log.msg("pushing", event.getText(), event)
- events.append(event)
- starts, finishes = event.getTimes()
- firstTimestamp = util.earlier(firstTimestamp, starts)
- try:
- event = sourceGenerators[c].next()
- #event = interfaces.IStatusEvent(event)
- if debug:
- log.msg("gen[%s] gave2 %s" % (sourceNames[c],
- event.getText()))
- except StopIteration:
- event = None
- if debug:
- log.msg("finished span")
-
- if event:
- # this is the last pre-span event for this source
- lastTimestamp = util.later(lastTimestamp,
- event.getTimes()[0])
- if debugGather:
- log.msg(" got %s from %s" % (events, sourceNames[c]))
- sourceEvents[c] = event # refill the tableau
- spanEvents.append(events)
-
- if firstTimestamp is not None:
- eventGrid.append(spanEvents)
- timestamps.append(firstTimestamp)
-
-
- if lastTimestamp:
- spanStart = lastTimestamp - spanLength
- else:
- # no more events
- break
- if lastTimestamp < tooOld:
- pass
- #break
- if len(timestamps) > maxPageLen:
- break
-
-
- # now loop
-
- # loop is finished. now we have eventGrid[] and timestamps[]
- if debugGather: log.msg("finished loop")
- assert(len(timestamps) == len(eventGrid))
- return (changeNames, builderNames, timestamps, eventGrid, sourceEvents)
-
- def phase0(self, request, sourceNames, timestamps, eventGrid):
- # phase0 rendering
- if not timestamps:
- return "no events"
- data = ""
- for r in range(0, len(timestamps)):
- data += "<p>\n"
- data += "[%s]<br />" % timestamps[r]
- row = eventGrid[r]
- assert(len(row) == len(sourceNames))
- for c in range(0, len(row)):
- if row[c]:
- data += "<b>%s</b><br />\n" % sourceNames[c]
- for e in row[c]:
- log.msg("Event", r, c, sourceNames[c], e.getText())
- lognames = [loog.getName() for loog in e.getLogs()]
- data += "%s: %s: %s %s<br />" % (e.getText(),
- e.getTimes()[0],
- e.getColor(),
- lognames)
- else:
- data += "<b>%s</b> [none]<br />\n" % sourceNames[c]
- return data
-
- def phase1(self, request, sourceNames, timestamps, eventGrid,
- sourceEvents):
- # phase1 rendering: table, but boxes do not overlap
- data = ""
- if not timestamps:
- return data
- lastDate = None
- for r in range(0, len(timestamps)):
- chunkstrip = eventGrid[r]
- # chunkstrip is a horizontal strip of event blocks. Each block
- # is a vertical list of events, all for the same source.
- assert(len(chunkstrip) == len(sourceNames))
- maxRows = reduce(lambda x,y: max(x,y),
- map(lambda x: len(x), chunkstrip))
- for i in range(maxRows):
- data += " <tr>\n";
- if i == 0:
- stuff = []
- # add the date at the beginning, and each time it changes
- today = time.strftime("<b>%d %b %Y</b>",
- time.localtime(timestamps[r]))
- todayday = time.strftime("<b>%a</b>",
- time.localtime(timestamps[r]))
- if today != lastDate:
- stuff.append(todayday)
- stuff.append(today)
- lastDate = today
- stuff.append(
- time.strftime("%H:%M:%S",
- time.localtime(timestamps[r])))
- data += td(stuff, valign="bottom", align="center",
- rowspan=maxRows, class_="Time")
- for c in range(0, len(chunkstrip)):
- block = chunkstrip[c]
- assert(block != None) # should be [] instead
- # bottom-justify
- offset = maxRows - len(block)
- if i < offset:
- data += td("")
- else:
- e = block[i-offset]
- box = IBox(e).getBox()
- box.parms["show_idle"] = 1
- data += box.td(valign="top", align="center")
- data += " </tr>\n"
-
- return data
-
- def phase2(self, request, sourceNames, timestamps, eventGrid,
- sourceEvents):
- data = ""
- if not timestamps:
- return data
- # first pass: figure out the height of the chunks, populate grid
- grid = []
- for i in range(1+len(sourceNames)):
- grid.append([])
- # grid is a list of columns, one for the timestamps, and one per
- # event source. Each column is exactly the same height. Each element
- # of the list is a single <td> box.
- lastDate = time.strftime("<b>%d %b %Y</b>",
- time.localtime(util.now()))
- for r in range(0, len(timestamps)):
- chunkstrip = eventGrid[r]
- # chunkstrip is a horizontal strip of event blocks. Each block
- # is a vertical list of events, all for the same source.
- assert(len(chunkstrip) == len(sourceNames))
- maxRows = reduce(lambda x,y: max(x,y),
- map(lambda x: len(x), chunkstrip))
- for i in range(maxRows):
- if i != maxRows-1:
- grid[0].append(None)
- else:
- # timestamp goes at the bottom of the chunk
- stuff = []
- # add the date at the beginning (if it is not the same as
- # today's date), and each time it changes
- todayday = time.strftime("<b>%a</b>",
- time.localtime(timestamps[r]))
- today = time.strftime("<b>%d %b %Y</b>",
- time.localtime(timestamps[r]))
- if today != lastDate:
- stuff.append(todayday)
- stuff.append(today)
- lastDate = today
- stuff.append(
- time.strftime("%H:%M:%S",
- time.localtime(timestamps[r])))
- grid[0].append(Box(text=stuff, class_="Time",
- valign="bottom", align="center"))
-
- # at this point the timestamp column has been populated with
- # maxRows boxes, most None but the last one has the time string
- for c in range(0, len(chunkstrip)):
- block = chunkstrip[c]
- assert(block != None) # should be [] instead
- for i in range(maxRows - len(block)):
- # fill top of chunk with blank space
- grid[c+1].append(None)
- for i in range(len(block)):
- # so the events are bottom-justified
- b = IBox(block[i]).getBox()
- b.parms['valign'] = "top"
- b.parms['align'] = "center"
- grid[c+1].append(b)
- # now all the other columns have maxRows new boxes too
- # populate the last row, if empty
- gridlen = len(grid[0])
- for i in range(len(grid)):
- strip = grid[i]
- assert(len(strip) == gridlen)
- if strip[-1] == None:
- if sourceEvents[i-1]:
- filler = IBox(sourceEvents[i-1]).getBox()
- else:
- # this can happen if you delete part of the build history
- filler = Box(text=["?"], align="center")
- strip[-1] = filler
- strip[-1].parms['rowspan'] = 1
- # second pass: bubble the events upwards to un-occupied locations
- # Every square of the grid that has a None in it needs to have
- # something else take its place.
- noBubble = request.args.get("nobubble",['0'])
- noBubble = int(noBubble[0])
- if not noBubble:
- for col in range(len(grid)):
- strip = grid[col]
- if col == 1: # changes are handled differently
- for i in range(2, len(strip)+1):
- # only merge empty boxes. Don't bubble commit boxes.
- if strip[-i] == None:
- next = strip[-i+1]
- assert(next)
- if next:
- #if not next.event:
- if next.spacer:
- # bubble the empty box up
- strip[-i] = next
- strip[-i].parms['rowspan'] += 1
- strip[-i+1] = None
- else:
- # we are above a commit box. Leave it
- # be, and turn the current box into an
- # empty one
- strip[-i] = Box([], rowspan=1,
- comment="commit bubble")
- strip[-i].spacer = True
- else:
- # we are above another empty box, which
- # somehow wasn't already converted.
- # Shouldn't happen
- pass
- else:
- for i in range(2, len(strip)+1):
- # strip[-i] will go from next-to-last back to first
- if strip[-i] == None:
- # bubble previous item up
- assert(strip[-i+1] != None)
- strip[-i] = strip[-i+1]
- strip[-i].parms['rowspan'] += 1
- strip[-i+1] = None
- else:
- strip[-i].parms['rowspan'] = 1
- # third pass: render the HTML table
- for i in range(gridlen):
- data += " <tr>\n";
- for strip in grid:
- b = strip[i]
- if b:
- data += b.td()
- else:
- if noBubble:
- data += td([])
- # Nones are left empty, rowspan should make it all fit
- data += " </tr>\n"
- return data
-
-
-class StatusResource(Resource):
- status = None
- control = None
- favicon = None
- robots_txt = None
-
- def __init__(self, status, control, changemaster, categories, css):
- """
- @type status: L{buildbot.status.builder.Status}
- @type control: L{buildbot.master.Control}
- @type changemaster: L{buildbot.changes.changes.ChangeMaster}
- """
- Resource.__init__(self)
- self.status = status
- self.control = control
- self.changemaster = changemaster
- self.categories = categories
- self.css = css
- waterfall = WaterfallStatusResource(self.status, changemaster,
- categories, css)
- self.putChild("", waterfall)
-
- def render(self, request):
- request.redirect(request.prePathURL() + '/')
- request.finish()
-
- def getChild(self, path, request):
- if path == "robots.txt" and self.robots_txt:
- return static.File(self.robots_txt)
- if path == "buildbot.css" and self.css:
- return static.File(self.css)
- if path == "changes":
- return StatusResourceChanges(self.status, self.changemaster)
- if path == "favicon.ico":
- if self.favicon:
- return static.File(self.favicon)
- return NoResource("No favicon.ico registered")
-
- if path in self.status.getBuilderNames():
- builder = self.status.getBuilder(path)
- control = None
- if self.control:
- control = self.control.getBuilder(path)
- return StatusResourceBuilder(self.status, builder, control)
-
- return NoResource("No such Builder '%s'" % path)
-
-# the icon is sibpath(__file__, "../buildbot.png"). This is for portability.
-up = os.path.dirname
-buildbot_icon = os.path.abspath(os.path.join(up(up(__file__)),
- "buildbot.png"))
-buildbot_css = os.path.abspath(os.path.join(up(__file__), "classic.css"))
-
-class Waterfall(base.StatusReceiverMultiService):
- """I implement the primary web-page status interface, called a 'Waterfall
- Display' because builds and steps are presented in a grid of boxes which
- move downwards over time. The top edge is always the present. Each column
- represents a single builder. Each box describes a single Step, which may
- have logfiles or other status information.
-
- All these pages are served via a web server of some sort. The simplest
- approach is to let the buildmaster run its own webserver, on a given TCP
- port, but it can also publish its pages to a L{twisted.web.distrib}
- distributed web server (which lets the buildbot pages be a subset of some
- other web server).
-
- Since 0.6.3, BuildBot defines class attributes on elements so they can be
- styled with CSS stylesheets. Buildbot uses some generic classes to
- identify the type of object, and some more specific classes for the
- various kinds of those types. It does this by specifying both in the
- class attributes where applicable, separated by a space. It is important
- that in your CSS you declare the more generic class styles above the more
- specific ones. For example, first define a style for .Event, and below
- that for .SUCCESS
-
- The following CSS class names are used:
- - Activity, Event, BuildStep, LastBuild: general classes
- - waiting, interlocked, building, offline, idle: Activity states
- - start, running, success, failure, warnings, skipped, exception:
- LastBuild and BuildStep states
- - Change: box with change
- - Builder: box for builder name (at top)
- - Project
- - Time
-
- @type parent: L{buildbot.master.BuildMaster}
- @ivar parent: like all status plugins, this object is a child of the
- BuildMaster, so C{.parent} points to a
- L{buildbot.master.BuildMaster} instance, through which
- the status-reporting object is acquired.
- """
-
- compare_attrs = ["http_port", "distrib_port", "allowForce",
- "categories", "css", "favicon", "robots_txt"]
-
- def __init__(self, http_port=None, distrib_port=None, allowForce=True,
- categories=None, css=buildbot_css, favicon=buildbot_icon,
- robots_txt=None):
- """To have the buildbot run its own web server, pass a port number to
- C{http_port}. To have it publish its pages through a web.distrib server instead, pass a socket pathname (or port number) to C{distrib_port}.
-
- @type http_port: int or L{twisted.application.strports} string
- @param http_port: a strports specification describing which port the
- buildbot should use for its web server, with the
- Waterfall display as the root page. For backwards
- compatibility this can also be an int. Use
- 'tcp:8000' to listen on that port, or
- 'tcp:12345:interface=127.0.0.1' if you only want
- local processes to connect to it (perhaps because
- you are using an HTTP reverse proxy to make the
- buildbot available to the outside world, and do not
- want to make the raw port visible).
-
- @type distrib_port: int or L{twisted.application.strports} string
- @param distrib_port: Use this if you want to publish the Waterfall
- page using web.distrib instead. The most common
- case is to provide a string that is an absolute
- pathname to the unix socket on which the
- publisher should listen
- (C{os.path.expanduser("~/.twistd-web-pb")} will
- match the default settings of a standard
- twisted.web 'personal web server'). Another
- possibility is to pass an integer, which means
- the publisher should listen on a TCP socket,
- allowing the web server to be on a different
- machine entirely. Both forms are provided for
- backwards compatibility; the preferred form is a
- strports specification like
- 'unix:/home/buildbot/.twistd-web-pb'. Providing
- a non-absolute pathname will probably confuse
- the strports parser.
-
- @type allowForce: bool
- @param allowForce: if True, present a 'Force Build' button on the
- per-Builder page that allows visitors to the web
- site to initiate a build. If False, don't provide
- this button.
-
- @type favicon: string
- @param favicon: if set, provide the pathname of an image file that
- will be used for the 'favicon.ico' resource. Many
- browsers automatically request this file and use it
- as an icon in any bookmark generated from this site.
- Defaults to the buildbot/buildbot.png image provided
- in the distribution. Can be set to None to avoid
- using a favicon at all.
-
- @type robots_txt: string
- @param robots_txt: if set, provide the pathname of a robots.txt file.
- Many search engines request this file and obey the
- rules in it. E.g. to disallow them to crawl the
- status page, put the following two lines in
- robots.txt:
- User-agent: *
- Disallow: /
- """
-
- base.StatusReceiverMultiService.__init__(self)
- assert allowForce in (True, False) # TODO: implement others
- if type(http_port) is int:
- http_port = "tcp:%d" % http_port
- self.http_port = http_port
- if distrib_port is not None:
- if type(distrib_port) is int:
- distrib_port = "tcp:%d" % distrib_port
- if distrib_port[0] in "/~.": # pathnames
- distrib_port = "unix:%s" % distrib_port
- self.distrib_port = distrib_port
- self.allowForce = allowForce
- self.categories = categories
- self.css = css
- self.favicon = favicon
- self.robots_txt = robots_txt
-
- def __repr__(self):
- if self.http_port is None:
- return "<Waterfall on path %s>" % self.distrib_port
- if self.distrib_port is None:
- return "<Waterfall on port %s>" % self.http_port
- return "<Waterfall on port %s and path %s>" % (self.http_port,
- self.distrib_port)
-
- def setServiceParent(self, parent):
- """
- @type parent: L{buildbot.master.BuildMaster}
- """
- base.StatusReceiverMultiService.setServiceParent(self, parent)
- self.setup()
-
- def setup(self):
- status = self.parent.getStatus()
- if self.allowForce:
- control = interfaces.IControl(self.parent)
- else:
- control = None
- change_svc = self.parent.change_svc
- sr = StatusResource(status, control, change_svc, self.categories,
- self.css)
- sr.favicon = self.favicon
- sr.robots_txt = self.robots_txt
- self.site = server.Site(sr)
-
- if self.http_port is not None:
- s = strports.service(self.http_port, self.site)
- s.setServiceParent(self)
- if self.distrib_port is not None:
- f = pb.PBServerFactory(distrib.ResourcePublisher(self.site))
- s = strports.service(self.distrib_port, f)
- s.setServiceParent(self)
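# Sketch of the web.distrib publication mode described in the distrib_port docstring
# above; the socket path is a placeholder and c is the BuildmasterConfig dictionary
# as in the earlier sketch. A front-end web server that speaks twisted.web.distrib
# subscribes to this socket and serves the pages; with a TCP distrib_port the front
# end can live on a different machine.
from buildbot.status import html

c['status'].append(html.Waterfall(
    http_port=None,                                    # no direct TCP listener
    distrib_port="unix:/home/buildbot/.twistd-web-pb"))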
diff --git a/buildbot/buildbot-source/buildbot/status/mail.py b/buildbot/buildbot-source/buildbot/status/mail.py
deleted file mode 100644
index 69744adff..000000000
--- a/buildbot/buildbot-source/buildbot/status/mail.py
+++ /dev/null
@@ -1,368 +0,0 @@
-# -*- test-case-name: buildbot.test.test_status -*-
-
-# the email.MIMEMultipart module is only available in python-2.2.2 and later
-
-from email.Message import Message
-from email.Utils import formatdate
-from email.MIMEText import MIMEText
-try:
- from email.MIMEMultipart import MIMEMultipart
- canDoAttachments = True
-except ImportError:
- canDoAttachments = False
-import urllib
-
-from twisted.internet import defer
-from twisted.application import service
-try:
- from twisted.mail.smtp import sendmail # Twisted-2.0
-except ImportError:
- from twisted.protocols.smtp import sendmail # Twisted-1.3
-from twisted.python import log
-
-from buildbot import interfaces, util
-from buildbot.twcompat import implements, providedBy
-from buildbot.status import base
-from buildbot.status.builder import FAILURE, SUCCESS, WARNINGS
-
-
-class Domain(util.ComparableMixin):
- if implements:
- implements(interfaces.IEmailLookup)
- else:
- __implements__ = interfaces.IEmailLookup
- compare_attrs = ["domain"]
-
- def __init__(self, domain):
- assert "@" not in domain
- self.domain = domain
-
- def getAddress(self, name):
- return name + "@" + self.domain
-
-
-class MailNotifier(base.StatusReceiverMultiService):
- """This is a status notifier which sends email to a list of recipients
- upon the completion of each build. It can be configured to only send out
- mail for certain builds, and only send messages when the build fails, or
- when it transitions from success to failure. It can also be configured to
- include various build logs in each message.
-
- By default, the message will be sent to the Interested Users list, which
- includes all developers who made changes in the build. You can add
- additional recipients with the extraRecipients argument.
-
- To get a simple one-message-per-build (say, for a mailing list), use
- sendToInterestedUsers=False, extraRecipients=['listaddr@example.org']
-
- Each MailNotifier sends mail to a single set of recipients. To send
- different kinds of mail to different recipients, use multiple
- MailNotifiers.
- """
-
- if implements:
- implements(interfaces.IEmailSender)
- else:
- __implements__ = (interfaces.IEmailSender,
- base.StatusReceiverMultiService.__implements__)
-
- compare_attrs = ["extraRecipients", "lookup", "fromaddr", "mode",
- "categories", "builders", "addLogs", "relayhost",
- "subject", "sendToInterestedUsers"]
-
- def __init__(self, fromaddr, mode="all", categories=None, builders=None,
- addLogs=False, relayhost="localhost",
- subject="buildbot %(result)s in %(builder)s",
- lookup=None, extraRecipients=[],
- sendToInterestedUsers=True):
- """
- @type fromaddr: string
- @param fromaddr: the email address to be used in the 'From' header.
- @type sendToInterestedUsers: boolean
- @param sendToInterestedUsers: if True (the default), send mail to all
- of the Interested Users. If False, only
- send mail to the extraRecipients list.
-
- @type extraRecipients: tuple of string
- @param extraRecipients: a list of email addresses to which messages
- should be sent (in addition to the
- InterestedUsers list, which includes any
- developers who made Changes that went into this
- build). It is a good idea to create a small
- mailing list and deliver to that, then let
- subscribers come and go as they please.
-
- @type subject: string
- @param subject: a string to be used as the subject line of the message.
- %(builder)s will be replaced with the name of the
- builder which provoked the message.
-
- @type mode: string (defaults to all)
- @param mode: one of:
- - 'all': send mail about all builds, passing and failing
- - 'failing': only send mail about builds which fail
- - 'problem': only send mail about a build which failed
- when the previous build passed
-
- @type builders: list of strings
- @param builders: a list of builder names for which mail should be
- sent. Defaults to None (send mail for all builds).
- Use either builders or categories, but not both.
-
- @type categories: list of strings
- @param categories: a list of category names to serve status
- information for. Defaults to None (all
- categories). Use either builders or categories,
- but not both.
-
- @type addLogs: boolean.
- @param addLogs: if True, include all build logs as attachments to the
- messages. These can be quite large. This can also be
- set to a list of log names, to send a subset of the
- logs. Defaults to False.
-
- @type relayhost: string
- @param relayhost: the host to which the outbound SMTP connection
- should be made. Defaults to 'localhost'
-
- @type lookup: implementor of L{IEmailLookup}
- @param lookup: object which provides IEmailLookup, which is
- responsible for mapping User names (which come from
- the VC system) into valid email addresses. If not
- provided, the notifier will only be able to send mail
- to the addresses in the extraRecipients list. Most of
- the time you can use a simple Domain instance. As a
- shortcut, you can pass a string: this will be
- treated as if you had provided Domain(str). For
- example, lookup='twistedmatrix.com' will allow mail
- to be sent to all developers whose SVN usernames
- match their twistedmatrix.com account names.
- """
-
- base.StatusReceiverMultiService.__init__(self)
- assert isinstance(extraRecipients, (list, tuple))
- for r in extraRecipients:
- assert isinstance(r, str)
- assert "@" in r # require full email addresses, not User names
- self.extraRecipients = extraRecipients
- self.sendToInterestedUsers = sendToInterestedUsers
- self.fromaddr = fromaddr
- self.mode = mode
- self.categories = categories
- self.builders = builders
- self.addLogs = addLogs
- self.relayhost = relayhost
- self.subject = subject
- if lookup is not None:
- if type(lookup) is str:
- lookup = Domain(lookup)
- assert providedBy(lookup, interfaces.IEmailLookup)
- self.lookup = lookup
- self.watched = []
- self.status = None
-
- # you should either limit on builders or categories, not both
- if self.builders != None and self.categories != None:
- log.err("Please specify only builders to ignore or categories to include")
- raise # FIXME: the asserts above do not raise some Exception either
-
- def setServiceParent(self, parent):
- """
- @type parent: L{buildbot.master.BuildMaster}
- """
- base.StatusReceiverMultiService.setServiceParent(self, parent)
- self.setup()
-
- def setup(self):
- self.status = self.parent.getStatus()
- self.status.subscribe(self)
-
- def disownServiceParent(self):
- self.status.unsubscribe(self)
- for w in self.watched:
- w.unsubscribe(self)
- return base.StatusReceiverMultiService.disownServiceParent(self)
-
- def builderAdded(self, name, builder):
- # only subscribe to builders we are interested in
- if self.categories != None and builder.category not in self.categories:
- return None
-
- self.watched.append(builder)
- return self # subscribe to this builder
-
- def builderRemoved(self, name):
- pass
-
- def builderChangedState(self, name, state):
- pass
- def buildStarted(self, name, build):
- pass
- def buildFinished(self, name, build, results):
- # here is where we actually do something.
- builder = build.getBuilder()
- if self.builders is not None and name not in self.builders:
- return # ignore this build
- if self.categories is not None and \
- builder.category not in self.categories:
- return # ignore this build
-
- if self.mode == "failing" and results != FAILURE:
- return
- if self.mode == "problem":
- if results != FAILURE:
- return
- prev = build.getPreviousBuild()
- if prev and prev.getResults() == FAILURE:
- return
- # for testing purposes, buildMessage returns a Deferred that fires
- # when the mail has been sent. To help unit tests, we return that
- # Deferred here even though the normal IStatusReceiver.buildFinished
- # signature doesn't do anything with it. If that changes (if
- # .buildFinished's return value becomes significant), we need to
- # rearrange this.
- return self.buildMessage(name, build, results)
-
- def buildMessage(self, name, build, results):
- text = ""
- if self.mode == "all":
- text += "The Buildbot has finished a build of %s.\n" % name
- elif self.mode == "failing":
- text += "The Buildbot has detected a failed build of %s.\n" % name
- else:
- text += "The Buildbot has detected a new failure of %s.\n" % name
- buildurl = self.status.getURLForThing(build)
- if buildurl:
- text += ("Full details are available at:\n %s\n" %
- urllib.quote(buildurl, '/:'))
- text += "\n"
-
- url = self.status.getBuildbotURL()
- if url:
- text += "Buildbot URL: %s\n\n" % urllib.quote(url, '/:')
-
- text += "Build Reason: %s\n" % build.getReason()
-
- patch = None
- ss = build.getSourceStamp()
- if ss is None:
- source = "unavailable"
- else:
- branch, revision, patch = ss
- source = ""
- if branch:
- source += "[branch %s] " % branch
- if revision:
- source += revision
- else:
- source += "HEAD"
- if patch is not None:
- source += " (plus patch)"
- text += "Build Source Stamp: %s\n" % source
-
- text += "Blamelist: %s\n" % ",".join(build.getResponsibleUsers())
-
- # TODO: maybe display changes here? or in an attachment?
- text += "\n"
-
- t = build.getText()
- if t:
- t = ": " + " ".join(t)
- else:
- t = ""
-
- if results == SUCCESS:
- text += "Build succeeded!\n"
- res = "success"
- elif results == WARNINGS:
- text += "Build Had Warnings%s\n" % t
- res = "warnings"
- else:
- text += "BUILD FAILED%s\n" % t
- res = "failure"
-
- if self.addLogs and build.getLogs():
- text += "Logs are attached.\n"
-
- # TODO: it would be nice to provide a URL for the specific build
- # here. That involves some coordination with html.Waterfall .
- # Ideally we could do:
- # helper = self.parent.getServiceNamed("html")
- # if helper:
- # url = helper.getURLForBuild(build)
-
- text += "\n"
- text += "sincerely,\n"
- text += " -The Buildbot\n"
- text += "\n"
-
- haveAttachments = False
- if patch or self.addLogs:
- haveAttachments = True
- if not canDoAttachments:
- log.msg("warning: I want to send mail with attachments, "
- "but this python is too old to have "
- "email.MIMEMultipart . Please upgrade to python-2.3 "
- "or newer to enable addLogs=True")
-
- if haveAttachments and canDoAttachments:
- m = MIMEMultipart()
- m.attach(MIMEText(text))
- else:
- m = Message()
- m.set_payload(text)
-
- m['Date'] = formatdate(localtime=True)
- m['Subject'] = self.subject % { 'result': res,
- 'builder': name,
- }
- m['From'] = self.fromaddr
- # m['To'] is added later
-
- if patch:
- a = MIMEText(patch)
- a.add_header('Content-Disposition', "attachment",
- filename="source patch")
- m.attach(a)
- if self.addLogs:
- for log in build.getLogs():
- name = "%s.%s" % (log.getStep().getName(),
- log.getName())
- a = MIMEText(log.getText())
- a.add_header('Content-Disposition', "attachment",
- filename=name)
- m.attach(a)
-
- # now, who is this message going to?
- dl = []
- recipients = self.extraRecipients[:]
- username = build.getUsername()
-
- if username:
- recipients.append(username+"@openoffice.org")
-
- if self.sendToInterestedUsers and self.lookup:
- for u in build.getInterestedUsers():
- d = defer.maybeDeferred(self.lookup.getAddress, u)
- d.addCallback(recipients.append)
- dl.append(d)
- d = defer.DeferredList(dl)
- d.addCallback(self._gotRecipients, recipients, m)
- return d
-
- def _gotRecipients(self, res, rlist, m):
- recipients = []
- for r in rlist:
- if r is not None and r not in recipients:
- recipients.append(r)
- recipients.sort()
- m['To'] = ", ".join(recipients)
- return self.sendMessage(m, recipients)
-
- def sendMessage(self, m, recipients):
- s = m.as_string()
- ds = []
- log.msg("sending mail (%d bytes) to" % len(s), recipients)
- for recip in recipients:
- ds.append(sendmail(self.relayhost, self.fromaddr, recip, s))
- return defer.DeferredList(ds)
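
For reference, the MailNotifier deleted above is configured entirely through the constructor arguments documented in its docstring. A minimal sketch of wiring it into a build master follows; the BuildmasterConfig dict named 'c' and the example addresses and domain are illustrative assumptions, not part of this tree.

    # minimal sketch, assuming a standard master.cfg with a BuildmasterConfig
    # dict named 'c'; addresses and the domain are placeholders
    from buildbot.status.mail import MailNotifier, Domain

    mn = MailNotifier(fromaddr="buildbot@example.org",
                      mode="problem",                 # mail only when a build newly fails
                      lookup=Domain("example.org"),   # map VC usernames to user@example.org
                      extraRecipients=["builds@example.org"],
                      relayhost="localhost")
    c['status'].append(mn)

With mode="problem" the notifier stays quiet while a builder keeps failing and only mails again once a failure follows a passing build, per the mode description in the docstring above.
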
diff --git a/buildbot/buildbot-source/buildbot/status/progress.py b/buildbot/buildbot-source/buildbot/status/progress.py
deleted file mode 100644
index dc4d3d572..000000000
--- a/buildbot/buildbot-source/buildbot/status/progress.py
+++ /dev/null
@@ -1,308 +0,0 @@
-# -*- test-case-name: buildbot.test.test_status -*-
-
-from twisted.internet import reactor
-from twisted.spread import pb
-from twisted.python import log
-from buildbot import util
-
-class StepProgress:
- """I keep track of how much progress a single BuildStep has made.
-
- Progress is measured along various axes. Time consumed is one that is
- available for all steps. Amount of command output is another, and may be
- better quantified by scanning the output for markers to derive number of
- files compiled, directories walked, tests run, etc.
-
- I am created when the build begins, and given to a BuildProgress object
- so it can track the overall progress of the whole build.
-
- """
-
- startTime = None
- stopTime = None
- expectedTime = None
- buildProgress = None
- debug = False
-
- def __init__(self, name, metricNames):
- self.name = name
- self.progress = {}
- self.expectations = {}
- for m in metricNames:
- self.progress[m] = None
- self.expectations[m] = None
-
- def setBuildProgress(self, bp):
- self.buildProgress = bp
-
- def setExpectations(self, metrics):
- """The step can call this to explicitly set a target value for one
- of its metrics. E.g., ShellCommands knows how many commands it will
- execute, so it could set the 'commands' expectation."""
- for metric, value in metrics.items():
- self.expectations[metric] = value
- self.buildProgress.newExpectations()
-
- def setExpectedTime(self, seconds):
- self.expectedTime = seconds
- self.buildProgress.newExpectations()
-
- def start(self):
- if self.debug: print "StepProgress.start[%s]" % self.name
- self.startTime = util.now()
-
- def setProgress(self, metric, value):
- """The step calls this as progress is made along various axes."""
- if self.debug:
- print "setProgress[%s][%s] = %s" % (self.name, metric, value)
- self.progress[metric] = value
- if self.debug:
- r = self.remaining()
- print " step remaining:", r
- self.buildProgress.newProgress()
-
- def finish(self):
- """This stops the 'time' metric and marks the step as finished
- overall. It should be called after the last .setProgress has been
- done for each axis."""
- if self.debug: print "StepProgress.finish[%s]" % self.name
- self.stopTime = util.now()
- self.buildProgress.stepFinished(self.name)
-
- def totalTime(self):
- if self.startTime != None and self.stopTime != None:
- return self.stopTime - self.startTime
-
- def remaining(self):
- if self.startTime == None:
- return self.expectedTime
- if self.stopTime != None:
- return 0 # already finished
- # TODO: replace this with cleverness that graphs each metric vs.
- # time, then finds the inverse function. Will probably need to save
- # a timestamp with each setProgress update, when finished, go back
- # and find the 2% transition points, then save those 50 values in a
- # list. On the next build, do linear interpolation between the two
- # closest samples to come up with a percentage represented by that
- # metric.
-
- # TODO: If no other metrics are available, just go with elapsed
- # time. Given the non-time-uniformity of text output from most
- # steps, this would probably be better than the text-percentage
- # scheme currently implemented.
-
- percentages = []
- for metric, value in self.progress.items():
- expectation = self.expectations[metric]
- if value != None and expectation != None:
- p = 1.0 * value / expectation
- percentages.append(p)
- if percentages:
- avg = reduce(lambda x,y: x+y, percentages) / len(percentages)
- if avg > 1.0:
- # overdue
- avg = 1.0
- if avg < 0.0:
- avg = 0.0
- if percentages and self.expectedTime != None:
- return self.expectedTime - (avg * self.expectedTime)
- if self.expectedTime is not None:
- # fall back to pure time
- return self.expectedTime - (util.now() - self.startTime)
- return None # no idea
-
-
-class WatcherState:
- def __init__(self, interval):
- self.interval = interval
- self.timer = None
- self.needUpdate = 0
-
-class BuildProgress(pb.Referenceable):
- """I keep track of overall build progress. I hold a list of StepProgress
- objects.
- """
-
- def __init__(self, stepProgresses):
- self.steps = {}
- for s in stepProgresses:
- self.steps[s.name] = s
- s.setBuildProgress(self)
- self.finishedSteps = []
- self.watchers = {}
- self.debug = 0
-
- def setExpectationsFrom(self, exp):
- """Set our expectations from the builder's Expectations object."""
- for name, metrics in exp.steps.items():
- s = self.steps[name]
- s.setExpectedTime(exp.times[name])
- s.setExpectations(exp.steps[name])
-
- def newExpectations(self):
- """Call this when one of the steps has changed its expectations.
- This should trigger us to update our ETA value and notify any
- subscribers."""
- pass # subscribers are not implemented: they just poll
-
- def stepFinished(self, stepname):
- assert(stepname not in self.finishedSteps)
- self.finishedSteps.append(stepname)
- if len(self.finishedSteps) == len(self.steps.keys()):
- self.sendLastUpdates()
-
- def newProgress(self):
- r = self.remaining()
- if self.debug:
- print " remaining:", r
- if r != None:
- self.sendAllUpdates()
-
- def remaining(self):
- # sum eta of all steps
- sum = 0
- for name, step in self.steps.items():
- rem = step.remaining()
- if rem == None:
- return None # not sure
- sum += rem
- return sum
- def eta(self):
- left = self.remaining()
- if left == None:
- return None # not sure
- done = util.now() + left
- return done
-
-
- def remote_subscribe(self, remote, interval=5):
- # [interval, timer, needUpdate]
- # don't send an update more than once per interval
- self.watchers[remote] = WatcherState(interval)
- remote.notifyOnDisconnect(self.removeWatcher)
- self.updateWatcher(remote)
- self.startTimer(remote)
- log.msg("BuildProgress.remote_subscribe(%s)" % remote)
- def remote_unsubscribe(self, remote):
- # TODO: this doesn't work. I think 'remote' will always be different
- # than the object that appeared in _subscribe.
- log.msg("BuildProgress.remote_unsubscribe(%s)" % remote)
- self.removeWatcher(remote)
- #remote.dontNotifyOnDisconnect(self.removeWatcher)
- def removeWatcher(self, remote):
- #log.msg("removeWatcher(%s)" % remote)
- try:
- timer = self.watchers[remote].timer
- if timer:
- timer.cancel()
- del self.watchers[remote]
- except KeyError:
- log.msg("Weird, removeWatcher on non-existent subscriber:",
- remote)
- def sendAllUpdates(self):
- for r in self.watchers.keys():
- self.updateWatcher(r)
- def updateWatcher(self, remote):
- # an update wants to go to this watcher. Send it if we can, otherwise
- # queue it for later
- w = self.watchers[remote]
- if not w.timer:
- # no timer, so send update now and start the timer
- self.sendUpdate(remote)
- self.startTimer(remote)
- else:
- # timer is running, just mark as needing an update
- w.needUpdate = 1
- def startTimer(self, remote):
- w = self.watchers[remote]
- timer = reactor.callLater(w.interval, self.watcherTimeout, remote)
- w.timer = timer
- def sendUpdate(self, remote, last=0):
- self.watchers[remote].needUpdate = 0
- #text = self.asText() # TODO: not text, duh
- try:
- remote.callRemote("progress", self.remaining())
- if last:
- remote.callRemote("finished", self)
- except:
- log.deferr()
- self.removeWatcher(remote)
-
- def watcherTimeout(self, remote):
- w = self.watchers.get(remote, None)
- if not w:
- return # went away
- w.timer = None
- if w.needUpdate:
- self.sendUpdate(remote)
- self.startTimer(remote)
- def sendLastUpdates(self):
- for remote in self.watchers.keys():
- self.sendUpdate(remote, 1)
- self.removeWatcher(remote)
-
-
-class Expectations:
- debug = False
- # decay=1.0 ignores all but the last build
- # 0.9 is short time constant. 0.1 is very long time constant
- # TODO: let decay be specified per-metric
- decay = 0.5
-
- def __init__(self, buildprogress):
- """Create us from a successful build. We will expect each step to
- take as long as it did in that build."""
-
- # .steps maps stepname to dict2
- # dict2 maps metricname to final end-of-step value
- self.steps = {}
-
- # .times maps stepname to per-step elapsed time
- self.times = {}
-
- for name, step in buildprogress.steps.items():
- self.steps[name] = {}
- for metric, value in step.progress.items():
- self.steps[name][metric] = value
- self.times[name] = None
- if step.startTime is not None and step.stopTime is not None:
- self.times[name] = step.stopTime - step.startTime
-
- def wavg(self, old, current):
- if old is None:
- return current
- if current is None:
- return old
- else:
- return (current * self.decay) + (old * (1 - self.decay))
-
- def update(self, buildprogress):
- for name, stepprogress in buildprogress.steps.items():
- old = self.times[name]
- current = stepprogress.totalTime()
- if current == None:
- log.msg("Expectations.update: current[%s] was None!" % name)
- continue
- new = self.wavg(old, current)
- self.times[name] = new
- if self.debug:
- print "new expected time[%s] = %s, old %s, cur %s" % \
- (name, new, old, current)
-
- for metric, current in stepprogress.progress.items():
- old = self.steps[name][metric]
- new = self.wavg(old, current)
- if self.debug:
- print "new expectation[%s][%s] = %s, old %s, cur %s" % \
- (name, metric, new, old, current)
- self.steps[name][metric] = new
-
- def expectedBuildTime(self):
- if None in self.times.values():
- return None
- #return sum(self.times.values())
- # python-2.2 doesn't have 'sum'. TODO: drop python-2.2 support
- s = 0
- for v in self.times.values():
- s += v
- return s
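
The Expectations class above smooths each step's elapsed time with an exponentially weighted average controlled by the class-level decay constant. A small worked sketch of that update, with made-up numbers for illustration:

    # wavg(old, current) = current*decay + old*(1 - decay), decay = 0.5 as above
    decay = 0.5
    old, current = 100.0, 120.0            # previous estimate 100s, latest build took 120s
    new = (current * decay) + (old * (1 - decay))
    print new                              # 110.0 seconds expected for the next build
    # decay = 1.0 would trust only the latest build; a small value such as 0.1
    # changes the estimate slowly across many builds.

StepProgress.remaining() works the same way in the other direction: it averages the per-metric completion fractions (value / expectation), clamps the result to [0, 1], and returns the unfinished share of the expected time.
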
diff --git a/buildbot/buildbot-source/buildbot/status/tests.py b/buildbot/buildbot-source/buildbot/status/tests.py
deleted file mode 100644
index 6b1031a65..000000000
--- a/buildbot/buildbot-source/buildbot/status/tests.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#! /usr/bin/python
-
-from twisted.web import resource
-from twisted.web.error import NoResource
-from twisted.web.html import PRE
-
-# these are our test result types. Steps are responsible for mapping results
-# into these values.
-SKIP, EXPECTED_FAILURE, FAILURE, ERROR, UNEXPECTED_SUCCESS, SUCCESS = \
- "skip", "expected failure", "failure", "error", "unexpected success", \
- "success"
-UNKNOWN = "unknown" # catch-all
-
-
-class OneTest(resource.Resource):
- isLeaf = 1
- def __init__(self, parent, testName, results):
- self.parent = parent
- self.testName = testName
- self.resultType, self.results = results
-
- def render(self, request):
- request.setHeader("content-type", "text/html")
- if request.method == "HEAD":
- request.setHeader("content-length", len(self.html(request)))
- return ''
- return self.html(request)
-
- def html(self, request):
- # turn ourselves into HTML
- raise NotImplementedError
-
-class TestResults(resource.Resource):
- oneTestClass = OneTest
- def __init__(self):
- resource.Resource.__init__(self)
- self.tests = {}
- def addTest(self, testName, resultType, results=None):
- self.tests[testName] = (resultType, results)
- # TODO: .setName and .delete should be used on our Swappable
- def countTests(self):
- return len(self.tests)
- def countFailures(self):
- failures = 0
- for t in self.tests.values():
- if t[0] in (FAILURE, ERROR):
- failures += 1
- return failures
- def summary(self):
- """Return a short list of text strings as a summary, suitable for
- inclusion in an Event"""
- return ["some", "tests"]
- def describeOneTest(self, testname):
- return "%s: %s\n" % (testname, self.tests[testname][0])
- def html(self):
- data = "<html>\n<head><title>Test Results</title></head>\n"
- data += "<body>\n"
- data += "<pre>\n"
- tests = self.tests.keys()
- tests.sort()
- for testname in tests:
- data += self.describeOneTest(testname)
- data += "</pre>\n"
- data += "</body></html>\n"
- return data
- def render(self, request):
- request.setHeader("content-type", "text/html")
- if request.method == "HEAD":
- request.setHeader("content-length", len(self.html()))
- return ''
- return self.html()
- def getChild(self, path, request):
- if self.tests.has_key(path):
- return self.oneTestClass(self, path, self.tests[path])
- return NoResource("No such test '%s'" % path)
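
TestResults above is a plain twisted.web resource that is fed through addTest() and summarized with the counting helpers. A minimal sketch using only the methods defined in this file (the test names are placeholders):

    # minimal usage sketch; test names are made up
    tr = TestResults()
    tr.addTest("test_foo", SUCCESS)
    tr.addTest("test_bar", FAILURE, results="traceback text")
    tr.addTest("test_baz", SKIP)
    print tr.countTests()                  # 3
    print tr.countFailures()               # 1 (FAILURE and ERROR both count)
    print tr.describeOneTest("test_bar")   # "test_bar: failure"
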
diff --git a/buildbot/buildbot-source/buildbot/status/words.py b/buildbot/buildbot-source/buildbot/status/words.py
deleted file mode 100644
index 9ea54af91..000000000
--- a/buildbot/buildbot-source/buildbot/status/words.py
+++ /dev/null
@@ -1,614 +0,0 @@
-#! /usr/bin/python
-
-# code to deliver build status through twisted.words (instant messaging
-# protocols: irc, etc)
-
-import traceback, StringIO, re, shlex
-
-from twisted.internet import protocol, reactor
-try:
- # Twisted-2.0
- from twisted.words.protocols import irc
-except ImportError:
- # Twisted-1.3
- from twisted.protocols import irc
-from twisted.python import log, failure
-from twisted.application import internet
-
-from buildbot import interfaces, util
-from buildbot import version
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process.base import BuildRequest
-from buildbot.status import base
-from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, EXCEPTION
-from buildbot.scripts.runner import ForceOptions
-
-class UsageError(ValueError):
- def __init__(self, string = "Invalid usage", *more):
- ValueError.__init__(self, string, *more)
-
-class IrcBuildRequest:
- hasStarted = False
- timer = None
-
- def __init__(self, parent, reply):
- self.parent = parent
- self.reply = reply
- self.timer = reactor.callLater(5, self.soon)
-
- def soon(self):
- del self.timer
- if not self.hasStarted:
- self.parent.reply(self.reply,
- "The build has been queued, I'll give a shout"
- " when it starts")
-
- def started(self, c):
- self.hasStarted = True
- if self.timer:
- self.timer.cancel()
- del self.timer
- s = c.getStatus()
- eta = s.getETA()
- response = "build #%d forced" % s.getNumber()
- if eta is not None:
- response = "build forced [ETA %s]" % self.parent.convertTime(eta)
- self.parent.reply(self.reply, response)
- self.parent.reply(self.reply,
- "I'll give a shout when the build finishes")
- d = s.waitUntilFinished()
- d.addCallback(self.parent.buildFinished, self.reply)
-
-
-class IrcStatusBot(irc.IRCClient):
- silly = {
- "What happen ?": "Somebody set up us the bomb.",
- "It's You !!": ["How are you gentlemen !!",
- "All your base are belong to us.",
- "You are on the way to destruction."],
- "What you say !!": ["You have no chance to survive make your time.",
- "HA HA HA HA ...."],
- }
- def __init__(self, nickname, password, channels, status, categories):
- """
- @type nickname: string
- @param nickname: the nickname by which this bot should be known
- @type password: string
- @param password: the password to use for identifying with Nickserv
- @type channels: list of strings
- @param channels: the bot will maintain a presence in these channels
- @type status: L{buildbot.status.builder.Status}
- @param status: the build master's Status object, through which the
- bot retrieves all status information
- """
- self.nickname = nickname
- self.channels = channels
- self.password = password
- self.status = status
- self.categories = categories
- self.counter = 0
- self.hasQuit = 0
-
- def signedOn(self):
- if self.password:
- self.msg("Nickserv", "IDENTIFY " + self.password)
- for c in self.channels:
- self.join(c)
- def joined(self, channel):
- log.msg("I have joined", channel)
- def left(self, channel):
- log.msg("I have left", channel)
- def kickedFrom(self, channel, kicker, message):
- log.msg("I have been kicked from %s by %s: %s" % (channel,
- kicker,
- message))
-
- # input
- def privmsg(self, user, channel, message):
- user = user.split('!', 1)[0] # rest is ~user@hostname
- # channel is '#twisted' or 'buildbot' (for private messages)
- channel = channel.lower()
- #print "privmsg:", user, channel, message
- if channel == self.nickname:
- # private message
- message = "%s: %s" % (self.nickname, message)
- reply = user
- else:
- reply = channel
- if message.startswith("%s:" % self.nickname):
- message = message[len("%s:" % self.nickname):]
-
- message = message.lstrip()
- if self.silly.has_key(message):
- return self.doSilly(user, reply, message)
-
- parts = message.split(' ', 1)
- if len(parts) == 1:
- parts = parts + ['']
- cmd, args = parts
- log.msg("irc command", cmd)
-
- meth = self.getCommandMethod(cmd)
- if not meth and message[-1] == '!':
- meth = self.command_EXCITED
-
- error = None
- try:
- if meth:
- meth(user, reply, args.strip())
- except UsageError, e:
- self.reply(reply, str(e))
- except:
- f = failure.Failure()
- log.err(f)
- error = "Something bad happened (see logs): %s" % f.type
-
- if error:
- try:
- self.reply(reply, error)
- except:
- log.err()
-
- #self.say(channel, "count %d" % self.counter)
- self.counter += 1
- def reply(self, dest, message):
- # maybe self.notice(dest, message) instead?
- self.msg(dest, message)
-
- def getCommandMethod(self, command):
- meth = getattr(self, 'command_' + command.upper(), None)
- return meth
-
- def getBuilder(self, which):
- try:
- b = self.status.getBuilder(which)
- except KeyError:
- raise UsageError, "no such builder '%s'" % which
- return b
-
- def getControl(self, which):
- if not self.control:
- raise UsageError("builder control is not enabled")
- try:
- bc = self.control.getBuilder(which)
- except KeyError:
- raise UsageError("no such builder '%s'" % which)
- return bc
-
- def getAllBuilders(self):
- """
- @rtype: list of L{buildbot.process.builder.Builder}
- """
- names = self.status.getBuilderNames(categories=self.categories)
- names.sort()
- builders = [self.status.getBuilder(n) for n in names]
- return builders
-
- def convertTime(self, seconds):
- if seconds < 60:
- return "%d seconds" % seconds
- minutes = int(seconds / 60)
- seconds = seconds - 60*minutes
- if minutes < 60:
- return "%dm%02ds" % (minutes, seconds)
- hours = int(minutes / 60)
- minutes = minutes - 60*hours
- return "%dh%02dm%02ds" % (hours, minutes, seconds)
-
- def doSilly(self, user, reply, message):
- response = self.silly[message]
- if type(response) != type([]):
- response = [response]
- when = 0.5
- for r in response:
- reactor.callLater(when, self.reply, reply, r)
- when += 2.5
-
- def command_HELLO(self, user, reply, args):
- self.reply(reply, "yes?")
-
- def command_VERSION(self, user, reply, args):
- self.reply(reply, "buildbot-%s at your service" % version)
-
- def command_LIST(self, user, reply, args):
- args = args.split()
- if len(args) == 0:
- raise UsageError, "try 'list builders'"
- if args[0] == 'builders':
- builders = self.getAllBuilders()
- str = "Configured builders: "
- for b in builders:
- str += b.name
- state = b.getState()[0]
- if state == 'offline':
- str += "[offline]"
- str += " "
- str = str.rstrip()
- self.reply(reply, str)
- return
- command_LIST.usage = "list builders - List configured builders"
-
- def command_STATUS(self, user, reply, args):
- args = args.split()
- if len(args) == 0:
- which = "all"
- elif len(args) == 1:
- which = args[0]
- else:
- raise UsageError, "try 'status <builder>'"
- if which == "all":
- builders = self.getAllBuilders()
- for b in builders:
- self.emit_status(reply, b.name)
- return
- self.emit_status(reply, which)
- command_STATUS.usage = "status [<which>] - List status of a builder (or all builders)"
-
- def command_WATCH(self, user, reply, args):
- args = args.split()
- if len(args) != 1:
- raise UsageError("try 'watch <builder>'")
- which = args[0]
- b = self.getBuilder(which)
- builds = b.getCurrentBuilds()
- if not builds:
- self.reply(reply, "there are no builds currently running")
- return
- for build in builds:
- assert not build.isFinished()
- d = build.waitUntilFinished()
- d.addCallback(self.buildFinished, reply)
- r = "watching build %s #%d until it finishes" \
- % (which, build.getNumber())
- eta = build.getETA()
- if eta is not None:
- r += " [%s]" % self.convertTime(eta)
- r += ".."
- self.reply(reply, r)
- command_WATCH.usage = "watch <which> - announce the completion of an active build"
-
- def buildFinished(self, b, reply):
- results = {SUCCESS: "Success",
- WARNINGS: "Warnings",
- FAILURE: "Failure",
- EXCEPTION: "Exception",
- }
-
- # only notify about builders we are interested in
- builder = b.getBuilder()
- log.msg('builder %r in category %s finished' % (builder,
- builder.category))
- if (self.categories != None and
- builder.category not in self.categories):
- return
-
- r = "Hey! build %s #%d is complete: %s" % \
- (b.getBuilder().getName(),
- b.getNumber(),
- results.get(b.getResults(), "??"))
- r += " [%s]" % " ".join(b.getText())
- self.reply(reply, r)
- buildurl = self.status.getURLForThing(b)
- if buildurl:
- self.reply(reply, "Build details are at %s" % buildurl)
-
- def command_FORCE(self, user, reply, args):
- args = shlex.split(args) # TODO: this requires python2.3 or newer
- if args.pop(0) != "build":
- raise UsageError("try 'force build WHICH <REASON>'")
- opts = ForceOptions()
- opts.parseOptions(args)
-
- which = opts['builder']
- branch = opts['branch']
- revision = opts['revision']
- reason = opts['reason']
-
- # keep weird stuff out of the branch and revision strings. TODO:
- # centralize this somewhere.
- if branch and not re.match(r'^[\w\.\-\/]*$', branch):
- log.msg("bad branch '%s'" % branch)
- self.reply(reply, "sorry, bad branch '%s'" % branch)
- return
- if revision and not re.match(r'^[\w\.\-\/]*$', revision):
- log.msg("bad revision '%s'" % revision)
- self.reply(reply, "sorry, bad revision '%s'" % revision)
- return
-
- bc = self.getControl(which)
-
- who = None # TODO: if we can authenticate that a particular User
- # asked for this, use User Name instead of None so they'll
- # be informed of the results.
- # TODO: or, monitor this build and announce the results through the
- # 'reply' argument.
- r = "forced: by IRC user <%s>: %s" % (user, reason)
- # TODO: maybe give certain users the ability to request builds of
- # certain branches
- s = SourceStamp(branch=branch, revision=revision)
- req = BuildRequest(r, s, which)
- try:
- bc.requestBuildSoon(req)
- except interfaces.NoSlaveError:
- self.reply(reply,
- "sorry, I can't force a build: all slaves are offline")
- return
- ireq = IrcBuildRequest(self, reply)
- req.subscribe(ireq.started)
-
-
- command_FORCE.usage = "force build <which> <reason> - Force a build"
-
- def command_STOP(self, user, reply, args):
- args = args.split(None, 2)
- if len(args) < 3 or args[0] != 'build':
- raise UsageError, "try 'stop build WHICH <REASON>'"
- which = args[1]
- reason = args[2]
-
- buildercontrol = self.getControl(which)
-
- who = None
- r = "stopped: by IRC user <%s>: %s" % (user, reason)
-
- # find an in-progress build
- builderstatus = self.getBuilder(which)
- builds = builderstatus.getCurrentBuilds()
- if not builds:
- self.reply(reply, "sorry, no build is currently running")
- return
- for build in builds:
- num = build.getNumber()
-
- # obtain the BuildControl object
- buildcontrol = buildercontrol.getBuild(num)
-
- # make it stop
- buildcontrol.stopBuild(r)
-
- self.reply(reply, "build %d interrupted" % num)
-
- command_STOP.usage = "stop build <which> <reason> - Stop a running build"
-
- def emit_status(self, reply, which):
- b = self.getBuilder(which)
- str = "%s: " % which
- state, builds = b.getState()
- str += state
- if state == "idle":
- last = b.getLastFinishedBuild()
- if last:
- start,finished = last.getTimes()
- str += ", last build %s secs ago: %s" % \
- (int(util.now() - finished), " ".join(last.getText()))
- if state == "building":
- t = []
- for build in builds:
- step = build.getCurrentStep()
- s = "(%s)" % " ".join(step.getText())
- ETA = build.getETA()
- if ETA is not None:
- s += " [ETA %s]" % self.convertTime(ETA)
- t.append(s)
- str += ", ".join(t)
- self.reply(reply, str)
-
- def emit_last(self, reply, which):
- last = self.getBuilder(which).getLastFinishedBuild()
- if not last:
- str = "(no builds run since last restart)"
- else:
- start,finish = last.getTimes()
- str = "%s secs ago: " % (int(util.now() - finish))
- str += " ".join(last.getText())
- self.reply(reply, "last build [%s]: %s" % (which, str))
-
- def command_LAST(self, user, reply, args):
- args = args.split()
- if len(args) == 0:
- which = "all"
- elif len(args) == 1:
- which = args[0]
- else:
- raise UsageError, "try 'last <builder>'"
- if which == "all":
- builders = self.getAllBuilders()
- for b in builders:
- self.emit_last(reply, b.name)
- return
- self.emit_last(reply, which)
- command_LAST.usage = "last <which> - list last build status for builder <which>"
-
- def build_commands(self):
- commands = []
- for k in self.__class__.__dict__.keys():
- if k.startswith('command_'):
- commands.append(k[8:].lower())
- commands.sort()
- return commands
-
- def command_HELP(self, user, reply, args):
- args = args.split()
- if len(args) == 0:
- self.reply(reply, "Get help on what? (try 'help <foo>', or 'commands' for a command list)")
- return
- command = args[0]
- meth = self.getCommandMethod(command)
- if not meth:
- raise UsageError, "no such command '%s'" % command
- usage = getattr(meth, 'usage', None)
- if usage:
- self.reply(reply, "Usage: %s" % usage)
- else:
- self.reply(reply, "No usage info for '%s'" % command)
- command_HELP.usage = "help <command> - Give help for <command>"
-
- def command_SOURCE(self, user, reply, args):
- banner = "My source can be found at http://buildbot.sourceforge.net/"
- self.reply(reply, banner)
-
- def command_COMMANDS(self, user, reply, args):
- commands = self.build_commands()
- str = "buildbot commands: " + ", ".join(commands)
- self.reply(reply, str)
- command_COMMANDS.usage = "commands - List available commands"
-
- def command_DESTROY(self, user, reply, args):
- self.me(reply, "readies phasers")
-
- def command_DANCE(self, user, reply, args):
- reactor.callLater(1.0, self.reply, reply, "0-<")
- reactor.callLater(3.0, self.reply, reply, "0-/")
- reactor.callLater(3.5, self.reply, reply, "0-\\")
-
- def command_EXCITED(self, user, reply, args):
- # like 'buildbot: destroy the sun!'
- self.reply(reply, "What you say!")
-
- def action(self, user, channel, data):
- #log.msg("action: %s,%s,%s" % (user, channel, data))
- user = user.split('!', 1)[0] # rest is ~user@hostname
- # somebody did an action (/me actions)
- if data.endswith("s buildbot"):
- words = data.split()
- verb = words[-2]
- timeout = 4
- if verb == "kicks":
- response = "%s back" % verb
- timeout = 1
- else:
- response = "%s %s too" % (verb, user)
- reactor.callLater(timeout, self.me, channel, response)
- # userJoined(self, user, channel)
-
- # output
- # self.say(channel, message) # broadcast
- # self.msg(user, message) # unicast
- # self.me(channel, action) # send action
- # self.away(message='')
- # self.quit(message='')
-
-class ThrottledClientFactory(protocol.ClientFactory):
- lostDelay = 2
- failedDelay = 60
- def clientConnectionLost(self, connector, reason):
- reactor.callLater(self.lostDelay, connector.connect)
- def clientConnectionFailed(self, connector, reason):
- reactor.callLater(self.failedDelay, connector.connect)
-
-class IrcStatusFactory(ThrottledClientFactory):
- protocol = IrcStatusBot
-
- status = None
- control = None
- shuttingDown = False
- p = None
-
- def __init__(self, nickname, password, channels, categories):
- #ThrottledClientFactory.__init__(self) # doesn't exist
- self.status = None
- self.nickname = nickname
- self.password = password
- self.channels = channels
- self.categories = categories
-
- def __getstate__(self):
- d = self.__dict__.copy()
- del d['p']
- return d
-
- def shutdown(self):
- self.shuttingDown = True
- if self.p:
- self.p.quit("buildmaster reconfigured: bot disconnecting")
-
- def buildProtocol(self, address):
- p = self.protocol(self.nickname, self.password,
- self.channels, self.status,
- self.categories)
- p.factory = self
- p.status = self.status
- p.control = self.control
- self.p = p
- return p
-
- # TODO: I think a shutdown that occurs while the connection is being
- # established will make this explode
-
- def clientConnectionLost(self, connector, reason):
- if self.shuttingDown:
- log.msg("not scheduling reconnection attempt")
- return
- ThrottledClientFactory.clientConnectionLost(self, connector, reason)
-
- def clientConnectionFailed(self, connector, reason):
- if self.shuttingDown:
- log.msg("not scheduling reconnection attempt")
- return
- ThrottledClientFactory.clientConnectionFailed(self, connector, reason)
-
-
-class IRC(base.StatusReceiverMultiService):
- """I am an IRC bot which can be queried for status information. I
- connect to a single IRC server and am known by a single nickname on that
- server, however I can join multiple channels."""
-
- compare_attrs = ["host", "port", "nick", "password",
- "channels", "allowForce",
- "categories"]
-
- def __init__(self, host, nick, channels, port=6667, allowForce=True,
- categories=None, password=None):
- base.StatusReceiverMultiService.__init__(self)
-
- assert allowForce in (True, False) # TODO: implement others
-
- # need to stash these so we can detect changes later
- self.host = host
- self.port = port
- self.nick = nick
- self.channels = channels
- self.password = password
- self.allowForce = allowForce
- self.categories = categories
-
- # need to stash the factory so we can give it the status object
- self.f = IrcStatusFactory(self.nick, self.password,
- self.channels, self.categories)
-
- c = internet.TCPClient(host, port, self.f)
- c.setServiceParent(self)
-
- def setServiceParent(self, parent):
- base.StatusReceiverMultiService.setServiceParent(self, parent)
- self.f.status = parent.getStatus()
- if self.allowForce:
- self.f.control = interfaces.IControl(parent)
-
- def stopService(self):
- # make sure the factory will stop reconnecting
- self.f.shutdown()
- return base.StatusReceiverMultiService.stopService(self)
-
-
-def main():
- # quick manual test: connect a (status-less) bot to a local IRC server
- from twisted.internet import reactor
- host = "localhost"
- port = 6667
- f = IrcStatusFactory("buildbot", None, ["private", "other"], None)
- reactor.connectTCP(host, port, f)
- reactor.run()
-
-
-if __name__ == '__main__':
- main()
-
-## buildbot: list builders
-# buildbot: watch quick
-# print notification when current build in 'quick' finishes
-## buildbot: status
-## buildbot: status full-2.3
-## building, not, % complete, ETA
-## buildbot: force build full-2.3 "reason"
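
The IRC status target deleted above is configured much like the mail notifier: construct it with the arguments listed in IRC.__init__ and attach it to the master. A minimal sketch; the server, nickname, channel, and the BuildmasterConfig dict named 'c' are illustrative assumptions:

    # minimal sketch, assuming a standard master.cfg with a dict named 'c'
    from buildbot.status import words

    irc = words.IRC(host="irc.example.net", nick="bbtest",
                    channels=["#buildbot-test"],
                    allowForce=True)       # lets channel users issue 'force build ...'
    c['status'].append(irc)

The commented command list just above ('list builders', 'watch', 'status', 'force build') is the bot's user-facing vocabulary; the 'help' and 'commands' commands enumerate the rest at runtime.
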
diff --git a/buildbot/buildbot-source/buildbot/test/__init__.py b/buildbot/buildbot-source/buildbot/test/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/buildbot/buildbot-source/buildbot/test/__init__.py
+++ /dev/null
diff --git a/buildbot/buildbot-source/buildbot/test/emit.py b/buildbot/buildbot-source/buildbot/test/emit.py
deleted file mode 100644
index c5bf5677d..000000000
--- a/buildbot/buildbot-source/buildbot/test/emit.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#! /usr/bin/python
-
-import os, sys
-
-sys.stdout.write("this is stdout\n")
-sys.stderr.write("this is stderr\n")
-if os.environ.has_key("EMIT_TEST"):
- sys.stdout.write("EMIT_TEST: %s\n" % os.environ["EMIT_TEST"])
-rc = int(sys.argv[1])
-sys.exit(rc)
diff --git a/buildbot/buildbot-source/buildbot/test/mail/msg1 b/buildbot/buildbot-source/buildbot/test/mail/msg1
deleted file mode 100644
index cc8442eb7..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/msg1
+++ /dev/null
@@ -1,68 +0,0 @@
-Return-Path: <twisted-commits-admin@twistedmatrix.com>
-Delivered-To: warner-twistedcvs@luther.lothar.com
-Received: (qmail 11151 invoked by uid 1000); 11 Jan 2003 17:10:04 -0000
-Delivered-To: warner-twistedcvs@lothar.com
-Received: (qmail 1548 invoked by uid 13574); 11 Jan 2003 17:06:39 -0000
-Received: from unknown (HELO pyramid.twistedmatrix.com) ([64.123.27.105]) (envelope-sender <twisted-commits-admin@twistedmatrix.com>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-twistedcvs@lothar.com>; 11 Jan 2003 17:06:39 -0000
-Received: from localhost ([127.0.0.1] helo=pyramid.twistedmatrix.com)
- by pyramid.twistedmatrix.com with esmtp (Exim 3.35 #1 (Debian))
- id 18XP0U-0002Mq-00; Sat, 11 Jan 2003 11:01:14 -0600
-Received: from acapnotic by pyramid.twistedmatrix.com with local (Exim 3.35 #1 (Debian))
- id 18XP02-0002MN-00
- for <twisted-commits@twistedmatrix.com>; Sat, 11 Jan 2003 11:00:46 -0600
-To: twisted-commits@twistedmatrix.com
-From: moshez CVS <moshez@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-X-Mailer: CVSToys
-From: moshez CVS <moshez@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-Message-Id: <E18XP02-0002MN-00@pyramid.twistedmatrix.com>
-Subject: [Twisted-commits] Instance massenger, apparently
-Sender: twisted-commits-admin@twistedmatrix.com
-Errors-To: twisted-commits-admin@twistedmatrix.com
-X-BeenThere: twisted-commits@twistedmatrix.com
-X-Mailman-Version: 2.0.11
-Precedence: bulk
-List-Help: <mailto:twisted-commits-request@twistedmatrix.com?subject=help>
-List-Post: <mailto:twisted-commits@twistedmatrix.com>
-List-Subscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=subscribe>
-List-Id: <twisted-commits.twistedmatrix.com>
-List-Unsubscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=unsubscribe>
-List-Archive: <http://twistedmatrix.com/pipermail/twisted-commits/>
-Date: Sat, 11 Jan 2003 11:00:46 -0600
-Status:
-
-Modified files:
-Twisted/debian/python-twisted.menu.in 1.3 1.4
-
-Log message:
-Instance massenger, apparently
-
-
-ViewCVS links:
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/debian/python-twisted.menu.in.diff?r1=text&tr1=1.3&r2=text&tr2=1.4&cvsroot=Twisted
-
-Index: Twisted/debian/python-twisted.menu.in
-diff -u Twisted/debian/python-twisted.menu.in:1.3 Twisted/debian/python-twisted.menu.in:1.4
---- Twisted/debian/python-twisted.menu.in:1.3 Sat Dec 28 10:02:12 2002
-+++ Twisted/debian/python-twisted.menu.in Sat Jan 11 09:00:44 2003
-@@ -1,7 +1,7 @@
- ?package(python@VERSION@-twisted):\
- needs=x11\
- section="Apps/Net"\
--title="Twisted Instant Messenger (@VERSION@)"\
-+title="Twisted Instance Messenger (@VERSION@)"\
- command="/usr/bin/t-im@VERSION@"
-
- ?package(python@VERSION@-twisted):\
-
-.
-
-_______________________________________________
-Twisted-commits mailing list
-Twisted-commits@twistedmatrix.com
-http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits
diff --git a/buildbot/buildbot-source/buildbot/test/mail/msg2 b/buildbot/buildbot-source/buildbot/test/mail/msg2
deleted file mode 100644
index ada1311eb..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/msg2
+++ /dev/null
@@ -1,101 +0,0 @@
-Return-Path: <twisted-commits-admin@twistedmatrix.com>
-Delivered-To: warner-twistedcvs@luther.lothar.com
-Received: (qmail 32220 invoked by uid 1000); 14 Jan 2003 21:50:04 -0000
-Delivered-To: warner-twistedcvs@lothar.com
-Received: (qmail 7923 invoked by uid 13574); 14 Jan 2003 21:49:48 -0000
-Received: from unknown (HELO pyramid.twistedmatrix.com) ([64.123.27.105]) (envelope-sender <twisted-commits-admin@twistedmatrix.com>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-twistedcvs@lothar.com>; 14 Jan 2003 21:49:48 -0000
-Received: from localhost ([127.0.0.1] helo=pyramid.twistedmatrix.com)
- by pyramid.twistedmatrix.com with esmtp (Exim 3.35 #1 (Debian))
- id 18YYr0-0005en-00; Tue, 14 Jan 2003 15:44:14 -0600
-Received: from acapnotic by pyramid.twistedmatrix.com with local (Exim 3.35 #1 (Debian))
- id 18YYq7-0005eQ-00
- for <twisted-commits@twistedmatrix.com>; Tue, 14 Jan 2003 15:43:19 -0600
-To: twisted-commits@twistedmatrix.com
-From: itamarst CVS <itamarst@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-X-Mailer: CVSToys
-From: itamarst CVS <itamarst@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-Message-Id: <E18YYq7-0005eQ-00@pyramid.twistedmatrix.com>
-Subject: [Twisted-commits] submit formmethod now subclass of Choice
-Sender: twisted-commits-admin@twistedmatrix.com
-Errors-To: twisted-commits-admin@twistedmatrix.com
-X-BeenThere: twisted-commits@twistedmatrix.com
-X-Mailman-Version: 2.0.11
-Precedence: bulk
-List-Help: <mailto:twisted-commits-request@twistedmatrix.com?subject=help>
-List-Post: <mailto:twisted-commits@twistedmatrix.com>
-List-Subscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=subscribe>
-List-Id: <twisted-commits.twistedmatrix.com>
-List-Unsubscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=unsubscribe>
-List-Archive: <http://twistedmatrix.com/pipermail/twisted-commits/>
-Date: Tue, 14 Jan 2003 15:43:19 -0600
-Status:
-
-Modified files:
-Twisted/twisted/web/woven/form.py 1.20 1.21
-Twisted/twisted/python/formmethod.py 1.12 1.13
-
-Log message:
-submit formmethod now subclass of Choice
-
-
-ViewCVS links:
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/twisted/web/woven/form.py.diff?r1=text&tr1=1.20&r2=text&tr2=1.21&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/twisted/python/formmethod.py.diff?r1=text&tr1=1.12&r2=text&tr2=1.13&cvsroot=Twisted
-
-Index: Twisted/twisted/web/woven/form.py
-diff -u Twisted/twisted/web/woven/form.py:1.20 Twisted/twisted/web/woven/form.py:1.21
---- Twisted/twisted/web/woven/form.py:1.20 Tue Jan 14 12:07:29 2003
-+++ Twisted/twisted/web/woven/form.py Tue Jan 14 13:43:16 2003
-@@ -140,8 +140,8 @@
-
- def input_submit(self, request, content, arg):
- div = content.div()
-- for value in arg.buttons:
-- div.input(type="submit", name=arg.name, value=value)
-+ for tag, value, desc in arg.choices:
-+ div.input(type="submit", name=arg.name, value=tag)
- div.text(" ")
- if arg.reset:
- div.input(type="reset")
-
-Index: Twisted/twisted/python/formmethod.py
-diff -u Twisted/twisted/python/formmethod.py:1.12 Twisted/twisted/python/formmethod.py:1.13
---- Twisted/twisted/python/formmethod.py:1.12 Tue Jan 14 12:07:30 2003
-+++ Twisted/twisted/python/formmethod.py Tue Jan 14 13:43:17 2003
-@@ -180,19 +180,13 @@
- return 1
-
-
--class Submit(Argument):
-+class Submit(Choice):
- """Submit button or a reasonable facsimile thereof."""
-
-- def __init__(self, name, buttons=["Submit"], reset=0, shortDesc=None, longDesc=None):
-- Argument.__init__(self, name, shortDesc=shortDesc, longDesc=longDesc)
-- self.buttons = buttons
-+ def __init__(self, name, choices=[("Submit", "submit", "Submit form")],
-+ reset=0, shortDesc=None, longDesc=None):
-+ Choice.__init__(self, name, choices=choices, shortDesc=shortDesc, longDesc=longDesc)
- self.reset = reset
--
-- def coerce(self, val):
-- if val in self.buttons:
-- return val
-- else:
-- raise InputError, "no such action"
-
-
- class PresentationHint:
-
-.
-
-_______________________________________________
-Twisted-commits mailing list
-Twisted-commits@twistedmatrix.com
-http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits
diff --git a/buildbot/buildbot-source/buildbot/test/mail/msg3 b/buildbot/buildbot-source/buildbot/test/mail/msg3
deleted file mode 100644
index f9ff199af..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/msg3
+++ /dev/null
@@ -1,97 +0,0 @@
-Return-Path: <twisted-commits-admin@twistedmatrix.com>
-Delivered-To: warner-twistedcvs@luther.lothar.com
-Received: (qmail 32220 invoked by uid 1000); 14 Jan 2003 21:50:04 -0000
-Delivered-To: warner-twistedcvs@lothar.com
-Received: (qmail 7923 invoked by uid 13574); 14 Jan 2003 21:49:48 -0000
-Received: from unknown (HELO pyramid.twistedmatrix.com) ([64.123.27.105]) (envelope-sender <twisted-commits-admin@twistedmatrix.com>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-twistedcvs@lothar.com>; 14 Jan 2003 21:49:48 -0000
-Received: from localhost ([127.0.0.1] helo=pyramid.twistedmatrix.com)
- by pyramid.twistedmatrix.com with esmtp (Exim 3.35 #1 (Debian))
- id 18YYr0-0005en-00; Tue, 14 Jan 2003 15:44:14 -0600
-Received: from acapnotic by pyramid.twistedmatrix.com with local (Exim 3.35 #1 (Debian))
- id 18YYq7-0005eQ-00
- for <twisted-commits@twistedmatrix.com>; Tue, 14 Jan 2003 15:43:19 -0600
-To: twisted-commits@twistedmatrix.com
-From: itamarst CVS <itamarst@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-X-Mailer: CVSToys
-From: itamarst CVS <itamarst@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-Message-Id: <E18YYq7-0005eQ-00@pyramid.twistedmatrix.com>
-Subject: [Twisted-commits] submit formmethod now subclass of Choice
-Sender: twisted-commits-admin@twistedmatrix.com
-Errors-To: twisted-commits-admin@twistedmatrix.com
-X-BeenThere: twisted-commits@twistedmatrix.com
-X-Mailman-Version: 2.0.11
-Precedence: bulk
-List-Help: <mailto:twisted-commits-request@twistedmatrix.com?subject=help>
-List-Post: <mailto:twisted-commits@twistedmatrix.com>
-List-Subscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=subscribe>
-List-Id: <twisted-commits.twistedmatrix.com>
-List-Unsubscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=unsubscribe>
-List-Archive: <http://twistedmatrix.com/pipermail/twisted-commits/>
-Date: Tue, 14 Jan 2003 15:43:19 -0600
-Status:
-
-Modified files:
-Twisted/twisted/web/woven/form.py 1.20 1.21
-Twisted/twisted/python/formmethod.py 1.12 1.13
-
-Log message:
-submit formmethod now subclass of Choice
-
-
-Index: Twisted/twisted/web/woven/form.py
-diff -u Twisted/twisted/web/woven/form.py:1.20 Twisted/twisted/web/woven/form.py:1.21
---- Twisted/twisted/web/woven/form.py:1.20 Tue Jan 14 12:07:29 2003
-+++ Twisted/twisted/web/woven/form.py Tue Jan 14 13:43:16 2003
-@@ -140,8 +140,8 @@
-
- def input_submit(self, request, content, arg):
- div = content.div()
-- for value in arg.buttons:
-- div.input(type="submit", name=arg.name, value=value)
-+ for tag, value, desc in arg.choices:
-+ div.input(type="submit", name=arg.name, value=tag)
- div.text(" ")
- if arg.reset:
- div.input(type="reset")
-
-Index: Twisted/twisted/python/formmethod.py
-diff -u Twisted/twisted/python/formmethod.py:1.12 Twisted/twisted/python/formmethod.py:1.13
---- Twisted/twisted/python/formmethod.py:1.12 Tue Jan 14 12:07:30 2003
-+++ Twisted/twisted/python/formmethod.py Tue Jan 14 13:43:17 2003
-@@ -180,19 +180,13 @@
- return 1
-
-
--class Submit(Argument):
-+class Submit(Choice):
- """Submit button or a reasonable facsimile thereof."""
-
-- def __init__(self, name, buttons=["Submit"], reset=0, shortDesc=None, longDesc=None):
-- Argument.__init__(self, name, shortDesc=shortDesc, longDesc=longDesc)
-- self.buttons = buttons
-+ def __init__(self, name, choices=[("Submit", "submit", "Submit form")],
-+ reset=0, shortDesc=None, longDesc=None):
-+ Choice.__init__(self, name, choices=choices, shortDesc=shortDesc, longDesc=longDesc)
- self.reset = reset
--
-- def coerce(self, val):
-- if val in self.buttons:
-- return val
-- else:
-- raise InputError, "no such action"
-
-
- class PresentationHint:
-
-.
-
-_______________________________________________
-Twisted-commits mailing list
-Twisted-commits@twistedmatrix.com
-http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits
diff --git a/buildbot/buildbot-source/buildbot/test/mail/msg4 b/buildbot/buildbot-source/buildbot/test/mail/msg4
deleted file mode 100644
index 9e674dc8e..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/msg4
+++ /dev/null
@@ -1,45 +0,0 @@
-Return-Path: <twisted-commits-admin@twistedmatrix.com>
-Delivered-To: warner-twistedcvs@luther.lothar.com
-Received: (qmail 32220 invoked by uid 1000); 14 Jan 2003 21:50:04 -0000
-Delivered-To: warner-twistedcvs@lothar.com
-Received: (qmail 7923 invoked by uid 13574); 14 Jan 2003 21:49:48 -0000
-Received: from unknown (HELO pyramid.twistedmatrix.com) ([64.123.27.105]) (envelope-sender <twisted-commits-admin@twistedmatrix.com>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-twistedcvs@lothar.com>; 14 Jan 2003 21:49:48 -0000
-Received: from localhost ([127.0.0.1] helo=pyramid.twistedmatrix.com)
- by pyramid.twistedmatrix.com with esmtp (Exim 3.35 #1 (Debian))
- id 18YYr0-0005en-00; Tue, 14 Jan 2003 15:44:14 -0600
-Received: from acapnotic by pyramid.twistedmatrix.com with local (Exim 3.35 #1 (Debian))
- id 18YYq7-0005eQ-00
- for <twisted-commits@twistedmatrix.com>; Tue, 14 Jan 2003 15:43:19 -0600
-To: twisted-commits@twistedmatrix.com
-From: itamarst CVS <itamarst@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-X-Mailer: CVSToys
-From: itamarst CVS <itamarst@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-Message-Id: <E18YYq7-0005eQ-00@pyramid.twistedmatrix.com>
-Subject: [Twisted-commits] submit formmethod now subclass of Choice
-Sender: twisted-commits-admin@twistedmatrix.com
-Errors-To: twisted-commits-admin@twistedmatrix.com
-X-BeenThere: twisted-commits@twistedmatrix.com
-X-Mailman-Version: 2.0.11
-Precedence: bulk
-List-Help: <mailto:twisted-commits-request@twistedmatrix.com?subject=help>
-List-Post: <mailto:twisted-commits@twistedmatrix.com>
-List-Subscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=subscribe>
-List-Id: <twisted-commits.twistedmatrix.com>
-List-Unsubscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=unsubscribe>
-List-Archive: <http://twistedmatrix.com/pipermail/twisted-commits/>
-Date: Tue, 14 Jan 2003 15:43:19 -0600
-Status:
-
-Modified files:
-Twisted/twisted/web/woven/form.py 1.20 1.21
-Twisted/twisted/python/formmethod.py 1.12 1.13
-
-Log message:
-submit formmethod now subclass of Choice
-
diff --git a/buildbot/buildbot-source/buildbot/test/mail/msg5 b/buildbot/buildbot-source/buildbot/test/mail/msg5
deleted file mode 100644
index f20a958ea..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/msg5
+++ /dev/null
@@ -1,54 +0,0 @@
-Return-Path: <twisted-commits-admin@twistedmatrix.com>
-Delivered-To: warner-twistedcvs@luther.lothar.com
-Received: (qmail 5865 invoked by uid 1000); 17 Jan 2003 07:00:04 -0000
-Delivered-To: warner-twistedcvs@lothar.com
-Received: (qmail 40460 invoked by uid 13574); 17 Jan 2003 06:51:55 -0000
-Received: from unknown (HELO pyramid.twistedmatrix.com) ([64.123.27.105]) (envelope-sender <twisted-commits-admin@twistedmatrix.com>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-twistedcvs@lothar.com>; 17 Jan 2003 06:51:55 -0000
-Received: from localhost ([127.0.0.1] helo=pyramid.twistedmatrix.com)
- by pyramid.twistedmatrix.com with esmtp (Exim 3.35 #1 (Debian))
- id 18ZQGk-0003WL-00; Fri, 17 Jan 2003 00:46:22 -0600
-Received: from acapnotic by pyramid.twistedmatrix.com with local (Exim 3.35 #1 (Debian))
- id 18ZQFy-0003VP-00
- for <twisted-commits@twistedmatrix.com>; Fri, 17 Jan 2003 00:45:34 -0600
-To: twisted-commits@twistedmatrix.com
-From: etrepum CVS <etrepum@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-X-Mailer: CVSToys
-From: etrepum CVS <etrepum@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-Message-Id: <E18ZQFy-0003VP-00@pyramid.twistedmatrix.com>
-Subject: [Twisted-commits] Directory /cvs/Twisted/doc/examples/cocoaDemo added to the repository
-Sender: twisted-commits-admin@twistedmatrix.com
-Errors-To: twisted-commits-admin@twistedmatrix.com
-X-BeenThere: twisted-commits@twistedmatrix.com
-X-Mailman-Version: 2.0.11
-Precedence: bulk
-List-Help: <mailto:twisted-commits-request@twistedmatrix.com?subject=help>
-List-Post: <mailto:twisted-commits@twistedmatrix.com>
-List-Subscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=subscribe>
-List-Id: <twisted-commits.twistedmatrix.com>
-List-Unsubscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=unsubscribe>
-List-Archive: <http://twistedmatrix.com/pipermail/twisted-commits/>
-Date: Fri, 17 Jan 2003 00:45:34 -0600
-Status:
-
-Modified files:
-Twisted/doc/examples/cocoaDemo 0 0
-
-Log message:
-Directory /cvs/Twisted/doc/examples/cocoaDemo added to the repository
-
-
-ViewCVS links:
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo.diff?r1=text&tr1=NONE&r2=text&tr2=NONE&cvsroot=Twisted
-
-.
-
-_______________________________________________
-Twisted-commits mailing list
-Twisted-commits@twistedmatrix.com
-http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits
diff --git a/buildbot/buildbot-source/buildbot/test/mail/msg6 b/buildbot/buildbot-source/buildbot/test/mail/msg6
deleted file mode 100644
index 20719f4e3..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/msg6
+++ /dev/null
@@ -1,70 +0,0 @@
-Return-Path: <twisted-commits-admin@twistedmatrix.com>
-Delivered-To: warner-twistedcvs@luther.lothar.com
-Received: (qmail 7252 invoked by uid 1000); 17 Jan 2003 07:10:04 -0000
-Delivered-To: warner-twistedcvs@lothar.com
-Received: (qmail 43115 invoked by uid 13574); 17 Jan 2003 07:07:57 -0000
-Received: from unknown (HELO pyramid.twistedmatrix.com) ([64.123.27.105]) (envelope-sender <twisted-commits-admin@twistedmatrix.com>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-twistedcvs@lothar.com>; 17 Jan 2003 07:07:57 -0000
-Received: from localhost ([127.0.0.1] helo=pyramid.twistedmatrix.com)
- by pyramid.twistedmatrix.com with esmtp (Exim 3.35 #1 (Debian))
- id 18ZQW6-0003dA-00; Fri, 17 Jan 2003 01:02:14 -0600
-Received: from acapnotic by pyramid.twistedmatrix.com with local (Exim 3.35 #1 (Debian))
- id 18ZQV7-0003cm-00
- for <twisted-commits@twistedmatrix.com>; Fri, 17 Jan 2003 01:01:13 -0600
-To: twisted-commits@twistedmatrix.com
-From: etrepum CVS <etrepum@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-X-Mailer: CVSToys
-From: etrepum CVS <etrepum@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-Message-Id: <E18ZQV7-0003cm-00@pyramid.twistedmatrix.com>
-Subject: [Twisted-commits] Cocoa (OS X) clone of the QT demo, using polling reactor
-Sender: twisted-commits-admin@twistedmatrix.com
-Errors-To: twisted-commits-admin@twistedmatrix.com
-X-BeenThere: twisted-commits@twistedmatrix.com
-X-Mailman-Version: 2.0.11
-Precedence: bulk
-List-Help: <mailto:twisted-commits-request@twistedmatrix.com?subject=help>
-List-Post: <mailto:twisted-commits@twistedmatrix.com>
-List-Subscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=subscribe>
-List-Id: <twisted-commits.twistedmatrix.com>
-List-Unsubscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=unsubscribe>
-List-Archive: <http://twistedmatrix.com/pipermail/twisted-commits/>
-Date: Fri, 17 Jan 2003 01:01:13 -0600
-Status:
-
-Modified files:
-Twisted/doc/examples/cocoaDemo/MyAppDelegate.py None 1.1
-Twisted/doc/examples/cocoaDemo/__main__.py None 1.1
-Twisted/doc/examples/cocoaDemo/bin-python-main.m None 1.1
-Twisted/doc/examples/cocoaDemo/English.lproj/InfoPlist.strings None 1.1
-Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib None 1.1
-Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib None 1.1
-Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib None 1.1
-Twisted/doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj None 1.1
-
-Log message:
-Cocoa (OS X) clone of the QT demo, using polling reactor
-
-Requires pyobjc ( http://pyobjc.sourceforge.net ), it's not much different than the template project. The reactor is iterated periodically by a repeating NSTimer.
-
-
-ViewCVS links:
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/MyAppDelegate.py.diff?r1=text&tr1=None&r2=text&tr2=1.1&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/__main__.py.diff?r1=text&tr1=None&r2=text&tr2=1.1&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/bin-python-main.m.diff?r1=text&tr1=None&r2=text&tr2=1.1&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/English.lproj/InfoPlist.strings.diff?r1=text&tr1=None&r2=text&tr2=1.1&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib.diff?r1=text&tr1=None&r2=text&tr2=1.1&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib.diff?r1=text&tr1=None&r2=text&tr2=1.1&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib.diff?r1=text&tr1=None&r2=text&tr2=1.1&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj.diff?r1=text&tr1=None&r2=text&tr2=1.1&cvsroot=Twisted
-
-.
-
-_______________________________________________
-Twisted-commits mailing list
-Twisted-commits@twistedmatrix.com
-http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits
diff --git a/buildbot/buildbot-source/buildbot/test/mail/msg7 b/buildbot/buildbot-source/buildbot/test/mail/msg7
deleted file mode 100644
index 515be1d16..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/msg7
+++ /dev/null
@@ -1,68 +0,0 @@
-Return-Path: <twisted-commits-admin@twistedmatrix.com>
-Delivered-To: warner-twistedcvs@luther.lothar.com
-Received: (qmail 8665 invoked by uid 1000); 17 Jan 2003 08:00:03 -0000
-Delivered-To: warner-twistedcvs@lothar.com
-Received: (qmail 50728 invoked by uid 13574); 17 Jan 2003 07:51:14 -0000
-Received: from unknown (HELO pyramid.twistedmatrix.com) ([64.123.27.105]) (envelope-sender <twisted-commits-admin@twistedmatrix.com>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-twistedcvs@lothar.com>; 17 Jan 2003 07:51:14 -0000
-Received: from localhost ([127.0.0.1] helo=pyramid.twistedmatrix.com)
- by pyramid.twistedmatrix.com with esmtp (Exim 3.35 #1 (Debian))
- id 18ZRBm-0003pN-00; Fri, 17 Jan 2003 01:45:18 -0600
-Received: from acapnotic by pyramid.twistedmatrix.com with local (Exim 3.35 #1 (Debian))
- id 18ZRBQ-0003ou-00
- for <twisted-commits@twistedmatrix.com>; Fri, 17 Jan 2003 01:44:56 -0600
-To: twisted-commits@twistedmatrix.com
-From: etrepum CVS <etrepum@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-X-Mailer: CVSToys
-From: etrepum CVS <etrepum@twistedmatrix.com>
-Reply-To: twisted-python@twistedmatrix.com
-Message-Id: <E18ZRBQ-0003ou-00@pyramid.twistedmatrix.com>
-Subject: [Twisted-commits] Directories break debian build script, waiting for reasonable fix
-Sender: twisted-commits-admin@twistedmatrix.com
-Errors-To: twisted-commits-admin@twistedmatrix.com
-X-BeenThere: twisted-commits@twistedmatrix.com
-X-Mailman-Version: 2.0.11
-Precedence: bulk
-List-Help: <mailto:twisted-commits-request@twistedmatrix.com?subject=help>
-List-Post: <mailto:twisted-commits@twistedmatrix.com>
-List-Subscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=subscribe>
-List-Id: <twisted-commits.twistedmatrix.com>
-List-Unsubscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=unsubscribe>
-List-Archive: <http://twistedmatrix.com/pipermail/twisted-commits/>
-Date: Fri, 17 Jan 2003 01:44:56 -0600
-Status:
-
-Modified files:
-Twisted/doc/examples/cocoaDemo/MyAppDelegate.py 1.1 None
-Twisted/doc/examples/cocoaDemo/__main__.py 1.1 None
-Twisted/doc/examples/cocoaDemo/bin-python-main.m 1.1 None
-Twisted/doc/examples/cocoaDemo/English.lproj/InfoPlist.strings 1.1 None
-Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib 1.1 None
-Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib 1.1 None
-Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib 1.1 None
-Twisted/doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj 1.1 None
-
-Log message:
-Directories break debian build script, waiting for reasonable fix
-
-
-ViewCVS links:
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/MyAppDelegate.py.diff?r1=text&tr1=1.1&r2=text&tr2=None&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/__main__.py.diff?r1=text&tr1=1.1&r2=text&tr2=None&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/bin-python-main.m.diff?r1=text&tr1=1.1&r2=text&tr2=None&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/English.lproj/InfoPlist.strings.diff?r1=text&tr1=1.1&r2=text&tr2=None&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib.diff?r1=text&tr1=1.1&r2=text&tr2=None&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib.diff?r1=text&tr1=1.1&r2=text&tr2=None&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib.diff?r1=text&tr1=1.1&r2=text&tr2=None&cvsroot=Twisted
-http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj.diff?r1=text&tr1=1.1&r2=text&tr2=None&cvsroot=Twisted
-
-.
-
-_______________________________________________
-Twisted-commits mailing list
-Twisted-commits@twistedmatrix.com
-http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits
diff --git a/buildbot/buildbot-source/buildbot/test/mail/msg8 b/buildbot/buildbot-source/buildbot/test/mail/msg8
deleted file mode 100644
index 9b1e4fd0f..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/msg8
+++ /dev/null
@@ -1,61 +0,0 @@
-Return-Path: <twisted-commits-admin@twistedmatrix.com>
-Delivered-To: warner-twistedcvs@luther.lothar.com
-Received: (qmail 10804 invoked by uid 1000); 19 Jan 2003 14:10:03 -0000
-Delivered-To: warner-twistedcvs@lothar.com
-Received: (qmail 6704 invoked by uid 13574); 19 Jan 2003 14:00:20 -0000
-Received: from unknown (HELO pyramid.twistedmatrix.com) ([64.123.27.105]) (envelope-sender <twisted-commits-admin@twistedmatrix.com>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-twistedcvs@lothar.com>; 19 Jan 2003 14:00:20 -0000
-Received: from localhost ([127.0.0.1] helo=pyramid.twistedmatrix.com)
- by pyramid.twistedmatrix.com with esmtp (Exim 3.35 #1 (Debian))
- id 18aFtx-0002WS-00; Sun, 19 Jan 2003 07:54:17 -0600
-Received: from acapnotic by pyramid.twistedmatrix.com with local (Exim 3.35 #1 (Debian))
- id 18aFtH-0002W3-00
- for <twisted-commits@twistedmatrix.com>; Sun, 19 Jan 2003 07:53:35 -0600
-To: twisted-commits@twistedmatrix.com
-From: acapnotic CVS <acapnotic@twistedmatrix.com>
-X-Mailer: CVSToys
-Message-Id: <E18aFtH-0002W3-00@pyramid.twistedmatrix.com>
-Subject: [Twisted-commits] it doesn't work with invalid syntax
-Sender: twisted-commits-admin@twistedmatrix.com
-Errors-To: twisted-commits-admin@twistedmatrix.com
-X-BeenThere: twisted-commits@twistedmatrix.com
-X-Mailman-Version: 2.0.11
-Precedence: bulk
-List-Help: <mailto:twisted-commits-request@twistedmatrix.com?subject=help>
-List-Post: <mailto:twisted-commits@twistedmatrix.com>
-List-Subscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=subscribe>
-List-Id: <twisted-commits.twistedmatrix.com>
-List-Unsubscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
- <mailto:twisted-commits-request@twistedmatrix.com?subject=unsubscribe>
-List-Archive: <http://twistedmatrix.com/pipermail/twisted-commits/>
-Date: Sun, 19 Jan 2003 07:53:35 -0600
-Status:
-
-Modified files:
-CVSROOT/freshCfg 1.16 1.17
-
-Log message:
-it doesn't work with invalid syntax
-
-
-Index: CVSROOT/freshCfg
-diff -u CVSROOT/freshCfg:1.16 CVSROOT/freshCfg:1.17
---- CVSROOT/freshCfg:1.16 Sun Jan 19 05:52:34 2003
-+++ CVSROOT/freshCfg Sun Jan 19 05:53:34 2003
-@@ -27,7 +27,7 @@
- ('/cvs', '^Reality', None, MailNotification(['reality-commits'])),
- ('/cvs', '^Twistby', None, MailNotification(['acapnotic'])),
- ('/cvs', '^CVSToys', None,
-- MailNotification(['CVSToys-list']
-+ MailNotification(['CVSToys-list'],
- "http://twistedmatrix.com/users/jh.twistd/"
- "viewcvs/cgi/viewcvs.cgi/",
- replyTo="cvstoys-list@twistedmatrix.com"),)
-
-
-_______________________________________________
-Twisted-commits mailing list
-Twisted-commits@twistedmatrix.com
-http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits
diff --git a/buildbot/buildbot-source/buildbot/test/mail/msg9 b/buildbot/buildbot-source/buildbot/test/mail/msg9
deleted file mode 100644
index fd4f78584..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/msg9
+++ /dev/null
@@ -1,18 +0,0 @@
-From twisted-python@twistedmatrix.com Fri Dec 26 07:25:13 2003
-From: twisted-python@twistedmatrix.com (exarkun CVS)
-Date: Fri, 26 Dec 2003 00:25:13 -0700
-Subject: [Twisted-commits] Directory /cvs/Twisted/sandbox/exarkun/persist-plugin added to the repository
-Message-ID: <E1AZmLR-0000Tl-00@wolfwood>
-
-Modified files:
-Twisted/sandbox/exarkun/persist-plugin
-
-Log message:
-Directory /cvs/Twisted/sandbox/exarkun/persist-plugin added to the repository
-
-
-ViewCVS links:
-http://cvs.twistedmatrix.com/cvs/sandbox/exarkun/persist-plugin?cvsroot=Twisted
-
-
-
diff --git a/buildbot/buildbot-source/buildbot/test/mail/syncmail.1 b/buildbot/buildbot-source/buildbot/test/mail/syncmail.1
deleted file mode 100644
index eb35e25ad..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/syncmail.1
+++ /dev/null
@@ -1,152 +0,0 @@
-Return-Path: <warner@users.sourceforge.net>
-Delivered-To: warner-sourceforge@luther.lothar.com
-Received: (qmail 23758 invoked by uid 1000); 28 Jul 2003 07:22:14 -0000
-Delivered-To: warner-sourceforge@lothar.com
-Received: (qmail 62715 invoked by uid 13574); 28 Jul 2003 07:22:03 -0000
-Received: from unknown (HELO sc8-sf-list1.sourceforge.net) ([66.35.250.206]) (envelope-sender <warner@users.sourceforge.net>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-sourceforge@lothar.com>; 28 Jul 2003 07:22:03 -0000
-Received: from sc8-sf-sshgate.sourceforge.net ([66.35.250.220] helo=sc8-sf-netmisc.sourceforge.net)
- by sc8-sf-list1.sourceforge.net with esmtp
- (Cipher TLSv1:DES-CBC3-SHA:168) (Exim 3.31-VA-mm2 #1 (Debian))
- id 19h2KY-0004Nr-00
- for <warner@users.sourceforge.net>; Mon, 28 Jul 2003 00:22:02 -0700
-Received: from sc8-pr-cvs1-b.sourceforge.net ([10.5.1.7] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-sf-netmisc.sourceforge.net with esmtp (Exim 3.36 #1 (Debian))
- id 19h2KY-0001rv-00
- for <warner@users.sourceforge.net>; Mon, 28 Jul 2003 00:22:02 -0700
-Received: from localhost ([127.0.0.1] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-pr-cvs1.sourceforge.net with esmtp (Exim 3.22 #1 (Debian))
- id 19h2KY-0003r4-00
- for <warner@users.sourceforge.net>; Mon, 28 Jul 2003 00:22:02 -0700
-From: warner@users.sourceforge.net
-To: warner@users.sourceforge.net
-Subject: buildbot/buildbot/changes freshcvsmail.py,1.2,1.3
-Message-Id: <E19h2KY-0003r4-00@sc8-pr-cvs1.sourceforge.net>
-Date: Mon, 28 Jul 2003 00:22:02 -0700
-Status:
-
-Update of /cvsroot/buildbot/buildbot/buildbot/changes
-In directory sc8-pr-cvs1:/tmp/cvs-serv14795/buildbot/changes
-
-Modified Files:
- freshcvsmail.py
-Log Message:
-remove leftover code, leave a temporary compatibility import. Note! Start
-importing FCMaildirSource from changes.mail instead of changes.freshcvsmail
-
-
-Index: freshcvsmail.py
-===================================================================
-RCS file: /cvsroot/buildbot/buildbot/buildbot/changes/freshcvsmail.py,v
-retrieving revision 1.2
-retrieving revision 1.3
-diff -C2 -d -r1.2 -r1.3
-*** freshcvsmail.py 27 Jul 2003 18:54:08 -0000 1.2
---- freshcvsmail.py 28 Jul 2003 07:22:00 -0000 1.3
-***************
-*** 1,96 ****
- #! /usr/bin/python
-
-! from buildbot.interfaces import IChangeSource
-! from buildbot.changes.maildirtwisted import MaildirTwisted
-! from buildbot.changes.changes import Change
-! from rfc822 import Message
-! import os, os.path
-!
-! def parseFreshCVSMail(fd, prefix=None):
-! """Parse mail sent by FreshCVS"""
-! # this uses rfc822.Message so it can run under python2.1 . In the future
-! # it will be updated to use python2.2's "email" module.
-!
-! m = Message(fd)
-! # FreshCVS sets From: to "user CVS <user>", but the <> part may be
-! # modified by the MTA (to include a local domain)
-! name, addr = m.getaddr("from")
-! if not name:
-! return None # no From means this message isn't from FreshCVS
-! cvs = name.find(" CVS")
-! if cvs == -1:
-! return None # this message isn't from FreshCVS
-! who = name[:cvs]
-!
-! # we take the time of receipt as the time of checkin. Not correct,
-! # but it avoids the out-of-order-changes issue
-! #when = m.getdate() # and convert from 9-tuple, and handle timezone
-!
-! files = []
-! comments = ""
-! isdir = 0
-! lines = m.fp.readlines()
-! while lines:
-! line = lines.pop(0)
-! if line == "Modified files:\n":
-! break
-! while lines:
-! line = lines.pop(0)
-! if line == "\n":
-! break
-! line = line.rstrip("\n")
-! file, junk = line.split(None, 1)
-! if prefix:
-! # insist that the file start with the prefix: FreshCVS sends
-! # changes we don't care about too
-! bits = file.split(os.sep)
-! if bits[0] == prefix:
-! file = apply(os.path.join, bits[1:])
-! else:
-! break
-! if junk == "0 0":
-! isdir = 1
-! files.append(file)
-! while lines:
-! line = lines.pop(0)
-! if line == "Log message:\n":
-! break
-! # message is terminated by "ViewCVS links:" or "Index:..." (patch)
-! while lines:
-! line = lines.pop(0)
-! if line == "ViewCVS links:\n":
-! break
-! if line.find("Index: ") == 0:
-! break
-! comments += line
-! comments = comments.rstrip() + "\n"
-!
-! if not files:
-! return None
-!
-! change = Change(who, files, comments, isdir)
-!
-! return change
-!
-!
-!
-! class FCMaildirSource(MaildirTwisted):
-! """This source will watch a maildir that is subscribed to a FreshCVS
-! change-announcement mailing list.
-! """
-!
-! __implements__ = IChangeSource,
-
-! def __init__(self, maildir, prefix=None):
-! MaildirTwisted.__init__(self, maildir)
-! self.changemaster = None # filled in when added
-! self.prefix = prefix
-! def describe(self):
-! return "FreshCVS mailing list in maildir %s" % self.maildir.where
-! def messageReceived(self, filename):
-! path = os.path.join(self.basedir, "new", filename)
-! change = parseFreshCVSMail(open(path, "r"), self.prefix)
-! if change:
-! self.changemaster.addChange(change)
-! os.rename(os.path.join(self.basedir, "new", filename),
-! os.path.join(self.basedir, "cur", filename))
---- 1,5 ----
- #! /usr/bin/python
-
-! # leftover import for compatibility
-
-! from buildbot.changes.mail import FCMaildirSource
-
-
diff --git a/buildbot/buildbot-source/buildbot/test/mail/syncmail.2 b/buildbot/buildbot-source/buildbot/test/mail/syncmail.2
deleted file mode 100644
index 5296cbeb2..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/syncmail.2
+++ /dev/null
@@ -1,56 +0,0 @@
-Return-Path: <warner@users.sourceforge.net>
-Delivered-To: warner-sourceforge@luther.lothar.com
-Received: (qmail 23221 invoked by uid 1000); 28 Jul 2003 06:53:15 -0000
-Delivered-To: warner-sourceforge@lothar.com
-Received: (qmail 58537 invoked by uid 13574); 28 Jul 2003 06:53:09 -0000
-Received: from unknown (HELO sc8-sf-list1.sourceforge.net) ([66.35.250.206]) (envelope-sender <warner@users.sourceforge.net>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-sourceforge@lothar.com>; 28 Jul 2003 06:53:09 -0000
-Received: from sc8-sf-sshgate.sourceforge.net ([66.35.250.220] helo=sc8-sf-netmisc.sourceforge.net)
- by sc8-sf-list1.sourceforge.net with esmtp
- (Cipher TLSv1:DES-CBC3-SHA:168) (Exim 3.31-VA-mm2 #1 (Debian))
- id 19h1sb-0003nw-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:53:09 -0700
-Received: from sc8-pr-cvs1-b.sourceforge.net ([10.5.1.7] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-sf-netmisc.sourceforge.net with esmtp (Exim 3.36 #1 (Debian))
- id 19h1sa-00018t-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:53:08 -0700
-Received: from localhost ([127.0.0.1] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-pr-cvs1.sourceforge.net with esmtp (Exim 3.22 #1 (Debian))
- id 19h1sa-0002mX-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:53:08 -0700
-From: warner@users.sourceforge.net
-To: warner@users.sourceforge.net
-Subject: buildbot ChangeLog,1.93,1.94
-Message-Id: <E19h1sa-0002mX-00@sc8-pr-cvs1.sourceforge.net>
-Date: Sun, 27 Jul 2003 23:53:08 -0700
-Status:
-
-Update of /cvsroot/buildbot/buildbot
-In directory sc8-pr-cvs1:/tmp/cvs-serv10689
-
-Modified Files:
- ChangeLog
-Log Message:
- * NEWS: started adding new features
-
-
-Index: ChangeLog
-===================================================================
-RCS file: /cvsroot/buildbot/buildbot/ChangeLog,v
-retrieving revision 1.93
-retrieving revision 1.94
-diff -C2 -d -r1.93 -r1.94
-*** ChangeLog 27 Jul 2003 22:53:27 -0000 1.93
---- ChangeLog 28 Jul 2003 06:53:06 -0000 1.94
-***************
-*** 1,4 ****
---- 1,6 ----
- 2003-07-27 Brian Warner <warner@lothar.com>
-
-+ * NEWS: started adding new features
-+
- * buildbot/changes/mail.py: start work on Syncmail parser, move
- mail sources into their own file
-
-
diff --git a/buildbot/buildbot-source/buildbot/test/mail/syncmail.3 b/buildbot/buildbot-source/buildbot/test/mail/syncmail.3
deleted file mode 100644
index eee19b1bd..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/syncmail.3
+++ /dev/null
@@ -1,39 +0,0 @@
-Return-Path: <warner@users.sourceforge.net>
-Delivered-To: warner-sourceforge@luther.lothar.com
-Received: (qmail 23196 invoked by uid 1000); 28 Jul 2003 06:51:53 -0000
-Delivered-To: warner-sourceforge@lothar.com
-Received: (qmail 58269 invoked by uid 13574); 28 Jul 2003 06:51:46 -0000
-Received: from unknown (HELO sc8-sf-list1.sourceforge.net) ([66.35.250.206]) (envelope-sender <warner@users.sourceforge.net>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-sourceforge@lothar.com>; 28 Jul 2003 06:51:46 -0000
-Received: from sc8-sf-sshgate.sourceforge.net ([66.35.250.220] helo=sc8-sf-netmisc.sourceforge.net)
- by sc8-sf-list1.sourceforge.net with esmtp
- (Cipher TLSv1:DES-CBC3-SHA:168) (Exim 3.31-VA-mm2 #1 (Debian))
- id 19h1rF-00027s-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:51:46 -0700
-Received: from sc8-pr-cvs1-b.sourceforge.net ([10.5.1.7] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-sf-netmisc.sourceforge.net with esmtp (Exim 3.36 #1 (Debian))
- id 19h1rF-00017O-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:51:45 -0700
-Received: from localhost ([127.0.0.1] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-pr-cvs1.sourceforge.net with esmtp (Exim 3.22 #1 (Debian))
- id 19h1rF-0002jg-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:51:45 -0700
-From: warner@users.sourceforge.net
-To: warner@users.sourceforge.net
-Subject: CVSROOT syncmail,1.1,NONE
-Message-Id: <E19h1rF-0002jg-00@sc8-pr-cvs1.sourceforge.net>
-Date: Sun, 27 Jul 2003 23:51:45 -0700
-Status:
-
-Update of /cvsroot/buildbot/CVSROOT
-In directory sc8-pr-cvs1:/tmp/cvs-serv10515
-
-Removed Files:
- syncmail
-Log Message:
-nevermind
-
---- syncmail DELETED ---
-
-
diff --git a/buildbot/buildbot-source/buildbot/test/mail/syncmail.4 b/buildbot/buildbot-source/buildbot/test/mail/syncmail.4
deleted file mode 100644
index 44bda5df2..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/syncmail.4
+++ /dev/null
@@ -1,290 +0,0 @@
-Return-Path: <warner@users.sourceforge.net>
-Delivered-To: warner-sourceforge@luther.lothar.com
-Received: (qmail 24111 invoked by uid 1000); 28 Jul 2003 08:01:54 -0000
-Delivered-To: warner-sourceforge@lothar.com
-Received: (qmail 68756 invoked by uid 13574); 28 Jul 2003 08:01:46 -0000
-Received: from unknown (HELO sc8-sf-list1.sourceforge.net) ([66.35.250.206]) (envelope-sender <warner@users.sourceforge.net>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-sourceforge@lothar.com>; 28 Jul 2003 08:01:46 -0000
-Received: from sc8-sf-sshgate.sourceforge.net ([66.35.250.220] helo=sc8-sf-netmisc.sourceforge.net)
- by sc8-sf-list1.sourceforge.net with esmtp
- (Cipher TLSv1:DES-CBC3-SHA:168) (Exim 3.31-VA-mm2 #1 (Debian))
- id 19h2wz-00029d-00
- for <warner@users.sourceforge.net>; Mon, 28 Jul 2003 01:01:45 -0700
-Received: from sc8-pr-cvs1-b.sourceforge.net ([10.5.1.7] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-sf-netmisc.sourceforge.net with esmtp (Exim 3.36 #1 (Debian))
- id 19h2wz-0002XB-00
- for <warner@users.sourceforge.net>; Mon, 28 Jul 2003 01:01:45 -0700
-Received: from localhost ([127.0.0.1] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-pr-cvs1.sourceforge.net with esmtp (Exim 3.22 #1 (Debian))
- id 19h2wz-0005a9-00
- for <warner@users.sourceforge.net>; Mon, 28 Jul 2003 01:01:45 -0700
-From: warner@users.sourceforge.net
-To: warner@users.sourceforge.net
-Subject: buildbot/test/mail syncmail.1,NONE,1.1 syncmail.2,NONE,1.1 syncmail.3,NONE,1.1
-Message-Id: <E19h2wz-0005a9-00@sc8-pr-cvs1.sourceforge.net>
-Date: Mon, 28 Jul 2003 01:01:45 -0700
-Status:
-
-Update of /cvsroot/buildbot/buildbot/test/mail
-In directory sc8-pr-cvs1:/tmp/cvs-serv21445
-
-Added Files:
- syncmail.1 syncmail.2 syncmail.3
-Log Message:
-test cases for syncmail parser
-
---- NEW FILE: syncmail.1 ---
-Return-Path: <warner@users.sourceforge.net>
-Delivered-To: warner-sourceforge@luther.lothar.com
-Received: (qmail 23758 invoked by uid 1000); 28 Jul 2003 07:22:14 -0000
-Delivered-To: warner-sourceforge@lothar.com
-Received: (qmail 62715 invoked by uid 13574); 28 Jul 2003 07:22:03 -0000
-Received: from unknown (HELO sc8-sf-list1.sourceforge.net) ([66.35.250.206]) (envelope-sender <warner@users.sourceforge.net>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-sourceforge@lothar.com>; 28 Jul 2003 07:22:03 -0000
-Received: from sc8-sf-sshgate.sourceforge.net ([66.35.250.220] helo=sc8-sf-netmisc.sourceforge.net)
- by sc8-sf-list1.sourceforge.net with esmtp
- (Cipher TLSv1:DES-CBC3-SHA:168) (Exim 3.31-VA-mm2 #1 (Debian))
- id 19h2KY-0004Nr-00
- for <warner@users.sourceforge.net>; Mon, 28 Jul 2003 00:22:02 -0700
-Received: from sc8-pr-cvs1-b.sourceforge.net ([10.5.1.7] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-sf-netmisc.sourceforge.net with esmtp (Exim 3.36 #1 (Debian))
- id 19h2KY-0001rv-00
- for <warner@users.sourceforge.net>; Mon, 28 Jul 2003 00:22:02 -0700
-Received: from localhost ([127.0.0.1] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-pr-cvs1.sourceforge.net with esmtp (Exim 3.22 #1 (Debian))
- id 19h2KY-0003r4-00
- for <warner@users.sourceforge.net>; Mon, 28 Jul 2003 00:22:02 -0700
-From: warner@users.sourceforge.net
-To: warner@users.sourceforge.net
-Subject: buildbot/buildbot/changes freshcvsmail.py,1.2,1.3
-Message-Id: <E19h2KY-0003r4-00@sc8-pr-cvs1.sourceforge.net>
-Date: Mon, 28 Jul 2003 00:22:02 -0700
-Status:
-
-Update of /cvsroot/buildbot/buildbot/buildbot/changes
-In directory sc8-pr-cvs1:/tmp/cvs-serv14795/buildbot/changes
-
-Modified Files:
- freshcvsmail.py
-Log Message:
-remove leftover code, leave a temporary compatibility import. Note! Start
-importing FCMaildirSource from changes.mail instead of changes.freshcvsmail
-
-
-Index: freshcvsmail.py
-===================================================================
-RCS file: /cvsroot/buildbot/buildbot/buildbot/changes/freshcvsmail.py,v
-retrieving revision 1.2
-retrieving revision 1.3
-diff -C2 -d -r1.2 -r1.3
-*** freshcvsmail.py 27 Jul 2003 18:54:08 -0000 1.2
---- freshcvsmail.py 28 Jul 2003 07:22:00 -0000 1.3
-***************
-*** 1,96 ****
- #! /usr/bin/python
-
-! from buildbot.interfaces import IChangeSource
-! from buildbot.changes.maildirtwisted import MaildirTwisted
-! from buildbot.changes.changes import Change
-! from rfc822 import Message
-! import os, os.path
-!
-! def parseFreshCVSMail(fd, prefix=None):
-! """Parse mail sent by FreshCVS"""
-! # this uses rfc822.Message so it can run under python2.1 . In the future
-! # it will be updated to use python2.2's "email" module.
-!
-! m = Message(fd)
-! # FreshCVS sets From: to "user CVS <user>", but the <> part may be
-! # modified by the MTA (to include a local domain)
-! name, addr = m.getaddr("from")
-! if not name:
-! return None # no From means this message isn't from FreshCVS
-! cvs = name.find(" CVS")
-! if cvs == -1:
-! return None # this message isn't from FreshCVS
-! who = name[:cvs]
-!
-! # we take the time of receipt as the time of checkin. Not correct,
-! # but it avoids the out-of-order-changes issue
-! #when = m.getdate() # and convert from 9-tuple, and handle timezone
-!
-! files = []
-! comments = ""
-! isdir = 0
-! lines = m.fp.readlines()
-! while lines:
-! line = lines.pop(0)
-! if line == "Modified files:\n":
-! break
-! while lines:
-! line = lines.pop(0)
-! if line == "\n":
-! break
-! line = line.rstrip("\n")
-! file, junk = line.split(None, 1)
-! if prefix:
-! # insist that the file start with the prefix: FreshCVS sends
-! # changes we don't care about too
-! bits = file.split(os.sep)
-! if bits[0] == prefix:
-! file = apply(os.path.join, bits[1:])
-! else:
-! break
-! if junk == "0 0":
-! isdir = 1
-! files.append(file)
-! while lines:
-! line = lines.pop(0)
-! if line == "Log message:\n":
-! break
-! # message is terminated by "ViewCVS links:" or "Index:..." (patch)
-! while lines:
-! line = lines.pop(0)
-! if line == "ViewCVS links:\n":
-! break
-! if line.find("Index: ") == 0:
-! break
-! comments += line
-! comments = comments.rstrip() + "\n"
-!
-! if not files:
-! return None
-!
-! change = Change(who, files, comments, isdir)
-!
-! return change
-!
-!
-!
-! class FCMaildirSource(MaildirTwisted):
-! """This source will watch a maildir that is subscribed to a FreshCVS
-! change-announcement mailing list.
-! """
-!
-! __implements__ = IChangeSource,
-
-! def __init__(self, maildir, prefix=None):
-! MaildirTwisted.__init__(self, maildir)
-! self.changemaster = None # filled in when added
-! self.prefix = prefix
-! def describe(self):
-! return "FreshCVS mailing list in maildir %s" % self.maildir.where
-! def messageReceived(self, filename):
-! path = os.path.join(self.basedir, "new", filename)
-! change = parseFreshCVSMail(open(path, "r"), self.prefix)
-! if change:
-! self.changemaster.addChange(change)
-! os.rename(os.path.join(self.basedir, "new", filename),
-! os.path.join(self.basedir, "cur", filename))
---- 1,5 ----
- #! /usr/bin/python
-
-! # leftover import for compatibility
-
-! from buildbot.changes.mail import FCMaildirSource
-
-
-
---- NEW FILE: syncmail.2 ---
-Return-Path: <warner@users.sourceforge.net>
-Delivered-To: warner-sourceforge@luther.lothar.com
-Received: (qmail 23221 invoked by uid 1000); 28 Jul 2003 06:53:15 -0000
-Delivered-To: warner-sourceforge@lothar.com
-Received: (qmail 58537 invoked by uid 13574); 28 Jul 2003 06:53:09 -0000
-Received: from unknown (HELO sc8-sf-list1.sourceforge.net) ([66.35.250.206]) (envelope-sender <warner@users.sourceforge.net>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-sourceforge@lothar.com>; 28 Jul 2003 06:53:09 -0000
-Received: from sc8-sf-sshgate.sourceforge.net ([66.35.250.220] helo=sc8-sf-netmisc.sourceforge.net)
- by sc8-sf-list1.sourceforge.net with esmtp
- (Cipher TLSv1:DES-CBC3-SHA:168) (Exim 3.31-VA-mm2 #1 (Debian))
- id 19h1sb-0003nw-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:53:09 -0700
-Received: from sc8-pr-cvs1-b.sourceforge.net ([10.5.1.7] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-sf-netmisc.sourceforge.net with esmtp (Exim 3.36 #1 (Debian))
- id 19h1sa-00018t-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:53:08 -0700
-Received: from localhost ([127.0.0.1] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-pr-cvs1.sourceforge.net with esmtp (Exim 3.22 #1 (Debian))
- id 19h1sa-0002mX-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:53:08 -0700
-From: warner@users.sourceforge.net
-To: warner@users.sourceforge.net
-Subject: buildbot ChangeLog,1.93,1.94
-Message-Id: <E19h1sa-0002mX-00@sc8-pr-cvs1.sourceforge.net>
-Date: Sun, 27 Jul 2003 23:53:08 -0700
-Status:
-
-Update of /cvsroot/buildbot/buildbot
-In directory sc8-pr-cvs1:/tmp/cvs-serv10689
-
-Modified Files:
- ChangeLog
-Log Message:
- * NEWS: started adding new features
-
-
-Index: ChangeLog
-===================================================================
-RCS file: /cvsroot/buildbot/buildbot/ChangeLog,v
-retrieving revision 1.93
-retrieving revision 1.94
-diff -C2 -d -r1.93 -r1.94
-*** ChangeLog 27 Jul 2003 22:53:27 -0000 1.93
---- ChangeLog 28 Jul 2003 06:53:06 -0000 1.94
-***************
-*** 1,4 ****
---- 1,6 ----
- 2003-07-27 Brian Warner <warner@lothar.com>
-
-+ * NEWS: started adding new features
-+
- * buildbot/changes/mail.py: start work on Syncmail parser, move
- mail sources into their own file
-
-
-
---- NEW FILE: syncmail.3 ---
-Return-Path: <warner@users.sourceforge.net>
-Delivered-To: warner-sourceforge@luther.lothar.com
-Received: (qmail 23196 invoked by uid 1000); 28 Jul 2003 06:51:53 -0000
-Delivered-To: warner-sourceforge@lothar.com
-Received: (qmail 58269 invoked by uid 13574); 28 Jul 2003 06:51:46 -0000
-Received: from unknown (HELO sc8-sf-list1.sourceforge.net) ([66.35.250.206]) (envelope-sender <warner@users.sourceforge.net>)
- by 130.94.181.6 (qmail-ldap-1.03) with SMTP
- for <warner-sourceforge@lothar.com>; 28 Jul 2003 06:51:46 -0000
-Received: from sc8-sf-sshgate.sourceforge.net ([66.35.250.220] helo=sc8-sf-netmisc.sourceforge.net)
- by sc8-sf-list1.sourceforge.net with esmtp
- (Cipher TLSv1:DES-CBC3-SHA:168) (Exim 3.31-VA-mm2 #1 (Debian))
- id 19h1rF-00027s-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:51:46 -0700
-Received: from sc8-pr-cvs1-b.sourceforge.net ([10.5.1.7] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-sf-netmisc.sourceforge.net with esmtp (Exim 3.36 #1 (Debian))
- id 19h1rF-00017O-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:51:45 -0700
-Received: from localhost ([127.0.0.1] helo=sc8-pr-cvs1.sourceforge.net)
- by sc8-pr-cvs1.sourceforge.net with esmtp (Exim 3.22 #1 (Debian))
- id 19h1rF-0002jg-00
- for <warner@users.sourceforge.net>; Sun, 27 Jul 2003 23:51:45 -0700
-From: warner@users.sourceforge.net
-To: warner@users.sourceforge.net
-Subject: CVSROOT syncmail,1.1,NONE
-Message-Id: <E19h1rF-0002jg-00@sc8-pr-cvs1.sourceforge.net>
-Date: Sun, 27 Jul 2003 23:51:45 -0700
-Status:
-
-Update of /cvsroot/buildbot/CVSROOT
-In directory sc8-pr-cvs1:/tmp/cvs-serv10515
-
-Removed Files:
- syncmail
-Log Message:
-nevermind
-
---- syncmail DELETED ---
-
-
-
-
diff --git a/buildbot/buildbot-source/buildbot/test/mail/syncmail.5 b/buildbot/buildbot-source/buildbot/test/mail/syncmail.5
deleted file mode 100644
index 82ba45108..000000000
--- a/buildbot/buildbot-source/buildbot/test/mail/syncmail.5
+++ /dev/null
@@ -1,70 +0,0 @@
-From thomas@otto.amantes Mon Feb 21 17:46:45 2005
-Return-Path: <thomas@otto.amantes>
-Received: from otto.amantes (otto.amantes [127.0.0.1]) by otto.amantes
- (8.13.1/8.13.1) with ESMTP id j1LGkjr3011986 for <thomas@localhost>; Mon,
- 21 Feb 2005 17:46:45 +0100
-Message-Id: <200502211646.j1LGkjr3011986@otto.amantes>
-From: Thomas Vander Stichele <thomas@otto.amantes>
-To: thomas@otto.amantes
-Subject: test1 s
-Date: Mon, 21 Feb 2005 16:46:45 +0000
-X-Mailer: Python syncmail $Revision: 1.1 $
- <http://sf.net/projects/cvs-syncmail>
-Content-Transfer-Encoding: 8bit
-Mime-Version: 1.0
-
-Update of /home/cvs/test/test1
-In directory otto.amantes:/home/thomas/dev/tests/cvs/test1
-
-Added Files:
- Tag: BRANCH-DEVEL
- MANIFEST Makefile.am autogen.sh configure.in
-Log Message:
-stuff on the branch
-
---- NEW FILE: Makefile.am ---
-SUBDIRS = src
-
-# normally I wouldn't distribute autogen.sh and friends with a tarball
-# but this one is specifically distributed for demonstration purposes
-
-EXTRA_DIST = autogen.sh
-
-# target for making the "import this into svn" tarball
-test:
- mkdir test
- for a in `cat MANIFEST`; do \
- cp -pr $$a test/$$a; done
- tar czf test.tar.gz test
- rm -rf test
-
---- NEW FILE: MANIFEST ---
-MANIFEST
-autogen.sh
-configure.in
-Makefile.am
-src
-src/Makefile.am
-src/test.c
-
---- NEW FILE: autogen.sh ---
-#!/bin/sh
-
-set -x
-
-aclocal && \
-autoheader && \
-autoconf && \
-automake -a --foreign && \
-./configure $@
-
---- NEW FILE: configure.in ---
-dnl configure.ac for version macro
-AC_INIT
-
-AM_CONFIG_HEADER(config.h)
-
-AM_INIT_AUTOMAKE(test, 0.0.0)
-AC_PROG_CC
-
-AC_OUTPUT(Makefile src/Makefile)
diff --git a/buildbot/buildbot-source/buildbot/test/runutils.py b/buildbot/buildbot-source/buildbot/test/runutils.py
deleted file mode 100644
index 0f7b99e35..000000000
--- a/buildbot/buildbot-source/buildbot/test/runutils.py
+++ /dev/null
@@ -1,193 +0,0 @@
-
-import shutil, os, errno
-from twisted.internet import defer
-from twisted.python import log
-
-from buildbot import master, interfaces
-from buildbot.twcompat import maybeWait
-from buildbot.slave import bot
-from buildbot.process.base import BuildRequest
-from buildbot.sourcestamp import SourceStamp
-from buildbot.status.builder import SUCCESS
-
-class MyBot(bot.Bot):
- def remote_getSlaveInfo(self):
- return self.parent.info
-
-class MyBuildSlave(bot.BuildSlave):
- botClass = MyBot
-
-class RunMixin:
- master = None
-
- def rmtree(self, d):
- try:
- shutil.rmtree(d, ignore_errors=1)
- except OSError, e:
- # stupid 2.2 appears to ignore ignore_errors
- if e.errno != errno.ENOENT:
- raise
-
- def setUp(self):
- self.slaves = {}
- self.rmtree("basedir")
- os.mkdir("basedir")
- self.master = master.BuildMaster("basedir")
- self.status = self.master.getStatus()
- self.control = interfaces.IControl(self.master)
-
- def connectOneSlave(self, slavename, opts={}):
- port = self.master.slavePort._port.getHost().port
- self.rmtree("slavebase-%s" % slavename)
- os.mkdir("slavebase-%s" % slavename)
- slave = MyBuildSlave("localhost", port, slavename, "sekrit",
- "slavebase-%s" % slavename,
- keepalive=0, usePTY=1, debugOpts=opts)
- slave.info = {"admin": "one"}
- self.slaves[slavename] = slave
- slave.startService()
-
- def connectSlave(self, builders=["dummy"], slavename="bot1",
- opts={}):
- # connect buildslave 'slavename' and wait for it to connect to all of
- # the given builders
- dl = []
- # initiate call for all of them, before waiting on result,
- # otherwise we might miss some
- for b in builders:
- dl.append(self.master.botmaster.waitUntilBuilderAttached(b))
- d = defer.DeferredList(dl)
- self.connectOneSlave(slavename, opts)
- return d
-
- def connectSlaves(self, slavenames, builders):
- dl = []
- # initiate call for all of them, before waiting on result,
- # otherwise we might miss some
- for b in builders:
- dl.append(self.master.botmaster.waitUntilBuilderAttached(b))
- d = defer.DeferredList(dl)
- for name in slavenames:
- self.connectOneSlave(name)
- return d
-
- def connectSlave2(self):
- # this takes over for bot1, so it has to share the slavename
- port = self.master.slavePort._port.getHost().port
- self.rmtree("slavebase-bot2")
- os.mkdir("slavebase-bot2")
- # this uses bot1, really
- slave = MyBuildSlave("localhost", port, "bot1", "sekrit",
- "slavebase-bot2", keepalive=0, usePTY=1)
- slave.info = {"admin": "two"}
- self.slaves['bot2'] = slave
- slave.startService()
-
- def connectSlaveFastTimeout(self):
- # this slave has a very fast keepalive timeout
- port = self.master.slavePort._port.getHost().port
- self.rmtree("slavebase-bot1")
- os.mkdir("slavebase-bot1")
- slave = MyBuildSlave("localhost", port, "bot1", "sekrit",
- "slavebase-bot1", keepalive=2, usePTY=1,
- keepaliveTimeout=1)
- slave.info = {"admin": "one"}
- self.slaves['bot1'] = slave
- slave.startService()
- d = self.master.botmaster.waitUntilBuilderAttached("dummy")
- return d
-
- # things to start builds
- def requestBuild(self, builder):
- # returns a Deferred that fires with an IBuildStatus object when the
- # build is finished
- req = BuildRequest("forced build", SourceStamp())
- self.control.getBuilder(builder).requestBuild(req)
- return req.waitUntilFinished()
-
- def failUnlessBuildSucceeded(self, bs):
- self.failUnless(bs.getResults() == SUCCESS)
- return bs # useful for chaining
-
- def tearDown(self):
- log.msg("doing tearDown")
- d = self.shutdownAllSlaves()
- d.addCallback(self._tearDown_1)
- d.addCallback(self._tearDown_2)
- return maybeWait(d)
- def _tearDown_1(self, res):
- if self.master:
- return defer.maybeDeferred(self.master.stopService)
- def _tearDown_2(self, res):
- self.master = None
- log.msg("tearDown done")
-
-
- # various forms of slave death
-
- def shutdownAllSlaves(self):
- # the slave has disconnected normally: they SIGINT'ed it, or it shut
- # down willingly. This will kill child processes and give them a
- # chance to finish up. We return a Deferred that will fire when
- # everything is finished shutting down.
-
- log.msg("doing shutdownAllSlaves")
- dl = []
- for slave in self.slaves.values():
- dl.append(slave.waitUntilDisconnected())
- dl.append(defer.maybeDeferred(slave.stopService))
- d = defer.DeferredList(dl)
- d.addCallback(self._shutdownAllSlavesDone)
- return d
- def _shutdownAllSlavesDone(self, res):
- for name in self.slaves.keys():
- del self.slaves[name]
- return self.master.botmaster.waitUntilBuilderFullyDetached("dummy")
-
- def shutdownSlave(self, slavename, buildername):
- # this slave has disconnected normally: they SIGINT'ed it, or it shut
- # down willingly. This will kill child processes and give them a
- # chance to finish up. We return a Deferred that will fire when
- # everything is finished shutting down, and the given Builder knows
- # that the slave has gone away.
-
- s = self.slaves[slavename]
- dl = [self.master.botmaster.waitUntilBuilderDetached(buildername),
- s.waitUntilDisconnected()]
- d = defer.DeferredList(dl)
- d.addCallback(self._shutdownSlave_done, slavename)
- s.stopService()
- return d
- def _shutdownSlave_done(self, res, slavename):
- del self.slaves[slavename]
-
- def killSlave(self):
- # the slave has died, its host sent a FIN. The .notifyOnDisconnect
- # callbacks will terminate the current step, so the build should be
- # flunked (no further steps should be started).
- self.slaves['bot1'].bf.continueTrying = 0
- bot = self.slaves['bot1'].getServiceNamed("bot")
- broker = bot.builders["dummy"].remote.broker
- broker.transport.loseConnection()
- del self.slaves['bot1']
-
- def disappearSlave(self, slavename="bot1", buildername="dummy"):
- # the slave's host has vanished off the net, leaving the connection
- # dangling. This will be detected quickly by app-level keepalives or
- # a ping, or slowly by TCP timeouts.
-
- # simulate this by replacing the slave Broker's .dataReceived method
- # with one that just throws away all data.
- def discard(data):
- pass
- bot = self.slaves[slavename].getServiceNamed("bot")
- broker = bot.builders[buildername].remote.broker
- broker.dataReceived = discard # seal its ears
- broker.transport.write = discard # and take away its voice
-
- def ghostSlave(self):
- # the slave thinks it has lost the connection, and initiated a
- # reconnect. The master doesn't yet realize it has lost the previous
- # connection, and sees two connections at once.
- raise NotImplementedError
-
diff --git a/buildbot/buildbot-source/buildbot/test/sleep.py b/buildbot/buildbot-source/buildbot/test/sleep.py
deleted file mode 100644
index 48adc39b2..000000000
--- a/buildbot/buildbot-source/buildbot/test/sleep.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#! /usr/bin/python
-
-import sys, time
-delay = int(sys.argv[1])
-
-sys.stdout.write("sleeping for %d seconds\n" % delay)
-time.sleep(delay)
-sys.stdout.write("woke up\n")
-sys.exit(0)
diff --git a/buildbot/buildbot-source/buildbot/test/subdir/emit.py b/buildbot/buildbot-source/buildbot/test/subdir/emit.py
deleted file mode 100644
index 368452906..000000000
--- a/buildbot/buildbot-source/buildbot/test/subdir/emit.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#! /usr/bin/python
-
-import os, sys
-
-sys.stdout.write("this is stdout in subdir\n")
-sys.stderr.write("this is stderr\n")
-if os.environ.has_key("EMIT_TEST"):
- sys.stdout.write("EMIT_TEST: %s\n" % os.environ["EMIT_TEST"])
-rc = int(sys.argv[1])
-sys.exit(rc)
diff --git a/buildbot/buildbot-source/buildbot/test/test__versions.py b/buildbot/buildbot-source/buildbot/test/test__versions.py
deleted file mode 100644
index a69fcc425..000000000
--- a/buildbot/buildbot-source/buildbot/test/test__versions.py
+++ /dev/null
@@ -1,16 +0,0 @@
-
-# This is a fake test which just logs the version of Twisted, to make it
-# easier to track down failures in other tests.
-
-from twisted.trial import unittest
-from twisted.python import log
-from twisted import copyright
-import sys
-import buildbot
-
-class Versions(unittest.TestCase):
- def test_versions(self):
- log.msg("Python Version: %s" % sys.version)
- log.msg("Twisted Version: %s" % copyright.version)
- log.msg("Buildbot Version: %s" % buildbot.version)
-
diff --git a/buildbot/buildbot-source/buildbot/test/test_buildreq.py b/buildbot/buildbot-source/buildbot/test/test_buildreq.py
deleted file mode 100644
index f59f4970f..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_buildreq.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# -*- test-case-name: buildbot.test.test_buildreq -*-
-
-from twisted.trial import unittest
-
-from buildbot import buildset, interfaces, sourcestamp
-from buildbot.twcompat import maybeWait
-from buildbot.process import base
-from buildbot.status import builder
-from buildbot.changes.changes import Change
-
-class Request(unittest.TestCase):
- def testMerge(self):
- R = base.BuildRequest
- S = sourcestamp.SourceStamp
- b1 = R("why", S("branch1", None, None, None))
- b1r1 = R("why2", S("branch1", "rev1", None, None))
- b1r1a = R("why not", S("branch1", "rev1", None, None))
- b1r2 = R("why3", S("branch1", "rev2", None, None))
- b2r2 = R("why4", S("branch2", "rev2", None, None))
- b1r1p1 = R("why5", S("branch1", "rev1", (3, "diff"), None))
- c1 = Change("alice", [], "changed stuff", branch="branch1")
- c2 = Change("alice", [], "changed stuff", branch="branch1")
- c3 = Change("alice", [], "changed stuff", branch="branch1")
- c4 = Change("alice", [], "changed stuff", branch="branch1")
- c5 = Change("alice", [], "changed stuff", branch="branch1")
- c6 = Change("alice", [], "changed stuff", branch="branch1")
- b1c1 = R("changes", S("branch1", None, None, [c1,c2,c3]))
- b1c2 = R("changes", S("branch1", None, None, [c4,c5,c6]))
-
- self.failUnless(b1.canBeMergedWith(b1))
- self.failIf(b1.canBeMergedWith(b1r1))
- self.failIf(b1.canBeMergedWith(b2r2))
- self.failIf(b1.canBeMergedWith(b1r1p1))
- self.failIf(b1.canBeMergedWith(b1c1))
-
- self.failIf(b1r1.canBeMergedWith(b1))
- self.failUnless(b1r1.canBeMergedWith(b1r1))
- self.failIf(b1r1.canBeMergedWith(b2r2))
- self.failIf(b1r1.canBeMergedWith(b1r1p1))
- self.failIf(b1r1.canBeMergedWith(b1c1))
-
- self.failIf(b1r2.canBeMergedWith(b1))
- self.failIf(b1r2.canBeMergedWith(b1r1))
- self.failUnless(b1r2.canBeMergedWith(b1r2))
- self.failIf(b1r2.canBeMergedWith(b2r2))
- self.failIf(b1r2.canBeMergedWith(b1r1p1))
-
- self.failIf(b1r1p1.canBeMergedWith(b1))
- self.failIf(b1r1p1.canBeMergedWith(b1r1))
- self.failIf(b1r1p1.canBeMergedWith(b1r2))
- self.failIf(b1r1p1.canBeMergedWith(b2r2))
- self.failIf(b1r1p1.canBeMergedWith(b1c1))
-
- self.failIf(b1c1.canBeMergedWith(b1))
- self.failIf(b1c1.canBeMergedWith(b1r1))
- self.failIf(b1c1.canBeMergedWith(b1r2))
- self.failIf(b1c1.canBeMergedWith(b2r2))
- self.failIf(b1c1.canBeMergedWith(b1r1p1))
- self.failUnless(b1c1.canBeMergedWith(b1c1))
- self.failUnless(b1c1.canBeMergedWith(b1c2))
-
- sm = b1.mergeWith([])
- self.failUnlessEqual(sm.branch, "branch1")
- self.failUnlessEqual(sm.revision, None)
- self.failUnlessEqual(sm.patch, None)
- self.failUnlessEqual(sm.changes, [])
-
- ss = b1r1.mergeWith([b1r1])
- self.failUnlessEqual(ss, S("branch1", "rev1", None, None))
- why = b1r1.mergeReasons([b1r1])
- self.failUnlessEqual(why, "why2")
- why = b1r1.mergeReasons([b1r1a])
- self.failUnlessEqual(why, "why2, why not")
-
- ss = b1c1.mergeWith([b1c2])
- self.failUnlessEqual(ss, S("branch1", None, None, [c1,c2,c3,c4,c5,c6]))
- why = b1c1.mergeReasons([b1c2])
- self.failUnlessEqual(why, "changes")
-
-
-class FakeBuilder:
- name = "fake"
- def __init__(self):
- self.requests = []
- def submitBuildRequest(self, req):
- self.requests.append(req)
-
-
-class Set(unittest.TestCase):
- def testBuildSet(self):
- S = buildset.BuildSet
- a,b = FakeBuilder(), FakeBuilder()
-
- # two builds, the first one fails, the second one succeeds. The
- # waitUntilSuccess watcher fires as soon as the first one fails,
- # while the waitUntilFinished watcher doesn't fire until all builds
- # are complete.
-
- source = sourcestamp.SourceStamp()
- s = S(["a","b"], source, "forced build")
- s.start([a,b])
- self.failUnlessEqual(len(a.requests), 1)
- self.failUnlessEqual(len(b.requests), 1)
- r1 = a.requests[0]
- self.failUnlessEqual(r1.reason, s.reason)
- self.failUnlessEqual(r1.source, s.source)
-
- st = s.status
- self.failUnlessEqual(st.getSourceStamp(), source)
- self.failUnlessEqual(st.getReason(), "forced build")
- self.failUnlessEqual(st.getBuilderNames(), ["a","b"])
- self.failIf(st.isFinished())
- brs = st.getBuildRequests()
- self.failUnlessEqual(len(brs), 2)
-
- res = []
- d1 = s.waitUntilSuccess()
- d1.addCallback(lambda r: res.append(("success", r)))
- d2 = s.waitUntilFinished()
- d2.addCallback(lambda r: res.append(("finished", r)))
-
- self.failUnlessEqual(res, [])
-
- # the first build finishes here, with FAILURE
- builderstatus_a = builder.BuilderStatus("a")
- bsa = builder.BuildStatus(builderstatus_a, 1)
- bsa.setResults(builder.FAILURE)
- a.requests[0].finished(bsa)
-
- # any FAILURE flunks the BuildSet immediately, so the
- # waitUntilSuccess deferred fires right away. However, the
- # waitUntilFinished deferred must wait until all builds have
- # completed.
- self.failUnlessEqual(len(res), 1)
- self.failUnlessEqual(res[0][0], "success")
- bss = res[0][1]
- self.failUnless(interfaces.IBuildSetStatus(bss, None))
- self.failUnlessEqual(bss.getResults(), builder.FAILURE)
-
- # here we finish the second build
- builderstatus_b = builder.BuilderStatus("b")
- bsb = builder.BuildStatus(builderstatus_b, 1)
- bsb.setResults(builder.SUCCESS)
- b.requests[0].finished(bsb)
-
- # .. which ought to fire the waitUntilFinished deferred
- self.failUnlessEqual(len(res), 2)
- self.failUnlessEqual(res[1][0], "finished")
- self.failUnlessEqual(res[1][1], bss)
-
- # and finish the BuildSet overall
- self.failUnless(st.isFinished())
- self.failUnlessEqual(st.getResults(), builder.FAILURE)
-
- def testSuccess(self):
- S = buildset.BuildSet
- a,b = FakeBuilder(), FakeBuilder()
- # this time, both builds succeed
-
- source = sourcestamp.SourceStamp()
- s = S(["a","b"], source, "forced build")
- s.start([a,b])
-
- st = s.status
- self.failUnlessEqual(st.getSourceStamp(), source)
- self.failUnlessEqual(st.getReason(), "forced build")
- self.failUnlessEqual(st.getBuilderNames(), ["a","b"])
- self.failIf(st.isFinished())
-
- builderstatus_a = builder.BuilderStatus("a")
- bsa = builder.BuildStatus(builderstatus_a, 1)
- bsa.setResults(builder.SUCCESS)
- a.requests[0].finished(bsa)
-
- builderstatus_b = builder.BuilderStatus("b")
- bsb = builder.BuildStatus(builderstatus_b, 1)
- bsb.setResults(builder.SUCCESS)
- b.requests[0].finished(bsb)
-
- self.failUnless(st.isFinished())
- self.failUnlessEqual(st.getResults(), builder.SUCCESS)
-
diff --git a/buildbot/buildbot-source/buildbot/test/test_changes.py b/buildbot/buildbot-source/buildbot/test/test_changes.py
deleted file mode 100644
index df8662368..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_changes.py
+++ /dev/null
@@ -1,192 +0,0 @@
-# -*- test-case-name: buildbot.test.test_changes -*-
-
-from twisted.trial import unittest
-from twisted.internet import defer, reactor
-from twisted.python import log
-
-from buildbot import master
-from buildbot.twcompat import maybeWait
-from buildbot.changes import pb
-from buildbot.scripts import runner
-
-d1 = {'files': ["Project/foo.c", "Project/bar/boo.c"],
- 'who': "marvin",
- 'comments': "Some changes in Project"}
-d2 = {'files': ["OtherProject/bar.c"],
- 'who': "zaphod",
- 'comments': "other changes"}
-d3 = {'files': ["Project/baz.c", "OtherProject/bloo.c"],
- 'who': "alice",
- 'comments': "mixed changes"}
-
-class TestChangePerspective(unittest.TestCase):
-
- def setUp(self):
- self.changes = []
-
- def addChange(self, c):
- self.changes.append(c)
-
- def testNoPrefix(self):
- p = pb.ChangePerspective(self, None)
- p.perspective_addChange(d1)
- self.failUnlessEqual(len(self.changes), 1)
- c1 = self.changes[0]
- self.failUnlessEqual(c1.files,
- ["Project/foo.c", "Project/bar/boo.c"])
- self.failUnlessEqual(c1.comments, "Some changes in Project")
- self.failUnlessEqual(c1.who, "marvin")
-
- def testPrefix(self):
- p = pb.ChangePerspective(self, "Project")
-
- p.perspective_addChange(d1)
- self.failUnlessEqual(len(self.changes), 1)
- c1 = self.changes[-1]
- self.failUnlessEqual(c1.files, ["foo.c", "bar/boo.c"])
- self.failUnlessEqual(c1.comments, "Some changes in Project")
- self.failUnlessEqual(c1.who, "marvin")
-
- p.perspective_addChange(d2) # should be ignored
- self.failUnlessEqual(len(self.changes), 1)
-
- p.perspective_addChange(d3) # should ignore the OtherProject file
- self.failUnlessEqual(len(self.changes), 2)
-
- c3 = self.changes[-1]
- self.failUnlessEqual(c3.files, ["baz.c"])
- self.failUnlessEqual(c3.comments, "mixed changes")
- self.failUnlessEqual(c3.who, "alice")
-
-config_empty = """
-BuildmasterConfig = c = {}
-c['bots'] = []
-c['builders'] = []
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 0
-"""
-
-config_sender = config_empty + \
-"""
-from buildbot.changes import pb
-c['sources'] = [pb.PBChangeSource(port=None)]
-"""
-
-class Sender(unittest.TestCase):
- def setUp(self):
- self.master = master.BuildMaster(".")
- def tearDown(self):
- d = defer.maybeDeferred(self.master.stopService)
- # TODO: something in Twisted-2.0.0 (and probably 2.0.1) doesn't shut
- # down the Broker listening socket when it's supposed to.
- # Twisted-1.3.0, and current SVN (which will be post-2.0.1) are ok.
- # This iterate() is a quick hack to deal with the problem. I need to
- # investigate more thoroughly and find a better solution.
- d.addCallback(self.stall, 0.1)
- return maybeWait(d)
-
- def stall(self, res, timeout):
- d = defer.Deferred()
- reactor.callLater(timeout, d.callback, res)
- return d
-
- def testSender(self):
- self.master.loadConfig(config_empty)
- self.master.startService()
- # TODO: BuildMaster.loadChanges replaces the change_svc object, so we
- # have to load it twice. Clean this up.
- d = self.master.loadConfig(config_sender)
- d.addCallback(self._testSender_1)
- return maybeWait(d)
-
- def _testSender_1(self, res):
- self.cm = cm = self.master.change_svc
- s1 = list(self.cm)[0]
- port = self.master.slavePort._port.getHost().port
-
- self.options = {'username': "alice",
- 'master': "localhost:%d" % port,
- 'files': ["foo.c"],
- }
-
- d = runner.sendchange(self.options)
- d.addCallback(self._testSender_2)
- return d
-
- def _testSender_2(self, res):
- # now check that the change was received
- self.failUnlessEqual(len(self.cm.changes), 1)
- c = self.cm.changes.pop()
- self.failUnlessEqual(c.who, "alice")
- self.failUnlessEqual(c.files, ["foo.c"])
- self.failUnlessEqual(c.comments, "")
- self.failUnlessEqual(c.revision, None)
-
- self.options['revision'] = "r123"
- self.options['comments'] = "test change"
-
- d = runner.sendchange(self.options)
- d.addCallback(self._testSender_3)
- return d
-
- def _testSender_3(self, res):
- self.failUnlessEqual(len(self.cm.changes), 1)
- c = self.cm.changes.pop()
- self.failUnlessEqual(c.who, "alice")
- self.failUnlessEqual(c.files, ["foo.c"])
- self.failUnlessEqual(c.comments, "test change")
- self.failUnlessEqual(c.revision, "r123")
-
- # test options['logfile'] by creating a temporary file
- logfile = self.mktemp()
- f = open(logfile, "wt")
- f.write("longer test change")
- f.close()
- self.options['comments'] = None
- self.options['logfile'] = logfile
-
- d = runner.sendchange(self.options)
- d.addCallback(self._testSender_4)
- return d
-
- def _testSender_4(self, res):
- self.failUnlessEqual(len(self.cm.changes), 1)
- c = self.cm.changes.pop()
- self.failUnlessEqual(c.who, "alice")
- self.failUnlessEqual(c.files, ["foo.c"])
- self.failUnlessEqual(c.comments, "longer test change")
- self.failUnlessEqual(c.revision, "r123")
-
- # make sure that numeric revisions work too
- self.options['logfile'] = None
- del self.options['revision']
- self.options['revision_number'] = 42
-
- d = runner.sendchange(self.options)
- d.addCallback(self._testSender_5)
- return d
-
- def _testSender_5(self, res):
- self.failUnlessEqual(len(self.cm.changes), 1)
- c = self.cm.changes.pop()
- self.failUnlessEqual(c.who, "alice")
- self.failUnlessEqual(c.files, ["foo.c"])
- self.failUnlessEqual(c.comments, "")
- self.failUnlessEqual(c.revision, 42)
-
- # verify --branch too
- self.options['branch'] = "branches/test"
-
- d = runner.sendchange(self.options)
- d.addCallback(self._testSender_6)
- return d
-
- def _testSender_6(self, res):
- self.failUnlessEqual(len(self.cm.changes), 1)
- c = self.cm.changes.pop()
- self.failUnlessEqual(c.who, "alice")
- self.failUnlessEqual(c.files, ["foo.c"])
- self.failUnlessEqual(c.comments, "")
- self.failUnlessEqual(c.revision, 42)
- self.failUnlessEqual(c.branch, "branches/test")
diff --git a/buildbot/buildbot-source/buildbot/test/test_config.py b/buildbot/buildbot-source/buildbot/test/test_config.py
deleted file mode 100644
index 6eee7d74e..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_config.py
+++ /dev/null
@@ -1,1007 +0,0 @@
-# -*- test-case-name: buildbot.test.test_config -*-
-
-from __future__ import generators
-import os, os.path
-
-from twisted.trial import unittest
-from twisted.python import components, failure
-from twisted.internet import defer
-
-try:
- import cvstoys
- from buildbot.changes.freshcvs import FreshCVSSource
-except ImportError:
- cvstoys = None
-
-from buildbot.twcompat import providedBy, maybeWait
-from buildbot.master import BuildMaster
-from buildbot import scheduler
-from buildbot import interfaces as ibb
-from twisted.application import service, internet
-from twisted.spread import pb
-from twisted.web.server import Site
-from twisted.web.distrib import ResourcePublisher
-from buildbot.process.builder import Builder
-from buildbot.process.factory import BasicBuildFactory
-from buildbot.process import step
-from buildbot.status import html, builder, base
-try:
- from buildbot.status import words
-except ImportError:
- words = None
-
-import sys
-from twisted.python import log
-#log.startLogging(sys.stdout)
-
-emptyCfg = \
-"""
-BuildmasterConfig = c = {}
-c['bots'] = []
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = []
-c['slavePortnum'] = 9999
-c['projectName'] = 'dummy project'
-c['projectURL'] = 'http://dummy.example.com'
-c['buildbotURL'] = 'http://dummy.example.com/buildbot'
-"""
-
-buildersCfg = \
-"""
-from buildbot.process.factory import BasicBuildFactory
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 9999
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-c['builders'] = [{'name':'builder1', 'slavename':'bot1',
- 'builddir':'workdir', 'factory':f1}]
-"""
-
-buildersCfg2 = buildersCfg + \
-"""
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule2')
-c['builders'] = [{'name':'builder1', 'slavename':'bot1',
- 'builddir':'workdir', 'factory':f1}]
-"""
-
-buildersCfg3 = buildersCfg2 + \
-"""
-c['builders'].append({'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 })
-"""
-
-buildersCfg4 = buildersCfg2 + \
-"""
-c['builders'] = [{ 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'newworkdir', 'factory': f1 },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 }]
-"""
-
-ircCfg1 = emptyCfg + \
-"""
-from buildbot.status import words
-c['status'] = [words.IRC('irc.us.freenode.net', 'buildbot', ['twisted'])]
-"""
-
-ircCfg2 = emptyCfg + \
-"""
-from buildbot.status import words
-c['status'] = [words.IRC('irc.us.freenode.net', 'buildbot', ['twisted']),
- words.IRC('irc.example.com', 'otherbot', ['chan1', 'chan2'])]
-"""
-
-ircCfg3 = emptyCfg + \
-"""
-from buildbot.status import words
-c['status'] = [words.IRC('irc.us.freenode.net', 'buildbot', ['knotted'])]
-"""
-
-webCfg1 = emptyCfg + \
-"""
-from buildbot.status import html
-c['status'] = [html.Waterfall(http_port=9980)]
-"""
-
-webCfg2 = emptyCfg + \
-"""
-from buildbot.status import html
-c['status'] = [html.Waterfall(http_port=9981)]
-"""
-
-webCfg3 = emptyCfg + \
-"""
-from buildbot.status import html
-c['status'] = [html.Waterfall(http_port='tcp:9981:interface=127.0.0.1')]
-"""
-
-webNameCfg1 = emptyCfg + \
-"""
-from buildbot.status import html
-c['status'] = [html.Waterfall(distrib_port='~/.twistd-web-pb')]
-"""
-
-webNameCfg2 = emptyCfg + \
-"""
-from buildbot.status import html
-c['status'] = [html.Waterfall(distrib_port='./bar.socket')]
-"""
-
-debugPasswordCfg = emptyCfg + \
-"""
-c['debugPassword'] = 'sekrit'
-"""
-
-interlockCfgBad = \
-"""
-from buildbot.process.factory import BasicBuildFactory
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1 },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-# interlocks have been removed
-c['interlocks'] = [('lock1', ['builder1'], ['builder2', 'builder3']),
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfgBad1 = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock1') # duplicate lock name
-f1 = BuildFactory([s(Dummy, locks=[])])
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1, 'locks': [l1, l2] },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfgBad2 = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock, SlaveLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = SlaveLock('lock1') # duplicate lock name
-f1 = BuildFactory([s(Dummy, locks=[])])
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1, 'locks': [l1, l2] },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfgBad3 = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock1') # duplicate lock name
-f1 = BuildFactory([s(Dummy, locks=[l2])])
-f2 = BuildFactory([s(Dummy)])
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f2, 'locks': [l1] },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfg1a = \
-"""
-from buildbot.process.factory import BasicBuildFactory
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock2')
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1, 'locks': [l1, l2] },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfg1b = \
-"""
-from buildbot.process.factory import BasicBuildFactory
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock2')
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1, 'locks': [l1] },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f1 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-# test out step Locks
-lockCfg2a = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock2')
-f1 = BuildFactory([s(Dummy, locks=[l1,l2])])
-f2 = BuildFactory([s(Dummy)])
-
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1 },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f2 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfg2b = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock2')
-f1 = BuildFactory([s(Dummy, locks=[l1])])
-f2 = BuildFactory([s(Dummy)])
-
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1 },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f2 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-lockCfg2c = \
-"""
-from buildbot.process.step import Dummy
-from buildbot.process.factory import BuildFactory, s
-from buildbot.locks import MasterLock
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = []
-l1 = MasterLock('lock1')
-l2 = MasterLock('lock2')
-f1 = BuildFactory([s(Dummy)])
-f2 = BuildFactory([s(Dummy)])
-
-c['builders'] = [
- { 'name': 'builder1', 'slavename': 'bot1',
- 'builddir': 'workdir', 'factory': f1 },
- { 'name': 'builder2', 'slavename': 'bot1',
- 'builddir': 'workdir2', 'factory': f2 },
- ]
-c['slavePortnum'] = 9999
-BuildmasterConfig = c
-"""
-
-class ConfigTest(unittest.TestCase):
- def setUp(self):
- self.buildmaster = BuildMaster(".")
-
- def failUnlessListsEquivalent(self, list1, list2):
- l1 = list1[:]
- l1.sort()
- l2 = list2[:]
- l2.sort()
- self.failUnlessEqual(l1, l2)
-
- def servers(self, s, types):
-        # perform a recursive search of s.services, looking for instances of
-        # the given service types (e.g. twisted.application.internet.TCPServer);
-        # callers can then extract their .args values to find the TCP ports
-        # they want to listen on
- for child in s:
- if providedBy(child, service.IServiceCollection):
- for gc in self.servers(child, types):
- yield gc
- if isinstance(child, types):
- yield child
-
- def TCPports(self, s):
- return list(self.servers(s, internet.TCPServer))
- def UNIXports(self, s):
- return list(self.servers(s, internet.UNIXServer))
- def TCPclients(self, s):
- return list(self.servers(s, internet.TCPClient))
-
- def checkPorts(self, svc, expected):
- """Verify that the TCPServer and UNIXServer children of the given
- service have the expected portnum/pathname and factory classes. As a
- side-effect, return a list of servers in the same order as the
- 'expected' list. This can be used to verify properties of the
- factories contained therein."""
-
- expTCP = [e for e in expected if type(e[0]) == int]
- expUNIX = [e for e in expected if type(e[0]) == str]
- haveTCP = [(p.args[0], p.args[1].__class__)
- for p in self.TCPports(svc)]
- haveUNIX = [(p.args[0], p.args[1].__class__)
- for p in self.UNIXports(svc)]
- self.failUnlessListsEquivalent(expTCP, haveTCP)
- self.failUnlessListsEquivalent(expUNIX, haveUNIX)
- ret = []
- for e in expected:
- for have in self.TCPports(svc) + self.UNIXports(svc):
- if have.args[0] == e[0]:
- ret.append(have)
-                    break
- assert(len(ret) == len(expected))
- return ret
-
- def testEmpty(self):
- self.failUnlessRaises(KeyError, self.buildmaster.loadConfig, "")
-
- def testSimple(self):
- # covers slavePortnum, base checker passwords
- master = self.buildmaster
- master.loadChanges()
-
- master.loadConfig(emptyCfg)
- # note: this doesn't actually start listening, because the app
- # hasn't been started running
- self.failUnlessEqual(master.slavePortnum, "tcp:9999")
- self.checkPorts(master, [(9999, pb.PBServerFactory)])
- self.failUnlessEqual(list(master.change_svc), [])
- self.failUnlessEqual(master.botmaster.builders, {})
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw"})
- self.failUnlessEqual(master.projectName, "dummy project")
- self.failUnlessEqual(master.projectURL, "http://dummy.example.com")
- self.failUnlessEqual(master.buildbotURL,
- "http://dummy.example.com/buildbot")
-
- def testSlavePortnum(self):
- master = self.buildmaster
- master.loadChanges()
-
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.slavePortnum, "tcp:9999")
- ports = self.checkPorts(master, [(9999, pb.PBServerFactory)])
- p = ports[0]
-
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.slavePortnum, "tcp:9999")
- ports = self.checkPorts(master, [(9999, pb.PBServerFactory)])
- self.failUnlessIdentical(p, ports[0],
- "the slave port was changed even " + \
- "though the configuration was not")
-
- master.loadConfig(emptyCfg + "c['slavePortnum'] = 9000\n")
- self.failUnlessEqual(master.slavePortnum, "tcp:9000")
- ports = self.checkPorts(master, [(9000, pb.PBServerFactory)])
- self.failIf(p is ports[0],
- "slave port was unchanged but configuration was changed")
-
- def testBots(self):
- master = self.buildmaster
- master.loadChanges()
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.botmaster.builders, {})
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw"})
- botsCfg = (emptyCfg +
- "c['bots'] = [('bot1', 'pw1'), ('bot2', 'pw2')]\n")
- master.loadConfig(botsCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw",
- "bot1": "pw1",
- "bot2": "pw2"})
- master.loadConfig(botsCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw",
- "bot1": "pw1",
- "bot2": "pw2"})
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw"})
-
-
- def testSources(self):
- if not cvstoys:
- raise unittest.SkipTest("this test needs CVSToys installed")
- master = self.buildmaster
- master.loadChanges()
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(list(master.change_svc), [])
-
- self.sourcesCfg = emptyCfg + \
-"""
-from buildbot.changes.freshcvs import FreshCVSSource
-s1 = FreshCVSSource('cvs.example.com', 1000, 'pname', 'spass',
- prefix='Prefix/')
-c['sources'] = [s1]
-"""
-
- d = master.loadConfig(self.sourcesCfg)
- d.addCallback(self._testSources_1)
- return maybeWait(d)
-
- def _testSources_1(self, res):
- self.failUnlessEqual(len(list(self.buildmaster.change_svc)), 1)
- s1 = list(self.buildmaster.change_svc)[0]
- self.failUnless(isinstance(s1, FreshCVSSource))
- self.failUnlessEqual(s1.host, "cvs.example.com")
- self.failUnlessEqual(s1.port, 1000)
- self.failUnlessEqual(s1.prefix, "Prefix/")
- self.failUnlessEqual(s1, list(self.buildmaster.change_svc)[0])
- self.failUnless(s1.parent)
-
- # verify that unchanged sources are not interrupted
- d = self.buildmaster.loadConfig(self.sourcesCfg)
- d.addCallback(self._testSources_2, s1)
- return d
-
- def _testSources_2(self, res, s1):
- self.failUnlessEqual(len(list(self.buildmaster.change_svc)), 1)
- s2 = list(self.buildmaster.change_svc)[0]
- self.failUnlessIdentical(s1, s2)
- self.failUnless(s1.parent)
-
- # make sure we can get rid of the sources too
- d = self.buildmaster.loadConfig(emptyCfg)
- d.addCallback(self._testSources_3)
- return d
-
- def _testSources_3(self, res):
- self.failUnlessEqual(list(self.buildmaster.change_svc), [])
-
- def shouldBeFailure(self, res, *expected):
- self.failUnless(isinstance(res, failure.Failure),
- "we expected this to fail, not produce %s" % (res,))
- res.trap(*expected)
- return None # all is good
-
- def testSchedulers(self):
- master = self.buildmaster
- master.loadChanges()
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.allSchedulers(), [])
-
- self.schedulersCfg = \
-"""
-from buildbot.scheduler import Scheduler, Dependent
-from buildbot.process.factory import BasicBuildFactory
-c = {}
-c['bots'] = [('bot1', 'pw1')]
-c['sources'] = []
-c['schedulers'] = [Scheduler('full', None, 60, ['builder1'])]
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-c['builders'] = [{'name':'builder1', 'slavename':'bot1',
- 'builddir':'workdir', 'factory':f1}]
-c['slavePortnum'] = 9999
-c['projectName'] = 'dummy project'
-c['projectURL'] = 'http://dummy.example.com'
-c['buildbotURL'] = 'http://dummy.example.com/buildbot'
-BuildmasterConfig = c
-"""
-
- # c['schedulers'] must be a list
- badcfg = self.schedulersCfg + \
-"""
-c['schedulers'] = Scheduler('full', None, 60, ['builder1'])
-"""
- d = defer.maybeDeferred(self.buildmaster.loadConfig, badcfg)
- d.addBoth(self._testSchedulers_1)
- return maybeWait(d)
- def _testSchedulers_1(self, res):
- self.shouldBeFailure(res, AssertionError)
- # c['schedulers'] must be a list of IScheduler objects
- badcfg = self.schedulersCfg + \
-"""
-c['schedulers'] = ['oops', 'problem']
-"""
- d = defer.maybeDeferred(self.buildmaster.loadConfig, badcfg)
- d.addBoth(self._testSchedulers_2)
- return d
- def _testSchedulers_2(self, res):
- self.shouldBeFailure(res, AssertionError)
- # c['schedulers'] must point at real builders
- badcfg = self.schedulersCfg + \
-"""
-c['schedulers'] = [Scheduler('full', None, 60, ['builder-bogus'])]
-"""
- d = defer.maybeDeferred(self.buildmaster.loadConfig, badcfg)
- d.addBoth(self._testSchedulers_3)
- return d
- def _testSchedulers_3(self, res):
- self.shouldBeFailure(res, AssertionError)
- d = self.buildmaster.loadConfig(self.schedulersCfg)
- d.addCallback(self._testSchedulers_4)
- return d
- def _testSchedulers_4(self, res):
- sch = self.buildmaster.allSchedulers()
- self.failUnlessEqual(len(sch), 1)
- s = sch[0]
- self.failUnless(isinstance(s, scheduler.Scheduler))
- self.failUnlessEqual(s.name, "full")
- self.failUnlessEqual(s.branch, None)
- self.failUnlessEqual(s.treeStableTimer, 60)
- self.failUnlessEqual(s.builderNames, ['builder1'])
-
- newcfg = self.schedulersCfg + \
-"""
-s1 = Scheduler('full', None, 60, ['builder1'])
-c['schedulers'] = [s1, Dependent('downstream', s1, ['builder1'])]
-"""
- d = self.buildmaster.loadConfig(newcfg)
- d.addCallback(self._testSchedulers_5, newcfg)
- return d
- def _testSchedulers_5(self, res, newcfg):
- sch = self.buildmaster.allSchedulers()
- self.failUnlessEqual(len(sch), 2)
- s = sch[0]
- self.failUnless(isinstance(s, scheduler.Scheduler))
- s = sch[1]
- self.failUnless(isinstance(s, scheduler.Dependent))
- self.failUnlessEqual(s.name, "downstream")
- self.failUnlessEqual(s.builderNames, ['builder1'])
-
- # reloading the same config file should leave the schedulers in place
- d = self.buildmaster.loadConfig(newcfg)
-        d.addCallback(self._testSchedulers_6, sch)
-        return d
-    def _testSchedulers_6(self, res, sch1):
- sch2 = self.buildmaster.allSchedulers()
- self.failUnlessEqual(len(sch2), 2)
- sch1.sort()
- sch2.sort()
- self.failUnlessEqual(sch1, sch2)
- self.failUnlessIdentical(sch1[0], sch2[0])
- self.failUnlessIdentical(sch1[1], sch2[1])
- self.failUnlessIdentical(sch1[0].parent, self.buildmaster)
- self.failUnlessIdentical(sch1[1].parent, self.buildmaster)
-
-
- def testBuilders(self):
- master = self.buildmaster
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.botmaster.builders, {})
-
- master.loadConfig(buildersCfg)
- self.failUnlessEqual(master.botmaster.builderNames, ["builder1"])
- self.failUnlessEqual(master.botmaster.builders.keys(), ["builder1"])
- b = master.botmaster.builders["builder1"]
- self.failUnless(isinstance(b, Builder))
- self.failUnlessEqual(b.name, "builder1")
- self.failUnlessEqual(b.slavenames, ["bot1"])
- self.failUnlessEqual(b.builddir, "workdir")
- f1 = b.buildFactory
- self.failUnless(isinstance(f1, BasicBuildFactory))
- steps = f1.steps
- self.failUnlessEqual(len(steps), 3)
- self.failUnlessEqual(steps[0], (step.CVS,
- {'cvsroot': 'cvsroot',
- 'cvsmodule': 'cvsmodule',
- 'mode': 'clobber'}))
- self.failUnlessEqual(steps[1], (step.Compile,
- {'command': 'make all'}))
- self.failUnlessEqual(steps[2], (step.Test,
- {'command': 'make check'}))
-
-
- # make sure a reload of the same data doesn't interrupt the Builder
- master.loadConfig(buildersCfg)
- self.failUnlessEqual(master.botmaster.builderNames, ["builder1"])
- self.failUnlessEqual(master.botmaster.builders.keys(), ["builder1"])
- b2 = master.botmaster.builders["builder1"]
- self.failUnlessIdentical(b, b2)
- # TODO: test that the BuilderStatus object doesn't change
- #statusbag2 = master.client_svc.statusbags["builder1"]
- #self.failUnlessIdentical(statusbag, statusbag2)
-
- # but changing something should result in a new Builder
- master.loadConfig(buildersCfg2)
- self.failUnlessEqual(master.botmaster.builderNames, ["builder1"])
- self.failUnlessEqual(master.botmaster.builders.keys(), ["builder1"])
- b3 = master.botmaster.builders["builder1"]
- self.failIf(b is b3)
- # the statusbag remains the same TODO
- #statusbag3 = master.client_svc.statusbags["builder1"]
- #self.failUnlessIdentical(statusbag, statusbag3)
-
- # adding new builder
- master.loadConfig(buildersCfg3)
- self.failUnlessEqual(master.botmaster.builderNames, ["builder1",
- "builder2"])
- self.failUnlessListsEquivalent(master.botmaster.builders.keys(),
- ["builder1", "builder2"])
- b4 = master.botmaster.builders["builder1"]
- self.failUnlessIdentical(b3, b4)
-
- # changing first builder should leave it at the same place in the list
- master.loadConfig(buildersCfg4)
- self.failUnlessEqual(master.botmaster.builderNames, ["builder1",
- "builder2"])
- self.failUnlessListsEquivalent(master.botmaster.builders.keys(),
- ["builder1", "builder2"])
- b5 = master.botmaster.builders["builder1"]
- self.failIf(b4 is b5)
-
- # and removing it should make the Builder go away
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.botmaster.builderNames, [])
- self.failUnlessEqual(master.botmaster.builders, {})
- #self.failUnlessEqual(master.client_svc.statusbags, {}) # TODO
-
- def checkIRC(self, m, expected):
- ircs = {}
- for irc in self.servers(m, words.IRC):
- ircs[irc.host] = (irc.nick, irc.channels)
- self.failUnlessEqual(ircs, expected)
-
- def testIRC(self):
- if not words:
- raise unittest.SkipTest("Twisted Words package is not installed")
- master = self.buildmaster
- master.loadChanges()
- d = master.loadConfig(emptyCfg)
- e1 = {}
- d.addCallback(lambda res: self.checkIRC(master, e1))
- d.addCallback(lambda res: master.loadConfig(ircCfg1))
- e2 = {'irc.us.freenode.net': ('buildbot', ['twisted'])}
- d.addCallback(lambda res: self.checkIRC(master, e2))
- d.addCallback(lambda res: master.loadConfig(ircCfg2))
- e3 = {'irc.us.freenode.net': ('buildbot', ['twisted']),
- 'irc.example.com': ('otherbot', ['chan1', 'chan2'])}
- d.addCallback(lambda res: self.checkIRC(master, e3))
- d.addCallback(lambda res: master.loadConfig(ircCfg3))
- e4 = {'irc.us.freenode.net': ('buildbot', ['knotted'])}
- d.addCallback(lambda res: self.checkIRC(master, e4))
- d.addCallback(lambda res: master.loadConfig(ircCfg1))
- e5 = {'irc.us.freenode.net': ('buildbot', ['twisted'])}
- d.addCallback(lambda res: self.checkIRC(master, e5))
- return maybeWait(d)
-
- def testWebPortnum(self):
- master = self.buildmaster
- master.loadChanges()
-
- d = master.loadConfig(webCfg1)
- d.addCallback(self._testWebPortnum_1)
- return maybeWait(d)
- def _testWebPortnum_1(self, res):
- ports = self.checkPorts(self.buildmaster, [(9999, pb.PBServerFactory),
- (9980, Site)])
- p = ports[1]
-
- d = self.buildmaster.loadConfig(webCfg1) # nothing should be changed
- d.addCallback(self._testWebPortnum_2, p)
- return d
- def _testWebPortnum_2(self, res, p):
- ports = self.checkPorts(self.buildmaster, [(9999, pb.PBServerFactory),
- (9980, Site)])
- self.failUnlessIdentical(p, ports[1],
- "web port was changed even though " + \
- "configuration was not")
-
- d = self.buildmaster.loadConfig(webCfg2) # changes to 9981
- d.addCallback(self._testWebPortnum_3, p)
- return d
- def _testWebPortnum_3(self, res, p):
- ports = self.checkPorts(self.buildmaster, [(9999, pb.PBServerFactory),
- (9981, Site)])
- self.failIf(p is ports[1],
- "configuration was changed but web port was unchanged")
- d = self.buildmaster.loadConfig(webCfg3) # 9981 on only localhost
- d.addCallback(self._testWebPortnum_4, ports[1])
- return d
- def _testWebPortnum_4(self, res, p):
- ports = self.checkPorts(self.buildmaster, [(9999, pb.PBServerFactory),
- (9981, Site)])
- self.failUnlessEqual(ports[1].kwargs['interface'], "127.0.0.1")
- d = self.buildmaster.loadConfig(emptyCfg)
- d.addCallback(lambda res:
- self.checkPorts(self.buildmaster,
- [(9999, pb.PBServerFactory)]))
- return d
-
- def testWebPathname(self):
- master = self.buildmaster
- master.loadChanges()
-
- d = master.loadConfig(webNameCfg1)
- d.addCallback(self._testWebPathname_1)
- return maybeWait(d)
- def _testWebPathname_1(self, res):
- self.checkPorts(self.buildmaster,
- [(9999, pb.PBServerFactory),
- ('~/.twistd-web-pb', pb.PBServerFactory)])
- unixports = self.UNIXports(self.buildmaster)
- f = unixports[0].args[1]
- self.failUnless(isinstance(f.root, ResourcePublisher))
-
- d = self.buildmaster.loadConfig(webNameCfg1)
- # nothing should be changed
- d.addCallback(self._testWebPathname_2, f)
- return d
- def _testWebPathname_2(self, res, f):
- self.checkPorts(self.buildmaster,
- [(9999, pb.PBServerFactory),
- ('~/.twistd-web-pb', pb.PBServerFactory)])
- self.failUnlessIdentical(f,
- self.UNIXports(self.buildmaster)[0].args[1],
- "web factory was changed even though " + \
- "configuration was not")
-
- d = self.buildmaster.loadConfig(webNameCfg2)
- d.addCallback(self._testWebPathname_3, f)
- return d
- def _testWebPathname_3(self, res, f):
- self.checkPorts(self.buildmaster,
- [(9999, pb.PBServerFactory),
- ('./bar.socket', pb.PBServerFactory)])
- self.failIf(f is self.UNIXports(self.buildmaster)[0].args[1],
- "web factory was unchanged but configuration was changed")
-
- d = self.buildmaster.loadConfig(emptyCfg)
- d.addCallback(lambda res:
- self.checkPorts(self.buildmaster,
- [(9999, pb.PBServerFactory)]))
- return d
-
- def testDebugPassword(self):
- master = self.buildmaster
-
- master.loadConfig(debugPasswordCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw",
- "debug": "sekrit"})
-
- master.loadConfig(debugPasswordCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw",
- "debug": "sekrit"})
-
- master.loadConfig(emptyCfg)
- self.failUnlessEqual(master.checker.users,
- {"change": "changepw"})
-
- def testLocks(self):
- master = self.buildmaster
- botmaster = master.botmaster
-
- # make sure that c['interlocks'] is rejected properly
- self.failUnlessRaises(KeyError, master.loadConfig, interlockCfgBad)
- # and that duplicate-named Locks are caught
- self.failUnlessRaises(ValueError, master.loadConfig, lockCfgBad1)
- self.failUnlessRaises(ValueError, master.loadConfig, lockCfgBad2)
- self.failUnlessRaises(ValueError, master.loadConfig, lockCfgBad3)
-
- # create a Builder that uses Locks
- master.loadConfig(lockCfg1a)
- b1 = master.botmaster.builders["builder1"]
- self.failUnlessEqual(len(b1.locks), 2)
-
- # reloading the same config should not change the Builder
- master.loadConfig(lockCfg1a)
- self.failUnlessIdentical(b1, master.botmaster.builders["builder1"])
- # but changing the set of locks used should change it
- master.loadConfig(lockCfg1b)
- self.failIfIdentical(b1, master.botmaster.builders["builder1"])
- b1 = master.botmaster.builders["builder1"]
- self.failUnlessEqual(len(b1.locks), 1)
-
- # similar test with step-scoped locks
- master.loadConfig(lockCfg2a)
- b1 = master.botmaster.builders["builder1"]
- # reloading the same config should not change the Builder
- master.loadConfig(lockCfg2a)
- self.failUnlessIdentical(b1, master.botmaster.builders["builder1"])
- # but changing the set of locks used should change it
- master.loadConfig(lockCfg2b)
- self.failIfIdentical(b1, master.botmaster.builders["builder1"])
- b1 = master.botmaster.builders["builder1"]
- # remove the locks entirely
- master.loadConfig(lockCfg2c)
- self.failIfIdentical(b1, master.botmaster.builders["builder1"])
-
-class ConfigElements(unittest.TestCase):
- # verify that ComparableMixin is working
- def testSchedulers(self):
- s1 = scheduler.Scheduler(name='quick', branch=None,
- treeStableTimer=30,
- builderNames=['quick'])
- s2 = scheduler.Scheduler(name="all", branch=None,
- treeStableTimer=5*60,
- builderNames=["a", "b"])
- s3 = scheduler.Try_Userpass("try", ["a","b"], port=9989,
- userpass=[("foo","bar")])
- s1a = scheduler.Scheduler(name='quick', branch=None,
- treeStableTimer=30,
- builderNames=['quick'])
- s2a = scheduler.Scheduler(name="all", branch=None,
- treeStableTimer=5*60,
- builderNames=["a", "b"])
- s3a = scheduler.Try_Userpass("try", ["a","b"], port=9989,
- userpass=[("foo","bar")])
- self.failUnless(s1 == s1)
- self.failUnless(s1 == s1a)
- self.failUnless(s1a in [s1, s2, s3])
- self.failUnless(s2a in [s1, s2, s3])
- self.failUnless(s3a in [s1, s2, s3])
-
-
-
-class ConfigFileTest(unittest.TestCase):
-
- def testFindConfigFile(self):
- os.mkdir("test_cf")
- open(os.path.join("test_cf", "master.cfg"), "w").write(emptyCfg)
- slaveportCfg = emptyCfg + "c['slavePortnum'] = 9000\n"
- open(os.path.join("test_cf", "alternate.cfg"), "w").write(slaveportCfg)
-
- m = BuildMaster("test_cf")
- m.loadTheConfigFile()
- self.failUnlessEqual(m.slavePortnum, "tcp:9999")
-
- m = BuildMaster("test_cf", "alternate.cfg")
- m.loadTheConfigFile()
- self.failUnlessEqual(m.slavePortnum, "tcp:9000")
-
-
-class MyTarget(base.StatusReceiverMultiService):
- def __init__(self, name):
- self.name = name
- base.StatusReceiverMultiService.__init__(self)
- def startService(self):
- # make a note in a list stashed in the BuildMaster
- self.parent.targetevents.append(("start", self.name))
- return base.StatusReceiverMultiService.startService(self)
- def stopService(self):
- self.parent.targetevents.append(("stop", self.name))
- return base.StatusReceiverMultiService.stopService(self)
-
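-# like MyTarget, but takes an extra 0.1s to shut down, so reconfig tests can
-# check that the old target is fully stopped before the new one is started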
-class MySlowTarget(MyTarget):
- def stopService(self):
- from twisted.internet import reactor
- d = base.StatusReceiverMultiService.stopService(self)
- def stall(res):
- d2 = defer.Deferred()
- reactor.callLater(0.1, d2.callback, res)
- return d2
- d.addCallback(stall)
- m = self.parent
- def finishedStalling(res):
- m.targetevents.append(("stop", self.name))
- return res
- d.addCallback(finishedStalling)
- return d
-
-# we can't actually startService a buildmaster with a config that uses a
-# fixed slavePortnum like 9999, so instead this template makes it possible to
-# pass 0 the first time, and then substitute the allocated port number back
-# in on subsequent passes.
-startableEmptyCfg = emptyCfg + \
-"""
-c['slavePortnum'] = %d
-"""
-
-targetCfg1 = startableEmptyCfg + \
-"""
-from buildbot.test.test_config import MyTarget
-c['status'] = [MyTarget('a')]
-"""
-
-targetCfg2 = startableEmptyCfg + \
-"""
-from buildbot.test.test_config import MySlowTarget
-c['status'] = [MySlowTarget('b')]
-"""
-
-class StartService(unittest.TestCase):
- def tearDown(self):
- return self.master.stopService()
-
- def testStartService(self):
- os.mkdir("test_ss")
- self.master = m = BuildMaster("test_ss")
- m.startService()
- d = m.loadConfig(startableEmptyCfg % 0)
- d.addCallback(self._testStartService_0)
- return maybeWait(d)
-
- def _testStartService_0(self, res):
- m = self.master
- m.targetevents = []
- # figure out what port got allocated
- self.portnum = m.slavePort._port.getHost().port
- d = m.loadConfig(targetCfg1 % self.portnum)
- d.addCallback(self._testStartService_1)
- return d
-
- def _testStartService_1(self, res):
- self.failUnlessEqual(len(self.master.statusTargets), 1)
- self.failUnless(isinstance(self.master.statusTargets[0], MyTarget))
- self.failUnlessEqual(self.master.targetevents,
- [('start', 'a')])
- self.master.targetevents = []
- # reloading the same config should not start or stop the target
- d = self.master.loadConfig(targetCfg1 % self.portnum)
- d.addCallback(self._testStartService_2)
- return d
-
- def _testStartService_2(self, res):
- self.failUnlessEqual(self.master.targetevents, [])
- # but loading a new config file should stop the old one, then
- # start the new one
- d = self.master.loadConfig(targetCfg2 % self.portnum)
- d.addCallback(self._testStartService_3)
- return d
-
- def _testStartService_3(self, res):
- self.failUnlessEqual(self.master.targetevents,
- [('stop', 'a'), ('start', 'b')])
- self.master.targetevents = []
- # and going back to the old one should do the same, in the same
- # order, even though the current MySlowTarget takes a moment to shut
- # down
- d = self.master.loadConfig(targetCfg1 % self.portnum)
- d.addCallback(self._testStartService_4)
- return d
-
- def _testStartService_4(self, res):
- self.failUnlessEqual(self.master.targetevents,
- [('stop', 'b'), ('start', 'a')])
diff --git a/buildbot/buildbot-source/buildbot/test/test_control.py b/buildbot/buildbot-source/buildbot/test/test_control.py
deleted file mode 100644
index 42cd1ece5..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_control.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# -*- test-case-name: buildbot.test.test_control -*-
-
-import sys, os, signal, shutil, time, errno
-
-from twisted.trial import unittest
-from twisted.internet import defer, reactor
-
-from buildbot import master, interfaces
-from buildbot.sourcestamp import SourceStamp
-from buildbot.twcompat import providedBy, maybeWait
-from buildbot.slave import bot
-from buildbot.status import builder
-from buildbot.status.builder import SUCCESS
-from buildbot.process import base
-
-config = """
-from buildbot.process import factory, step
-
-def s(klass, **kwargs):
- return (klass, kwargs)
-
-f1 = factory.BuildFactory([
- s(step.Dummy, timeout=1),
- ])
-c = {}
-c['bots'] = [['bot1', 'sekrit']]
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = [{'name': 'force', 'slavename': 'bot1',
- 'builddir': 'force-dir', 'factory': f1}]
-c['slavePortnum'] = 0
-BuildmasterConfig = c
-"""
-
-class FakeBuilder:
- name = "fake"
- def getSlaveCommandVersion(self, command, oldversion=None):
- return "1.10"
-
-class SignalMixin:
- sigchldHandler = None
-
- def setUpClass(self):
- # make sure SIGCHLD handler is installed, as it should be on
- # reactor.run(). problem is reactor may not have been run when this
- # test runs.
- if hasattr(reactor, "_handleSigchld") and hasattr(signal, "SIGCHLD"):
- self.sigchldHandler = signal.signal(signal.SIGCHLD,
- reactor._handleSigchld)
-
- def tearDownClass(self):
- if self.sigchldHandler:
- signal.signal(signal.SIGCHLD, self.sigchldHandler)
-
-class Force(unittest.TestCase):
-
- def rmtree(self, d):
- try:
- shutil.rmtree(d, ignore_errors=1)
- except OSError, e:
-            # Python 2.2's shutil.rmtree appears to ignore ignore_errors
- if e.errno != errno.ENOENT:
- raise
-
- def setUp(self):
- self.master = None
- self.slave = None
- self.rmtree("control_basedir")
- os.mkdir("control_basedir")
- self.master = master.BuildMaster("control_basedir")
- self.slavebase = os.path.abspath("control_slavebase")
- self.rmtree(self.slavebase)
- os.mkdir("control_slavebase")
-
- def connectSlave(self):
- port = self.master.slavePort._port.getHost().port
- slave = bot.BuildSlave("localhost", port, "bot1", "sekrit",
- self.slavebase, keepalive=0, usePTY=1)
- self.slave = slave
- slave.startService()
- d = self.master.botmaster.waitUntilBuilderAttached("force")
- return d
-
- def tearDown(self):
- dl = []
- if self.slave:
- dl.append(self.master.botmaster.waitUntilBuilderDetached("force"))
- dl.append(defer.maybeDeferred(self.slave.stopService))
- if self.master:
- dl.append(defer.maybeDeferred(self.master.stopService))
- return maybeWait(defer.DeferredList(dl))
-
- def testForce(self):
- # TODO: since BuilderControl.forceBuild has been deprecated, this
- # test is scheduled to be removed soon
- m = self.master
- m.loadConfig(config)
- m.startService()
- d = self.connectSlave()
- d.addCallback(self._testForce_1)
- return maybeWait(d)
-
- def _testForce_1(self, res):
- c = interfaces.IControl(self.master)
- builder_control = c.getBuilder("force")
- d = builder_control.forceBuild("bob", "I was bored")
- d.addCallback(self._testForce_2)
- return d
-
- def _testForce_2(self, build_control):
- self.failUnless(providedBy(build_control, interfaces.IBuildControl))
- d = build_control.getStatus().waitUntilFinished()
- d.addCallback(self._testForce_3)
- return d
-
- def _testForce_3(self, bs):
- self.failUnless(providedBy(bs, interfaces.IBuildStatus))
- self.failUnless(bs.isFinished())
- self.failUnlessEqual(bs.getResults(), SUCCESS)
- #self.failUnlessEqual(bs.getResponsibleUsers(), ["bob"]) # TODO
- self.failUnlessEqual(bs.getChanges(), [])
- #self.failUnlessEqual(bs.getReason(), "forced") # TODO
-
- def testRequest(self):
- m = self.master
- m.loadConfig(config)
- m.startService()
- d = self.connectSlave()
- d.addCallback(self._testRequest_1)
- return maybeWait(d)
- def _testRequest_1(self, res):
- c = interfaces.IControl(self.master)
- req = base.BuildRequest("I was bored", SourceStamp())
- builder_control = c.getBuilder("force")
- d = defer.Deferred()
- req.subscribe(d.callback)
- builder_control.requestBuild(req)
- d.addCallback(self._testForce_2)
- # we use the same check-the-results code as testForce
- return d
diff --git a/buildbot/buildbot-source/buildbot/test/test_dependencies.py b/buildbot/buildbot-source/buildbot/test/test_dependencies.py
deleted file mode 100644
index 6871adcf2..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_dependencies.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# -*- test-case-name: buildbot.test.test_dependencies -*-
-
-from twisted.trial import unittest
-
-from twisted.internet import reactor, defer
-
-from buildbot import interfaces
-from buildbot.process import step
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process.base import BuildRequest
-from buildbot.test.runutils import RunMixin
-from buildbot.twcompat import maybeWait
-from buildbot.status import base
-
-config_1 = """
-from buildbot import scheduler
-from buildbot.process import step, factory
-s = factory.s
-from buildbot.test.test_locks import LockStep
-
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'sekrit'), ('bot2', 'sekrit')]
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 0
-
-# upstream1 (fastfail, slowpass)
-# -> downstream2 (b3, b4)
-# upstream3 (fastpass, slowpass)
-# -> downstream4 (b3, b4)
-# -> downstream5 (b5)
-
-s1 = scheduler.Scheduler('upstream1', None, 10, ['slowpass', 'fastfail'])
-s2 = scheduler.Dependent('downstream2', s1, ['b3', 'b4'])
-s3 = scheduler.Scheduler('upstream3', None, 10, ['fastpass', 'slowpass'])
-s4 = scheduler.Dependent('downstream4', s3, ['b3', 'b4'])
-s5 = scheduler.Dependent('downstream5', s4, ['b5'])
-c['schedulers'] = [s1, s2, s3, s4, s5]
-
-f_fastpass = factory.BuildFactory([s(step.Dummy, timeout=1)])
-f_slowpass = factory.BuildFactory([s(step.Dummy, timeout=2)])
-f_fastfail = factory.BuildFactory([s(step.FailingDummy, timeout=1)])
-
-def builder(name, f):
- d = {'name': name, 'slavename': 'bot1', 'builddir': name, 'factory': f}
- return d
-
-c['builders'] = [builder('slowpass', f_slowpass),
- builder('fastfail', f_fastfail),
- builder('fastpass', f_fastpass),
- builder('b3', f_fastpass),
- builder('b4', f_fastpass),
- builder('b5', f_fastpass),
- ]
-"""
-
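-# extra status target that records the name of each builder that starts a
-# build, so the tests can tell which builds ran and which never started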
-class Logger(base.StatusReceiverMultiService):
- def __init__(self, master):
- base.StatusReceiverMultiService.__init__(self)
- self.builds = []
- for bn in master.status.getBuilderNames():
- master.status.getBuilder(bn).subscribe(self)
-
- def buildStarted(self, builderName, build):
- self.builds.append(builderName)
-
-class Dependencies(RunMixin, unittest.TestCase):
- def setUp(self):
- RunMixin.setUp(self)
- self.master.loadConfig(config_1)
- self.master.startService()
- d = self.connectSlave(["slowpass", "fastfail", "fastpass",
- "b3", "b4", "b5"])
- return maybeWait(d)
-
- def findScheduler(self, name):
- for s in self.master.allSchedulers():
- if s.name == name:
- return s
- raise KeyError("No Scheduler named '%s'" % name)
-
- def testParse(self):
- self.master.loadConfig(config_1)
- # that's it, just make sure this config file is loaded successfully
-
- def testRun_Fail(self):
-        # add an extra status target so we can keep track of which builds
-        # start and which don't.
- self.logger = Logger(self.master)
-
- # kick off upstream1, which has a failing Builder and thus will not
-        # trigger downstream2
- s = self.findScheduler("upstream1")
- # this is an internal function of the Scheduler class
- s.fireTimer() # fires a build
- # t=0: two builders start: 'slowpass' and 'fastfail'
- # t=1: builder 'fastfail' finishes
- # t=2: builder 'slowpass' finishes
- d = defer.Deferred()
- d.addCallback(self._testRun_Fail_1)
- reactor.callLater(5, d.callback, None)
- return maybeWait(d)
-
- def _testRun_Fail_1(self, res):
- # 'slowpass' and 'fastfail' should have run one build each
- b = self.status.getBuilder('slowpass').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
- b = self.status.getBuilder('fastfail').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
- # none of the other builders should have run
- self.failIf(self.status.getBuilder('b3').getLastFinishedBuild())
- self.failIf(self.status.getBuilder('b4').getLastFinishedBuild())
- self.failIf(self.status.getBuilder('b5').getLastFinishedBuild())
-
- # in fact, none of them should have even started
- self.failUnlessEqual(len(self.logger.builds), 2)
- self.failUnless("slowpass" in self.logger.builds)
- self.failUnless("fastfail" in self.logger.builds)
- self.failIf("b3" in self.logger.builds)
- self.failIf("b4" in self.logger.builds)
- self.failIf("b5" in self.logger.builds)
-
- def testRun_Pass(self):
- # kick off upstream3, which will fire downstream4 and then
- # downstream5
- s = self.findScheduler("upstream3")
- # this is an internal function of the Scheduler class
- s.fireTimer() # fires a build
- # t=0: slowpass and fastpass start
- # t=1: builder 'fastpass' finishes
- # t=2: builder 'slowpass' finishes
- # scheduler 'downstream4' fires
- # builds b3 and b4 are started
- # t=3: builds b3 and b4 finish
- # scheduler 'downstream5' fires
- # build b5 is started
- # t=4: build b5 is finished
- d = defer.Deferred()
- d.addCallback(self._testRun_Pass_1)
- reactor.callLater(5, d.callback, None)
- return maybeWait(d)
-
- def _testRun_Pass_1(self, res):
- # 'fastpass' and 'slowpass' should have run one build each
- b = self.status.getBuilder('fastpass').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
- b = self.status.getBuilder('slowpass').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
- self.failIf(self.status.getBuilder('fastfail').getLastFinishedBuild())
-
- b = self.status.getBuilder('b3').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
- b = self.status.getBuilder('b4').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
-        b = self.status.getBuilder('b5').getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getNumber(), 0)
-
-
diff --git a/buildbot/buildbot-source/buildbot/test/test_locks.py b/buildbot/buildbot-source/buildbot/test/test_locks.py
deleted file mode 100644
index 2a3ec58d7..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_locks.py
+++ /dev/null
@@ -1,165 +0,0 @@
-# -*- test-case-name: buildbot.test.test_locks -*-
-
-from twisted.trial import unittest
-from twisted.internet import defer
-
-from buildbot import interfaces
-from buildbot.process import step
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process.base import BuildRequest
-from buildbot.test.runutils import RunMixin
-from buildbot.twcompat import maybeWait
-
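-# a Dummy step that records ("start", n) and ("done", n) events on its build
-# request, so the tests below can assert the order in which locked builds ran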
-class LockStep(step.Dummy):
- def start(self):
- number = self.build.requests[0].number
- self.build.requests[0].events.append(("start", number))
- step.Dummy.start(self)
- def done(self):
- number = self.build.requests[0].number
- self.build.requests[0].events.append(("done", number))
- step.Dummy.done(self)
-
-config_1 = """
-from buildbot import locks
-from buildbot.process import step, factory
-s = factory.s
-from buildbot.test.test_locks import LockStep
-
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'sekrit'), ('bot2', 'sekrit')]
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 0
-
-first_lock = locks.SlaveLock('first')
-second_lock = locks.MasterLock('second')
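-# 'first' is scoped to each slave, while 'second' is shared across the whole
-# buildmaster, so builds on different slaves still contend for it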
-f1 = factory.BuildFactory([s(LockStep, timeout=2, locks=[first_lock])])
-f2 = factory.BuildFactory([s(LockStep, timeout=3, locks=[second_lock])])
-f3 = factory.BuildFactory([s(LockStep, timeout=2, locks=[])])
-
-b1a = {'name': 'full1a', 'slavename': 'bot1', 'builddir': '1a', 'factory': f1}
-b1b = {'name': 'full1b', 'slavename': 'bot1', 'builddir': '1b', 'factory': f1}
-b1c = {'name': 'full1c', 'slavename': 'bot1', 'builddir': '1c', 'factory': f3,
- 'locks': [first_lock, second_lock]}
-b1d = {'name': 'full1d', 'slavename': 'bot1', 'builddir': '1d', 'factory': f2}
-b2a = {'name': 'full2a', 'slavename': 'bot2', 'builddir': '2a', 'factory': f1}
-b2b = {'name': 'full2b', 'slavename': 'bot2', 'builddir': '2b', 'factory': f3,
- 'locks': [second_lock]}
-c['builders'] = [b1a, b1b, b1c, b1d, b2a, b2b]
-"""
-
-config_1a = config_1 + \
-"""
-b1b = {'name': 'full1b', 'slavename': 'bot1', 'builddir': '1B', 'factory': f1}
-c['builders'] = [b1a, b1b, b1c, b1d, b2a, b2b]
-"""
-
-
-class Locks(RunMixin, unittest.TestCase):
- def setUp(self):
- RunMixin.setUp(self)
- self.req1 = req1 = BuildRequest("forced build", SourceStamp())
- req1.number = 1
- self.req2 = req2 = BuildRequest("forced build", SourceStamp())
- req2.number = 2
- self.req3 = req3 = BuildRequest("forced build", SourceStamp())
- req3.number = 3
- req1.events = req2.events = req3.events = self.events = []
- d = self.master.loadConfig(config_1)
- d.addCallback(lambda res: self.master.startService())
- d.addCallback(lambda res: self.connectSlaves(["bot1", "bot2"],
- ["full1a", "full1b",
- "full1c", "full1d",
- "full2a", "full2b"]))
- return maybeWait(d)
-
- def testLock1(self):
- self.control.getBuilder("full1a").requestBuild(self.req1)
- self.control.getBuilder("full1b").requestBuild(self.req2)
- d = defer.DeferredList([self.req1.waitUntilFinished(),
- self.req2.waitUntilFinished()])
- d.addCallback(self._testLock1_1)
- return maybeWait(d)
-
- def _testLock1_1(self, res):
- # full1a should complete its step before full1b starts it
- self.failUnlessEqual(self.events,
- [("start", 1), ("done", 1),
- ("start", 2), ("done", 2)])
-
- def testLock1a(self):
- # just like testLock1, but we reload the config file first, with a
- # change that causes full1b to be changed. This tickles a design bug
- # in which full1a and full1b wind up with distinct Lock instances.
- d = self.master.loadConfig(config_1a)
- d.addCallback(self._testLock1a_1)
- return maybeWait(d)
- def _testLock1a_1(self, res):
- self.control.getBuilder("full1a").requestBuild(self.req1)
- self.control.getBuilder("full1b").requestBuild(self.req2)
- d = defer.DeferredList([self.req1.waitUntilFinished(),
- self.req2.waitUntilFinished()])
- d.addCallback(self._testLock1a_2)
- return d
-
- def _testLock1a_2(self, res):
- # full1a should complete its step before full1b starts it
- self.failUnlessEqual(self.events,
- [("start", 1), ("done", 1),
- ("start", 2), ("done", 2)])
-
- def testLock2(self):
- # two builds run on separate slaves with slave-scoped locks should
- # not interfere
- self.control.getBuilder("full1a").requestBuild(self.req1)
- self.control.getBuilder("full2a").requestBuild(self.req2)
- d = defer.DeferredList([self.req1.waitUntilFinished(),
- self.req2.waitUntilFinished()])
- d.addCallback(self._testLock2_1)
- return maybeWait(d)
-
- def _testLock2_1(self, res):
- # full2a should start its step before full1a finishes it. They run on
- # different slaves, however, so they might start in either order.
- self.failUnless(self.events[:2] == [("start", 1), ("start", 2)] or
- self.events[:2] == [("start", 2), ("start", 1)])
-
- def testLock3(self):
- # two builds run on separate slaves with master-scoped locks should
- # not overlap
- self.control.getBuilder("full1c").requestBuild(self.req1)
- self.control.getBuilder("full2b").requestBuild(self.req2)
- d = defer.DeferredList([self.req1.waitUntilFinished(),
- self.req2.waitUntilFinished()])
- d.addCallback(self._testLock3_1)
- return maybeWait(d)
-
- def _testLock3_1(self, res):
- # full2b should not start until after full1c finishes. The builds run
- # on different slaves, so we can't really predict which will start
- # first. The important thing is that they don't overlap.
- self.failUnless(self.events == [("start", 1), ("done", 1),
- ("start", 2), ("done", 2)]
- or self.events == [("start", 2), ("done", 2),
- ("start", 1), ("done", 1)]
- )
-
- def testLock4(self):
- self.control.getBuilder("full1a").requestBuild(self.req1)
- self.control.getBuilder("full1c").requestBuild(self.req2)
- self.control.getBuilder("full1d").requestBuild(self.req3)
- d = defer.DeferredList([self.req1.waitUntilFinished(),
- self.req2.waitUntilFinished(),
- self.req3.waitUntilFinished()])
- d.addCallback(self._testLock4_1)
- return maybeWait(d)
-
- def _testLock4_1(self, res):
- # full1a starts, then full1d starts (because they do not interfere).
- # Once both are done, full1c can run.
- self.failUnlessEqual(self.events,
- [("start", 1), ("start", 3),
- ("done", 1), ("done", 3),
- ("start", 2), ("done", 2)])
-
diff --git a/buildbot/buildbot-source/buildbot/test/test_maildir.py b/buildbot/buildbot-source/buildbot/test/test_maildir.py
deleted file mode 100644
index 40819b9e6..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_maildir.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# -*- test-case-name: buildbot.test.test_maildir -*-
-
-from twisted.trial import unittest
-import os, shutil
-from buildbot.changes.mail import FCMaildirSource
-from twisted.internet import reactor
-from twisted.python import util
-
-class MaildirTest(unittest.TestCase):
- def setUp(self):
- print "creating empty maildir"
- self.maildir = "test-maildir"
- if os.path.isdir(self.maildir):
- shutil.rmtree(self.maildir)
- print "removing stale maildir"
- os.mkdir(self.maildir)
- os.mkdir(os.path.join(self.maildir, "cur"))
- os.mkdir(os.path.join(self.maildir, "new"))
- os.mkdir(os.path.join(self.maildir, "tmp"))
- self.source = None
- self.done = 0
-
- def tearDown(self):
- print "removing old maildir"
- shutil.rmtree(self.maildir)
- if self.source:
- self.source.stopService()
-
- def addChange(self, c):
- # NOTE: this assumes every message results in a Change, which isn't
- # true for msg8-prefix
- print "got change"
- self.changes.append(c)
-
- def deliverMail(self, msg):
- print "delivering", msg
- newdir = os.path.join(self.maildir, "new")
- # to do this right, use safecat
- shutil.copy(msg, newdir)
-
- def do_timeout(self):
- self.done = 1
-
- def testMaildir(self):
- self.changes = []
- s = self.source = FCMaildirSource(self.maildir)
- s.parent = self
- s.startService()
- testfiles_dir = util.sibpath(__file__, "mail")
- testfiles = [msg for msg in os.listdir(testfiles_dir)
- if msg.startswith("msg")]
- testfiles.sort()
- count = len(testfiles)
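-        # deliver one message every two seconds, and arrange for a timeout 15
-        # seconds after the last delivery in case some Changes never arrive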
- for i in range(count):
- msg = testfiles[i]
- reactor.callLater(2*i, self.deliverMail,
- os.path.join(testfiles_dir, msg))
- t = reactor.callLater(2*i + 15, self.do_timeout)
- while not (self.done or len(self.changes) == count):
- reactor.iterate(0.1)
- s.stopService()
- if self.done:
- return self.fail("timeout: messages weren't received on time")
- t.cancel()
- # TODO: verify the messages, should use code from test_mailparse but
- # I'm not sure how to factor the verification routines out in a
- # useful fashion
- #for i in range(count):
- # msg, check = test_messages[i]
- # check(self, self.changes[i])
-
-
-if __name__ == '__main__':
- suite = unittest.TestSuite()
- suite.addTestClass(MaildirTest)
- import sys
- reporter = unittest.TextReporter(sys.stdout)
- suite.run(reporter)
-
diff --git a/buildbot/buildbot-source/buildbot/test/test_mailparse.py b/buildbot/buildbot-source/buildbot/test/test_mailparse.py
deleted file mode 100644
index 4bb660477..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_mailparse.py
+++ /dev/null
@@ -1,248 +0,0 @@
-# -*- test-case-name: buildbot.test.test_mailparse -*-
-
-import os.path
-from twisted.trial import unittest
-from twisted.python import util
-from buildbot.changes.mail import parseFreshCVSMail, parseSyncmail
-
-class Test1(unittest.TestCase):
-
- def get(self, msg):
- msg = util.sibpath(__file__, msg)
- return parseFreshCVSMail(None, open(msg, "r"))
-
- def testMsg1(self):
- c = self.get("mail/msg1")
- self.assertEqual(c.who, "moshez")
- self.assertEqual(c.files, ["Twisted/debian/python-twisted.menu.in"])
- self.assertEqual(c.comments, "Instance massenger, apparently\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg2(self):
- c = self.get("mail/msg2")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["Twisted/twisted/web/woven/form.py",
- "Twisted/twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg3(self):
- # same as msg2 but missing the ViewCVS section
- c = self.get("mail/msg3")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["Twisted/twisted/web/woven/form.py",
- "Twisted/twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg4(self):
- # same as msg3 but also missing CVS patch section
- c = self.get("mail/msg4")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["Twisted/twisted/web/woven/form.py",
- "Twisted/twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg5(self):
- # creates a directory
- c = self.get("mail/msg5")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, ["Twisted/doc/examples/cocoaDemo"])
- self.assertEqual(c.comments,
- "Directory /cvs/Twisted/doc/examples/cocoaDemo added to the repository\n")
- self.assertEqual(c.isdir, 1)
-
- def testMsg6(self):
- # adds files
- c = self.get("mail/msg6")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, [
- "Twisted/doc/examples/cocoaDemo/MyAppDelegate.py",
- "Twisted/doc/examples/cocoaDemo/__main__.py",
- "Twisted/doc/examples/cocoaDemo/bin-python-main.m",
- "Twisted/doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
- "Twisted/doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"])
- self.assertEqual(c.comments,
- "Cocoa (OS X) clone of the QT demo, using polling reactor\n\nRequires pyobjc ( http://pyobjc.sourceforge.net ), it's not much different than the template project. The reactor is iterated periodically by a repeating NSTimer.\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg7(self):
- # deletes files
- c = self.get("mail/msg7")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, [
- "Twisted/doc/examples/cocoaDemo/MyAppDelegate.py",
- "Twisted/doc/examples/cocoaDemo/__main__.py",
- "Twisted/doc/examples/cocoaDemo/bin-python-main.m",
- "Twisted/doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
- "Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
- "Twisted/doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"])
- self.assertEqual(c.comments,
- "Directories break debian build script, waiting for reasonable fix\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg8(self):
- # files outside Twisted/
- c = self.get("mail/msg8")
- self.assertEqual(c.who, "acapnotic")
- self.assertEqual(c.files, [ "CVSROOT/freshCfg" ])
- self.assertEqual(c.comments, "it doesn't work with invalid syntax\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg9(self):
- # also creates a directory
- c = self.get("mail/msg9")
- self.assertEqual(c.who, "exarkun")
- self.assertEqual(c.files, ["Twisted/sandbox/exarkun/persist-plugin"])
- self.assertEqual(c.comments,
- "Directory /cvs/Twisted/sandbox/exarkun/persist-plugin added to the repository\n")
- self.assertEqual(c.isdir, 1)
-
-
-class Test2(unittest.TestCase):
- def get(self, msg):
- msg = util.sibpath(__file__, msg)
- return parseFreshCVSMail(None, open(msg, "r"), prefix="Twisted")
-
- def testMsg1p(self):
- c = self.get("mail/msg1")
- self.assertEqual(c.who, "moshez")
- self.assertEqual(c.files, ["debian/python-twisted.menu.in"])
- self.assertEqual(c.comments, "Instance massenger, apparently\n")
-
- def testMsg2p(self):
- c = self.get("mail/msg2")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["twisted/web/woven/form.py",
- "twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
-
- def testMsg3p(self):
- # same as msg2 but missing the ViewCVS section
- c = self.get("mail/msg3")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["twisted/web/woven/form.py",
- "twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
-
- def testMsg4p(self):
- # same as msg3 but also missing CVS patch section
- c = self.get("mail/msg4")
- self.assertEqual(c.who, "itamarst")
- self.assertEqual(c.files, ["twisted/web/woven/form.py",
- "twisted/python/formmethod.py"])
- self.assertEqual(c.comments,
- "submit formmethod now subclass of Choice\n")
-
- def testMsg5p(self):
- # creates a directory
- c = self.get("mail/msg5")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, ["doc/examples/cocoaDemo"])
- self.assertEqual(c.comments,
- "Directory /cvs/Twisted/doc/examples/cocoaDemo added to the repository\n")
- self.assertEqual(c.isdir, 1)
-
- def testMsg6p(self):
- # adds files
- c = self.get("mail/msg6")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, [
- "doc/examples/cocoaDemo/MyAppDelegate.py",
- "doc/examples/cocoaDemo/__main__.py",
- "doc/examples/cocoaDemo/bin-python-main.m",
- "doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
- "doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"])
- self.assertEqual(c.comments,
- "Cocoa (OS X) clone of the QT demo, using polling reactor\n\nRequires pyobjc ( http://pyobjc.sourceforge.net ), it's not much different than the template project. The reactor is iterated periodically by a repeating NSTimer.\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg7p(self):
- # deletes files
- c = self.get("mail/msg7")
- self.assertEqual(c.who, "etrepum")
- self.assertEqual(c.files, [
- "doc/examples/cocoaDemo/MyAppDelegate.py",
- "doc/examples/cocoaDemo/__main__.py",
- "doc/examples/cocoaDemo/bin-python-main.m",
- "doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
- "doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
- "doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"])
- self.assertEqual(c.comments,
- "Directories break debian build script, waiting for reasonable fix\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsg8p(self):
- # files outside Twisted/
- c = self.get("mail/msg8")
- self.assertEqual(c, None)
-
-
-class Test3(unittest.TestCase):
- def get(self, msg):
- msg = util.sibpath(__file__, msg)
- return parseSyncmail(None, open(msg, "r"), prefix="buildbot")
-
- def getNoPrefix(self, msg):
- msg = util.sibpath(__file__, msg)
- return parseSyncmail(None, open(msg, "r"))
-
- def testMsgS1(self):
- c = self.get("mail/syncmail.1")
- self.failUnless(c is not None)
- self.assertEqual(c.who, "warner")
- self.assertEqual(c.files, ["buildbot/changes/freshcvsmail.py"])
- self.assertEqual(c.comments,
- "remove leftover code, leave a temporary compatibility import. Note! Start\nimporting FCMaildirSource from changes.mail instead of changes.freshcvsmail\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsgS2(self):
- c = self.get("mail/syncmail.2")
- self.assertEqual(c.who, "warner")
- self.assertEqual(c.files, ["ChangeLog"])
- self.assertEqual(c.comments, "\t* NEWS: started adding new features\n")
- self.assertEqual(c.isdir, 0)
-
- def testMsgS3(self):
- c = self.get("mail/syncmail.3")
- self.failUnless(c == None)
-
- def testMsgS4(self):
- c = self.get("mail/syncmail.4")
- self.assertEqual(c.who, "warner")
- self.assertEqual(c.files, ["test/mail/syncmail.1",
- "test/mail/syncmail.2",
- "test/mail/syncmail.3"
- ])
- self.assertEqual(c.comments, "test cases for syncmail parser\n")
- self.assertEqual(c.isdir, 0)
- self.assertEqual(c.branch, None)
-
- # tests a tag
- def testMsgS5(self):
- c = self.getNoPrefix("mail/syncmail.5")
- self.failUnless(c)
- self.assertEqual(c.who, "thomas")
- self.assertEqual(c.files, ['test1/MANIFEST',
- 'test1/Makefile.am',
- 'test1/autogen.sh',
- 'test1/configure.in'
- ])
- self.assertEqual(c.branch, "BRANCH-DEVEL")
- self.assertEqual(c.isdir, 0)
diff --git a/buildbot/buildbot-source/buildbot/test/test_properties.py b/buildbot/buildbot-source/buildbot/test/test_properties.py
deleted file mode 100644
index 1c8560b03..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_properties.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# -*- test-case-name: buildbot.test.test_properties -*-
-
-import os
-
-from twisted.trial import unittest
-
-from buildbot.twcompat import maybeWait
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process import base
-from buildbot.process.step import ShellCommand, WithProperties
-from buildbot.status import builder
-from buildbot.slave.commands import rmdirRecursive
-from buildbot.test.runutils import RunMixin
-
-class MyBuildStep(ShellCommand):
- def _interpolateProperties(self, command):
- command = ["tar", "czf",
- "build-%s.tar.gz" % self.getProperty("revision"),
- "source"]
- return ShellCommand._interpolateProperties(self, command)
-
-
-class FakeBuild:
- pass
-class FakeBuilder:
- statusbag = None
- name = "fakebuilder"
-class FakeSlave:
- slavename = "bot12"
-class FakeSlaveBuilder:
- slave = FakeSlave()
- def getSlaveCommandVersion(self, command, oldversion=None):
- return "1.10"
-
-class Interpolate(unittest.TestCase):
- def setUp(self):
- self.builder = FakeBuilder()
- self.builder_status = builder.BuilderStatus("fakebuilder")
- self.builder_status.basedir = "test_properties"
- self.builder_status.nextBuildNumber = 5
- rmdirRecursive(self.builder_status.basedir)
- os.mkdir(self.builder_status.basedir)
- self.build_status = self.builder_status.newBuild()
- req = base.BuildRequest("reason", SourceStamp(branch="branch2",
- revision=1234))
- self.build = base.Build([req])
- self.build.setBuilder(self.builder)
- self.build.setupStatus(self.build_status)
- self.build.setupSlaveBuilder(FakeSlaveBuilder())
-
- def testWithProperties(self):
- self.build.setProperty("revision", 47)
- self.failUnlessEqual(self.build_status.getProperty("revision"), 47)
- c = ShellCommand(workdir=dir, build=self.build,
- command=["tar", "czf",
- WithProperties("build-%s.tar.gz",
- "revision"),
- "source"])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["tar", "czf", "build-47.tar.gz", "source"])
-
- def testWithPropertiesDict(self):
- self.build.setProperty("other", "foo")
- self.build.setProperty("missing", None)
- c = ShellCommand(workdir=dir, build=self.build,
- command=["tar", "czf",
- WithProperties("build-%(other)s.tar.gz"),
- "source"])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["tar", "czf", "build-foo.tar.gz", "source"])
-
- def testWithPropertiesEmpty(self):
- self.build.setProperty("empty", None)
- c = ShellCommand(workdir=dir, build=self.build,
- command=["tar", "czf",
- WithProperties("build-%(empty)s.tar.gz"),
- "source"])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["tar", "czf", "build-.tar.gz", "source"])
-
- def testCustomBuildStep(self):
- c = MyBuildStep(workdir=dir, build=self.build)
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["tar", "czf", "build-1234.tar.gz", "source"])
-
- def testSourceStamp(self):
- c = ShellCommand(workdir=dir, build=self.build,
- command=["touch",
- WithProperties("%s-dir", "branch"),
- WithProperties("%s-rev", "revision"),
- ])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["touch", "branch2-dir", "1234-rev"])
-
- def testSlaveName(self):
- c = ShellCommand(workdir=dir, build=self.build,
- command=["touch",
- WithProperties("%s-slave", "slavename"),
- ])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["touch", "bot12-slave"])
-
- def testBuildNumber(self):
- c = ShellCommand(workdir=dir, build=self.build,
- command=["touch",
- WithProperties("build-%d", "buildnumber"),
- WithProperties("builder-%s", "buildername"),
- ])
- cmd = c._interpolateProperties(c.command)
- self.failUnlessEqual(cmd,
- ["touch", "build-5", "builder-fakebuilder"])
-
-
-run_config = """
-from buildbot.process import step, factory
-from buildbot.process.step import ShellCommand, WithProperties
-s = factory.s
-
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'sekrit')]
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 0
-
-f1 = factory.BuildFactory([s(step.ShellCommand,
- command=['touch',
- WithProperties('%s-slave', 'slavename'),
- ])])
-
-b1 = {'name': 'full1', 'slavename': 'bot1', 'builddir': 'bd1', 'factory': f1}
-c['builders'] = [b1]
-
-"""
-
-class Run(RunMixin, unittest.TestCase):
- def testInterpolate(self):
- # run an actual build with a step that interpolates a build property
- d = self.master.loadConfig(run_config)
- d.addCallback(lambda res: self.master.startService())
- d.addCallback(lambda res: self.connectOneSlave("bot1"))
- d.addCallback(lambda res: self.requestBuild("full1"))
- d.addCallback(self.failUnlessBuildSucceeded)
- return maybeWait(d)
-
-
-# we test got_revision in test_vc
diff --git a/buildbot/buildbot-source/buildbot/test/test_run.py b/buildbot/buildbot-source/buildbot/test/test_run.py
deleted file mode 100644
index dc1bcf99a..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_run.py
+++ /dev/null
@@ -1,524 +0,0 @@
-# -*- test-case-name: buildbot.test.test_run -*-
-
-from twisted.trial import unittest
-from twisted.internet import reactor, defer
-from twisted.python import log
-import sys, os, os.path, shutil, time, errno
-#log.startLogging(sys.stderr)
-
-from buildbot import master, interfaces
-from buildbot.sourcestamp import SourceStamp
-from buildbot.slave import bot
-from buildbot.changes import changes
-from buildbot.status import builder
-from buildbot.process.base import BuildRequest
-from buildbot.twcompat import maybeWait
-
-from buildbot.test.runutils import RunMixin
-
-config_base = """
-from buildbot.process import factory, step
-s = factory.s
-
-f1 = factory.QuickBuildFactory('fakerep', 'cvsmodule', configure=None)
-
-f2 = factory.BuildFactory([
- s(step.Dummy, timeout=1),
- s(step.RemoteDummy, timeout=2),
- ])
-
-BuildmasterConfig = c = {}
-c['bots'] = [['bot1', 'sekrit']]
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = []
-c['builders'].append({'name':'quick', 'slavename':'bot1',
- 'builddir': 'quickdir', 'factory': f1})
-c['slavePortnum'] = 0
-"""
-
-config_run = config_base + """
-from buildbot.scheduler import Scheduler
-c['schedulers'] = [Scheduler('quick', None, 120, ['quick'])]
-"""
-
-config_2 = config_base + """
-c['builders'] = [{'name': 'dummy', 'slavename': 'bot1',
- 'builddir': 'dummy1', 'factory': f2},
- {'name': 'testdummy', 'slavename': 'bot1',
- 'builddir': 'dummy2', 'factory': f2, 'category': 'test'}]
-"""
-
-config_3 = config_2 + """
-c['builders'].append({'name': 'adummy', 'slavename': 'bot1',
- 'builddir': 'adummy3', 'factory': f2})
-c['builders'].append({'name': 'bdummy', 'slavename': 'bot1',
- 'builddir': 'adummy4', 'factory': f2,
- 'category': 'test'})
-"""
-
-config_4 = config_base + """
-c['builders'] = [{'name': 'dummy', 'slavename': 'bot1',
- 'builddir': 'dummy', 'factory': f2}]
-"""
-
-config_4_newbasedir = config_4 + """
-c['builders'] = [{'name': 'dummy', 'slavename': 'bot1',
- 'builddir': 'dummy2', 'factory': f2}]
-"""
-
-config_4_newbuilder = config_4_newbasedir + """
-c['builders'].append({'name': 'dummy2', 'slavename': 'bot1',
- 'builddir': 'dummy23', 'factory': f2})
-"""
-
-class Run(unittest.TestCase):
- def rmtree(self, d):
- try:
- shutil.rmtree(d, ignore_errors=1)
- except OSError, e:
- # stupid 2.2 appears to ignore ignore_errors
- if e.errno != errno.ENOENT:
- raise
-
- def testMaster(self):
- self.rmtree("basedir")
- os.mkdir("basedir")
- m = master.BuildMaster("basedir")
- m.loadConfig(config_run)
- m.readConfig = True
- m.startService()
- cm = m.change_svc
- c = changes.Change("bob", ["Makefile", "foo/bar.c"], "changed stuff")
- cm.addChange(c)
- # verify that the Scheduler is now waiting
- s = m.allSchedulers()[0]
- self.failUnless(s.timer)
- # halting the service will also stop the timer
- d = defer.maybeDeferred(m.stopService)
- return maybeWait(d)
-
-class Ping(RunMixin, unittest.TestCase):
- def testPing(self):
- self.master.loadConfig(config_2)
- self.master.readConfig = True
- self.master.startService()
-
- d = self.connectSlave()
- d.addCallback(self._testPing_1)
- return maybeWait(d)
-
- def _testPing_1(self, res):
- d = interfaces.IControl(self.master).getBuilder("dummy").ping(1)
- d.addCallback(self._testPing_2)
- return d
-
- def _testPing_2(self, res):
- pass
-
-class BuilderNames(unittest.TestCase):
-
- def testGetBuilderNames(self):
- os.mkdir("bnames")
- m = master.BuildMaster("bnames")
- s = m.getStatus()
-
- m.loadConfig(config_3)
- m.readConfig = True
-
- self.failUnlessEqual(s.getBuilderNames(),
- ["dummy", "testdummy", "adummy", "bdummy"])
- self.failUnlessEqual(s.getBuilderNames(categories=['test']),
- ["testdummy", "bdummy"])
-
-class Disconnect(RunMixin, unittest.TestCase):
-
- def setUp(self):
- RunMixin.setUp(self)
-
- # verify that disconnecting the slave during a build properly
- # terminates the build
- m = self.master
- s = self.status
- c = self.control
-
- m.loadConfig(config_2)
- m.readConfig = True
- m.startService()
-
- self.failUnlessEqual(s.getBuilderNames(), ["dummy", "testdummy"])
- self.s1 = s1 = s.getBuilder("dummy")
- self.failUnlessEqual(s1.getName(), "dummy")
- self.failUnlessEqual(s1.getState(), ("offline", []))
- self.failUnlessEqual(s1.getCurrentBuilds(), [])
- self.failUnlessEqual(s1.getLastFinishedBuild(), None)
- self.failUnlessEqual(s1.getBuild(-1), None)
-
- d = self.connectSlave()
- d.addCallback(self._disconnectSetup_1)
- return maybeWait(d)
-
- def _disconnectSetup_1(self, res):
- self.failUnlessEqual(self.s1.getState(), ("idle", []))
-
-
- def verifyDisconnect(self, bs):
- self.failUnless(bs.isFinished())
-
- step1 = bs.getSteps()[0]
- self.failUnlessEqual(step1.getText(), ["delay", "interrupted"])
- self.failUnlessEqual(step1.getResults()[0], builder.FAILURE)
-
- self.failUnlessEqual(bs.getResults(), builder.FAILURE)
-
- def verifyDisconnect2(self, bs):
- self.failUnless(bs.isFinished())
-
- step1 = bs.getSteps()[1]
- self.failUnlessEqual(step1.getText(), ["remote", "delay", "2 secs",
- "failed", "slave", "lost"])
- self.failUnlessEqual(step1.getResults()[0], builder.FAILURE)
-
- self.failUnlessEqual(bs.getResults(), builder.FAILURE)
-
-
- def testIdle1(self):
- # disconnect the slave before the build starts
- d = self.shutdownAllSlaves() # dies before it gets started
- d.addCallback(self._testIdle1_1)
- return d
- def _testIdle1_1(self, res):
- # trying to force a build now will cause an error. Regular builds
- # just wait for the slave to re-appear, but forced builds that
- # cannot be run right away trigger NoSlaveErrors
- fb = self.control.getBuilder("dummy").forceBuild
- self.failUnlessRaises(interfaces.NoSlaveError,
- fb, None, "forced build")
-
- def testIdle2(self):
- # now suppose the slave goes missing
- self.slaves['bot1'].bf.continueTrying = 0
- self.disappearSlave()
-
-        # forcing a build will work: the build will detect that the slave is
-        # no longer available and will be re-queued. Wait 5 seconds, then check
- # to make sure the build is still in the 'waiting for a slave' queue.
- self.control.getBuilder("dummy").original.START_BUILD_TIMEOUT = 1
- req = BuildRequest("forced build", SourceStamp())
- self.failUnlessEqual(req.startCount, 0)
- self.control.getBuilder("dummy").requestBuild(req)
- # this should ping the slave, which doesn't respond, and then give up
- # after a second. The BuildRequest will be re-queued, and its
- # .startCount will be incremented.
- d = defer.Deferred()
- d.addCallback(self._testIdle2_1, req)
- reactor.callLater(3, d.callback, None)
- return maybeWait(d, 5)
- testIdle2.timeout = 5
-
- def _testIdle2_1(self, res, req):
- self.failUnlessEqual(req.startCount, 1)
- cancelled = req.cancel()
- self.failUnless(cancelled)
-
-
- def testBuild1(self):
- # this next sequence is timing-dependent. The dummy build takes at
- # least 3 seconds to complete, and this batch of commands must
- # complete within that time.
- #
- d = self.control.getBuilder("dummy").forceBuild(None, "forced build")
- d.addCallback(self._testBuild1_1)
- return maybeWait(d)
-
- def _testBuild1_1(self, bc):
- bs = bc.getStatus()
- # now kill the slave before it gets to start the first step
- d = self.shutdownAllSlaves() # dies before it gets started
- d.addCallback(self._testBuild1_2, bs)
- return d # TODO: this used to have a 5-second timeout
-
- def _testBuild1_2(self, res, bs):
- # now examine the just-stopped build and make sure it is really
- # stopped. This is checking for bugs in which the slave-detach gets
- # missed or causes an exception which prevents the build from being
- # marked as "finished due to an error".
- d = bs.waitUntilFinished()
- d2 = self.master.botmaster.waitUntilBuilderDetached("dummy")
- dl = defer.DeferredList([d, d2])
- dl.addCallback(self._testBuild1_3, bs)
- return dl # TODO: this had a 5-second timeout too
-
- def _testBuild1_3(self, res, bs):
- self.failUnlessEqual(self.s1.getState()[0], "offline")
- self.verifyDisconnect(bs)
-
-
- def testBuild2(self):
- # this next sequence is timing-dependent
- d = self.control.getBuilder("dummy").forceBuild(None, "forced build")
-        d.addCallback(self._testBuild2_1)
- return maybeWait(d, 30)
- testBuild2.timeout = 30
-
-    def _testBuild2_1(self, bc):
- bs = bc.getStatus()
- # shutdown the slave while it's running the first step
- reactor.callLater(0.5, self.shutdownAllSlaves)
-
- d = bs.waitUntilFinished()
- d.addCallback(self._testBuild2_2, bs)
- return d
-
- def _testBuild2_2(self, res, bs):
- # we hit here when the build has finished. The builder is still being
- # torn down, however, so spin for another second to allow the
- # callLater(0) in Builder.detached to fire.
- d = defer.Deferred()
- reactor.callLater(1, d.callback, None)
- d.addCallback(self._testBuild2_3, bs)
- return d
-
- def _testBuild2_3(self, res, bs):
- self.failUnlessEqual(self.s1.getState()[0], "offline")
- self.verifyDisconnect(bs)
-
-
- def testBuild3(self):
- # this next sequence is timing-dependent
- d = self.control.getBuilder("dummy").forceBuild(None, "forced build")
- d.addCallback(self._testBuild3_1)
- return maybeWait(d, 30)
- testBuild3.timeout = 30
-
- def _testBuild3_1(self, bc):
- bs = bc.getStatus()
- # kill the slave while it's running the first step
- reactor.callLater(0.5, self.killSlave)
- d = bs.waitUntilFinished()
- d.addCallback(self._testBuild3_2, bs)
- return d
-
- def _testBuild3_2(self, res, bs):
- # the builder is still being torn down, so give it another second
- d = defer.Deferred()
- reactor.callLater(1, d.callback, None)
- d.addCallback(self._testBuild3_3, bs)
- return d
-
- def _testBuild3_3(self, res, bs):
- self.failUnlessEqual(self.s1.getState()[0], "offline")
- self.verifyDisconnect(bs)
-
-
- def testBuild4(self):
- # this next sequence is timing-dependent
- d = self.control.getBuilder("dummy").forceBuild(None, "forced build")
- d.addCallback(self._testBuild4_1)
- return maybeWait(d, 30)
- testBuild4.timeout = 30
-
- def _testBuild4_1(self, bc):
- bs = bc.getStatus()
- # kill the slave while it's running the second (remote) step
- reactor.callLater(1.5, self.killSlave)
- d = bs.waitUntilFinished()
- d.addCallback(self._testBuild4_2, bs)
- return d
-
- def _testBuild4_2(self, res, bs):
- # at this point, the slave is in the process of being removed, so it
- # could either be 'idle' or 'offline'. I think there is a
- # reactor.callLater(0) standing between here and the offline state.
- #reactor.iterate() # TODO: remove the need for this
-
- self.failUnlessEqual(self.s1.getState()[0], "offline")
- self.verifyDisconnect2(bs)
-
-
- def testInterrupt(self):
- # this next sequence is timing-dependent
- d = self.control.getBuilder("dummy").forceBuild(None, "forced build")
- d.addCallback(self._testInterrupt_1)
- return maybeWait(d, 30)
- testInterrupt.timeout = 30
-
- def _testInterrupt_1(self, bc):
- bs = bc.getStatus()
- # halt the build while it's running the first step
- reactor.callLater(0.5, bc.stopBuild, "bang go splat")
- d = bs.waitUntilFinished()
- d.addCallback(self._testInterrupt_2, bs)
- return d
-
- def _testInterrupt_2(self, res, bs):
- self.verifyDisconnect(bs)
-
-
- def testDisappear(self):
- bc = self.control.getBuilder("dummy")
-
- # ping should succeed
- d = bc.ping(1)
- d.addCallback(self._testDisappear_1, bc)
- return maybeWait(d)
-
- def _testDisappear_1(self, res, bc):
- self.failUnlessEqual(res, True)
-
- # now, before any build is run, make the slave disappear
- self.slaves['bot1'].bf.continueTrying = 0
- self.disappearSlave()
-
-        # at this point, a ping to the slave should time out
-        d = bc.ping(1)
-        d.addCallback(self._testDisappear_2)
- return d
- def _testDisappear_2(self, res):
- self.failUnlessEqual(res, False)
-
- def testDuplicate(self):
- bc = self.control.getBuilder("dummy")
- bs = self.status.getBuilder("dummy")
- ss = bs.getSlaves()[0]
-
- self.failUnless(ss.isConnected())
- self.failUnlessEqual(ss.getAdmin(), "one")
-
- # now, before any build is run, make the first slave disappear
- self.slaves['bot1'].bf.continueTrying = 0
- self.disappearSlave()
-
- d = self.master.botmaster.waitUntilBuilderDetached("dummy")
- # now let the new slave take over
- self.connectSlave2()
- d.addCallback(self._testDuplicate_1, ss)
- return maybeWait(d, 2)
- testDuplicate.timeout = 5
-
- def _testDuplicate_1(self, res, ss):
- d = self.master.botmaster.waitUntilBuilderAttached("dummy")
- d.addCallback(self._testDuplicate_2, ss)
- return d
-
- def _testDuplicate_2(self, res, ss):
- self.failUnless(ss.isConnected())
- self.failUnlessEqual(ss.getAdmin(), "two")
-
-
-class Disconnect2(RunMixin, unittest.TestCase):
-
- def setUp(self):
- RunMixin.setUp(self)
- # verify that disconnecting the slave during a build properly
- # terminates the build
- m = self.master
- s = self.status
- c = self.control
-
- m.loadConfig(config_2)
- m.readConfig = True
- m.startService()
-
- self.failUnlessEqual(s.getBuilderNames(), ["dummy", "testdummy"])
- self.s1 = s1 = s.getBuilder("dummy")
- self.failUnlessEqual(s1.getName(), "dummy")
- self.failUnlessEqual(s1.getState(), ("offline", []))
- self.failUnlessEqual(s1.getCurrentBuilds(), [])
- self.failUnlessEqual(s1.getLastFinishedBuild(), None)
- self.failUnlessEqual(s1.getBuild(-1), None)
-
- d = self.connectSlaveFastTimeout()
- d.addCallback(self._setup_disconnect2_1)
- return maybeWait(d)
-
- def _setup_disconnect2_1(self, res):
- self.failUnlessEqual(self.s1.getState(), ("idle", []))
-
-
- def testSlaveTimeout(self):
- # now suppose the slave goes missing. We want to find out when it
- # creates a new Broker, so we reach inside and mark it with the
- # well-known sigil of impending messy death.
- bd = self.slaves['bot1'].getServiceNamed("bot").builders["dummy"]
- broker = bd.remote.broker
- broker.redshirt = 1
-
- # make sure the keepalives will keep the connection up
- d = defer.Deferred()
- reactor.callLater(5, d.callback, None)
- d.addCallback(self._testSlaveTimeout_1)
- return maybeWait(d, 20)
- testSlaveTimeout.timeout = 20
-
- def _testSlaveTimeout_1(self, res):
- bd = self.slaves['bot1'].getServiceNamed("bot").builders["dummy"]
- if not bd.remote or not hasattr(bd.remote.broker, "redshirt"):
- self.fail("slave disconnected when it shouldn't have")
-
- d = self.master.botmaster.waitUntilBuilderDetached("dummy")
- # whoops! how careless of me.
- self.disappearSlave()
- # the slave will realize the connection is lost within 2 seconds, and
- # reconnect.
- d.addCallback(self._testSlaveTimeout_2)
- return d
-
- def _testSlaveTimeout_2(self, res):
- # the ReconnectingPBClientFactory will attempt a reconnect in two
- # seconds.
- d = self.master.botmaster.waitUntilBuilderAttached("dummy")
- d.addCallback(self._testSlaveTimeout_3)
- return d
-
- def _testSlaveTimeout_3(self, res):
- # make sure it is a new connection (i.e. a new Broker)
- bd = self.slaves['bot1'].getServiceNamed("bot").builders["dummy"]
- self.failUnless(bd.remote, "hey, slave isn't really connected")
- self.failIf(hasattr(bd.remote.broker, "redshirt"),
- "hey, slave's Broker is still marked for death")
-
-
-class Basedir(RunMixin, unittest.TestCase):
- def testChangeBuilddir(self):
- m = self.master
- m.loadConfig(config_4)
- m.readConfig = True
- m.startService()
-
- d = self.connectSlave()
- d.addCallback(self._testChangeBuilddir_1)
- return maybeWait(d)
-
- def _testChangeBuilddir_1(self, res):
- self.bot = bot = self.slaves['bot1'].bot
- self.builder = builder = bot.builders.get("dummy")
- self.failUnless(builder)
- self.failUnlessEqual(builder.builddir, "dummy")
- self.failUnlessEqual(builder.basedir,
- os.path.join("slavebase-bot1", "dummy"))
-
- d = self.master.loadConfig(config_4_newbasedir)
- d.addCallback(self._testChangeBuilddir_2)
- return d
-
- def _testChangeBuilddir_2(self, res):
- bot = self.bot
- # this causes the builder to be replaced
- self.failIfIdentical(self.builder, bot.builders.get("dummy"))
- builder = bot.builders.get("dummy")
- self.failUnless(builder)
- # the basedir should be updated
- self.failUnlessEqual(builder.builddir, "dummy2")
- self.failUnlessEqual(builder.basedir,
- os.path.join("slavebase-bot1", "dummy2"))
-
- # add a new builder, which causes the basedir list to be reloaded
- d = self.master.loadConfig(config_4_newbuilder)
- return d
-
-# TODO: test everything, from Change submission to Scheduler to Build to
-# Status. Use all the status types. Specifically I want to catch recurrences
-# of the bug where I forgot to make Waterfall inherit from StatusReceiver
-# such that buildSetSubmitted failed.
-
diff --git a/buildbot/buildbot-source/buildbot/test/test_runner.py b/buildbot/buildbot-source/buildbot/test/test_runner.py
deleted file mode 100644
index f82e33fb5..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_runner.py
+++ /dev/null
@@ -1,299 +0,0 @@
-
-# this file tests the 'buildbot' command, with its various sub-commands
-
-from twisted.trial import unittest
-from twisted.python import runtime, usage
-import os, os.path, shutil, shlex
-
-from buildbot.scripts import runner, tryclient
-
-class Options(unittest.TestCase):
- optionsFile = "SDFsfsFSdfsfsFSD"
-
- def make(self, d, key):
- # we use a wacky filename here in case the test code discovers the
- # user's real ~/.buildbot/ directory
- os.makedirs(os.sep.join(d + [".buildbot"]))
- f = open(os.sep.join(d + [".buildbot", self.optionsFile]), "w")
- f.write("key = '%s'\n" % key)
- f.close()
-
- def check(self, d, key):
- basedir = os.sep.join(d)
- options = runner.loadOptions(self.optionsFile, here=basedir,
- home=self.home)
- if key is None:
- self.failIf(options.has_key('key'))
- else:
- self.failUnlessEqual(options['key'], key)
-
- def testFindOptions(self):
- self.make(["home", "dir1", "dir2", "dir3"], "one")
- self.make(["home", "dir1", "dir2"], "two")
- self.make(["home"], "home")
- self.home = os.path.abspath("home")
-
- self.check(["home", "dir1", "dir2", "dir3"], "one")
- self.check(["home", "dir1", "dir2"], "two")
- self.check(["home", "dir1"], "home")
-
- self.home = os.path.abspath("nothome")
- os.makedirs(os.sep.join(["nothome", "dir1"]))
- self.check(["nothome", "dir1"], None)
-
- def doForce(self, args, expected):
- o = runner.ForceOptions()
- o.parseOptions(args)
- self.failUnlessEqual(o.keys(), expected.keys())
- for k in o.keys():
- self.failUnlessEqual(o[k], expected[k],
- "[%s] got %s instead of %s" % (k, o[k],
- expected[k]))
-
- def testForceOptions(self):
- if not hasattr(shlex, "split"):
- raise unittest.SkipTest("need python>=2.3 for shlex.split")
-
- exp = {"builder": "b1", "reason": "reason",
- "branch": None, "revision": None}
- self.doForce(shlex.split("b1 reason"), exp)
- self.doForce(shlex.split("b1 'reason'"), exp)
- self.failUnlessRaises(usage.UsageError, self.doForce,
- shlex.split("--builder b1 'reason'"), exp)
- self.doForce(shlex.split("--builder b1 --reason reason"), exp)
- self.doForce(shlex.split("--builder b1 --reason 'reason'"), exp)
- self.doForce(shlex.split("--builder b1 --reason \"reason\""), exp)
-
- exp['reason'] = "longer reason"
- self.doForce(shlex.split("b1 'longer reason'"), exp)
- self.doForce(shlex.split("b1 longer reason"), exp)
- self.doForce(shlex.split("--reason 'longer reason' b1"), exp)
-
-
-class Create(unittest.TestCase):
- def failUnlessIn(self, substring, string, msg=None):
- # trial provides a version of this that requires python-2.3 to test
- # strings.
- self.failUnless(string.find(substring) != -1, msg)
- def failUnlessExists(self, filename):
- self.failUnless(os.path.exists(filename), "%s should exist" % filename)
- def failIfExists(self, filename):
- self.failIf(os.path.exists(filename), "%s should not exist" % filename)
-
- def testMaster(self):
- basedir = "test_runner.master"
- options = runner.MasterOptions()
- options.parseOptions(["-q", basedir])
- cwd = os.getcwd()
- runner.createMaster(options)
- os.chdir(cwd)
-
- tac = os.path.join(basedir, "buildbot.tac")
- self.failUnless(os.path.exists(tac))
- tacfile = open(tac,"rt").read()
- self.failUnlessIn("basedir", tacfile)
- self.failUnlessIn("configfile = r'master.cfg'", tacfile)
- self.failUnlessIn("BuildMaster(basedir, configfile)", tacfile)
-
- cfg = os.path.join(basedir, "master.cfg")
- self.failIfExists(cfg)
- samplecfg = os.path.join(basedir, "master.cfg.sample")
- self.failUnlessExists(samplecfg)
- cfgfile = open(samplecfg,"rt").read()
- self.failUnlessIn("This is a sample buildmaster config file", cfgfile)
-
- makefile = os.path.join(basedir, "Makefile.sample")
- self.failUnlessExists(makefile)
-
- # now verify that running it a second time (with the same options)
- # does the right thing: nothing changes
- runner.createMaster(options)
- os.chdir(cwd)
-
- self.failIfExists(os.path.join(basedir, "buildbot.tac.new"))
- self.failUnlessExists(os.path.join(basedir, "master.cfg.sample"))
-
- oldtac = open(os.path.join(basedir, "buildbot.tac"), "rt").read()
-
- # mutate Makefile.sample, since it should be rewritten
- f = open(os.path.join(basedir, "Makefile.sample"), "rt")
- oldmake = f.read()
- f = open(os.path.join(basedir, "Makefile.sample"), "wt")
- f.write(oldmake)
- f.write("# additional line added\n")
- f.close()
-
- # also mutate master.cfg.sample
- f = open(os.path.join(basedir, "master.cfg.sample"), "rt")
- oldsamplecfg = f.read()
- f = open(os.path.join(basedir, "master.cfg.sample"), "wt")
- f.write(oldsamplecfg)
- f.write("# additional line added\n")
- f.close()
-
- # now run it again (with different options)
- options = runner.MasterOptions()
- options.parseOptions(["-q", "--config", "other.cfg", basedir])
- runner.createMaster(options)
- os.chdir(cwd)
-
- tac = open(os.path.join(basedir, "buildbot.tac"), "rt").read()
- self.failUnlessEqual(tac, oldtac, "shouldn't change existing .tac")
- self.failUnlessExists(os.path.join(basedir, "buildbot.tac.new"))
-
- make = open(os.path.join(basedir, "Makefile.sample"), "rt").read()
- self.failUnlessEqual(make, oldmake, "*should* rewrite Makefile.sample")
-
- samplecfg = open(os.path.join(basedir, "master.cfg.sample"),
- "rt").read()
- self.failUnlessEqual(samplecfg, oldsamplecfg,
- "*should* rewrite master.cfg.sample")
-
-
- def testSlave(self):
- basedir = "test_runner.slave"
- options = runner.SlaveOptions()
- options.parseOptions(["-q", basedir, "buildmaster:1234",
- "botname", "passwd"])
- cwd = os.getcwd()
- runner.createSlave(options)
- os.chdir(cwd)
-
- tac = os.path.join(basedir, "buildbot.tac")
- self.failUnless(os.path.exists(tac))
- tacfile = open(tac,"rt").read()
- self.failUnlessIn("basedir", tacfile)
- self.failUnlessIn("host = 'buildmaster'", tacfile)
- self.failUnlessIn("port = 1234", tacfile)
- self.failUnlessIn("slavename = 'botname'", tacfile)
- self.failUnlessIn("passwd = 'passwd'", tacfile)
- self.failUnlessIn("keepalive = 600", tacfile)
- self.failUnlessIn("BuildSlave(host, port, slavename", tacfile)
-
- makefile = os.path.join(basedir, "Makefile.sample")
- self.failUnlessExists(makefile)
-
- self.failUnlessExists(os.path.join(basedir, "info", "admin"))
- self.failUnlessExists(os.path.join(basedir, "info", "host"))
- # edit one to make sure the later install doesn't change it
- f = open(os.path.join(basedir, "info", "admin"), "wt")
- f.write("updated@buildbot.example.org\n")
- f.close()
-
- # now verify that running it a second time (with the same options)
- # does the right thing: nothing changes
- runner.createSlave(options)
- os.chdir(cwd)
-
- self.failIfExists(os.path.join(basedir, "buildbot.tac.new"))
- admin = open(os.path.join(basedir, "info", "admin"), "rt").read()
- self.failUnlessEqual(admin, "updated@buildbot.example.org\n")
-
-
- # mutate Makefile.sample, since it should be rewritten
- oldmake = open(os.path.join(basedir, "Makefile.sample"), "rt").read()
- f = open(os.path.join(basedir, "Makefile.sample"), "wt")
- f.write(oldmake)
- f.write("# additional line added\n")
- f.close()
- oldtac = open(os.path.join(basedir, "buildbot.tac"), "rt").read()
-
- # now run it again (with different options)
- options = runner.SlaveOptions()
- options.parseOptions(["-q", "--keepalive", "30",
- basedir, "buildmaster:9999",
- "newbotname", "passwd"])
- runner.createSlave(options)
- os.chdir(cwd)
-
- tac = open(os.path.join(basedir, "buildbot.tac"), "rt").read()
- self.failUnlessEqual(tac, oldtac, "shouldn't change existing .tac")
- self.failUnlessExists(os.path.join(basedir, "buildbot.tac.new"))
- tacfile = open(os.path.join(basedir, "buildbot.tac.new"),"rt").read()
- self.failUnlessIn("basedir", tacfile)
- self.failUnlessIn("host = 'buildmaster'", tacfile)
- self.failUnlessIn("port = 9999", tacfile)
- self.failUnlessIn("slavename = 'newbotname'", tacfile)
- self.failUnlessIn("passwd = 'passwd'", tacfile)
- self.failUnlessIn("keepalive = 30", tacfile)
- self.failUnlessIn("BuildSlave(host, port, slavename", tacfile)
-
- make = open(os.path.join(basedir, "Makefile.sample"), "rt").read()
- self.failUnlessEqual(make, oldmake, "*should* rewrite Makefile.sample")
-
-class Try(unittest.TestCase):
- # test some aspects of the 'buildbot try' command
- def makeOptions(self, contents):
- if os.path.exists(".buildbot"):
- shutil.rmtree(".buildbot")
- os.mkdir(".buildbot")
- open(os.path.join(".buildbot", "options"), "w").write(contents)
-
- def testGetopt1(self):
- opts = "try_connect = 'ssh'\n" + "try_builders = ['a']\n"
- self.makeOptions(opts)
- config = runner.TryOptions()
- config.parseOptions([])
- t = tryclient.Try(config)
- self.failUnlessEqual(t.connect, "ssh")
- self.failUnlessEqual(t.builderNames, ['a'])
-
- def testGetopt2(self):
- opts = ""
- self.makeOptions(opts)
- config = runner.TryOptions()
- config.parseOptions(['--connect=ssh', '--builder', 'a'])
- t = tryclient.Try(config)
- self.failUnlessEqual(t.connect, "ssh")
- self.failUnlessEqual(t.builderNames, ['a'])
-
- def testGetopt3(self):
- opts = ""
- self.makeOptions(opts)
- config = runner.TryOptions()
- config.parseOptions(['--connect=ssh',
- '--builder', 'a', '--builder=b'])
- t = tryclient.Try(config)
- self.failUnlessEqual(t.connect, "ssh")
- self.failUnlessEqual(t.builderNames, ['a', 'b'])
-
- def testGetopt4(self):
- opts = "try_connect = 'ssh'\n" + "try_builders = ['a']\n"
- self.makeOptions(opts)
- config = runner.TryOptions()
- config.parseOptions(['--builder=b'])
- t = tryclient.Try(config)
- self.failUnlessEqual(t.connect, "ssh")
- self.failUnlessEqual(t.builderNames, ['b'])
-
- def testGetTopdir(self):
- os.mkdir("gettopdir")
- os.mkdir(os.path.join("gettopdir", "foo"))
- os.mkdir(os.path.join("gettopdir", "foo", "bar"))
- open(os.path.join("gettopdir", "1"),"w").write("1")
- open(os.path.join("gettopdir", "foo", "2"),"w").write("2")
- open(os.path.join("gettopdir", "foo", "bar", "3"),"w").write("3")
-
- target = os.path.abspath("gettopdir")
- t = tryclient.getTopdir("1", "gettopdir")
- self.failUnlessEqual(os.path.abspath(t), target)
- t = tryclient.getTopdir("1", os.path.join("gettopdir", "foo"))
- self.failUnlessEqual(os.path.abspath(t), target)
- t = tryclient.getTopdir("1", os.path.join("gettopdir", "foo", "bar"))
- self.failUnlessEqual(os.path.abspath(t), target)
-
- target = os.path.abspath(os.path.join("gettopdir", "foo"))
- t = tryclient.getTopdir("2", os.path.join("gettopdir", "foo"))
- self.failUnlessEqual(os.path.abspath(t), target)
- t = tryclient.getTopdir("2", os.path.join("gettopdir", "foo", "bar"))
- self.failUnlessEqual(os.path.abspath(t), target)
-
- target = os.path.abspath(os.path.join("gettopdir", "foo", "bar"))
- t = tryclient.getTopdir("3", os.path.join("gettopdir", "foo", "bar"))
- self.failUnlessEqual(os.path.abspath(t), target)
-
- nonexistent = "nonexistent\n29fis3kq\tBAR"
- # hopefully there won't be a real file with that name between here
- # and the filesystem root.
- self.failUnlessRaises(ValueError, tryclient.getTopdir, nonexistent)
-
diff --git a/buildbot/buildbot-source/buildbot/test/test_scheduler.py b/buildbot/buildbot-source/buildbot/test/test_scheduler.py
deleted file mode 100644
index d423f6c86..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_scheduler.py
+++ /dev/null
@@ -1,313 +0,0 @@
-# -*- test-case-name: buildbot.test.test_scheduler -*-
-
-import os, time
-
-from twisted.trial import unittest
-from twisted.internet import defer, reactor
-from twisted.application import service
-from twisted.spread import pb
-
-from buildbot import scheduler, sourcestamp, buildset, status
-from buildbot.twcompat import maybeWait
-from buildbot.changes.changes import Change
-from buildbot.scripts import tryclient
-
-
-class FakeMaster(service.MultiService):
- d = None
- def submitBuildSet(self, bs):
- self.sets.append(bs)
- if self.d:
- reactor.callLater(0, self.d.callback, bs)
- self.d = None
- return pb.Referenceable() # makes the cleanup work correctly
-
-class Scheduling(unittest.TestCase):
- def setUp(self):
- self.master = master = FakeMaster()
- master.sets = []
- master.startService()
-
- def tearDown(self):
- d = self.master.stopService()
- return maybeWait(d)
-
- def addScheduler(self, s):
- s.setServiceParent(self.master)
-
- def testPeriodic1(self):
- self.addScheduler(scheduler.Periodic("quickly", ["a","b"], 2))
- d = defer.Deferred()
- reactor.callLater(5, d.callback, None)
- d.addCallback(self._testPeriodic1_1)
- return maybeWait(d)
- def _testPeriodic1_1(self, res):
- self.failUnless(len(self.master.sets) > 1)
- s1 = self.master.sets[0]
- self.failUnlessEqual(s1.builderNames, ["a","b"])
-
- def testNightly(self):
- # now == 15-Nov-2005, 00:05:36 AM . By using mktime, this is
- # converted into the local timezone, which happens to match what
- # Nightly is going to do anyway.
- MIN=60; HOUR=60*MIN; DAY=24*3600
- now = time.mktime((2005, 11, 15, 0, 5, 36, 1, 319, 0))
-
- s = scheduler.Nightly('nightly', ["a"], hour=3)
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), 2*HOUR+54*MIN+24)
-
- s = scheduler.Nightly('nightly', ["a"], minute=[3,8,54])
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), 2*MIN+24)
-
- s = scheduler.Nightly('nightly', ["a"],
- dayOfMonth=16, hour=1, minute=6)
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), DAY+HOUR+24)
-
- s = scheduler.Nightly('nightly', ["a"],
- dayOfMonth=16, hour=1, minute=3)
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), DAY+57*MIN+24)
-
- s = scheduler.Nightly('nightly', ["a"],
- dayOfMonth=15, hour=1, minute=3)
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), 57*MIN+24)
-
- s = scheduler.Nightly('nightly', ["a"],
- dayOfMonth=15, hour=0, minute=3)
- t = s.calculateNextRunTimeFrom(now)
- self.failUnlessEqual(int(t-now), 30*DAY-3*MIN+24)
-
-
- def isImportant(self, change):
- if "important" in change.files:
- return True
- return False
-
- def testBranch(self):
- s = scheduler.Scheduler("b1", "branch1", 2, ["a","b"],
- fileIsImportant=self.isImportant)
- self.addScheduler(s)
-
- c0 = Change("carol", ["important"], "other branch", branch="other")
- s.addChange(c0)
- self.failIf(s.timer)
- self.failIf(s.importantChanges)
-
- c1 = Change("alice", ["important", "not important"], "some changes",
- branch="branch1")
- s.addChange(c1)
- c2 = Change("bob", ["not important", "boring"], "some more changes",
- branch="branch1")
- s.addChange(c2)
- c3 = Change("carol", ["important", "dull"], "even more changes",
- branch="branch1")
- s.addChange(c3)
-
- self.failUnlessEqual(s.importantChanges, [c1,c3])
- self.failUnlessEqual(s.unimportantChanges, [c2])
- self.failUnless(s.timer)
-
- d = defer.Deferred()
- reactor.callLater(4, d.callback, None)
- d.addCallback(self._testBranch_1)
- return maybeWait(d)
- def _testBranch_1(self, res):
- self.failUnlessEqual(len(self.master.sets), 1)
- s = self.master.sets[0].source
- self.failUnlessEqual(s.branch, "branch1")
- self.failUnlessEqual(s.revision, None)
- self.failUnlessEqual(len(s.changes), 3)
- self.failUnlessEqual(s.patch, None)
-
-
- def testAnyBranch(self):
- s = scheduler.AnyBranchScheduler("b1", None, 1, ["a","b"],
- fileIsImportant=self.isImportant)
- self.addScheduler(s)
-
- c1 = Change("alice", ["important", "not important"], "some changes",
- branch="branch1")
- s.addChange(c1)
- c2 = Change("bob", ["not important", "boring"], "some more changes",
- branch="branch1")
- s.addChange(c2)
- c3 = Change("carol", ["important", "dull"], "even more changes",
- branch="branch1")
- s.addChange(c3)
-
- c4 = Change("carol", ["important"], "other branch", branch="branch2")
- s.addChange(c4)
-
- c5 = Change("carol", ["important"], "default branch", branch=None)
- s.addChange(c5)
-
- d = defer.Deferred()
- reactor.callLater(2, d.callback, None)
- d.addCallback(self._testAnyBranch_1)
- return maybeWait(d)
- def _testAnyBranch_1(self, res):
- self.failUnlessEqual(len(self.master.sets), 3)
- self.master.sets.sort(lambda a,b: cmp(a.source.branch,
- b.source.branch))
-
- s1 = self.master.sets[0].source
- self.failUnlessEqual(s1.branch, None)
- self.failUnlessEqual(s1.revision, None)
- self.failUnlessEqual(len(s1.changes), 1)
- self.failUnlessEqual(s1.patch, None)
-
- s2 = self.master.sets[1].source
- self.failUnlessEqual(s2.branch, "branch1")
- self.failUnlessEqual(s2.revision, None)
- self.failUnlessEqual(len(s2.changes), 3)
- self.failUnlessEqual(s2.patch, None)
-
- s3 = self.master.sets[2].source
- self.failUnlessEqual(s3.branch, "branch2")
- self.failUnlessEqual(s3.revision, None)
- self.failUnlessEqual(len(s3.changes), 1)
- self.failUnlessEqual(s3.patch, None)
-
- def testAnyBranch2(self):
- # like testAnyBranch but without fileIsImportant
- s = scheduler.AnyBranchScheduler("b1", None, 2, ["a","b"])
- self.addScheduler(s)
- c1 = Change("alice", ["important", "not important"], "some changes",
- branch="branch1")
- s.addChange(c1)
- c2 = Change("bob", ["not important", "boring"], "some more changes",
- branch="branch1")
- s.addChange(c2)
- c3 = Change("carol", ["important", "dull"], "even more changes",
- branch="branch1")
- s.addChange(c3)
-
- c4 = Change("carol", ["important"], "other branch", branch="branch2")
- s.addChange(c4)
-
- d = defer.Deferred()
- reactor.callLater(2, d.callback, None)
- d.addCallback(self._testAnyBranch2_1)
- return maybeWait(d)
- def _testAnyBranch2_1(self, res):
- self.failUnlessEqual(len(self.master.sets), 2)
- self.master.sets.sort(lambda a,b: cmp(a.source.branch,
- b.source.branch))
- s1 = self.master.sets[0].source
- self.failUnlessEqual(s1.branch, "branch1")
- self.failUnlessEqual(s1.revision, None)
- self.failUnlessEqual(len(s1.changes), 3)
- self.failUnlessEqual(s1.patch, None)
-
- s2 = self.master.sets[1].source
- self.failUnlessEqual(s2.branch, "branch2")
- self.failUnlessEqual(s2.revision, None)
- self.failUnlessEqual(len(s2.changes), 1)
- self.failUnlessEqual(s2.patch, None)
-
-
- def createMaildir(self, jobdir):
- os.mkdir(jobdir)
- os.mkdir(os.path.join(jobdir, "new"))
- os.mkdir(os.path.join(jobdir, "cur"))
- os.mkdir(os.path.join(jobdir, "tmp"))
-
- jobcounter = 1
- def pushJob(self, jobdir, job):
- while 1:
- filename = "job_%d" % self.jobcounter
- self.jobcounter += 1
- if os.path.exists(os.path.join(jobdir, "new", filename)):
- continue
- if os.path.exists(os.path.join(jobdir, "tmp", filename)):
- continue
- if os.path.exists(os.path.join(jobdir, "cur", filename)):
- continue
- break
- f = open(os.path.join(jobdir, "tmp", filename), "w")
- f.write(job)
- f.close()
- os.rename(os.path.join(jobdir, "tmp", filename),
- os.path.join(jobdir, "new", filename))
-
- def testTryJobdir(self):
- self.master.basedir = "try_jobdir"
- os.mkdir(self.master.basedir)
- jobdir = "jobdir1"
- jobdir_abs = os.path.join(self.master.basedir, jobdir)
- self.createMaildir(jobdir_abs)
- s = scheduler.Try_Jobdir("try1", ["a", "b"], jobdir)
- self.addScheduler(s)
- self.failIf(self.master.sets)
- job1 = tryclient.createJobfile("buildsetID",
- "branch1", "123", 1, "diff",
- ["a", "b"])
- self.master.d = d = defer.Deferred()
- self.pushJob(jobdir_abs, job1)
- d.addCallback(self._testTryJobdir_1)
- # N.B.: if we don't have DNotify, we poll every 10 seconds, so don't
- # set a .timeout here shorter than that. TODO: make it possible to
- # set the polling interval, so we can make it shorter.
- return maybeWait(d, 5)
-
- def _testTryJobdir_1(self, bs):
- self.failUnlessEqual(bs.builderNames, ["a", "b"])
- self.failUnlessEqual(bs.source.branch, "branch1")
- self.failUnlessEqual(bs.source.revision, "123")
- self.failUnlessEqual(bs.source.patch, (1, "diff"))
-
-
- def testTryUserpass(self):
- up = [("alice","pw1"), ("bob","pw2")]
- s = scheduler.Try_Userpass("try2", ["a", "b"], 0, userpass=up)
- self.addScheduler(s)
- port = s.getPort()
- config = {'connect': 'pb',
- 'username': 'alice',
- 'passwd': 'pw1',
- 'master': "localhost:%d" % port,
- 'builders': ["a", "b"],
- }
- t = tryclient.Try(config)
- ss = sourcestamp.SourceStamp("branch1", "123", (1, "diff"))
- t.sourcestamp = ss
- d2 = self.master.d = defer.Deferred()
- d = t.deliverJob()
- d.addCallback(self._testTryUserpass_1, t, d2)
- return maybeWait(d, 5)
- testTryUserpass.timeout = 5
- def _testTryUserpass_1(self, res, t, d2):
- # at this point, the Try object should have a RemoteReference to the
- # status object. The FakeMaster returns a stub.
- self.failUnless(t.buildsetStatus)
- d2.addCallback(self._testTryUserpass_2, t)
- return d2
- def _testTryUserpass_2(self, bs, t):
- # this should be the BuildSet submitted by the TryScheduler
- self.failUnlessEqual(bs.builderNames, ["a", "b"])
- self.failUnlessEqual(bs.source.branch, "branch1")
- self.failUnlessEqual(bs.source.revision, "123")
- self.failUnlessEqual(bs.source.patch, (1, "diff"))
-
- t.cleanup()
-
- # twisted-2.0.1 (but not later versions) seems to require a reactor
- # iteration before stopListening actually works. TODO: investigate
- # this.
- d = defer.Deferred()
- reactor.callLater(0, d.callback, None)
- return d
-
- def testGetBuildSets(self):
- # validate IStatus.getBuildSets
- s = status.builder.Status(None, ".")
- bs1 = buildset.BuildSet(["a","b"], sourcestamp.SourceStamp(),
- reason="one", bsid="1")
- s.buildsetSubmitted(bs1.status)
- self.failUnlessEqual(s.getBuildSets(), [bs1.status])
- bs1.status.notifyFinishedWatchers()
- self.failUnlessEqual(s.getBuildSets(), [])
diff --git a/buildbot/buildbot-source/buildbot/test/test_slavecommand.py b/buildbot/buildbot-source/buildbot/test/test_slavecommand.py
deleted file mode 100644
index dd791983e..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_slavecommand.py
+++ /dev/null
@@ -1,265 +0,0 @@
-# -*- test-case-name: buildbot.test.test_slavecommand -*-
-
-from twisted.trial import unittest
-from twisted.internet import reactor, interfaces
-from twisted.python import util, runtime, failure
-from buildbot.twcompat import maybeWait
-
-noisy = False
-if noisy:
- from twisted.python.log import startLogging
- import sys
- startLogging(sys.stdout)
-
-import os, re, sys
-import signal
-
-from buildbot.slave import commands
-SlaveShellCommand = commands.SlaveShellCommand
-
-# test slavecommand.py by running the various commands with a fake
-# SlaveBuilder object that logs the calls to sendUpdate()
-
-def findDir():
- # the same directory that holds this script
- return util.sibpath(__file__, ".")
-
-class FakeSlaveBuilder:
- def __init__(self, usePTY):
- self.updates = []
- self.basedir = findDir()
- self.usePTY = usePTY
-
- def sendUpdate(self, data):
- if noisy: print "FakeSlaveBuilder.sendUpdate", data
- self.updates.append(data)
-
-
-class SignalMixin:
- sigchldHandler = None
-
- def setUpClass(self):
- # make sure SIGCHLD handler is installed, as it should be on
-        # reactor.run(). The problem is the reactor may not have been run
-        # when this test runs.
- if hasattr(reactor, "_handleSigchld") and hasattr(signal, "SIGCHLD"):
- self.sigchldHandler = signal.signal(signal.SIGCHLD,
- reactor._handleSigchld)
-
- def tearDownClass(self):
- if self.sigchldHandler:
- signal.signal(signal.SIGCHLD, self.sigchldHandler)
-
-
-class ShellBase(SignalMixin):
-
- def setUp(self):
- self.builder = FakeSlaveBuilder(self.usePTY)
-
- def failUnlessIn(self, substring, string):
- self.failUnless(string.find(substring) != -1)
-
- def getfile(self, which):
- got = ""
- for r in self.builder.updates:
- if r.has_key(which):
- got += r[which]
- return got
-
- def checkOutput(self, expected):
- """
- @type expected: list of (streamname, contents) tuples
- @param expected: the expected output
- """
- expected_linesep = os.linesep
- if self.usePTY:
- # PTYs change the line ending. I'm not sure why.
- expected_linesep = "\r\n"
- expected = [(stream, contents.replace("\n", expected_linesep, 1000))
- for (stream, contents) in expected]
- if self.usePTY:
- # PTYs merge stdout+stderr into a single stream
- expected = [('stdout', contents)
- for (stream, contents) in expected]
- # now merge everything into one string per stream
- streams = {}
- for (stream, contents) in expected:
- streams[stream] = streams.get(stream, "") + contents
- for (stream, contents) in streams.items():
- got = self.getfile(stream)
- self.assertEquals(got, contents)
-
- def getrc(self):
- self.failUnless(self.builder.updates[-1].has_key('rc'))
- got = self.builder.updates[-1]['rc']
- return got
- def checkrc(self, expected):
- got = self.getrc()
- self.assertEquals(got, expected)
-
- def testShell1(self):
- cmd = sys.executable + " emit.py 0"
- args = {'command': cmd, 'workdir': '.', 'timeout': 60}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- expected = [('stdout', "this is stdout\n"),
- ('stderr', "this is stderr\n")]
- d.addCallback(self._checkPass, expected, 0)
- return maybeWait(d)
-
- def _checkPass(self, res, expected, rc):
- self.checkOutput(expected)
- self.checkrc(rc)
-
- def testShell2(self):
- cmd = [sys.executable, "emit.py", "0"]
- args = {'command': cmd, 'workdir': '.', 'timeout': 60}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- expected = [('stdout', "this is stdout\n"),
- ('stderr', "this is stderr\n")]
- d.addCallback(self._checkPass, expected, 0)
- return maybeWait(d)
-
- def testShellRC(self):
- cmd = [sys.executable, "emit.py", "1"]
- args = {'command': cmd, 'workdir': '.', 'timeout': 60}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- expected = [('stdout', "this is stdout\n"),
- ('stderr', "this is stderr\n")]
- d.addCallback(self._checkPass, expected, 1)
- return maybeWait(d)
-
- def testShellEnv(self):
- cmd = sys.executable + " emit.py 0"
- args = {'command': cmd, 'workdir': '.',
- 'env': {'EMIT_TEST': "envtest"}, 'timeout': 60}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- expected = [('stdout', "this is stdout\n"),
- ('stderr', "this is stderr\n"),
- ('stdout', "EMIT_TEST: envtest\n"),
- ]
- d.addCallback(self._checkPass, expected, 0)
- return maybeWait(d)
-
- def testShellSubdir(self):
- cmd = sys.executable + " emit.py 0"
- args = {'command': cmd, 'workdir': "subdir", 'timeout': 60}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- expected = [('stdout', "this is stdout in subdir\n"),
- ('stderr', "this is stderr\n")]
- d.addCallback(self._checkPass, expected, 0)
- return maybeWait(d)
-
- def testShellMissingCommand(self):
- args = {'command': "/bin/EndWorldHungerAndMakePigsFly",
- 'workdir': '.', 'timeout': 10,
- 'env': {"LC_ALL": "C"},
- }
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- d.addCallback(self._testShellMissingCommand_1)
- return maybeWait(d)
- def _testShellMissingCommand_1(self, res):
- self.failIfEqual(self.getrc(), 0)
- # we used to check the error message to make sure it said something
- # about a missing command, but there are a variety of shells out
-        # there, and they emit messages in a variety of languages, so we
- # stopped trying.
-
- def testTimeout(self):
- args = {'command': [sys.executable, "sleep.py", "10"],
- 'workdir': '.', 'timeout': 2}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- d.addCallback(self._testTimeout_1)
- return maybeWait(d)
- def _testTimeout_1(self, res):
- self.failIfEqual(self.getrc(), 0)
- got = self.getfile('header')
- self.failUnlessIn("command timed out: 2 seconds without output", got)
- if runtime.platformType == "posix":
- # the "killing pid" message is not present in windows
- self.failUnlessIn("killing pid", got)
- # but the process *ought* to be killed somehow
- self.failUnlessIn("process killed by signal", got)
- #print got
- if runtime.platformType != 'posix':
- testTimeout.todo = "timeout doesn't appear to work under windows"
-
- def testInterrupt1(self):
- args = {'command': [sys.executable, "sleep.py", "10"],
- 'workdir': '.', 'timeout': 20}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- reactor.callLater(1, c.interrupt)
- d.addCallback(self._testInterrupt1_1)
- return maybeWait(d)
- def _testInterrupt1_1(self, res):
- self.failIfEqual(self.getrc(), 0)
- got = self.getfile('header')
- self.failUnlessIn("command interrupted", got)
- if runtime.platformType == "posix":
- self.failUnlessIn("process killed by signal", got)
- if runtime.platformType != 'posix':
- testInterrupt1.todo = "interrupt doesn't appear to work under windows"
-
-
- # todo: twisted-specific command tests
-
-class Shell(ShellBase, unittest.TestCase):
- usePTY = False
-
- def testInterrupt2(self):
- # test the backup timeout. This doesn't work under a PTY, because the
- # transport.loseConnection we do in the timeout handler actually
- # *does* kill the process.
- args = {'command': [sys.executable, "sleep.py", "5"],
- 'workdir': '.', 'timeout': 20}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- c.command.BACKUP_TIMEOUT = 1
- # make it unable to kill the child, by changing the signal it uses
- # from SIGKILL to the do-nothing signal 0.
- c.command.KILL = None
- reactor.callLater(1, c.interrupt)
- d.addBoth(self._testInterrupt2_1)
- return maybeWait(d)
- def _testInterrupt2_1(self, res):
- # the slave should raise a TimeoutError exception. In a normal build
- # process (i.e. one that uses step.RemoteShellCommand), this
- # exception will be handed to the Step, which will acquire an ERROR
- # status. In our test environment, it isn't such a big deal.
- self.failUnless(isinstance(res, failure.Failure),
- "res is not a Failure: %s" % (res,))
- self.failUnless(res.check(commands.TimeoutError))
- self.checkrc(-1)
- return
- # the command is still actually running. Start another command, to
- # make sure that a) the old command's output doesn't interfere with
- # the new one, and b) the old command's actual termination doesn't
- # break anything
- args = {'command': [sys.executable, "sleep.py", "5"],
- 'workdir': '.', 'timeout': 20}
- c = SlaveShellCommand(self.builder, None, args)
- d = c.start()
- d.addCallback(self._testInterrupt2_2)
- return d
- def _testInterrupt2_2(self, res):
- self.checkrc(0)
- # N.B.: under windows, the trial process hangs out for another few
- # seconds. I assume that the win32eventreactor is waiting for one of
- # the lingering child processes to really finish.
-
-haveProcess = interfaces.IReactorProcess(reactor, None)
-if runtime.platformType == 'posix':
- # test with PTYs also
- class ShellPTY(ShellBase, unittest.TestCase):
- usePTY = True
- if not haveProcess:
- ShellPTY.skip = "this reactor doesn't support IReactorProcess"
-if not haveProcess:
- Shell.skip = "this reactor doesn't support IReactorProcess"
diff --git a/buildbot/buildbot-source/buildbot/test/test_slaves.py b/buildbot/buildbot-source/buildbot/test/test_slaves.py
deleted file mode 100644
index 588e08f0b..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_slaves.py
+++ /dev/null
@@ -1,228 +0,0 @@
-# -*- test-case-name: buildbot.test.test_slaves -*-
-
-from twisted.trial import unittest
-from buildbot.twcompat import maybeWait
-from twisted.internet import defer, reactor
-
-from buildbot.test.runutils import RunMixin
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process.base import BuildRequest
-from buildbot.status.builder import SUCCESS
-
-config_1 = """
-from buildbot.process import step, factory
-s = factory.s
-
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'sekrit'), ('bot2', 'sekrit'), ('bot3', 'sekrit')]
-c['sources'] = []
-c['schedulers'] = []
-c['slavePortnum'] = 0
-c['schedulers'] = []
-
-f = factory.BuildFactory([s(step.RemoteDummy, timeout=1)])
-
-c['builders'] = [
- {'name': 'b1', 'slavenames': ['bot1','bot2','bot3'],
- 'builddir': 'b1', 'factory': f},
- ]
-"""
-
-class Slave(RunMixin, unittest.TestCase):
-
- def setUp(self):
- RunMixin.setUp(self)
- self.master.loadConfig(config_1)
- self.master.startService()
- d = self.connectSlave(["b1"])
- d.addCallback(lambda res: self.connectSlave(["b1"], "bot2"))
- return maybeWait(d)
-
- def doBuild(self, buildername):
- br = BuildRequest("forced", SourceStamp())
- d = br.waitUntilFinished()
- self.control.getBuilder(buildername).requestBuild(br)
- return d
-
- def testSequence(self):
- # make sure both slaves appear in the list.
- attached_slaves = [c for c in self.master.botmaster.slaves.values()
- if c.slave]
- self.failUnlessEqual(len(attached_slaves), 2)
- b = self.master.botmaster.builders["b1"]
- self.failUnlessEqual(len(b.slaves), 2)
-
- # since the current scheduling algorithm is simple and does not
- # rotate or attempt any sort of load-balancing, two builds in
- # sequence should both use the first slave. This may change later if
- # we move to a more sophisticated scheme.
-
- d = self.doBuild("b1")
- d.addCallback(self._testSequence_1)
- return maybeWait(d)
- def _testSequence_1(self, res):
- self.failUnlessEqual(res.getResults(), SUCCESS)
- self.failUnlessEqual(res.getSlavename(), "bot1")
-
- d = self.doBuild("b1")
- d.addCallback(self._testSequence_2)
- return d
- def _testSequence_2(self, res):
- self.failUnlessEqual(res.getSlavename(), "bot1")
-
-
- def testSimultaneous(self):
- # make sure we can actually run two builds at the same time
- d1 = self.doBuild("b1")
- d2 = self.doBuild("b1")
- d1.addCallback(self._testSimultaneous_1, d2)
- return maybeWait(d1)
- def _testSimultaneous_1(self, res, d2):
- self.failUnlessEqual(res.getResults(), SUCCESS)
- self.failUnlessEqual(res.getSlavename(), "bot1")
- d2.addCallback(self._testSimultaneous_2)
- return d2
- def _testSimultaneous_2(self, res):
- self.failUnlessEqual(res.getResults(), SUCCESS)
- self.failUnlessEqual(res.getSlavename(), "bot2")
-
- def testFallback1(self):
- # detach the first slave, verify that a build is run using the second
- # slave instead
- d = self.shutdownSlave("bot1", "b1")
- d.addCallback(self._testFallback1_1)
- return maybeWait(d)
- def _testFallback1_1(self, res):
- attached_slaves = [c for c in self.master.botmaster.slaves.values()
- if c.slave]
- self.failUnlessEqual(len(attached_slaves), 1)
- self.failUnlessEqual(len(self.master.botmaster.builders["b1"].slaves),
- 1)
- d = self.doBuild("b1")
- d.addCallback(self._testFallback1_2)
- return d
- def _testFallback1_2(self, res):
- self.failUnlessEqual(res.getResults(), SUCCESS)
- self.failUnlessEqual(res.getSlavename(), "bot2")
-
- def testFallback2(self):
- # Disable the first slave, so that a slaveping will timeout. Then
- # start a build, and verify that the non-failing (second) one is
- # claimed for the build, and that the failing one is removed from the
- # list.
-
- # reduce the ping time so we'll failover faster
- self.master.botmaster.builders["b1"].START_BUILD_TIMEOUT = 1
- self.disappearSlave("bot1", "b1")
- d = self.doBuild("b1")
- d.addCallback(self._testFallback2_1)
- return maybeWait(d)
- def _testFallback2_1(self, res):
- self.failUnlessEqual(res.getResults(), SUCCESS)
- self.failUnlessEqual(res.getSlavename(), "bot2")
- b1slaves = self.master.botmaster.builders["b1"].slaves
- self.failUnlessEqual(len(b1slaves), 1)
- self.failUnlessEqual(b1slaves[0].slave.slavename, "bot2")
-
-
- def notFinished(self, brs):
- # utility method
- builds = brs.getBuilds()
- self.failIf(len(builds) > 1)
- if builds:
- self.failIf(builds[0].isFinished())
-
- def testDontClaimPingingSlave(self):
- # have two slaves connect for the same builder. Do something to the
- # first one so that slavepings are delayed (but do not fail
- # outright).
- timers = []
- self.slaves['bot1'].debugOpts["stallPings"] = (10, timers)
- br = BuildRequest("forced", SourceStamp())
- d1 = br.waitUntilFinished()
- self.control.getBuilder("b1").requestBuild(br)
- s1 = br.status # this is a BuildRequestStatus
- # give it a chance to start pinging
- d2 = defer.Deferred()
- d2.addCallback(self._testDontClaimPingingSlave_1, d1, s1, timers)
- reactor.callLater(1, d2.callback, None)
- return maybeWait(d2)
- def _testDontClaimPingingSlave_1(self, res, d1, s1, timers):
- # now the first build is running (waiting on the ping), so start the
- # second build. This should claim the second slave, not the first,
- # because the first is busy doing the ping.
- self.notFinished(s1)
- d3 = self.doBuild("b1")
- d3.addCallback(self._testDontClaimPingingSlave_2, d1, s1, timers)
- return d3
- def _testDontClaimPingingSlave_2(self, res, d1, s1, timers):
- self.failUnlessEqual(res.getSlavename(), "bot2")
- self.notFinished(s1)
- # now let the ping complete
- self.failUnlessEqual(len(timers), 1)
- timers[0].reset(0)
- d1.addCallback(self._testDontClaimPingingSlave_3)
- return d1
- def _testDontClaimPingingSlave_3(self, res):
- self.failUnlessEqual(res.getSlavename(), "bot1")
-
-
-class Slave2(RunMixin, unittest.TestCase):
-
- revision = 0
-
- def setUp(self):
- RunMixin.setUp(self)
- self.master.loadConfig(config_1)
- self.master.startService()
-
- def doBuild(self, buildername, reason="forced"):
- # we need to prevent these builds from being merged, so we create
- # each of them with a different revision specifier. The revision is
- # ignored because our build process does not have a source checkout
- # step.
- self.revision += 1
- br = BuildRequest(reason, SourceStamp(revision=self.revision))
- d = br.waitUntilFinished()
- self.control.getBuilder(buildername).requestBuild(br)
- return d
-
- def testFirstComeFirstServed(self):
- # submit three builds, then connect a slave which fails the
- # slaveping. The first build will claim the slave, do the slaveping,
- # give up, and re-queue the build. Verify that the build gets
- # re-queued in front of all other builds. This may be tricky, because
- # the other builds may attempt to claim the just-failed slave.
-
- d1 = self.doBuild("b1", "first")
- d2 = self.doBuild("b1", "second")
- #buildable = self.master.botmaster.builders["b1"].buildable
- #print [b.reason for b in buildable]
-
- # specifically, I want the poor build to get precedence over any
- # others that were waiting. To test this, we need more builds than
- # slaves.
-
- # now connect a broken slave. The first build started as soon as it
- # connects, so by the time we get to our _1 method, the ill-fated
- # build has already started.
- d = self.connectSlave(["b1"], opts={"failPingOnce": True})
- d.addCallback(self._testFirstComeFirstServed_1, d1, d2)
- return maybeWait(d)
- def _testFirstComeFirstServed_1(self, res, d1, d2):
-        # the master has sent the slaveping. When this is received, it will
- # fail, causing the master to hang up on the slave. When it
- # reconnects, it should find the first build at the front of the
- # queue. If we simply wait for both builds to complete, then look at
- # the status logs, we should see that the builds ran in the correct
- # order.
-
- d = defer.DeferredList([d1,d2])
- d.addCallback(self._testFirstComeFirstServed_2)
- return d
- def _testFirstComeFirstServed_2(self, res):
- b = self.status.getBuilder("b1")
- builds = b.getBuild(0), b.getBuild(1)
- reasons = [build.getReason() for build in builds]
- self.failUnlessEqual(reasons, ["first", "second"])
-
diff --git a/buildbot/buildbot-source/buildbot/test/test_status.py b/buildbot/buildbot-source/buildbot/test/test_status.py
deleted file mode 100644
index d8c0eb0da..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_status.py
+++ /dev/null
@@ -1,949 +0,0 @@
-# -*- test-case-name: buildbot.test.test_status -*-
-
-import email, os
-
-from twisted.internet import defer, reactor
-from twisted.trial import unittest
-
-from buildbot import interfaces
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process.base import BuildRequest
-from buildbot.twcompat import implements, providedBy, maybeWait
-from buildbot.status import builder, base
-try:
- from buildbot.status import mail
-except ImportError:
- mail = None
-from buildbot.status import progress, client # NEEDS COVERAGE
-from buildbot.test.runutils import RunMixin
-
-class MyStep:
- build = None
- def getName(self):
- return "step"
-
-class MyLogFileProducer(builder.LogFileProducer):
- # The reactor.callLater(0) in LogFileProducer.resumeProducing is a bit of
- # a nuisance from a testing point of view. This subclass adds a Deferred
- # to that call so we can find out when it is complete.
- def resumeProducing(self):
- d = defer.Deferred()
- reactor.callLater(0, self._resumeProducing, d)
- return d
- def _resumeProducing(self, d):
- builder.LogFileProducer._resumeProducing(self)
- reactor.callLater(0, d.callback, None)
-
-class MyLog(builder.LogFile):
- def __init__(self, basedir, name, text=None, step=None):
- self.fakeBuilderBasedir = basedir
- if not step:
- step = MyStep()
- builder.LogFile.__init__(self, step, name, name)
- if text:
- self.addStdout(text)
- self.finish()
- def getFilename(self):
- return os.path.join(self.fakeBuilderBasedir, self.name)
-
- def subscribeConsumer(self, consumer):
- p = MyLogFileProducer(self, consumer)
- d = p.resumeProducing()
- return d
-
-class MyHTMLLog(builder.HTMLLogFile):
- def __init__(self, basedir, name, html):
- step = MyStep()
- builder.HTMLLogFile.__init__(self, step, name, name, html)
-
-class MyLogSubscriber:
- def __init__(self):
- self.chunks = []
- def logChunk(self, build, step, log, channel, text):
- self.chunks.append((channel, text))
-
-class MyLogConsumer:
- def __init__(self, limit=None):
- self.chunks = []
- self.finished = False
- self.limit = limit
- def registerProducer(self, producer, streaming):
- self.producer = producer
- self.streaming = streaming
- def unregisterProducer(self):
- self.producer = None
- def writeChunk(self, chunk):
- self.chunks.append(chunk)
- if self.limit:
- self.limit -= 1
- if self.limit == 0:
- self.producer.pauseProducing()
- def finish(self):
- self.finished = True
-
-if mail:
- class MyMailer(mail.MailNotifier):
- def sendMessage(self, m, recipients):
- self.parent.messages.append((m, recipients))
-
-class MyStatus:
- def getBuildbotURL(self):
- return self.url
- def getURLForThing(self, thing):
- return None
-
-class MyBuilder(builder.BuilderStatus):
- nextBuildNumber = 0
-
-class MyBuild(builder.BuildStatus):
- testlogs = []
- def __init__(self, parent, number, results):
- builder.BuildStatus.__init__(self, parent, number)
- self.results = results
- self.source = SourceStamp(revision="1.14")
- self.reason = "build triggered by changes"
- self.finished = True
- def getLogs(self):
- return self.testlogs
-
-class MyLookup:
- if implements:
- implements(interfaces.IEmailLookup)
- else:
- __implements__ = interfaces.IEmailLookup,
-
- def getAddress(self, user):
- d = defer.Deferred()
- # With me now is Mr Thomas Walters of West Hartlepool who is totally
- # invisible.
- if user == "Thomas_Walters":
- d.callback(None)
- else:
- d.callback(user + "@" + "dev.com")
- return d
-
-class Mail(unittest.TestCase):
-
- def setUp(self):
- self.builder = MyBuilder("builder1")
-
- def stall(self, res, timeout):
- d = defer.Deferred()
- reactor.callLater(timeout, d.callback, res)
- return d
-
- def makeBuild(self, number, results):
- return MyBuild(self.builder, number, results)
-
- def failUnlessIn(self, substring, string):
- self.failUnless(string.find(substring) != -1)
-
- def getBuildbotURL(self):
- return "BUILDBOT_URL"
-
- def getURLForThing(self, thing):
- return None
-
- def testBuild1(self):
- mailer = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup=mail.Domain("dev.com"))
- mailer.parent = self
- mailer.status = self
- self.messages = []
-
- b1 = self.makeBuild(3, builder.SUCCESS)
- b1.blamelist = ["bob"]
-
- mailer.buildFinished("builder1", b1, b1.results)
- self.failUnless(len(self.messages) == 1)
- m,r = self.messages.pop()
- t = m.as_string()
- self.failUnlessIn("To: bob@dev.com, recip2@example.com, "
- "recip@example.com\n", t)
- self.failUnlessIn("From: buildbot@example.com\n", t)
- self.failUnlessIn("Subject: buildbot success in builder1\n", t)
- self.failUnlessIn("Date: ", t)
- self.failUnlessIn("Build succeeded!\n", t)
- self.failUnlessIn("Buildbot URL: BUILDBOT_URL\n", t)
-
- def testBuild2(self):
- mailer = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup="dev.com",
- sendToInterestedUsers=False)
- mailer.parent = self
- mailer.status = self
- self.messages = []
-
- b1 = self.makeBuild(3, builder.SUCCESS)
- b1.blamelist = ["bob"]
-
- mailer.buildFinished("builder1", b1, b1.results)
- self.failUnless(len(self.messages) == 1)
- m,r = self.messages.pop()
- t = m.as_string()
- self.failUnlessIn("To: recip2@example.com, "
- "recip@example.com\n", t)
- self.failUnlessIn("From: buildbot@example.com\n", t)
- self.failUnlessIn("Subject: buildbot success in builder1\n", t)
- self.failUnlessIn("Build succeeded!\n", t)
- self.failUnlessIn("Buildbot URL: BUILDBOT_URL\n", t)
-
- def testBuildStatusCategory(self):
- # a status client only interested in a category should only receive
- # from that category
- mailer = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup="dev.com",
- sendToInterestedUsers=False,
- categories=["debug"])
-
- mailer.parent = self
- mailer.status = self
- self.messages = []
-
- b1 = self.makeBuild(3, builder.SUCCESS)
- b1.blamelist = ["bob"]
-
- mailer.buildFinished("builder1", b1, b1.results)
- self.failIf(self.messages)
-
- def testBuilderCategory(self):
- # a builder in a certain category should notify status clients that
- # did not list categories, or categories including this one
- mailer1 = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup="dev.com",
- sendToInterestedUsers=False)
- mailer2 = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup="dev.com",
- sendToInterestedUsers=False,
- categories=["active"])
- mailer3 = MyMailer(fromaddr="buildbot@example.com",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup="dev.com",
- sendToInterestedUsers=False,
- categories=["active", "debug"])
-
- builderd = MyBuilder("builder2", "debug")
-
- mailer1.parent = self
- mailer1.status = self
- mailer2.parent = self
- mailer2.status = self
- mailer3.parent = self
- mailer3.status = self
- self.messages = []
-
- t = mailer1.builderAdded("builder2", builderd)
- self.assertEqual(len(mailer1.watched), 1)
- self.assertEqual(t, mailer1)
- t = mailer2.builderAdded("builder2", builderd)
- self.assertEqual(len(mailer2.watched), 0)
- self.assertEqual(t, None)
- t = mailer3.builderAdded("builder2", builderd)
- self.assertEqual(len(mailer3.watched), 1)
- self.assertEqual(t, mailer3)
-
- b2 = MyBuild(builderd, 3, builder.SUCCESS)
- b2.blamelist = ["bob"]
-
- mailer1.buildFinished("builder2", b2, b2.results)
- self.failUnlessEqual(len(self.messages), 1)
- self.messages = []
- mailer2.buildFinished("builder2", b2, b2.results)
- self.failUnlessEqual(len(self.messages), 0)
- self.messages = []
- mailer3.buildFinished("builder2", b2, b2.results)
- self.failUnlessEqual(len(self.messages), 1)
-
- def testFailure(self):
- mailer = MyMailer(fromaddr="buildbot@example.com", mode="problem",
- extraRecipients=["recip@example.com",
- "recip2@example.com"],
- lookup=MyLookup())
- mailer.parent = self
- mailer.status = self
- self.messages = []
-
- b1 = self.makeBuild(3, builder.SUCCESS)
- b1.blamelist = ["dev1", "dev2"]
- b2 = self.makeBuild(4, builder.FAILURE)
- b2.setText(["snarkleack", "polarization", "failed"])
- b2.blamelist = ["dev3", "dev3", "dev3", "dev4",
- "Thomas_Walters"]
- mailer.buildFinished("builder1", b1, b1.results)
- self.failIf(self.messages)
- mailer.buildFinished("builder1", b2, b2.results)
- self.failUnless(len(self.messages) == 1)
- m,r = self.messages.pop()
- t = m.as_string()
- self.failUnlessIn("To: dev3@dev.com, dev4@dev.com, "
- "recip2@example.com, recip@example.com\n", t)
- self.failUnlessIn("From: buildbot@example.com\n", t)
- self.failUnlessIn("Subject: buildbot failure in builder1\n", t)
- self.failUnlessIn("The Buildbot has detected a new failure", t)
- self.failUnlessIn("BUILD FAILED: snarkleack polarization failed\n", t)
- self.failUnlessEqual(r, ["dev3@dev.com", "dev4@dev.com",
- "recip2@example.com", "recip@example.com"])
-
- def testLogs(self):
- basedir = "test_status_logs"
- os.mkdir(basedir)
- mailer = MyMailer(fromaddr="buildbot@example.com", addLogs=True,
- extraRecipients=["recip@example.com",
- "recip2@example.com"])
- mailer.parent = self
- mailer.status = self
- self.messages = []
-
- b1 = self.makeBuild(3, builder.WARNINGS)
- b1.testlogs = [MyLog(basedir, 'compile', "Compile log here\n"),
- MyLog(basedir,
- 'test', "Test log here\nTest 4 failed\n"),
- ]
- b1.text = ["unusual", "gnarzzler", "output"]
- mailer.buildFinished("builder1", b1, b1.results)
- self.failUnless(len(self.messages) == 1)
- m,r = self.messages.pop()
- t = m.as_string()
- self.failUnlessIn("Subject: buildbot warnings in builder1\n", t)
- m2 = email.message_from_string(t)
- p = m2.get_payload()
- self.failUnlessEqual(len(p), 3)
-
- self.failUnlessIn("Build Had Warnings: unusual gnarzzler output\n",
- p[0].get_payload())
-
- self.failUnlessEqual(p[1].get_filename(), "step.compile")
- self.failUnlessEqual(p[1].get_payload(), "Compile log here\n")
-
- self.failUnlessEqual(p[2].get_filename(), "step.test")
- self.failUnlessIn("Test log here\n", p[2].get_payload())
-
- def testMail(self):
- basedir = "test_status_mail"
- os.mkdir(basedir)
- dest = os.environ.get("BUILDBOT_TEST_MAIL")
- if not dest:
- raise unittest.SkipTest("define BUILDBOT_TEST_MAIL=dest to run this")
- mailer = mail.MailNotifier(fromaddr="buildbot@example.com",
- addLogs=True,
- extraRecipients=[dest])
- s = MyStatus()
- s.url = "project URL"
- mailer.status = s
-
- b1 = self.makeBuild(3, builder.SUCCESS)
- b1.testlogs = [MyLog(basedir, 'compile', "Compile log here\n"),
- MyLog(basedir,
- 'test', "Test log here\nTest 4 failed\n"),
- ]
-
- print "sending mail to", dest
- d = mailer.buildFinished("builder1", b1, b1.results)
- # When this fires, the mail has been sent, but the SMTP connection is
- # still up (because smtp.sendmail relies upon the server to hang up).
- # Spin for a moment to avoid the "unclean reactor" warning that Trial
- # gives us if we finish before the socket is disconnected. Really,
- # sendmail() ought to hang up the connection once it is finished:
- # otherwise a malicious SMTP server could make us consume lots of
- # memory.
- d.addCallback(self.stall, 0.1)
- return maybeWait(d)
-
-if not mail:
- Mail.skip = "the Twisted Mail package is not installed"
-
-class Progress(unittest.TestCase):
- def testWavg(self):
- bp = progress.BuildProgress([])
- e = progress.Expectations(bp)
- # wavg(old, current)
- self.failUnlessEqual(e.wavg(None, None), None)
- self.failUnlessEqual(e.wavg(None, 3), 3)
- self.failUnlessEqual(e.wavg(3, None), 3)
- self.failUnlessEqual(e.wavg(3, 4), 3.5)
- e.decay = 0.1
- self.failUnlessEqual(e.wavg(3, 4), 3.1)
-
-
-class Results(unittest.TestCase):
-
- def testAddResults(self):
- b = builder.BuildStatus(builder.BuilderStatus("test"), 12)
- testname = ("buildbot", "test", "test_status", "Results",
- "testAddResults")
- r1 = builder.TestResult(name=testname,
- results=builder.SUCCESS,
- text=["passed"],
- logs={'output': ""},
- )
- b.addTestResult(r1)
-
- res = b.getTestResults()
- self.failUnlessEqual(res.keys(), [testname])
- t = res[testname]
- self.failUnless(providedBy(t, interfaces.ITestResult))
- self.failUnlessEqual(t.getName(), testname)
- self.failUnlessEqual(t.getResults(), builder.SUCCESS)
- self.failUnlessEqual(t.getText(), ["passed"])
- self.failUnlessEqual(t.getLogs(), {'output': ""})
-
-class Log(unittest.TestCase):
- def setUpClass(self):
- self.basedir = "status_log_add"
- os.mkdir(self.basedir)
-
- def testAdd(self):
- l = MyLog(self.basedir, "compile", step=13)
- self.failUnlessEqual(l.getName(), "compile")
- self.failUnlessEqual(l.getStep(), 13)
- l.addHeader("HEADER\n")
- l.addStdout("Some text\n")
- l.addStderr("Some error\n")
- l.addStdout("Some more text\n")
- self.failIf(l.isFinished())
- l.finish()
- self.failUnless(l.isFinished())
- self.failUnlessEqual(l.getText(),
- "Some text\nSome error\nSome more text\n")
- self.failUnlessEqual(l.getTextWithHeaders(),
- "HEADER\n" +
- "Some text\nSome error\nSome more text\n")
- self.failUnlessEqual(len(list(l.getChunks())), 4)
-
- self.failUnless(l.hasContents())
- os.unlink(l.getFilename())
- self.failIf(l.hasContents())
-
- def TODO_testDuplicate(self):
- # create multiple logs for the same step with the same logname, make
- # sure their on-disk filenames are suitably uniquified. This
- # functionality actually lives in BuildStepStatus and BuildStatus, so
- # this test must involve more than just the MyLog class.
-
-        # naive approach, doesn't work
- l1 = MyLog(self.basedir, "duplicate")
- l1.addStdout("Some text\n")
- l1.finish()
- l2 = MyLog(self.basedir, "duplicate")
- l2.addStdout("Some more text\n")
- l2.finish()
- self.failIfEqual(l1.getFilename(), l2.getFilename())
-
- def testMerge1(self):
- l = MyLog(self.basedir, "merge1")
- l.addHeader("HEADER\n")
- l.addStdout("Some text\n")
- l.addStdout("Some more text\n")
- l.addStdout("more\n")
- l.finish()
- self.failUnlessEqual(l.getText(),
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(l.getTextWithHeaders(),
- "HEADER\n" +
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(len(list(l.getChunks())), 2)
-
- def testMerge2(self):
- l = MyLog(self.basedir, "merge2")
- l.addHeader("HEADER\n")
- for i in xrange(1000):
- l.addStdout("aaaa")
- for i in xrange(30):
- l.addStderr("bbbb")
- for i in xrange(10):
- l.addStdout("cc")
- target = 1000*"aaaa" + 30 * "bbbb" + 10 * "cc"
- self.failUnlessEqual(len(l.getText()), len(target))
- self.failUnlessEqual(l.getText(), target)
- l.finish()
- self.failUnlessEqual(len(l.getText()), len(target))
- self.failUnlessEqual(l.getText(), target)
- self.failUnlessEqual(len(list(l.getChunks())), 4)
-
- def testMerge3(self):
- l = MyLog(self.basedir, "merge3")
- l.chunkSize = 100
- l.addHeader("HEADER\n")
- for i in xrange(8):
- l.addStdout(10*"a")
- for i in xrange(8):
- l.addStdout(10*"a")
- self.failUnlessEqual(list(l.getChunks()),
- [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, 100*"a"),
- (builder.STDOUT, 60*"a")])
- l.finish()
- self.failUnlessEqual(l.getText(), 160*"a")
-
- def testChunks(self):
- l = MyLog(self.basedir, "chunks")
- c1 = l.getChunks()
- l.addHeader("HEADER\n")
- l.addStdout("Some text\n")
- self.failUnlessEqual("".join(l.getChunks(onlyText=True)),
- "HEADER\nSome text\n")
- c2 = l.getChunks()
-
- l.addStdout("Some more text\n")
- self.failUnlessEqual("".join(l.getChunks(onlyText=True)),
- "HEADER\nSome text\nSome more text\n")
- c3 = l.getChunks()
-
- l.addStdout("more\n")
- l.finish()
-
- self.failUnlessEqual(list(c1), [])
- self.failUnlessEqual(list(c2), [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, "Some text\n")])
- self.failUnlessEqual(list(c3), [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT,
- "Some text\nSome more text\n")])
-
- self.failUnlessEqual(l.getText(),
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(l.getTextWithHeaders(),
- "HEADER\n" +
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(len(list(l.getChunks())), 2)
-
- def testUpgrade(self):
- l = MyLog(self.basedir, "upgrade")
- l.addHeader("HEADER\n")
- l.addStdout("Some text\n")
- l.addStdout("Some more text\n")
- l.addStdout("more\n")
- l.finish()
- self.failUnless(l.hasContents())
- # now doctor it to look like a 0.6.4-era non-upgraded logfile
- l.entries = list(l.getChunks())
- del l.filename
- os.unlink(l.getFilename())
- # now make sure we can upgrade it
- l.upgrade("upgrade")
- self.failUnlessEqual(l.getText(),
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(len(list(l.getChunks())), 2)
- self.failIf(l.entries)
-
- # now, do it again, but make it look like an upgraded 0.6.4 logfile
- # (i.e. l.filename is missing, but the contents are there on disk)
- l.entries = list(l.getChunks())
- del l.filename
- l.upgrade("upgrade")
- self.failUnlessEqual(l.getText(),
- "Some text\nSome more text\nmore\n")
- self.failUnlessEqual(len(list(l.getChunks())), 2)
- self.failIf(l.entries)
- self.failUnless(l.hasContents())
-
- def testHTMLUpgrade(self):
- l = MyHTMLLog(self.basedir, "upgrade", "log contents")
- l.upgrade("filename")
-
- def testSubscribe(self):
- l1 = MyLog(self.basedir, "subscribe1")
- l1.finish()
- self.failUnless(l1.isFinished())
-
- s = MyLogSubscriber()
- l1.subscribe(s, True)
- l1.unsubscribe(s)
- self.failIf(s.chunks)
-
- s = MyLogSubscriber()
- l1.subscribe(s, False)
- l1.unsubscribe(s)
- self.failIf(s.chunks)
-
- finished = []
- l2 = MyLog(self.basedir, "subscribe2")
- l2.waitUntilFinished().addCallback(finished.append)
- l2.addHeader("HEADER\n")
- s1 = MyLogSubscriber()
- l2.subscribe(s1, True)
- s2 = MyLogSubscriber()
- l2.subscribe(s2, False)
- self.failUnlessEqual(s1.chunks, [(builder.HEADER, "HEADER\n")])
- self.failUnlessEqual(s2.chunks, [])
-
- l2.addStdout("Some text\n")
- self.failUnlessEqual(s1.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, "Some text\n")])
- self.failUnlessEqual(s2.chunks, [(builder.STDOUT, "Some text\n")])
- l2.unsubscribe(s1)
-
- l2.addStdout("Some more text\n")
- self.failUnlessEqual(s1.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, "Some text\n")])
- self.failUnlessEqual(s2.chunks, [(builder.STDOUT, "Some text\n"),
- (builder.STDOUT, "Some more text\n"),
- ])
- self.failIf(finished)
- l2.finish()
- self.failUnlessEqual(finished, [l2])
-
- def testConsumer(self):
- l1 = MyLog(self.basedir, "consumer1")
- l1.finish()
- self.failUnless(l1.isFinished())
-
- s = MyLogConsumer()
- d = l1.subscribeConsumer(s)
- d.addCallback(self._testConsumer_1, s)
- return maybeWait(d, 5)
- def _testConsumer_1(self, res, s):
- self.failIf(s.chunks)
- self.failUnless(s.finished)
- self.failIf(s.producer) # producer should be registered and removed
-
- l2 = MyLog(self.basedir, "consumer2")
- l2.addHeader("HEADER\n")
- l2.finish()
- self.failUnless(l2.isFinished())
-
- s = MyLogConsumer()
- d = l2.subscribeConsumer(s)
- d.addCallback(self._testConsumer_2, s)
- return d
- def _testConsumer_2(self, res, s):
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n")])
- self.failUnless(s.finished)
- self.failIf(s.producer) # producer should be registered and removed
-
-
- l2 = MyLog(self.basedir, "consumer3")
- l2.chunkSize = 1000
- l2.addHeader("HEADER\n")
- l2.addStdout(800*"a")
- l2.addStdout(800*"a") # should now have two chunks on disk, 1000+600
- l2.addStdout(800*"b") # HEADER,1000+600*a on disk, 800*a in memory
- l2.addStdout(800*"b") # HEADER,1000+600*a,1000+600*b on disk
- l2.addStdout(200*"c") # HEADER,1000+600*a,1000+600*b on disk,
- # 200*c in memory
-
- s = MyLogConsumer(limit=1)
- d = l2.subscribeConsumer(s)
- d.addCallback(self._testConsumer_3, l2, s)
- return d
- def _testConsumer_3(self, res, l2, s):
- self.failUnless(s.streaming)
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n")])
- s.limit = 1
- d = s.producer.resumeProducing()
- d.addCallback(self._testConsumer_4, l2, s)
- return d
- def _testConsumer_4(self, res, l2, s):
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, 1000*"a"),
- ])
- s.limit = None
- d = s.producer.resumeProducing()
- d.addCallback(self._testConsumer_5, l2, s)
- return d
- def _testConsumer_5(self, res, l2, s):
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, 1000*"a"),
- (builder.STDOUT, 600*"a"),
- (builder.STDOUT, 1000*"b"),
- (builder.STDOUT, 600*"b"),
- (builder.STDOUT, 200*"c")])
- l2.addStdout(1000*"c") # HEADER,1600*a,1600*b,1200*c on disk
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, 1000*"a"),
- (builder.STDOUT, 600*"a"),
- (builder.STDOUT, 1000*"b"),
- (builder.STDOUT, 600*"b"),
- (builder.STDOUT, 200*"c"),
- (builder.STDOUT, 1000*"c")])
- l2.finish()
- self.failUnlessEqual(s.chunks, [(builder.HEADER, "HEADER\n"),
- (builder.STDOUT, 1000*"a"),
- (builder.STDOUT, 600*"a"),
- (builder.STDOUT, 1000*"b"),
- (builder.STDOUT, 600*"b"),
- (builder.STDOUT, 200*"c"),
- (builder.STDOUT, 1000*"c")])
- self.failIf(s.producer)
- self.failUnless(s.finished)
-
- def testLargeSummary(self):
- bigtext = "a" * 200000 # exceed the NetstringReceiver 100KB limit
- l = MyLog(self.basedir, "large", bigtext)
- s = MyLogConsumer()
- d = l.subscribeConsumer(s)
- def _check(res):
- for ctype,chunk in s.chunks:
- self.failUnless(len(chunk) < 100000)
- merged = "".join([c[1] for c in s.chunks])
- self.failUnless(merged == bigtext)
- d.addCallback(_check)
- # when this fails, it fails with a timeout, and there is an exception
- # sent to log.err(). This AttributeError exception is in
- # NetstringReceiver.dataReceived where it does
- # self.transport.loseConnection() because of the NetstringParseError,
- # however self.transport is None
- return maybeWait(d, 5)
- testLargeSummary.timeout = 5
-
-config_base = """
-from buildbot.process import factory, step
-s = factory.s
-
-f1 = factory.QuickBuildFactory('fakerep', 'cvsmodule', configure=None)
-
-f2 = factory.BuildFactory([
- s(step.Dummy, timeout=1),
- s(step.RemoteDummy, timeout=2),
- ])
-
-BuildmasterConfig = c = {}
-c['bots'] = [['bot1', 'sekrit']]
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = []
-c['builders'].append({'name':'quick', 'slavename':'bot1',
- 'builddir': 'quickdir', 'factory': f1})
-c['slavePortnum'] = 0
-"""
-
-config_2 = config_base + """
-c['builders'] = [{'name': 'dummy', 'slavename': 'bot1',
- 'builddir': 'dummy1', 'factory': f2},
- {'name': 'testdummy', 'slavename': 'bot1',
- 'builddir': 'dummy2', 'factory': f2, 'category': 'test'}]
-"""
-
-class STarget(base.StatusReceiver):
- debug = False
-
- def __init__(self, mode):
- self.mode = mode
- self.events = []
- def announce(self):
- if self.debug:
- print self.events[-1]
-
- def builderAdded(self, name, builder):
- self.events.append(("builderAdded", name, builder))
- self.announce()
- if "builder" in self.mode:
- return self
- def builderChangedState(self, name, state):
- self.events.append(("builderChangedState", name, state))
- self.announce()
- def buildStarted(self, name, build):
- self.events.append(("buildStarted", name, build))
- self.announce()
- if "eta" in self.mode:
- self.eta_build = build.getETA()
- if "build" in self.mode:
- return self
- def buildETAUpdate(self, build, ETA):
- self.events.append(("buildETAUpdate", build, ETA))
- self.announce()
- def stepStarted(self, build, step):
- self.events.append(("stepStarted", build, step))
- self.announce()
- if 0 and "eta" in self.mode:
- print "TIMES", step.getTimes()
- print "ETA", step.getETA()
- print "EXP", step.getExpectations()
- if "step" in self.mode:
- return self
- def stepETAUpdate(self, build, step, ETA, expectations):
- self.events.append(("stepETAUpdate", build, step, ETA, expectations))
- self.announce()
- def logStarted(self, build, step, log):
- self.events.append(("logStarted", build, step, log))
- self.announce()
- def logFinished(self, build, step, log):
- self.events.append(("logFinished", build, step, log))
- self.announce()
- def stepFinished(self, build, step, results):
- self.events.append(("stepFinished", build, step, results))
- if 0 and "eta" in self.mode:
- print "post-EXP", step.getExpectations()
- self.announce()
- def buildFinished(self, name, build, results):
- self.events.append(("buildFinished", name, build, results))
- self.announce()
- def builderRemoved(self, name):
- self.events.append(("builderRemoved", name))
- self.announce()
-
-class Subscription(RunMixin, unittest.TestCase):
- # verify that StatusTargets can subscribe/unsubscribe properly
-
- def testSlave(self):
- m = self.master
- s = m.getStatus()
- self.t1 = t1 = STarget(["builder"])
- #t1.debug = True; print
- s.subscribe(t1)
- self.failUnlessEqual(len(t1.events), 0)
-
- self.t3 = t3 = STarget(["builder", "build", "step"])
- s.subscribe(t3)
-
- m.loadConfig(config_2)
- m.readConfig = True
- m.startService()
-
- self.failUnlessEqual(len(t1.events), 4)
- self.failUnlessEqual(t1.events[0][0:2], ("builderAdded", "dummy"))
- self.failUnlessEqual(t1.events[1],
- ("builderChangedState", "dummy", "offline"))
- self.failUnlessEqual(t1.events[2][0:2], ("builderAdded", "testdummy"))
- self.failUnlessEqual(t1.events[3],
- ("builderChangedState", "testdummy", "offline"))
- t1.events = []
-
- self.failUnlessEqual(s.getBuilderNames(), ["dummy", "testdummy"])
- self.failUnlessEqual(s.getBuilderNames(categories=['test']),
- ["testdummy"])
- self.s1 = s1 = s.getBuilder("dummy")
- self.failUnlessEqual(s1.getName(), "dummy")
- self.failUnlessEqual(s1.getState(), ("offline", []))
- self.failUnlessEqual(s1.getCurrentBuilds(), [])
- self.failUnlessEqual(s1.getLastFinishedBuild(), None)
- self.failUnlessEqual(s1.getBuild(-1), None)
- #self.failUnlessEqual(s1.getEvent(-1), foo("created"))
-
- # status targets should, upon being subscribed, immediately get a
- # list of all current builders matching their category
- self.t2 = t2 = STarget([])
- s.subscribe(t2)
- self.failUnlessEqual(len(t2.events), 2)
- self.failUnlessEqual(t2.events[0][0:2], ("builderAdded", "dummy"))
- self.failUnlessEqual(t2.events[1][0:2], ("builderAdded", "testdummy"))
-
- d = self.connectSlave(builders=["dummy", "testdummy"])
- d.addCallback(self._testSlave_1, t1)
- return maybeWait(d)
-
- def _testSlave_1(self, res, t1):
- self.failUnlessEqual(len(t1.events), 2)
- self.failUnlessEqual(t1.events[0],
- ("builderChangedState", "dummy", "idle"))
- self.failUnlessEqual(t1.events[1],
- ("builderChangedState", "testdummy", "idle"))
- t1.events = []
-
- c = interfaces.IControl(self.master)
- req = BuildRequest("forced build for testing", SourceStamp())
- c.getBuilder("dummy").requestBuild(req)
- d = req.waitUntilFinished()
- d2 = self.master.botmaster.waitUntilBuilderIdle("dummy")
- dl = defer.DeferredList([d, d2])
- dl.addCallback(self._testSlave_2)
- return dl
-
- def _testSlave_2(self, res):
- # t1 subscribes to builds, but not anything lower-level
- ev = self.t1.events
- self.failUnlessEqual(len(ev), 4)
- self.failUnlessEqual(ev[0][0:3],
- ("builderChangedState", "dummy", "building"))
- self.failUnlessEqual(ev[1][0], "buildStarted")
- self.failUnlessEqual(ev[2][0:2]+ev[2][3:4],
- ("buildFinished", "dummy", builder.SUCCESS))
- self.failUnlessEqual(ev[3][0:3],
- ("builderChangedState", "dummy", "idle"))
-
- self.failUnlessEqual([ev[0] for ev in self.t3.events],
- ["builderAdded",
- "builderChangedState", # offline
- "builderAdded",
- "builderChangedState", # idle
- "builderChangedState", # offline
- "builderChangedState", # idle
- "builderChangedState", # building
- "buildStarted",
- "stepStarted", "stepETAUpdate", "stepFinished",
- "stepStarted", "stepETAUpdate",
- "logStarted", "logFinished", "stepFinished",
- "buildFinished",
- "builderChangedState", # idle
- ])
-
- b = self.s1.getLastFinishedBuild()
- self.failUnless(b)
- self.failUnlessEqual(b.getBuilder().getName(), "dummy")
- self.failUnlessEqual(b.getNumber(), 0)
- self.failUnlessEqual(b.getSourceStamp(), (None, None, None))
- self.failUnlessEqual(b.getReason(), "forced build for testing")
- self.failUnlessEqual(b.getChanges(), [])
- self.failUnlessEqual(b.getResponsibleUsers(), [])
- self.failUnless(b.isFinished())
- self.failUnlessEqual(b.getText(), ['build', 'successful'])
- self.failUnlessEqual(b.getColor(), "green")
- self.failUnlessEqual(b.getResults(), builder.SUCCESS)
-
- steps = b.getSteps()
- self.failUnlessEqual(len(steps), 2)
-
- eta = 0
- st1 = steps[0]
- self.failUnlessEqual(st1.getName(), "dummy")
- self.failUnless(st1.isFinished())
- self.failUnlessEqual(st1.getText(), ["delay", "1 secs"])
- start,finish = st1.getTimes()
- self.failUnless(0.5 < (finish-start) < 10)
- self.failUnlessEqual(st1.getExpectations(), [])
- self.failUnlessEqual(st1.getLogs(), [])
- eta += finish-start
-
- st2 = steps[1]
- self.failUnlessEqual(st2.getName(), "remote dummy")
- self.failUnless(st2.isFinished())
- self.failUnlessEqual(st2.getText(),
- ["remote", "delay", "2 secs"])
- start,finish = st2.getTimes()
- self.failUnless(1.5 < (finish-start) < 10)
- eta += finish-start
- self.failUnlessEqual(st2.getExpectations(), [('output', 38, None)])
- logs = st2.getLogs()
- self.failUnlessEqual(len(logs), 1)
- self.failUnlessEqual(logs[0].getName(), "log")
- self.failUnlessEqual(logs[0].getText(), "data")
-
- self.eta = eta
- # now we run it a second time, and we should have an ETA
-
- self.t4 = t4 = STarget(["builder", "build", "eta"])
- self.master.getStatus().subscribe(t4)
- c = interfaces.IControl(self.master)
- req = BuildRequest("forced build for testing", SourceStamp())
- c.getBuilder("dummy").requestBuild(req)
- d = req.waitUntilFinished()
- d2 = self.master.botmaster.waitUntilBuilderIdle("dummy")
- dl = defer.DeferredList([d, d2])
- dl.addCallback(self._testSlave_3)
- return dl
-
- def _testSlave_3(self, res):
- t4 = self.t4
- eta = self.eta
- self.failUnless(eta-1 < t4.eta_build < eta+1, # should be 3 seconds
- "t4.eta_build was %g, not in (%g,%g)"
- % (t4.eta_build, eta-1, eta+1))
-
-
-class Client(unittest.TestCase):
- def testAdaptation(self):
- b = builder.BuilderStatus("bname")
- b2 = client.makeRemote(b)
- self.failUnless(isinstance(b2, client.RemoteBuilder))
- b3 = client.makeRemote(None)
- self.failUnless(b3 is None)
diff --git a/buildbot/buildbot-source/buildbot/test/test_steps.py b/buildbot/buildbot-source/buildbot/test/test_steps.py
deleted file mode 100644
index bbe2871c2..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_steps.py
+++ /dev/null
@@ -1,236 +0,0 @@
-# -*- test-case-name: buildbot.test.test_steps -*-
-
-# create the BuildStep with a fake .remote instance that logs the
-# .callRemote invocations and compares them against the expected calls. Then
-# the test harness should send statusUpdate() messages in with assorted
-# data, eventually calling remote_complete(). Then we can verify that the
-# Step's rc was correct, and that the status it was supposed to return
-# matches.
-
-# sometimes, .callRemote should raise an exception because of a stale
-# reference. Sometimes it should errBack with an UnknownCommand failure.
-# Or other failure.
-
-# todo: test batched updates, by invoking remote_update(updates) instead of
-# statusUpdate(update). Also involves interrupted builds.
-
-import os, sys, time
-
-from twisted.trial import unittest
-from twisted.internet import reactor
-from twisted.internet.defer import Deferred
-
-from buildbot.sourcestamp import SourceStamp
-from buildbot.process import step, base, factory
-from buildbot.process.step import ShellCommand #, ShellCommands
-from buildbot.status import builder
-from buildbot.test.runutils import RunMixin
-from buildbot.twcompat import maybeWait
-from buildbot.slave import commands
-
-from twisted.python import log
-#log.startLogging(sys.stdout)
-
-class MyShellCommand(ShellCommand):
- started = False
- def runCommand(self, c):
- self.started = True
- self.rc = c
- return ShellCommand.runCommand(self, c)
-
-class FakeBuild:
- pass
-class FakeBuilder:
- statusbag = None
- name = "fakebuilder"
-class FakeSlaveBuilder:
- def getSlaveCommandVersion(self, command, oldversion=None):
- return "1.10"
-
-class FakeRemote:
- def __init__(self):
- self.events = []
- self.remoteCalls = 0
- #self.callRemoteNotifier = None
- def callRemote(self, methname, *args):
- event = ["callRemote", methname, args]
- self.events.append(event)
-## if self.callRemoteNotifier:
-## reactor.callLater(0, self.callRemoteNotifier, event)
- self.remoteCalls += 1
- self.deferred = Deferred()
- return self.deferred
- def notifyOnDisconnect(self, callback):
- pass
- def dontNotifyOnDisconnect(self, callback):
- pass
-
-
-class BuildStep(unittest.TestCase):
- def setUp(self):
- self.builder = FakeBuilder()
- self.builder_status = builder.BuilderStatus("fakebuilder")
- self.builder_status.basedir = "test_steps"
- self.builder_status.nextBuildNumber = 0
- os.mkdir(self.builder_status.basedir)
- self.build_status = self.builder_status.newBuild()
- req = base.BuildRequest("reason", SourceStamp())
- self.build = base.Build([req])
- self.build.build_status = self.build_status # fake it
- self.build.builder = self.builder
- self.build.slavebuilder = FakeSlaveBuilder()
- self.remote = FakeRemote()
- self.finished = 0
-
- def callback(self, results):
- self.failed = 0
- self.failure = None
- self.results = results
- self.finished = 1
- def errback(self, failure):
- self.failed = 1
- self.failure = failure
- self.results = None
- self.finished = 1
-
- def testShellCommand1(self):
- cmd = "argle bargle"
- dir = "murkle"
- expectedEvents = []
- step.RemoteCommand.commandCounter[0] = 3
- c = MyShellCommand(workdir=dir, command=cmd, build=self.build,
- timeout=10)
- self.assertEqual(self.remote.events, expectedEvents)
- self.build_status.addStep(c)
- d = c.startStep(self.remote)
- self.failUnless(c.started)
- rc = c.rc
- d.addCallbacks(self.callback, self.errback)
- timeout = time.time() + 10
- while self.remote.remoteCalls == 0:
- if time.time() > timeout:
- self.fail("timeout")
- reactor.iterate(0.01)
- expectedEvents.append(["callRemote", "startCommand",
- (rc, "3",
- "shell",
- {'command': "argle bargle",
- 'workdir': "murkle",
- 'want_stdout': 1,
- 'want_stderr': 1,
- 'timeout': 10,
- 'env': None}) ] )
- self.assertEqual(self.remote.events, expectedEvents)
-
- # we could do self.remote.deferred.errback(UnknownCommand) here. We
- # could also do .callback(), but generally the master end silently
- # ignores the slave's ack
-
- logs = c.step_status.getLogs()
- for log in logs:
- if log.getName() == "log":
- break
-
- rc.remoteUpdate({'header':
- "command 'argle bargle' in dir 'murkle'\n\n"})
- rc.remoteUpdate({'stdout': "foo\n"})
- self.assertEqual(log.getText(), "foo\n")
- self.assertEqual(log.getTextWithHeaders(),
- "command 'argle bargle' in dir 'murkle'\n\n"
- "foo\n")
- rc.remoteUpdate({'stderr': "bar\n"})
- self.assertEqual(log.getText(), "foo\nbar\n")
- self.assertEqual(log.getTextWithHeaders(),
- "command 'argle bargle' in dir 'murkle'\n\n"
- "foo\nbar\n")
- rc.remoteUpdate({'rc': 0})
- self.assertEqual(rc.rc, 0)
-
- rc.remote_complete()
- # that should fire the Deferred
- timeout = time.time() + 10
- while not self.finished:
- if time.time() > timeout:
- self.fail("timeout")
- reactor.iterate(0.01)
- self.assertEqual(self.failed, 0)
- self.assertEqual(self.results, 0)
-
-class Steps(unittest.TestCase):
- def testMultipleStepInstances(self):
- steps = [
- (step.CVS, {'cvsroot': "root", 'cvsmodule': "module"}),
- (step.Configure, {'command': "./configure"}),
- (step.Compile, {'command': "make"}),
- (step.Compile, {'command': "make more"}),
- (step.Compile, {'command': "make evenmore"}),
- (step.Test, {'command': "make test"}),
- (step.Test, {'command': "make testharder"}),
- ]
- f = factory.ConfigurableBuildFactory(steps)
- req = base.BuildRequest("reason", SourceStamp())
- b = f.newBuild([req])
- #for s in b.steps: print s.name
-
-class VersionCheckingStep(step.BuildStep):
- def start(self):
- # give our test a chance to run. It is non-trivial for a buildstep to
- # claw its way back out to the test case which is currently running.
- master = self.build.builder.botmaster.parent
- checker = master._checker
- checker(self)
- # then complete
- self.finished(step.SUCCESS)
-
-version_config = """
-from buildbot.process import factory, step
-from buildbot.test.test_steps import VersionCheckingStep
-BuildmasterConfig = c = {}
-f1 = factory.BuildFactory([
- factory.s(VersionCheckingStep),
- ])
-c['bots'] = [['bot1', 'sekrit']]
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = [{'name':'quick', 'slavename':'bot1',
- 'builddir': 'quickdir', 'factory': f1}]
-c['slavePortnum'] = 0
-"""
-
-class Version(RunMixin, unittest.TestCase):
- def setUp(self):
- RunMixin.setUp(self)
- self.master.loadConfig(version_config)
- self.master.startService()
- d = self.connectSlave(["quick"])
- return maybeWait(d)
-
- def doBuild(self, buildername):
- br = base.BuildRequest("forced", SourceStamp())
- d = br.waitUntilFinished()
- self.control.getBuilder(buildername).requestBuild(br)
- return d
-
-
- def checkCompare(self, s):
- v = s.slaveVersion("svn", None)
-        # this ensures that we are getting the version correctly
- self.failUnlessEqual(s.slaveVersion("svn", None), commands.cvs_ver)
- # and that non-existent commands do not provide a version
- self.failUnlessEqual(s.slaveVersion("NOSUCHCOMMAND"), None)
- # TODO: verify that a <=0.5.0 buildslave (which does not implement
- # remote_getCommands) handles oldversion= properly. This requires a
- # mutant slave which does not offer that method.
- #self.failUnlessEqual(s.slaveVersion("NOSUCHCOMMAND", "old"), "old")
-
- # now check the comparison functions
- self.failIf(s.slaveVersionIsOlderThan("svn", commands.cvs_ver))
- self.failIf(s.slaveVersionIsOlderThan("svn", "1.1"))
- self.failUnless(s.slaveVersionIsOlderThan("svn",
- commands.cvs_ver + ".1"))
-
- def testCompare(self):
- self.master._checker = self.checkCompare
- d = self.doBuild("quick")
- return maybeWait(d)
-
diff --git a/buildbot/buildbot-source/buildbot/test/test_twisted.py b/buildbot/buildbot-source/buildbot/test/test_twisted.py
deleted file mode 100644
index aa295477c..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_twisted.py
+++ /dev/null
@@ -1,184 +0,0 @@
-# -*- test-case-name: buildbot.test.test_twisted -*-
-
-from twisted.trial import unittest
-
-from buildbot.process.step_twisted import countFailedTests, Trial
-from buildbot.status import builder
-
-noisy = 0
-if noisy:
- from twisted.python.log import startLogging
- import sys
- startLogging(sys.stdout)
-
-out1 = """
--------------------------------------------------------------------------------
-Ran 13 tests in 1.047s
-
-OK
-"""
-
-out2 = """
--------------------------------------------------------------------------------
-Ran 12 tests in 1.040s
-
-FAILED (failures=1)
-"""
-
-out3 = """
- NotImplementedError
--------------------------------------------------------------------------------
-Ran 13 tests in 1.042s
-
-FAILED (failures=1, errors=1)
-"""
-
-out4 = """
-unparseable
-"""
-
-out5 = """
- File "/usr/home/warner/stuff/python/twisted/Twisted-CVS/twisted/test/test_defer.py", line 79, in testTwoCallbacks
- self.fail("just because")
- File "/usr/home/warner/stuff/python/twisted/Twisted-CVS/twisted/trial/unittest.py", line 21, in fail
- raise AssertionError, message
- AssertionError: just because
-unparseable
-"""
-
-out6 = """
-===============================================================================
-SKIPPED: testProtocolLocalhost (twisted.flow.test.test_flow.FlowTest)
--------------------------------------------------------------------------------
-XXX freezes, fixme
-===============================================================================
-SKIPPED: testIPv6 (twisted.names.test.test_names.HostsTestCase)
--------------------------------------------------------------------------------
-IPv6 support is not in our hosts resolver yet
-===============================================================================
-EXPECTED FAILURE: testSlots (twisted.test.test_rebuild.NewStyleTestCase)
--------------------------------------------------------------------------------
-Traceback (most recent call last):
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/unittest.py", line 240, in _runPhase
- stage(*args, **kwargs)
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/unittest.py", line 262, in _main
- self.runner(self.method)
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/runner.py", line 95, in runTest
- method()
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/test/test_rebuild.py", line 130, in testSlots
- rebuild.updateInstance(self.m.SlottedClass())
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/python/rebuild.py", line 114, in updateInstance
- self.__class__ = latestClass(self.__class__)
-TypeError: __class__ assignment: 'SlottedClass' object layout differs from 'SlottedClass'
-===============================================================================
-FAILURE: testBatchFile (twisted.conch.test.test_sftp.TestOurServerBatchFile)
--------------------------------------------------------------------------------
-Traceback (most recent call last):
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/unittest.py", line 240, in _runPhase
- stage(*args, **kwargs)
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/unittest.py", line 262, in _main
- self.runner(self.method)
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/runner.py", line 95, in runTest
- method()
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/conch/test/test_sftp.py", line 450, in testBatchFile
- self.failUnlessEqual(res[1:-2], ['testDirectory', 'testRemoveFile', 'testRenameFile', 'testfile1'])
- File "/Users/buildbot/Buildbot/twisted/OSX-full2.3/Twisted/twisted/trial/unittest.py", line 115, in failUnlessEqual
- raise FailTest, (msg or '%r != %r' % (first, second))
-FailTest: [] != ['testDirectory', 'testRemoveFile', 'testRenameFile', 'testfile1']
--------------------------------------------------------------------------------
-Ran 1454 tests in 911.579s
-
-FAILED (failures=2, skips=49, expectedFailures=9)
-Exception exceptions.AttributeError: "'NoneType' object has no attribute 'StringIO'" in <bound method RemoteReference.__del__ of <twisted.spread.pb.RemoteReference instance at 0x27036c0>> ignored
-"""
-
-class MyTrial(Trial):
- def addTestResult(self, testname, results, text, logs):
- self.results.append((testname, results, text, logs))
- def addCompleteLog(self, name, log):
- pass
-
-class MyLogFile:
- def __init__(self, text):
- self.text = text
- def getText(self):
- return self.text
-
-
-class Count(unittest.TestCase):
-
- def count(self, total, failures=0, errors=0,
- expectedFailures=0, unexpectedSuccesses=0, skips=0):
- d = {
- 'total': total,
- 'failures': failures,
- 'errors': errors,
- 'expectedFailures': expectedFailures,
- 'unexpectedSuccesses': unexpectedSuccesses,
- 'skips': skips,
- }
- return d
-
- def testCountFailedTests(self):
- count = countFailedTests(out1)
- self.assertEquals(count, self.count(total=13))
- count = countFailedTests(out2)
- self.assertEquals(count, self.count(total=12, failures=1))
- count = countFailedTests(out3)
- self.assertEquals(count, self.count(total=13, failures=1, errors=1))
- count = countFailedTests(out4)
- self.assertEquals(count, self.count(total=None))
- count = countFailedTests(out5)
- self.assertEquals(count, self.count(total=None))
-
-class Parse(unittest.TestCase):
- def failUnlessIn(self, substr, string):
- self.failUnless(string.find(substr) != -1)
-
- def testParse(self):
- t = MyTrial(build=None, workdir=".", testpath=None, testChanges=True)
- t.results = []
- log = MyLogFile(out6)
- t.createSummary(log)
-
- self.failUnlessEqual(len(t.results), 4)
- r1, r2, r3, r4 = t.results
- testname, results, text, logs = r1
- self.failUnlessEqual(testname,
- ("twisted", "flow", "test", "test_flow",
- "FlowTest", "testProtocolLocalhost"))
- self.failUnlessEqual(results, builder.SKIPPED)
- self.failUnlessEqual(text, ['skipped'])
- self.failUnlessIn("XXX freezes, fixme", logs)
- self.failUnless(logs.startswith("SKIPPED:"))
- self.failUnless(logs.endswith("fixme\n"))
-
- testname, results, text, logs = r2
- self.failUnlessEqual(testname,
- ("twisted", "names", "test", "test_names",
- "HostsTestCase", "testIPv6"))
- self.failUnlessEqual(results, builder.SKIPPED)
- self.failUnlessEqual(text, ['skipped'])
- self.failUnless(logs.startswith("SKIPPED: testIPv6"))
- self.failUnless(logs.endswith("IPv6 support is not in our hosts resolver yet\n"))
-
- testname, results, text, logs = r3
- self.failUnlessEqual(testname,
- ("twisted", "test", "test_rebuild",
- "NewStyleTestCase", "testSlots"))
- self.failUnlessEqual(results, builder.SUCCESS)
- self.failUnlessEqual(text, ['expected', 'failure'])
- self.failUnless(logs.startswith("EXPECTED FAILURE: "))
- self.failUnlessIn("\nTraceback ", logs)
- self.failUnless(logs.endswith("layout differs from 'SlottedClass'\n"))
-
- testname, results, text, logs = r4
- self.failUnlessEqual(testname,
- ("twisted", "conch", "test", "test_sftp",
- "TestOurServerBatchFile", "testBatchFile"))
- self.failUnlessEqual(results, builder.FAILURE)
- self.failUnlessEqual(text, ['failure'])
- self.failUnless(logs.startswith("FAILURE: "))
- self.failUnlessIn("Traceback ", logs)
- self.failUnless(logs.endswith("'testRenameFile', 'testfile1']\n"))
-
diff --git a/buildbot/buildbot-source/buildbot/test/test_util.py b/buildbot/buildbot-source/buildbot/test/test_util.py
deleted file mode 100644
index b375390a7..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_util.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- test-case-name: buildbot.test.test_util -*-
-
-from twisted.trial import unittest
-
-from buildbot import util
-
-
-class Foo(util.ComparableMixin):
- compare_attrs = ["a", "b"]
-
- def __init__(self, a, b, c):
- self.a, self.b, self.c = a,b,c
-
-
-class Bar(Foo, util.ComparableMixin):
- compare_attrs = ["b", "c"]
-
-class Compare(unittest.TestCase):
- def testCompare(self):
- f1 = Foo(1, 2, 3)
- f2 = Foo(1, 2, 4)
- f3 = Foo(1, 3, 4)
- b1 = Bar(1, 2, 3)
- self.failUnless(f1 == f2)
- self.failIf(f1 == f3)
- self.failIf(f1 == b1)
diff --git a/buildbot/buildbot-source/buildbot/test/test_vc.py b/buildbot/buildbot-source/buildbot/test/test_vc.py
deleted file mode 100644
index f65e75575..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_vc.py
+++ /dev/null
@@ -1,2162 +0,0 @@
-# -*- test-case-name: buildbot.test.test_vc -*-
-
-from __future__ import generators
-
-import sys, os, signal, shutil, time, re
-from email.Utils import mktime_tz, parsedate_tz
-
-from twisted.trial import unittest
-from twisted.internet import defer, reactor, utils
-
-#defer.Deferred.debug = True
-
-from twisted.python import log
-#log.startLogging(sys.stderr)
-
-from buildbot import master, interfaces
-from buildbot.slave import bot, commands
-from buildbot.slave.commands import rmdirRecursive
-from buildbot.status.builder import SUCCESS, FAILURE
-from buildbot.process import step, base
-from buildbot.changes import changes
-from buildbot.sourcestamp import SourceStamp
-from buildbot.twcompat import maybeWait, which
-from buildbot.scripts import tryclient
-
-#step.LoggedRemoteCommand.debug = True
-
-# buildbot.twcompat will patch these into t.i.defer if necessary
-from twisted.internet.defer import waitForDeferred, deferredGenerator
-
-# Most of these tests (all but SourceStamp) depend upon having a set of
-# repositories from which we can perform checkouts. These repositories are
-# created by the setUp method at the start of each test class. In earlier
-# versions these repositories were created offline and distributed with a
-# separate tarball named 'buildbot-test-vc-1.tar.gz'. This is no longer
-# necessary.
-
-# CVS requires a local file repository. Providing remote access is beyond
-# what this test program can reasonably set up (it would need pserver or ssh).
-
-# SVN requires a local file repository. Providing remote access over HTTP
-# would require an apache server with DAV support and mod_dav_svn, well
-# beyond what we can test from here.
-
-# Arch and Darcs both allow remote (read-only) operation with any web
-# server. We test both local file access and HTTP access (by spawning a
-# small web server to provide access to the repository files while the test
-# is running).
-
-
-config_vc = """
-from buildbot.process import factory, step
-s = factory.s
-
-f1 = factory.BuildFactory([
- %s,
- ])
-c = {}
-c['bots'] = [['bot1', 'sekrit']]
-c['sources'] = []
-c['schedulers'] = []
-c['builders'] = [{'name': 'vc', 'slavename': 'bot1',
- 'builddir': 'vc-dir', 'factory': f1}]
-c['slavePortnum'] = 0
-BuildmasterConfig = c
-"""
-
-p0_diff = r"""
-Index: subdir/subdir.c
-===================================================================
-RCS file: /home/warner/stuff/Projects/BuildBot/code-arch/_trial_temp/test_vc/repositories/CVS-Repository/sample/subdir/subdir.c,v
-retrieving revision 1.1.1.1
-diff -u -r1.1.1.1 subdir.c
---- subdir/subdir.c 14 Aug 2005 01:32:49 -0000 1.1.1.1
-+++ subdir/subdir.c 14 Aug 2005 01:36:15 -0000
-@@ -4,6 +4,6 @@
- int
- main(int argc, const char *argv[])
- {
-- printf("Hello subdir.\n");
-+ printf("Hello patched subdir.\n");
- return 0;
- }
-"""
-
-# this patch does not include the filename headers, so it is
-# patchlevel-neutral
-TRY_PATCH = '''
-@@ -5,6 +5,6 @@
- int
- main(int argc, const char *argv[])
- {
-- printf("Hello subdir.\\n");
-+ printf("Hello try.\\n");
- return 0;
- }
-'''
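
For reference, these patches reach a build wrapped in a SourceStamp whose patch argument is a (patchlevel, diff-text) tuple; do_patch() below uses exactly this shape. A small sketch, assuming one of the populated helpers defined later in this file:

from buildbot.sourcestamp import SourceStamp

def patched_trunk_stamp(helper):
    # patchlevel 0 matches the 'Index: subdir/subdir.c' header carried by
    # p0_diff; the slave-side source step applies the diff to the fresh
    # checkout, roughly equivalent to running 'patch -p0' in the workdir
    return SourceStamp(revision=helper.trunk[-1], patch=(0, p0_diff))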
-
-MAIN_C = '''
-// this is main.c
-#include <stdio.h>
-
-int
-main(int argc, const char *argv[])
-{
- printf("Hello world.\\n");
- return 0;
-}
-'''
-
-BRANCH_C = '''
-// this is main.c
-#include <stdio.h>
-
-int
-main(int argc, const char *argv[])
-{
- printf("Hello branch.\\n");
- return 0;
-}
-'''
-
-VERSION_C = '''
-// this is version.c
-#include <stdio.h>
-
-int
-main(int argc, const char *argv[])
-{
- printf("Hello world, version=%d\\n");
- return 0;
-}
-'''
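
Note that the %d inside VERSION_C does double duty: Python's % formatting consumes it when the helpers write out version.c, so the generated file carries the literal version number that the update tests later grep for. A quick illustration:

version = 3                      # illustrative value
version_c = VERSION_C % version
# version_c now contains:  printf("Hello world, version=3\n");
# which is what shouldContain(workdir, "version.c", "version=%d" % version)
# later looks for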
-
-SUBDIR_C = '''
-// this is subdir/subdir.c
-#include <stdio.h>
-
-int
-main(int argc, const char *argv[])
-{
- printf("Hello subdir.\\n");
- return 0;
-}
-'''
-
-TRY_C = '''
-// this is subdir/subdir.c
-#include <stdio.h>
-
-int
-main(int argc, const char *argv[])
-{
- printf("Hello try.\\n");
- return 0;
-}
-'''
-
-class VCS_Helper:
- # this is a helper class which keeps track of whether each VC system is
- # available, and whether the repository for each has been created. There
- # is one instance of this class, at module level, shared between all test
- # cases.
-
- def __init__(self):
- self._helpers = {}
- self._isCapable = {}
- self._excuses = {}
- self._repoReady = {}
-
- def registerVC(self, name, helper):
- self._helpers[name] = helper
- self._repoReady[name] = False
-
- def skipIfNotCapable(self, name):
- """Either return None, or raise SkipTest"""
- d = self.capable(name)
- def _maybeSkip(res):
- if not res[0]:
- raise unittest.SkipTest(res[1])
- d.addCallback(_maybeSkip)
- return d
-
- def capable(self, name):
- """Return a Deferred that fires with (True,None) if this host offers
- the given VC tool, or (False,excuse) if it does not (and therefore
- the tests should be skipped)."""
-
- if self._isCapable.has_key(name):
- if self._isCapable[name]:
- return defer.succeed((True,None))
- else:
- return defer.succeed((False, self._excuses[name]))
- d = defer.maybeDeferred(self._helpers[name].capable)
- def _capable(res):
- if res[0]:
- self._isCapable[name] = True
- else:
- self._excuses[name] = res[1]
- return res
- d.addCallback(_capable)
- return d
-
- def getHelper(self, name):
- return self._helpers[name]
-
- def createRepository(self, name):
- """Return a Deferred that fires when the repository is set up."""
- if self._repoReady[name]:
- return defer.succeed(True)
- d = self._helpers[name].createRepository()
- def _ready(res):
- self._repoReady[name] = True
- d.addCallback(_ready)
- return d
-
-VCS = VCS_Helper()
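
A typical consumer of this module-level instance registers its helper once at import time and then, from a test's setUp, skips when the tool is absent and lazily creates the shared repository. A sketch using a hypothetical 'foo' VC tool (FooHelper and the 'foo' name are illustrative only):

class FooHelper(BaseHelper):
    def capable(self):
        # capable() is also responsible for setting self.vcexe
        paths = which('foo')
        if not paths:
            return (False, "foo is not installed")
        self.vcexe = paths[0]
        return (True, None)

VCS.registerVC("foo", FooHelper())

# and inside a TestCase:
#   def setUp(self):
#       d = VCS.skipIfNotCapable("foo")
#       d.addCallback(lambda res: VCS.createRepository("foo"))
#       return maybeWait(d)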
-
-class SignalMixin:
- sigchldHandler = None
-
- def setUpClass(self):
- # make sure the SIGCHLD handler is installed, as it would be after
- # reactor.run(). The problem is that the reactor may not have been run
- # yet by the time this test runs.
- if hasattr(reactor, "_handleSigchld") and hasattr(signal, "SIGCHLD"):
- self.sigchldHandler = signal.signal(signal.SIGCHLD,
- reactor._handleSigchld)
-
- def tearDownClass(self):
- if self.sigchldHandler:
- signal.signal(signal.SIGCHLD, self.sigchldHandler)
-
-
-# the overall plan here:
-#
-# Each VC system is tested separately, all using the same source tree defined
-# by the MAIN_C/VERSION_C/SUBDIR_C constants above. Each VC system gets its
-# own TestCase subclass. The first test case that is run will create the
-# repository during setUp(), making two branches: 'trunk' and 'branch'. The
-# trunk gets a copy of all the files; the branch gets the BRANCH_C variant of
-# main.c.
-#
-# then testCheckout is run, which does a number of checkout/clobber/update
-# builds. These all use trunk r1. It then runs self.helper.vc_revise(), which
-# modifies version.c, then performs another build and makes sure the tree has
-# been updated.
-#
-# testBranch uses trunk-r1 and branch-r1, making sure that we clobber the
-# tree properly when we switch between them
-#
-# testPatch does a trunk-r1 checkout and applies a patch.
-#
-# testTryGetPatch performs a trunk-r1 checkout, modifies some files, then
-# verifies that tryclient.getSourceStamp figures out the base revision and
-# what got changed.
-
-
-# createRepository() makes a repository at r1 with three files: main.c,
-# version.c, and subdir/subdir.c. It also creates a branch from r1 in which
-# main.c says "Hello branch" instead of "Hello world". self.trunk[] contains
-# revision stamps for everything on the trunk, and self.branch[] does the
-# same for the branch.
-
-# vc_revise() checks out a tree at HEAD, changes version.c, then checks it
-# back in. The new version stamp is appended to self.trunk[]. The tree is
-# removed afterwards.
-
-# vc_try_checkout(workdir, rev) checks out a tree at REV, then changes
-# subdir/subdir.c to say 'Hello try'
-# vc_try_finish(workdir) removes the tree and cleans up any VC state
-# necessary (like deleting the Arch archive entry).
-
-
-class BaseHelper:
- def __init__(self):
- self.trunk = []
- self.branch = []
- self.allrevs = []
-
- def capable(self):
- # this is also responsible for setting self.vcexe
- raise NotImplementedError
-
- def createBasedir(self):
- # you must call this from createRepository
- self.repbase = os.path.abspath(os.path.join("test_vc",
- "repositories"))
- if not os.path.isdir(self.repbase):
- os.makedirs(self.repbase)
-
- def createRepository(self):
- # this will only be called once per process
- raise NotImplementedError
-
- def populate(self, basedir):
- os.makedirs(basedir)
- os.makedirs(os.path.join(basedir, "subdir"))
- open(os.path.join(basedir, "main.c"), "w").write(MAIN_C)
- self.version = 1
- version_c = VERSION_C % self.version
- open(os.path.join(basedir, "version.c"), "w").write(version_c)
- open(os.path.join(basedir, "main.c"), "w").write(MAIN_C)
- open(os.path.join(basedir, "subdir", "subdir.c"), "w").write(SUBDIR_C)
-
- def populate_branch(self, basedir):
- open(os.path.join(basedir, "main.c"), "w").write(BRANCH_C)
-
- def addTrunkRev(self, rev):
- self.trunk.append(rev)
- self.allrevs.append(rev)
- def addBranchRev(self, rev):
- self.branch.append(rev)
- self.allrevs.append(rev)
-
- def runCommand(self, basedir, command, failureIsOk=False):
- # all commands passed to do() should be strings or lists. If they are
- # strings, none of the arguments may have spaces. This makes the
- # commands less verbose at the expense of restricting what they can
- # specify.
- if type(command) not in (list, tuple):
- command = command.split(" ")
- #print "do %s" % command
- env = os.environ.copy()
- env['LC_ALL'] = "C"
- d = utils.getProcessOutputAndValue(command[0], command[1:],
- env=env, path=basedir)
- def check((out, err, code)):
- #print
- #print "command: %s" % command
- #print "out: %s" % out
- #print "code: %s" % code
- if code != 0 and not failureIsOk:
- log.msg("command %s finished with exit code %d" %
- (command, code))
- log.msg(" and stdout %s" % (out,))
- log.msg(" and stderr %s" % (err,))
- raise RuntimeError("command %s finished with exit code %d"
- % (command, code)
- + ": see logs for stdout")
- return out
- d.addCallback(check)
- return d
-
- def do(self, basedir, command, failureIsOk=False):
- d = self.runCommand(basedir, command, failureIsOk=failureIsOk)
- return waitForDeferred(d)
-
- def dovc(self, basedir, command, failureIsOk=False):
- """Like do(), but the VC binary will be prepended to COMMAND."""
- command = self.vcexe + " " + command
- return self.do(basedir, command, failureIsOk)
-
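do() and dovc() return waitForDeferred wrappers that are meant to be consumed inside a deferredGenerator, which is how every concrete helper below drives its VC binary. The calling pattern, sketched as it would appear in a BaseHelper subclass (the commands shown are placeholders):

def example_setup(self):
    w = self.dovc(self.repbase, "init sample-repo")
    yield w; w.getResult()          # getResult() re-raises failures from check()
    w = self.do(self.repbase, ["ls", "-l", self.repbase])
    yield w; out = w.getResult()    # 'out' is the command's stdout
example_setup = deferredGenerator(example_setup)
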
-class VCBase(SignalMixin):
- metadir = None
- createdRepository = False
- master = None
- slave = None
- httpServer = None
- httpPort = None
- skip = None
- has_got_revision = False
- has_got_revision_branches_are_merged = False # for SVN
-
- def failUnlessIn(self, substring, string, msg=None):
- # trial provides its own version of this assertion, but that version
- # requires python-2.3 to test substrings, so we supply a fallback here.
- if msg is None:
- msg = ("did not see the expected substring '%s' in string '%s'" %
- (substring, string))
- self.failUnless(string.find(substring) != -1, msg)
-
- def setUp(self):
- d = VCS.skipIfNotCapable(self.vc_name)
- d.addCallback(self._setUp1)
- return maybeWait(d)
-
- def _setUp1(self, res):
- self.helper = VCS.getHelper(self.vc_name)
-
- if os.path.exists("basedir"):
- rmdirRecursive("basedir")
- os.mkdir("basedir")
- self.master = master.BuildMaster("basedir")
- self.slavebase = os.path.abspath("slavebase")
- if os.path.exists(self.slavebase):
- rmdirRecursive(self.slavebase)
- os.mkdir("slavebase")
-
- d = VCS.createRepository(self.vc_name)
- return d
-
- def connectSlave(self):
- port = self.master.slavePort._port.getHost().port
- slave = bot.BuildSlave("localhost", port, "bot1", "sekrit",
- self.slavebase, keepalive=0, usePTY=1)
- self.slave = slave
- slave.startService()
- d = self.master.botmaster.waitUntilBuilderAttached("vc")
- return d
-
- def loadConfig(self, config):
- # reloading the config file causes a new 'listDirs' command to be
- # sent to the slave. To synchronize on this properly, it is easiest
- # to stop and restart the slave.
- d = defer.succeed(None)
- if self.slave:
- d = self.master.botmaster.waitUntilBuilderDetached("vc")
- self.slave.stopService()
- d.addCallback(lambda res: self.master.loadConfig(config))
- d.addCallback(lambda res: self.connectSlave())
- return d
-
- def serveHTTP(self):
- # launch an HTTP server to serve the repository files
- from twisted.web import static, server
- from twisted.internet import reactor
- self.root = static.File(self.helper.repbase)
- self.site = server.Site(self.root)
- self.httpServer = reactor.listenTCP(0, self.site)
- self.httpPort = self.httpServer.getHost().port
-
- def doBuild(self, shouldSucceed=True, ss=None):
- c = interfaces.IControl(self.master)
-
- if ss is None:
- ss = SourceStamp()
- #print "doBuild(ss: b=%s rev=%s)" % (ss.branch, ss.revision)
- req = base.BuildRequest("test_vc forced build", ss)
- d = req.waitUntilFinished()
- c.getBuilder("vc").requestBuild(req)
- d.addCallback(self._doBuild_1, shouldSucceed)
- return d
- def _doBuild_1(self, bs, shouldSucceed):
- r = bs.getResults()
- if r != SUCCESS and shouldSucceed:
- print
- print
- if not bs.isFinished():
- print "Hey, build wasn't even finished!"
- print "Build did not succeed:", r, bs.getText()
- for s in bs.getSteps():
- for l in s.getLogs():
- print "--- START step %s / log %s ---" % (s.getName(),
- l.getName())
- print l.getTextWithHeaders()
- print "--- STOP ---"
- print
- self.fail("build did not succeed")
- return bs
-
- def touch(self, d, f):
- open(os.path.join(d,f),"w").close()
- def shouldExist(self, *args):
- target = os.path.join(*args)
- self.failUnless(os.path.exists(target),
- "expected to find %s but didn't" % target)
- def shouldNotExist(self, *args):
- target = os.path.join(*args)
- self.failIf(os.path.exists(target),
- "expected to NOT find %s, but did" % target)
- def shouldContain(self, d, f, contents):
- c = open(os.path.join(d, f), "r").read()
- self.failUnlessIn(contents, c)
-
- def checkGotRevision(self, bs, expected):
- if self.has_got_revision:
- self.failUnlessEqual(bs.getProperty("got_revision"), expected)
-
- def checkGotRevisionIsLatest(self, bs):
- expected = self.helper.trunk[-1]
- if self.has_got_revision_branches_are_merged:
- expected = self.helper.allrevs[-1]
- self.checkGotRevision(bs, expected)
-
- def do_vctest(self, testRetry=True):
- vctype = self.vctype
- args = self.helper.vcargs
- m = self.master
- self.vcdir = os.path.join(self.slavebase, "vc-dir", "source")
- self.workdir = os.path.join(self.slavebase, "vc-dir", "build")
- # woo double-substitution
- s = "s(%s, timeout=200, workdir='build', mode='%%s'" % (vctype,)
- for k,v in args.items():
- s += ", %s=%s" % (k, repr(v))
- s += ")"
- config = config_vc % s
-
- m.loadConfig(config % 'clobber')
- m.readConfig = True
- m.startService()
-
- d = self.connectSlave()
- d.addCallback(lambda res: log.msg("testing clobber"))
- d.addCallback(self._do_vctest_clobber)
- d.addCallback(lambda res: log.msg("doing update"))
- d.addCallback(lambda res: self.loadConfig(config % 'update'))
- d.addCallback(lambda res: log.msg("testing update"))
- d.addCallback(self._do_vctest_update)
- if testRetry:
- d.addCallback(lambda res: log.msg("testing update retry"))
- d.addCallback(self._do_vctest_update_retry)
- d.addCallback(lambda res: log.msg("doing copy"))
- d.addCallback(lambda res: self.loadConfig(config % 'copy'))
- d.addCallback(lambda res: log.msg("testing copy"))
- d.addCallback(self._do_vctest_copy)
- if self.metadir:
- d.addCallback(lambda res: log.msg("doing export"))
- d.addCallback(lambda res: self.loadConfig(config % 'export'))
- d.addCallback(lambda res: log.msg("testing export"))
- d.addCallback(self._do_vctest_export)
- return d
-
- def _do_vctest_clobber(self, res):
- d = self.doBuild() # initial checkout
- d.addCallback(self._do_vctest_clobber_1)
- return d
- def _do_vctest_clobber_1(self, bs):
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.shouldExist(self.workdir, "subdir", "subdir.c")
- if self.metadir:
- self.shouldExist(self.workdir, self.metadir)
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.failUnlessEqual(bs.getProperty("branch"), None)
- self.checkGotRevisionIsLatest(bs)
-
- self.touch(self.workdir, "newfile")
- self.shouldExist(self.workdir, "newfile")
- d = self.doBuild() # rebuild clobbers workdir
- d.addCallback(self._do_vctest_clobber_2)
- return d
- def _do_vctest_clobber_2(self, res):
- self.shouldNotExist(self.workdir, "newfile")
-
- def _do_vctest_update(self, res):
- log.msg("_do_vctest_update")
- d = self.doBuild() # rebuild with update
- d.addCallback(self._do_vctest_update_1)
- return d
- def _do_vctest_update_1(self, bs):
- log.msg("_do_vctest_update_1")
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % self.helper.version)
- if self.metadir:
- self.shouldExist(self.workdir, self.metadir)
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.checkGotRevisionIsLatest(bs)
-
- self.touch(self.workdir, "newfile")
- d = self.doBuild() # update rebuild leaves new files
- d.addCallback(self._do_vctest_update_2)
- return d
- def _do_vctest_update_2(self, bs):
- log.msg("_do_vctest_update_2")
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.touch(self.workdir, "newfile")
- # now make a change to the repository and make sure we pick it up
- d = self.helper.vc_revise()
- d.addCallback(lambda res: self.doBuild())
- d.addCallback(self._do_vctest_update_3)
- return d
- def _do_vctest_update_3(self, bs):
- log.msg("_do_vctest_update_3")
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % self.helper.version)
- self.shouldExist(self.workdir, "newfile")
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.checkGotRevisionIsLatest(bs)
-
- # now "update" to an older revision
- d = self.doBuild(ss=SourceStamp(revision=self.helper.trunk[-2]))
- d.addCallback(self._do_vctest_update_4)
- return d
- def _do_vctest_update_4(self, bs):
- log.msg("_do_vctest_update_4")
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % (self.helper.version-1))
- self.failUnlessEqual(bs.getProperty("revision"),
- self.helper.trunk[-2])
- self.checkGotRevision(bs, self.helper.trunk[-2])
-
- # now update to the newer revision
- d = self.doBuild(ss=SourceStamp(revision=self.helper.trunk[-1]))
- d.addCallback(self._do_vctest_update_5)
- return d
- def _do_vctest_update_5(self, bs):
- log.msg("_do_vctest_update_5")
- self.shouldExist(self.workdir, "main.c")
- self.shouldExist(self.workdir, "version.c")
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % self.helper.version)
- self.failUnlessEqual(bs.getProperty("revision"),
- self.helper.trunk[-1])
- self.checkGotRevision(bs, self.helper.trunk[-1])
-
-
- def _do_vctest_update_retry(self, res):
- # certain local changes will prevent an update from working. The
- # most common is to replace a file with a directory, or vice
- # versa. The slave code should spot the failure and do a
- # clobber/retry.
- os.unlink(os.path.join(self.workdir, "main.c"))
- os.mkdir(os.path.join(self.workdir, "main.c"))
- self.touch(os.path.join(self.workdir, "main.c"), "foo")
- self.touch(self.workdir, "newfile")
-
- d = self.doBuild() # update, but must clobber to handle the error
- d.addCallback(self._do_vctest_update_retry_1)
- return d
- def _do_vctest_update_retry_1(self, bs):
- self.shouldNotExist(self.workdir, "newfile")
-
- def _do_vctest_copy(self, res):
- d = self.doBuild() # copy rebuild clobbers new files
- d.addCallback(self._do_vctest_copy_1)
- return d
- def _do_vctest_copy_1(self, bs):
- if self.metadir:
- self.shouldExist(self.workdir, self.metadir)
- self.shouldNotExist(self.workdir, "newfile")
- self.touch(self.workdir, "newfile")
- self.touch(self.vcdir, "newvcfile")
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.checkGotRevisionIsLatest(bs)
-
- d = self.doBuild() # copy rebuild clobbers new files
- d.addCallback(self._do_vctest_copy_2)
- return d
- def _do_vctest_copy_2(self, bs):
- if self.metadir:
- self.shouldExist(self.workdir, self.metadir)
- self.shouldNotExist(self.workdir, "newfile")
- self.shouldExist(self.vcdir, "newvcfile")
- self.shouldExist(self.workdir, "newvcfile")
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.checkGotRevisionIsLatest(bs)
- self.touch(self.workdir, "newfile")
-
- def _do_vctest_export(self, res):
- d = self.doBuild() # export rebuild clobbers new files
- d.addCallback(self._do_vctest_export_1)
- return d
- def _do_vctest_export_1(self, bs):
- self.shouldNotExist(self.workdir, self.metadir)
- self.shouldNotExist(self.workdir, "newfile")
- self.failUnlessEqual(bs.getProperty("revision"), None)
- #self.checkGotRevisionIsLatest(bs)
- # VC 'export' is not required to have a got_revision
- self.touch(self.workdir, "newfile")
-
- d = self.doBuild() # export rebuild clobbers new files
- d.addCallback(self._do_vctest_export_2)
- return d
- def _do_vctest_export_2(self, bs):
- self.shouldNotExist(self.workdir, self.metadir)
- self.shouldNotExist(self.workdir, "newfile")
- self.failUnlessEqual(bs.getProperty("revision"), None)
- #self.checkGotRevisionIsLatest(bs)
- # VC 'export' is not required to have a got_revision
-
- def do_patch(self):
- vctype = self.vctype
- args = self.helper.vcargs
- m = self.master
- self.vcdir = os.path.join(self.slavebase, "vc-dir", "source")
- self.workdir = os.path.join(self.slavebase, "vc-dir", "build")
- s = "s(%s, timeout=200, workdir='build', mode='%%s'" % (vctype,)
- for k,v in args.items():
- s += ", %s=%s" % (k, repr(v))
- s += ")"
- self.config = config_vc % s
-
- m.loadConfig(self.config % "clobber")
- m.readConfig = True
- m.startService()
-
- ss = SourceStamp(revision=self.helper.trunk[-1], patch=(0, p0_diff))
-
- d = self.connectSlave()
- d.addCallback(lambda res: self.doBuild(ss=ss))
- d.addCallback(self._doPatch_1)
- return d
- def _doPatch_1(self, bs):
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % self.helper.version)
- # make sure the file actually got patched
- subdir_c = os.path.join(self.slavebase, "vc-dir", "build",
- "subdir", "subdir.c")
- data = open(subdir_c, "r").read()
- self.failUnlessIn("Hello patched subdir.\\n", data)
- self.failUnlessEqual(bs.getProperty("revision"),
- self.helper.trunk[-1])
- self.checkGotRevision(bs, self.helper.trunk[-1])
-
- # make sure that a rebuild does not use the leftover patched workdir
- d = self.master.loadConfig(self.config % "update")
- d.addCallback(lambda res: self.doBuild(ss=None))
- d.addCallback(self._doPatch_2)
- return d
- def _doPatch_2(self, bs):
- # make sure the file is back to its original
- subdir_c = os.path.join(self.slavebase, "vc-dir", "build",
- "subdir", "subdir.c")
- data = open(subdir_c, "r").read()
- self.failUnlessIn("Hello subdir.\\n", data)
- self.failUnlessEqual(bs.getProperty("revision"), None)
- self.checkGotRevisionIsLatest(bs)
-
- # now make sure we can patch an older revision. We need at least two
- # revisions here, so we might have to create one first
- if len(self.helper.trunk) < 2:
- d = self.helper.vc_revise()
- d.addCallback(self._doPatch_3)
- return d
- return self._doPatch_3()
-
- def _doPatch_3(self, res=None):
- ss = SourceStamp(revision=self.helper.trunk[-2], patch=(0, p0_diff))
- d = self.doBuild(ss=ss)
- d.addCallback(self._doPatch_4)
- return d
- def _doPatch_4(self, bs):
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % (self.helper.version-1))
- # and make sure the file actually got patched
- subdir_c = os.path.join(self.slavebase, "vc-dir", "build",
- "subdir", "subdir.c")
- data = open(subdir_c, "r").read()
- self.failUnlessIn("Hello patched subdir.\\n", data)
- self.failUnlessEqual(bs.getProperty("revision"),
- self.helper.trunk[-2])
- self.checkGotRevision(bs, self.helper.trunk[-2])
-
- # now check that we can patch a branch
- ss = SourceStamp(branch=self.helper.branchname,
- revision=self.helper.branch[-1],
- patch=(0, p0_diff))
- d = self.doBuild(ss=ss)
- d.addCallback(self._doPatch_5)
- return d
- def _doPatch_5(self, bs):
- self.shouldContain(self.workdir, "version.c",
- "version=%d" % 1)
- self.shouldContain(self.workdir, "main.c", "Hello branch.")
- subdir_c = os.path.join(self.slavebase, "vc-dir", "build",
- "subdir", "subdir.c")
- data = open(subdir_c, "r").read()
- self.failUnlessIn("Hello patched subdir.\\n", data)
- self.failUnlessEqual(bs.getProperty("revision"),
- self.helper.branch[-1])
- self.failUnlessEqual(bs.getProperty("branch"), self.helper.branchname)
- self.checkGotRevision(bs, self.helper.branch[-1])
-
-
- def do_vctest_once(self, shouldSucceed):
- m = self.master
- vctype = self.vctype
- args = self.helper.vcargs
- vcdir = os.path.join(self.slavebase, "vc-dir", "source")
- workdir = os.path.join(self.slavebase, "vc-dir", "build")
- # woo double-substitution
- s = "s(%s, timeout=200, workdir='build', mode='clobber'" % (vctype,)
- for k,v in args.items():
- s += ", %s=%s" % (k, repr(v))
- s += ")"
- config = config_vc % s
-
- m.loadConfig(config)
- m.readConfig = True
- m.startService()
-
- self.connectSlave()
- d = self.doBuild(shouldSucceed) # initial checkout
- return d
-
- def do_branch(self):
- log.msg("do_branch")
- vctype = self.vctype
- args = self.helper.vcargs
- m = self.master
- self.vcdir = os.path.join(self.slavebase, "vc-dir", "source")
- self.workdir = os.path.join(self.slavebase, "vc-dir", "build")
- s = "s(%s, timeout=200, workdir='build', mode='%%s'" % (vctype,)
- for k,v in args.items():
- s += ", %s=%s" % (k, repr(v))
- s += ")"
- self.config = config_vc % s
-
- m.loadConfig(self.config % "update")
- m.readConfig = True
- m.startService()
-
- # first we do a build of the trunk
- d = self.connectSlave()
- d.addCallback(lambda res: self.doBuild(ss=SourceStamp()))
- d.addCallback(self._doBranch_1)
- return d
- def _doBranch_1(self, bs):
- log.msg("_doBranch_1")
- # make sure the checkout was of the trunk
- main_c = os.path.join(self.slavebase, "vc-dir", "build", "main.c")
- data = open(main_c, "r").read()
- self.failUnlessIn("Hello world.", data)
-
- # now do a checkout on the branch. The change in branch name should
- # trigger a clobber.
- self.touch(self.workdir, "newfile")
- d = self.doBuild(ss=SourceStamp(branch=self.helper.branchname))
- d.addCallback(self._doBranch_2)
- return d
- def _doBranch_2(self, bs):
- log.msg("_doBranch_2")
- # make sure it was on the branch
- main_c = os.path.join(self.slavebase, "vc-dir", "build", "main.c")
- data = open(main_c, "r").read()
- self.failUnlessIn("Hello branch.", data)
- # and make sure the tree was clobbered
- self.shouldNotExist(self.workdir, "newfile")
-
- # doing another build on the same branch should not clobber the tree
- self.touch(self.workdir, "newbranchfile")
- d = self.doBuild(ss=SourceStamp(branch=self.helper.branchname))
- d.addCallback(self._doBranch_3)
- return d
- def _doBranch_3(self, bs):
- log.msg("_doBranch_3")
- # make sure it is still on the branch
- main_c = os.path.join(self.slavebase, "vc-dir", "build", "main.c")
- data = open(main_c, "r").read()
- self.failUnlessIn("Hello branch.", data)
- # and make sure the tree was not clobbered
- self.shouldExist(self.workdir, "newbranchfile")
-
- # now make sure that a non-branch checkout clobbers the tree
- d = self.doBuild(ss=SourceStamp())
- d.addCallback(self._doBranch_4)
- return d
- def _doBranch_4(self, bs):
- log.msg("_doBranch_4")
- # make sure it was on the trunk
- main_c = os.path.join(self.slavebase, "vc-dir", "build", "main.c")
- data = open(main_c, "r").read()
- self.failUnlessIn("Hello world.", data)
- self.shouldNotExist(self.workdir, "newbranchfile")
-
- def do_getpatch(self, doBranch=True):
- log.msg("do_getpatch")
- # prepare a buildslave to do checkouts
- vctype = self.vctype
- args = self.helper.vcargs
- m = self.master
- self.vcdir = os.path.join(self.slavebase, "vc-dir", "source")
- self.workdir = os.path.join(self.slavebase, "vc-dir", "build")
- # woo double-substitution
- s = "s(%s, timeout=200, workdir='build', mode='%%s'" % (vctype,)
- for k,v in args.items():
- s += ", %s=%s" % (k, repr(v))
- s += ")"
- config = config_vc % s
-
- m.loadConfig(config % 'clobber')
- m.readConfig = True
- m.startService()
-
- d = self.connectSlave()
-
- # then set up the "developer's tree". first we modify a tree from the
- # head of the trunk
- tmpdir = "try_workdir"
- self.trydir = os.path.join(self.helper.repbase, tmpdir)
- rmdirRecursive(self.trydir)
- d.addCallback(self.do_getpatch_trunkhead)
- d.addCallback(self.do_getpatch_trunkold)
- if doBranch:
- d.addCallback(self.do_getpatch_branch)
- d.addCallback(self.do_getpatch_finish)
- return d
-
- def do_getpatch_finish(self, res):
- log.msg("do_getpatch_finish")
- self.helper.vc_try_finish(self.trydir)
- return res
-
- def try_shouldMatch(self, filename):
- devfilename = os.path.join(self.trydir, filename)
- devfile = open(devfilename, "r").read()
- slavefilename = os.path.join(self.workdir, filename)
- slavefile = open(slavefilename, "r").read()
- self.failUnlessEqual(devfile, slavefile,
- ("slavefile (%s) contains '%s'. "
- "developer's file (%s) contains '%s'. "
- "These ought to match") %
- (slavefilename, slavefile,
- devfilename, devfile))
-
- def do_getpatch_trunkhead(self, res):
- log.msg("do_getpatch_trunkhead")
- d = self.helper.vc_try_checkout(self.trydir, self.helper.trunk[-1])
- d.addCallback(self._do_getpatch_trunkhead_1)
- return d
- def _do_getpatch_trunkhead_1(self, res):
- log.msg("_do_getpatch_trunkhead_1")
- d = tryclient.getSourceStamp(self.vctype_try, self.trydir, None)
- d.addCallback(self._do_getpatch_trunkhead_2)
- return d
- def _do_getpatch_trunkhead_2(self, ss):
- log.msg("_do_getpatch_trunkhead_2")
- d = self.doBuild(ss=ss)
- d.addCallback(self._do_getpatch_trunkhead_3)
- return d
- def _do_getpatch_trunkhead_3(self, res):
- log.msg("_do_getpatch_trunkhead_3")
- # verify that the resulting buildslave tree matches the developer's
- self.try_shouldMatch("main.c")
- self.try_shouldMatch("version.c")
- self.try_shouldMatch(os.path.join("subdir", "subdir.c"))
-
- def do_getpatch_trunkold(self, res):
- log.msg("do_getpatch_trunkold")
- # now try a tree from an older revision. We need at least two
- # revisions here, so we might have to create one first
- if len(self.helper.trunk) < 2:
- d = self.helper.vc_revise()
- d.addCallback(self._do_getpatch_trunkold_1)
- return d
- return self._do_getpatch_trunkold_1()
- def _do_getpatch_trunkold_1(self, res=None):
- log.msg("_do_getpatch_trunkold_1")
- d = self.helper.vc_try_checkout(self.trydir, self.helper.trunk[-2])
- d.addCallback(self._do_getpatch_trunkold_2)
- return d
- def _do_getpatch_trunkold_2(self, res):
- log.msg("_do_getpatch_trunkold_2")
- d = tryclient.getSourceStamp(self.vctype_try, self.trydir, None)
- d.addCallback(self._do_getpatch_trunkold_3)
- return d
- def _do_getpatch_trunkold_3(self, ss):
- log.msg("_do_getpatch_trunkold_3")
- d = self.doBuild(ss=ss)
- d.addCallback(self._do_getpatch_trunkold_4)
- return d
- def _do_getpatch_trunkold_4(self, res):
- log.msg("_do_getpatch_trunkold_4")
- # verify that the resulting buildslave tree matches the developer's
- self.try_shouldMatch("main.c")
- self.try_shouldMatch("version.c")
- self.try_shouldMatch(os.path.join("subdir", "subdir.c"))
-
- def do_getpatch_branch(self, res):
- log.msg("do_getpatch_branch")
- # now try a tree from a branch
- d = self.helper.vc_try_checkout(self.trydir, self.helper.branch[-1],
- self.helper.branchname)
- d.addCallback(self._do_getpatch_branch_1)
- return d
- def _do_getpatch_branch_1(self, res):
- log.msg("_do_getpatch_branch_1")
- d = tryclient.getSourceStamp(self.vctype_try, self.trydir,
- self.helper.try_branchname)
- d.addCallback(self._do_getpatch_branch_2)
- return d
- def _do_getpatch_branch_2(self, ss):
- log.msg("_do_getpatch_branch_2")
- d = self.doBuild(ss=ss)
- d.addCallback(self._do_getpatch_branch_3)
- return d
- def _do_getpatch_branch_3(self, res):
- log.msg("_do_getpatch_branch_3")
- # verify that the resulting buildslave tree matches the developer's
- self.try_shouldMatch("main.c")
- self.try_shouldMatch("version.c")
- self.try_shouldMatch(os.path.join("subdir", "subdir.c"))
-
-
- def dumpPatch(self, patch):
- # this exists to help figure out the right 'patchlevel' value that
- # should be returned by tryclient.getSourceStamp
- n = self.mktemp()
- open(n,"w").write(patch)
- d = self.runCommand(".", ["lsdiff", n])
- def p(res): print "lsdiff:", res.strip().split("\n")
- d.addCallback(p)
- return d
-
-
- def tearDown(self):
- d = defer.succeed(None)
- if self.slave:
- d2 = self.master.botmaster.waitUntilBuilderDetached("vc")
- d.addCallback(lambda res: self.slave.stopService())
- d.addCallback(lambda res: d2)
- if self.master:
- d.addCallback(lambda res: self.master.stopService())
- if self.httpServer:
- d.addCallback(lambda res: self.httpServer.stopListening())
- def stopHTTPTimer():
- try:
- from twisted.web import http # Twisted-2.0
- except ImportError:
- from twisted.protocols import http # Twisted-1.3
- http._logDateTimeStop() # shut down the internal timer. DUMB!
- d.addCallback(lambda res: stopHTTPTimer())
- d.addCallback(lambda res: self.tearDown2())
- return maybeWait(d)
-
- def tearDown2(self):
- pass
-
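For reference, the double-substitution used by do_vctest() and friends first interpolates the step description into config_vc and only then fills in the mode; for the CVS helper defined next, the generated factory line comes out roughly as follows (the repository path is illustrative):

f1 = factory.BuildFactory([
    s(step.CVS, timeout=200, workdir='build', mode='clobber',
      cvsroot='/path/to/test_vc/repositories/CVS-Repository',
      cvsmodule='sample'),
    ])
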
-class CVSHelper(BaseHelper):
- branchname = "branch"
- try_branchname = "branch"
-
- def capable(self):
- cvspaths = which('cvs')
- if not cvspaths:
- return (False, "CVS is not installed")
- # cvs-1.10 (as shipped with OS-X 10.3 "Panther") is too old for this
- # test. There is a situation where we check out a tree, make a
- # change, then commit it back, and CVS refuses to believe that we're
- # operating in a CVS tree. I tested cvs-1.12.9 and it works ok; OS-X
- # 10.4 "Tiger" comes with cvs-1.11, but I haven't tested that yet.
- # For now, skip the tests if we've got 1.10.
- log.msg("running %s --version.." % (cvspaths[0],))
- d = utils.getProcessOutput(cvspaths[0], ["--version"],
- env=os.environ)
- d.addCallback(self._capable, cvspaths[0])
- return d
-
- def _capable(self, v, vcexe):
- m = re.search(r'\(CVS\) ([\d\.]+) ', v)
- if not m:
- log.msg("couldn't identify CVS version number in output:")
- log.msg("'''%s'''" % v)
- log.msg("skipping tests")
- return (False, "Found CVS but couldn't identify its version")
- ver = m.group(1)
- log.msg("found CVS version '%s'" % ver)
- if ver == "1.10":
- return (False, "Found CVS, but it is too old")
- self.vcexe = vcexe
- return (True, None)
-
- def getdate(self):
- # this timestamp is eventually passed to CVS in a -D argument, and
- # strftime's %z specifier doesn't seem to work reliably (I get +0000
- # where I should get +0700 under linux sometimes, and windows seems
- # to want to put a verbose 'Eastern Standard Time' in there), so
- # leave off the timezone specifier and treat this as localtime. A
- # valid alternative would be to use a hard-coded +0000 and
- # time.gmtime().
- return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
-
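The hard-coded-UTC alternative mentioned in the comment above would look something like this (a sketch of the road not taken, not what these tests actually do):

def getdate_utc(self):
    # stamp in UTC with an explicit +0000 offset instead of relying on the
    # platform's unreliable %z behaviour
    return time.strftime("%Y-%m-%d %H:%M:%S +0000", time.gmtime())
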
- def createRepository(self):
- self.createBasedir()
- self.cvsrep = cvsrep = os.path.join(self.repbase, "CVS-Repository")
- tmp = os.path.join(self.repbase, "cvstmp")
-
- w = self.dovc(self.repbase, "-d %s init" % cvsrep)
- yield w; w.getResult() # we must getResult() to raise any exceptions
-
- self.populate(tmp)
- cmd = ("-d %s import" % cvsrep +
- " -m sample_project_files sample vendortag start")
- w = self.dovc(tmp, cmd)
- yield w; w.getResult()
- rmdirRecursive(tmp)
- # take a timestamp as the first revision number
- time.sleep(2)
- self.addTrunkRev(self.getdate())
- time.sleep(2)
-
- w = self.dovc(self.repbase,
- "-d %s checkout -d cvstmp sample" % self.cvsrep)
- yield w; w.getResult()
-
- w = self.dovc(tmp, "tag -b %s" % self.branchname)
- yield w; w.getResult()
- self.populate_branch(tmp)
- w = self.dovc(tmp,
- "commit -m commit_on_branch -r %s" % self.branchname)
- yield w; w.getResult()
- rmdirRecursive(tmp)
- time.sleep(2)
- self.addBranchRev(self.getdate())
- time.sleep(2)
- self.vcargs = { 'cvsroot': self.cvsrep, 'cvsmodule': "sample" }
- createRepository = deferredGenerator(createRepository)
-
-
- def vc_revise(self):
- tmp = os.path.join(self.repbase, "cvstmp")
-
- w = self.dovc(self.repbase,
- "-d %s checkout -d cvstmp sample" % self.cvsrep)
- yield w; w.getResult()
- self.version += 1
- version_c = VERSION_C % self.version
- open(os.path.join(tmp, "version.c"), "w").write(version_c)
- w = self.dovc(tmp,
- "commit -m revised_to_%d version.c" % self.version)
- yield w; w.getResult()
- rmdirRecursive(tmp)
- time.sleep(2)
- self.addTrunkRev(self.getdate())
- time.sleep(2)
- vc_revise = deferredGenerator(vc_revise)
-
- def vc_try_checkout(self, workdir, rev, branch=None):
- # 'workdir' is an absolute path
- assert os.path.abspath(workdir) == workdir
- cmd = [self.vcexe, "-d", self.cvsrep, "checkout",
- "-d", workdir,
- "-D", rev]
- if branch is not None:
- cmd.append("-r")
- cmd.append(branch)
- cmd.append("sample")
- w = self.do(self.repbase, cmd)
- yield w; w.getResult()
- open(os.path.join(workdir, "subdir", "subdir.c"), "w").write(TRY_C)
- vc_try_checkout = deferredGenerator(vc_try_checkout)
-
- def vc_try_finish(self, workdir):
- rmdirRecursive(workdir)
-
-class CVS(VCBase, unittest.TestCase):
- vc_name = "cvs"
-
- metadir = "CVS"
- vctype = "step.CVS"
- vctype_try = "cvs"
- # CVS gives us got_revision, but it is based entirely upon the local
- # clock, which means it is unlikely to match the timestamp taken earlier.
- # This might be enough for common use, but won't be good enough for our
- # tests to accept, so pretend it doesn't have got_revision at all.
- has_got_revision = False
-
- def testCheckout(self):
- d = self.do_vctest()
- return maybeWait(d)
-
- def testPatch(self):
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- d = self.do_branch()
- return maybeWait(d)
-
- def testTry(self):
- d = self.do_getpatch(doBranch=False)
- return maybeWait(d)
-
-VCS.registerVC(CVS.vc_name, CVSHelper())
-
-
-class SVNHelper(BaseHelper):
- branchname = "sample/branch"
- try_branchname = "sample/branch"
-
- def capable(self):
- svnpaths = which('svn')
- svnadminpaths = which('svnadmin')
- if not svnpaths:
- return (False, "SVN is not installed")
- if not svnadminpaths:
- return (False, "svnadmin is not installed")
- # we need svn to be compiled with the ra_local access
- # module
- log.msg("running svn --version..")
- env = os.environ.copy()
- env['LC_ALL'] = "C"
- d = utils.getProcessOutput(svnpaths[0], ["--version"],
- env=env)
- d.addCallback(self._capable, svnpaths[0], svnadminpaths[0])
- return d
-
- def _capable(self, v, vcexe, svnadmin):
- if v.find("handles 'file' schem") != -1:
- # older versions say 'schema', 1.2.0 and beyond say 'scheme'
- self.vcexe = vcexe
- self.svnadmin = svnadmin
- return (True, None)
- excuse = ("%s found but it does not support 'file:' " +
- "schema, skipping svn tests") % vcexe
- log.msg(excuse)
- return (False, excuse)
-
- def createRepository(self):
- self.createBasedir()
- self.svnrep = os.path.join(self.repbase,
- "SVN-Repository").replace('\\','/')
- tmp = os.path.join(self.repbase, "svntmp")
- if sys.platform == 'win32':
- # On Windows, paths do not start with a /
- self.svnurl = "file:///%s" % self.svnrep
- else:
- self.svnurl = "file://%s" % self.svnrep
- self.svnurl_trunk = self.svnurl + "/sample/trunk"
- self.svnurl_branch = self.svnurl + "/sample/branch"
-
- w = self.do(self.repbase, self.svnadmin+" create %s" % self.svnrep)
- yield w; w.getResult()
-
- self.populate(tmp)
- w = self.dovc(tmp,
- "import -m sample_project_files %s" %
- self.svnurl_trunk)
- yield w; out = w.getResult()
- rmdirRecursive(tmp)
- m = re.search(r'Committed revision (\d+)\.', out)
- assert m.group(1) == "1" # first revision is always "1"
- self.addTrunkRev(int(m.group(1)))
-
- w = self.dovc(self.repbase,
- "checkout %s svntmp" % self.svnurl_trunk)
- yield w; w.getResult()
-
- w = self.dovc(tmp, "cp -m make_branch %s %s" % (self.svnurl_trunk,
- self.svnurl_branch))
- yield w; w.getResult()
- w = self.dovc(tmp, "switch %s" % self.svnurl_branch)
- yield w; w.getResult()
- self.populate_branch(tmp)
- w = self.dovc(tmp, "commit -m commit_on_branch")
- yield w; out = w.getResult()
- rmdirRecursive(tmp)
- m = re.search(r'Committed revision (\d+)\.', out)
- self.addBranchRev(int(m.group(1)))
- createRepository = deferredGenerator(createRepository)
-
- def vc_revise(self):
- tmp = os.path.join(self.repbase, "svntmp")
- rmdirRecursive(tmp)
- log.msg("vc_revise" + self.svnurl_trunk)
- w = self.dovc(self.repbase,
- "checkout %s svntmp" % self.svnurl_trunk)
- yield w; w.getResult()
- self.version += 1
- version_c = VERSION_C % self.version
- open(os.path.join(tmp, "version.c"), "w").write(version_c)
- w = self.dovc(tmp, "commit -m revised_to_%d" % self.version)
- yield w; out = w.getResult()
- m = re.search(r'Committed revision (\d+)\.', out)
- self.addTrunkRev(int(m.group(1)))
- rmdirRecursive(tmp)
- vc_revise = deferredGenerator(vc_revise)
-
- def vc_try_checkout(self, workdir, rev, branch=None):
- assert os.path.abspath(workdir) == workdir
- if os.path.exists(workdir):
- rmdirRecursive(workdir)
- if not branch:
- svnurl = self.svnurl_trunk
- else:
- # N.B.: this is *not* os.path.join: SVN URLs use slashes
- # regardless of the host operating system's filepath separator
- svnurl = self.svnurl + "/" + branch
- w = self.dovc(self.repbase,
- "checkout %s %s" % (svnurl, workdir))
- yield w; w.getResult()
- open(os.path.join(workdir, "subdir", "subdir.c"), "w").write(TRY_C)
- vc_try_checkout = deferredGenerator(vc_try_checkout)
-
- def vc_try_finish(self, workdir):
- rmdirRecursive(workdir)
-
-
-class SVN(VCBase, unittest.TestCase):
- vc_name = "svn"
-
- metadir = ".svn"
- vctype = "step.SVN"
- vctype_try = "svn"
- has_got_revision = True
- has_got_revision_branches_are_merged = True
-
- def testCheckout(self):
- # we verify this one with the svnurl style of vcargs. We test the
- # baseURL/defaultBranch style in testPatch and testCheckoutBranch.
- self.helper.vcargs = { 'svnurl': self.helper.svnurl_trunk }
- d = self.do_vctest()
- return maybeWait(d)
-
- def testPatch(self):
- self.helper.vcargs = { 'baseURL': self.helper.svnurl + "/",
- 'defaultBranch': "sample/trunk",
- }
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- self.helper.vcargs = { 'baseURL': self.helper.svnurl + "/",
- 'defaultBranch': "sample/trunk",
- }
- d = self.do_branch()
- return maybeWait(d)
-
- def testTry(self):
- # extract the base revision and patch from a modified tree, use it to
- # create the same contents on the buildslave
- self.helper.vcargs = { 'baseURL': self.helper.svnurl + "/",
- 'defaultBranch': "sample/trunk",
- }
- d = self.do_getpatch()
- return maybeWait(d)
-
-VCS.registerVC(SVN.vc_name, SVNHelper())
-
-class DarcsHelper(BaseHelper):
- branchname = "branch"
- try_branchname = "branch"
-
- def capable(self):
- darcspaths = which('darcs')
- if not darcspaths:
- return (False, "Darcs is not installed")
- self.vcexe = darcspaths[0]
- return (True, None)
-
- def createRepository(self):
- self.createBasedir()
- self.darcs_base = os.path.join(self.repbase, "Darcs-Repository")
- self.rep_trunk = os.path.join(self.darcs_base, "trunk")
- self.rep_branch = os.path.join(self.darcs_base, "branch")
- tmp = os.path.join(self.repbase, "darcstmp")
-
- os.makedirs(self.rep_trunk)
- w = self.dovc(self.rep_trunk, "initialize")
- yield w; w.getResult()
- os.makedirs(self.rep_branch)
- w = self.dovc(self.rep_branch, "initialize")
- yield w; w.getResult()
-
- self.populate(tmp)
- w = self.dovc(tmp, "initialize")
- yield w; w.getResult()
- w = self.dovc(tmp, "add -r .")
- yield w; w.getResult()
- w = self.dovc(tmp, "record -a -m initial_import --skip-long-comment -A test@buildbot.sf.net")
- yield w; w.getResult()
- w = self.dovc(tmp, "push -a %s" % self.rep_trunk)
- yield w; w.getResult()
- w = self.dovc(tmp, "changes --context")
- yield w; out = w.getResult()
- self.addTrunkRev(out)
-
- self.populate_branch(tmp)
- w = self.dovc(tmp, "record -a --ignore-times -m commit_on_branch --skip-long-comment -A test@buildbot.sf.net")
- yield w; w.getResult()
- w = self.dovc(tmp, "push -a %s" % self.rep_branch)
- yield w; w.getResult()
- w = self.dovc(tmp, "changes --context")
- yield w; out = w.getResult()
- self.addBranchRev(out)
- rmdirRecursive(tmp)
- createRepository = deferredGenerator(createRepository)
-
- def vc_revise(self):
- tmp = os.path.join(self.repbase, "darcstmp")
- os.makedirs(tmp)
- w = self.dovc(tmp, "initialize")
- yield w; w.getResult()
- w = self.dovc(tmp, "pull -a %s" % self.rep_trunk)
- yield w; w.getResult()
-
- self.version += 1
- version_c = VERSION_C % self.version
- open(os.path.join(tmp, "version.c"), "w").write(version_c)
- w = self.dovc(tmp, "record -a --ignore-times -m revised_to_%d --skip-long-comment -A test@buildbot.sf.net" % self.version)
- yield w; w.getResult()
- w = self.dovc(tmp, "push -a %s" % self.rep_trunk)
- yield w; w.getResult()
- w = self.dovc(tmp, "changes --context")
- yield w; out = w.getResult()
- self.addTrunkRev(out)
- rmdirRecursive(tmp)
- vc_revise = deferredGenerator(vc_revise)
-
- def vc_try_checkout(self, workdir, rev, branch=None):
- assert os.path.abspath(workdir) == workdir
- if os.path.exists(workdir):
- rmdirRecursive(workdir)
- os.makedirs(workdir)
- w = self.dovc(workdir, "initialize")
- yield w; w.getResult()
- if not branch:
- rep = self.rep_trunk
- else:
- rep = os.path.join(self.darcs_base, branch)
- w = self.dovc(workdir, "pull -a %s" % rep)
- yield w; w.getResult()
- open(os.path.join(workdir, "subdir", "subdir.c"), "w").write(TRY_C)
- vc_try_checkout = deferredGenerator(vc_try_checkout)
-
- def vc_try_finish(self, workdir):
- rmdirRecursive(workdir)
-
-
-class Darcs(VCBase, unittest.TestCase):
- vc_name = "darcs"
-
- # Darcs has a metadir="_darcs", but it does not have an 'export'
- # mode
- metadir = None
- vctype = "step.Darcs"
- vctype_try = "darcs"
- has_got_revision = True
-
- def testCheckout(self):
- self.helper.vcargs = { 'repourl': self.helper.rep_trunk }
- d = self.do_vctest(testRetry=False)
-
- # TODO: testRetry has the same problem with Darcs as it does for
- # Arch
- return maybeWait(d)
-
- def testPatch(self):
- self.helper.vcargs = { 'baseURL': self.helper.darcs_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- self.helper.vcargs = { 'baseURL': self.helper.darcs_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_branch()
- return maybeWait(d)
-
- def testCheckoutHTTP(self):
- self.serveHTTP()
- repourl = "http://localhost:%d/Darcs-Repository/trunk" % self.httpPort
- self.helper.vcargs = { 'repourl': repourl }
- d = self.do_vctest(testRetry=False)
- return maybeWait(d)
-
- def testTry(self):
- self.helper.vcargs = { 'baseURL': self.helper.darcs_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_getpatch()
- return maybeWait(d)
-
-VCS.registerVC(Darcs.vc_name, DarcsHelper())
-
-
-class ArchCommon:
- def registerRepository(self, coordinates):
- a = self.archname
- w = self.dovc(self.repbase, "archives %s" % a)
- yield w; out = w.getResult()
- if out:
- w = self.dovc(self.repbase, "register-archive -d %s" % a)
- yield w; w.getResult()
- w = self.dovc(self.repbase, "register-archive %s" % coordinates)
- yield w; w.getResult()
- registerRepository = deferredGenerator(registerRepository)
-
- def unregisterRepository(self):
- a = self.archname
- w = self.dovc(self.repbase, "archives %s" % a)
- yield w; out = w.getResult()
- if out:
- w = self.dovc(self.repbase, "register-archive -d %s" % a)
- yield w; out = w.getResult()
- unregisterRepository = deferredGenerator(unregisterRepository)
-
-class TlaHelper(BaseHelper, ArchCommon):
- defaultbranch = "testvc--mainline--1"
- branchname = "testvc--branch--1"
- try_branchname = None # TlaExtractor can figure it out by itself
- archcmd = "tla"
-
- def capable(self):
- tlapaths = which('tla')
- if not tlapaths:
- return (False, "Arch (tla) is not installed")
- self.vcexe = tlapaths[0]
- return (True, None)
-
- def do_get(self, basedir, archive, branch, newdir):
- # the 'get' syntax is different between tla and baz. baz, while
- # claiming to honor an --archive argument, in fact ignores it. The
- # correct invocation is 'baz get archive/revision newdir'.
- if self.archcmd == "tla":
- w = self.dovc(basedir,
- "get -A %s %s %s" % (archive, branch, newdir))
- else:
- w = self.dovc(basedir,
- "get %s/%s %s" % (archive, branch, newdir))
- return w
-
- def createRepository(self):
- self.createBasedir()
- # first check to see if bazaar is around, since we'll need to know
- # later
- d = VCS.capable(Bazaar.vc_name)
- d.addCallback(self._createRepository_1)
- return d
-
- def _createRepository_1(self, res):
- has_baz = res[0]
-
- # pick a hopefully unique string for the archive name, in the form
- # test-%d@buildbot.sf.net--testvc, since otherwise multiple copies of
- # the unit tests run in the same user account will collide (since the
- # archive names are kept in the per-user ~/.arch-params/ directory).
- pid = os.getpid()
- self.archname = "test-%s-%d@buildbot.sf.net--testvc" % (self.archcmd,
- pid)
- trunk = self.defaultbranch
- branch = self.branchname
-
- repword = self.archcmd.capitalize()
- self.archrep = os.path.join(self.repbase, "%s-Repository" % repword)
- tmp = os.path.join(self.repbase, "archtmp")
- a = self.archname
-
- self.populate(tmp)
-
- w = self.dovc(tmp, "my-id", failureIsOk=True)
- yield w; res = w.getResult()
- if not res:
- # tla will fail a lot of operations if you have not set an ID
- w = self.do(tmp, [self.vcexe, "my-id",
- "Buildbot Test Suite <test@buildbot.sf.net>"])
- yield w; w.getResult()
-
- if has_baz:
- # bazaar keeps a cache of revisions, but this test creates a new
- # archive each time it is run, so the cache causes errors.
- # Disable the cache to avoid these problems. This will be
- # slightly annoying for people who run the buildbot tests under
- # the same UID as one which uses baz on a regular basis, but
- # bazaar doesn't give us a way to disable the cache just for this
- # one archive.
- cmd = "%s cache-config --disable" % VCS.getHelper('bazaar').vcexe
- w = self.do(tmp, cmd)
- yield w; w.getResult()
-
- w = waitForDeferred(self.unregisterRepository())
- yield w; w.getResult()
-
- # these commands can be run in any directory
- w = self.dovc(tmp, "make-archive -l %s %s" % (a, self.archrep))
- yield w; w.getResult()
- if self.archcmd == "tla":
- w = self.dovc(tmp, "archive-setup -A %s %s" % (a, trunk))
- yield w; w.getResult()
- w = self.dovc(tmp, "archive-setup -A %s %s" % (a, branch))
- yield w; w.getResult()
- else:
- # baz does not require an 'archive-setup' step
- pass
-
- # these commands must be run in the directory that is to be imported
- w = self.dovc(tmp, "init-tree --nested %s/%s" % (a, trunk))
- yield w; w.getResult()
- files = " ".join(["main.c", "version.c", "subdir",
- os.path.join("subdir", "subdir.c")])
- w = self.dovc(tmp, "add-id %s" % files)
- yield w; w.getResult()
-
- w = self.dovc(tmp, "import %s/%s" % (a, trunk))
- yield w; out = w.getResult()
- self.addTrunkRev("base-0")
-
- # create the branch
- if self.archcmd == "tla":
- branchstart = "%s--base-0" % trunk
- w = self.dovc(tmp, "tag -A %s %s %s" % (a, branchstart, branch))
- yield w; w.getResult()
- else:
- w = self.dovc(tmp, "branch %s" % branch)
- yield w; w.getResult()
-
- rmdirRecursive(tmp)
-
- # check out the branch
- w = self.do_get(self.repbase, a, branch, "archtmp")
- yield w; w.getResult()
- # and edit the file
- self.populate_branch(tmp)
- logfile = "++log.%s--%s" % (branch, a)
- logmsg = "Summary: commit on branch\nKeywords:\n\n"
- open(os.path.join(tmp, logfile), "w").write(logmsg)
- w = self.dovc(tmp, "commit")
- yield w; out = w.getResult()
- m = re.search(r'committed %s/%s--([\S]+)' % (a, branch),
- out)
- assert (m.group(1) == "base-0" or m.group(1).startswith("patch-"))
- self.addBranchRev(m.group(1))
-
- w = waitForDeferred(self.unregisterRepository())
- yield w; w.getResult()
- rmdirRecursive(tmp)
-
- # we unregister the repository each time, because we might have
- # changed the coordinates (since we switch from a file: URL to an
- # http: URL for various tests). The buildslave code doesn't forcibly
- # unregister the archive, so we have to do it here.
- w = waitForDeferred(self.unregisterRepository())
- yield w; w.getResult()
-
- _createRepository_1 = deferredGenerator(_createRepository_1)
-
- def vc_revise(self):
- # the fix needs to be done in a workspace that is linked to a
- # read-write version of the archive (i.e., using file-based
- # coordinates instead of HTTP ones), so we re-register the repository
- # before we begin. We unregister it when we're done to make sure the
- # build will re-register the correct one for whichever test is
- # currently being run.
-
- # except, that step.Bazaar really doesn't like it when the archive
- # gets unregistered behind its back. The slave tries to do a 'baz
- # replay' in a tree with an archive that is no longer recognized, and
- # baz aborts with a botched invariant exception. This causes
- # mode=update to fall back to clobber+get, which flunks one of the
- # tests (the 'newfile' check in _do_vctest_update_3 fails)
-
- # to avoid this, we take heroic steps here to leave the archive
- # registration in the same state as we found it.
-
- tmp = os.path.join(self.repbase, "archtmp")
- a = self.archname
-
- w = self.dovc(self.repbase, "archives %s" % a)
- yield w; out = w.getResult()
- assert out
- lines = out.split("\n")
- coordinates = lines[1].strip()
-
- # now register the read-write location
- w = waitForDeferred(self.registerRepository(self.archrep))
- yield w; w.getResult()
-
- trunk = self.defaultbranch
-
- w = self.do_get(self.repbase, a, trunk, "archtmp")
- yield w; w.getResult()
-
- # tla appears to use timestamps to determine which files have
- # changed, so wait long enough for the new file to have a different
- # timestamp
- time.sleep(2)
- self.version += 1
- version_c = VERSION_C % self.version
- open(os.path.join(tmp, "version.c"), "w").write(version_c)
-
- logfile = "++log.%s--%s" % (trunk, a)
- logmsg = "Summary: revised_to_%d\nKeywords:\n\n" % self.version
- open(os.path.join(tmp, logfile), "w").write(logmsg)
- w = self.dovc(tmp, "commit")
- yield w; out = w.getResult()
- m = re.search(r'committed %s/%s--([\S]+)' % (a, trunk),
- out)
- assert (m.group(1) == "base-0" or m.group(1).startswith("patch-"))
- self.addTrunkRev(m.group(1))
-
- # now re-register the original coordinates
- w = waitForDeferred(self.registerRepository(coordinates))
- yield w; w.getResult()
- rmdirRecursive(tmp)
- vc_revise = deferredGenerator(vc_revise)
-
- def vc_try_checkout(self, workdir, rev, branch=None):
- assert os.path.abspath(workdir) == workdir
- if os.path.exists(workdir):
- rmdirRecursive(workdir)
-
- a = self.archname
-
- # register the read-write location, if it wasn't already registered
- w = waitForDeferred(self.registerRepository(self.archrep))
- yield w; w.getResult()
-
- w = self.do_get(self.repbase, a, "testvc--mainline--1", workdir)
- yield w; w.getResult()
-
- # timestamps. ick.
- time.sleep(2)
- open(os.path.join(workdir, "subdir", "subdir.c"), "w").write(TRY_C)
- vc_try_checkout = deferredGenerator(vc_try_checkout)
-
- def vc_try_finish(self, workdir):
- rmdirRecursive(workdir)
-
-class Arch(VCBase, unittest.TestCase):
- vc_name = "tla"
-
- metadir = None
- # Arch has a metadir="{arch}", but it does not have an 'export' mode.
- vctype = "step.Arch"
- vctype_try = "tla"
- has_got_revision = True
-
- def testCheckout(self):
- # these are the coordinates of the read-write archive used by all the
- # non-HTTP tests. testCheckoutHTTP overrides these.
- self.helper.vcargs = {'url': self.helper.archrep,
- 'version': self.helper.defaultbranch }
- d = self.do_vctest(testRetry=False)
- # the current testRetry=True logic doesn't have the desired effect:
- # "update" is a no-op because arch knows that the repository hasn't
- # changed. Other VC systems will re-checkout missing files on
- # update; arch just leaves the tree untouched. TODO: come up with
- # some better test logic, probably involving a copy of the
- # repository that has a few changes checked in.
-
- return maybeWait(d)
-
- def testCheckoutHTTP(self):
- self.serveHTTP()
- url = "http://localhost:%d/Tla-Repository" % self.httpPort
- self.helper.vcargs = { 'url': url,
- 'version': "testvc--mainline--1" }
- d = self.do_vctest(testRetry=False)
- return maybeWait(d)
-
- def testPatch(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- 'version': self.helper.defaultbranch }
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- 'version': self.helper.defaultbranch }
- d = self.do_branch()
- return maybeWait(d)
-
- def testTry(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- 'version': self.helper.defaultbranch }
- d = self.do_getpatch()
- return maybeWait(d)
-
-VCS.registerVC(Arch.vc_name, TlaHelper())
-
-
-class BazaarHelper(TlaHelper):
- archcmd = "baz"
-
- def capable(self):
- bazpaths = which('baz')
- if not bazpaths:
- return (False, "Arch (baz) is not installed")
- self.vcexe = bazpaths[0]
- return (True, None)
-
- def setUp2(self, res):
- # we unregister the repository each time, because we might have
- # changed the coordinates (since we switch from a file: URL to an
- # http: URL for various tests). The buildslave code doesn't forcibly
- # unregister the archive, so we have to do it here.
- d = self.unregisterRepository()
- return d
-
-
-class Bazaar(Arch):
- vc_name = "bazaar"
-
- vctype = "step.Bazaar"
- vctype_try = "baz"
- has_got_revision = True
-
- fixtimer = None
-
- def testCheckout(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- # Baz adds the required 'archive' argument
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- }
- d = self.do_vctest(testRetry=False)
- # the current testRetry=True logic doesn't have the desired effect:
- # "update" is a no-op because arch knows that the repository hasn't
- # changed. Other VC systems will re-checkout missing files on
-        # update, but arch just leaves the tree untouched. TODO: come up with
- # some better test logic, probably involving a copy of the
- # repository that has a few changes checked in.
-
- return maybeWait(d)
-
- def testCheckoutHTTP(self):
- self.serveHTTP()
- url = "http://localhost:%d/Baz-Repository" % self.httpPort
- self.helper.vcargs = { 'url': url,
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- }
- d = self.do_vctest(testRetry=False)
- return maybeWait(d)
-
- def testPatch(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- # Baz adds the required 'archive' argument
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- }
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- # Baz adds the required 'archive' argument
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- }
- d = self.do_branch()
- return maybeWait(d)
-
- def testTry(self):
- self.helper.vcargs = {'url': self.helper.archrep,
- # Baz adds the required 'archive' argument
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- }
- d = self.do_getpatch()
- return maybeWait(d)
-
- def fixRepository(self):
- self.fixtimer = None
- self.site.resource = self.root
-
- def testRetry(self):
- # we want to verify that step.Source(retry=) works, and the easiest
- # way to make VC updates break (temporarily) is to break the HTTP
- # server that's providing the repository. Anything else pretty much
- # requires mutating the (read-only) BUILDBOT_TEST_VC repository, or
- # modifying the buildslave's checkout command while it's running.
-
- # this test takes a while to run, so don't bother doing it with
- # anything other than baz
-
- self.serveHTTP()
-
- # break the repository server
- from twisted.web import static
- self.site.resource = static.Data("Sorry, repository is offline",
- "text/plain")
- # and arrange to fix it again in 5 seconds, while the test is
- # running.
- self.fixtimer = reactor.callLater(5, self.fixRepository)
-
- url = "http://localhost:%d/Baz-Repository" % self.httpPort
- self.helper.vcargs = { 'url': url,
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- 'retry': (5.0, 4),
- }
- d = self.do_vctest_once(True)
- d.addCallback(self._testRetry_1)
- return maybeWait(d)
- def _testRetry_1(self, bs):
- # make sure there was mention of the retry attempt in the logs
- l = bs.getLogs()[0]
- self.failUnlessIn("unable to access URL", l.getText(),
- "funny, VC operation didn't fail at least once")
- self.failUnlessIn("update failed, trying 4 more times after 5 seconds",
- l.getTextWithHeaders(),
- "funny, VC operation wasn't reattempted")
-
- def testRetryFails(self):
- # make sure that the build eventually gives up on a repository which
- # is completely unavailable
-
- self.serveHTTP()
-
- # break the repository server, and leave it broken
- from twisted.web import static
- self.site.resource = static.Data("Sorry, repository is offline",
- "text/plain")
-
- url = "http://localhost:%d/Baz-Repository" % self.httpPort
- self.helper.vcargs = {'url': url,
- 'archive': self.helper.archname,
- 'version': self.helper.defaultbranch,
- 'retry': (0.5, 3),
- }
- d = self.do_vctest_once(False)
- d.addCallback(self._testRetryFails_1)
- return maybeWait(d)
- def _testRetryFails_1(self, bs):
- self.failUnlessEqual(bs.getResults(), FAILURE)
-
- def tearDown2(self):
- if self.fixtimer:
- self.fixtimer.cancel()
- # tell tla to get rid of the leftover archive this test leaves in the
- # user's 'tla archives' listing. The name of this archive is provided
- # by the repository tarball, so the following command must use the
- # same name. We could use archive= to set it explicitly, but if you
- # change it from the default, then 'tla update' won't work.
- d = self.helper.unregisterRepository()
- return d
-
-VCS.registerVC(Bazaar.vc_name, BazaarHelper())
-
-class MercurialHelper(BaseHelper):
- branchname = "branch"
- try_branchname = "branch"
-
- def capable(self):
- hgpaths = which("hg")
- if not hgpaths:
- return (False, "Mercurial is not installed")
- self.vcexe = hgpaths[0]
- return (True, None)
-
- def extract_id(self, output):
- m = re.search(r'^(\w+)', output)
- return m.group(0)
-
- def createRepository(self):
- self.createBasedir()
- self.hg_base = os.path.join(self.repbase, "Mercurial-Repository")
- self.rep_trunk = os.path.join(self.hg_base, "trunk")
- self.rep_branch = os.path.join(self.hg_base, "branch")
- tmp = os.path.join(self.hg_base, "hgtmp")
-
- os.makedirs(self.rep_trunk)
- w = self.dovc(self.rep_trunk, "init")
- yield w; w.getResult()
- os.makedirs(self.rep_branch)
- w = self.dovc(self.rep_branch, "init")
- yield w; w.getResult()
-
- self.populate(tmp)
- w = self.dovc(tmp, "init")
- yield w; w.getResult()
- w = self.dovc(tmp, "add")
- yield w; w.getResult()
- w = self.dovc(tmp, "commit -m initial_import")
- yield w; w.getResult()
- w = self.dovc(tmp, "push %s" % self.rep_trunk)
- # note that hg-push does not actually update the working directory
- yield w; w.getResult()
- w = self.dovc(tmp, "identify")
- yield w; out = w.getResult()
- self.addTrunkRev(self.extract_id(out))
-
- self.populate_branch(tmp)
- w = self.dovc(tmp, "commit -m commit_on_branch")
- yield w; w.getResult()
- w = self.dovc(tmp, "push %s" % self.rep_branch)
- yield w; w.getResult()
- w = self.dovc(tmp, "identify")
- yield w; out = w.getResult()
- self.addBranchRev(self.extract_id(out))
- rmdirRecursive(tmp)
- createRepository = deferredGenerator(createRepository)
-
- def vc_revise(self):
- tmp = os.path.join(self.hg_base, "hgtmp2")
- w = self.dovc(self.hg_base, "clone %s %s" % (self.rep_trunk, tmp))
- yield w; w.getResult()
-
- self.version += 1
- version_c = VERSION_C % self.version
- version_c_filename = os.path.join(tmp, "version.c")
- open(version_c_filename, "w").write(version_c)
- # hg uses timestamps to distinguish files which have changed, so we
- # force the mtime forward a little bit
- future = time.time() + 2*self.version
- os.utime(version_c_filename, (future, future))
- w = self.dovc(tmp, "commit -m revised_to_%d" % self.version)
- yield w; w.getResult()
- w = self.dovc(tmp, "push %s" % self.rep_trunk)
- yield w; w.getResult()
- w = self.dovc(tmp, "identify")
- yield w; out = w.getResult()
- self.addTrunkRev(self.extract_id(out))
- rmdirRecursive(tmp)
- vc_revise = deferredGenerator(vc_revise)
-
- def vc_try_checkout(self, workdir, rev, branch=None):
- assert os.path.abspath(workdir) == workdir
- if os.path.exists(workdir):
- rmdirRecursive(workdir)
- if branch:
- src = self.rep_branch
- else:
- src = self.rep_trunk
- w = self.dovc(self.hg_base, "clone %s %s" % (src, workdir))
- yield w; w.getResult()
- try_c_filename = os.path.join(workdir, "subdir", "subdir.c")
- open(try_c_filename, "w").write(TRY_C)
- future = time.time() + 2*self.version
- os.utime(try_c_filename, (future, future))
- vc_try_checkout = deferredGenerator(vc_try_checkout)
-
- def vc_try_finish(self, workdir):
- rmdirRecursive(workdir)
-
-
-class Mercurial(VCBase, unittest.TestCase):
- vc_name = "hg"
-
- # Mercurial has a metadir=".hg", but it does not have an 'export' mode.
- metadir = None
- vctype = "step.Mercurial"
- vctype_try = "hg"
- has_got_revision = True
-
- def testCheckout(self):
- self.helper.vcargs = { 'repourl': self.helper.rep_trunk }
- d = self.do_vctest(testRetry=False)
-
- # TODO: testRetry has the same problem with Mercurial as it does for
- # Arch
- return maybeWait(d)
-
- def testPatch(self):
- self.helper.vcargs = { 'baseURL': self.helper.hg_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_patch()
- return maybeWait(d)
-
- def testCheckoutBranch(self):
- self.helper.vcargs = { 'baseURL': self.helper.hg_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_branch()
- return maybeWait(d)
-
- def testCheckoutHTTP(self):
- self.serveHTTP()
- repourl = "http://localhost:%d/Mercurial-Repository/trunk/.hg" % self.httpPort
- self.helper.vcargs = { 'repourl': repourl }
- d = self.do_vctest(testRetry=False)
- return maybeWait(d)
- # TODO: The easiest way to publish hg over HTTP is by running 'hg serve'
- # as a child process while the test is running. (you can also use a CGI
- # script, which sounds difficult, or you can publish the files directly,
- # which isn't well documented).
- testCheckoutHTTP.skip = "not yet implemented, use 'hg serve'"
-
- def testTry(self):
- self.helper.vcargs = { 'baseURL': self.helper.hg_base + "/",
- 'defaultBranch': "trunk" }
- d = self.do_getpatch()
- return maybeWait(d)
-
-VCS.registerVC(Mercurial.vc_name, MercurialHelper())
-
-
-class Sources(unittest.TestCase):
- # TODO: this needs serious rethink
- def makeChange(self, when=None, revision=None):
- if when:
- when = mktime_tz(parsedate_tz(when))
- return changes.Change("fred", [], "", when=when, revision=revision)
-
- def testCVS1(self):
- r = base.BuildRequest("forced build", SourceStamp())
- b = base.Build([r])
- s = step.CVS(cvsroot=None, cvsmodule=None, workdir=None, build=b)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()), None)
-
- def testCVS2(self):
- c = []
- c.append(self.makeChange("Wed, 08 Sep 2004 09:00:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:01:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:02:00 -0700"))
- r = base.BuildRequest("forced", SourceStamp(changes=c))
- submitted = "Wed, 08 Sep 2004 09:04:00 -0700"
- r.submittedAt = mktime_tz(parsedate_tz(submitted))
- b = base.Build([r])
- s = step.CVS(cvsroot=None, cvsmodule=None, workdir=None, build=b)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()),
- "Wed, 08 Sep 2004 16:03:00 -0000")
-
- def testCVS3(self):
- c = []
- c.append(self.makeChange("Wed, 08 Sep 2004 09:00:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:01:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:02:00 -0700"))
- r = base.BuildRequest("forced", SourceStamp(changes=c))
- submitted = "Wed, 08 Sep 2004 09:04:00 -0700"
- r.submittedAt = mktime_tz(parsedate_tz(submitted))
- b = base.Build([r])
- s = step.CVS(cvsroot=None, cvsmodule=None, workdir=None, build=b,
- checkoutDelay=10)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()),
- "Wed, 08 Sep 2004 16:02:10 -0000")
-
- def testCVS4(self):
- c = []
- c.append(self.makeChange("Wed, 08 Sep 2004 09:00:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:01:00 -0700"))
- c.append(self.makeChange("Wed, 08 Sep 2004 09:02:00 -0700"))
- r1 = base.BuildRequest("forced", SourceStamp(changes=c))
- submitted = "Wed, 08 Sep 2004 09:04:00 -0700"
- r1.submittedAt = mktime_tz(parsedate_tz(submitted))
-
- c = []
- c.append(self.makeChange("Wed, 08 Sep 2004 09:05:00 -0700"))
- r2 = base.BuildRequest("forced", SourceStamp(changes=c))
- submitted = "Wed, 08 Sep 2004 09:07:00 -0700"
- r2.submittedAt = mktime_tz(parsedate_tz(submitted))
-
- b = base.Build([r1, r2])
- s = step.CVS(cvsroot=None, cvsmodule=None, workdir=None, build=b)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()),
- "Wed, 08 Sep 2004 16:06:00 -0000")
-
- def testSVN1(self):
- r = base.BuildRequest("forced", SourceStamp())
- b = base.Build([r])
- s = step.SVN(svnurl="dummy", workdir=None, build=b)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()), None)
-
- def testSVN2(self):
- c = []
- c.append(self.makeChange(revision=4))
- c.append(self.makeChange(revision=10))
- c.append(self.makeChange(revision=67))
- r = base.BuildRequest("forced", SourceStamp(changes=c))
- b = base.Build([r])
- s = step.SVN(svnurl="dummy", workdir=None, build=b)
- self.failUnlessEqual(s.computeSourceRevision(b.allChanges()), 67)
-
-class Patch(VCBase, unittest.TestCase):
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def testPatch(self):
- # invoke 'patch' all by itself, to see if it works the way we think
- # it should. This is intended to ferret out some windows test
- # failures.
- helper = BaseHelper()
- self.workdir = os.path.join("test_vc", "testPatch")
- helper.populate(self.workdir)
- patch = which("patch")[0]
-
- command = [patch, "-p0"]
- class FakeBuilder:
- usePTY = False
- def sendUpdate(self, status):
- pass
- c = commands.ShellCommand(FakeBuilder(), command, self.workdir,
- sendRC=False, stdin=p0_diff)
- d = c.start()
- d.addCallback(self._testPatch_1)
- return maybeWait(d)
-
- def _testPatch_1(self, res):
- # make sure the file actually got patched
- subdir_c = os.path.join(self.workdir, "subdir", "subdir.c")
- data = open(subdir_c, "r").read()
- self.failUnlessIn("Hello patched subdir.\\n", data)
diff --git a/buildbot/buildbot-source/buildbot/test/test_web.py b/buildbot/buildbot-source/buildbot/test/test_web.py
deleted file mode 100644
index 4be9c26aa..000000000
--- a/buildbot/buildbot-source/buildbot/test/test_web.py
+++ /dev/null
@@ -1,493 +0,0 @@
-# -*- test-case-name: buildbot.test.test_web -*-
-
-import sys, os, os.path, time, shutil
-from twisted.python import log, components, util
-#log.startLogging(sys.stderr)
-
-from twisted.trial import unittest
-from buildbot.test.runutils import RunMixin
-
-from twisted.internet import reactor, defer, protocol
-from twisted.internet.interfaces import IReactorUNIX
-from twisted.web import client
-
-from buildbot import master, interfaces, buildset, sourcestamp
-from buildbot.twcompat import providedBy, maybeWait
-from buildbot.status import html, builder
-from buildbot.changes.changes import Change
-from buildbot.process import step, base
-
-class ConfiguredMaster(master.BuildMaster):
- """This BuildMaster variant has a static config file, provided as a
- string when it is created."""
-
- def __init__(self, basedir, config):
- self.config = config
- master.BuildMaster.__init__(self, basedir)
-
- def loadTheConfigFile(self):
- self.loadConfig(self.config)
-
-components.registerAdapter(master.Control, ConfiguredMaster,
- interfaces.IControl)
-
-
-base_config = """
-from buildbot.status import html
-BuildmasterConfig = c = {
- 'bots': [],
- 'sources': [],
- 'schedulers': [],
- 'builders': [],
- 'slavePortnum': 0,
- }
-"""
-
-
-
-class DistribUNIX:
- def __init__(self, unixpath):
- from twisted.web import server, resource, distrib
- root = resource.Resource()
- self.r = r = distrib.ResourceSubscription("unix", unixpath)
- root.putChild('remote', r)
- self.p = p = reactor.listenTCP(0, server.Site(root))
- self.portnum = p.getHost().port
- def shutdown(self):
- d = defer.maybeDeferred(self.p.stopListening)
- return d
-
-class DistribTCP:
- def __init__(self, port):
- from twisted.web import server, resource, distrib
- root = resource.Resource()
- self.r = r = distrib.ResourceSubscription("localhost", port)
- root.putChild('remote', r)
- self.p = p = reactor.listenTCP(0, server.Site(root))
- self.portnum = p.getHost().port
- def shutdown(self):
- d = defer.maybeDeferred(self.p.stopListening)
- d.addCallback(self._shutdown_1)
- return d
- def _shutdown_1(self, res):
- return self.r.publisher.broker.transport.loseConnection()
-
-class SlowReader(protocol.Protocol):
- didPause = False
- count = 0
- data = ""
- def __init__(self, req):
- self.req = req
- self.d = defer.Deferred()
- def connectionMade(self):
- self.transport.write(self.req)
- def dataReceived(self, data):
- self.data += data
- self.count += len(data)
- if not self.didPause and self.count > 10*1000:
- self.didPause = True
- self.transport.pauseProducing()
- reactor.callLater(2, self.resume)
- def resume(self):
- self.transport.resumeProducing()
- def connectionLost(self, why):
- self.d.callback(None)
-
-class CFactory(protocol.ClientFactory):
- def __init__(self, p):
- self.p = p
- def buildProtocol(self, addr):
- self.p.factory = self
- return self.p
-
-def stopHTTPLog():
- # grr.
- try:
- from twisted.web import http # Twisted-2.0
- except ImportError:
- from twisted.protocols import http # Twisted-1.3
- http._logDateTimeStop()
-
-class BaseWeb:
- master = None
-
- def failUnlessIn(self, substr, string):
- self.failUnless(string.find(substr) != -1)
-
- def tearDown(self):
- stopHTTPLog()
- if self.master:
- d = self.master.stopService()
- return maybeWait(d)
-
- def find_waterfall(self, master):
- return filter(lambda child: isinstance(child, html.Waterfall),
- list(master))
-
-class Ports(BaseWeb, unittest.TestCase):
-
- def test_webPortnum(self):
- # run a regular web server on a TCP socket
- config = base_config + "c['status'] = [html.Waterfall(http_port=0)]\n"
- os.mkdir("test_web1")
- self.master = m = ConfiguredMaster("test_web1", config)
- m.startService()
- # hack to find out what randomly-assigned port it is listening on
- port = list(self.find_waterfall(m)[0])[0]._port.getHost().port
-
- d = client.getPage("http://localhost:%d/" % port)
- d.addCallback(self._test_webPortnum_1)
- return maybeWait(d)
- test_webPortnum.timeout = 10
- def _test_webPortnum_1(self, page):
- #print page
- self.failUnless(page)
-
- def test_webPathname(self):
- # running a t.web.distrib server over a UNIX socket
- if not providedBy(reactor, IReactorUNIX):
- raise unittest.SkipTest("UNIX sockets not supported here")
- config = (base_config +
- "c['status'] = [html.Waterfall(distrib_port='.web-pb')]\n")
- os.mkdir("test_web2")
- self.master = m = ConfiguredMaster("test_web2", config)
- m.startService()
-
- p = DistribUNIX("test_web2/.web-pb")
-
- d = client.getPage("http://localhost:%d/remote/" % p.portnum)
- d.addCallback(self._test_webPathname_1, p)
- return maybeWait(d)
- test_webPathname.timeout = 10
- def _test_webPathname_1(self, page, p):
- #print page
- self.failUnless(page)
- return p.shutdown()
-
-
- def test_webPathname_port(self):
- # running a t.web.distrib server over TCP
- config = (base_config +
- "c['status'] = [html.Waterfall(distrib_port=0)]\n")
- os.mkdir("test_web3")
- self.master = m = ConfiguredMaster("test_web3", config)
- m.startService()
- dport = list(self.find_waterfall(m)[0])[0]._port.getHost().port
-
- p = DistribTCP(dport)
-
- d = client.getPage("http://localhost:%d/remote/" % p.portnum)
- d.addCallback(self._test_webPathname_port_1, p)
- return maybeWait(d)
- test_webPathname_port.timeout = 10
- def _test_webPathname_port_1(self, page, p):
- self.failUnlessIn("BuildBot", page)
- return p.shutdown()
-
-
-class Waterfall(BaseWeb, unittest.TestCase):
- def test_waterfall(self):
- os.mkdir("test_web4")
- os.mkdir("my-maildir"); os.mkdir("my-maildir/new")
- self.robots_txt = os.path.abspath(os.path.join("test_web4",
- "robots.txt"))
- self.robots_txt_contents = "User-agent: *\nDisallow: /\n"
- f = open(self.robots_txt, "w")
- f.write(self.robots_txt_contents)
- f.close()
- # this is the right way to configure the Waterfall status
- config1 = base_config + """
-from buildbot.changes import mail
-c['sources'] = [mail.SyncmailMaildirSource('my-maildir')]
-c['status'] = [html.Waterfall(http_port=0, robots_txt=%s)]
-""" % repr(self.robots_txt)
-
- self.master = m = ConfiguredMaster("test_web4", config1)
- m.startService()
- # hack to find out what randomly-assigned port it is listening on
- port = list(self.find_waterfall(m)[0])[0]._port.getHost().port
- self.port = port
- # insert an event
- m.change_svc.addChange(Change("user", ["foo.c"], "comments"))
-
- d = client.getPage("http://localhost:%d/" % port)
- d.addCallback(self._test_waterfall_1)
- return maybeWait(d)
- test_waterfall.timeout = 10
- def _test_waterfall_1(self, page):
- self.failUnless(page)
- self.failUnlessIn("current activity", page)
- self.failUnlessIn("<html", page)
- TZ = time.tzname[time.daylight]
- self.failUnlessIn("time (%s)" % TZ, page)
-
- # phase=0 is really for debugging the waterfall layout
- d = client.getPage("http://localhost:%d/?phase=0" % self.port)
- d.addCallback(self._test_waterfall_2)
- return d
- def _test_waterfall_2(self, page):
- self.failUnless(page)
- self.failUnlessIn("<html", page)
-
- d = client.getPage("http://localhost:%d/favicon.ico" % self.port)
- d.addCallback(self._test_waterfall_3)
- return d
- def _test_waterfall_3(self, icon):
- expected = open(html.buildbot_icon,"rb").read()
- self.failUnless(icon == expected)
-
- d = client.getPage("http://localhost:%d/changes" % self.port)
- d.addCallback(self._test_waterfall_4)
- return d
- def _test_waterfall_4(self, changes):
- self.failUnlessIn("<li>Syncmail mailing list in maildir " +
- "my-maildir</li>", changes)
-
- d = client.getPage("http://localhost:%d/robots.txt" % self.port)
- d.addCallback(self._test_waterfall_5)
- return d
- def _test_waterfall_5(self, robotstxt):
- self.failUnless(robotstxt == self.robots_txt_contents)
-
-
-geturl_config = """
-from buildbot.status import html
-from buildbot.changes import mail
-from buildbot.process import step, factory
-from buildbot.scheduler import Scheduler
-from buildbot.changes.base import ChangeSource
-s = factory.s
-
-class DiscardScheduler(Scheduler):
- def addChange(self, change):
- pass
-class DummyChangeSource(ChangeSource):
- pass
-
-BuildmasterConfig = c = {}
-c['bots'] = [('bot1', 'sekrit'), ('bot2', 'sekrit')]
-c['sources'] = [DummyChangeSource()]
-c['schedulers'] = [DiscardScheduler('discard', None, 60, ['b1'])]
-c['slavePortnum'] = 0
-c['status'] = [html.Waterfall(http_port=0)]
-
-f = factory.BuildFactory([s(step.RemoteDummy, timeout=1)])
-
-c['builders'] = [
- {'name': 'b1', 'slavenames': ['bot1','bot2'],
- 'builddir': 'b1', 'factory': f},
- ]
-c['buildbotURL'] = 'http://dummy.example.org:8010/'
-
-"""
-
-class GetURL(RunMixin, unittest.TestCase):
-
- def setUp(self):
- RunMixin.setUp(self)
- self.master.loadConfig(geturl_config)
- self.master.startService()
- d = self.connectSlave(["b1"])
- return maybeWait(d)
-
- def tearDown(self):
- stopHTTPLog()
- return RunMixin.tearDown(self)
-
- def doBuild(self, buildername):
- br = base.BuildRequest("forced", sourcestamp.SourceStamp())
- d = br.waitUntilFinished()
- self.control.getBuilder(buildername).requestBuild(br)
- return d
-
- def assertNoURL(self, target):
- self.failUnlessIdentical(self.status.getURLForThing(target), None)
-
- def assertURLEqual(self, target, expected):
- got = self.status.getURLForThing(target)
- full_expected = "http://dummy.example.org:8010/" + expected
- self.failUnlessEqual(got, full_expected)
-
- def testMissingBase(self):
- noweb_config1 = geturl_config + "del c['buildbotURL']\n"
- d = self.master.loadConfig(noweb_config1)
- d.addCallback(self._testMissingBase_1)
- return maybeWait(d)
- def _testMissingBase_1(self, res):
- s = self.status
- self.assertNoURL(s)
- builder = s.getBuilder("b1")
- self.assertNoURL(builder)
-
- def testBase(self):
- s = self.status
- self.assertURLEqual(s, "")
- builder = s.getBuilder("b1")
- self.assertURLEqual(builder, "b1")
-
- def testBrokenStuff(self):
- s = self.status
- self.assertURLEqual(s.getSchedulers()[0], "schedulers/0")
- self.assertURLEqual(s.getSlave("bot1"), "slaves/bot1")
- # we didn't put a Change into the actual Build before, so this fails
- #self.assertURLEqual(build.getChanges()[0], "changes/1")
- testBrokenStuff.todo = "not implemented yet"
-
- def testChange(self):
- s = self.status
- c = Change("user", ["foo.c"], "comments")
- self.master.change_svc.addChange(c)
- # TODO: something more like s.getChanges(), requires IChange and
- # an accessor in IStatus. The HTML page exists already, though
- self.assertURLEqual(c, "changes/1")
-
- def testBuild(self):
- # first we do some stuff so we'll have things to look at.
- s = self.status
- d = self.doBuild("b1")
- # maybe check IBuildSetStatus here?
- d.addCallback(self._testBuild_1)
- return maybeWait(d)
-
- def _testBuild_1(self, res):
- s = self.status
- builder = s.getBuilder("b1")
- build = builder.getLastFinishedBuild()
- self.assertURLEqual(build, "b1/builds/0")
- # no page for builder.getEvent(-1)
- step = build.getSteps()[0]
- self.assertURLEqual(step, "b1/builds/0/step-remote%20dummy")
- # maybe page for build.getTestResults?
- self.assertURLEqual(step.getLogs()[0],
- "b1/builds/0/step-remote%20dummy/0")
-
-
-
-class Logfile(BaseWeb, RunMixin, unittest.TestCase):
- def setUp(self):
- config = """
-from buildbot.status import html
-from buildbot.process.factory import BasicBuildFactory
-f1 = BasicBuildFactory('cvsroot', 'cvsmodule')
-BuildmasterConfig = {
- 'bots': [('bot1', 'passwd1')],
- 'sources': [],
- 'schedulers': [],
- 'builders': [{'name': 'builder1', 'slavename': 'bot1',
- 'builddir':'workdir', 'factory':f1}],
- 'slavePortnum': 0,
- 'status': [html.Waterfall(http_port=0)],
- }
-"""
- if os.path.exists("test_logfile"):
- shutil.rmtree("test_logfile")
- os.mkdir("test_logfile")
- self.master = m = ConfiguredMaster("test_logfile", config)
- m.startService()
- # hack to find out what randomly-assigned port it is listening on
- port = list(self.find_waterfall(m)[0])[0]._port.getHost().port
- self.port = port
- # insert an event
-
- s = m.status.getBuilder("builder1")
- req = base.BuildRequest("reason", sourcestamp.SourceStamp())
- bs = s.newBuild()
- build1 = base.Build([req])
- step1 = step.BuildStep(build=build1)
- step1.name = "setup"
- bs.addStep(step1)
- bs.buildStarted(build1)
- step1.step_status.stepStarted()
-
- log1 = step1.addLog("output")
- log1.addStdout("some stdout\n")
- log1.finish()
-
- log2 = step1.addHTMLLog("error", "<html>ouch</html>")
-
- log3 = step1.addLog("big")
- log3.addStdout("big log\n")
- for i in range(1000):
- log3.addStdout("a" * 500)
- log3.addStderr("b" * 500)
- log3.finish()
-
- log4 = step1.addCompleteLog("bigcomplete",
- "big2 log\n" + "a" * 1*1000*1000)
-
- step1.step_status.stepFinished(builder.SUCCESS)
- bs.buildFinished()
-
- def getLogURL(self, stepname, lognum):
- logurl = "http://localhost:%d/builder1/builds/0/step-%s/%d" \
- % (self.port, stepname, lognum)
- return logurl
-
- def test_logfile1(self):
- d = client.getPage("http://localhost:%d/" % self.port)
- d.addCallback(self._test_logfile1_1)
- return maybeWait(d)
- test_logfile1.timeout = 20
- def _test_logfile1_1(self, page):
- self.failUnless(page)
-
- def test_logfile2(self):
- logurl = self.getLogURL("setup", 0)
- d = client.getPage(logurl)
- d.addCallback(self._test_logfile2_1)
- return maybeWait(d)
- def _test_logfile2_1(self, logbody):
- self.failUnless(logbody)
-
- def test_logfile3(self):
- logurl = self.getLogURL("setup", 0)
- d = client.getPage(logurl + "/text")
- d.addCallback(self._test_logfile3_1)
- return maybeWait(d)
- def _test_logfile3_1(self, logtext):
- self.failUnlessEqual(logtext, "some stdout\n")
-
- def test_logfile4(self):
- logurl = self.getLogURL("setup", 1)
- d = client.getPage(logurl)
- d.addCallback(self._test_logfile4_1)
- return maybeWait(d)
- def _test_logfile4_1(self, logbody):
- self.failUnlessEqual(logbody, "<html>ouch</html>")
-
- def test_logfile5(self):
- # this is log3, which is about 1MB in size, made up of alternating
- # stdout/stderr chunks. buildbot-0.6.6, when run against
- # twisted-1.3.0, fails to resume sending chunks after the client
- # stalls for a few seconds, because of a recursive doWrite() call
- # that was fixed in twisted-2.0.0
- p = SlowReader("GET /builder1/builds/0/step-setup/2 HTTP/1.0\r\n\r\n")
- f = CFactory(p)
- c = reactor.connectTCP("localhost", self.port, f)
- d = p.d
- d.addCallback(self._test_logfile5_1, p)
- return maybeWait(d, 10)
- test_logfile5.timeout = 10
- def _test_logfile5_1(self, res, p):
- self.failUnlessIn("big log", p.data)
- self.failUnlessIn("a"*100, p.data)
- self.failUnless(p.count > 1*1000*1000)
-
- def test_logfile6(self):
- # this is log4, which is about 1MB in size, one big chunk.
- # buildbot-0.6.6 dies as the NetstringReceiver barfs on the
- # saved logfile, because it was using one big chunk and exceeding
- # NetstringReceiver.MAX_LENGTH
- p = SlowReader("GET /builder1/builds/0/step-setup/3 HTTP/1.0\r\n\r\n")
- f = CFactory(p)
- c = reactor.connectTCP("localhost", self.port, f)
- d = p.d
- d.addCallback(self._test_logfile6_1, p)
- return maybeWait(d, 10)
- test_logfile6.timeout = 10
- def _test_logfile6_1(self, res, p):
- self.failUnlessIn("big2 log", p.data)
- self.failUnlessIn("a"*100, p.data)
- self.failUnless(p.count > 1*1000*1000)
-
-
diff --git a/buildbot/buildbot-source/buildbot/twcompat.py b/buildbot/buildbot-source/buildbot/twcompat.py
deleted file mode 100644
index 02c89c5eb..000000000
--- a/buildbot/buildbot-source/buildbot/twcompat.py
+++ /dev/null
@@ -1,285 +0,0 @@
-
-if 0:
- print "hey python-mode, stop thinking I want 8-char indentation"
-
-"""
-utilities to be compatible with both Twisted-1.3 and 2.0
-
-implements: Use this like the following.
-
-from buildbot.twcompat import implements
-class Foo:
-    if implements:
-        implements(IFoo)
-    else:
-        __implements__ = IFoo,
-
-Interface:
-    from buildbot.twcompat import Interface
-    class IFoo(Interface):
-        pass
-
-providedBy:
-    from buildbot.twcompat import providedBy
-    assert providedBy(obj, IFoo)
-"""
-
-import os, os.path
-
-from twisted.copyright import version
-from twisted.python import components
-
-# does our Twisted use zope.interface?
-if hasattr(components, "interface"):
- # yes
- from zope.interface import implements
- from zope.interface import Interface
- def providedBy(obj, iface):
- return iface.providedBy(obj)
-else:
- # nope
- implements = None
- from twisted.python.components import Interface
- providedBy = components.implements
-
-# are we using a version of Trial that allows setUp/testFoo/tearDown to
-# return Deferreds?
-oldtrial = version.startswith("1.3")
-
-# use this at the end of setUp/testFoo/tearDown methods
-def maybeWait(d, timeout="none"):
- from twisted.python import failure
- from twisted.trial import unittest
- if oldtrial:
- # this is required for oldtrial (twisted-1.3.0) compatibility. When we
- # move to retrial (twisted-2.0.0), replace these with a simple 'return
- # d'.
- try:
- if timeout == "none":
- unittest.deferredResult(d)
- else:
- unittest.deferredResult(d, timeout)
- except failure.Failure, f:
- if f.check(unittest.SkipTest):
- raise f.value
- raise
- return None
- return d
-
-# waitForDeferred and getProcessOutputAndValue are twisted-2.0 things. If
-# we're running under 1.3, patch them into place. These versions are copied
-# from twisted somewhat after 2.0.1 .
-
-from twisted.internet import defer
-if not hasattr(defer, 'waitForDeferred'):
- Deferred = defer.Deferred
- class waitForDeferred:
- """
- API Stability: semi-stable
-
- Maintainer: U{Christopher Armstrong<mailto:radix@twistedmatrix.com>}
-
- waitForDeferred and deferredGenerator help you write
- Deferred-using code that looks like it's blocking (but isn't
- really), with the help of generators.
-
- There are two important functions involved: waitForDeferred, and
- deferredGenerator.
-
- def thingummy():
- thing = waitForDeferred(makeSomeRequestResultingInDeferred())
- yield thing
- thing = thing.getResult()
- print thing #the result! hoorj!
- thingummy = deferredGenerator(thingummy)
-
- waitForDeferred returns something that you should immediately yield;
- when your generator is resumed, calling thing.getResult() will either
- give you the result of the Deferred if it was a success, or raise an
- exception if it was a failure.
-
- deferredGenerator takes one of these waitForDeferred-using
- generator functions and converts it into a function that returns a
- Deferred. The result of the Deferred will be the last
- value that your generator yielded (remember that 'return result' won't
- work; use 'yield result; return' in place of that).
-
- Note that not yielding anything from your generator will make the
- Deferred result in None. Yielding a Deferred from your generator
- is also an error condition; always yield waitForDeferred(d)
- instead.
-
- The Deferred returned from your deferred generator may also
- errback if your generator raised an exception.
-
- def thingummy():
- thing = waitForDeferred(makeSomeRequestResultingInDeferred())
- yield thing
- thing = thing.getResult()
- if thing == 'I love Twisted':
- # will become the result of the Deferred
- yield 'TWISTED IS GREAT!'
- return
- else:
- # will trigger an errback
- raise Exception('DESTROY ALL LIFE')
- thingummy = deferredGenerator(thingummy)
-
- Put succinctly, these functions connect deferred-using code with this
- 'fake blocking' style in both directions: waitForDeferred converts from
- a Deferred to the 'blocking' style, and deferredGenerator converts from
- the 'blocking' style to a Deferred.
- """
- def __init__(self, d):
- if not isinstance(d, Deferred):
- raise TypeError("You must give waitForDeferred a Deferred. You gave it %r." % (d,))
- self.d = d
-
- def getResult(self):
- if hasattr(self, 'failure'):
- self.failure.raiseException()
- return self.result
-
- def _deferGenerator(g, deferred=None, result=None):
- """
- See L{waitForDeferred}.
- """
- while 1:
- if deferred is None:
- deferred = defer.Deferred()
- try:
- result = g.next()
- except StopIteration:
- deferred.callback(result)
- return deferred
- except:
- deferred.errback()
- return deferred
-
- # Deferred.callback(Deferred) raises an error; we catch this case
- # early here and give a nicer error message to the user in case
- # they yield a Deferred. Perhaps eventually these semantics may
- # change.
- if isinstance(result, defer.Deferred):
- return defer.fail(TypeError("Yield waitForDeferred(d), not d!"))
-
- if isinstance(result, waitForDeferred):
- waiting=[True, None]
- # Pass vars in so they don't get changed going around the loop
- def gotResult(r, waiting=waiting, result=result):
- result.result = r
- if waiting[0]:
- waiting[0] = False
- waiting[1] = r
- else:
- _deferGenerator(g, deferred, r)
- def gotError(f, waiting=waiting, result=result):
- result.failure = f
- if waiting[0]:
- waiting[0] = False
- waiting[1] = f
- else:
- _deferGenerator(g, deferred, f)
- result.d.addCallbacks(gotResult, gotError)
- if waiting[0]:
- # Haven't called back yet, set flag so that we get reinvoked
- # and return from the loop
- waiting[0] = False
- return deferred
- else:
- result = waiting[1]
-
- def func_metamerge(f, g):
- """
- Merge function metadata from f -> g and return g
- """
- try:
- g.__doc__ = f.__doc__
- g.__dict__.update(f.__dict__)
- g.__name__ = f.__name__
- except (TypeError, AttributeError):
- pass
- return g
-
- def deferredGenerator(f):
- """
- See L{waitForDeferred}.
- """
- def unwindGenerator(*args, **kwargs):
- return _deferGenerator(f(*args, **kwargs))
- return func_metamerge(f, unwindGenerator)
-
- defer.waitForDeferred = waitForDeferred
- defer.deferredGenerator = deferredGenerator
-
-from twisted.internet import utils
-if not hasattr(utils, "getProcessOutputAndValue"):
- from twisted.internet import reactor, protocol
- _callProtocolWithDeferred = utils._callProtocolWithDeferred
- try:
- import cStringIO as StringIO
- except ImportError:
- import StringIO
-
- class _EverythingGetter(protocol.ProcessProtocol):
-
- def __init__(self, deferred):
- self.deferred = deferred
- self.outBuf = StringIO.StringIO()
- self.errBuf = StringIO.StringIO()
- self.outReceived = self.outBuf.write
- self.errReceived = self.errBuf.write
-
- def processEnded(self, reason):
- out = self.outBuf.getvalue()
- err = self.errBuf.getvalue()
- e = reason.value
- code = e.exitCode
- if e.signal:
- self.deferred.errback((out, err, e.signal))
- else:
- self.deferred.callback((out, err, code))
-
- def getProcessOutputAndValue(executable, args=(), env={}, path='.',
- reactor=reactor):
-        """Spawn a process and return a Deferred that will be called back
-        with its output (from stdout and stderr) and its exit code as (out,
-        err, code). If a signal is raised, the Deferred will errback with the
-        stdout and stderr up to that point, along with the signal, as (out,
-        err, signalNum).
- """
- return _callProtocolWithDeferred(_EverythingGetter,
- executable, args, env, path,
- reactor)
- utils.getProcessOutputAndValue = getProcessOutputAndValue
-
-
-# copied from Twisted circa 2.2.0
-def _which(name, flags=os.X_OK):
- """Search PATH for executable files with the given name.
-
- @type name: C{str}
- @param name: The name for which to search.
-
- @type flags: C{int}
- @param flags: Arguments to L{os.access}.
-
- @rtype: C{list}
-    @return: A list of the full paths to files found, in the
-    order in which they were found.
- """
- result = []
- exts = filter(None, os.environ.get('PATHEXT', '').split(os.pathsep))
- for p in os.environ['PATH'].split(os.pathsep):
- p = os.path.join(p, name)
- if os.access(p, flags):
- result.append(p)
- for e in exts:
- pext = p + e
- if os.access(pext, flags):
- result.append(pext)
- return result
-
-try:
- from twisted.python.procutils import which
-except ImportError:
- which = _which
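
The maybeWait() shim above is what the test modules lean on: a test method builds
its Deferred as usual and simply returns maybeWait(d), which blocks via
deferredResult() under Twisted-1.3 trial and hands the Deferred straight back
under 2.0. A minimal usage sketch, assuming only the helper itself (the test
class and assertion are illustrative, not part of the deleted file):

    from twisted.internet import defer
    from twisted.trial import unittest
    from buildbot.twcompat import maybeWait

    class ExampleTest(unittest.TestCase):
        def testSomething(self):
            # build the Deferred the test actually cares about
            d = defer.succeed("ok")
            d.addCallback(self.failUnlessEqual, "ok")
            # oldtrial (1.3) blocks here; newer trial just gets the Deferred
            return maybeWait(d)
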
diff --git a/buildbot/buildbot-source/buildbot/util.py b/buildbot/buildbot-source/buildbot/util.py
deleted file mode 100644
index bb9d9943b..000000000
--- a/buildbot/buildbot-source/buildbot/util.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- test-case-name: buildbot.test.test_util -*-
-
-from twisted.internet.defer import Deferred
-from twisted.python import log
-from twisted.spread import pb
-import time
-
-def now():
- #return int(time.time())
- return time.time()
-
-def earlier(old, new):
- # minimum of two things, but "None" counts as +infinity
- if old:
- if new < old:
- return new
- return old
- return new
-
-def later(old, new):
- # maximum of two things, but "None" counts as -infinity
- if old:
- if new > old:
- return new
- return old
- return new
-
-class CancelableDeferred(Deferred):
- """I am a version of Deferred that can be canceled by calling my
- .cancel() method. After being canceled, no callbacks or errbacks will be
- executed.
- """
- def __init__(self):
- Deferred.__init__(self)
- self.canceled = 0
- def cancel(self):
- self.canceled = 1
- def _runCallbacks(self):
- if self.canceled:
- self.callbacks = []
- return
- Deferred._runCallbacks(self)
-
-def ignoreStaleRefs(failure):
- """d.addErrback(util.ignoreStaleRefs)"""
- r = failure.trap(pb.DeadReferenceError, pb.PBConnectionLost)
- return None
-
-class _None:
- pass
-
-class ComparableMixin:
- """Specify a list of attributes that are 'important'. These will be used
- for all comparison operations."""
-
- compare_attrs = []
-
- def __hash__(self):
- alist = [self.__class__] + \
- [getattr(self, name, _None) for name in self.compare_attrs]
- return hash(tuple(alist))
-
- def __cmp__(self, them):
- if cmp(type(self), type(them)):
- return cmp(type(self), type(them))
- if cmp(self.__class__, them.__class__):
- return cmp(self.__class__, them.__class__)
- assert self.compare_attrs == them.compare_attrs
- self_list= [getattr(self, name, _None) for name in self.compare_attrs]
- them_list= [getattr(them, name, _None) for name in self.compare_attrs]
- return cmp(self_list, them_list)
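
ComparableMixin's docstring is terse, so a minimal usage sketch may help: a
subclass lists the attributes that matter in compare_attrs, and __cmp__ and
__hash__ then look only at those. The Thing class below is illustrative, not
taken from the deleted file:

    from buildbot.util import ComparableMixin

    class Thing(ComparableMixin):
        compare_attrs = ["name", "size"]
        def __init__(self, name, size):
            self.name = name
            self.size = size

    # instances whose compare_attrs values match compare (and hash) as equal
    assert Thing("a", 1) == Thing("a", 1)
    assert hash(Thing("a", 1)) == hash(Thing("a", 1))
    assert Thing("a", 1) < Thing("b", 1)
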
diff --git a/buildbot/buildbot-source/contrib/README.txt b/buildbot/buildbot-source/contrib/README.txt
deleted file mode 100644
index f89efa3b6..000000000
--- a/buildbot/buildbot-source/contrib/README.txt
+++ /dev/null
@@ -1,37 +0,0 @@
-Utility scripts, things contributed by users but not strictly a part of
-buildbot:
-
-debugclient.py (and debug.*): debugging gui for buildbot
-
-fakechange.py: connect to a running bb and submit a fake change to trigger
- builders
-
-run_maxq.py: a builder-helper for running maxq under buildbot
-
-svn_buildbot.py: a script intended to be run from a subversion hook-script
- which submits changes to svn (requires python 2.3)
-
-svnpoller.py: this script is intended to be run from a cronjob, and uses 'svn
- log' to poll a (possibly remote) SVN repository for changes.
- For each change it finds, it runs 'buildbot sendchange' to
- deliver them to a waiting PBChangeSource on a (possibly remote)
- buildmaster. Modify the svnurl to point at your own SVN
- repository, and of course the user running the script must have
- read permissions to that repository. It keeps track of the last
-                 revision in a file; change 'fname' to set the location of this
- state file. Modify the --master argument to the 'buildbot
- sendchange' command to point at your buildmaster. Contributed
- by John Pye. Note that if there are multiple changes within a
- single polling interval, this will miss all but the last one.
-
-svn_watcher.py: adapted from svnpoller.py by Niklaus Giger to add options and
- run under windows. Runs as a standalone script (it loops
- internally rather than expecting to run from a cronjob),
- polls an SVN repository every 10 minutes. It expects the
- svnurl and buildmaster location as command-line arguments.
-
-viewcvspoll.py: a standalone script which loops every 60 seconds and polls a
- (local?) MySQL database (presumably maintained by ViewCVS?)
- for information about new CVS changes, then delivers them
- over PB to a remote buildmaster's PBChangeSource. Contributed
- by Stephen Kennedy.
diff --git a/buildbot/buildbot-source/contrib/arch_buildbot.py b/buildbot/buildbot-source/contrib/arch_buildbot.py
deleted file mode 100755
index 2b9ab822f..000000000
--- a/buildbot/buildbot-source/contrib/arch_buildbot.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#! /usr/bin/python
-
-# this script is meant to run as an Arch post-commit hook (and also as a
-# pre-commit hook), using the "arch-meta-hook" framework. See
-# http://wiki.gnuarch.org/NdimMetaHook for details. The pre-commit hook
-# creates a list of files (and log comments), while the post-commit hook
-# actually notifies the buildmaster.
-
-# this script doesn't handle partial commits quite right: it will tell the
-# buildmaster that everything changed, not just the filenames you give to
-# 'tla commit'.
-
-import os, commands, cStringIO
-from buildbot.scripts import runner
-
-# Just modify the appropriate values below and then put this file in two
-# places: ~/.arch-params/hooks/ARCHIVE/=precommit/90buildbot.py and
-# ~/.arch-params/hooks/ARCHIVE/=commit/10buildbot.py
-
-master = "localhost:9989"
-username = "myloginname"
-
-# Remember that for this to work, your buildmaster's master.cfg needs to have
-# a c['sources'] list which includes a pb.PBChangeSource instance.
-
-os.chdir(os.getenv("ARCH_TREE_ROOT"))
-filelist = ",,bb-files"
-commentfile = ",,bb-comments"
-
-if os.getenv("ARCH_HOOK_ACTION") == "precommit":
- files = []
- out = commands.getoutput("tla changes")
- for line in cStringIO.StringIO(out).readlines():
- if line[0] in "AMD": # add, modify, delete
- files.append(line[3:])
- if files:
- f = open(filelist, "w")
- f.write("".join(files))
- f.close()
- # comments
- logfiles = [f for f in os.listdir(".") if f.startswith("++log.")]
- if len(logfiles) > 1:
- print ("Warning, multiple ++log.* files found, getting comments "
- "from the first one")
- if logfiles:
- open(commentfile, "w").write(open(logfiles[0], "r").read())
-
-elif os.getenv("ARCH_HOOK_ACTION") == "commit":
- revision = os.getenv("ARCH_REVISION")
-
- files = []
- if os.path.exists(filelist):
- f = open(filelist, "r")
- for line in f.readlines():
- files.append(line.rstrip())
- if not files:
- # buildbot insists upon having at least one modified file (otherwise
- # the prefix-stripping mechanism will ignore the change)
- files = ["dummy"]
-
- if os.path.exists(commentfile):
- comments = open(commentfile, "r").read()
- else:
- comments = "commit from arch"
-
- c = {'master': master, 'username': username,
- 'revision': revision, 'comments': comments, 'files': files}
- runner.sendchange(c, True)
-
- if os.path.exists(filelist):
- os.unlink(filelist)
- if os.path.exists(commentfile):
- os.unlink(commentfile)
diff --git a/buildbot/buildbot-source/contrib/fakechange.py b/buildbot/buildbot-source/contrib/fakechange.py
deleted file mode 100755
index bc19f9e60..000000000
--- a/buildbot/buildbot-source/contrib/fakechange.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#! /usr/bin/python
-
-"""
-This is an example of how to use the remote ChangeMaster interface, which is
-a port that allows a remote program to inject Changes into the buildmaster.
-
-The buildmaster can either pull changes in from external sources (see
-buildbot.changes.changes.ChangeMaster.addSource for an example), or those
-changes can be pushed in from outside. This script shows how to do the
-pushing.
-
-Changes are just dictionaries with three keys:
-
- 'who': a simple string with a username. Responsibility for this change will
- be assigned to the named user (if something goes wrong with the build, they
- will be blamed for it).
-
- 'files': a list of strings, each with a filename relative to the top of the
- source tree.
-
- 'comments': a (multiline) string with checkin comments.
-
-Each call to .addChange injects a single Change object: each Change
-represents multiple files, all changed by the same person, and all with the
-same checkin comments.
-
-The port that this script connects to is the same 'slavePort' that the
-buildslaves and other debug tools use. The ChangeMaster service will only be
-available on that port if 'change' is in the list of services passed to
-buildbot.master.makeApp (this service is turned ON by default).
-"""
-
-import sys
-from twisted.spread import pb
-from twisted.cred import credentials
-from twisted.internet import reactor
-from twisted.python import log
-import commands, random, os.path
-
-def done(*args):
- reactor.stop()
-
-users = ('zaphod', 'arthur', 'trillian', 'marvin', 'sbfast')
-dirs = ('src', 'doc', 'tests')
-sources = ('foo.c', 'bar.c', 'baz.c', 'Makefile')
-docs = ('Makefile', 'index.html', 'manual.texinfo')
-
-def makeFilename():
- d = random.choice(dirs)
- if d in ('src', 'tests'):
- f = random.choice(sources)
- else:
- f = random.choice(docs)
- return os.path.join(d, f)
-
-
-def send_change(remote):
- who = random.choice(users)
- if len(sys.argv) > 1:
- files = sys.argv[1:]
- else:
- files = [makeFilename()]
- comments = commands.getoutput("fortune")
- change = {'who': who, 'files': files, 'comments': comments}
- d = remote.callRemote('addChange', change)
- d.addCallback(done)
- print "%s: %s" % (who, " ".join(files))
-
-
-f = pb.PBClientFactory()
-d = f.login(credentials.UsernamePassword("change", "changepw"))
-reactor.connectTCP("localhost", 8007, f)
-err = lambda f: (log.err(), reactor.stop())
-d.addCallback(send_change).addErrback(err)
-
-reactor.run()
diff --git a/buildbot/buildbot-source/contrib/hg_buildbot.py b/buildbot/buildbot-source/contrib/hg_buildbot.py
deleted file mode 100755
index 0ab99fc56..000000000
--- a/buildbot/buildbot-source/contrib/hg_buildbot.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#! /usr/bin/python
-
-# This is a script which delivers Change events from Mercurial to the
-# buildmaster each time a changeset is pushed into a repository. Add it to
-# the 'incoming' commit hook on your canonical "central" repository, by
-# putting something like the following in the .hg/hgrc file of that
-# repository:
-#
-# [hooks]
-# incoming.buildbot = /PATH/TO/hg_buildbot.py BUILDMASTER:PORT
-#
-# Note that both Buildbot and Mercurial must be installed on the repository
-# machine.
-
-import os, sys, commands
-from StringIO import StringIO
-from buildbot.scripts import runner
-
-MASTER = sys.argv[1]
-
-CHANGESET_ID = os.environ["HG_NODE"]
-
-# TODO: consider doing 'import mercurial.hg' and extract this information
-# using the native python
-out = commands.getoutput("hg -v log -r %s" % CHANGESET_ID)
-# TODO: or maybe use --template instead of trying hard to parse everything
-#out = commands.getoutput("hg --template SOMETHING log -r %s" % CHANGESET_ID)
-
-s = StringIO(out)
-while True:
- line = s.readline()
- if not line:
- break
- if line.startswith("user:"):
- user = line[line.find(":")+1:].strip()
- elif line.startswith("files:"):
- files = line[line.find(":")+1:].strip().split()
- elif line.startswith("description:"):
- comments = "".join(s.readlines())
- if comments[-1] == "\n":
- # this removes the additional newline that hg emits
- comments = comments[:-1]
- break
-
-change = {
- 'master': MASTER,
- # note: this is more likely to be a full email address, which would make
- # the left-hand "Changes" column kind of wide. The buildmaster should
- # probably be improved to display an abbreviation of the username.
- 'username': user,
- 'revision': CHANGESET_ID,
- 'comments': comments,
- 'files': files,
- }
-
-runner.sendchange(change, True)
-
diff --git a/buildbot/buildbot-source/contrib/run_maxq.py b/buildbot/buildbot-source/contrib/run_maxq.py
deleted file mode 100755
index 3f70446d8..000000000
--- a/buildbot/buildbot-source/contrib/run_maxq.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env jython
-
-import sys, glob
-
-testdir = sys.argv[1]
-
-orderfiles = glob.glob(testdir + '/*.tests')
-
-# wee. just be glad I didn't make this one gigantic nested listcomp.
-# anyway, this builds a once-nested list of files to test.
-
-#open!
-files = [open(fn) for fn in orderfiles]
-
-#create prelim list of lists of files!
-files = [f.readlines() for f in files]
-
-#shwack newlines and filter out empties!
-files = [filter(None, [fn.strip() for fn in fs]) for fs in files]
-
-#prefix with testdir
-files = [[testdir + '/' + fn.strip() for fn in fs] for fs in files]
-
-print "Will run these tests:", files
-
-i = 0
-
-for testlist in files:
-
- print "==========================="
- print "running tests from testlist", orderfiles[i]
- print "---------------------------"
- i = i + 1
-
- for test in testlist:
- print "running test", test
-
- try:
- execfile(test, globals().copy())
-
- except:
- ei = sys.exc_info()
- print "TEST FAILURE:", ei[1]
-
- else:
- print "SUCCESS"
-
diff --git a/buildbot/buildbot-source/contrib/svn_buildbot.py b/buildbot/buildbot-source/contrib/svn_buildbot.py
deleted file mode 100755
index ae23fcf62..000000000
--- a/buildbot/buildbot-source/contrib/svn_buildbot.py
+++ /dev/null
@@ -1,250 +0,0 @@
-#!/usr/bin/env python2.3
-
-# this requires python >=2.3 for the 'sets' module.
-
-# The sets.py from python-2.3 appears to work fine under python2.2 . To
-# install this script on a host with only python2.2, copy
-# /usr/lib/python2.3/sets.py from a newer python into somewhere on your
-# PYTHONPATH, then edit the #! line above to invoke python2.2
-
-# python2.1 is right out
-
-# If you run this program as part of your SVN post-commit hooks, it will
-# deliver Change notices to a buildmaster that is running a PBChangeSource
-# instance.
-
-# edit your svn-repository/hooks/post-commit file, and add lines that look
-# like this:
-
-'''
-# set up PYTHONPATH to contain Twisted/buildbot perhaps, if not already
-# installed site-wide
-. ~/.environment
-
-/path/to/svn_buildbot.py --repository "$REPOS" --revision "$REV" --bbserver localhost --bbport 9989
-'''
-
-import commands, sys, os
-import re
-import sets
-
-# We have hackish "-d" handling here rather than in the Options
-# subclass below because a common error will be to not have twisted in
-# PYTHONPATH; we want to be able to print that error to the log if
-# debug mode is on, so we set it up before the imports.
-
-DEBUG = None
-
-if '-d' in sys.argv:
- i = sys.argv.index('-d')
- DEBUG = sys.argv[i+1]
- del sys.argv[i]
- del sys.argv[i]
-
-if DEBUG:
- f = open(DEBUG, 'a')
- sys.stderr = f
- sys.stdout = f
-
-from twisted.internet import defer, reactor
-from twisted.python import usage
-from twisted.spread import pb
-from twisted.cred import credentials
-
-class Options(usage.Options):
- optParameters = [
- ['repository', 'r', None,
- "The repository that was changed."],
- ['revision', 'v', None,
- "The revision that we want to examine (default: latest)"],
- ['bbserver', 's', 'localhost',
- "The hostname of the server that buildbot is running on"],
- ['bbport', 'p', 8007,
- "The port that buildbot is listening on"],
- ['include', 'f', None,
- '''\
-Search the list of changed files for this regular expression, and if there is
-at least one match notify buildbot; otherwise buildbot will not do a build.
-You may provide more than one -f argument to try multiple
-patterns. If no filter is given, buildbot will always be notified.'''],
- ['filter', 'f', None, "Same as --include. (Deprecated)"],
- ['exclude', 'F', None,
- '''\
-The inverse of --filter. Changed files matching this expression will never
-be considered for a build.
-You may provide more than one -F argument to try multiple
-patterns. Excludes override includes, that is, patterns that match both an
-include and an exclude will be excluded.'''],
- ]
- optFlags = [
- ['dryrun', 'n', "Do not actually send changes"],
- ]
-
- def __init__(self):
- usage.Options.__init__(self)
- self._includes = []
- self._excludes = []
- self['includes'] = None
- self['excludes'] = None
-
- def opt_include(self, arg):
- self._includes.append('.*%s.*' % (arg,))
- opt_filter = opt_include
-
- def opt_exclude(self, arg):
- self._excludes.append('.*%s.*' % (arg,))
-
- def postOptions(self):
- if self['repository'] is None:
- raise usage.error("You must pass --repository")
- if self._includes:
- self['includes'] = '(%s)' % ('|'.join(self._includes),)
- if self._excludes:
- self['excludes'] = '(%s)' % ('|'.join(self._excludes),)
-
-def split_file_dummy(changed_file):
- """Split the repository-relative filename into a tuple of (branchname,
- branch_relative_filename). If you have no branches, this should just
- return (None, changed_file).
- """
- return (None, changed_file)
-
-# this version handles repository layouts that look like:
-# trunk/files.. -> trunk
-# branches/branch1/files.. -> branches/branch1
-# branches/branch2/files.. -> branches/branch2
-#
-def split_file_branches(changed_file):
- pieces = changed_file.split(os.sep)
- if pieces[0] == 'branches':
- return (os.path.join(*pieces[:2]),
- os.path.join(*pieces[2:]))
- if pieces[0] == 'trunk':
- return (pieces[0], os.path.join(*pieces[1:]))
-    ## there are other siblings of 'trunk' and 'branches'. Pretend they are
- ## all just funny-named branches, and let the Schedulers ignore them.
- #return (pieces[0], os.path.join(*pieces[1:]))
-
- raise RuntimeError("cannot determine branch for '%s'" % changed_file)
-
-split_file = split_file_dummy
-
-
-class ChangeSender:
-
- def getChanges(self, opts):
- """Generate and stash a list of Change dictionaries, ready to be sent
- to the buildmaster's PBChangeSource."""
-
- # first we extract information about the files that were changed
- repo = opts['repository']
- print "Repo:", repo
- rev_arg = ''
- if opts['revision']:
- rev_arg = '-r %s' % (opts['revision'],)
- changed = commands.getoutput('svnlook changed %s "%s"' % (rev_arg,
- repo)
- ).split('\n')
- changed = [x[1:].strip() for x in changed]
-
- message = commands.getoutput('svnlook log %s "%s"' % (rev_arg, repo))
- who = commands.getoutput('svnlook author %s "%s"' % (rev_arg, repo))
- revision = opts.get('revision')
- if revision is not None:
- revision = int(revision)
-
- # see if we even need to notify buildbot by looking at filters first
- changestring = '\n'.join(changed)
- fltpat = opts['includes']
- if fltpat:
- included = sets.Set(re.findall(fltpat, changestring))
- else:
- included = sets.Set(changed)
-
- expat = opts['excludes']
- if expat:
- excluded = sets.Set(re.findall(expat, changestring))
- else:
- excluded = sets.Set([])
- if len(included.difference(excluded)) == 0:
- print changestring
- print """\
- Buildbot was not interested, no changes matched any of these filters:\n %s
- or all the changes matched these exclusions:\n %s\
- """ % (fltpat, expat)
- sys.exit(0)
-
- # now see which branches are involved
- files_per_branch = {}
- for f in changed:
- branch, filename = split_file(f)
- if files_per_branch.has_key(branch):
- files_per_branch[branch].append(filename)
- else:
- files_per_branch[branch] = [filename]
-
- # now create the Change dictionaries
- changes = []
- for branch in files_per_branch.keys():
- d = {'who': who,
- 'branch': branch,
- 'files': files_per_branch[branch],
- 'comments': message,
- 'revision': revision}
- changes.append(d)
-
- return changes
-
- def sendChanges(self, opts, changes):
- pbcf = pb.PBClientFactory()
- reactor.connectTCP(opts['bbserver'], int(opts['bbport']), pbcf)
- d = pbcf.login(credentials.UsernamePassword('change', 'changepw'))
- d.addCallback(self.sendAllChanges, changes)
- return d
-
- def sendAllChanges(self, remote, changes):
- dl = [remote.callRemote('addChange', change)
- for change in changes]
- return defer.DeferredList(dl)
-
- def run(self):
- opts = Options()
- try:
- opts.parseOptions()
- except usage.error, ue:
- print opts
- print "%s: %s" % (sys.argv[0], ue)
- sys.exit()
-
- changes = self.getChanges(opts)
- if opts['dryrun']:
- for i,c in enumerate(changes):
- print "CHANGE #%d" % (i+1)
- keys = c.keys()
- keys.sort()
- for k in keys:
- print "[%10s]: %s" % (k, c[k])
- print "*NOT* sending any changes"
- return
-
- d = self.sendChanges(opts, changes)
-
- def quit(*why):
- print "quitting! because", why
- reactor.stop()
-
- def failed(f):
- print "FAILURE"
- print f
- reactor.stop()
-
- d.addCallback(quit, "SUCCESS")
- d.addErrback(failed)
- reactor.callLater(60, quit, "TIMEOUT")
- reactor.run()
-
-if __name__ == '__main__':
- s = ChangeSender()
- s.run()
-
-
diff --git a/buildbot/buildbot-source/contrib/svn_watcher.py b/buildbot/buildbot-source/contrib/svn_watcher.py
deleted file mode 100755
index ad1843545..000000000
--- a/buildbot/buildbot-source/contrib/svn_watcher.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/python
-
-# This is a program which will poll a (remote) SVN repository, looking for
-# new revisions. It then uses the 'buildbot sendchange' command to deliver
-# information about the Change to a (remote) buildmaster. It can be run from
-# a cron job on a periodic basis, or can be told (with the 'watch' option) to
-# automatically repeat its check every 10 minutes.
-
-# This script does not store any state information, so to avoid spurious
-# changes you must use the 'watch' option and let it run forever.
-
-# You will need to provide it with the location of the buildmaster's
-# PBChangeSource port (in the form hostname:portnum), and the svnurl of the
-# repository to watch.
-
-
-# 15.03.06 by John Pye
-# 29.03.06 by Niklaus Giger, added support to run under windows, added invocation option
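-#
-# Example invocation (host and repository names are hypothetical):
-#   python svn_watcher.py http://svn.example.org/repo buildmaster.example.org:9989 watch
-#
-# Without the trailing 'watch' argument the check runs exactly once (the
-# cron-friendly mode); with it, the script polls forever.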
-import commands
-import xml.dom.minidom
-import sys
-import time
-import os
-if sys.platform == 'win32':
- import win32pipe
-
-def checkChanges(repo, master, verbose=False, oldRevision=-1):
- cmd ="svn log --non-interactive --xml --verbose --limit=1 "+repo
- if verbose == True:
- print "Getting last revision of repository: " + repo
-
- if sys.platform == 'win32':
- f = win32pipe.popen(cmd)
- xml1 = ''.join(f.readlines())
- f.close()
- else:
- xml1 = commands.getoutput(cmd)
-
- if verbose == True:
- print "XML\n-----------\n"+xml1+"\n\n"
-
- doc = xml.dom.minidom.parseString(xml1)
- el = doc.getElementsByTagName("logentry")[0]
- revision = el.getAttribute("revision")
- author = "".join([t.data for t in
- el.getElementsByTagName("author")[0].childNodes])
- comments = "".join([t.data for t in
- el.getElementsByTagName("msg")[0].childNodes])
-
- pathlist = el.getElementsByTagName("paths")[0]
- paths = []
- for p in pathlist.getElementsByTagName("path"):
- paths.append("".join([t.data for t in p.childNodes]))
-
- if verbose == True:
- print "PATHS"
- print paths
-
- if revision != oldRevision:
- cmd = "buildbot sendchange --master="+master+" --revision=\""+revision+"\" --username=\""+author+"\"--comments=\""+comments+"\" "+" ".join(paths)
-
- if verbose == True:
- print cmd
-
- if sys.platform == 'win32':
- f = win32pipe.popen(cmd)
- print time.strftime("%H.%M.%S ") + "Revision "+revision+ ": "+ ''.join(f.readlines())
- f.close()
- else:
- xml1 = commands.getoutput(cmd)
- else:
- print time.strftime("%H.%M.%S ") + "nothing has changed since revision "+revision
-
- return revision
-
-if __name__ == '__main__':
- if len(sys.argv) == 4 and sys.argv[3] == 'watch':
- oldRevision = -1
- print "Watching for changes in repo "+ sys.argv[1] + " master " + sys.argv[2]
- while 1:
- oldRevision = checkChanges(sys.argv[1], sys.argv[2], False, oldRevision)
- time.sleep(10*60) # Check the repository every 10 minutes
-
- elif len(sys.argv) == 3:
- checkChanges(sys.argv[1], sys.argv[2], True )
- else:
- print os.path.basename(sys.argv[0]) + ": http://host/path/to/repo master:port [watch]"
-
diff --git a/buildbot/buildbot-source/contrib/svnpoller.py b/buildbot/buildbot-source/contrib/svnpoller.py
deleted file mode 100755
index fd2a68a38..000000000
--- a/buildbot/buildbot-source/contrib/svnpoller.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/python
-"""
- svn.py
- Script for BuildBot to monitor a remote Subversion repository.
- Copyright (C) 2006 John Pye
-"""
-# This script is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
-# USA
-
-import commands
-import xml.dom.minidom
-import ConfigParser
-import os.path
-import codecs
-
-# change these settings to match your project
-svnurl = "https://pse.cheme.cmu.edu/svn/ascend/code/trunk"
-statefilename = "~/changemonitor/config.ini"
-buildmaster = "buildbot.example.org:9989" # connects to a PBChangeSource
-
-xml1 = commands.getoutput("svn log --non-interactive --verbose --xml --limit=1 " + svnurl)
-#print "XML\n-----------\n"+xml1+"\n\n"
-
-try:
- doc = xml.dom.minidom.parseString(xml1)
- el = doc.getElementsByTagName("logentry")[0]
- revision = el.getAttribute("revision")
- author = "".join([t.data for t in el.getElementsByTagName("author")[0].childNodes])
- comments = "".join([t.data for t in el.getElementsByTagName("msg")[0].childNodes])
-
- pathlist = el.getElementsByTagName("paths")[0]
- paths = []
- for p in pathlist.getElementsByTagName("path"):
- paths.append("".join([t.data for t in p.childNodes]))
- #print "PATHS"
- #print paths
-except xml.parsers.expat.ExpatError, e:
- print "FAILED TO PARSE 'svn log' XML:"
- print str(e)
- print "----"
- print "RECEIVED TEXT:"
- print xml1
- import sys
- sys.exit(1)
-
-fname = statefilename
-fname = os.path.expanduser(fname)
-ini = ConfigParser.SafeConfigParser()
-
-try:
- ini.read(fname)
-except:
- print "Creating changemonitor config.ini:",fname
- ini.add_section("CurrentRevision")
- ini.set("CurrentRevision",-1)
-
-try:
- lastrevision = ini.get("CurrentRevision","changeset")
-except ConfigParser.NoOptionError:
- print "NO OPTION FOUND"
- lastrevision = -1
-except ConfigParser.NoSectionError:
- print "NO SECTION FOUND"
- lastrevision = -1
-
-if lastrevision != revision:
-
- #comments = codecs.encodings.unicode_escape.encode(comments)
- cmd = "buildbot sendchange --master="+buildmaster+" --branch=trunk --revision=\""+revision+"\" --username=\""+author+"\" --comments=\""+comments+"\" "+" ".join(paths)
-
- #print cmd
- res = commands.getoutput(cmd)
-
- print "SUBMITTING NEW REVISION",revision
- if not ini.has_section("CurrentRevision"):
- ini.add_section("CurrentRevision")
- try:
- ini.set("CurrentRevision","changeset",revision)
- f = open(fname,"w")
- ini.write(f)
- #print "WROTE CHANGES TO",fname
- except:
- print "FAILED TO RECORD INI FILE"
diff --git a/buildbot/buildbot-source/contrib/viewcvspoll.py b/buildbot/buildbot-source/contrib/viewcvspoll.py
deleted file mode 100755
index 3b2436a7a..000000000
--- a/buildbot/buildbot-source/contrib/viewcvspoll.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#! /usr/bin/python
-
-"""Based on the fakechanges.py contrib script"""
-
-import sys
-from twisted.spread import pb
-from twisted.cred import credentials
-from twisted.internet import reactor, task
-from twisted.python import log
-import commands, random, os.path, time, MySQLdb
-
-class ViewCvsPoller:
-
- def __init__(self):
- def _load_rc():
- import user
- ret = {}
- for line in open(os.path.join(user.home,".cvsblamerc")).readlines():
- if line.find("=") != -1:
- key, val = line.split("=")
- ret[key.strip()] = val.strip()
- return ret
- # maybe add your own keys here db=xxx, user=xxx, passwd=xxx
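- # for illustration, a ~/.cvsblamerc containing (values are hypothetical):
- #   user=viewcvs
- #   passwd=secret
- # turns into MySQLdb.connect("cvs", user="viewcvs", passwd="secret")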
- self.cvsdb = MySQLdb.connect("cvs", **_load_rc())
- #self.last_checkin = "2005-05-11" # for testing
- self.last_checkin = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
-
- def get_changes(self):
- changes = []
-
- def empty_change():
- return {'who': None, 'files': [], 'comments': None }
- change = empty_change()
-
- cursor = self.cvsdb.cursor()
- cursor.execute("""SELECT whoid, descid, fileid, dirid, branchid, ci_when
- FROM checkins WHERE ci_when>='%s'""" % self.last_checkin)
- last_checkin = None
- for whoid, descid, fileid, dirid, branchid, ci_when in cursor.fetchall():
- if branchid != 1: # only head
- continue
- cursor.execute("""SELECT who from people where id=%s""" % whoid)
- who = cursor.fetchone()[0]
- cursor.execute("""SELECT description from descs where id=%s""" % descid)
- desc = cursor.fetchone()[0]
- cursor.execute("""SELECT file from files where id=%s""" % fileid)
- filename = cursor.fetchone()[0]
- cursor.execute("""SELECT dir from dirs where id=%s""" % dirid)
- dirname = cursor.fetchone()[0]
- if who == change["who"] and desc == change["comments"]:
- change["files"].append( "%s/%s" % (dirname, filename) )
- elif change["who"]:
- changes.append(change)
- change = empty_change()
- else:
- change["who"] = who
- change["files"].append( "%s/%s" % (dirname, filename) )
- change["comments"] = desc
- if last_checkin == None or ci_when > last_checkin:
- last_checkin = ci_when
- if last_checkin:
- self.last_checkin = last_checkin
- return changes
-
-poller = ViewCvsPoller()
-
-def error(*args):
- log.err()
- reactor.stop()
-
-def poll_changes(remote):
- print "GET CHANGES SINCE", poller.last_checkin,
- changes = poller.get_changes()
- for change in changes:
- print change["who"], "\n *", "\n * ".join(change["files"])
- remote.callRemote('addChange', change).addErrback(error)
- print
- reactor.callLater(60, poll_changes, remote)
-
-factory = pb.PBClientFactory()
-reactor.connectTCP("localhost", 9999, factory )
-deferred = factory.login(credentials.UsernamePassword("change", "changepw"))
-deferred.addCallback(poll_changes).addErrback(error)
-
-reactor.run()
diff --git a/buildbot/buildbot-source/contrib/windows/buildbot.bat b/buildbot/buildbot-source/contrib/windows/buildbot.bat
deleted file mode 100644
index 40736aaad..000000000
--- a/buildbot/buildbot-source/contrib/windows/buildbot.bat
+++ /dev/null
@@ -1,2 +0,0 @@
-@python C:\Python23\Scripts\buildbot %*
-
diff --git a/buildbot/buildbot-source/contrib/windows/buildbot2.bat b/buildbot/buildbot-source/contrib/windows/buildbot2.bat
deleted file mode 100644
index e211adc79..000000000
--- a/buildbot/buildbot-source/contrib/windows/buildbot2.bat
+++ /dev/null
@@ -1,98 +0,0 @@
-@echo off
-rem This is Windows helper batch file for Buildbot
-rem NOTE: You will need Windows NT5/XP to use some of the syntax here.
-
-rem Please note you must have Twisted Matrix installed to use this build system
-rem Details: http://twistedmatrix.com/ (Version 1.3.0 or more, preferably 2.0+)
-
-rem NOTE: the --reactor=win32 argument is needed because of Twisted
-rem The Twisted default reactor is select-based (i.e. posix) (why?!)
-
-rem Keep environmental settings local to this file
-setlocal
-
-rem Change the following settings to suit your environment
-
-rem This is where you want Buildbot installed
-set BB_DIR=z:\Tools\PythonLibs
-
-rem Assuming you have TortoiseCVS installed [for CVS.exe].
-set CVS_EXE="c:\Program Files\TortoiseCVS\cvs.exe"
-
-rem Trial: --spew will give LOADS of information. Use -o for verbose.
-set TRIAL=python C:\Python23\scripts\trial.py -o --reactor=win32
-set BUILDBOT_TEST_VC=c:\temp
-
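-rem Example usage (the module name below is only an illustration):
-rem   buildbot2.bat test test_config
-rem runs just buildbot.test.test_config through Trial, while
-rem   buildbot2.bat test
-rem with no module name runs the whole suite.
-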
-if "%1"=="helper" (
- goto print_help
-)
-
-if "%1"=="bbinstall" (
- rem You will only need to run this when you install Buildbot
- echo BB: Install BuildBot at the location you set in the config:
- echo BB: BB_DIR= %BB_DIR%
- echo BB: You must be in the buildbot-x.y.z directory to run this:
- python setup.py install --prefix %BB_DIR% --install-lib %BB_DIR%
- goto end
-)
-
-if "%1"=="cvsco" (
- echo BB: Getting Buildbot from Sourceforge CVS [if CVS in path].
- if "%2"=="" (
- echo BB ERROR: Please give a root path for the check out, eg. z:\temp
- goto end
- )
-
- cd %2
- echo BB: Hit return as there is no password
- %CVS_EXE% -d:pserver:anonymous@cvs.sourceforge.net:/cvsroot/buildbot login
- %CVS_EXE% -z3 -d:pserver:anonymous@cvs.sourceforge.net:/cvsroot/buildbot co -P buildbot
- goto end
-)
-
-if "%1"=="cvsup" (
- echo BB: Updating Buildbot from Sourceforge CVS [if CVS in path].
- echo BB: Make sure you have the project checked out in local VCS.
-
- rem we only want buildbot code, the rest is from the install
- cd %BB_DIR%
- echo BB: Hit return as there is no password
- %CVS_EXE% -d:pserver:anonymous@cvs.sourceforge.net:/cvsroot/buildbot login
- %CVS_EXE% -z3 -d:pserver:anonymous@cvs.sourceforge.net:/cvsroot/buildbot up -P -d buildbot buildbot/buildbot
- goto end
-)
-
-if "%1"=="test" (
- rem Trial is a testing framework supplied by the Twisted Matrix package.
- rem It installs itself in the Python installation directory in a "scripts" folder,
- rem e.g. c:\python23\scripts
- rem This is just a convenience function because that directory is not in our path.
-
- if "%2" NEQ "" (
- echo BB: TEST: buildbot.test.%2
- %TRIAL% -m buildbot.test.%2
- ) else (
- echo BB: Running ALL buildbot tests...
- %TRIAL% buildbot.test
- )
- goto end
-)
-
-rem Okay, nothing that we recognised to pass to buildbot
-echo BB: Running buildbot...
-python -c "from buildbot.scripts import runner; runner.run()" %*
-goto end
-
-:print_help
-echo Buildbot helper script commands:
-echo helper This help message
-echo test Test buildbot is set up correctly
-echo Maintenance:
-echo bbinstall Install Buildbot from package
-echo cvsup Update from cvs
-echo cvsco [dir] Check buildbot out from cvs into [dir]
-
-:end
-rem End environment scope
-endlocal
-
diff --git a/buildbot/buildbot-source/docs/PyCon-2003/buildbot.html b/buildbot/buildbot-source/docs/PyCon-2003/buildbot.html
deleted file mode 100644
index 5a3e4c3ee..000000000
--- a/buildbot/buildbot-source/docs/PyCon-2003/buildbot.html
+++ /dev/null
@@ -1,276 +0,0 @@
-<?xml version="1.0"?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
- "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"><html xmlns="http://www.w3.org/1999/xhtml" lang="en"><head><title>BuildBot: build/test automation</title><link href="stylesheet.css" type="text/css" rel="stylesheet" /></head><body bgcolor="white"><h1 class="title">BuildBot: build/test automation</h1><div class="toc"><ol><li><a href="#auto0">Abstract</a></li><li><a href="#auto1">Features</a></li><li><a href="#auto2">Overview</a></li><li><a href="#auto3">Design</a></li><ul><li><a href="#auto4">Build Master</a></li><li><a href="#auto5">Builders and BuildProcesses</a></li><li><a href="#auto6">Build Slaves</a></li><li><a href="#auto7">Build Status</a></li></ul><li><a href="#auto8">Installation</a></li><li><a href="#auto9">Security</a></li><li><a href="#auto10">Inspirations and Competition</a></li><li><a href="#auto11">Current Status</a></li><li><a href="#auto12">Future Directions</a></li><li><a href="#auto13">More Information</a></li></ol></div><div class="content"><span></span><ul><li>Author: Brian Warner &lt;<code>warner@lothar.com</code>&gt;</li><li>BuildBot Home Page:
- <a href="http://buildbot.sourceforge.net">http://buildbot.sourceforge.net</a></li></ul><h2>Abstract<a name="auto0"></a></h2><p>The BuildBot is a system to automate the compile/test cycle required by
-most software projects to validate code changes. By automatically rebuilding
-and testing the tree each time something has changed, build problems are
-pinpointed quickly, before other developers are inconvenienced by the
-failure. The guilty developer can be identified and harassed without human
-intervention. By running the builds on a variety of platforms, developers
-who do not have the facilities to test their changes everywhere before
-checkin will at least know shortly afterwards whether they have broken the
-build or not. Warning counts, lint checks, image size, compile time, and
-other build parameters can be tracked over time, are more visible, and are
-therefore easier to improve.</p><p>The overall goal is to reduce tree breakage and provide a platform to run
-tests or code-quality checks that are too annoying or pedantic for any human
-to waste their time with. Developers get immediate (and potentially public)
-feedback about their changes, encouraging them to be more careful about
-testing before checkin.</p><h2>Features<a name="auto1"></a></h2><ul><li> run builds on a variety of slave platforms</li><li> arbitrary build process: handles projects using C, Python, whatever</li><li> minimal host requirements: python and Twisted</li><li> slaves can be behind a firewall if they can still do checkout</li><li> status delivery through web page, email, IRC, other protocols</li><li> track builds in progress, provide estimated completion time</li><li> flexible configuration by subclassing generic build process classes</li><li> debug tools to force a new build, submit fake Changes, query slave
- status</li><li> released under the GPL</li></ul><h2>Overview<a name="auto2"></a></h2><img src="waterfall.png" alt="waterfall display" height="457" align="right" width="323" /><p>In general, the buildbot watches a source code repository (CVS or other
-version control system) for <q>interesting</q> changes to occur, then
-triggers builds with various steps (checkout, compile, test, etc). The
-Builds are run on a variety of slave machines, to allow testing on different
-architectures, compilation against different libraries, kernel versions,
-etc. The results of the builds are collected and analyzed: compile succeeded
-/ failed / had warnings, which tests passed or failed, memory footprint of
-generated executables, total tree size, etc. The results are displayed on a
-central web page in a <q>waterfall</q> display: time along the vertical
-axis, build platform along the horizontal, <q>now</q> at the top. The
-overall build status (red for failing, green for successful) is at the very
-top of the page. After developers commit a change, they can check the web
-page to watch the various builds complete. They are on the hook until they
-see green for all builds: after that point they can reasonably assume that
-they did not break anything. If they see red, they can examine the build
-logs to find out what they broke.</p><p>The status information can be retrieved by a variety of means. The main
-web page is one path, but the underlying Twisted framework allows other
-protocols to be used: IRC or email, for example. A live status client (using
-Gtk+ or Tkinter) can run on the developer's desktop, with a box per builder
-that turns green or red as the builds succeed or fail. Once the build has
-run a few times, the build process knows about how long it ought to take (by
-measuring elapsed time, quantity of text output by the compile process,
-searching for text indicating how many unit tests have been run, etc), so it
-can provide a progress bar and ETA display.</p><p>Each build involves a list of <q>Changes</q>: files that were changed
-since the last build. If a build fails where it used to succeed, there is a
-good chance that one of the Changes is to blame, so the developers who
-submitted those Changes are put on the <q>blamelist</q>. The unfortunates on
-this list are responsible for fixing their problems, and can be reminded of
-this responsibility in increasingly hostile ways. They can receive private
-mail, the main web page can put their name up in lights, etc. If the
-developers use IRC to communicate, the buildbot can sit in on the channel
-and tell developers directly about build status or failures.</p><p>The build master also provides a place where long-term statistics about
-the build can be tracked. It is occasionally useful to create a graph
-showing how the size of the compiled image or source tree has changed over
-months or years: by collecting such metrics on each build and archiving
-them, the historical data is available for later processing.</p><h2>Design<a name="auto3"></a></h2><p>The BuildBot consists of a master and a set of build slaves. The master
-runs on any conveniently-accessible host: it provides the status web server
-and must be reachable by the build slaves, so for public projects it should
-be reachable from the general internet. The slaves connect to the master and
-actually perform the builds: they can be behind a firewall as long as they
-can reach the master and check out source files.</p><h3>Build Master<a name="auto4"></a></h3><img src="overview.png" alt="overview diagram" height="383" width="595" /><p>The master receives information about changed source files from various
-sources: it can connect to a CVSToys server, or watch a mailbox that is
-subscribed to a CVS commit list of the type commonly provided for widely
-distributed development projects. New forms of change notification (e.g. for
-other version control systems) can be handled by writing an appropriate
-class: all are responsible for creating Change objects and delivering them
-to the ChangeMaster service inside the master.</p><p>The build master is given a working directory where it is allowed to save
-persistent information. It is told which TCP ports to use for slave
-connections, status client connections, the built-in HTTP server, etc. The
-master is also given a list of <q>build slaves</q> that are allowed to
-connect, described below. Each slave gets a name and a password to use. The
-buildbot administrator must give a password to each person who runs a build
-slave.</p><p>The build master is the central point of control. All the decisions about
-what gets built are made there, all the file change notices are sent there,
-all the status information is distributed from there. Build slave
-configuration is minimal: everything is controlled on the master side by the
-buildbot administrator. On the other hand, the build master does no actual
-compilation or testing. It does not have to be able to checkout or build the
-tree. The build slaves are responsible for doing any work that actually
-touches the project's source code.</p><h3>Builders and BuildProcesses<a name="auto5"></a></h3><p>Each <q>build process</q> is defined by an instance of a Builder class
-which receives a copy of every Change that goes into the repository. It gets
-to decide which changes are interesting (e.g. a Builder which only compiles
-C code could ignore changes to documentation files). It can decide how long
-to wait until starting the build: a quick build that just updates the files
-that were changed (and will probably finish quickly) could start after 30
-seconds, whereas a full build (remove the old tree, checkout a new tree,
-compile everything, test everything) would want to wait longer. The default
-10 minute delay gives developers a chance to finish checking in a set of
-related files while still providing timely feedback about the consequences
-of those changes.</p><p>Once the build is started, the build process controls how it proceeds
-with a series of BuildSteps, which are things like shell commands, CVS
-update or checkout commands, etc. Each BuildStep can invoke SlaveCommands on
-a connected slave. One generic command is ShellCommand, which takes a
-string, hands it to <code>/bin/sh</code>, and returns exit status and
-stdout/stderr text. Other commands are layered on top of ShellCommand:
-CVSCheckout, MakeTarget, CountKLOC, and so on. Some operations are faster or
-easier to do with python code on the slave side, some are easier to do on
-the master side.</p><p>The Builder walks through a state machine, starting BuildSteps and
-receiving a callback when they complete. Steps which fail may stop the
-overall build (if the CVS checkout fails, there is little point in
-attempting a compile), or may allow it to continue (unit tests could fail
-but documentation may still be buildable). When the last step finishes, the
-entire build is complete, and a function combines the completion status of
-all the steps to decide how the overall build should be described:
-successful, failing, or somewhere in between.</p><p>At each point in the build cycle (waiting to build, starting build,
-starting a BuildStep, finishing the build), status information is delivered
-to a special Status object. This information is used to update the main
-status web page, and can be delivered to real-time status clients that are
-attached at that moment. Intermediate status (stdout from a ShellCommand,
-for example) is also delivered while the Step runs. This status can be used
-to estimate how long the individual Step (or the overall build) has left
-before it is finished, so an ETA can be listed on the web page or in the
-status client.</p><p>The build master is persisted to disk when it is stopped with SIGINT,
-preserving the status and historical build statistics.</p><p>Builders are set up by the buildbot administrator. Each one gets a name
-and a BuildProcess object (which may be parameterized with things like which
-CVS repository to use, which targets to build, which version of python or
-gcc it should use, etc). Builders are also assigned to a BuildSlave,
-described below. In the current implementation, Builders are defined by
-adding lines to the setup script, but an HTML-based <q>create a builder</q>
-scheme is planned for the future.</p><h3>Build Slaves<a name="auto6"></a></h3><p>BuildSlaves are where the actual compilation and testing gets done. They
-are programs which run on a variety of platforms, and communicate with the
-BuildMaster over TCP connections.</p><p>Each build slave is given a name and a working directory. They are also
-given the buildmaster's contact information: hostname, port number, and a
-password. This information must come from the buildbot administrator, who
-has created a corresponding entry in the buildmaster. The password exists to
-make it clear that build slave operators need to coordinate with the
-buildbot administrator.</p><p>When the Builders are created, they are given a name (like
-<q>quick-bsd</q> or <q>full-linux</q>), and are tied to a particular slave.
-When that slave comes online, a RemoteBuilder object is created inside it,
-where all the SlaveCommands are run. Each RemoteBuilder gets a separate
-subdirectory inside the slave's working directory. Multiple Builders can
-share the same slave: typically all Builders for a given architecture would
-run inside the same slave.</p><img src="slave.png" alt="overview diagram" height="354" width="595" /><h3>Build Status<a name="auto7"></a></h3><p>The waterfall display holds short-term historical build status.
-Developers can easily see what the buildbot is doing right now, how long it
-will be until the current build is finished, and what are the results of
-each step of the build process. Change comments and compile/test logs are
-one click away. The top row shows overall status: green is good, red is bad,
-yellow is a build still in progress.</p><p>Also available through the web page is information on the individual
-builders: static information like what purpose the builder serves (specified
-by the admin when configuring the buildmaster), and non-build status
-information like which build slave it wants to use, whether the slave is
-online or not, and how frequently the build has succeeded in the last 10
-attempts. Build slave information is available here too, both data provided
-by the build slave operator (which machine the slave is running on, who to
-thank for the compute cycles being donated) and data extracted from the
-system automatically (CPU type, OS name, versions of various build
-tools).</p><p>The live status client shows the results of the last build, but does not
-otherwise show historical information. It provides extensive information
-about the current build: overall ETA, individual step ETA, data about what
-changes are being processed. It will be possible to get at the error logs
-from the last build through this interface.</p><p>Eventually, e-mail and IRC notices can be sent when builds have succeeded
-or failed. Mail messages can include the compile/test logs or summaries
-thereof. The buildmaster can sit on the IRC channel and accept queries about
-current build status, such as <q>how long until the current build
-finishes</q>, or <q>what tests are currently failing</q>.</p><p>Other status displays are possible. Test and compile errors can be
-tracked by filename or test case name, providing a view on how that one file
-has fared over time. Errors can be tracked by username, giving a history of
-how one developer has affected the build over time. </p><h2>Installation<a name="auto8"></a></h2><p>The buildbot administrator will find a publicly-reachable machine to
-host the buildmaster. They decide upon the BuildProcesses to be run, and
-create the Builders that use them. Creating complex build processes will
-involve writing a new python class to implement the necessary
-decision-making, but it will be possible to create simple ones like
-<q>checkout, make, make test</q> from the command line or through a
-web-based configuration interface. They also decide upon what forms of
-status notification should be used: what TCP port should be used for the web
-server, where mail should be sent, what IRC channels should receive
-success/failure messages.</p><p>Next, they need to arrange for change notification. If the repository is
-using <a href="http://purl.net/net/CVSToys">CVSToys</a>, then they simply
-tell the buildmaster the host, port, and login information for the CVSToys
-server. When the buildmaster starts up, it will contact the server and
-subscribe to hear about all CVS changes. If not, a <q>cvs-commits</q>
-mailing list is needed. Most large projects have such a list: every time a
-change is committed, an email is sent to everyone on the list which contains
-details about what was changed and why (the checkin comments). The admin
-should subscribe to this list, and dump the resulting mail into a
-qmail-style <q>maildir</q>. (It doesn't matter who is subscribed, it could
-be the admin themselves or a buildbot-specific account, just as long as the
-mail winds up in the right place). Then they tell the buildmaster to monitor
-that maildir. Each time a message arrives, it will be parsed, and the
-contents used to trigger the buildprocess. All forms of CVS notification
-include a filtering prefix, to tell the buildmaster it should ignore commits
-outside a certain directory. This is useful if the repository is used for
-multiple projects.</p><p>Finally, they need to arrange for build slaves. Some projects use
-dedicated machines for this purpose, but many do not have that luxury and
-simply use developer's personal workstations. Projects that would benefit
-from testing on multiple platforms will want to find build slaves on a
-variety of operating systems. Frequently these build slaves are run by
-volunteers or developers involved in the project who have access to the
-right equipment. The admin will give each of these people a name/password
-for their build slave, as well as the location (host/port) of the
-buildmaster. The build slave owners simply start a process on their systems
-with the appropriate parameters and it will connect to the build master.</p><p>Both the build master and the build slaves are Twisted
-<code>Application</code> instances. A <code>.tap</code> file holds the
-pickled application state, and a daemon-launching program called
-<code>twistd</code> is used to start the process, detach from the current
-tty, log output to a file, etc. When the program is terminated, it saves its
-state to another <code>.tap</code> file. Next time, <code>twistd</code> is
-told to start from that file and the application will be restarted exactly
-where it left off.</p><h2>Security<a name="auto9"></a></h2><p>The master is intended to be publicly available, but of course
-limitations can be put on it for private projects. User accounts and
-passwords can be required for live status clients that want to connect, or
-the master can allow arbitrary anonymous access to status information.
-Twisted's <q>Perspective Broker</q> RPC system and careful design provides
-security for the real-time status client port: those clients are read-only,
-and cannot do anything to disrupt the build master or the build processes
-running on the slaves.</p><p>Build slaves each have a name and password, and typically the project
-coordinator would provide these to developers or volunteers who wished to
-offer a host machine for builds. The build slaves connect to the master, so
-they can be behind a firewall or NAT box, as long as they can still do a
-checkout and compile. Registering build slaves helps prevent DoS attacks
-where idiots attach fake build slaves that are not actually capable of
-performing the build, displacing the actual slave.</p><p>Running a build slave on your machine is equivalent to giving a local
-account to everyone who can commit code to the repository. Any such
-developer could add an <q><code>rm -rf /</code></q> or code to start a
-remotely-accessible shell to a Makefile and then do naughty things with the
-account under which the build slave was launched. If this is a concern, the
-build slave can be run inside a chroot jail or other means (like a
-user-mode-linux sub-kernel), as long as it is still capable of checking out
-a tree and running all commands necessary for the build.</p><h2>Inspirations and Competition<a name="auto10"></a></h2><p>Buildbot was originally inspired by Mozilla's Tinderbox project, but is
-intended to conserve resources better (tinderbox uses dedicated build
-machines to continually rebuild the tree, buildbot only rebuilds when
-something has changed, and not even then for some builds) and deliver more
-useful status information. I've seen other projects with similar goals
-[CruiseControl on sourceforge is a java-based one], but I believe this one
-is more flexible.</p><h2>Current Status<a name="auto11"></a></h2><p>Buildbot is currently under development. Basic builds, web-based status
-reporting, and a basic Gtk+-based real-time status client are all
-functional. More work is being done to make the build process more flexible
-and easier to configure, add better status reporting, and add new kinds of
-build steps. An instance has been running against the Twisted source tree
-(which includes extensive unit tests) since February 2003.</p><h2>Future Directions<a name="auto12"></a></h2><p>Once the configuration process is streamlined and a release is made, the
-next major feature is the <q>try</q> command. This will be a tool to which
-they developer can submit a series of <em>potential</em> changes, before
-they are actually checked in. <q>try</q> will assemble the changed and/or
-new files and deliver them to the build master, which will then initiate a
-build cycle with the current tree plus the potential changes. This build is
-private, just for the developer who requested it, so failures will not be
-announced publically. It will run all the usual tests from a full build and
-report the results back to the developer. This way, a developer can verify
-their changes, on more platforms than they directly have access to, with a
-single command. By making it easy to thoroughly test their changes before
-checkin, developers will have no excuse for breaking the build.</p><p>For projects that have unit tests which can be broken up into individual
-test cases, the BuildProcess will have some steps to track each test case
-separately. Developers will be able to look at the history of individual
-tests, to find out things like <q>test_import passed until foo.c was changed
-on monday, then failed until bar.c was changed last night</q>. This can also
-be used to make breaking a previously-passing test a higher crime than
-failing to fix an already-broken one. It can also help to detect
-intermittent failures, ones that need to be fixed but which can't be blamed
-on the last developer to commit changes. For test cases that represent new
-functionality which has not yet been implemented, the list of failing test
-cases can serve as a convenient TODO list.</p><p>If a large number of changes occur at the same time and the build fails
-afterwards, a clever process could try modifying one file (or one
-developer's files) at a time, to find one which is the actual cause of the
-failure. Intermittent test failures could be identified by re-running the
-failing test a number of times, looking for changes in the results.</p><p>Project-specific methods can be developed to identify the guilty
-developer more precisely, for example grepping through source files for a
-<q>Maintainer</q> tag, or a static table of module owners. Build failures
-could be reported to the owner of the module as well as the developer who
-made the offending change.</p><p>The Builder could update entries in a bug database automatically: a
-change could have comments which claim it <q>fixes #12345</q>, so the bug DB is
-queried to find out that test case ABC should be used to verify the bug. If
-test ABC was failing before and now passes, the bug DB can be told to mark
-#12345 as machine-verified. Such entries could also be used to identify
-which tests to run, for a quick build that wasn't running the entire test
-suite.</p><p>The Buildbot could be integrated into the release cycle: once per week,
-any build which passes a full test suite is automatically tagged and release
-tarballs are created.</p><p>It should be possible to create and configure the Builders from the main
-status web page, at least for processes that use a generic <q>checkout /
-make / make test</q> sequence. Twisted's <q>Woven</q> framework provides a
-powerful HTML tool that could be used to create the necessary controls.</p><p>If the master or a slave is interrupted during a build, it is frequently
-possible to re-start the interrupted build. Some steps can simply be
-re-invoked (<q>make</q> or <q>cvs update</q>). Interrupting others may
-require the entire build to be re-started from scratch (<q>cvs export</q>).
-The Buildbot will be extended so that both master and slaves can report to
-the other what happened while they were disconnected, and as much work can
-be salvaged as possible.</p><h2>More Information<a name="auto13"></a></h2><p>The BuildBot home page is at <a href="http://buildbot.sourceforge.net">http://buildbot.sourceforge.net</a>,
-and has pointers to publicly-visible BuildBot installations. Mailing
-lists, bug reporting, and of course source downloads are reachable from that
-page. </p><!-- $Id$ --></div></body></html> \ No newline at end of file
diff --git a/buildbot/buildbot-source/docs/PyCon-2003/overview.png b/buildbot/buildbot-source/docs/PyCon-2003/overview.png
deleted file mode 100644
index 90618adce..000000000
--- a/buildbot/buildbot-source/docs/PyCon-2003/overview.png
+++ /dev/null
Binary files differ
diff --git a/buildbot/buildbot-source/docs/PyCon-2003/slave.png b/buildbot/buildbot-source/docs/PyCon-2003/slave.png
deleted file mode 100644
index 303fe6487..000000000
--- a/buildbot/buildbot-source/docs/PyCon-2003/slave.png
+++ /dev/null
Binary files differ
diff --git a/buildbot/buildbot-source/docs/PyCon-2003/stylesheet.css b/buildbot/buildbot-source/docs/PyCon-2003/stylesheet.css
deleted file mode 100644
index 9d3caeadb..000000000
--- a/buildbot/buildbot-source/docs/PyCon-2003/stylesheet.css
+++ /dev/null
@@ -1,180 +0,0 @@
-
-body
-{
- margin-left: 2em;
- margin-right: 2em;
- border: 0px;
- padding: 0px;
- font-family: sans-serif;
- }
-
-.done { color: #005500; background-color: #99ff99 }
-.notdone { color: #550000; background-color: #ff9999;}
-
-pre
-{
- padding: 1em;
- font-family: Neep Alt, Courier New, Courier;
- font-size: 12pt;
- border: thin black solid;
-}
-
-.boxed
-{
- padding: 1em;
- border: thin black solid;
-}
-
-.shell
-{
- background-color: #ffffdd;
-}
-
-.python
-{
- background-color: #dddddd;
-}
-
-.htmlsource
-{
- background-color: #dddddd;
-}
-
-.py-prototype
-{
- background-color: #ddddff;
-}
-
-
-.python-interpreter
-{
- background-color: #ddddff;
-}
-
-.doit
-{
- border: thin blue dashed ;
- background-color: #0ef
-}
-
-.py-src-comment
-{
- color: #1111CC
-}
-
-.py-src-keyword
-{
- color: #3333CC;
- font-weight: bold;
-}
-
-.py-src-parameter
-{
- color: #000066;
- font-weight: bold;
-}
-
-.py-src-identifier
-{
- color: #CC0000
-}
-
-.py-src-string
-{
-
- color: #115511
-}
-
-.py-src-endmarker
-{
- display: block; /* IE hack; prevents following line from being sucked into the py-listing box. */
-}
-
-.py-listing
-{
- margin: 1ex;
- border: thin solid black;
- background-color: #eee;
-}
-
-.py-listing pre
-{
- margin: 0px;
- border: none;
- border-bottom: thin solid black;
-}
-
-.py-listing .python
-{
- margin-top: 0;
- margin-bottom: 0;
- border: none;
- border-bottom: thin solid black;
- }
-
-.py-listing .htmlsource
-{
- margin-top: 0;
- margin-bottom: 0;
- border: none;
- border-bottom: thin solid black;
- }
-
-.py-caption
-{
- text-align: center;
- padding-top: 0.5em;
- padding-bottom: 0.5em;
-}
-
-.py-filename
-{
- font-style: italic;
- }
-
-.manhole-output
-{
- color: blue;
-}
-
-hr
-{
- display: inline;
- }
-
-ul
-{
- padding: 0px;
- margin: 0px;
- margin-left: 1em;
- padding-left: 1em;
- border-left: 1em;
- }
-
-li
-{
- padding: 2px;
- }
-
-dt
-{
- font-weight: bold;
- margin-left: 1ex;
- }
-
-dd
-{
- margin-bottom: 1em;
- }
-
-div.note
-{
- background-color: #FFFFCC;
- margin-top: 1ex;
- margin-left: 5%;
- margin-right: 5%;
- padding-top: 1ex;
- padding-left: 5%;
- padding-right: 5%;
- border: thin black solid;
-}
diff --git a/buildbot/buildbot-source/docs/PyCon-2003/waterfall.png b/buildbot/buildbot-source/docs/PyCon-2003/waterfall.png
deleted file mode 100644
index 5df830584..000000000
--- a/buildbot/buildbot-source/docs/PyCon-2003/waterfall.png
+++ /dev/null
Binary files differ
diff --git a/buildbot/buildbot-source/docs/buildbot.info b/buildbot/buildbot-source/docs/buildbot.info
deleted file mode 100644
index 399a8394e..000000000
--- a/buildbot/buildbot-source/docs/buildbot.info
+++ /dev/null
@@ -1,4921 +0,0 @@
-This is buildbot.info, produced by makeinfo version 4.8 from
-buildbot.texinfo.
-
- This is the BuildBot manual.
-
- Copyright (C) 2005,2006 Brian Warner
-
- Copying and distribution of this file, with or without
-modification, are permitted in any medium without royalty provided
-the copyright notice and this notice are preserved.
-
-
-File: buildbot.info, Node: Top, Next: Introduction, Prev: (dir), Up: (dir)
-
-BuildBot
-********
-
-This is the BuildBot manual.
-
- Copyright (C) 2005,2006 Brian Warner
-
- Copying and distribution of this file, with or without
-modification, are permitted in any medium without royalty provided
-the copyright notice and this notice are preserved.
-
-* Menu:
-
-* Introduction:: What the BuildBot does.
-* Installation:: Creating a buildmaster and buildslaves,
- running them.
-* Concepts:: What goes on in the buildbot's little mind.
-* Configuration:: Controlling the buildbot.
-* Getting Source Code Changes:: Discovering when to run a build.
-* Build Process:: Controlling how each build is run.
-* Status Delivery:: Telling the world about the build's results.
-* Command-line tool::
-* Resources:: Getting help.
-* Developer's Appendix::
-* Index:: Complete index.
-
- --- The Detailed Node Listing ---
-
-Introduction
-
-* History and Philosophy::
-* System Architecture::
-* Control Flow::
-
-Installation
-
-* Requirements::
-* Installing the code::
-* Creating a buildmaster::
-* Creating a buildslave::
-* Launching the daemons::
-* Logfiles::
-* Shutdown::
-* Maintenance::
-* Troubleshooting::
-
-Creating a buildslave
-
-* Buildslave Options::
-
-Troubleshooting
-
-* Starting the buildslave::
-* Connecting to the buildmaster::
-* Forcing Builds::
-
-Concepts
-
-* Version Control Systems::
-* Schedulers::
-* BuildSet::
-* BuildRequest::
-* Builder::
-* Users::
-
-Version Control Systems
-
-* Generalizing VC Systems::
-* Source Tree Specifications::
-* How Different VC Systems Specify Sources::
-* Attributes of Changes::
-
-Users
-
-* Doing Things With Users::
-* Email Addresses::
-* IRC Nicknames::
-* Live Status Clients::
-
-Configuration
-
-* Config File Format::
-* Loading the Config File::
-* Defining the Project::
-* Listing Change Sources and Schedulers::
-* Setting the slaveport::
-* Buildslave Specifiers::
-* Defining Builders::
-* Defining Status Targets::
-* Debug options::
-
-Listing Change Sources and Schedulers
-
-* Scheduler Types::
-* Build Dependencies::
-
-Getting Source Code Changes
-
-* Change Sources::
-
-Change Sources
-
-* Choosing ChangeSources::
-* CVSToys - PBService::
-* CVSToys - mail notification::
-* Other mail notification ChangeSources::
-* PBChangeSource::
-
-Build Process
-
-* Build Steps::
-* Interlocks::
-* Build Factories::
-
-Build Steps
-
-* Common Parameters::
-* Source Checkout::
-* ShellCommand::
-* Simple ShellCommand Subclasses::
-
-Source Checkout
-
-* CVS::
-* SVN::
-* Darcs::
-* Mercurial::
-* Arch::
-* Bazaar::
-* P4Sync::
-
-Simple ShellCommand Subclasses
-
-* Configure::
-* Compile::
-* Test::
-* Writing New BuildSteps::
-* Build Properties::
-
-Build Factories
-
-* BuildStep Objects::
-* BuildFactory::
-* Process-Specific build factories::
-
-BuildFactory
-
-* BuildFactory Attributes::
-* Quick builds::
-
-Process-Specific build factories
-
-* GNUAutoconf::
-* CPAN::
-* Python distutils::
-* Python/Twisted/trial projects::
-
-Status Delivery
-
-* HTML Waterfall::
-* IRC Bot::
-* PBListener::
-
-Command-line tool
-
-* Administrator Tools::
-* Developer Tools::
-* Other Tools::
-* .buildbot config directory::
-
-Developer Tools
-
-* statuslog::
-* statusgui::
-* try::
-
-Other Tools
-
-* sendchange::
-* debugclient::
-
-
-File: buildbot.info, Node: Introduction, Next: Installation, Prev: Top, Up: Top
-
-1 Introduction
-**************
-
-The BuildBot is a system to automate the compile/test cycle required
-by most software projects to validate code changes. By automatically
-rebuilding and testing the tree each time something has changed,
-build problems are pinpointed quickly, before other developers are
-inconvenienced by the failure. The guilty developer can be identified
-and harassed without human intervention. By running the builds on a
-variety of platforms, developers who do not have the facilities to
-test their changes everywhere before checkin will at least know
-shortly afterwards whether they have broken the build or not. Warning
-counts, lint checks, image size, compile time, and other build
-parameters can be tracked over time, are more visible, and are
-therefore easier to improve.
-
- The overall goal is to reduce tree breakage and provide a platform
-to run tests or code-quality checks that are too annoying or pedantic
-for any human to waste their time with. Developers get immediate (and
-potentially public) feedback about their changes, encouraging them to
-be more careful about testing before checkin.
-
- Features:
-
- * run builds on a variety of slave platforms
-
- * arbitrary build process: handles projects using C, Python,
- whatever
-
- * minimal host requirements: python and Twisted
-
- * slaves can be behind a firewall if they can still do checkout
-
- * status delivery through web page, email, IRC, other protocols
-
- * track builds in progress, provide estimated completion time
-
- * flexible configuration by subclassing generic build process
- classes
-
- * debug tools to force a new build, submit fake Changes, query
- slave status
-
- * released under the GPL
-
-* Menu:
-
-* History and Philosophy::
-* System Architecture::
-* Control Flow::
-
-
-File: buildbot.info, Node: History and Philosophy, Next: System Architecture, Prev: Introduction, Up: Introduction
-
-1.1 History and Philosophy
-==========================
-
-The Buildbot was inspired by a similar project built for a development
-team writing a cross-platform embedded system. The various components
-of the project were supposed to compile and run on several flavors of
-unix (linux, solaris, BSD), but individual developers had their own
-preferences and tended to stick to a single platform. From time to
-time, incompatibilities would sneak in (some unix platforms want to
-use `string.h', some prefer `strings.h'), and then the tree would
-compile for some developers but not others. The buildbot was written
-to automate the human process of walking into the office, updating a
-tree, compiling (and discovering the breakage), finding the developer
-at fault, and complaining to them about the problem they had
-introduced. With multiple platforms it was difficult for developers to
-do the right thing (compile their potential change on all platforms);
-the buildbot offered a way to help.
-
- Another problem was when programmers would change the behavior of a
-library without warning its users, or change internal aspects that
-other code was (unfortunately) depending upon. Adding unit tests to
-the codebase helps here: if an application's unit tests pass despite
-changes in the libraries it uses, you can have more confidence that
-the library changes haven't broken anything. Many developers
-complained that the unit tests were inconvenient or took too long to
-run: having the buildbot run them reduces the developer's workload to
-a minimum.
-
- In general, having more visibility into the project is always good,
-and automation makes it easier for developers to do the right thing.
-When everyone can see the status of the project, developers are
-encouraged to keep the tree in good working order. Unit tests that
-aren't run on a regular basis tend to suffer from bitrot just like
-code does: exercising them on a regular basis helps to keep them
-functioning and useful.
-
- The current version of the Buildbot is additionally targeted at
-distributed free-software projects, where resources and platforms are
-only available when provided by interested volunteers. The buildslaves
-are designed to require an absolute minimum of configuration, reducing
-the effort a potential volunteer needs to expend to be able to
-contribute a new test environment to the project. The goal is that
-anyone who wishes a given project would run on their favorite
-platform should be able to offer that project a buildslave, running on
-that platform, where they can verify that their portability code
-works, and keeps working.
-
-
-File: buildbot.info, Node: System Architecture, Next: Control Flow, Prev: History and Philosophy, Up: Introduction
-
-1.2 System Architecture
-=======================
-
-The Buildbot consists of a single `buildmaster' and one or more
-`buildslaves', connected in a star topology. The buildmaster makes
-all decisions about what and when to build. It sends commands to be
-run on the build slaves, which simply execute the commands and return
-the results. (certain steps involve more local decision making, where
-the overhead of sending a lot of commands back and forth would be
-inappropriate, but in general the buildmaster is responsible for
-everything).
-
- The buildmaster is usually fed `Changes' by some sort of version
-control system *Note Change Sources::, which may cause builds to be
-run. As the builds are performed, various status messages are
-produced, which are then sent to any registered Status Targets *Note
-Status Delivery::.
-
- TODO: picture of change sources, master, slaves, status targets
- should look like docs/PyCon-2003/sources/overview.svg
-
- The buildmaster is configured and maintained by the "buildmaster
-admin", who is generally the project team member responsible for
-build process issues. Each buildslave is maintained by a "buildslave
-admin", who do not need to be quite as involved. Generally slaves are
-run by anyone who has an interest in seeing the project work well on
-their platform.
-
-
-File: buildbot.info, Node: Control Flow, Prev: System Architecture, Up: Introduction
-
-1.3 Control Flow
-================
-
-A day in the life of the buildbot:
-
- * A developer commits some source code changes to the repository.
- A hook script or commit trigger of some sort sends information
- about this change to the buildmaster through one of its
- configured Change Sources. This notification might arrive via
- email, or over a network connection (either initiated by the
- buildmaster as it "subscribes" to changes, or by the commit
- trigger as it pushes Changes towards the buildmaster). The
- Change contains information about who made the change, what
- files were modified, which revision contains the change, and any
- checkin comments.
-
- * The buildmaster distributes this change to all of its configured
- Schedulers. Any "important" changes cause the "tree-stable-timer"
- to be started, and the Change is added to a list of those that
- will go into a new Build. When the timer expires, a Build is
- started on each of a set of configured Builders, all
- compiling/testing the same source code. Unless configured
- otherwise, all Builds run in parallel on the various buildslaves.
-
- * The Build consists of a series of Steps. Each Step causes some
- number of commands to be invoked on the remote buildslave
- associated with that Builder. The first step is almost always to
- perform a checkout of the appropriate revision from the same VC
- system that produced the Change. The rest generally perform a
- compile and run unit tests. As each Step runs, the buildslave
- reports back command output and return status to the buildmaster.
-
- * As the Build runs, status messages like "Build Started", "Step
- Started", "Build Finished", etc, are published to a collection of
- Status Targets. One of these targets is usually the HTML
- "Waterfall" display, which shows a chronological list of events,
- and summarizes the results of the most recent build at the top
- of each column. Developers can periodically check this page to
- see how their changes have fared. If they see red, they know
- that they've made a mistake and need to fix it. If they see
- green, they know that they've done their duty and don't need to
- worry about their change breaking anything.
-
- * If a MailNotifier status target is active, the completion of a
- build will cause email to be sent to any developers whose
- Changes were incorporated into this Build. The MailNotifier can
- be configured to only send mail upon failing builds, or for
- builds which have just transitioned from passing to failing.
- Other status targets can provide similar real-time notification
- via different communication channels, like IRC.
-
-
-
-File: buildbot.info, Node: Installation, Next: Concepts, Prev: Introduction, Up: Top
-
-2 Installation
-**************
-
-* Menu:
-
-* Requirements::
-* Installing the code::
-* Creating a buildmaster::
-* Creating a buildslave::
-* Launching the daemons::
-* Logfiles::
-* Shutdown::
-* Maintenance::
-* Troubleshooting::
-
-
-File: buildbot.info, Node: Requirements, Next: Installing the code, Prev: Installation, Up: Installation
-
-2.1 Requirements
-================
-
-At a bare minimum, you'll need the following (for both the buildmaster
-and a buildslave):
-
- * Python: http://www.python.org
-
- Buildbot requires python-2.2 or later, and is primarily developed
- against python-2.3. The buildmaster uses generators, a feature
- which is not available in python-2.1, and both master and slave
- require a version of Twisted which only works with python-2.2 or
- later. Certain features (like the inclusion of build logs in
- status emails) require python-2.2.2 or later. The IRC "force
- build" command requires python-2.3 (for the shlex.split
- function).
-
- * Twisted: http://twistedmatrix.com
-
- Both the buildmaster and the buildslaves require Twisted-1.3.0 or
- later. It has been mainly developed against Twisted-2.0.1, but
- has been tested against Twisted-2.1.0 (the most recent as of this
- writing), and might even work on versions as old as
- Twisted-1.1.0, but as always the most recent version is
- recommended.
-
- Twisted-1.3.0 and earlier were released as a single monolithic
- package. When you run Buildbot against Twisted-2.0.0 or later
- (which are split into a number of smaller subpackages), you'll
- need at least "Twisted" (the core package), and you'll also want
- TwistedMail, TwistedWeb, and TwistedWords (for sending email,
- serving a web status page, and delivering build status via IRC,
- respectively).
-
- Certain other packages may be useful on the system running the
-buildmaster:
-
- * CVSToys: http://purl.net/net/CVSToys
-
- If your buildmaster uses FreshCVSSource to receive change
- notification from a cvstoys daemon, it will require CVSToys be
-     installed (tested with CVSToys-1.0.10). If it doesn't use
- that source (i.e. if you only use a mail-parsing change source,
- or the SVN notification script), you will not need CVSToys.
-
-
- And of course, your project's build process will impose additional
-requirements on the buildslaves. These hosts must have all the tools
-necessary to compile and test your project's source code.
-
-
-File: buildbot.info, Node: Installing the code, Next: Creating a buildmaster, Prev: Requirements, Up: Installation
-
-2.2 Installing the code
-=======================
-
-The Buildbot is installed using the standard python `distutils'
-module. After unpacking the tarball, the process is:
-
- python setup.py build
- python setup.py install
-
- where the install step may need to be done as root. This will put
-the bulk of the code somewhere like
-/usr/lib/python2.3/site-packages/buildbot . It will also install the
-`buildbot' command-line tool in /usr/bin/buildbot.
-
- To test this, shift to a different directory (like /tmp), and run:
-
- buildbot --version
-
- If it shows you the versions of Buildbot and Twisted, the install
-went ok. If it says `no such command' or it gets an `ImportError'
-when it tries to load the libraries, then something went wrong.
-`pydoc buildbot' is another useful diagnostic tool.
-
- Windows users will find these files in other places. You will need
-to make sure that python can find the libraries, and will probably
-find it convenient to have `buildbot' on your PATH.
-
- If you wish, you can run the buildbot unit test suite like this:
-
- PYTHONPATH=. trial buildbot.test
-
- This should run up to 192 tests, depending upon what VC tools you
-have installed. On my desktop machine it takes about five minutes to
-complete. Nothing should fail, though a few might be skipped. If any of the
-tests fail, you should stop and investigate the cause before
-continuing the installation process, as it will probably be easier to
-track down the bug early.
-
- If you cannot or do not wish to install the buildbot into a
-site-wide location like `/usr' or `/usr/local', you can also install
-it into the account's home directory. Do the install command like
-this:
-
- python setup.py install --home=~
-
- That will populate `~/lib/python' and create `~/bin/buildbot'.
-Make sure this lib directory is on your `PYTHONPATH'.
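-
-   One way to confirm that python is picking up the copy you just
-installed is to ask the interpreter where it found the package:
-
-     import buildbot
-     print buildbot.__file__   # should point into ~/lib/python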
-
-
-File: buildbot.info, Node: Creating a buildmaster, Next: Creating a buildslave, Prev: Installing the code, Up: Installation
-
-2.3 Creating a buildmaster
-==========================
-
-As you learned earlier (*note System Architecture::), the buildmaster
-runs on a central host (usually one that is publicly visible, so
-everybody can check on the status of the project), and controls all
-aspects of the buildbot system. Let us call this host
-`buildbot.example.org'.
-
- You may wish to create a separate user account for the buildmaster,
-perhaps named `buildmaster'. This can help keep your personal
-configuration distinct from that of the buildmaster and is useful if
-you have to use a mail-based notification system (*note Change
-Sources::). However, the Buildbot will work just fine with your
-regular user account.
-
- You need to choose a directory for the buildmaster, called the
-`basedir'. This directory will be owned by the buildmaster, which
-will use configuration files therein, and create status files as it
-runs. `~/Buildbot' is a likely value. If you run multiple
-buildmasters in the same account, or if you run both masters and
-slaves, you may want a more distinctive name like
-`~/Buildbot/master/gnomovision' or `~/Buildmasters/fooproject'. If
-you are using a separate user account, this might just be
-`~buildmaster/masters/fooprojects'.
-
- Once you've picked a directory, use the `buildbot master' command
-to create the directory and populate it with startup files:
-
- buildbot master BASEDIR
-
- You will need to create a configuration file (*note
-Configuration::) before starting the buildmaster. Most of the rest of
-this manual is dedicated to explaining how to do this. A sample
-configuration file is placed in the working directory, named
-`master.cfg.sample', which can be copied to `master.cfg' and edited
-to suit your purposes.
-
- (Internal details: This command creates a file named
-`buildbot.tac' that contains all the state necessary to create the
-buildmaster. Twisted has a tool called `twistd' which can use this
-.tac file to create and launch a buildmaster instance. twistd takes
-care of logging and daemonization (running the program in the
-background). `/usr/bin/buildbot' is a front end which runs twistd for
-you.)
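-
-   As a rough sketch (the exact contents are generated for you and
-may vary between Buildbot versions), a master's `buildbot.tac' looks
-something like this:
-
-     from twisted.application import service
-     from buildbot.master import BuildMaster
-
-     basedir = r'/home/buildmaster/Buildbot'   # filled in from BASEDIR
-     configfile = r'master.cfg'
-
-     application = service.Application('buildmaster')
-     BuildMaster(basedir, configfile).setServiceParent(application)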
-
- In addition to `buildbot.tac', a small `Makefile.sample' is
-installed. This can be used as the basis for customized daemon
-startup, *Note Launching the daemons::.
-
-
-File: buildbot.info, Node: Creating a buildslave, Next: Launching the daemons, Prev: Creating a buildmaster, Up: Installation
-
-2.4 Creating a buildslave
-=========================
-
-Typically, you will be adding a buildslave to an existing buildmaster,
-to provide additional architecture coverage. The buildbot
-administrator will give you several pieces of information necessary to
-connect to the buildmaster. You should also be somewhat familiar with
-the project being tested, so you can troubleshoot build problems
-locally.
-
- The buildbot exists to make sure that the project's stated "how to
-build it" process actually works. To this end, the buildslave should
-run in an environment just like that of your regular developers.
-Typically the project build process is documented somewhere
-(`README', `INSTALL', etc), in a document that should mention all
-library dependencies and contain a basic set of build instructions.
-This document will be useful as you configure the host and account in
-which the buildslave runs.
-
- Here's a good checklist for setting up a buildslave:
-
- 1. Set up the account
-
- It is recommended (although not mandatory) to set up a separate
- user account for the buildslave. This account is frequently named
- `buildbot' or `buildslave'. This serves to isolate your personal
-     working environment from that of the slave, and helps to
- minimize the security threat posed by letting possibly-unknown
- contributors run arbitrary code on your system. The account
- should have a minimum of fancy init scripts.
-
- 2. Install the buildbot code
-
- Follow the instructions given earlier (*note Installing the
- code::). If you use a separate buildslave account, and you
- didn't install the buildbot code to a shared location, then you
- will need to install it with `--home=~' for each account that
- needs it.
-
- 3. Set up the host
-
- Make sure the host can actually reach the buildmaster. Usually
- the buildmaster is running a status webserver on the same
- machine, so simply point your web browser at it and see if you
- can get there. Install whatever additional packages or
-     libraries the project's INSTALL document advises. (Or not: if
-     your buildslave is supposed to make sure that building without
-     optional libraries still works, then don't install those
-     libraries.)
-
- Again, these libraries don't necessarily have to be installed to
- a site-wide shared location, but they must be available to your
- build process. Accomplishing this is usually very specific to
- the build process, so installing them to `/usr' or `/usr/local'
- is usually the best approach.
-
- 4. Test the build process
-
- Follow the instructions in the INSTALL document, in the
- buildslave's account. Perform a full CVS (or whatever) checkout,
- configure, make, run tests, etc. Confirm that the build works
- without manual fussing. If it doesn't work when you do it by
- hand, it will be unlikely to work when the buildbot attempts to
- do it in an automated fashion.
-
- 5. Choose a base directory
-
- This should be somewhere in the buildslave's account, typically
- named after the project which is being tested. The buildslave
- will not touch any file outside of this directory. Something
- like `~/Buildbot' or `~/Buildslaves/fooproject' is appropriate.
-
- 6. Get the buildmaster host/port, botname, and password
-
- When the buildbot admin configures the buildmaster to accept and
- use your buildslave, they will provide you with the following
- pieces of information:
-
- * your buildslave's name
-
- * the password assigned to your buildslave
-
- * the hostname and port number of the buildmaster, i.e.
- buildbot.example.org:8007
-
- 7. Create the buildslave
-
- Now run the 'buildbot' command as follows:
-
- buildbot slave BASEDIR MASTERHOST:PORT SLAVENAME PASSWORD
-
- This will create the base directory and a collection of files
- inside, including the `buildbot.tac' file that contains all the
- information you passed to the `buildbot' command.
-
- 8. Fill in the hostinfo files
-
- When it first connects, the buildslave will send a few files up
- to the buildmaster which describe the host that it is running
- on. These files are presented on the web status display so that
- developers have more information to reproduce any test failures
- that are witnessed by the buildbot. There are sample files in
- the `info' subdirectory of the buildbot's base directory. You
- should edit these to correctly describe you and your host.
-
- `BASEDIR/info/admin' should contain your name and email address.
- This is the "buildslave admin address", and will be visible from
- the build status page (so you may wish to munge it a bit if
- address-harvesting spambots are a concern).
-
- `BASEDIR/info/host' should be filled with a brief description of
- the host: OS, version, memory size, CPU speed, versions of
- relevant libraries installed, and finally the version of the
- buildbot code which is running the buildslave.
-
- If you run many buildslaves, you may want to create a single
- `~buildslave/info' file and share it among all the buildslaves
- with symlinks.
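-
-     As a purely hypothetical example, `info/admin' might contain a
-     single line such as:
-
-          Bob Builder <bob@example.com>
-
-     while `info/host' holds a short free-form description:
-
-          Debian GNU/Linux 3.1, 1GB RAM, Athlon XP 1800+,
-          gcc-3.3, python-2.3, Twisted-2.0.1, buildbot-0.7.x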
-
-
-* Menu:
-
-* Buildslave Options::
-
-
-File: buildbot.info, Node: Buildslave Options, Prev: Creating a buildslave, Up: Creating a buildslave
-
-2.4.1 Buildslave Options
-------------------------
-
-There are a handful of options you might want to use when creating the
-buildslave with the `buildbot slave <options> DIR <params>' command.
-You can type `buildbot slave --help' for a summary. To use these,
-just include them on the `buildbot slave' command line, like this:
-
- buildbot slave --umask=022 ~/buildslave buildmaster.example.org:42012 myslavename mypasswd
-
-`--usepty'
- This is a boolean flag that tells the buildslave whether to
- launch child processes in a PTY (the default) or with regular
- pipes. The advantage of using a PTY is that "grandchild"
- processes are more likely to be cleaned up if the build is
- interrupted or times out (since it enables the use of a "process
- group" in which all child processes will be placed). The
- disadvantages: some forms of Unix have problems with PTYs, some
- of your unit tests may behave differently when run under a PTY
- (generally those which check to see if they are being run
- interactively), and PTYs will merge the stdout and stderr
- streams into a single output stream (which means the red-vs-black
- coloring in the logfiles will be lost). If you encounter
- problems, you can add `--usepty=0' to disable the use of PTYs.
-     Note that Windows buildslaves never use PTYs.
-
-`--umask'
- This is a string (generally an octal representation of an
- integer) which will cause the buildslave process' "umask" value
- to be set shortly after initialization. The "twistd"
- daemonization utility forces the umask to 077 at startup (which
- means that all files created by the buildslave or its child
- processes will be unreadable by any user other than the
- buildslave account). If you want build products to be readable
- by other accounts, you can add `--umask=022' to tell the
- buildslave to fix the umask after twistd clobbers it. If you want
- build products to be _writable_ by other accounts too, use
- `--umask=000', but this is likely to be a security problem.
-
-`--keepalive'
- This is a number that indicates how frequently "keepalive"
- messages should be sent from the buildslave to the buildmaster,
- expressed in seconds. The default (600) causes a message to be
- sent to the buildmaster at least once every 10 minutes. To set
- this to a lower value, use e.g. `--keepalive=120'.
-
- If the buildslave is behind a NAT box or stateful firewall, these
- messages may help to keep the connection alive: some NAT boxes
- tend to forget about a connection if it has not been used in a
- while. When this happens, the buildmaster will think that the
- buildslave has disappeared, and builds will time out. Meanwhile
-the buildslave will not realize that anything is wrong.
-
-
-
-File: buildbot.info, Node: Launching the daemons, Next: Logfiles, Prev: Creating a buildslave, Up: Installation
-
-2.5 Launching the daemons
-=========================
-
-Both the buildmaster and the buildslave run as daemon programs. To
-launch them, pass the working directory to the `buildbot' command:
-
- buildbot start BASEDIR
-
- This command will start the daemon and then return, so normally it
-will not produce any output. To verify that the programs are indeed
-running, look for a pair of files named `twistd.log' and `twistd.pid'
-that should be created in the working directory. `twistd.pid'
-contains the process ID of the newly-spawned daemon.
-
- When the buildslave connects to the buildmaster, new directories
-will start appearing in its base directory. The buildmaster tells the
-slave to create a directory for each Builder which will be using that
-slave. All build operations are performed within these directories:
-CVS checkouts, compiles, and tests.
-
- Once you get everything running, you will want to arrange for the
-buildbot daemons to be started at boot time. One way is to use
-`cron', by putting them in a @reboot crontab entry(1):
-
- @reboot buildbot start BASEDIR
-
- When you run `crontab' to set this up, remember to do it as the
-buildmaster or buildslave account! If you add this to your crontab
-when running as your regular account (or worse yet, root), then the
-daemon will run as the wrong user, quite possibly as one with more
-authority than you intended to provide.
-
- It is important to remember that the environment provided to cron
-jobs and init scripts can be quite different from your normal runtime.
-There may be fewer environment variables specified, and the PATH may
-be shorter than usual. It is a good idea to test out this method of
-launching the buildslave by using a cron job with a time in the near
-future, with the same command, and then check `twistd.log' to make
-sure the slave actually started correctly. Common problems here are
-for `/usr/local' or `~/bin' to not be on your `PATH', or for
-`PYTHONPATH' to not be set correctly. Sometimes `HOME' is messed up
-too.
-
- To modify the way the daemons are started (perhaps you want to set
-some environment variables first, or perform some cleanup each time),
-you can create a file named `Makefile.buildbot' in the base
-directory. When the `buildbot' front-end tool is told to `start' the
-daemon, and it sees this file (and `/usr/bin/make' exists), it will
-do `make -f Makefile.buildbot start' instead of its usual action
-(which involves running `twistd'). When the buildmaster or buildslave
-is installed, a `Makefile.sample' is created which implements the
-same behavior that the `buildbot' tool uses, so if you want to
-customize the process, just copy `Makefile.sample' to
-`Makefile.buildbot' and edit it as necessary.
-
- ---------- Footnotes ----------
-
- (1) this @reboot syntax is understood by Vixie cron, which is the
-flavor usually provided with linux systems. Other unices may have a
-cron that doesn't understand @reboot
-
-
-File: buildbot.info, Node: Logfiles, Next: Shutdown, Prev: Launching the daemons, Up: Installation
-
-2.6 Logfiles
-============
-
-While a buildbot daemon runs, it emits text to a logfile, named
-`twistd.log'. A command like `tail -f twistd.log' is useful to watch
-the command output as it runs.
-
- The buildmaster will announce any errors with its configuration
-file in the logfile, so it is a good idea to look at the log at
-startup time to check for any problems. Most buildmaster activities
-will cause lines to be added to the log.
-
-
-File: buildbot.info, Node: Shutdown, Next: Maintenance, Prev: Logfiles, Up: Installation
-
-2.7 Shutdown
-============
-
-To stop a buildmaster or buildslave manually, use:
-
- buildbot stop BASEDIR
-
- This simply looks for the `twistd.pid' file and kills whatever
-process is identified within.
-
- At system shutdown, all processes are sent a `SIGKILL'. The
-buildmaster and buildslave will respond to this by shutting down
-normally.
-
- The buildmaster will respond to a `SIGHUP' by re-reading its
-config file. The following shortcut is available:
-
- buildbot sighup BASEDIR
-
- When you update the Buildbot code to a new release, you will need
-to restart the buildmaster and/or buildslave before it can take
-advantage of the new code. You can do a `buildbot stop BASEDIR' and
-`buildbot start BASEDIR' in quick succession, or you can use the
-`restart' shortcut, which does both steps for you:
-
- buildbot restart BASEDIR
-
-
-File: buildbot.info, Node: Maintenance, Next: Troubleshooting, Prev: Shutdown, Up: Installation
-
-2.8 Maintenance
-===============
-
-It is a good idea to check the buildmaster's status page every once in
-a while, to see if your buildslave is still online. Eventually the
-buildbot will probably be enhanced to send you email (via the
-`info/admin' email address) when the slave has been offline for more
-than a few hours.
-
- If you find you can no longer provide a buildslave to the project,
-please let the project admins know, so they can put out a call for a
-replacement.
-
- The Buildbot records status and logs output continually, each time
-a build is performed. The status tends to be small, but the build logs
-can become quite large. Each build and log are recorded in a separate
-file, arranged hierarchically under the buildmaster's base directory.
-To prevent these files from growing without bound, you should
-periodically delete old build logs. A simple cron job to delete
-anything older than, say, two weeks should do the job. The only trick
-is to leave the `buildbot.tac' and other support files alone, for
-which find's `-mindepth' argument helps skip everything in the top
-directory. You can use something like the following:
-
- @weekly cd BASEDIR && find . -mindepth 2 -type f -mtime +14 -exec rm {} \;
- @weekly cd BASEDIR && find twistd.log* -mtime +14 -exec rm {} \;
-
-
-File: buildbot.info, Node: Troubleshooting, Prev: Maintenance, Up: Installation
-
-2.9 Troubleshooting
-===================
-
-Here are a few hints on diagnosing common problems.
-
-* Menu:
-
-* Starting the buildslave::
-* Connecting to the buildmaster::
-* Forcing Builds::
-
-
-File: buildbot.info, Node: Starting the buildslave, Next: Connecting to the buildmaster, Prev: Troubleshooting, Up: Troubleshooting
-
-2.9.1 Starting the buildslave
------------------------------
-
-Cron jobs are typically run with a minimal shell (`/bin/sh', not
-`/bin/bash'), and tilde expansion is not always performed in such
-commands. You may want to use explicit paths, because the `PATH' is
-usually quite short and doesn't include anything set by your shell's
-startup scripts (`.profile', `.bashrc', etc). If you've installed
-buildbot (or other python libraries) to an unusual location, you may
-need to add a `PYTHONPATH' specification (note that python will do
-tilde-expansion on `PYTHONPATH' elements by itself). Sometimes it is
-safer to fully-specify everything:
-
- @reboot PYTHONPATH=~/lib/python /usr/local/bin/buildbot start /usr/home/buildbot/basedir
-
- Take the time to get the @reboot job set up. Otherwise, things
-will work fine for a while, but the first power outage or system
-reboot you have will stop the buildslave with nothing but the cries
-of sorrowful developers to remind you that it has gone away.
-
-
-File: buildbot.info, Node: Connecting to the buildmaster, Next: Forcing Builds, Prev: Starting the buildslave, Up: Troubleshooting
-
-2.9.2 Connecting to the buildmaster
------------------------------------
-
-If the buildslave cannot connect to the buildmaster, the reason should
-be described in the `twistd.log' logfile. Some common problems are an
-incorrect master hostname or port number, or a mistyped bot name or
-password. If the buildslave loses the connection to the master, it is
-supposed to attempt to reconnect with an exponentially-increasing
-backoff. Each attempt (and the time of the next attempt) will be
-logged. If you get impatient, just manually stop and re-start the
-buildslave.
-
- When the buildmaster is restarted, all slaves will be disconnected,
-and will attempt to reconnect as usual. The reconnect time will depend
-upon how long the buildmaster is offline (i.e. how far up the
-exponential backoff curve the slaves have travelled). Again,
-`buildbot stop BASEDIR; buildbot start BASEDIR' will speed up the
-process.
-
-
-File: buildbot.info, Node: Forcing Builds, Prev: Connecting to the buildmaster, Up: Troubleshooting
-
-2.9.3 Forcing Builds
---------------------
-
-From the buildmaster's main status web page, you can force a build to
-be run on your build slave. Figure out which column is for a builder
-that runs on your slave, click on that builder's name, and the page
-that comes up will have a "Force Build" button. Fill in the form, hit
-the button, and a moment later you should see your slave's
-`twistd.log' filling with commands being run. Using `pstree' or `top'
-should also reveal the cvs/make/gcc/etc processes being run by the
-buildslave. Note that the same web page should also show the `admin'
-and `host' information files that you configured earlier.
-
-
-File: buildbot.info, Node: Concepts, Next: Configuration, Prev: Installation, Up: Top
-
-3 Concepts
-**********
-
-This chapter defines some of the basic concepts that the Buildbot
-uses. You'll need to understand how the Buildbot sees the world to
-configure it properly.
-
-* Menu:
-
-* Version Control Systems::
-* Schedulers::
-* BuildSet::
-* BuildRequest::
-* Builder::
-* Users::
-
-
-File: buildbot.info, Node: Version Control Systems, Next: Schedulers, Prev: Concepts, Up: Concepts
-
-3.1 Version Control Systems
-===========================
-
-The source trees to be built come from a Version Control System of some kind.
-CVS and Subversion are two popular ones, but the Buildbot supports
-others. All VC systems have some notion of an upstream `repository'
-which acts as a server(1), from which clients can obtain source trees
-according to various parameters. The VC repository provides source
-trees of various projects, for different branches, and from various
-points in time. The first thing we have to do is to specify which
-source tree we want to get.
-
-* Menu:
-
-* Generalizing VC Systems::
-* Source Tree Specifications::
-* How Different VC Systems Specify Sources::
-* Attributes of Changes::
-
- ---------- Footnotes ----------
-
- (1) except Darcs, but since the Buildbot never modifies its local
-source tree we can ignore the fact that Darcs uses a less centralized
-model
-
-
-File: buildbot.info, Node: Generalizing VC Systems, Next: Source Tree Specifications, Prev: Version Control Systems, Up: Version Control Systems
-
-3.1.1 Generalizing VC Systems
------------------------------
-
-For the purposes of the Buildbot, we will try to generalize all VC
-systems as having repositories that each provide sources for a variety
-of projects. Each project is defined as a directory tree with source
-files. The individual files may each have revisions, but we ignore
-that and treat the project as a whole as having a set of revisions.
-Each time someone commits a change to the project, a new revision
-becomes available. These revisions can be described by a tuple with
-two items: the first is a branch tag, and the second is some kind of
-timestamp or revision stamp. Complex projects may have multiple branch
-tags, but there is always a default branch. The timestamp may be an
-actual timestamp (such as the -D option to CVS), or it may be a
-monotonically-increasing transaction number (such as the change number
-used by SVN and P4, or the revision number used by Arch, or a labeled
-tag used in CVS)(1). The SHA1 revision ID used by Monotone and
-Mercurial is also a kind of revision stamp, in that it specifies a
-unique copy of the source tree, as does a Darcs "context" file.
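-
-   As a hedged illustration (the values are invented), such revision
-tuples might look like:
-
-     ("trunk", 1133000000)       # CVS: branch tag plus a timestamp
-     ("trunk", 4278)             # SVN: branch plus a revision number
-     ("warner-newfeature", "a1b2c3d4e5f6")  # Mercurial: branch plus hash ID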
-
- When we aren't intending to make any changes to the sources we
-check out (at least not any that need to be committed back upstream),
-there are two basic ways to use a VC system:
-
- * Retrieve a specific set of source revisions: some tag or key is
- used to index this set, which is fixed and cannot be changed by
- subsequent developers committing new changes to the tree.
- Releases are built from tagged revisions like this, so that they
- can be rebuilt again later (probably with controlled
- modifications).
-
- * Retrieve the latest sources along a specific branch: some tag is
- used to indicate which branch is to be used, but within that
- constraint we want to get the latest revisions.
-
- Build personnel or CM staff typically use the first approach: the
-build that results is (ideally) completely specified by the two
-parameters given to the VC system: repository and revision tag. This
-gives QA and end-users something concrete to point at when reporting
-bugs. Release engineers are also reportedly fond of shipping code that
-can be traced back to a concise revision tag of some sort.
-
- Developers are more likely to use the second approach: each morning
-the developer does an update to pull in the changes committed by the
-team over the last day. These builds are not easy to fully specify: it
-depends upon exactly when you did a checkout, and upon what local
-changes the developer has in their tree. Developers do not normally
-tag each build they produce, because there is usually significant
-overhead involved in creating these tags. Recreating the trees used by
-one of these builds can be a challenge. Some VC systems may provide
-implicit tags (like a revision number), while others may allow the use
-of timestamps to mean "the state of the tree at time X" as opposed to
-a tree-state that has been explicitly marked.
-
- The Buildbot is designed to help developers, so it usually works in
-terms of _the latest_ sources as opposed to specific tagged
-revisions. However, it would really prefer to build from reproducible
-source trees, so implicit revisions are used whenever possible.
-
- ---------- Footnotes ----------
-
- (1) many VC systems provide more complexity than this: in
-particular the local views that P4 and ClearCase can assemble out of
-various source directories are more complex than we're prepared to
-take advantage of here
-
-
-File: buildbot.info, Node: Source Tree Specifications, Next: How Different VC Systems Specify Sources, Prev: Generalizing VC Systems, Up: Version Control Systems
-
-3.1.2 Source Tree Specifications
---------------------------------
-
-So for the Buildbot's purposes we treat each VC system as a server
-which can take a list of specifications as input and produce a source
-tree as output. Some of these specifications are static: they are
-attributes of the builder and do not change over time. Others are more
-variable: each build will have a different value. The repository is
-changed over time by a sequence of Changes, each of which represents a
-single developer making changes to some set of files. These Changes
-are cumulative(1).
-
- For normal builds, the Buildbot wants to get well-defined source
-trees that contain specific Changes, and exclude other Changes that
-may have occurred after the desired ones. We assume that the Changes
-arrive at the buildbot (through one of the mechanisms described in
-*note Change Sources::) in the same order in which they are committed
-to the repository. The Buildbot waits for the tree to become "stable"
-before initiating a build, for two reasons. The first is that
-developers frequently make multiple related commits in quick
-succession, even when the VC system provides ways to make atomic
-transactions involving multiple files at the same time. Running a
-build in the middle of these sets of changes would use an inconsistent
-set of source files, and is likely to fail (and is certain to be less
-useful than a build which uses the full set of changes). The
-tree-stable-timer is intended to avoid these useless builds that
-include some of the developer's changes but not all. The second reason
-is that some VC systems (e.g. CVS) do not provide repository-wide
-transaction numbers, so that timestamps are the only way to refer to
-a specific repository state. These timestamps may be somewhat
-ambiguous, due to processing and notification delays. By waiting until
-the tree has been stable for, say, 10 minutes, we can choose a
-timestamp from the middle of that period to use for our source
-checkout, and then be reasonably sure that any clock-skew errors will
-not cause the build to be performed on an inconsistent set of source
-files.
-
- The Schedulers always use the tree-stable-timer, with a timeout
-that is configured to reflect a reasonable tradeoff between build
-latency and change frequency. When the VC system provides coherent
-repository-wide revision markers (such as Subversion's revision
-numbers, or in fact anything other than CVS's timestamps), the
-resulting Build is simply performed against a source tree defined by
-that revision marker. When the VC system does not provide this, a
-timestamp from the middle of the tree-stable period is used to
-generate the source tree(2).
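-
-   A minimal sketch of that timestamp choice (the names are purely
-illustrative; the real logic lives in the Scheduler and Source step):
-
-     tree_stable_timer = 10 * 60          # seconds of required quiet
-     last_change_time = 1133000000        # time of the newest Change
-     checkout_delay = tree_stable_timer / 2
-     checkout_timestamp = last_change_time + checkout_delay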
-
- ---------- Footnotes ----------
-
- (1) Monotone's _multiple heads_ feature violates this assumption
-of cumulative Changes, but in most situations the changes don't occur
-frequently enough for this to be a significant problem
-
- (2) this `checkoutDelay' defaults to half the tree-stable timer,
-but it can be overridden with an argument to the Source Step
-
-
-File: buildbot.info, Node: How Different VC Systems Specify Sources, Next: Attributes of Changes, Prev: Source Tree Specifications, Up: Version Control Systems
-
-3.1.3 How Different VC Systems Specify Sources
-----------------------------------------------
-
-For CVS, the static specifications are `repository' and `module'. In
-addition to those, each build uses a timestamp (or omits the
-timestamp to mean `the latest') and `branch tag' (which defaults to
-HEAD). These parameters collectively specify a set of sources from
-which a build may be performed.
-
- Subversion (http://subversion.tigris.org) combines the repository,
-module, and branch into a single `Subversion URL' parameter. Within
-that scope, source checkouts can be specified by a numeric `revision
-number' (a repository-wide monotonically-increasing marker, such that
-each transaction that changes the repository is indexed by a
-different revision number), or a revision timestamp. When branches
-are used, the repository and module form a static `baseURL', while
-each build has a `revision number' and a `branch' (which defaults to a
-statically-specified `defaultBranch'). The `baseURL' and `branch' are
-simply concatenated together to derive the `svnurl' to use for the
-checkout.
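-
-   A tiny sketch of that concatenation (the URLs are hypothetical):
-
-     baseURL = "svn://svn.example.org/repo/branches/"
-     branch = "warner-newfeature"        # per-build, or the defaultBranch
-     svnurl = baseURL + branch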
-
- Arch (http://wiki.gnuarch.org/) and Bazaar
-(http://bazaar.canonical.com/) specify a repository by URL, as well
-as a `version' which is kind of like a branch name. Arch uses the
-word `archive' to represent the repository. Arch lets you push
-changes from one archive to another, removing the strict
-centralization required by CVS and SVN. It retains the distinction
-between repository and working directory that most other VC systems
-use. For complex multi-module directory structures, Arch has a
-built-in `build config' layer with which the checkout process has two
-steps. First, an initial bootstrap checkout is performed to retrieve
-a set of build-config files. Second, one of these files is used to
-figure out which archives/modules should be used to populate
-subdirectories of the initial checkout.
-
- Builders which use Arch and Bazaar therefore have a static archive
-`url', and a default "branch" (which is a string that specifies a
-complete category-branch-version triple). Each build can have its own
-branch (the category-branch-version string) to override the default,
-as well as a revision number (which is turned into a -patch-NN suffix
-when performing the checkout).
-
- Darcs (http://abridgegame.org/darcs/) doesn't really have the
-notion of a single master repository. Nor does it really have
-branches. In Darcs, each working directory is also a repository, and
-there are operations to push and pull patches from one of these
-`repositories' to another. For the Buildbot's purposes, all you need
-to do is specify the URL of a repository that you want to build from.
-The build slave will then pull the latest patches from that
-repository and build them. Multiple branches are implemented by using
-multiple repositories (possibly living on the same server).
-
- Builders which use Darcs therefore have a static `repourl' which
-specifies the location of the repository. If branches are being used,
-the source Step is instead configured with a `baseURL' and a
-`defaultBranch', and the two strings are simply concatenated together
-to obtain the repository's URL. Each build then has a specific branch
-which replaces `defaultBranch', or just uses the default one. Instead
-of a revision number, each build can have a "context", which is a
-string that records all the patches that are present in a given tree
-(this is the output of `darcs changes --context', and is considerably
-less concise than, e.g. Subversion's revision number, but the
-patch-reordering flexibility of Darcs makes it impossible to provide
-a shorter useful specification).
-
- Mercurial (http://selenic.com/mercurial) is like Darcs, in that
-each branch is stored in a separate repository. The `repourl',
-`baseURL', and `defaultBranch' arguments are all handled the same way
-as with Darcs. The "revision", however, is the hash identifier
-returned by `hg identify'.
-
-
-File: buildbot.info, Node: Attributes of Changes, Prev: How Different VC Systems Specify Sources, Up: Version Control Systems
-
-3.1.4 Attributes of Changes
----------------------------
-
-Who
-===
-
-Each Change has a `who' attribute, which specifies which developer is
-responsible for the change. This is a string which comes from a
-namespace controlled by the VC repository. Frequently this means it
-is a username on the host which runs the repository, but not all VC
-systems require this (Arch, for example, uses a fully-qualified `Arch
-ID', which looks like an email address, as does Darcs). Each
-StatusNotifier will map the `who' attribute into something
-appropriate for their particular means of communication: an email
-address, an IRC handle, etc.
-
-Files
-=====
-
-It also has a list of `files', which are just the tree-relative
-filenames of any files that were added, deleted, or modified for this
-Change. These filenames are used by the `isFileImportant' function
-(in the Scheduler) to decide whether it is worth triggering a new
-build or not, e.g. the function could use `filename.endswith(".c")'
-to only run a build if a C file were checked in. Certain BuildSteps
-can also use the list of changed files to run a more targeted series
-of tests, e.g. the `step_twisted.Trial' step can run just the unit
-tests that provide coverage for the modified .py files instead of
-running the full test suite.
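-
-   For instance, a function of roughly this shape (a sketch; check
-the Scheduler documentation for the exact hook name and signature)
-could restrict builds to changes that touch C files:
-
-     def isFileImportant(change):
-         for name in change.files:
-             if name.endswith(".c"):
-                 return True
-         return False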
-
-Comments
-========
-
-The Change also has a `comments' attribute, which is a string
-containing any checkin comments.
-
-Revision
-========
-
-Each Change can have a `revision' attribute, which describes how to
-get a tree with a specific state: a tree which includes this Change
-(and all that came before it) but none that come after it. If this
-information is unavailable, the `.revision' attribute will be `None'.
-These revisions are provided by the ChangeSource, and consumed by the
-`computeSourceRevision' method in the appropriate `step.Source' class.
-
-`CVS'
- `revision' is an int, seconds since the epoch
-
-`SVN'
-     `revision' is an int, a transaction number (r%d)
-
-`Darcs'
- `revision' is a large string, the output of `darcs changes
- --context'
-
-`Mercurial'
- `revision' is a short string (a hash ID), the output of `hg
- identify'
-
-`Arch/Bazaar'
- `revision' is the full revision ID (ending in -patch-%d)
-
-`P4'
- `revision' is an int, the transaction number
-
-Branches
-========
-
-The Change might also have a `branch' attribute. This indicates that
-all of the Change's files are in the same named branch. The
-Schedulers get to decide whether the branch should be built or not.
-
- For VC systems like CVS, Arch, and Monotone, the `branch' name is
-unrelated to the filename (that is, the branch name and the filename
-inhabit unrelated namespaces). For SVN, branches are expressed as
-subdirectories of the repository, so the file's "svnurl" is a
-combination of some base URL, the branch name, and the filename within
-the branch. (In a sense, the branch name and the filename inhabit the
-same namespace). Darcs branches are subdirectories of a base URL just
-like SVN. Mercurial branches are the same as Darcs.
-
-`CVS'
- branch='warner-newfeature', files=['src/foo.c']
-
-`SVN'
- branch='branches/warner-newfeature', files=['src/foo.c']
-
-`Darcs'
- branch='warner-newfeature', files=['src/foo.c']
-
-`Mercurial'
- branch='warner-newfeature', files=['src/foo.c']
-
-`Arch/Bazaar'
- branch='buildbot-usebranches-0', files=['buildbot/master.py']
-
-Links
-=====
-
-Finally, the Change might have a `links' list, which is intended to
-provide a list of URLs to a _viewcvs_-style web page that provides
-more detail for this Change, perhaps including the full file diffs.
-
-
-File: buildbot.info, Node: Schedulers, Next: BuildSet, Prev: Version Control Systems, Up: Concepts
-
-3.2 Schedulers
-==============
-
-Each Buildmaster has a set of `Scheduler' objects, each of which gets
-a copy of every incoming Change. The Schedulers are responsible for
-deciding when Builds should be run. Some Buildbot installations might
-have a single Scheduler, while others may have several, each for a
-different purpose.
-
- For example, a "quick" scheduler might exist to give immediate
-feedback to developers, hoping to catch obvious problems in the code
-that can be detected quickly. These typically do not run the full test
-suite, nor do they run on a wide variety of platforms. They also
-usually do a VC update rather than performing a brand-new checkout
-each time. You could have a "quick" scheduler which uses a 30 second
-timeout and feeds a single "quick" Builder that uses a VC
-`mode='update'' setting.
-
- A separate "full" scheduler would run more comprehensive tests a
-little while later, to catch more subtle problems. This scheduler
-would have a longer tree-stable-timer, maybe 30 minutes, and would
-feed multiple Builders (with a `mode=' of `'copy'', `'clobber'', or
-`'export'').
-
- The `tree-stable-timer' and `isFileImportant' decisions are made
-by the Scheduler. Dependencies are also implemented here. Periodic
-builds (those which are run every N seconds rather than after new
-Changes arrive) are triggered by a special `Periodic' Scheduler
-subclass. The default Scheduler class can also be told to watch for
-specific branches, ignoring Changes on other branches. This may be
-useful if you have a trunk and a few release branches which should be
-tracked, but you don't want to have the Buildbot pay attention
-to several dozen private user branches.
-
-   Some Schedulers may trigger builds for reasons other than
-recent Changes. For example, a Scheduler subclass could connect to a
-remote buildmaster and watch for builds of a library to succeed before
-triggering a local build that uses that library.
-
- Each Scheduler creates and submits `BuildSet' objects to the
-`BuildMaster', which is then responsible for making sure the
-individual `BuildRequests' are delivered to the target `Builders'.
-
- `Scheduler' instances are activated by placing them in the
-`c['schedulers']' list in the buildmaster config file. Each Scheduler
-has a unique name.
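-
-   For example, the "quick" and "full" schedulers described above
-might be set up like this (the builder names are hypothetical):
-
-     from buildbot.scheduler import Scheduler
-     quick = Scheduler(name="quick", branch=None, treeStableTimer=30,
-                       builderNames=["quick-linux"])
-     full = Scheduler(name="full", branch=None, treeStableTimer=30*60,
-                      builderNames=["full-linux", "full-solaris"])
-     c['schedulers'] = [quick, full]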
-
-
-File: buildbot.info, Node: BuildSet, Next: BuildRequest, Prev: Schedulers, Up: Concepts
-
-3.3 BuildSet
-============
-
-A `BuildSet' is the name given to a set of Builds that all
-compile/test the same version of the tree on multiple Builders. In
-general, all these component Builds will perform the same sequence of
-Steps, using the same source code, but on different platforms or
-against a different set of libraries.
-
- The `BuildSet' is tracked as a single unit, which fails if any of
-the component Builds have failed, and therefore can succeed only if
-_all_ of the component Builds have succeeded. There are two kinds of
-status notification messages that can be emitted for a BuildSet: the
-`firstFailure' type (which fires as soon as we know the BuildSet will
-fail), and the `Finished' type (which fires once the BuildSet has
-completely finished, regardless of whether the overall set passed or
-failed).
-
- A `BuildSet' is created with a _source stamp_ tuple of (branch,
-revision, changes, patch), some of which may be None, and a list of
-Builders on which it is to be run. They are then given to the
-BuildMaster, which is responsible for creating a separate
-`BuildRequest' for each Builder.
-
- There are a couple of different likely values for the
-`SourceStamp':
-
-`(revision=None, changes=[CHANGES], patch=None)'
- This is a `SourceStamp' used when a series of Changes have
- triggered a build. The VC step will attempt to check out a tree
- that contains CHANGES (and any changes that occurred before
- CHANGES, but not any that occurred after them).
-
-`(revision=None, changes=None, patch=None)'
- This builds the most recent code on the default branch. This is
- the sort of `SourceStamp' that would be used on a Build that was
- triggered by a user request, or a Periodic scheduler. It is also
- possible to configure the VC Source Step to always check out the
- latest sources rather than paying attention to the Changes in the
-     SourceStamp, which will result in the same behavior as this.
-
-`(branch=BRANCH, revision=None, changes=None, patch=None)'
- This builds the most recent code on the given BRANCH. Again,
- this is generally triggered by a user request or Periodic build.
-
-`(revision=REV, changes=None, patch=(LEVEL, DIFF))'
- This checks out the tree at the given revision REV, then applies
- a patch (using `diff -pLEVEL <DIFF'). The *Note try:: feature
- uses this kind of `SourceStamp'. If `patch' is None, the patching
- step is bypassed.
-
-
- The buildmaster is responsible for turning the `BuildSet' into a
-set of `BuildRequest' objects and queueing them on the appropriate
-Builders.
-
-
-File: buildbot.info, Node: BuildRequest, Next: Builder, Prev: BuildSet, Up: Concepts
-
-3.4 BuildRequest
-================
-
-A `BuildRequest' is a request to build a specific set of sources on a
-single specific Builder. Each Builder runs the `BuildRequest' as soon
-as it can (i.e. when an associated buildslave becomes free).
-
- The `BuildRequest' contains the `SourceStamp' specification. The
-actual process of running the build (the series of Steps that will be
-executed) is implemented by the `Build' object. In the future this
-might be changed, to have the `Build' define _what_ gets built, and a
-separate `BuildProcess' (provided by the Builder) to define _how_ it
-gets built.
-
- The `BuildRequest' may be mergeable with other compatible
-`BuildRequest's. Builds that are triggered by incoming Changes will
-generally be mergeable. Builds that are triggered by user requests
-are generally not, unless they are multiple requests to build the
-_latest sources_ of the same branch.
-
-
-File: buildbot.info, Node: Builder, Next: Users, Prev: BuildRequest, Up: Concepts
-
-3.5 Builder
-===========
-
-The `Builder' is a long-lived object which controls all Builds of a
-given type. Each one is created when the config file is first parsed,
-and lives forever (or rather until it is removed from the config
-file). It mediates the connections to the buildslaves that do all the
-work, and is responsible for creating the `Build' objects that decide
-_how_ a build is performed (i.e., which steps are executed in what
-order).
-
- Each `Builder' gets a unique name, and the path name of a
-directory where it gets to do all its work (there is a
-buildmaster-side directory for keeping status information, as well as
-a buildslave-side directory where the actual checkout/compile/test
-commands are executed). It also gets a `BuildFactory', which is
-responsible for creating new `Build' instances: because the `Build'
-instance is what actually performs each build, choosing the
-`BuildFactory' is the way to specify what happens each time a build
-is done.
-
-   Each `Builder' is associated with one or more `BuildSlaves'. A
-`Builder' which is used to perform OS-X builds (as opposed to Linux
-or Solaris builds) should naturally be associated with an OS-X-based
-buildslave.
-
-
-File: buildbot.info, Node: Users, Prev: Builder, Up: Concepts
-
-3.6 Users
-=========
-
-Buildbot has a somewhat limited awareness of _users_. It assumes the
-world consists of a set of developers, each of whom can be described
-by a couple of simple attributes. These developers make changes to
-the source code, causing builds which may succeed or fail.
-
- Each developer is primarily known through the source control
-system. Each Change object that arrives is tagged with a `who' field
-that typically gives the account name (on the repository machine) of
-the user responsible for that change. This string is the primary key
-by which the User is known, and is displayed on the HTML status pages
-and in each Build's "blamelist".
-
- To do more with the User than just refer to them, this username
-needs to be mapped into an address of some sort. The responsibility
-for this mapping is left up to the status module which needs the
-address. The core code knows nothing about email addresses or IRC
-nicknames, just user names.
-
-* Menu:
-
-* Doing Things With Users::
-* Email Addresses::
-* IRC Nicknames::
-* Live Status Clients::
-
-
-File: buildbot.info, Node: Doing Things With Users, Next: Email Addresses, Prev: Users, Up: Users
-
-3.6.1 Doing Things With Users
------------------------------
-
-Each Change has a single User who is responsible for that Change. Most
-Builds have a set of Changes: the Build represents the first time
-these Changes have been built and tested by the Buildbot. The build
-has a "blamelist" that consists of a simple union of the Users
-responsible for all the Build's Changes.
-
- The Build provides (through the IBuildStatus interface) a list of
-Users who are "involved" in the build. For now this is equal to the
-blamelist, but in the future it will be expanded to include a "build
-sheriff" (a person who is "on duty" at that time and responsible for
-watching over all builds that occur during their shift), as well as
-per-module owners who simply want to keep watch over their domain
-(chosen by subdirectory or a regexp matched against the filenames
-pulled out of the Changes). The Involved Users are those who probably
-have an interest in the results of any given build.
-
- In the future, Buildbot will acquire the concept of "Problems",
-which last longer than builds and have beginnings and ends. For
-example, a test case which passed in one build and then failed in the
-next is a Problem. The Problem lasts until the test case starts
-passing again, at which point the Problem is said to be "resolved".
-
- If there appears to be a code change that went into the tree at the
-same time as the test started failing, that Change is marked as being
-responsible for the Problem, and the user who made the change is added
-to the Problem's "Guilty" list. In addition to this user, there may
-be others who share responsibility for the Problem (module owners,
-sponsoring developers). In addition to the Responsible Users, there
-may be a set of Interested Users, who take an interest in the fate of
-the Problem.
-
- Problems therefore have sets of Users who may want to be kept
-aware of the condition of the problem as it changes over time. If
-configured, the Buildbot can pester everyone on the Responsible list
-with increasing harshness until the problem is resolved, with the
-most harshness reserved for the Guilty parties themselves. The
-Interested Users may merely be told when the problem starts and
-stops, as they are not actually responsible for fixing anything.
-
-
-File: buildbot.info, Node: Email Addresses, Next: IRC Nicknames, Prev: Doing Things With Users, Up: Users
-
-3.6.2 Email Addresses
----------------------
-
-The `buildbot.status.mail.MailNotifier' class provides a status
-target which can send email about the results of each build. It
-accepts a static list of email addresses to which each message should
-be delivered, but it can also be configured to send mail to the
-Build's Interested Users. To do this, it needs a way to convert User
-names into email addresses.
-
- For many VC systems, the User Name is actually an account name on
-the system which hosts the repository. As such, turning the name into
-an email address is a simple matter of appending
-"@repositoryhost.com". Some projects use other kinds of mappings (for
-example the preferred email address may be at "project.org" despite
-the repository host being named "cvs.project.org"), and some VC
-systems have full separation between the concept of a user and that
-of an account on the repository host (like Perforce). Some systems
-(like Arch) put a full contact email address in every change.
-
- To convert these names to addresses, the MailNotifier uses an
-EmailLookup object. This provides a .getAddress method which accepts
-a name and (eventually) returns an address. The default `MailNotifier'
-module provides an EmailLookup which simply appends a static string,
-configurable when the notifier is created. To create more complex
-behaviors (perhaps using an LDAP lookup, or using "finger" on a
-central host to determine a preferred address for the developer),
-provide a different object as the `lookup' argument.
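-
-   A hedged sketch of such a custom lookup object (only the
-`getAddress' method described above is assumed; error handling and
-deferred results are omitted):
-
-     from buildbot.status import mail
-
-     class ProjectLookup:
-         def getAddress(self, user):
-             # map a repository username onto the project's mail domain
-             return user + "@project.org"
-
-     mn = mail.MailNotifier(fromaddr="buildbot@project.org",
-                            lookup=ProjectLookup())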
-
- In the future, when the Problem mechanism has been set up, the
-Buildbot will need to send mail to arbitrary Users. It will do this
-by locating a MailNotifier-like object among all the buildmaster's
-status targets, and asking it to send messages to various Users. This
-means the User-to-address mapping only has to be set up once, in your
-MailNotifier, and every email message the buildbot emits will take
-advantage of it.
-
-
-File: buildbot.info, Node: IRC Nicknames, Next: Live Status Clients, Prev: Email Addresses, Up: Users
-
-3.6.3 IRC Nicknames
--------------------
-
-Like MailNotifier, the `buildbot.status.words.IRC' class provides a
-status target which can announce the results of each build. It also
-provides an interactive interface by responding to online queries
-posted in the channel or sent as private messages.
-
-   In the future, the buildbot can be configured to map User names to IRC
-nicknames, to watch for the recent presence of these nicknames, and to
-deliver build status messages to the interested parties. Like
-`MailNotifier' does for email addresses, the `IRC' object will have
-an `IRCLookup' which is responsible for nicknames. The mapping can be
-set up statically, or it can be updated by online users themselves
-(by claiming a username with some kind of "buildbot: i am user
-warner" commands).
-
- Once the mapping is established, the rest of the buildbot can ask
-the `IRC' object to send messages to various users. It can report on
-the likelihood that the user saw the given message (based upon how
-long the user has been inactive on the channel), which might prompt
-the Problem Hassler logic to send them an email message instead.
-
-
-File: buildbot.info, Node: Live Status Clients, Prev: IRC Nicknames, Up: Users
-
-3.6.4 Live Status Clients
--------------------------
-
-The Buildbot also offers a PB-based status client interface which can
-display real-time build status in a GUI panel on the developer's
-desktop. This interface is normally anonymous, but it could be
-configured to let the buildmaster know _which_ developer is using the
-status client. The status client could then be used as a
-message-delivery service, providing an alternative way to deliver
-low-latency high-interruption messages to the developer (like "hey,
-you broke the build").
-
-
-File: buildbot.info, Node: Configuration, Next: Getting Source Code Changes, Prev: Concepts, Up: Top
-
-4 Configuration
-***************
-
-The buildbot's behavior is defined by the "config file", which
-normally lives in the `master.cfg' file in the buildmaster's base
-directory (but this can be changed with an option to the `buildbot
-master' command). This file completely specifies which Builders are
-to be run, which slaves they should use, how Changes should be
-tracked, and where the status information is to be sent. The
-buildmaster's `buildbot.tac' file names the base directory;
-everything else comes from the config file.
-
- A sample config file was installed for you when you created the
-buildmaster, but you will need to edit it before your buildbot will do
-anything useful.
-
- This chapter gives an overview of the format of this file and the
-various sections in it. You will need to read the later chapters to
-understand how to fill in each section properly.
-
-* Menu:
-
-* Config File Format::
-* Loading the Config File::
-* Defining the Project::
-* Listing Change Sources and Schedulers::
-* Setting the slaveport::
-* Buildslave Specifiers::
-* Defining Builders::
-* Defining Status Targets::
-* Debug options::
-
-
-File: buildbot.info, Node: Config File Format, Next: Loading the Config File, Prev: Configuration, Up: Configuration
-
-4.1 Config File Format
-======================
-
-The config file is, fundamentally, just a piece of Python code which
-defines a dictionary named `BuildmasterConfig', with a number of keys
-that are treated specially. You don't need to know Python to do basic
-configuration, though; you can just copy the syntax of the sample
-file. If you _are_ comfortable writing Python code, however, you can
-use all the power of a full programming language to achieve more
-complicated configurations.
-
- The `BuildmasterConfig' name is the only one which matters: all
-other names defined during the execution of the file are discarded.
-When parsing the config file, the Buildmaster generally compares the
-old configuration with the new one and performs the minimum set of
-actions necessary to bring the buildbot up to date: Builders which are
-not changed are left untouched, and Builders which are modified get to
-keep their old event history.
-
- Basic Python syntax: comments start with a hash character ("#"),
-tuples are defined with `(parentheses, pairs)', arrays are defined
-with `[square, brackets]', tuples and arrays are mostly
-interchangeable. Dictionaries (data structures which map "keys" to
-"values") are defined with curly braces: `{'key1': 'value1', 'key2':
-'value2'} '. Function calls (and object instantiation) can use named
-parameters, like `w = html.Waterfall(http_port=8010)'.
-
- The config file starts with a series of `import' statements, which
-make various kinds of Steps and Status targets available for later
-use. The main `BuildmasterConfig' dictionary is created, then it is
-populated with a variety of keys. These keys are broken roughly into
-the following sections, each of which is documented in the rest of
-this chapter:
-
- * Project Definitions
-
- * Change Sources / Schedulers
-
- * Slaveport
-
- * Buildslave Configuration
-
- * Builders / Interlocks
-
- * Status Targets
-
- * Debug options
-
- The config file can use a few names which are placed into its
-namespace:
-
-`basedir'
- the base directory for the buildmaster. This string has not been
- expanded, so it may start with a tilde. It needs to be expanded
- before use. The config file is located in
- `os.path.expanduser(os.path.join(basedir, 'master.cfg'))'
-
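-   Pulling those pieces together, a bare-bones `master.cfg' has
-roughly the following shape. Treat this as a sketch: the slaveport
-and buildslave keys shown here are described in the later sections.
-
-     c = BuildmasterConfig = {}
-     c['projectName'] = "Example Project"
-     c['projectURL'] = "http://example.org/"
-     c['buildbotURL'] = "http://buildbot.example.org:8010/"
-     c['bots'] = [("slavename", "slavepassword")]
-     c['slavePortnum'] = 9989
-     c['sources'] = []
-     c['schedulers'] = []
-     c['builders'] = []
-     c['status'] = []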
-
-
-File: buildbot.info, Node: Loading the Config File, Next: Defining the Project, Prev: Config File Format, Up: Configuration
-
-4.2 Loading the Config File
-===========================
-
-The config file is only read at specific points in time. It is first
-read when the buildmaster is launched. Once it is running, there are
-various ways to ask it to reload the config file. If you are on the
-system hosting the buildmaster, you can send a `SIGHUP' signal to it:
-the `buildbot' tool has a shortcut for this:
-
- buildbot sighup BASEDIR
-
- The debug tool (`buildbot debugclient --master HOST:PORT') has a
-"Reload .cfg" button which will also trigger a reload. In the future,
-there will be other ways to accomplish this step (probably a
-password-protected button on the web page, as well as a privileged IRC
-command).
-
-
-File: buildbot.info, Node: Defining the Project, Next: Listing Change Sources and Schedulers, Prev: Loading the Config File, Up: Configuration
-
-4.3 Defining the Project
-========================
-
-There are a couple of basic settings that you use to tell the buildbot
-what project it is working on. This information is used by status
-reporters to let users find out more about the codebase being
-exercised by this particular Buildbot installation.
-
- c['projectName'] = "Buildbot"
- c['projectURL'] = "http://buildbot.sourceforge.net/"
- c['buildbotURL'] = "http://localhost:8010/"
-
-   `projectName' is a short string that will be used to describe the
-project that this buildbot is working on. For example, it is used as
-the title of the waterfall HTML page.
-
- `projectURL' is a string that gives a URL for the project as a
-whole. HTML status displays will show `projectName' as a link to
-`projectURL', to provide a link from buildbot HTML pages to your
-project's home page.
-
- The `buildbotURL' string should point to the location where the
-buildbot's internal web server (usually the `html.Waterfall' page) is
-visible. This typically uses the port number set when you create the
-`Waterfall' object: the buildbot needs your help to figure out a
-suitable externally-visible host name.
-
- When status notices are sent to users (either by email or over
-IRC), `buildbotURL' will be used to create a URL to the specific build
-or problem that they are being notified about. It will also be made
-available to queriers (over IRC) who want to find out where to get
-more information about this buildbot.
-
-
-File: buildbot.info, Node: Listing Change Sources and Schedulers, Next: Setting the slaveport, Prev: Defining the Project, Up: Configuration
-
-4.4 Listing Change Sources and Schedulers
-=========================================
-
-The `c['sources']' key is a list of ChangeSource instances(1). This
-defines how the buildmaster learns about source code changes. More
-information about what goes here is available in *Note Getting Source
-Code Changes::.
-
- import buildbot.changes.pb
- c['sources'] = [buildbot.changes.pb.PBChangeSource()]
-
- `c['schedulers']' is a list of Scheduler instances, each of which
-causes builds to be started on a particular set of Builders. The two
-basic Scheduler classes you are likely to start with are `Scheduler'
-and `Periodic', but you can write a customized subclass to implement
-more complicated build scheduling.
-
- The docstring for `buildbot.scheduler.Scheduler' is the best place
-to see all the options that can be used. Type `pydoc
-buildbot.scheduler.Scheduler' to see it, or look in
-`buildbot/scheduler.py' directly.
-
- The basic Scheduler takes four arguments:
-
-`name'
- Each Scheduler must have a unique name. This is only used in
- status displays.
-
-`branch'
- This Scheduler will pay attention to a single branch, ignoring
- Changes that occur on other branches. Setting `branch' equal to
- the special value of `None' means it should only pay attention
- to the default branch. Note that `None' is a keyword, not a
- string, so you want to use `None' and not `"None"'.
-
-`treeStableTimer'
- The Scheduler will wait for this many seconds before starting the
- build. If new changes are made during this interval, the timer
- will be restarted, so really the build will be started after a
- change and then after this many seconds of inactivity.
-
-`builderNames'
- When the tree-stable-timer finally expires, builds will be
- started on these Builders. Each Builder gets a unique name:
- these strings must match.
-
-
- from buildbot import scheduler
- quick = scheduler.Scheduler("quick", None, 60,
- ["quick-linux", "quick-netbsd"])
- full = scheduler.Scheduler("full", None, 5*60,
- ["full-linux", "full-netbsd", "full-OSX"])
- nightly = scheduler.Periodic("nightly", ["full-solaris"], 24*60*60)
- c['schedulers'] = [quick, full, nightly]
-
- In this example, the two "quick" builds are triggered 60 seconds
-after the tree has been changed. The "full" builds do not run quite
-so quickly (they wait 5 minutes), so hopefully if the quick builds
-fail due to a missing file or really simple typo, the developer can
-discover and fix the problem before the full builds are started. Both
-Schedulers only pay attention to the default branch: any changes on
-other branches are ignored by these Schedulers. Each Scheduler
-triggers a different set of Builders, referenced by name.
-
-   The third Scheduler in this example just runs the full Solaris
-build once per day. (Note that this Scheduler only lets you control
-the time between builds, not the absolute time-of-day of each Build,
-so this could easily wind up a "daily" or "every afternoon" scheduler
-depending upon when it was first activated).
-
-* Menu:
-
-* Scheduler Types::
-* Build Dependencies::
-
- ---------- Footnotes ----------
-
- (1) To be precise, it is a list of objects which all implement the
-`buildbot.interfaces.IChangeSource' Interface
-
-
-File: buildbot.info, Node: Scheduler Types, Next: Build Dependencies, Prev: Listing Change Sources and Schedulers, Up: Listing Change Sources and Schedulers
-
-4.4.1 Scheduler Types
----------------------
-
-Here is a brief catalog of the available Scheduler types. All these
-Schedulers are classes in `buildbot.scheduler', and the docstrings
-there are the best source of documentation on the arguments taken by
-each one.
-
-`Scheduler'
- This is the default Scheduler class. It follows exactly one
- branch, and starts a configurable tree-stable-timer after each
- change on that branch. When the timer expires, it starts a build
- on some set of Builders. The Scheduler accepts a
- `fileIsImportant' function which can be used to ignore some
- Changes if they do not affect any "important" files.
-
-`AnyBranchScheduler'
- This scheduler uses a tree-stable-timer like the default one, but
- follows multiple branches at once. Each branch gets a separate
- timer.
-
-`Dependent'
- This scheduler watches an "upstream" Builder. When that Builder
- successfully builds a particular set of Changes, it triggers
- builds of the same code on a configured set of "downstream"
- builders. The next section (*note Build Dependencies::)
- describes this scheduler in more detail.
-
-`Periodic'
- This simple scheduler just triggers a build every N seconds.
-
-`Nightly'
-     This is a highly configurable periodic build scheduler, which
- triggers a build at particular times of day, week, month, or
- year. The configuration syntax is very similar to the well-known
- `crontab' format, in which you provide values for minute, hour,
- day, and month (some of which can be wildcards), and a build is
- triggered whenever the current time matches the given
- constraints. This can run a build every night, every morning,
-     every weekend, alternate Thursdays, on your boss's birthday, etc.
-     (a brief sketch appears after this list).
-
-`Try_Jobdir / Try_Userpass'
- This scheduler allows developers to use the `buildbot try'
- command to trigger builds of code they have not yet committed.
- See *Note try:: for complete details.
-
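-   As a sketch only (the `Periodic' arguments follow the example in
-the previous section, while the `Nightly' keyword-argument names are
-assumptions that should be checked against the `buildbot.scheduler'
-docstrings), a `Periodic' and a `Nightly' scheduler might be
-configured like this:
-
-     from buildbot import scheduler
-     hourly = scheduler.Periodic("hourly", ["full-linux"], 60*60)
-     nightly = scheduler.Nightly("nightly", ["full-solaris"],
-                                 hour=3, minute=0)
-     c['schedulers'] = [hourly, nightly]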
-
-
-File: buildbot.info, Node: Build Dependencies, Prev: Scheduler Types, Up: Listing Change Sources and Schedulers
-
-4.4.2 Build Dependencies
-------------------------
-
-It is common to wind up with one kind of build which should only be
-performed if the same source code was successfully handled by some
-other kind of build first. An example might be a packaging step: you
-might only want to produce .deb or RPM packages from a tree that was
-known to compile successfully and pass all unit tests. You could put
-the packaging step in the same Build as the compile and testing steps,
-but there might be other reasons to not do this (in particular you
-might have several Builders worth of compiles/tests, but only wish to
-do the packaging once). Another example is if you want to skip the
-"full" builds after a failing "quick" build of the same source code.
-Or, if one Build creates a product (like a compiled library) that is
-used by some other Builder, you'd want to make sure the consuming
-Build is run _after_ the producing one.
-
- You can use `Dependencies' to express this relationship to the
-Buildbot. There is a special kind of Scheduler named
-`scheduler.Dependent' that will watch an "upstream" Scheduler for
-builds to complete successfully (on all of its Builders). Each time
-that happens, the same source code (i.e. the same `SourceStamp') will
-be used to start a new set of builds, on a different set of Builders.
-This "downstream" scheduler doesn't pay attention to Changes at all,
-it only pays attention to the upstream scheduler.
-
- If the SourceStamp fails on any of the Builders in the upstream
-set, the downstream builds will not fire.
-
- from buildbot import scheduler
- tests = scheduler.Scheduler("tests", None, 5*60,
- ["full-linux", "full-netbsd", "full-OSX"])
- package = scheduler.Dependent("package",
- tests, # upstream scheduler
- ["make-tarball", "make-deb", "make-rpm"])
- c['schedulers'] = [tests, package]
-
- Note that `Dependent''s upstream scheduler argument is given as a
-`Scheduler' _instance_, not a name. This makes it impossible to
-create circular dependencies in the config file.
-
-
-File: buildbot.info, Node: Setting the slaveport, Next: Buildslave Specifiers, Prev: Listing Change Sources and Schedulers, Up: Configuration
-
-4.5 Setting the slaveport
-=========================
-
-The buildmaster will listen on a TCP port of your choosing for
-connections from buildslaves. It can also use this port for
-connections from remote Change Sources, status clients, and debug
-tools. This port should be visible to the outside world, and you'll
-need to tell your buildslave admins about your choice.
-
-   It does not matter which port you pick, as long as it is externally
-visible; however, you should probably use something larger than 1024,
-since most operating systems don't allow non-root processes to bind to
-low-numbered ports. If your buildmaster is behind a firewall or a NAT
-box of some sort, you may have to configure your firewall to permit
-inbound connections to this port.
-
- c['slavePortnum'] = 10000
-
- `c['slavePortnum']' is a _strports_ specification string, defined
-in the `twisted.application.strports' module (try `pydoc
-twisted.application.strports' to get documentation on the format).
-This means that you can have the buildmaster listen on a
-localhost-only port by doing:
-
- c['slavePortnum'] = "tcp:10000:interface=127.0.0.1"
-
- This might be useful if you only run buildslaves on the same
-machine, and they are all configured to contact the buildmaster at
-`localhost:10000'.
-
-
-File: buildbot.info, Node: Buildslave Specifiers, Next: Defining Builders, Prev: Setting the slaveport, Up: Configuration
-
-4.6 Buildslave Specifiers
-=========================
-
-The `c['bots']' key is a list of known buildslaves. Each buildslave
-is defined by a tuple of (slavename, slavepassword). These are the
-same two values that need to be provided to the buildslave
-administrator when they create the buildslave.
-
- c['bots'] = [('bot-solaris', 'solarispasswd'),
- ('bot-bsd', 'bsdpasswd'),
- ]
-
- The slavenames must be unique, of course. The password exists to
-prevent evildoers from interfering with the buildbot by inserting
-their own (broken) buildslaves into the system and thus displacing the
-real ones.
-
- Buildslaves with an unrecognized slavename or a non-matching
-password will be rejected when they attempt to connect, and a message
-describing the problem will be put in the log file (see *Note
-Logfiles::).
-
-
-File: buildbot.info, Node: Defining Builders, Next: Defining Status Targets, Prev: Buildslave Specifiers, Up: Configuration
-
-4.7 Defining Builders
-=====================
-
-The `c['builders']' key is a list of dictionaries which specify the
-Builders. The Buildmaster runs a collection of Builders, each of
-which handles a single type of build (e.g. full versus quick), on a
-single build slave. A Buildbot which makes sure that the latest code
-("HEAD") compiles correctly across four separate architecture will
-have four Builders, each performing the same build but on different
-slaves (one per platform).
-
- Each Builder gets a separate column in the waterfall display. In
-general, each Builder runs independently (although various kinds of
-interlocks can cause one Builder to have an effect on another).
-
- Each Builder specification dictionary has several required keys:
-
-`name'
- This specifies the Builder's name, which is used in status
- reports.
-
-`slavename'
- This specifies which buildslave will be used by this Builder.
- `slavename' must appear in the `c['bots']' list. Each buildslave
-     can accommodate multiple Builders.
-
-`slavenames'
- If you provide `slavenames' instead of `slavename', you can give
- a list of buildslaves which are capable of running this Builder.
- If multiple buildslaves are available for any given Builder, you
- will have some measure of redundancy: in case one slave goes
- offline, the others can still keep the Builder working. In
- addition, multiple buildslaves will allow multiple simultaneous
- builds for the same Builder, which might be useful if you have a
- lot of forced or "try" builds taking place.
-
- If you use this feature, it is important to make sure that the
- buildslaves are all, in fact, capable of running the given
- build. The slave hosts should be configured similarly, otherwise
- you will spend a lot of time trying (unsuccessfully) to
- reproduce a failure that only occurs on some of the buildslaves
- and not the others. Different platforms, operating systems,
- versions of major programs or libraries, all these things mean
- you should use separate Builders.
-
-`builddir'
- This specifies the name of a subdirectory (under the base
- directory) in which everything related to this builder will be
- placed. On the buildmaster, this holds build status information.
- On the buildslave, this is where checkouts, compiles, and tests
- are run.
-
-`factory'
- This is a `buildbot.process.factory.BuildFactory' instance which
- controls how the build is performed. Full details appear in
- their own chapter, *Note Build Process::. Parameters like the
- location of the CVS repository and the compile-time options used
- for the build are generally provided as arguments to the
- factory's constructor.
-
-
- Other optional keys may be set on each Builder:
-
-`category'
- If provided, this is a string that identifies a category for the
- builder to be a part of. Status clients can limit themselves to a
- subset of the available categories. A common use for this is to
- add new builders to your setup (for a new module, or for a new
- buildslave) that do not work correctly yet and allow you to
- integrate them with the active builders. You can put these new
- builders in a test category, make your main status clients
- ignore them, and have only private status clients pick them up.
- As soon as they work, you can move them over to the active
- category.
-
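-   Putting these keys together, a single Builder entry might look like
-the following sketch (the repository location, names, and factory
-steps are placeholders; see *Note Build Process:: for the factory
-details):
-
-     from buildbot.process import step, factory
-     from buildbot.process.factory import s
-
-     f = factory.BuildFactory([
-         s(step.CVS,
-           cvsroot=":pserver:anonymous@cvs.example.org:/cvsroot/project",
-           cvsmodule="project"),
-         s(step.Compile, command="make all"),
-         s(step.Test, command="make test"),
-         ])
-     c['builders'] = [
-         {'name': "full-linux",
-          'slavename': "bot-linux",
-          'builddir': "full-linux",
-          'factory': f,
-          'category': "stable",   # optional
-         },
-         ]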
-
-
-File: buildbot.info, Node: Defining Status Targets, Next: Debug options, Prev: Defining Builders, Up: Configuration
-
-4.8 Defining Status Targets
-===========================
-
-The Buildmaster has a variety of ways to present build status to
-various users. Each such delivery method is a "Status Target" object
-in the configuration's `status' list. To add status targets, you just
-append more objects to this list:
-
- c['status'] = []
-
- from buildbot.status import html
- c['status'].append(html.Waterfall(http_port=8010))
-
- from buildbot.status import mail
- m = mail.MailNotifier(fromaddr="buildbot@localhost",
- extraRecipients=["builds@lists.example.com"],
- sendToInterestedUsers=False)
- c['status'].append(m)
-
- from buildbot.status import words
- c['status'].append(words.IRC(host="irc.example.com", nick="bb",
- channels=["#example"]))
-
- Status delivery has its own chapter, *Note Status Delivery::, in
-which all the built-in status targets are documented.
-
-
-File: buildbot.info, Node: Debug options, Prev: Defining Status Targets, Up: Configuration
-
-4.9 Debug options
-=================
-
-If you set `c['debugPassword']', then you can connect to the
-buildmaster with the diagnostic tool launched by `buildbot
-debugclient MASTER:PORT'. From this tool, you can reload the config
-file, manually force builds, and inject changes, which may be useful
-for testing your buildmaster without actually committing changes to
-your repository (or before you have the Change Sources set up). The
-debug tool uses the same port number as the slaves do:
-`c['slavePortnum']', and is authenticated with this password.
-
- c['debugPassword'] = "debugpassword"
-
- If you set `c['manhole']' to an instance of the
-`buildbot.master.Manhole' class, you can telnet into the buildmaster
-and get an interactive Python shell, which may be useful for
-debugging buildbot internals. It is probably only useful for buildbot
-developers. It exposes full access to the buildmaster's account
-(including the ability to modify and delete files), so it should not
-be enabled with a weak or easily guessable password.
-
- The `Manhole' instance can be configured to listen on a specific
-port. You may wish to have this listening port bind to the loopback
-interface (sometimes known as "lo0", "localhost", or 127.0.0.1) to
-restrict access to clients which are running on the same host.
-
- from buildbot.master import Manhole
- c['manhole'] = Manhole("tcp:9999:interface=127.0.0.1", "admin", "password")
-
- To have the `Manhole' listen on all interfaces, use `"tcp:9999"'.
-This port specification uses `twisted.application.strports', so you
-can make it listen on SSL or even UNIX-domain sockets if you want.
-
-
-File: buildbot.info, Node: Getting Source Code Changes, Next: Build Process, Prev: Configuration, Up: Top
-
-5 Getting Source Code Changes
-*****************************
-
-The most common way to use the Buildbot is centered around the idea of
-`Source Trees': a directory tree filled with source code of some form
-which can be compiled and/or tested. Some projects use languages that
-don't involve any compilation step: nevertheless there may be a
-`build' phase where files are copied or rearranged into a form that
-is suitable for installation. Some projects do not have unit tests,
-and the Buildbot is merely helping to make sure that the sources can
-compile correctly. But in all of these cases, the thing-being-tested
-is a single source tree.
-
-   A Version Control System maintains a source tree, and tells the
-buildmaster when it changes. The first step of each Build is typically
-to acquire a copy of some version of this tree.
-
- This chapter describes how the Buildbot learns about what Changes
-have occurred. For more information on VC systems and Changes, see
-*Note Version Control Systems::.
-
-* Menu:
-
-* Change Sources::
-
-
-File: buildbot.info, Node: Change Sources, Prev: Getting Source Code Changes, Up: Getting Source Code Changes
-
-5.1 Change Sources
-==================
-
-Each Buildmaster watches a single source tree. Changes can be provided
-by a variety of ChangeSource types; however, any given project will
-typically have only a single ChangeSource active. This section
-provides a description of all available ChangeSource types and
-explains how to set up each of them.
-
- There are a variety of ChangeSources available, some of which are
-meant to be used in conjunction with other tools to deliver Change
-events from the VC repository to the buildmaster.
-
- * CVSToys This ChangeSource opens a TCP connection from the
- buildmaster to a waiting FreshCVS daemon that lives on the
- repository machine, and subscribes to hear about Changes.
-
- * MaildirSource This one watches a local maildir-format inbox for
- email sent out by the repository when a change is made. When a
- message arrives, it is parsed to create the Change object. A
-     variety of parsing functions are available to accommodate
- different email-sending tools.
-
- * PBChangeSource This ChangeSource listens on a local TCP socket
- for inbound connections from a separate tool. Usually, this tool
- would be run on the VC repository machine in a commit hook. It
- is expected to connect to the TCP socket and send a Change
- message over the network connection. The `buildbot sendchange'
- command is one example of a tool that knows how to send these
- messages, so you can write a commit script for your VC system
- that calls it to deliver the Change. There are other tools in
- the contrib/ directory that use the same protocol.
-
-
- As a quick guide, here is a list of VC systems and the
-ChangeSources that might be useful with them. All of these
-ChangeSources are in the `buildbot.changes' module.
-
-`CVS'
- * freshcvs.FreshCVSSource (connected via TCP to the freshcvs
- daemon)
-
- * mail.FCMaildirSource (watching for email sent by a freshcvs
- daemon)
-
- * mail.BonsaiMaildirSource (watching for email sent by Bonsai)
-
- * mail.SyncmailMaildirSource (watching for email sent by
- syncmail)
-
- * pb.PBChangeSource (listening for connections from `buildbot
- sendchange' run in a loginfo script)
-
- * pb.PBChangeSource (listening for connections from a
- long-running `contrib/viewcvspoll.py' polling process which
-       examines the ViewCVS database directly)
-
-`SVN'
- * pb.PBChangeSource (listening for connections from
- `contrib/svn_buildbot.py' run in a postcommit script)
-
- * pb.PBChangeSource (listening for connections from a
- long-running `contrib/svn_watcher.py' or
-       `contrib/svnpoller.py' polling process)
-
-`Darcs'
- * pb.PBChangeSource (listening for connections from `buildbot
-       sendchange' in a commit script)
-
-`Mercurial'
- * pb.PBChangeSource (listening for connections from
- `contrib/hg_buildbot.py' run in an 'incoming' hook)
-
-`Arch/Bazaar'
- * pb.PBChangeSource (listening for connections from
- `contrib/arch_buildbot.py' run in a commit hook)
-
-
- All VC systems can be driven by a PBChangeSource and the `buildbot
-sendchange' tool run from some form of commit script. If you write
-an email parsing function, they can also all be driven by a suitable
-`MaildirSource'.
-
-* Menu:
-
-* Choosing ChangeSources::
-* CVSToys - PBService::
-* CVSToys - mail notification::
-* Other mail notification ChangeSources::
-* PBChangeSource::
-
-
-File: buildbot.info, Node: Choosing ChangeSources, Next: CVSToys - PBService, Prev: Change Sources, Up: Change Sources
-
-5.1.1 Choosing ChangeSources
-----------------------------
-
-The `master.cfg' configuration file has a dictionary key named
-`BuildmasterConfig['sources']', which holds a list of `IChangeSource'
-objects. The config file will typically create an object from one of
-the classes described below and stuff it into the list.
-
- s = FreshCVSSourceNewcred(host="host", port=4519,
- user="alice", passwd="secret",
- prefix="Twisted")
- BuildmasterConfig['sources'] = [s]
-
- Each source tree has a nominal `top'. Each Change has a list of
-filenames, which are all relative to this top location. The
-ChangeSource is responsible for doing whatever is necessary to
-accomplish this. Most sources have a `prefix' argument: a partial
-pathname which is stripped from the front of all filenames provided to
-that `ChangeSource'. Files which are outside this sub-tree are
-ignored by the changesource: it does not generate Changes for those
-files.
-
-
-File: buildbot.info, Node: CVSToys - PBService, Next: CVSToys - mail notification, Prev: Choosing ChangeSources, Up: Change Sources
-
-5.1.2 CVSToys - PBService
--------------------------
-
-The CVSToys (http://purl.net/net/CVSToys) package provides a server
-which runs on the machine that hosts the CVS repository it watches.
-It has a variety of ways to distribute commit notifications, and
-offers a flexible regexp-based way to filter out uninteresting
-changes. One of the notification options is named `PBService' and
-works by listening on a TCP port for clients. These clients subscribe
-to hear about commit notifications.
-
- The buildmaster has a CVSToys-compatible `PBService' client built
-in. There are two versions of it, one for old versions of CVSToys
-(1.0.9 and earlier) which used the `oldcred' authentication
-framework, and one for newer versions (1.0.10 and later) which use
-`newcred'. Both are classes in the `buildbot.changes.freshcvs'
-package.
-
- `FreshCVSSourceNewcred' objects are created with the following
-parameters:
-
-``host' and `port''
- these specify where the CVSToys server can be reached
-
-``user' and `passwd''
- these specify the login information for the CVSToys server
- (`freshcvs'). These must match the server's values, which are
- defined in the `freshCfg' configuration file (which lives in the
- CVSROOT directory of the repository).
-
-``prefix''
- this is the prefix to be found and stripped from filenames
- delivered by the CVSToys server. Most projects live in
- sub-directories of the main repository, as siblings of the
- CVSROOT sub-directory, so typically this prefix is set to that
- top sub-directory name.
-
-
-Example
-=======
-
-To set up the freshCVS server, add a statement like the following to
-your `freshCfg' file:
-
- pb = ConfigurationSet([
- (None, None, None, PBService(userpass=('foo', 'bar'), port=4519)),
- ])
-
- This will announce all changes to a client which connects to port
-4519 using a username of 'foo' and a password of 'bar'.
-
- Then add a clause like this to your buildmaster's `master.cfg':
-
- BuildmasterConfig['sources'] = [FreshCVSSource("cvs.example.com", 4519,
- "foo", "bar",
- prefix="glib/")]
-
- where "cvs.example.com" is the host that is running the FreshCVS
-daemon, and "glib" is the top-level directory (relative to the
-repository's root) where all your source code lives. Most projects
-keep one or more projects in the same repository (along with CVSROOT/
-to hold admin files like loginfo and freshCfg); the prefix= argument
-tells the buildmaster to ignore everything outside that directory,
-and to strip that common prefix from all pathnames it handles.
-
-
-File: buildbot.info, Node: CVSToys - mail notification, Next: Other mail notification ChangeSources, Prev: CVSToys - PBService, Up: Change Sources
-
-5.1.3 CVSToys - mail notification
----------------------------------
-
-CVSToys also provides a `MailNotification' action which will send
-email to a list of recipients for each commit. This tends to work
-better than using `/bin/mail' from within the CVSROOT/loginfo file
-directly, as CVSToys will batch together all files changed during the
-same CVS invocation, and can provide more information (like creating
-a ViewCVS URL for each file changed).
-
- The Buildbot's `FCMaildirSource' is a ChangeSource which knows how
-to parse these CVSToys messages and turn them into Change objects.
-It watches a Maildir for new messages. The usual installation
-process looks like this:
-
- 1. Create a mailing list, `projectname-commits'.
-
- 2. In CVSToys' freshCfg file, use a `MailNotification' action to
- send commit mail to this mailing list.
-
- 3. Subscribe the buildbot user to the mailing list.
-
- 4. Configure your .qmail or .forward file to deliver these messages
- into a maildir.
-
- 5. In the Buildbot's master.cfg file, use a `FCMaildirSource' to
- watch the maildir for commit messages.
-
- The `FCMaildirSource' is created with two parameters: the
-directory name of the maildir root, and the prefix to strip.
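-
-   A sketch of the corresponding `master.cfg' clause (the maildir path
-and prefix are placeholders; check the exact argument names against
-`buildbot.changes.mail'):
-
-     from buildbot.changes.mail import FCMaildirSource
-     c['sources'] = [FCMaildirSource("~/maildir-buildbot",
-                                     prefix="project")]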
-
-
-File: buildbot.info, Node: Other mail notification ChangeSources, Next: PBChangeSource, Prev: CVSToys - mail notification, Up: Change Sources
-
-5.1.4 Other mail notification ChangeSources
--------------------------------------------
-
-There are other types of maildir-watching ChangeSources, which only
-differ in the function used to parse the message body.
-
- `SyncmailMaildirSource' knows how to parse the message format used
-in mail sent by Syncmail.
-
- `BonsaiMaildirSource' parses messages sent out by Bonsai.
-
-
-File: buildbot.info, Node: PBChangeSource, Prev: Other mail notification ChangeSources, Up: Change Sources
-
-5.1.5 PBChangeSource
---------------------
-
-The last kind of ChangeSource actually listens on a TCP port for
-clients to connect and push change notices _into_ the Buildmaster.
-This is used by the built-in `buildbot sendchange' notification tool,
-as well as the VC-specific `contrib/svn_buildbot.py' and
-`contrib/arch_buildbot.py' tools. These tools are run by the
-repository (in a commit hook script), and connect to the buildmaster
-directly each time a file is committed. This is also useful for
-creating new kinds of change sources that work on a `push' model
-instead of some kind of subscription scheme, for example a script
-which is run out of an email .forward file.
-
- This ChangeSource can be configured to listen on its own TCP port,
-or it can share the port that the buildmaster is already using for the
-buildslaves to connect. (This is possible because the
-`PBChangeSource' uses the same protocol as the buildslaves, and they
-can be distinguished by the `username' attribute used when the
-initial connection is established). It might be useful to have it
-listen on a different port if, for example, you wanted to establish
-different firewall rules for that port. You could allow only the SVN
-repository machine access to the `PBChangeSource' port, while
-allowing only the buildslave machines access to the slave port. Or you
-could just expose one port and run everything over it. _Note: this
-feature is not yet implemented; the PBChangeSource will always share
-the slave port and will always have a `user' name of `change', and a
-passwd of `changepw'. These limitations will be removed in the
-future._
-
- The `PBChangeSource' is created with the following arguments:
-
-``port''
- which port to listen on. If `None' (which is the default), it
- shares the port used for buildslave connections. _Not
- Implemented, always set to `None'_.
-
-``user' and `passwd''
- the user/passwd account information that the client program must
- use to connect. Defaults to `change' and `changepw'. _Not
- Implemented, `user' is currently always set to `change',
- `passwd' is always set to `changepw'_.
-
-``prefix''
- the prefix to be found and stripped from filenames delivered
- over the connection.
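-
-   A minimal configuration sketch (per the note above, `port',
-`user', and `passwd' are currently fixed, so only `prefix' is worth
-setting; the prefix value here is a placeholder):
-
-     from buildbot.changes.pb import PBChangeSource
-     c['sources'] = [PBChangeSource(prefix="trunk")]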
-
-
-File: buildbot.info, Node: Build Process, Next: Status Delivery, Prev: Getting Source Code Changes, Up: Top
-
-6 Build Process
-***************
-
-A `Build' object is responsible for actually performing a build. It
-gets access to a remote `SlaveBuilder' where it may run commands, and
-a `BuildStatus' object where it must emit status events. The `Build'
-is created by the Builder's `BuildFactory'.
-
- The default `Build' class is made up of a fixed sequence of
-`BuildSteps', executed one after another until all are complete (or
-one of them indicates that the build should be halted early). The
-default `BuildFactory' creates instances of this `Build' class with a
-list of `BuildSteps', so the basic way to configure the build is to
-provide a list of `BuildSteps' to your `BuildFactory'.
-
- More complicated `Build' subclasses can make other decisions:
-execute some steps only if certain files were changed, or if certain
-previous steps passed or failed. The base class has been written to
-allow users to express basic control flow without writing code, but
-you can always subclass and customize to achieve more specialized
-behavior.
-
-* Menu:
-
-* Build Steps::
-* Interlocks::
-* Build Factories::
-
-
-File: buildbot.info, Node: Build Steps, Next: Interlocks, Prev: Build Process, Up: Build Process
-
-6.1 Build Steps
-===============
-
-`BuildStep's are usually specified in the buildmaster's configuration
-file, in a list of "step specifications" that is used to create the
-`BuildFactory'. These "step specifications" are not actual steps, but
-rather a tuple of the `BuildStep' subclass to be created and a
-dictionary of arguments. (the actual `BuildStep' instances are not
-created until the Build is started, so that each Build gets an
-independent copy of each BuildStep). There is a convenience function
-named "`s'" in the `buildbot.process.factory' module for creating
-these specification tuples. It allows you to create a
-`BuildFactory'-ready list like this:
-
- from buildbot.process import step, factory
- from buildbot.process.factory import s
-
- steps = [s(step.SVN, svnurl="http://svn.example.org/Trunk/"),
- s(step.ShellCommand, command=["make", "all"]),
- s(step.ShellCommand, command=["make", "test"]),
- ]
- f = factory.BuildFactory(steps)
-
- The rest of this section lists all the standard BuildStep objects
-available for use in a Build, and the parameters which can be used to
-control each.
-
-* Menu:
-
-* Common Parameters::
-* Source Checkout::
-* ShellCommand::
-* Simple ShellCommand Subclasses::
-
-
-File: buildbot.info, Node: Common Parameters, Next: Source Checkout, Prev: Build Steps, Up: Build Steps
-
-6.1.1 Common Parameters
------------------------
-
-The standard `Build' runs a series of `BuildStep's in order, only
-stopping when it runs out of steps or if one of them requests that
-the build be halted. It collects status information from each one to
-create an overall build status (of SUCCESS, WARNINGS, or FAILURE).
-
- All BuildSteps accept some common parameters. Some of these control
-how their individual status affects the overall build. Others are used
-to specify which `Locks' (see *note Interlocks::) should be acquired
-before allowing the step to run.
-
- Arguments common to all `BuildStep' subclasses:
-
-`name'
- the name used to describe the step on the status display. It is
- also used to give a name to any LogFiles created by this step.
-
-`haltOnFailure'
- if True, a FAILURE of this build step will cause the build to
- halt immediately with an overall result of FAILURE.
-
-`flunkOnWarnings'
- when True, a WARNINGS or FAILURE of this build step will mark the
- overall build as FAILURE. The remaining steps will still be
- executed.
-
-`flunkOnFailure'
- when True, a FAILURE of this build step will mark the overall
- build as a FAILURE. The remaining steps will still be executed.
-
-`warnOnWarnings'
- when True, a WARNINGS or FAILURE of this build step will mark the
- overall build as having WARNINGS. The remaining steps will still
- be executed.
-
-`warnOnFailure'
- when True, a FAILURE of this build step will mark the overall
- build as having WARNINGS. The remaining steps will still be
- executed.
-
-`locks'
- a list of Locks (instances of `buildbot.locks.SlaveLock' or
- `buildbot.locks.MasterLock') that should be acquired before
- starting this Step. The Locks will be released when the step is
- complete. Note that this is a list of actual Lock instances, not
- names. Also note that all Locks must have unique names.
-
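-   For example, a step specification using a couple of these common
-parameters might look like the following sketch (the lock name and
-command are placeholders):
-
-     from buildbot import locks
-     from buildbot.process import step
-     from buildbot.process.factory import s
-
-     compile_lock = locks.SlaveLock("compile")
-     compile_step = s(step.ShellCommand,
-                      name="compile",
-                      command=["make", "all"],
-                      haltOnFailure=True,
-                      locks=[compile_lock])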
-
-
-File: buildbot.info, Node: Source Checkout, Next: ShellCommand, Prev: Common Parameters, Up: Build Steps
-
-6.1.2 Source Checkout
----------------------
-
-The first step of any build is typically to acquire the source code
-from which the build will be performed. There are several classes to
-handle this, one for each of the different source control systems that
-Buildbot knows about. For a description of how Buildbot treats source
-control in general, see *Note Version Control Systems::.
-
- All source checkout steps accept some common parameters to control
-how they get the sources and where they should be placed. The
-remaining per-VC-system parameters are mostly to specify where
-exactly the sources are coming from.
-
-`mode'
- a string describing the kind of VC operation that is desired.
- Defaults to `update'.
-
- `update'
- specifies that the CVS checkout/update should be performed
- directly into the workdir. Each build is performed in the
- same directory, allowing for incremental builds. This
- minimizes disk space, bandwidth, and CPU time. However, it
- may encounter problems if the build process does not handle
- dependencies properly (sometimes you must do a "clean
- build" to make sure everything gets compiled), or if source
- files are deleted but generated files can influence test
- behavior (e.g. python's .pyc files), or when source
- directories are deleted but generated files prevent CVS
- from removing them. Builds ought to be correct regardless
- of whether they are done "from scratch" or incrementally,
- but it is useful to test both kinds: this mode exercises the
- incremental-build style.
-
- `copy'
- specifies that the CVS workspace should be maintained in a
- separate directory (called the 'copydir'), using checkout
- or update as necessary. For each build, a new workdir is
- created with a copy of the source tree (rm -rf workdir; cp
- -r copydir workdir). This doubles the disk space required,
- but keeps the bandwidth low (update instead of a full
- checkout). A full 'clean' build is performed each time. This
- avoids any generated-file build problems, but is still
- occasionally vulnerable to CVS problems such as a
- repository being manually rearranged, causing CVS errors on
- update which are not an issue with a full checkout.
-
- `clobber'
-         specifies that the working directory should be deleted each
- time, necessitating a full checkout for each build. This
-         ensures a clean build from a complete checkout, avoiding any
- of the problems described above. This mode exercises the
- "from-scratch" build style.
-
- `export'
- this is like `clobber', except that the 'cvs export'
- command is used to create the working directory. This
- command removes all CVS metadata files (the CVS/
- directories) from the tree, which is sometimes useful for
- creating source tarballs (to avoid including the metadata
- in the tar file).
-
-`workdir'
- like all Steps, this indicates the directory where the build
- will take place. Source Steps are special in that they perform
- some operations outside of the workdir (like creating the
- workdir itself).
-
-`alwaysUseLatest'
- if True, bypass the usual "update to the last Change" behavior,
- and always update to the latest changes instead.
-
-`retry'
- If set, this specifies a tuple of `(delay, repeats)' which means
- that when a full VC checkout fails, it should be retried up to
- REPEATS times, waiting DELAY seconds between attempts. If you
- don't provide this, it defaults to `None', which means VC
- operations should not be retried. This is provided to make life
- easier for buildslaves which are stuck behind poor network
- connections.
-
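-   A sketch showing these common parameters on an `SVN' step (the
-repository URL is a placeholder; the VC-specific arguments are
-described on the following pages):
-
-     from buildbot.process import step
-     from buildbot.process.factory import s
-     checkout = s(step.SVN, mode="clobber",
-                  svnurl="http://svn.example.com/repos/trunk",
-                  alwaysUseLatest=False,
-                  retry=(60, 2))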
-
- My habit as a developer is to do a `cvs update' and `make' each
-morning. Problems can occur, either because of bad code being checked
-in, or by incomplete dependencies causing a partial rebuild to fail
-where a complete from-scratch build might succeed. A quick Builder
-which emulates this incremental-build behavior would use the
-`mode='update'' setting.
-
- On the other hand, other kinds of dependency problems can cause a
-clean build to fail where a partial build might succeed. This
-frequently results from a link step that depends upon an object file
-that was removed from a later version of the tree: in the partial
-tree, the object file is still around (even though the Makefiles no
-longer know how to create it).
-
- "official" builds (traceable builds performed from a known set of
-source revisions) are always done as clean builds, to make sure they are
-not influenced by any uncontrolled factors (like leftover files from a
-previous build). A "full" Builder which behaves this way would want
-to use the `mode='clobber'' setting.
-
- Each VC system has a corresponding source checkout class: their
-arguments are described on the following pages.
-
-* Menu:
-
-* CVS::
-* SVN::
-* Darcs::
-* Mercurial::
-* Arch::
-* Bazaar::
-* P4Sync::
-
-
-File: buildbot.info, Node: CVS, Next: SVN, Prev: Source Checkout, Up: Source Checkout
-
-6.1.2.1 CVS
-...........
-
-The `CVS' build step performs a CVS (http://www.nongnu.org/cvs/)
-checkout or update. It takes the following arguments:
-
-`cvsroot'
- (required): specify the CVSROOT value, which points to a CVS
- repository, probably on a remote machine. For example, the
- cvsroot value you would use to get a copy of the Buildbot source
- code is
- `:pserver:anonymous@cvs.sourceforge.net:/cvsroot/buildbot'
-
-`cvsmodule'
- (required): specify the cvs `module', which is generally a
- subdirectory of the CVSROOT. The cvsmodule for the Buildbot
- source code is `buildbot'.
-
-`branch'
- a string which will be used in a `-r' argument. This is most
- useful for specifying a branch to work on. Defaults to `HEAD'.
-
-`global_options'
- a list of flags to be put before the verb in the CVS command.
-
-`checkoutDelay'
- if set, the number of seconds to put between the timestamp of
- the last known Change and the value used for the `-D' option.
- Defaults to half of the parent Build's treeStableTimer.
-
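-   A sketch of a `CVS' checkout step for the Buildbot source itself,
-using the cvsroot and cvsmodule values quoted above:
-
-     from buildbot.process import step
-     from buildbot.process.factory import s
-     checkout = s(step.CVS,
-                  cvsroot=":pserver:anonymous@cvs.sourceforge.net:/cvsroot/buildbot",
-                  cvsmodule="buildbot",
-                  mode="update")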
-
-
-File: buildbot.info, Node: SVN, Next: Darcs, Prev: CVS, Up: Source Checkout
-
-6.1.2.2 SVN
-...........
-
-The `SVN' build step performs a Subversion
-(http://subversion.tigris.org) checkout or update. There are two
-basic ways of setting up the checkout step, depending upon whether
-you are using multiple branches or not.
-
- If all of your builds use the same branch, then you should create
-the `SVN' step with the `svnurl' argument:
-
-`svnurl'
- (required): this specifies the `URL' argument that will be given
- to the `svn checkout' command. It dictates both where the
- repository is located and which sub-tree should be extracted. In
- this respect, it is like a combination of the CVS `cvsroot' and
- `cvsmodule' arguments. For example, if you are using a remote
- Subversion repository which is accessible through HTTP at a URL
- of `http://svn.example.com/repos', and you wanted to check out
- the `trunk/calc' sub-tree, you would use
- `svnurl="http://svn.example.com/repos/trunk/calc"' as an argument
- to your `SVN' step.
-
- If, on the other hand, you are building from multiple branches,
-then you should create the `SVN' step with the `baseURL' and
-`defaultBranch' arguments instead:
-
-`baseURL'
- (required): this specifies the base repository URL, to which a
- branch name will be appended. It should probably end in a slash.
-
-`defaultBranch'
- this specifies the name of the branch to use when a Build does
- not provide one of its own. This will be appended to `baseURL' to
- create the string that will be passed to the `svn checkout'
- command.
-
- If you are using branches, you must also make sure your
-`ChangeSource' will report the correct branch names.
-
-branch example
-==============
-
-Let's suppose that the "MyProject" repository uses branches for the
-trunk, for various users' individual development efforts, and for
-several new features that will require some amount of work (involving
-multiple developers) before they are ready to merge onto the trunk.
-Such a repository might be organized as follows:
-
- svn://svn.example.org/MyProject/trunk
- svn://svn.example.org/MyProject/branches/User1/foo
- svn://svn.example.org/MyProject/branches/User1/bar
- svn://svn.example.org/MyProject/branches/User2/baz
- svn://svn.example.org/MyProject/features/newthing
- svn://svn.example.org/MyProject/features/otherthing
-
- Further assume that we want the Buildbot to run tests against the
-trunk and against all the feature branches (i.e., do a
-checkout/compile/build of branch X when a file has been changed on
-branch X, when X is in the set [trunk, features/newthing,
-features/otherthing]). We do not want the Buildbot to automatically
-build any of the user branches, but it should be willing to build a
-user branch when explicitly requested (most likely by the user who
-owns that branch).
-
-   There are three things that need to be set up to accommodate this
-system. The first is a ChangeSource that is capable of identifying the
-branch which owns any given file. This depends upon a user-supplied
-function, in an external program that runs in the SVN commit hook and
-connects to the buildmaster's `PBChangeSource' over a TCP connection.
-(you can use the "`buildbot sendchange'" utility for this purpose,
-but you will still need an external program to decide what value
-should be passed to the `--branch=' argument). For example, a change
-to a file with the SVN url of
-"svn://svn.example.org/MyProject/features/newthing/src/foo.c" should
-be broken down into a Change instance with
-`branch='features/newthing'' and `file='src/foo.c''.
-
- The second piece is an `AnyBranchScheduler' which will pay
-attention to the desired branches. It will not pay attention to the
-user branches, so it will not automatically start builds in response
-to changes there. The AnyBranchScheduler class requires you to
-explicitly list all the branches you want it to use, but it would not
-be difficult to write a subclass which used
-`branch.startswith('features/')' to remove the need for this explicit
-list. Or, if you want to build user branches too, you can use
-AnyBranchScheduler with `branches=None' to indicate that you want it
-to pay attention to all branches.
-
- The third piece is an `SVN' checkout step that is configured to
-handle the branches correctly, with a `baseURL' value that matches
-the way the ChangeSource splits each file's URL into base, branch,
-and file.
-
- from buildbot.changes.pb import PBChangeSource
- from buildbot.scheduler import AnyBranchScheduler
- from buildbot.process import step, factory
- from buildbot.process.factory import s
-
- c['sources'] = [PBChangeSource()]
- s1 = AnyBranchScheduler('main',
- ['trunk', 'features/newthing', 'features/otherthing'],
- 10*60, ['test-i386', 'test-ppc'])
- c['schedulers'] = [s1]
- source = s(step.SVN, mode='update',
- baseURL='svn://svn.example.org/MyProject/',
- defaultBranch='trunk')
- f = factory.BuildFactory([source,
- s(step.Compile, command="make all"),
- s(step.Test, command="make test")])
- c['builders'] = [
- {'name':'test-i386', 'slavename':'bot-i386', 'builddir':'test-i386',
- 'factory':f },
- {'name':'test-ppc', 'slavename':'bot-ppc', 'builddir':'test-ppc',
- 'factory':f },
- ]
-
- In this example, when a change arrives with a `branch' attribute
-of "trunk", the resulting build will have an SVN step that
-concatenates "svn://svn.example.org/MyProject/" (the baseURL) with
-"trunk" (the branch name) to get the correct svn command. If the
-"newthing" branch has a change to "src/foo.c", then the SVN step will
-concatenate "svn://svn.example.org/MyProject/" with
-"features/newthing" to get the svnurl for checkout.
-
-
-File: buildbot.info, Node: Darcs, Next: Mercurial, Prev: SVN, Up: Source Checkout
-
-6.1.2.3 Darcs
-.............
-
-The `Darcs' build step performs a Darcs
-(http://abridgegame.org/darcs/) checkout or update.
-
- Like *Note SVN::, this step can either be configured to always
-check out a specific tree, or set up to pull from a particular branch
-that gets specified separately for each build. Also like SVN, the
-repository URL given to Darcs is created by concatenating a `baseURL'
-with the branch name, and if no particular branch is requested, it
-uses a `defaultBranch'. The only difference in usage is that each
-potential Darcs repository URL must point to a fully-fledged
-repository, whereas SVN URLs usually point to sub-trees of the main
-Subversion repository. In other words, doing an SVN checkout of
-`baseURL' is legal, but silly, since you'd probably wind up with a
-copy of every single branch in the whole repository. Doing a Darcs
-checkout of `baseURL' is just plain wrong, since the parent directory
-of a collection of Darcs repositories is not itself a valid
-repository.
-
- The Darcs step takes the following arguments:
-
-`repourl'
- (required unless `baseURL' is provided): the URL at which the
- Darcs source repository is available.
-
-`baseURL'
- (required unless `repourl' is provided): the base repository URL,
- to which a branch name will be appended. It should probably end
- in a slash.
-
-`defaultBranch'
- (allowed if and only if `baseURL' is provided): this specifies
- the name of the branch to use when a Build does not provide one
- of its own. This will be appended to `baseURL' to create the
- string that will be passed to the `darcs get' command.
-
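-   A sketch of a branch-aware `Darcs' step (the base URL and default
-branch are placeholders); remember that each `baseURL' plus branch
-concatenation must point at a complete Darcs repository:
-
-     from buildbot.process import step
-     from buildbot.process.factory import s
-     checkout = s(step.Darcs,
-                  baseURL="http://darcs.example.org/repos/",
-                  defaultBranch="trunk")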
-
-File: buildbot.info, Node: Mercurial, Next: Arch, Prev: Darcs, Up: Source Checkout
-
-6.1.2.4 Mercurial
-.................
-
-The `Mercurial' build step performs a Mercurial
-(http://selenic.com/mercurial) (aka "hg") checkout or update.
-
- Branches are handled just like *Note Darcs::.
-
- The Mercurial step takes the following arguments:
-
-`repourl'
- (required unless `baseURL' is provided): the URL at which the
- Mercurial source repository is available.
-
-`baseURL'
- (required unless `repourl' is provided): the base repository URL,
- to which a branch name will be appended. It should probably end
- in a slash.
-
-`defaultBranch'
- (allowed if and only if `baseURL' is provided): this specifies
- the name of the branch to use when a Build does not provide one
- of its own. This will be appended to `baseURL' to create the
- string that will be passed to the `hg clone' command.
-
-
-File: buildbot.info, Node: Arch, Next: Bazaar, Prev: Mercurial, Up: Source Checkout
-
-6.1.2.5 Arch
-............
-
-The `Arch' build step performs an Arch (http://gnuarch.org/) checkout
-or update using the `tla' client. It takes the following arguments:
-
-`url'
- (required): this specifies the URL at which the Arch source
- archive is available.
-
-`version'
- (required): this specifies which "development line" (like a
- branch) should be used. This provides the default branch name,
- but individual builds may specify a different one.
-
-`archive'
- (optional): Each repository knows its own archive name. If this
- parameter is provided, it must match the repository's archive
- name. The parameter is accepted for compatibility with the
- `Bazaar' step, below.
-
-
-
-File: buildbot.info, Node: Bazaar, Next: P4Sync, Prev: Arch, Up: Source Checkout
-
-6.1.2.6 Bazaar
-..............
-
-`Bazaar' is an alternate implementation of the Arch VC system, which
-uses a client named `baz'. The checkout semantics are just different
-enough from `tla' that there is a separate BuildStep for it.
-
- It takes exactly the same arguments as `Arch', except that the
-`archive=' parameter is required. (baz does not emit the archive name
-when you do `baz register-archive', so we must provide it ourselves).
-
-
-File: buildbot.info, Node: P4Sync, Prev: Bazaar, Up: Source Checkout
-
-6.1.2.7 P4Sync
-..............
-
-The `P4Sync' build step performs a Perforce
-(http://www.perforce.com/) update. It is a temporary facility: a more
-complete P4 checkout step (named `P4') will eventually replace it.
-This step requires significant manual setup on each build slave. It
-takes the following arguments.
-
-`p4port'
- (required): the host:port string describing how to get to the P4
- Depot (repository), used as the P4PORT environment variable for
- all p4 commands
-
-
-File: buildbot.info, Node: ShellCommand, Next: Simple ShellCommand Subclasses, Prev: Source Checkout, Up: Build Steps
-
-6.1.3 ShellCommand
-------------------
-
-This is a useful base class for just about everything you might want
-to do during a build (except for the initial source checkout). It runs
-a single command in a child shell on the buildslave. All stdout/stderr
-is recorded into a LogFile. The step finishes with a status of FAILURE
-if the command's exit code is non-zero, otherwise it has a status of
-SUCCESS.
-
- The preferred way to specify the command is with a list of argv
-strings, since this allows for spaces in filenames and avoids doing
-any fragile shell-escaping. You can also specify the command with a
-single string, in which case the string is given to '/bin/sh -c
-COMMAND' for parsing.
-
- All ShellCommands are run by default in the "workdir", which
-defaults to the "`build'" subdirectory of the slave builder's base
-directory. The absolute path of the workdir will thus be the slave's
-basedir (set as an option to `buildbot slave', *note Creating a
-buildslave::) plus the builder's basedir (set in the builder's
-`builddir' key in master.cfg) plus the workdir itself (a
-class-level attribute of the BuildFactory, defaults to "`build'").
-
- `ShellCommand' arguments:
-
-`command'
- a list of strings (preferred) or single string (discouraged)
- which specifies the command to be run
-
-`env'
- a dictionary of environment strings which will be added to the
- child command's environment.
-
-`want_stdout'
- if False, stdout from the child process is discarded rather than
- being sent to the buildmaster for inclusion in the step's
- LogFile.
-
-`want_stderr'
- like `want_stdout' but for stderr. Note that commands run through
- a PTY do not have separate stdout/stderr streams: both are
- merged into stdout.
-
-`timeout'
- if the command fails to produce any output for this many
- seconds, it is assumed to be locked up and will be killed.
-
-`description'
- This will be used to describe the command (on the Waterfall
- display) while the command is still running. It should be a
- single imperfect-tense verb, like "compiling" or "testing".
-
-`descriptionDone'
- This will be used to describe the command once it has finished. A
- simple noun like "compile" or "tests" should be used.
-
- If neither `description' nor `descriptionDone' are set, the
- actual command arguments will be used to construct the
- description. This may be a bit too wide to fit comfortably on
- the Waterfall display.
-
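-   A sketch of a `ShellCommand' step specification using several of
-these arguments (the command, environment, and descriptions are
-placeholders):
-
-     from buildbot.process import step
-     from buildbot.process.factory import s
-     docs = s(step.ShellCommand,
-              command=["make", "docs"],
-              env={'LANG': 'C'},
-              timeout=1200,
-              description="building docs",
-              descriptionDone="docs")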
-
-
-File: buildbot.info, Node: Simple ShellCommand Subclasses, Prev: ShellCommand, Up: Build Steps
-
-6.1.4 Simple ShellCommand Subclasses
-------------------------------------
-
-Several subclasses of ShellCommand are provided as starting points for
-common build steps. These are all very simple: they just override a
-few parameters so you don't have to specify them yourself, making the
-master.cfg file less verbose.
-
-* Menu:
-
-* Configure::
-* Compile::
-* Test::
-* Writing New BuildSteps::
-* Build Properties::
-
-
-File: buildbot.info, Node: Configure, Next: Compile, Prev: Simple ShellCommand Subclasses, Up: Simple ShellCommand Subclasses
-
-6.1.4.1 Configure
-.................
-
-This is intended to handle the `./configure' step from autoconf-style
-projects, or the `perl Makefile.PL' step from perl MakeMaker.pm-style
-modules. The default command is `./configure' but you can change this
-by providing a `command=' parameter.
-
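-   For example, a perl MakeMaker-style project might override the
-command like this (a sketch; the command itself is a placeholder):
-
-     from buildbot.process import step
-     from buildbot.process.factory import s
-     configure = s(step.Configure, command=["perl", "Makefile.PL"])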
-
-File: buildbot.info, Node: Compile, Next: Test, Prev: Configure, Up: Simple ShellCommand Subclasses
-
-6.1.4.2 Compile
-...............
-
-This is meant to handle compiling or building a project written in C.
-The default command is `make all'. When the compile is finished, the
-log file is scanned for GCC error/warning messages and a summary log
-is created with any problems that were seen (TODO: the summary is not
-yet created).
-
-
-File: buildbot.info, Node: Test, Next: Writing New BuildSteps, Prev: Compile, Up: Simple ShellCommand Subclasses
-
-6.1.4.3 Test
-............
-
-This is meant to handle unit tests. The default command is `make
-test', and the `warnOnFailure' flag is set.
-
-
-File: buildbot.info, Node: Writing New BuildSteps, Next: Build Properties, Prev: Test, Up: Simple ShellCommand Subclasses
-
-6.1.4.4 Writing New BuildSteps
-..............................
-
-While it is a good idea to keep your build process self-contained in
-the source code tree, sometimes it is convenient to put more
-intelligence into your Buildbot configuration. One way to do this is
-to write a custom BuildStep. Once written, this Step can be used in
-the `master.cfg' file.
-
- The best reason for writing a custom BuildStep is to better parse
-the results of the command being run. For example, a BuildStep that
-knows about JUnit could look at the logfiles to determine which tests
-had been run, how many passed and how many failed, and then report
-more detailed information than a simple `rc==0' -based "good/bad"
-decision.
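-
-   As a rough sketch (this assumes the `createSummary' and
-`addCompleteLog' hooks of `ShellCommand'; the "FAILED" marker is a
-placeholder for whatever your test tool actually prints), such a step
-might look like:
-
-     from buildbot.process import step
-
-     class CountingTest(step.ShellCommand):
-         name = "test"
-         command = ["make", "test"]
-
-         def createSummary(self, log):
-             # collect lines that mention a failed test into a
-             # separate "failures" logfile on the status display
-             failed = [line for line in log.getText().splitlines()
-                       if "FAILED" in line]
-             if failed:
-                 self.addCompleteLog("failures", "\n".join(failed))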
-
- TODO: add more description of BuildSteps.
-
-
-File: buildbot.info, Node: Build Properties, Prev: Writing New BuildSteps, Up: Simple ShellCommand Subclasses
-
-6.1.4.5 Build Properties
-........................
-
-Each build has a set of "Build Properties", which can be used by its
-BuildSteps to modify their actions. For example, the SVN revision
-number of the source code being built is available as a build
-property, and a ShellCommand step could incorporate this number into a
-command which creates a numbered release tarball.
-
- Some build properties are set when the build starts, such as the
-SourceStamp information. Other properties can be set by BuildSteps as
-they run, for example the various Source steps will set the
-`got_revision' property to the source revision that was actually
-checked out (which can be useful when the SourceStamp in use merely
-requested the "latest revision": `got_revision' will tell you what
-was actually built).
-
- In custom BuildSteps, you can get and set the build properties with
-the `getProperty'/`setProperty' methods. Each takes a string for the
-name of the property, and returns or accepts an arbitrary(1) object.
-For example:
-
- class MakeTarball(step.ShellCommand):
- def start(self):
- self.setCommand(["tar", "czf",
- "build-%s.tar.gz" % self.getProperty("revision"),
- "source"])
- step.ShellCommand.start(self)
-
- You can use build properties in ShellCommands by using the
-`WithProperties' wrapper when setting the arguments of the
-ShellCommand. This interpolates the named build properties into the
-generated shell command.
-
- from buildbot.process.step import ShellCommand, WithProperties
-
- s(ShellCommand,
- command=["tar", "czf",
- WithProperties("build-%s.tar.gz", "revision"),
- "source"],
- )
-
- If this BuildStep were used in a tree obtained from Subversion, it
-would create a tarball with a name like `build-1234.tar.gz'.
-
- The `WithProperties' function does `printf'-style string
-interpolation, using strings obtained by calling
-`build.getProperty(propname)'. Note that for every `%s' (or `%d',
-etc), you must have exactly one additional argument to indicate which
-build property you want to insert.
-
- You can also use python dictionary-style string interpolation by
-using the `%(propname)s' syntax. In this form, the property name goes
-in the parentheses, and WithProperties takes _no_ additional
-arguments:
-
- s(ShellCommand,
- command=["tar", "czf",
- WithProperties("build-%(revision)s.tar.gz"),
- "source"],
- )
-
- Don't forget the extra "s" after the closing parenthesis! This is
-the cause of many confusing errors.
-
- Note that, like python, you can either do positional-argument
-interpolation _or_ keyword-argument interpolation, not both. Thus you
-cannot use a string like `WithProperties("foo-%(revision)s-%s",
-"branch")'.
-
- At the moment, the only way to set build properties is by writing a
-custom BuildStep.
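-
-   For example, a sketch of such a step (the `tarball' property name is
-purely illustrative):
-
-     class RecordTarball(step.ShellCommand):
-         def start(self):
-             # store a serializable value under a made-up property name
-             self.setProperty("tarball",
-                              "build-%s.tar.gz" % self.getProperty("revision"))
-             step.ShellCommand.start(self)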
-
-Common Build Properties
-=======================
-
-The following build properties are set when the build is started, and
-are available to all steps.
-
-`branch'
- This comes from the build's SourceStamp, and describes which
- branch is being checked out. This will be `None' (which
- interpolates into `WithProperties' as an empty string) if the
- build is on the default branch, which is generally the trunk.
- Otherwise it will be a string like "branches/beta1.4". The exact
- syntax depends upon the VC system being used.
-
-`revision'
- This also comes from the SourceStamp, and is the revision of the
- source code tree that was requested from the VC system. When a
- build is requested of a specific revision (as is generally the
- case when the build is triggered by Changes), this will contain
- the revision specification. The syntax depends upon the VC
- system in use: for SVN it is an integer, for Mercurial it is a
- short string, for Darcs it is a rather large string, etc.
-
- If the "force build" button was pressed, the revision will be
- `None', which means to use the most recent revision available.
- This is a "trunk build". This will be interpolated as an empty
- string.
-
-`got_revision'
- This is set when a Source step checks out the source tree, and
- provides the revision that was actually obtained from the VC
- system. In general this should be the same as `revision',
- except for trunk builds, where `got_revision' indicates what
- revision was current when the checkout was performed. This can
- be used to rebuild the same source code later.
-
- Note that for some VC systems (Darcs in particular), the
- revision is a large string containing newlines, and is not
- suitable for interpolation into a filename.
-
-`buildername'
- This is a string that indicates which Builder the build was a
- part of. The combination of buildername and buildnumber
-     uniquely identifies a build.
-
-`buildnumber'
- Each build gets a number, scoped to the Builder (so the first
- build performed on any given Builder will have a build number of
- 0). This integer property contains the build's number.
-
-`slavename'
- This is a string which identifies which buildslave the build is
- running on.
-
-
- ---------- Footnotes ----------
-
- (1) Build properties are serialized along with the build results,
-so they must be serializable. For this reason, the value of any build
-property should be simple inert data: strings, numbers, lists,
-tuples, and dictionaries. They should not contain class instances.
-
-
-File: buildbot.info, Node: Interlocks, Next: Build Factories, Prev: Build Steps, Up: Build Process
-
-6.2 Interlocks
-==============
-
-For various reasons, you may want to prevent certain Steps (or perhaps
-entire Builds) from running simultaneously. Limited CPU speed or
-network bandwidth to the VC server, problems with simultaneous access
-to a database server used by unit tests, or multiple Builds which
-access shared state may all require some kind of interlock to prevent
-corruption, confusion, or resource overload.
-
- `Locks' are the mechanism used to express these kinds of
-constraints on when Builds or Steps can be run. There are two kinds of
-`Locks', each with their own scope: `SlaveLock's are scoped to a
-single buildslave, while `MasterLock' instances are scoped to the
-buildbot as a whole. Each `Lock' is created with a unique name.
-
- To use a lock, simply include it in the `locks=' argument of the
-`BuildStep' object that should obtain the lock before it runs. This
-argument accepts a list of `Lock' objects: the Step will acquire all
-of them before it runs.
-
- To claim a lock for the whole Build, add a `'locks'' key to the
-builder specification dictionary with the same list of `Lock'
-objects. (This is the dictionary that has the `'name'',
-`'slavename'', `'builddir'', and `'factory'' keys). The `Build'
-object also accepts a `locks=' argument, but unless you are writing
-your own `BuildFactory' subclass then it will be easier to set the
-locks in the builder dictionary.
-
- Note that there are no partial-acquire or partial-release
-semantics: this prevents deadlocks caused by two Steps each waiting
-for a lock held by the other(1). This also means that waiting to
-acquire a `Lock' can take an arbitrarily long time: if the
-buildmaster is very busy, a Step or Build which requires only one
-`Lock' may starve another that is waiting for that `Lock' plus some
-others.
-
- In the following example, we run the same build on three different
-platforms. The unit-test steps of these builds all use a common
-database server, and would interfere with each other if allowed to run
-simultaneously. The `Lock' prevents more than one of these builds
-from happening at the same time.
-
- from buildbot import locks
-     from buildbot.process import step, factory
-     from buildbot.process.factory import s
-
- db_lock = locks.MasterLock("database")
- steps = [s(step.SVN, svnurl="http://example.org/svn/Trunk"),
- s(step.ShellCommand, command="make all"),
- s(step.ShellCommand, command="make test", locks=[db_lock]),
- ]
- f = factory.BuildFactory(steps)
-     b1 = {'name': 'full1', 'slavename': 'bot-1', 'builddir': 'f1', 'factory': f}
-     b2 = {'name': 'full2', 'slavename': 'bot-2', 'builddir': 'f2', 'factory': f}
-     b3 = {'name': 'full3', 'slavename': 'bot-3', 'builddir': 'f3', 'factory': f}
- c['builders'] = [b1, b2, b3]
-
- In the next example, we have one buildslave hosting three separate
-Builders (each running tests against a different version of Python).
-The machine which hosts this buildslave is not particularly fast, so
-we want to prevent the builds from all happening at the same time. We
-use a `SlaveLock' because the builds happening on the slow slave do
-not affect builds running on other slaves, and we use the lock on the
-build as a whole because the slave is so slow that even multiple SVN
-checkouts would be taxing.
-
- from buildbot import locks
-     from buildbot.process import step, factory
-     from buildbot.process.factory import s
-
- slow_lock = locks.SlaveLock("cpu")
- source = s(step.SVN, svnurl="http://example.org/svn/Trunk")
- f22 = factory.Trial(source, trialpython=["python2.2"])
- f23 = factory.Trial(source, trialpython=["python2.3"])
- f24 = factory.Trial(source, trialpython=["python2.4"])
-     b1 = {'name': 'p22', 'slavename': 'bot-1', 'builddir': 'p22', 'factory': f22,
-           'locks': [slow_lock] }
-     b2 = {'name': 'p23', 'slavename': 'bot-1', 'builddir': 'p23', 'factory': f23,
-           'locks': [slow_lock] }
-     b3 = {'name': 'p24', 'slavename': 'bot-1', 'builddir': 'p24', 'factory': f24,
-           'locks': [slow_lock] }
- c['builders'] = [b1, b2, b3]
-
- In the last example, we use two Locks at the same time. In this
-case, we're concerned about both of the previous constraints, but
-we'll say that only the tests are computationally intensive, and that
-they have been split into those which use the database and those
-which do not. In addition, two of the Builds run on a fast machine
-which does not need to worry about the cpu lock, but which still must
-be prevented from simultaneous database access.
-
- from buildbot import locks
-     from buildbot.process import step, factory
-     from buildbot.process.factory import s
-
- db_lock = locks.MasterLock("database")
- cpu_lock = locks.SlaveLock("cpu")
- slow_steps = [s(step.SVN, svnurl="http://example.org/svn/Trunk"),
- s(step.ShellCommand, command="make all", locks=[cpu_lock]),
- s(step.ShellCommand, command="make test", locks=[cpu_lock]),
- s(step.ShellCommand, command="make db-test",
- locks=[db_lock, cpu_lock]),
- ]
-     slow_factory = factory.BuildFactory(slow_steps)
- fast_steps = [s(step.SVN, svnurl="http://example.org/svn/Trunk"),
- s(step.ShellCommand, command="make all", locks=[]),
- s(step.ShellCommand, command="make test", locks=[]),
- s(step.ShellCommand, command="make db-test",
- locks=[db_lock]),
- ]
- fast_factory = factory.BuildFactory(fast_steps)
-     b1 = {'name': 'full1', 'slavename': 'bot-slow', 'builddir': 'full1',
-           'factory': slow_factory}
-     b2 = {'name': 'full2', 'slavename': 'bot-slow', 'builddir': 'full2',
-           'factory': slow_factory}
-     b3 = {'name': 'full3', 'slavename': 'bot-fast', 'builddir': 'full3',
-           'factory': fast_factory}
-     b4 = {'name': 'full4', 'slavename': 'bot-fast', 'builddir': 'full4',
-           'factory': fast_factory}
- c['builders'] = [b1, b2, b3, b4]
-
- As a final note, remember that a unit test system which breaks when
-multiple people run it at the same time is fragile and should be
-fixed. Asking your human developers to serialize themselves when
-running unit tests will just discourage them from running the unit
-tests at all. Find a way to fix this: change the database tests to
-create a new (uniquely-named) user or table for each test run, don't
-use fixed listening TCP ports for network tests (instead listen on
-port 0 to let the kernel choose a port for you and then query the
-socket to find out what port was allocated). `MasterLock's can be
-used to accommodate broken test systems like this, but are really
-intended for other purposes: build processes that store or retrieve
-products in shared directories, or which do things that human
-developers would not (or which might slow down or break in ways that
-require human attention to deal with).
-
-   `SlaveLock's can be used to keep automated performance tests from
-interfering with each other, when there are multiple Builders all
-using the same buildslave. But they can't prevent other users from
-running CPU-intensive jobs on that host while the tests are running.
-
- ---------- Footnotes ----------
-
- (1) Also note that a clever buildmaster admin could still create
-the opportunity for deadlock: Build A obtains Lock 1, inside which
-Step A.two tries to acquire Lock 2 at the Step level. Meanwhile
-Build B obtains Lock 2, and has a Step B.two which wants to acquire
-Lock 1 at the Step level. Don't Do That.
-
-
-File: buildbot.info, Node: Build Factories, Prev: Interlocks, Up: Build Process
-
-6.3 Build Factories
-===================
-
-Each Builder is equipped with a "build factory", which is responsible
-for producing the actual `Build' objects that perform each build.
-This factory is created in the configuration file, and attached to a
-Builder through the `factory' element of its dictionary.
-
- The standard `BuildFactory' object creates `Build' objects by
-default. These Builds will each execute a collection of BuildSteps in
-a fixed sequence. Each step can affect the results of the build, but
-in general there is little intelligence to tie the different steps
-together. You can create subclasses of `Build' to implement more
-sophisticated build processes, and then use a subclass of
-`BuildFactory' (or simply set the `buildClass' attribute) to create
-instances of your new Build subclass.
-
-* Menu:
-
-* BuildStep Objects::
-* BuildFactory::
-* Process-Specific build factories::
-
-
-File: buildbot.info, Node: BuildStep Objects, Next: BuildFactory, Prev: Build Factories, Up: Build Factories
-
-6.3.1 BuildStep Objects
------------------------
-
-The steps used by these builds are all subclasses of `BuildStep'.
-The standard ones provided with Buildbot are documented later, *Note
-Build Steps::. You can also write your own subclasses to use in
-builds.
-
- The basic behavior for a `BuildStep' is to:
-
- * run for a while, then stop
-
- * possibly invoke some RemoteCommands on the attached build slave
-
- * possibly produce a set of log files
-
- * finish with a status described by one of four values defined in
- buildbot.status.builder: SUCCESS, WARNINGS, FAILURE, SKIPPED
-
- * provide a list of short strings to describe the step
-
- * define a color (generally green, orange, or red) with which the
- step should be displayed
-
- More sophisticated steps may produce additional information and
-provide it to later build steps, or store it in the factory to provide
-to later builds.
-
-
-File: buildbot.info, Node: BuildFactory, Next: Process-Specific build factories, Prev: BuildStep Objects, Up: Build Factories
-
-6.3.2 BuildFactory
-------------------
-
-The default `BuildFactory', provided in the
-`buildbot.process.factory' module, is constructed with a list of
-"BuildStep specifications": a list of `(step_class, kwargs)' tuples
-for each. When asked to create a Build, it loads the list of steps
-into the new Build object. When the Build is actually started, these
-step specifications are used to create the actual set of BuildSteps,
-which are then executed one at a time. For example, a build which
-consists of a CVS checkout followed by a `make build' would be
-constructed as follows:
-
- from buildbot.process import step, factory
-     from buildbot.process.factory import s
- # s is a convenience function, defined with:
- # def s(steptype, **kwargs): return (steptype, kwargs)
-
- f = factory.BuildFactory([s(step.CVS,
- cvsroot=CVSROOT, cvsmodule="project",
- mode="update"),
- s(step.Compile, command=["make", "build"])])
-
- Each step can affect the build process in the following ways:
-
- * If the step's `haltOnFailure' attribute is True, then a failure
- in the step (i.e. if it completes with a result of FAILURE) will
- cause the whole build to be terminated immediately: no further
- steps will be executed. This is useful for setup steps upon
- which the rest of the build depends: if the CVS checkout or
- `./configure' process fails, there is no point in trying to
- compile or test the resulting tree.
-
- * If the `flunkOnFailure' or `flunkOnWarnings' flag is set, then a
- result of FAILURE or WARNINGS will mark the build as a whole as
- FAILED. However, the remaining steps will still be executed.
- This is appropriate for things like multiple testing steps: a
- failure in any one of them will indicate that the build has
- failed, however it is still useful to run them all to completion.
-
- * Similarly, if the `warnOnFailure' or `warnOnWarnings' flag is
- set, then a result of FAILURE or WARNINGS will mark the build as
- having WARNINGS, and the remaining steps will still be executed.
- This may be appropriate for certain kinds of optional build or
- test steps. For example, a failure experienced while building
- documentation files should be made visible with a WARNINGS
- result but not be serious enough to warrant marking the whole
- build with a FAILURE.
-
-
- In addition, each Step produces its own results, may create
-logfiles, etc. However only the flags described above have any effect
-on the build as a whole.
-
- The pre-defined BuildSteps like `CVS' and `Compile' have
-reasonably appropriate flags set on them already. For example, without
-a source tree there is no point in continuing the build, so the `CVS'
-class has the `haltOnFailure' flag set to True. Look in
-`buildbot/process/step.py' to see how the other Steps are marked.
-
- Each Step is created with an additional `workdir' argument that
-indicates where its actions should take place. This is specified as a
-subdirectory of the slave builder's base directory, with a default
-value of `build'. This is only implemented as a step argument (as
-opposed to simply being a part of the base directory) because the
-CVS/SVN steps need to perform their checkouts from the parent
-directory.
-
-* Menu:
-
-* BuildFactory Attributes::
-* Quick builds::
-
-
-File: buildbot.info, Node: BuildFactory Attributes, Next: Quick builds, Prev: BuildFactory, Up: BuildFactory
-
-6.3.2.1 BuildFactory Attributes
-...............................
-
-Some attributes from the BuildFactory are copied into each Build.
-
-`useProgress'
- (defaults to True): if True, the buildmaster keeps track of how
- long each step takes, so it can provide estimates of how long
- future builds will take. If builds are not expected to take a
- consistent amount of time (such as incremental builds in which a
- random set of files are recompiled or tested each time), this
- should be set to False to inhibit progress-tracking.
-
-
-
-File: buildbot.info, Node: Quick builds, Prev: BuildFactory Attributes, Up: BuildFactory
-
-6.3.2.2 Quick builds
-....................
-
-The difference between a "full build" and a "quick build" is that
-quick builds are generally done incrementally, starting with the tree
-where the previous build was performed. That simply means that the
-source-checkout step should be given a `mode='update'' flag, to do
-the source update in-place.
-
- In addition to that, the `useProgress' flag should be set to
-False. Incremental builds will (or at least they ought to) compile as
-few files as necessary, so they will take an unpredictable amount of
-time to run. Therefore it would be misleading to claim to predict how
-long the build will take.
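-
-   A sketch of such a quick-build factory, reusing the `s()' helper and
-the CVSROOT placeholder from the BuildFactory example earlier:
-
-     quick = factory.BuildFactory([s(step.CVS,
-                                     cvsroot=CVSROOT, cvsmodule="project",
-                                     mode="update"),
-                                   s(step.Compile,
-                                     command=["make", "build"])])
-     quick.useProgress = False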
-
-
-File: buildbot.info, Node: Process-Specific build factories, Prev: BuildFactory, Up: Build Factories
-
-6.3.3 Process-Specific build factories
---------------------------------------
-
-Many projects use one of a few popular build frameworks to simplify
-the creation and maintenance of Makefiles or other compilation
-structures. Buildbot provides several pre-configured BuildFactory
-subclasses which let you build these projects with a minimum of fuss.
-
-* Menu:
-
-* GNUAutoconf::
-* CPAN::
-* Python distutils::
-* Python/Twisted/trial projects::
-
-
-File: buildbot.info, Node: GNUAutoconf, Next: CPAN, Prev: Process-Specific build factories, Up: Process-Specific build factories
-
-6.3.3.1 GNUAutoconf
-...................
-
-GNU Autoconf (http://www.gnu.org/software/autoconf/) is a software
-portability tool, intended to make it possible to write programs in C
-(and other languages) which will run on a variety of UNIX-like
-systems. Most GNU software is built using autoconf. It is frequently
-used in combination with GNU automake. These tools both encourage a
-build process which usually looks like this:
-
- % CONFIG_ENV=foo ./configure --with-flags
- % make all
- % make check
- # make install
-
- (except of course the Buildbot always skips the `make install'
-part).
-
- The Buildbot's `buildbot.process.factory.GNUAutoconf' factory is
-designed to build projects which use GNU autoconf and/or automake. The
-configuration environment variables, the configure flags, and command
-lines used for the compile and test are all configurable; in general
-the default values will be suitable.
-
- Example:
-
- # use the s() convenience function defined earlier
-     f = factory.GNUAutoconf(source=s(step.SVN, svnurl=URL, mode="copy"),
-                             configureFlags=["--disable-nls"])
-
- Required Arguments:
-
-`source'
- This argument must be a step specification tuple that provides a
- BuildStep to generate the source tree.
-
- Optional Arguments:
-
-`configure'
- The command used to configure the tree. Defaults to
- `./configure'. Accepts either a string or a list of shell argv
- elements.
-
-`configureEnv'
- The environment used for the initial configuration step. This
- accepts a dictionary which will be merged into the buildslave's
- normal environment. This is commonly used to provide things like
- `CFLAGS="-O2 -g"' (to turn off debug symbols during the compile).
- Defaults to an empty dictionary.
-
-`configureFlags'
- A list of flags to be appended to the argument list of the
- configure command. This is commonly used to enable or disable
- specific features of the autoconf-controlled package, like
- `["--without-x"]' to disable windowing support. Defaults to an
- empty list.
-
-`compile'
- this is a shell command or list of argv values which is used to
- actually compile the tree. It defaults to `make all'. If set to
- None, the compile step is skipped.
-
-`test'
- this is a shell command or list of argv values which is used to
- run the tree's self-tests. It defaults to `make check'. If set to
- None, the test step is skipped.
-
-
-
-File: buildbot.info, Node: CPAN, Next: Python distutils, Prev: GNUAutoconf, Up: Process-Specific build factories
-
-6.3.3.2 CPAN
-............
-
-Most Perl modules available from the CPAN (http://www.cpan.org/)
-archive use the `MakeMaker' module to provide configuration, build,
-and test services. The standard build routine for these modules looks
-like:
-
- % perl Makefile.PL
- % make
- % make test
- # make install
-
- (except again Buildbot skips the install step)
-
- Buildbot provides a `CPAN' factory to compile and test these
-projects.
-
- Arguments:
-`source'
-     (required): A step specification tuple, like that used by
-     GNUAutoconf.
-
-`perl'
- A string which specifies the `perl' executable to use. Defaults
- to just `perl'.
-
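-   A minimal sketch (URL is a placeholder for your repository, and the
-`perl5.8' value is illustrative):
-
-     f = factory.CPAN(source=s(step.SVN, svnurl=URL, mode="copy"),
-                      perl="perl5.8")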
-
-
-File: buildbot.info, Node: Python distutils, Next: Python/Twisted/trial projects, Prev: CPAN, Up: Process-Specific build factories
-
-6.3.3.3 Python distutils
-........................
-
-Most Python modules use the `distutils' package to provide
-configuration and build services. The standard build process looks
-like:
-
- % python ./setup.py build
- % python ./setup.py install
-
- Unfortunately, although Python provides a standard unit-test
-framework named `unittest', to the best of my knowledge `distutils'
-does not provide a standardized target to run such unit tests. (please
-let me know if I'm wrong, and I will update this factory).
-
- The `Distutils' factory provides support for running the build
-part of this process. It accepts the same `source=' parameter as the
-other build factories.
-
- Arguments:
-`source'
-     (required): A step specification tuple, like that used by
- GNUAutoconf.
-
-`python'
- A string which specifies the `python' executable to use. Defaults
- to just `python'.
-
-`test'
- Provides a shell command which runs unit tests. This accepts
- either a string or a list. The default value is None, which
- disables the test step (since there is no common default command
- to run unit tests in distutils modules).
-
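-   A minimal sketch (URL and the `runtests.py' script are placeholders
-for your project):
-
-     f = factory.Distutils(source=s(step.SVN, svnurl=URL, mode="copy"),
-                           python="python2.4",
-                           test=["python2.4", "./runtests.py"])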
-
-
-File: buildbot.info, Node: Python/Twisted/trial projects, Prev: Python distutils, Up: Process-Specific build factories
-
-6.3.3.4 Python/Twisted/trial projects
-.....................................
-
-Twisted provides a unit test tool named `trial' which provides a few
-improvements over Python's built-in `unittest' module. Many python
-projects which use Twisted for their networking or application
-services also use trial for their unit tests. These modules are
-usually built and tested with something like the following:
-
- % python ./setup.py build
- % PYTHONPATH=build/lib.linux-i686-2.3 trial -v PROJECTNAME.test
- % python ./setup.py install
-
- Unfortunately, the `build/lib' directory into which the
-built/copied .py files are placed is actually architecture-dependent,
-and I do not yet know of a simple way to calculate its value. For many
-projects it is sufficient to import their libraries "in place" from
-the tree's base directory (`PYTHONPATH=.').
-
- In addition, the PROJECTNAME value where the test files are
-located is project-dependent: it is usually just the project's
-top-level library directory, as common practice suggests the unit test
-files are put in the `test' sub-module. This value cannot be guessed,
-so the `Trial' class must be told where to find the test files.
-
- The `Trial' class provides support for building and testing
-projects which use distutils and trial. If the test module name is
-specified, trial will be invoked. The library path used for testing
-can also be set.
-
- One advantage of trial is that the Buildbot happens to know how to
-parse trial output, letting it identify which tests passed and which
-ones failed. The Buildbot can then provide fine-grained reports about
-how many tests have failed, when individual tests start failing after
-having passed previously, etc.
-
- Another feature of trial is that you can give it a series of source
-.py files, and it will search them for special `test-case-name' tags
-that indicate which test cases provide coverage for that file. Trial
-can then run just the appropriate tests. This is useful for quick
-builds, where you want to only run the test cases that cover the
-changed functionality.
-
- Arguments:
-`source'
- (required): A step specification tuple, like that used by
- GNUAutoconf.
-
-`buildpython'
- A list (argv array) of strings which specifies the `python'
- executable to use when building the package. Defaults to just
-     `['python']'. It may be useful to add flags here, to suppress
- warnings during compilation of extension modules. This list is
- extended with `['./setup.py', 'build']' and then executed in a
- ShellCommand.
-
-`testpath'
- Provides a directory to add to `PYTHONPATH' when running the unit
- tests, if tests are being run. Defaults to `.' to include the
- project files in-place. The generated build library is frequently
- architecture-dependent, but may simply be `build/lib' for
- pure-python modules.
-
-`trialpython'
- Another list of strings used to build the command that actually
- runs trial. This is prepended to the contents of the `trial'
- argument below. It may be useful to add `-W' flags here to
-     suppress warnings that occur while tests are being run. Defaults
- to an empty list, meaning `trial' will be run without an explicit
- interpreter, which is generally what you want if you're using
- `/usr/bin/trial' instead of, say, the `./bin/trial' that lives
- in the Twisted source tree.
-
-`trial'
- provides the name of the `trial' command. It is occasionally
- useful to use an alternate executable, such as `trial2.2' which
- might run the tests under an older version of Python. Defaults to
- `trial'.
-
-`tests'
- Provides a module name or names which contain the unit tests for
- this project. Accepts a string, typically `PROJECTNAME.test', or
- a list of strings. Defaults to None, indicating that no tests
- should be run. You must either set this or `useTestCaseNames' to
-     do anything useful with the Trial factory.
-
-`useTestCaseNames'
- Tells the Step to provide the names of all changed .py files to
- trial, so it can look for test-case-name tags and run just the
- matching test cases. Suitable for use in quick builds. Defaults
- to False.
-
-`randomly'
- If `True', tells Trial (with the `--random=0' argument) to run
- the test cases in random order, which sometimes catches subtle
- inter-test dependency bugs. Defaults to `False'.
-
-`recurse'
- If `True', tells Trial (with the `--recurse' argument) to look
- in all subdirectories for additional test cases. It isn't clear
- to me how this works, but it may be useful to deal with the
- unknown-PROJECTNAME problem described above, and is currently
-     used in the Twisted buildbot to accommodate the fact that test
- cases are now distributed through multiple
- twisted.SUBPROJECT.test directories.
-
-
-   Unless one of `tests' or `useTestCaseNames' is set, no
-tests will be run.
-
- Some quick examples follow. Most of these examples assume that the
-target python code (the "code under test") can be reached directly
-from the root of the target tree, rather than being in a `lib/'
-subdirectory.
-
- # Trial(source, tests="toplevel.test") does:
- # python ./setup.py build
- # PYTHONPATH=. trial -to toplevel.test
-
- # Trial(source, tests=["toplevel.test", "other.test"]) does:
- # python ./setup.py build
- # PYTHONPATH=. trial -to toplevel.test other.test
-
- # Trial(source, useTestCaseNames=True) does:
- # python ./setup.py build
- # PYTHONPATH=. trial -to --testmodule=foo/bar.py.. (from Changes)
-
- # Trial(source, buildpython=["python2.3", "-Wall"], tests="foo.tests"):
- # python2.3 -Wall ./setup.py build
- # PYTHONPATH=. trial -to foo.tests
-
- # Trial(source, trialpython="python2.3", trial="/usr/bin/trial",
- # tests="foo.tests") does:
- # python2.3 -Wall ./setup.py build
- # PYTHONPATH=. python2.3 /usr/bin/trial -to foo.tests
-
- # For running trial out of the tree being tested (only useful when the
- # tree being built is Twisted itself):
- # Trial(source, trialpython=["python2.3", "-Wall"], trial="./bin/trial",
- # tests="foo.tests") does:
- # python2.3 -Wall ./setup.py build
- # PYTHONPATH=. python2.3 -Wall ./bin/trial -to foo.tests
-
- If the output directory of `./setup.py build' is known, you can
-pull the python code from the built location instead of the source
-directories. This should be able to handle variations in where the
-source comes from, as well as accommodating binary extension modules:
-
- # Trial(source,tests="toplevel.test",testpath='build/lib.linux-i686-2.3')
- # does:
- # python ./setup.py build
- # PYTHONPATH=build/lib.linux-i686-2.3 trial -to toplevel.test
-
-
-File: buildbot.info, Node: Status Delivery, Next: Command-line tool, Prev: Build Process, Up: Top
-
-7 Status Delivery
-*****************
-
-More details are available in the docstrings for each class, use
-`pydoc buildbot.status.html.Waterfall' to see them. Most status
-delivery objects take a `categories=' argument, which can contain a
-list of "category" names: in this case, it will only show status for
-Builders that are in one of the named categories.
-
- (implementor's note: each of these objects should be a
-service.MultiService which will be attached to the BuildMaster object
-when the configuration is processed. They should use
-`self.parent.getStatus()' to get access to the top-level IStatus
-object, either inside `startService' or later. They may call
-`status.subscribe()' in `startService' to receive notifications of
-builder events, in which case they must define `builderAdded' and
-related methods. See the docstrings in `buildbot/interfaces.py' for
-full details.)
-
-* Menu:
-
-* HTML Waterfall::
-* IRC Bot::
-* PBListener::
-
-
-File: buildbot.info, Node: HTML Waterfall, Next: IRC Bot, Prev: Status Delivery, Up: Status Delivery
-
-7.0.1 HTML Waterfall
---------------------
-
- from buildbot.status import html
- w = html.Waterfall(http_port=8080)
- c['status'].append(w)
-
- The `buildbot.status.html.Waterfall' status target creates an HTML
-"waterfall display", which shows a time-based chart of events. This
-display provides detailed information about all steps of all recent
-builds, and provides hyperlinks to look at individual build logs and
-source changes. If the `http_port' argument is provided, it provides
-a strports specification for the port that the web server should
-listen on. This can be a simple port number, or a string like
-`tcp:8080:interface=127.0.0.1' (to limit connections to the loopback
-interface, and therefore to clients running on the same host)(1).
-
- If instead (or in addition) you provide the `distrib_port'
-argument, a twisted.web distributed server will be started either on a
-TCP port (if `distrib_port' is like `"tcp:12345"') or more likely on
-a UNIX socket (if `distrib_port' is like `"unix:/path/to/socket"').
-
- The `distrib_port' option means that, on a host with a
-suitably-configured twisted-web server, you do not need to consume a
-separate TCP port for the buildmaster's status web page. When the web
-server is constructed with `mktap web --user', URLs that point to
-`http://host/~username/' are dispatched to a sub-server that is
-listening on a UNIX socket at `~username/.twistd-web-pb'. On such a
-system, it is convenient to create a dedicated `buildbot' user, then
-set `distrib_port' to
-`"unix:"+os.path.expanduser("~/.twistd-web-pb")'. This configuration
-will make the HTML status page available at `http://host/~buildbot/'
-. Suitable URL remapping can make it appear at
-`http://host/buildbot/', and the right virtual host setup can even
-place it at `http://buildbot.host/' .
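-
-   Following that recipe, a sketch (assuming the dedicated `buildbot'
-account described above) would look like:
-
-     import os
-     from buildbot.status import html
-     w = html.Waterfall(distrib_port="unix:" +
-                        os.path.expanduser("~/.twistd-web-pb"))
-     c['status'].append(w)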
-
- Other arguments:
-
-`allowForce'
- If set to True (the default), then the web page will provide a
- "Force Build" button that allows visitors to manually trigger
- builds. This is useful for developers to re-run builds that have
- failed because of intermittent problems in the test suite, or
- because of libraries that were not installed at the time of the
- previous build. You may not wish to allow strangers to cause a
- build to run: in that case, set this to False to remove these
- buttons.
-
-`favicon'
- If set to a string, this will be interpreted as a filename
- containing a "favicon": a small image that contains an icon for
- the web site. This is returned to browsers that request the
- `favicon.ico' file, and should point to a .png or .ico image
- file. The default value uses the buildbot/buildbot.png image (a
- small hex nut) contained in the buildbot distribution. You can
- set this to None to avoid using a favicon at all.
-
-`robots_txt'
- If set to a string, this will be interpreted as a filename
- containing the contents of "robots.txt". Many search engine
- spiders request this file before indexing the site. Setting it
- to a file which contains:
- User-agent: *
- Disallow: /
- will prevent most search engines from trawling the (voluminous)
- generated status pages.
-
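-   For example, a sketch combining these arguments (the `robots.txt'
-filename is a placeholder relative to the buildmaster's base
-directory):
-
-     w = html.Waterfall(http_port=8080, allowForce=False,
-                        favicon=None, robots_txt="robots.txt")
-     c['status'].append(w)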
-
- ---------- Footnotes ----------
-
- (1) It may even be possible to provide SSL access by using a
-specification like
-`"ssl:12345:privateKey=mykey.pen:certKey=cert.pem"', but this is
-completely untested
-
-
-File: buildbot.info, Node: IRC Bot, Next: PBListener, Prev: HTML Waterfall, Up: Status Delivery
-
-7.0.2 IRC Bot
--------------
-
-The `buildbot.status.words.IRC' status target creates an IRC bot
-which will attach to certain channels and be available for status
-queries. It can also be asked to announce builds as they occur, or be
-told to shut up.
-
-     from buildbot.status import words
- irc = words.IRC("irc.example.org", "botnickname",
- channels=["channel1", "channel2"],
- password="mysecretpassword")
- c['status'].append(irc)
-
- Take a look at the docstring for `words.IRC' for more details on
-configuring this service. The `password' argument, if provided, will
-be sent to Nickserv to claim the nickname: some IRC servers will not
-allow clients to send private messages until they have logged in with
-a password.
-
- To use the service, you address messages at the buildbot, either
-normally (`botnickname: status') or with private messages (`/msg
-botnickname status'). The buildbot will respond in kind.
-
- Some of the commands currently available:
-
-`list builders'
- Emit a list of all configured builders
-
-`status BUILDER'
- Announce the status of a specific Builder: what it is doing
- right now.
-
-`status all'
- Announce the status of all Builders
-
-`watch BUILDER'
- If the given Builder is currently running, wait until the Build
- is finished and then announce the results.
-
-`last BUILDER'
- Return the results of the last build to run on the given Builder.
-
-`help COMMAND'
- Describe a command. Use `help commands' to get a list of known
- commands.
-
-`source'
- Announce the URL of the Buildbot's home page.
-
-`version'
- Announce the version of this Buildbot.
-
-   If the `allowForce=True' option was used, some additional commands
-will be available:
-
-`force build BUILDER REASON'
- Tell the given Builder to start a build of the latest code. The
- user requesting the build and REASON are recorded in the Build
- status. The buildbot will announce the build's status when it
- finishes.
-
-`stop build BUILDER REASON'
- Terminate any running build in the given Builder. REASON will be
- added to the build status to explain why it was stopped. You
- might use this if you committed a bug, corrected it right away,
- and don't want to wait for the first build (which is destined to
- fail) to complete before starting the second (hopefully fixed)
- build.
-
-
-File: buildbot.info, Node: PBListener, Prev: IRC Bot, Up: Status Delivery
-
-7.0.3 PBListener
-----------------
-
- import buildbot.status.client
- pbl = buildbot.status.client.PBListener(port=int, user=str,
- passwd=str)
- c['status'].append(pbl)
-
- This sets up a PB listener on the given TCP port, to which a
-PB-based status client can connect and retrieve status information.
-`buildbot statusgui' (*note statusgui::) is an example of such a
-status client. The `port' argument can also be a strports
-specification string.
-
-
-File: buildbot.info, Node: Command-line tool, Next: Resources, Prev: Status Delivery, Up: Top
-
-8 Command-line tool
-*******************
-
-The `buildbot' command-line tool can be used to start or stop a
-buildmaster or buildbot, and to interact with a running buildmaster.
-Some of its subcommands are intended for buildmaster admins, while
-some are for developers who are editing the code that the buildbot is
-monitoring.
-
-* Menu:
-
-* Administrator Tools::
-* Developer Tools::
-* Other Tools::
-* .buildbot config directory::
-
-
-File: buildbot.info, Node: Administrator Tools, Next: Developer Tools, Prev: Command-line tool, Up: Command-line tool
-
-8.1 Administrator Tools
-=======================
-
-The following `buildbot' sub-commands are intended for buildmaster
-administrators:
-
-master
-======
-
-This creates a new directory and populates it with files that allow it
-to be used as a buildmaster's base directory.
-
- buildbot master BASEDIR
-
-slave
-=====
-
-This creates a new directory and populates it with files that let it
-be used as a buildslave's base directory. You must provide several
-arguments, which are used to create the initial `buildbot.tac' file.
-
- buildbot slave BASEDIR MASTERHOST:PORT SLAVENAME PASSWORD
-
-start
-=====
-
-This starts a buildmaster or buildslave which was already created in
-the given base directory. The daemon is launched in the background,
-with events logged to a file named `twistd.log'.
-
- buildbot start BASEDIR
-
-stop
-====
-
-This terminates the daemon (either buildmaster or buildslave) running
-in the given directory.
-
- buildbot stop BASEDIR
-
-sighup
-======
-
-This sends a SIGHUP to the buildmaster running in the given directory,
-which causes it to re-read its `master.cfg' file.
-
- buildbot sighup BASEDIR
-
-
-File: buildbot.info, Node: Developer Tools, Next: Other Tools, Prev: Administrator Tools, Up: Command-line tool
-
-8.2 Developer Tools
-===================
-
-These tools are provided for use by the developers who are working on
-the code that the buildbot is monitoring.
-
-* Menu:
-
-* statuslog::
-* statusgui::
-* try::
-
-
-File: buildbot.info, Node: statuslog, Next: statusgui, Prev: Developer Tools, Up: Developer Tools
-
-8.2.1 statuslog
----------------
-
- buildbot statuslog --master MASTERHOST:PORT
-
- This command starts a simple text-based status client, one which
-just prints out a new line each time an event occurs on the
-buildmaster.
-
- The `--master' option provides the location of the
-`client.PBListener' status port, used to deliver build information to
-realtime status clients. The option is always in the form of a
-string, with hostname and port number separated by a colon
-(`HOSTNAME:PORTNUM'). Note that this port is _not_ the same as the
-slaveport (although a future version may allow the same port number
-to be used for both purposes).
-
- The `--master' option can also be provided by the `masterstatus'
-name in `.buildbot/options' (*note .buildbot config directory::).
-
-
-File: buildbot.info, Node: statusgui, Next: try, Prev: statuslog, Up: Developer Tools
-
-8.2.2 statusgui
----------------
-
-If you have set up a PBListener (*note PBListener::), you will be able
-to monitor your Buildbot using a simple Gtk+ application invoked with
-the `buildbot statusgui' command:
-
- buildbot statusgui --master MASTERHOST:PORT
-
- This command starts a simple Gtk+-based status client, which
-contains a few boxes for each Builder that change color as events
-occur. It uses the same `--master' argument as the `buildbot
-statuslog' command (*note statuslog::).
-
-
-File: buildbot.info, Node: try, Prev: statusgui, Up: Developer Tools
-
-8.2.3 try
----------
-
-This lets a developer ask the question "What would happen if I
-committed this patch right now?". It runs the unit test suite (across
-multiple build platforms) on the developer's current code, allowing
-them to make sure they will not break the tree when they finally
-commit their changes.
-
- The `buildbot try' command is meant to be run from within a
-developer's local tree, and starts by figuring out the base revision
-of that tree (what revision was current the last time the tree was
-updated), and a patch that can be applied to that revision of the tree
-to make it match the developer's copy. This (revision, patch) pair is
-then sent to the buildmaster, which runs a build with that
-SourceStamp. If you want, the tool will emit status messages as the
-builds run, and will not terminate until the first failure has been
-detected (or the last success).
-
- For this command to work, several pieces must be in place:
-
-TryScheduler
-============
-
-The buildmaster must have a `scheduler.Try' instance in the config
-file's `c['schedulers']' list. This lets the administrator control
-who may initiate these "trial" builds, which branches are eligible
-for trial builds, and which Builders should be used for them.
-
- The `TryScheduler' has various means to accept build requests: all
-of them enforce more security than the usual buildmaster ports do.
-Any source code being built can be used to compromise the buildslave
-accounts, but in general that code must be checked out from the VC
-repository first, so only people with commit privileges can get
-control of the buildslaves. The usual force-build control channels can
-waste buildslave time but do not allow arbitrary commands to be
-executed by people who don't have those commit privileges. However,
-the source code patch that is provided with the trial build does not
-have to go through the VC system first, so it is important to make
-sure these builds cannot be abused by a non-committer to acquire as
-much control over the buildslaves as a committer has. Ideally, only
-developers who have commit access to the VC repository would be able
-to start trial builds, but unfortunately the buildmaster does not, in
-general, have access to the VC system's user list.
-
- As a result, the `TryScheduler' requires a bit more configuration.
-There are currently two ways to set this up:
-
-*jobdir (ssh)*
- This approach creates a command queue directory, called the
- "jobdir", in the buildmaster's working directory. The buildmaster
- admin sets the ownership and permissions of this directory to
- only grant write access to the desired set of developers, all of
- whom must have accounts on the machine. The `buildbot try'
- command creates a special file containing the source stamp
- information and drops it in the jobdir, just like a standard
- maildir. When the buildmaster notices the new file, it unpacks
- the information inside and starts the builds.
-
- The config file entries used by 'buildbot try' either specify a
- local queuedir (for which write and mv are used) or a remote one
- (using scp and ssh).
-
- The advantage of this scheme is that it is quite secure, the
- disadvantage is that it requires fiddling outside the buildmaster
- config (to set the permissions on the jobdir correctly). If the
- buildmaster machine happens to also house the VC repository,
- then it can be fairly easy to keep the VC userlist in sync with
- the trial-build userlist. If they are on different machines,
- this will be much more of a hassle. It may also involve granting
- developer accounts on a machine that would not otherwise require
- them.
-
-     To implement this, the `buildbot try' command invokes 'ssh -l
-     username host buildbot tryserver ARGS', passing the patch contents
-     over stdin.
- The arguments must include the inlet directory and the revision
- information.
-
-*user+password (PB)*
- In this approach, each developer gets a username/password pair,
- which are all listed in the buildmaster's configuration file.
- When the developer runs `buildbot try', their machine connects
- to the buildmaster via PB and authenticates themselves using
- that username and password, then sends a PB command to start the
- trial build.
-
- The advantage of this scheme is that the entire configuration is
- performed inside the buildmaster's config file. The
- disadvantages are that it is less secure (while the "cred"
- authentication system does not expose the password in plaintext
- over the wire, it does not offer most of the other security
- properties that SSH does). In addition, the buildmaster admin is
- responsible for maintaining the username/password list, adding
- and deleting entries as developers come and go.
-
-
- For example, to set up the "jobdir" style of trial build, using a
-command queue directory of `MASTERDIR/jobdir' (and assuming that all
-your project developers were members of the `developers' unix group),
-you would first create that directory (with `mkdir MASTERDIR/jobdir
-MASTERDIR/jobdir/new MASTERDIR/jobdir/cur MASTERDIR/jobdir/tmp; chgrp
-developers MASTERDIR/jobdir MASTERDIR/jobdir/*; chmod g+rwx,o-rwx
-MASTERDIR/jobdir MASTERDIR/jobdir/*'), and then use the following
-scheduler in the buildmaster's config file:
-
- from buildbot.scheduler import Try_Jobdir
- s = Try_Jobdir("try1", ["full-linux", "full-netbsd", "full-OSX"],
- jobdir="jobdir")
- c['schedulers'] = [s]
-
- Note that you must create the jobdir before telling the
-buildmaster to use this configuration, otherwise you will get an
-error. Also remember that the buildmaster must be able to read and
-write to the jobdir as well. Be sure to watch the `twistd.log' file
-(*note Logfiles::) as you start using the jobdir, to make sure the
-buildmaster is happy with it.
-
- To use the username/password form of authentication, create a
-`Try_Userpass' instance instead. It takes the same `builderNames'
-argument as the `Try_Jobdir' form, but accepts an additional `port'
-argument (to specify the TCP port to listen on) and a `userpass' list
-of username/password pairs to accept. Remember to use good passwords
-for this: the security of the buildslave accounts depends upon it:
-
- from buildbot.scheduler import Try_Userpass
- s = Try_Userpass("try2", ["full-linux", "full-netbsd", "full-OSX"],
- port=8031, userpass=[("alice","pw1"), ("bob", "pw2")] )
- c['schedulers'] = [s]
-
- Like most places in the buildbot, the `port' argument takes a
-strports specification. See `twisted.application.strports' for
-details.
-
-locating the master
-===================
-
-The `try' command needs to be told how to connect to the
-`TryScheduler', and must know which of the authentication approaches
-described above is in use by the buildmaster. You specify the
-approach by using `--connect=ssh' or `--connect=pb' (or `try_connect
-= 'ssh'' or `try_connect = 'pb'' in `.buildbot/options').
-
- For the PB approach, the command must be given a `--master'
-argument (in the form HOST:PORT) that points to TCP port that you
-picked in the `Try_Userpass' scheduler. It also takes a `--username'
-and `--passwd' pair of arguments that match one of the entries in the
-buildmaster's `userpass' list. These arguments can also be provided
-as `try_master', `try_username', and `try_password' entries in the
-`.buildbot/options' file.
-
- For the SSH approach, the command must be given `--tryhost',
-`--username', and optionally `--password' (TODO: really?) to get to
-the buildmaster host. It must also be given `--trydir', which points
-to the inlet directory configured above. The trydir can be relative
-to the user's home directory, but most of the time you will use an
-explicit path like `~buildbot/project/trydir'. These arguments can be
-provided in `.buildbot/options' as `try_host', `try_username',
-`try_password', and `try_dir'.
-
- In addition, the SSH approach needs to connect to a PBListener
-status port, so it can retrieve and report the results of the build
-(the PB approach uses the existing connection to retrieve status
-information, so this step is not necessary). This requires a
-`--master' argument, or a `masterstatus' entry in `.buildbot/options',
-in the form of a HOSTNAME:PORT string.
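-
-   As a sketch, a `.buildbot/options' file for the PB approach (all
-values are placeholders) might contain:
-
-     try_connect = 'pb'
-     try_master = 'buildmaster.example.org:8031'
-     try_username = 'alice'
-     try_password = 'pw1'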
-
-choosing the Builders
-=====================
-
-A trial build is performed on multiple Builders at the same time, and
-the developer gets to choose which Builders are used (limited to a set
-selected by the buildmaster admin with the TryScheduler's
-`builderNames=' argument). The set you choose will depend upon what
-your goals are: if you are concerned about cross-platform
-compatibility, you should use multiple Builders, one from each
-platform of interest. You might use just one builder if that platform
-has libraries or other facilities that allow better test coverage than
-what you can accomplish on your own machine, or faster test runs.
-
- The set of Builders to use can be specified with multiple
-`--builder' arguments on the command line. It can also be specified
-with a single `try_builders' option in `.buildbot/options' that uses
-a list of strings to specify all the Builder names:
-
- try_builders = ["full-OSX", "full-win32", "full-linux"]
-
-specifying the VC system
-========================
-
-The `try' command also needs to know how to take the developer's
-current tree and extract the (revision, patch) source-stamp pair.
-Each VC system uses a different process, so you start by telling the
-`try' command which VC system you are using, with an argument like
-`--vc=cvs' or `--vc=tla'. This can also be provided as `try_vc' in
-`.buildbot/options'.
-
- The following names are recognized: `cvs' `svn' `baz' `tla' `hg'
-`darcs'
-
-finding the top of the tree
-===========================
-
-Some VC systems (notably CVS and SVN) track each directory
-more-or-less independently, which means the `try' command needs to
-move up to the top of the project tree before it will be able to
-construct a proper full-tree patch. To accomplish this, the `try'
-command will crawl up through the parent directories until it finds a
-marker file. The default name for this marker file is
-`.buildbot-top', so when you are using CVS or SVN you should `touch
-.buildbot-top' from the top of your tree before running `buildbot
-try'. Alternatively, you can use a filename like `ChangeLog' or
-`README', since many projects put one of these files in their
-top-most directory (and nowhere else). To set this filename, use
-`--try-topfile=ChangeLog', or set it in the options file with
-`try_topfile = 'ChangeLog''.
-
- You can also manually set the top of the tree with
-`--try-topdir=~/trees/mytree', or `try_topdir = '~/trees/mytree''. If
-you use `try_topdir', in a `.buildbot/options' file, you will need a
-separate options file for each tree you use, so it may be more
-convenient to use the `try_topfile' approach instead.
-
- Other VC systems which work on full projects instead of individual
-directories (tla, baz, darcs, monotone, mercurial) do not require
-`try' to know the top directory, so the `--try-topfile' and
-`--try-topdir' arguments will be ignored.
-
- If the `try' command cannot find the top directory, it will abort
-with an error message.
-
-determining the branch name
-===========================
-
-Some VC systems record the branch information in a way that "try" can
-locate it, in particular Arch (both `tla' and `baz'). For the others,
-if you are using something other than the default branch, you will
-have to tell the buildbot which branch your tree is using. You can do
-this with either the `--branch' argument, or a `try_branch' entry in
-the `.buildbot/options' file.
-
-determining the revision and patch
-==================================
-
-Each VC system has a separate approach for determining the tree's base
-revision and computing a patch.
-
-`CVS'
- `try' pretends that the tree is up to date. It converts the
- current time into a `-D' time specification, uses it as the base
- revision, and computes the diff between the upstream tree as of
- that point in time versus the current contents. This works, more
- or less, but requires that the local clock be in reasonably good
- sync with the repository.
-
-`SVN'
- `try' does a `svn status -u' to find the latest repository
- revision number (emitted on the last line in the "Status against
- revision: NN" message). It then performs an `svn diff -rNN' to
- find out how your tree differs from the repository version, and
- sends the resulting patch to the buildmaster. If your tree is not
- up to date, this will result in the "try" tree being created with
- the latest revision, then _backwards_ patches applied to bring it
- "back" to the version you actually checked out (plus your actual
- code changes), but this will still result in the correct tree
- being used for the build.
-
-`baz'
- `try' does a `baz tree-id' to determine the fully-qualified
- version and patch identifier for the tree
- (ARCHIVE/VERSION-patch-NN), and uses the VERSION-patch-NN
- component as the base revision. It then does a `baz diff' to
- obtain the patch.
-
-`tla'
- `try' does a `tla tree-version' to get the fully-qualified
- version identifier (ARCHIVE/VERSION), then takes the first line
- of `tla logs --reverse' to figure out the base revision. Then it
- does `tla changes --diffs' to obtain the patch.
-
-`Darcs'
- `darcs changes --context' emits a text file that contains a list
-     of all patches back to and including the last tag that was made.
-     This text file (plus the location of a repository that contains all
- these patches) is sufficient to re-create the tree. Therefore
- the contents of this "context" file _are_ the revision stamp for
- a Darcs-controlled source tree.
-
- So `try' does a `darcs changes --context' to determine what your
- tree's base revision is, and then does a `darcs diff -u' to
- compute the patch relative to that revision.
-
-`Mercurial'
- `hg identify' emits a short revision ID (basically a truncated
- SHA1 hash of the current revision's contents), which is used as
- the base revision. `hg diff' then provides the patch relative to
- that revision. For `try' to work, your working directory must
- only have patches that are available from the same
- remotely-available repository that the build process'
- `step.Mercurial' will use.
-
-
-waiting for results
-===================
-
-If you provide the `--wait' option (or `try_wait = True' in
-`.buildbot/options'), the `buildbot try' command will wait until your
-changes have either been proven good or bad before exiting. Unless
-you use the `--quiet' option (or `try_quiet=True'), it will emit a
-progress message every 60 seconds until the builds have completed.
-
-
-File: buildbot.info, Node: Other Tools, Next: .buildbot config directory, Prev: Developer Tools, Up: Command-line tool
-
-8.3 Other Tools
-===============
-
-These tools are generally used by buildmaster administrators.
-
-* Menu:
-
-* sendchange::
-* debugclient::
-
-
-File: buildbot.info, Node: sendchange, Next: debugclient, Prev: Other Tools, Up: Other Tools
-
-8.3.1 sendchange
-----------------
-
-This command is used to tell the buildmaster about source changes. It
-is intended to be used from within a commit script, installed on the
-VC server.
-
- buildbot sendchange --master MASTERHOST:PORT --username USER FILENAMES..
-
- There are other (optional) arguments which can influence the
-`Change' that gets submitted:
-
-`--branch'
- This provides the (string) branch specifier. If omitted, it
- defaults to None, indicating the "default branch". All files
- included in this Change must be on the same branch.
-
-`--revision_number'
- This provides a (numeric) revision number for the change, used
- for VC systems that use numeric transaction numbers (like
- Subversion).
-
-`--revision'
- This provides a (string) revision specifier, for VC systems that
- use strings (Arch would use something like patch-42 etc).
-
-`--revision_file'
- This provides a filename which will be opened and the contents
- used as the revision specifier. This is specifically for Darcs,
- which uses the output of `darcs changes --context' as a revision
- specifier. This context file can be a couple of kilobytes long,
-     spanning a couple of lines per patch, and would be a hassle to pass
- as a command-line argument.
-
-`--comments'
- This provides the change comments as a single argument. You may
- want to use `--logfile' instead.
-
-`--logfile'
- This instructs the tool to read the change comments from the
- given file. If you use `-' as the filename, the tool will read
- the change comments from stdin.
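-
-   For example, a commit script might invoke the tool like this (the
-username, branch, revision number, filenames, and log-message file
-shown here are only placeholders):
-
-     buildbot sendchange --master buildbot.example.org:18990 \
-         --username alice --branch trunk --revision_number 1234 \
-         --logfile - src/foo.c doc/README < logmsg.txt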
-
-
-File: buildbot.info, Node: debugclient, Prev: sendchange, Up: Other Tools
-
-8.3.2 debugclient
------------------
-
- buildbot debugclient --master MASTERHOST:PORT --passwd DEBUGPW
-
- This launches a small Gtk+/Glade-based debug tool, connecting to
-the buildmaster's "debug port". This debug port shares the same port
-number as the slaveport (*note Setting the slaveport::), but the
-`debugPort' is only enabled if you set a debug password in the
-buildmaster's config file (*note Debug options::). The `--passwd'
-option must match the `c['debugPassword']' value.
-
-   `--master' can also be provided in `.buildbot/options' by the
-`master' key. `--passwd' can be provided by the `debugPassword' key.
-
- The `Connect' button must be pressed before any of the other
-buttons will be active. This establishes the connection to the
-buildmaster. The other sections of the tool are as follows:
-
-`Reload .cfg'
- Forces the buildmaster to reload its `master.cfg' file. This is
- equivalent to sending a SIGHUP to the buildmaster, but can be
- done remotely through the debug port. Note that it is a good
- idea to be watching the buildmaster's `twistd.log' as you reload
- the config file, as any errors which are detected in the config
- file will be announced there.
-
-`Rebuild .py'
- (not yet implemented). The idea here is to use Twisted's
- "rebuild" facilities to replace the buildmaster's running code
- with a new version. Even if this worked, it would only be used
- by buildbot developers.
-
-`poke IRC'
- This locates a `words.IRC' status target and causes it to emit a
- message on all the channels to which it is currently connected.
- This was used to debug a problem in which the buildmaster lost
- the connection to the IRC server and did not attempt to
- reconnect.
-
-`Commit'
- This allows you to inject a Change, just as if a real one had
- been delivered by whatever VC hook you are using. You can set
- the name of the committed file and the name of the user who is
- doing the commit. Optionally, you can also set a revision for
- the change. If the revision you provide looks like a number, it
- will be sent as an integer, otherwise it will be sent as a
- string.
-
-`Force Build'
- This lets you force a Builder (selected by name) to start a
- build of the current source tree.
-
-`Currently'
- (obsolete). This was used to manually set the status of the given
- Builder, but the status-assignment code was changed in an
- incompatible way and these buttons are no longer meaningful.
-
-
-
-File: buildbot.info, Node: .buildbot config directory, Prev: Other Tools, Up: Command-line tool
-
-8.4 .buildbot config directory
-==============================
-
-Many of the `buildbot' tools must be told how to contact the
-buildmaster that they interact with. This specification can be
-provided as a command-line argument, but most of the time it will be
-easier to set them in an "options" file. The `buildbot' command will
-look for a special directory named `.buildbot', starting from the
-current directory (where the command was run) and crawling upwards,
-eventually looking in the user's home directory. It will look for a
-file named `options' in this directory, and will evaluate it as a
-python script, looking for certain names to be set. You can just put
-simple `name = 'value'' pairs in this file to set the options.
-
- For a description of the names used in this file, please see the
-documentation for the individual `buildbot' sub-commands. The
-following is a brief sample of what this file's contents could be.
-
- # for status-reading tools
- masterstatus = 'buildbot.example.org:12345'
- # for 'sendchange' or the debug port
- master = 'buildbot.example.org:18990'
- debugPassword = 'eiv7Po'
-
-`masterstatus'
- Location of the `client.PBListener' status port, used by
- `statuslog' and `statusgui'.
-
-`master'
- Location of the `debugPort' (for `debugclient'). Also the
- location of the `pb.PBChangeSource' (for `sendchange'). Usually
- shares the slaveport, but a future version may make it possible
- to have these listen on a separate port number.
-
-`debugPassword'
- Must match the value of `c['debugPassword']', used to protect the
- debug port, for the `debugclient' command.
-
-`username'
- Provides a default username for the `sendchange' command.
-
-
-
-File: buildbot.info, Node: Resources, Next: Developer's Appendix, Prev: Command-line tool, Up: Top
-
-9 Resources
-***********
-
-The Buildbot's home page is at `http://buildbot.sourceforge.net/'
-
- For configuration questions and general discussion, please use the
-`buildbot-devel' mailing list. The subscription instructions and
-archives are available at
-`http://lists.sourceforge.net/lists/listinfo/buildbot-devel'
-
-
-File: buildbot.info, Node: Developer's Appendix, Next: Index, Prev: Resources, Up: Top
-
-Developer's Appendix
-********************
-
-This appendix contains random notes about the implementation of the
-Buildbot, and is likely to only be of use to people intending to
-extend the Buildbot's internals.
-
- The buildmaster consists of a tree of Service objects, which is
-shaped as follows:
-
- BuildMaster
- ChangeMaster (in .change_svc)
- [IChangeSource instances]
- [IScheduler instances] (in .schedulers)
- BotMaster (in .botmaster)
- [IStatusTarget instances] (in .statusTargets)
-
- The BotMaster has a collection of Builder objects as values of its
-`.builders' dictionary.
-
-
-File: buildbot.info, Node: Index, Prev: Developer's Appendix, Up: Top
-
-Index
-*****
-
-
-* Menu:
-
-* Arch Checkout: Arch. (line 6)
-* Bazaar Checkout: Bazaar. (line 6)
-* build properties: Build Properties. (line 6)
-* Builder: Builder. (line 6)
-* BuildRequest: BuildRequest. (line 6)
-* BuildSet: BuildSet. (line 6)
-* c['bots']: Buildslave Specifiers.
- (line 6)
-* c['buildbotURL']: Defining the Project.
- (line 24)
-* c['builders']: Defining Builders. (line 6)
-* c['debugPassword']: Debug options. (line 6)
-* c['manhole']: Debug options. (line 17)
-* c['projectName']: Defining the Project.
- (line 15)
-* c['projectURL']: Defining the Project.
- (line 19)
-* c['schedulers']: Listing Change Sources and Schedulers.
- (line 14)
-* c['slavePortnum']: Setting the slaveport.
- (line 6)
-* c['sources']: Listing Change Sources and Schedulers.
- (line 6)
-* c['status']: Defining Status Targets.
- (line 11)
-* Configuration: Configuration. (line 6)
-* CVS Checkout: CVS. (line 6)
-* Darcs Checkout: Darcs. (line 6)
-* Dependencies: Build Dependencies. (line 6)
-* Dependent: Build Dependencies. (line 6)
-* installation: Installing the code.
- (line 6)
-* introduction: Introduction. (line 6)
-* IRC: IRC Bot. (line 6)
-* locks: Interlocks. (line 6)
-* logfiles: Logfiles. (line 6)
-* Mercurial Checkout: Mercurial. (line 6)
-* PBListener: PBListener. (line 6)
-* Perforce Update: P4Sync. (line 6)
-* Philosophy of operation: History and Philosophy.
- (line 6)
-* Scheduler: Schedulers. (line 6)
-* statusgui: statusgui. (line 6)
-* SVN Checkout: SVN. (line 6)
-* treeStableTimer: BuildFactory Attributes.
- (line 8)
-* Users: Users. (line 6)
-* Version Control: Version Control Systems.
- (line 6)
-* Waterfall: HTML Waterfall. (line 6)
-* WithProperties: Build Properties. (line 32)
-
-
-
-Tag Table:
-Node: Top332
-Node: Introduction3643
-Node: History and Philosophy5520
-Node: System Architecture8245
-Node: Control Flow9676
-Node: Installation12514
-Node: Requirements12829
-Node: Installing the code15063
-Node: Creating a buildmaster17013
-Node: Creating a buildslave19436
-Node: Buildslave Options24780
-Node: Launching the daemons27700
-Ref: Launching the daemons-Footnote-130567
-Node: Logfiles30742
-Node: Shutdown31281
-Node: Maintenance32214
-Node: Troubleshooting33606
-Node: Starting the buildslave33877
-Node: Connecting to the buildmaster35008
-Node: Forcing Builds36049
-Node: Concepts36799
-Node: Version Control Systems37177
-Ref: Version Control Systems-Footnote-138019
-Node: Generalizing VC Systems38165
-Ref: Generalizing VC Systems-Footnote-141627
-Node: Source Tree Specifications41848
-Ref: Source Tree Specifications-Footnote-144721
-Ref: Source Tree Specifications-Footnote-244915
-Node: How Different VC Systems Specify Sources45045
-Node: Attributes of Changes49140
-Node: Schedulers52829
-Node: BuildSet55219
-Node: BuildRequest57878
-Node: Builder58866
-Node: Users60139
-Node: Doing Things With Users61263
-Node: Email Addresses63628
-Node: IRC Nicknames65684
-Node: Live Status Clients66919
-Node: Configuration67541
-Node: Config File Format68766
-Node: Loading the Config File71141
-Node: Defining the Project71962
-Node: Listing Change Sources and Schedulers73570
-Ref: Listing Change Sources and Schedulers-Footnote-176923
-Node: Scheduler Types77040
-Node: Build Dependencies79180
-Node: Setting the slaveport81410
-Node: Buildslave Specifiers82828
-Node: Defining Builders83795
-Node: Defining Status Targets87354
-Node: Debug options88434
-Node: Getting Source Code Changes90154
-Node: Change Sources91288
-Node: Choosing ChangeSources94897
-Node: CVSToys - PBService96014
-Node: CVSToys - mail notification98774
-Node: Other mail notification ChangeSources100142
-Node: PBChangeSource100663
-Node: Build Process102998
-Node: Build Steps104198
-Node: Common Parameters105557
-Node: Source Checkout107575
-Node: CVS112802
-Node: SVN113944
-Node: Darcs119853
-Node: Mercurial121559
-Node: Arch122473
-Node: Bazaar123269
-Node: P4Sync123795
-Node: ShellCommand124352
-Node: Simple ShellCommand Subclasses126939
-Node: Configure127448
-Node: Compile127866
-Node: Test128299
-Node: Writing New BuildSteps128556
-Node: Build Properties129436
-Ref: Build Properties-Footnote-1134794
-Node: Interlocks135064
-Ref: Interlocks-Footnote-1142429
-Node: Build Factories142739
-Node: BuildStep Objects143716
-Node: BuildFactory144733
-Node: BuildFactory Attributes148246
-Node: Quick builds148908
-Node: Process-Specific build factories149644
-Node: GNUAutoconf150188
-Node: CPAN152767
-Node: Python distutils153528
-Node: Python/Twisted/trial projects154802
-Node: Status Delivery161677
-Node: HTML Waterfall162716
-Ref: HTML Waterfall-Footnote-1166046
-Node: IRC Bot166215
-Node: PBListener168694
-Node: Command-line tool169274
-Node: Administrator Tools169800
-Node: Developer Tools171034
-Node: statuslog171353
-Node: statusgui172231
-Node: try172815
-Node: Other Tools187691
-Node: sendchange187954
-Node: debugclient189635
-Node: .buildbot config directory192211
-Node: Resources194024
-Node: Developer's Appendix194445
-Node: Index195152
-
-End Tag Table
diff --git a/buildbot/buildbot-source/docs/buildbot.texinfo b/buildbot/buildbot-source/docs/buildbot.texinfo
deleted file mode 100644
index 07787d968..000000000
--- a/buildbot/buildbot-source/docs/buildbot.texinfo
+++ /dev/null
@@ -1,4825 +0,0 @@
-\input texinfo @c -*-texinfo-*-
-@c %**start of header
-@setfilename buildbot.info
-@settitle BuildBot Manual 0.7.3
-@c %**end of header
-
-@copying
-This is the BuildBot manual.
-
-Copyright (C) 2005,2006 Brian Warner
-
-Copying and distribution of this file, with or without
-modification, are permitted in any medium without royalty
-provided the copyright notice and this notice are preserved.
-
-@end copying
-
-@titlepage
-@title BuildBot
-@page
-@vskip 0pt plus 1filll
-@insertcopying
-@end titlepage
-
-@c Output the table of the contents at the beginning.
-@contents
-
-@ifnottex
-@node Top, Introduction, (dir), (dir)
-@top BuildBot
-
-@insertcopying
-@end ifnottex
-
-@menu
-* Introduction:: What the BuildBot does.
-* Installation:: Creating a buildmaster and buildslaves,
- running them.
-* Concepts:: What goes on in the buildbot's little mind.
-* Configuration:: Controlling the buildbot.
-* Getting Source Code Changes:: Discovering when to run a build.
-* Build Process:: Controlling how each build is run.
-* Status Delivery:: Telling the world about the build's results.
-* Command-line tool::
-* Resources:: Getting help.
-* Developer's Appendix::
-* Index:: Complete index.
-
-@detailmenu
- --- The Detailed Node Listing ---
-
-Introduction
-
-* History and Philosophy::
-* System Architecture::
-* Control Flow::
-
-Installation
-
-* Requirements::
-* Installing the code::
-* Creating a buildmaster::
-* Creating a buildslave::
-* Launching the daemons::
-* Logfiles::
-* Shutdown::
-* Maintenance::
-* Troubleshooting::
-
-Creating a buildslave
-
-* Buildslave Options::
-
-Troubleshooting
-
-* Starting the buildslave::
-* Connecting to the buildmaster::
-* Forcing Builds::
-
-Concepts
-
-* Version Control Systems::
-* Schedulers::
-* BuildSet::
-* BuildRequest::
-* Builder::
-* Users::
-
-Version Control Systems
-
-* Generalizing VC Systems::
-* Source Tree Specifications::
-* How Different VC Systems Specify Sources::
-* Attributes of Changes::
-
-Users
-
-* Doing Things With Users::
-* Email Addresses::
-* IRC Nicknames::
-* Live Status Clients::
-
-Configuration
-
-* Config File Format::
-* Loading the Config File::
-* Defining the Project::
-* Listing Change Sources and Schedulers::
-* Setting the slaveport::
-* Buildslave Specifiers::
-* Defining Builders::
-* Defining Status Targets::
-* Debug options::
-
-Listing Change Sources and Schedulers
-
-* Scheduler Types::
-* Build Dependencies::
-
-Getting Source Code Changes
-
-* Change Sources::
-
-Change Sources
-
-* Choosing ChangeSources::
-* CVSToys - PBService::
-* CVSToys - mail notification::
-* Other mail notification ChangeSources::
-* PBChangeSource::
-
-Build Process
-
-* Build Steps::
-* Interlocks::
-* Build Factories::
-
-Build Steps
-
-* Common Parameters::
-* Source Checkout::
-* ShellCommand::
-* Simple ShellCommand Subclasses::
-
-Source Checkout
-
-* CVS::
-* SVN::
-* Darcs::
-* Mercurial::
-* Arch::
-* Bazaar::
-* P4Sync::
-
-Simple ShellCommand Subclasses
-
-* Configure::
-* Compile::
-* Test::
-* Writing New BuildSteps::
-* Build Properties::
-
-Build Factories
-
-* BuildStep Objects::
-* BuildFactory::
-* Process-Specific build factories::
-
-BuildFactory
-
-* BuildFactory Attributes::
-* Quick builds::
-
-Process-Specific build factories
-
-* GNUAutoconf::
-* CPAN::
-* Python distutils::
-* Python/Twisted/trial projects::
-
-Status Delivery
-
-* HTML Waterfall::
-* IRC Bot::
-* PBListener::
-
-Command-line tool
-
-* Administrator Tools::
-* Developer Tools::
-* Other Tools::
-* .buildbot config directory::
-
-Developer Tools
-
-* statuslog::
-* statusgui::
-* try::
-
-Other Tools
-
-* sendchange::
-* debugclient::
-
-@end detailmenu
-@end menu
-
-@node Introduction, Installation, Top, Top
-@chapter Introduction
-
-@cindex introduction
-
-The BuildBot is a system to automate the compile/test cycle required by most
-software projects to validate code changes. By automatically rebuilding and
-testing the tree each time something has changed, build problems are
-pinpointed quickly, before other developers are inconvenienced by the
-failure. The guilty developer can be identified and harassed without human
-intervention. By running the builds on a variety of platforms, developers
-who do not have the facilities to test their changes everywhere before
-checkin will at least know shortly afterwards whether they have broken the
-build or not. Warning counts, lint checks, image size, compile time, and
-other build parameters can be tracked over time, are more visible, and
-are therefore easier to improve.
-
-The overall goal is to reduce tree breakage and provide a platform to
-run tests or code-quality checks that are too annoying or pedantic for
-any human to waste their time with. Developers get immediate (and
-potentially public) feedback about their changes, encouraging them to
-be more careful about testing before checkin.
-
-Features:
-
-@itemize @bullet
-@item
-run builds on a variety of slave platforms
-@item
-arbitrary build process: handles projects using C, Python, whatever
-@item
-minimal host requirements: python and Twisted
-@item
-slaves can be behind a firewall if they can still do checkout
-@item
-status delivery through web page, email, IRC, other protocols
-@item
-track builds in progress, provide estimated completion time
-@item
-flexible configuration by subclassing generic build process classes
-@item
-debug tools to force a new build, submit fake Changes, query slave status
-@item
-released under the GPL
-@end itemize
-
-@menu
-* History and Philosophy::
-* System Architecture::
-* Control Flow::
-@end menu
-
-
-@node History and Philosophy, System Architecture, Introduction, Introduction
-@section History and Philosophy
-
-@cindex Philosophy of operation
-
-The Buildbot was inspired by a similar project built for a development
-team writing a cross-platform embedded system. The various components
-of the project were supposed to compile and run on several flavors of
-unix (linux, solaris, BSD), but individual developers had their own
-preferences and tended to stick to a single platform. From time to
-time, incompatibilities would sneak in (some unix platforms want to
-use @code{string.h}, some prefer @code{strings.h}), and then the tree
-would compile for some developers but not others. The buildbot was
-written to automate the human process of walking into the office,
-updating a tree, compiling (and discovering the breakage), finding the
-developer at fault, and complaining to them about the problem they had
-introduced. With multiple platforms it was difficult for developers to
-do the right thing (compile their potential change on all platforms);
-the buildbot offered a way to help.
-
-Another problem was when programmers would change the behavior of a
-library without warning its users, or change internal aspects that
-other code was (unfortunately) depending upon. Adding unit tests to
-the codebase helps here: if an application's unit tests pass despite
-changes in the libraries it uses, you can have more confidence that
-the library changes haven't broken anything. Many developers
-complained that the unit tests were inconvenient or took too long to
-run: having the buildbot run them reduces the developer's workload to
-a minimum.
-
-In general, having more visibility into the project is always good,
-and automation makes it easier for developers to do the right thing.
-When everyone can see the status of the project, developers are
-encouraged to keep the tree in good working order. Unit tests that
-aren't run on a regular basis tend to suffer from bitrot just like
-code does: exercising them on a regular basis helps to keep them
-functioning and useful.
-
-The current version of the Buildbot is additionally targeted at
-distributed free-software projects, where resources and platforms are
-only available when provided by interested volunteers. The buildslaves
-are designed to require an absolute minimum of configuration, reducing
-the effort a potential volunteer needs to expend to be able to
-contribute a new test environment to the project. The goal is for
-anyone who wishes that a given project would run on their favorite
-platform should be able to offer that project a buildslave, running on
-that platform, where they can verify that their portability code
-works, and keeps working.
-
-@node System Architecture, Control Flow, History and Philosophy, Introduction
-@comment node-name, next, previous, up
-@section System Architecture
-
-The Buildbot consists of a single @code{buildmaster} and one or more
-@code{buildslaves}, connected in a star topology. The buildmaster
-makes all decisions about what and when to build. It sends commands to
-be run on the build slaves, which simply execute the commands and
-return the results. (Certain steps involve more local decision-making,
-where the overhead of sending a lot of commands back and forth would
-be inappropriate, but in general the buildmaster is responsible for
-everything.)
-
-The buildmaster is usually fed @code{Changes} by some sort of version
-control system @xref{Change Sources}, which may cause builds to be
-run. As the builds are performed, various status messages are
-produced, which are then sent to any registered Status Targets
-@xref{Status Delivery}.
-
-@ifinfo
-@smallexample
-@group
- TODO: picture of change sources, master, slaves, status targets
- should look like docs/PyCon-2003/sources/overview.svg
-@end group
-@end smallexample
-@end ifinfo
-@ifnotinfo
-@c @image{images/overview}
-@end ifnotinfo
-
-The buildmaster is configured and maintained by the ``buildmaster
-admin'', who is generally the project team member responsible for
-build process issues. Each buildslave is maintained by a ``buildslave
-admin'', who does not need to be quite as involved. Generally slaves are
-run by anyone who has an interest in seeing the project work well on
-their platform.
-
-
-@node Control Flow, , System Architecture, Introduction
-@comment node-name, next, previous, up
-@section Control Flow
-
-A day in the life of the buildbot:
-
-@itemize @bullet
-
-@item
-A developer commits some source code changes to the repository. A hook
-script or commit trigger of some sort sends information about this
-change to the buildmaster through one of its configured Change
-Sources. This notification might arrive via email, or over a network
-connection (either initiated by the buildmaster as it ``subscribes''
-to changes, or by the commit trigger as it pushes Changes towards the
-buildmaster). The Change contains information about who made the
-change, what files were modified, which revision contains the change,
-and any checkin comments.
-
-@item
-The buildmaster distributes this change to all of its configured
-Schedulers. Any ``important'' changes cause the ``tree-stable-timer''
-to be started, and the Change is added to a list of those that will go
-into a new Build. When the timer expires, a Build is started on each
-of a set of configured Builders, all compiling/testing the same source
-code. Unless configured otherwise, all Builds run in parallel on the
-various buildslaves.
-
-@item
-The Build consists of a series of Steps. Each Step causes some number
-of commands to be invoked on the remote buildslave associated with
-that Builder. The first step is almost always to perform a checkout of
-the appropriate revision from the same VC system that produced the
-Change. The rest generally perform a compile and run unit tests. As
-each Step runs, the buildslave reports back command output and return
-status to the buildmaster.
-
-@item
-As the Build runs, status messages like ``Build Started'', ``Step
-Started'', ``Build Finished'', etc, are published to a collection of
-Status Targets. One of these targets is usually the HTML ``Waterfall''
-display, which shows a chronological list of events, and summarizes
-the results of the most recent build at the top of each column.
-Developers can periodically check this page to see how their changes
-have fared. If they see red, they know that they've made a mistake and
-need to fix it. If they see green, they know that they've done their
-duty and don't need to worry about their change breaking anything.
-
-@item
-If a MailNotifier status target is active, the completion of a build
-will cause email to be sent to any developers whose Changes were
-incorporated into this Build. The MailNotifier can be configured to
-only send mail upon failing builds, or for builds which have just
-transitioned from passing to failing. Other status targets can provide
-similar real-time notification via different communication channels,
-like IRC.
-
-@end itemize
-
-
-@node Installation, Concepts, Introduction, Top
-@chapter Installation
-
-@menu
-* Requirements::
-* Installing the code::
-* Creating a buildmaster::
-* Creating a buildslave::
-* Launching the daemons::
-* Logfiles::
-* Shutdown::
-* Maintenance::
-* Troubleshooting::
-@end menu
-
-@node Requirements, Installing the code, Installation, Installation
-@section Requirements
-
-At a bare minimum, you'll need the following (for both the buildmaster
-and a buildslave):
-
-@itemize @bullet
-@item
-Python: http://www.python.org
-
-Buildbot requires python-2.2 or later, and is primarily developed
-against python-2.3. The buildmaster uses generators, a feature which
-is not available in python-2.1, and both master and slave require a
-version of Twisted which only works with python-2.2 or later. Certain
-features (like the inclusion of build logs in status emails) require
-python-2.2.2 or later. The IRC ``force build'' command requires
-python-2.3 (for the shlex.split function).
-
-@item
-Twisted: http://twistedmatrix.com
-
-Both the buildmaster and the buildslaves require Twisted-1.3.0 or
-later. It has been mainly developed against Twisted-2.0.1, but has
-been tested against Twisted-2.1.0 (the most recent as of this
-writing), and might even work on versions as old as Twisted-1.1.0, but
-as always the most recent version is recommended.
-
-Twisted-1.3.0 and earlier were released as a single monolithic
-package. When you run Buildbot against Twisted-2.0.0 or later (which
-are split into a number of smaller subpackages), you'll need at least
-"Twisted" (the core package), and you'll also want TwistedMail,
-TwistedWeb, and TwistedWords (for sending email, serving a web status
-page, and delivering build status via IRC, respectively).
-@end itemize
-
-Certain other packages may be useful on the system running the
-buildmaster:
-
-@itemize @bullet
-@item
-CVSToys: http://purl.net/net/CVSToys
-
-If your buildmaster uses FreshCVSSource to receive change notification
-from a cvstoys daemon, it will require CVSToys be installed (tested
-with CVSToys-1.0.10). If it doesn't use that source (i.e. if you
-only use a mail-parsing change source, or the SVN notification
-script), you will not need CVSToys.
-
-@end itemize
-
-And of course, your project's build process will impose additional
-requirements on the buildslaves. These hosts must have all the tools
-necessary to compile and test your project's source code.
-
-
-@node Installing the code, Creating a buildmaster, Requirements, Installation
-@section Installing the code
-
-@cindex installation
-
-The Buildbot is installed using the standard python @code{distutils}
-module. After unpacking the tarball, the process is:
-
-@example
-python setup.py build
-python setup.py install
-@end example
-
-where the install step may need to be done as root. This will put the
-bulk of the code somewhere like
-@file{/usr/lib/python2.3/site-packages/buildbot}. It will also install
-the @code{buildbot} command-line tool in @file{/usr/bin/buildbot}.
-
-To test this, shift to a different directory (like /tmp), and run:
-
-@example
-buildbot --version
-@end example
-
-If it shows you the versions of Buildbot and Twisted, the install went
-ok. If it says @code{no such command} or it gets an @code{ImportError}
-when it tries to load the libraries, then something went wrong.
-@code{pydoc buildbot} is another useful diagnostic tool.
-
-Windows users will find these files in other places. You will need to
-make sure that python can find the libraries, and will probably find
-it convenient to have @code{buildbot} on your PATH.
-
-If you wish, you can run the buildbot unit test suite like this:
-
-@example
-PYTHONPATH=. trial buildbot.test
-@end example
-
-This should run up to 192 tests, depending upon what VC tools you have
-installed. On my desktop machine it takes about five minutes to
-complete. Nothing should fail, though a few might be skipped. If any of the
-tests fail, you should stop and investigate the cause before
-continuing the installation process, as it will probably be easier to
-track down the bug early.
-
-If you cannot or do not wish to install the buildbot into a site-wide
-location like @file{/usr} or @file{/usr/local}, you can also install
-it into the account's home directory. Do the install command like
-this:
-
-@example
-python setup.py install --home=~
-@end example
-
-That will populate @file{~/lib/python} and create
-@file{~/bin/buildbot}. Make sure this lib directory is on your
-@code{PYTHONPATH}.
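-
-For example, with a Bourne-style shell you might add something like the
-following to your login script (the exact path depends on where the
-install step put the libraries):
-
-@example
-PYTHONPATH=~/lib/python; export PYTHONPATH
-@end example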
-
-
-@node Creating a buildmaster, Creating a buildslave, Installing the code, Installation
-@section Creating a buildmaster
-
-As you learned earlier (@pxref{System Architecture}), the buildmaster
-runs on a central host (usually one that is publically visible, so
-everybody can check on the status of the project), and controls all
-aspects of the buildbot system. Let us call this host
-@code{buildbot.example.org}.
-
-You may wish to create a separate user account for the buildmaster,
-perhaps named @code{buildmaster}. This can help keep your personal
-configuration distinct from that of the buildmaster and is useful if
-you have to use a mail-based notification system (@pxref{Change
-Sources}). However, the Buildbot will work just fine with your regular
-user account.
-
-You need to choose a directory for the buildmaster, called the
-@code{basedir}. This directory will be owned by the buildmaster, which
-will use configuration files therein, and create status files as it
-runs. @file{~/Buildbot} is a likely value. If you run multiple
-buildmasters in the same account, or if you run both masters and
-slaves, you may want a more distinctive name like
-@file{~/Buildbot/master/gnomovision} or
-@file{~/Buildmasters/fooproject}. If you are using a separate user
-account, this might just be @file{~buildmaster/masters/fooprojects}.
-
-Once you've picked a directory, use the @command{buildbot master}
-command to create the directory and populate it with startup files:
-
-@example
-buildbot master @var{basedir}
-@end example
-
-You will need to create a configuration file (@pxref{Configuration})
-before starting the buildmaster. Most of the rest of this manual is
-dedicated to explaining how to do this. A sample configuration file is
-placed in the working directory, named @file{master.cfg.sample}, which
-can be copied to @file{master.cfg} and edited to suit your purposes.
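-
-For example, starting from the freshly-created @var{basedir}:
-
-@example
-cd @var{basedir}
-cp master.cfg.sample master.cfg
-@end example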
-
-(Internal details: This command creates a file named
-@file{buildbot.tac} that contains all the state necessary to create
-the buildmaster. Twisted has a tool called @code{twistd} which can use
-this .tac file to create and launch a buildmaster instance. twistd
-takes care of logging and daemonization (running the program in the
-background). @file{/usr/bin/buildbot} is a front end which runs twistd
-for you.)
-
-In addition to @file{buildbot.tac}, a small @file{Makefile.sample} is
-installed. This can be used as the basis for customized daemon startup,
-@xref{Launching the daemons}.
-
-
-@node Creating a buildslave, Launching the daemons, Creating a buildmaster, Installation
-@section Creating a buildslave
-
-Typically, you will be adding a buildslave to an existing buildmaster,
-to provide additional architecture coverage. The buildbot
-administrator will give you several pieces of information necessary to
-connect to the buildmaster. You should also be somewhat familiar with
-the project being tested, so you can troubleshoot build problems
-locally.
-
-The buildbot exists to make sure that the project's stated ``how to
-build it'' process actually works. To this end, the buildslave should
-run in an environment just like that of your regular developers.
-Typically the project build process is documented somewhere
-(@file{README}, @file{INSTALL}, etc), in a document that should
-mention all library dependencies and contain a basic set of build
-instructions. This document will be useful as you configure the host
-and account in which the buildslave runs.
-
-Here's a good checklist for setting up a buildslave:
-
-@enumerate
-@item
-Set up the account
-
-It is recommended (although not mandatory) to set up a separate user
-account for the buildslave. This account is frequently named
-@code{buildbot} or @code{buildslave}. This serves to isolate your
-personal working environment from that of the slave, and helps to
-minimize the security threat posed by letting possibly-unknown
-contributors run arbitrary code on your system. The account should
-have a minimum of fancy init scripts.
-
-@item
-Install the buildbot code
-
-Follow the instructions given earlier (@pxref{Installing the code}).
-If you use a separate buildslave account, and you didn't install the
-buildbot code to a shared location, then you will need to install it
-with @code{--home=~} for each account that needs it.
-
-@item
-Set up the host
-
-Make sure the host can actually reach the buildmaster. Usually the
-buildmaster is running a status webserver on the same machine, so
-simply point your web browser at it and see if you can get there.
-Install whatever additional packages or libraries the project's
-INSTALL document advises. (or not: if your buildslave is supposed to
-make sure that building without optional libraries still works, then
-don't install those libraries).
-
-Again, these libraries don't necessarily have to be installed to a
-site-wide shared location, but they must be available to your build
-process. Arranging for a private, per-account installation tends to be
-very specific to the build process, so installing them to @file{/usr}
-or @file{/usr/local} is usually the simplest approach.
-
-@item
-Test the build process
-
-Follow the instructions in the INSTALL document, in the buildslave's
-account. Perform a full CVS (or whatever) checkout, configure, make,
-run tests, etc. Confirm that the build works without manual fussing.
-If it doesn't work when you do it by hand, it will be unlikely to work
-when the buildbot attempts to do it in an automated fashion.
-
-@item
-Choose a base directory
-
-This should be somewhere in the buildslave's account, typically named
-after the project which is being tested. The buildslave will not touch
-any file outside of this directory. Something like @file{~/Buildbot}
-or @file{~/Buildslaves/fooproject} is appropriate.
-
-@item
-Get the buildmaster host/port, botname, and password
-
-When the buildbot admin configures the buildmaster to accept and use
-your buildslave, they will provide you with the following pieces of
-information:
-
-@itemize @bullet
-@item
-your buildslave's name
-@item
-the password assigned to your buildslave
-@item
-the hostname and port number of the buildmaster, i.e. buildbot.example.org:8007
-@end itemize
-
-@item
-Create the buildslave
-
-Now run the 'buildbot' command as follows:
-
-@example
-buildbot slave @var{BASEDIR} @var{MASTERHOST}:@var{PORT} @var{SLAVENAME} @var{PASSWORD}
-@end example
-
-This will create the base directory and a collection of files inside,
-including the @file{buildbot.tac} file that contains all the
-information you passed to the @code{buildbot} command.
-
-@item
-Fill in the hostinfo files
-
-When it first connects, the buildslave will send a few files up to the
-buildmaster which describe the host that it is running on. These files
-are presented on the web status display so that developers have more
-information to reproduce any test failures that are witnessed by the
-buildbot. There are sample files in the @file{info} subdirectory of
-the buildbot's base directory. You should edit these to correctly
-describe you and your host.
-
-@file{BASEDIR/info/admin} should contain your name and email address.
-This is the ``buildslave admin address'', and will be visible from the
-build status page (so you may wish to munge it a bit if
-address-harvesting spambots are a concern).
-
-@file{BASEDIR/info/host} should be filled with a brief description of
-the host: OS, version, memory size, CPU speed, versions of relevant
-libraries installed, and finally the version of the buildbot code
-which is running the buildslave.
-
-If you run many buildslaves, you may want to create a single
-@file{~buildslave/info} file and share it among all the buildslaves
-with symlinks.
-
-@end enumerate
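-
-As a concrete illustration of the hostinfo files mentioned in the
-checklist above (all of the details here are placeholders),
-@file{BASEDIR/info/admin} might contain a single line such as:
-
-@example
-Your Name <yourname@@example.org>
-@end example
-
-and @file{BASEDIR/info/host} a short description along the lines of:
-
-@example
-Debian GNU/Linux 3.1, 1GHz Pentium III, 512MB RAM,
-python 2.3.5, Twisted 2.0.1, buildbot 0.7.3
-@end example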
-
-@menu
-* Buildslave Options::
-@end menu
-
-@node Buildslave Options, , Creating a buildslave, Creating a buildslave
-@subsection Buildslave Options
-
-There are a handful of options you might want to use when creating the
-buildslave with the @command{buildbot slave <options> DIR <params>}
-command. You can type @command{buildbot slave --help} for a summary.
-To use these, just include them on the @command{buildbot slave}
-command line, like this:
-
-@example
-buildbot slave --umask=022 ~/buildslave buildmaster.example.org:42012 myslavename mypasswd
-@end example
-
-@table @code
-@item --usepty
-This is a boolean flag that tells the buildslave whether to launch
-child processes in a PTY (the default) or with regular pipes. The
-advantage of using a PTY is that ``grandchild'' processes are more
-likely to be cleaned up if the build is interrupted or times out
-(since it enables the use of a ``process group'' in which all child
-processes will be placed). The disadvantages: some forms of Unix have
-problems with PTYs, some of your unit tests may behave differently
-when run under a PTY (generally those which check to see if they are
-being run interactively), and PTYs will merge the stdout and stderr
-streams into a single output stream (which means the red-vs-black
-coloring in the logfiles will be lost). If you encounter problems, you
-can add @code{--usepty=0} to disable the use of PTYs. Note that
-Windows buildslaves never use PTYs.
-
-@item --umask
-This is a string (generally an octal representation of an integer)
-which will cause the buildslave process' ``umask'' value to be set
-shortly after initialization. The ``twistd'' daemonization utility
-forces the umask to 077 at startup (which means that all files created
-by the buildslave or its child processes will be unreadable by any
-user other than the buildslave account). If you want build products to
-be readable by other accounts, you can add @code{--umask=022} to tell
-the buildslave to fix the umask after twistd clobbers it. If you want
-build products to be @emph{writable} by other accounts too, use
-@code{--umask=000}, but this is likely to be a security problem.
-
-@item --keepalive
-This is a number that indicates how frequently ``keepalive'' messages
-should be sent from the buildslave to the buildmaster, expressed in
-seconds. The default (600) causes a message to be sent to the
-buildmaster at least once every 10 minutes. To set this to a lower
-value, use e.g. @code{--keepalive=120}.
-
-If the buildslave is behind a NAT box or stateful firewall, these
-messages may help to keep the connection alive: some NAT boxes tend to
-forget about a connection if it has not been used in a while. When
-this happens, the buildmaster will think that the buildslave has
-disappeared, and builds will time out. Meanwhile the buildslave will
-not realize that anything is wrong.
-
-@end table
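-
-These options can be combined. For instance, a buildslave behind an
-aggressive NAT box whose build products should be readable by other
-accounts might be created with something like (reusing the placeholder
-hostname, port, slave name, and password from above):
-
-@example
-buildbot slave --umask=022 --keepalive=120 \
-    ~/buildslave buildmaster.example.org:42012 myslavename mypasswd
-@end example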
-
-
-@node Launching the daemons, Logfiles, Creating a buildslave, Installation
-@section Launching the daemons
-
-Both the buildmaster and the buildslave run as daemon programs. To
-launch them, pass the working directory to the @code{buildbot}
-command:
-
-@example
-buildbot start @var{BASEDIR}
-@end example
-
-This command will start the daemon and then return, so normally it
-will not produce any output. To verify that the programs are indeed
-running, look for a pair of files named @file{twistd.log} and
-@file{twistd.pid} that should be created in the working directory.
-@file{twistd.pid} contains the process ID of the newly-spawned daemon.
-
-When the buildslave connects to the buildmaster, new directories will
-start appearing in its base directory. The buildmaster tells the slave
-to create a directory for each Builder which will be using that slave.
-All build operations are performed within these directories: CVS
-checkouts, compiles, and tests.
-
-Once you get everything running, you will want to arrange for the
-buildbot daemons to be started at boot time. One way is to use
-@code{cron}, by putting them in a @@reboot crontab entry@footnote{this
-@@reboot syntax is understood by Vixie cron, which is the flavor
-usually provided with linux systems. Other unices may have a cron that
-doesn't understand @@reboot}:
-
-@example
-@@reboot buildbot start @var{BASEDIR}
-@end example
-
-When you run @command{crontab} to set this up, remember to do it as
-the buildmaster or buildslave account! If you add this to your crontab
-when running as your regular account (or worse yet, root), then the
-daemon will run as the wrong user, quite possibly as one with more
-authority than you intended to provide.
-
-It is important to remember that the environment provided to cron jobs
-and init scripts can be quite different from your normal runtime.
-There may be fewer environment variables specified, and the PATH may
-be shorter than usual. It is a good idea to test out this method of
-launching the buildslave by using a cron job with a time in the near
-future, with the same command, and then check @file{twistd.log} to
-make sure the slave actually started correctly. Common problems here
-are for @file{/usr/local} or @file{~/bin} to not be on your
-@code{PATH}, or for @code{PYTHONPATH} to not be set correctly.
-Sometimes @code{HOME} is messed up too.
-
-To modify the way the daemons are started (perhaps you want to set
-some environment variables first, or perform some cleanup each time),
-you can create a file named @file{Makefile.buildbot} in the base
-directory. When the @file{buildbot} front-end tool is told to
-@command{start} the daemon, and it sees this file (and
-@file{/usr/bin/make} exists), it will do @command{make -f
-Makefile.buildbot start} instead of its usual action (which involves
-running @command{twistd}). When the buildmaster or buildslave is
-installed, a @file{Makefile.sample} is created which implements the
-same behavior as the @file{buildbot} tool uses, so if you want to
-customize the process, just copy @file{Makefile.sample} to
-@file{Makefile.buildbot} and edit it as necessary.
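-
-A minimal sketch of such a @file{Makefile.buildbot} follows; the
-environment variable and the exact @command{twistd} invocation are only
-illustrative, so start from the installed @file{Makefile.sample} rather
-than from this:
-
-@example
-start:
-	PYTHONPATH=$(HOME)/lib/python twistd --no_save --python=buildbot.tac
-@end example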
-
-@node Logfiles, Shutdown, Launching the daemons, Installation
-@section Logfiles
-
-@cindex logfiles
-
-While a buildbot daemon runs, it emits text to a logfile, named
-@file{twistd.log}. A command like @code{tail -f twistd.log} is useful
-to watch the command output as it runs.
-
-The buildmaster will announce any errors with its configuration file
-in the logfile, so it is a good idea to look at the log at startup
-time to check for any problems. Most buildmaster activities will cause
-lines to be added to the log.
-
-@node Shutdown, Maintenance, Logfiles, Installation
-@section Shutdown
-
-To stop a buildmaster or buildslave manually, use:
-
-@example
-buildbot stop @var{BASEDIR}
-@end example
-
-This simply looks for the @file{twistd.pid} file and kills whatever
-process is identified within.
-
-At system shutdown, all processes are sent a @code{SIGTERM} (followed
-by a @code{SIGKILL} if they do not exit promptly). The buildmaster and
-buildslave will respond to the @code{SIGTERM} by shutting down
-normally.
-
-The buildmaster will respond to a @code{SIGHUP} by re-reading its
-config file. The following shortcut is available:
-
-@example
-buildbot sighup @var{BASEDIR}
-@end example
-
-When you update the Buildbot code to a new release, you will need to
-restart the buildmaster and/or buildslave before it can take advantage
-of the new code. You can do a @code{buildbot stop @var{BASEDIR}} and
-@code{buildbot start @var{BASEDIR}} in quick succession, or you can
-use the @code{restart} shortcut, which does both steps for you:
-
-@example
-buildbot restart @var{BASEDIR}
-@end example
-
-
-@node Maintenance, Troubleshooting, Shutdown, Installation
-@section Maintenance
-
-It is a good idea to check the buildmaster's status page every once in
-a while, to see if your buildslave is still online. Eventually the
-buildbot will probably be enhanced to send you email (via the
-@file{info/admin} email address) when the slave has been offline for
-more than a few hours.
-
-If you find you can no longer provide a buildslave to the project, please
-let the project admins know, so they can put out a call for a
-replacement.
-
-The Buildbot records status and logs output continually, each time a
-build is performed. The status tends to be small, but the build logs
-can become quite large. Each build and log are recorded in a separate
-file, arranged hierarchically under the buildmaster's base directory.
-To prevent these files from growing without bound, you should
-periodically delete old build logs. A simple cron job to delete
-anything older than, say, two weeks should do the job. The only trick
-is to leave the @file{buildbot.tac} and other support files alone, for
-which find's @code{-mindepth} argument helps skip everything in the
-top directory. You can use something like the following:
-
-@example
-@@weekly cd BASEDIR && find . -mindepth 2 -type f -mtime +14 -exec rm @{@} \;
-@@weekly cd BASEDIR && find twistd.log* -mtime +14 -exec rm @{@} \;
-@end example
-
-@node Troubleshooting, , Maintenance, Installation
-@section Troubleshooting
-
-Here are a few hints on diagnosing common problems.
-
-@menu
-* Starting the buildslave::
-* Connecting to the buildmaster::
-* Forcing Builds::
-@end menu
-
-@node Starting the buildslave, Connecting to the buildmaster, Troubleshooting, Troubleshooting
-@subsection Starting the buildslave
-
-Cron jobs are typically run with a minimal shell (@file{/bin/sh}, not
-@file{/bin/bash}), and tilde expansion is not always performed in such
-commands. You may want to use explicit paths, because the @code{PATH}
-is usually quite short and doesn't include anything set by your
-shell's startup scripts (@file{.profile}, @file{.bashrc}, etc). If
-you've installed buildbot (or other python libraries) to an unusual
-location, you may need to add a @code{PYTHONPATH} specification (note
-that python will do tilde-expansion on @code{PYTHONPATH} elements by
-itself). Sometimes it is safer to fully-specify everything:
-
-@example
-@@reboot PYTHONPATH=~/lib/python /usr/local/bin/buildbot start /usr/home/buildbot/basedir
-@end example
-
-Take the time to get the @@reboot job set up. Otherwise, things will work
-fine for a while, but the first power outage or system reboot you have will
-stop the buildslave with nothing but the cries of sorrowful developers to
-remind you that it has gone away.
-
-@node Connecting to the buildmaster, Forcing Builds, Starting the buildslave, Troubleshooting
-@subsection Connecting to the buildmaster
-
-If the buildslave cannot connect to the buildmaster, the reason should
-be described in the @file{twistd.log} logfile. Some common problems
-are an incorrect master hostname or port number, or a mistyped bot
-name or password. If the buildslave loses the connection to the
-master, it is supposed to attempt to reconnect with an
-exponentially-increasing backoff. Each attempt (and the time of the
-next attempt) will be logged. If you get impatient, just manually stop
-and re-start the buildslave.
-
-When the buildmaster is restarted, all slaves will be disconnected,
-and will attempt to reconnect as usual. The reconnect time will depend
-upon how long the buildmaster is offline (i.e. how far up the
-exponential backoff curve the slaves have travelled). Again,
-@code{buildbot stop @var{BASEDIR}; buildbot start @var{BASEDIR}} will
-speed up the process.
-
-@node Forcing Builds, , Connecting to the buildmaster, Troubleshooting
-@subsection Forcing Builds
-
-From the buildmaster's main status web page, you can force a build to
-be run on your build slave. Figure out which column is for a builder
-that runs on your slave, click on that builder's name, and the page
-that comes up will have a ``Force Build'' button. Fill in the form,
-hit the button, and a moment later you should see your slave's
-@file{twistd.log} filling with commands being run. Using @code{pstree}
-or @code{top} should also reveal the cvs/make/gcc/etc processes being
-run by the buildslave. Note that the same web page should also show
-the @file{admin} and @file{host} information files that you configured
-earlier.
-
-@node Concepts, Configuration, Installation, Top
-@chapter Concepts
-
-This chapter defines some of the basic concepts that the Buildbot
-uses. You'll need to understand how the Buildbot sees the world to
-configure it properly.
-
-@menu
-* Version Control Systems::
-* Schedulers::
-* BuildSet::
-* BuildRequest::
-* Builder::
-* Users::
-@end menu
-
-@node Version Control Systems, Schedulers, Concepts, Concepts
-@section Version Control Systems
-
-@cindex Version Control
-
-The source trees that the Buildbot builds come from a Version Control
-System of some kind. CVS and Subversion are two popular ones, but the
-Buildbot supports others.
-@code{repository} which acts as a server@footnote{except Darcs, but
-since the Buildbot never modifies its local source tree we can ignore
-the fact that Darcs uses a less centralized model}, from which clients
-can obtain source trees according to various parameters. The VC
-repository provides source trees of various projects, for different
-branches, and from various points in time. The first thing we have to
-do is to specify which source tree we want to get.
-
-@menu
-* Generalizing VC Systems::
-* Source Tree Specifications::
-* How Different VC Systems Specify Sources::
-* Attributes of Changes::
-@end menu
-
-@node Generalizing VC Systems, Source Tree Specifications, Version Control Systems, Version Control Systems
-@subsection Generalizing VC Systems
-
-For the purposes of the Buildbot, we will try to generalize all VC
-systems as having repositories that each provide sources for a variety
-of projects. Each project is defined as a directory tree with source
-files. The individual files may each have revisions, but we ignore
-that and treat the project as a whole as having a set of revisions.
-Each time someone commits a change to the project, a new revision
-becomes available. These revisions can be described by a tuple with
-two items: the first is a branch tag, and the second is some kind of
-timestamp or revision stamp. Complex projects may have multiple branch
-tags, but there is always a default branch. The timestamp may be an
-actual timestamp (such as the -D option to CVS), or it may be a
-monotonically-increasing transaction number (such as the change number
-used by SVN and P4, or the revision number used by Arch, or a labeled
-tag used in CVS)@footnote{many VC systems provide more complexity than
-this: in particular the local views that P4 and ClearCase can assemble
-out of various source directories are more complex than we're prepared
-to take advantage of here}. The SHA1 revision ID used by Monotone and
-Mercurial is also a kind of revision stamp, in that it specifies a
-unique copy of the source tree, as does a Darcs ``context'' file.
-
-When we aren't intending to make any changes to the sources we check out
-(at least not any that need to be committed back upstream), there are two
-basic ways to use a VC system:
-
-@itemize @bullet
-@item
-Retrieve a specific set of source revisions: some tag or key is used
-to index this set, which is fixed and cannot be changed by subsequent
-developers committing new changes to the tree. Releases are built from
-tagged revisions like this, so that they can be rebuilt again later
-(probably with controlled modifications).
-@item
-Retrieve the latest sources along a specific branch: some tag is used
-to indicate which branch is to be used, but within that constraint we want
-to get the latest revisions.
-@end itemize
-
-Build personnel or CM staff typically use the first approach: the
-build that results is (ideally) completely specified by the two
-parameters given to the VC system: repository and revision tag. This
-gives QA and end-users something concrete to point at when reporting
-bugs. Release engineers are also reportedly fond of shipping code that
-can be traced back to a concise revision tag of some sort.
-
-Developers are more likely to use the second approach: each morning
-the developer does an update to pull in the changes committed by the
-team over the last day. These builds are not easy to fully specify: it
-depends upon exactly when you did a checkout, and upon what local
-changes the developer has in their tree. Developers do not normally
-tag each build they produce, because there is usually significant
-overhead involved in creating these tags. Recreating the trees used by
-one of these builds can be a challenge. Some VC systems may provide
-implicit tags (like a revision number), while others may allow the use
-of timestamps to mean ``the state of the tree at time X'' as opposed
-to a tree-state that has been explicitly marked.
-
-The Buildbot is designed to help developers, so it usually works in
-terms of @emph{the latest} sources as opposed to specific tagged
-revisions. However, it would really prefer to build from reproducible
-source trees, so implicit revisions are used whenever possible.
-
-@node Source Tree Specifications, How Different VC Systems Specify Sources, Generalizing VC Systems, Version Control Systems
-@subsection Source Tree Specifications
-
-So for the Buildbot's purposes we treat each VC system as a server
-which can take a list of specifications as input and produce a source
-tree as output. Some of these specifications are static: they are
-attributes of the builder and do not change over time. Others are more
-variable: each build will have a different value. The repository is
-changed over time by a sequence of Changes, each of which represents a
-single developer making changes to some set of files. These Changes
-are cumulative@footnote{Monotone's @emph{multiple heads} feature
-violates this assumption of cumulative Changes, but in most situations
-the changes don't occur frequently enough for this to be a significant
-problem}.
-
-For normal builds, the Buildbot wants to get well-defined source trees
-that contain specific Changes, and exclude other Changes that may have
-occurred after the desired ones. We assume that the Changes arrive at
-the buildbot (through one of the mechanisms described in @pxref{Change
-Sources}) in the same order in which they are committed to the
-repository. The Buildbot waits for the tree to become ``stable''
-before initiating a build, for two reasons. The first is that
-developers frequently make multiple related commits in quick
-succession, even when the VC system provides ways to make atomic
-transactions involving multiple files at the same time. Running a
-build in the middle of these sets of changes would use an inconsistent
-set of source files, and is likely to fail (and is certain to be less
-useful than a build which uses the full set of changes). The
-tree-stable-timer is intended to avoid these useless builds that
-include some of the developer's changes but not all. The second reason
-is that some VC systems (i.e. CVS) do not provide repository-wide
-transaction numbers, so that timestamps are the only way to refer to
-a specific repository state. These timestamps may be somewhat
-ambiguous, due to processing and notification delays. By waiting until
-the tree has been stable for, say, 10 minutes, we can choose a
-timestamp from the middle of that period to use for our source
-checkout, and then be reasonably sure that any clock-skew errors will
-not cause the build to be performed on an inconsistent set of source
-files.
-
-The Schedulers always use the tree-stable-timer, with a timeout that
-is configured to reflect a reasonable tradeoff between build latency
-and change frequency. When the VC system provides coherent
-repository-wide revision markers (such as Subversion's revision
-numbers, or in fact anything other than CVS's timestamps), the
-resulting Build is simply performed against a source tree defined by
-that revision marker. When the VC system does not provide this, a
-timestamp from the middle of the tree-stable period is used to
-generate the source tree@footnote{this @code{checkoutDelay} defaults
-to half the tree-stable timer, but it can be overridden with an
-argument to the Source Step}.
-
-@node How Different VC Systems Specify Sources, Attributes of Changes, Source Tree Specifications, Version Control Systems
-@subsection How Different VC Systems Specify Sources
-
-For CVS, the static specifications are @code{repository} and
-@code{module}. In addition to those, each build uses a timestamp (or
-omits the timestamp to mean @code{the latest}) and @code{branch tag}
-(which defaults to HEAD). These parameters collectively specify a set
-of sources from which a build may be performed.
-
-@uref{http://subversion.tigris.org, Subversion} combines the
-repository, module, and branch into a single @code{Subversion URL}
-parameter. Within that scope, source checkouts can be specified by a
-numeric @code{revision number} (a repository-wide
-monotonically-increasing marker, such that each transaction that
-changes the repository is indexed by a different revision number), or
-a revision timestamp. When branches are used, the repository and
-module form a static @code{baseURL}, while each build has a
-@code{revision number} and a @code{branch} (which defaults to a
-statically-specified @code{defaultBranch}). The @code{baseURL} and
-@code{branch} are simply concatenated together to derive the
-@code{svnurl} to use for the checkout.
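-
-As a rough sketch of how this looks in a build factory (the repository
-URLs here are invented, and the exact Step arguments should be checked
-against the SVN step's documentation), the single-URL and
-baseURL/branch forms are:
-
-@example
-from buildbot.process import step
-from buildbot.process.factory import s
-
-# single branch: the whole checkout location in one string
-co = s(step.SVN, svnurl="http://svn.example.org/repos/trunk",
-       mode="update")
-
-# branch-aware: baseURL + branch is concatenated to form the svnurl
-co = s(step.SVN, baseURL="http://svn.example.org/repos/",
-       defaultBranch="trunk", mode="update")
-@end example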
-
-@uref{http://wiki.gnuarch.org/, Arch} and
-@uref{http://bazaar.canonical.com/, Bazaar} specify a repository by
-URL, as well as a @code{version} which is kind of like a branch name.
-Arch uses the word @code{archive} to represent the repository. Arch
-lets you push changes from one archive to another, removing the strict
-centralization required by CVS and SVN. It retains the distinction
-between repository and working directory that most other VC systems
-use. For complex multi-module directory structures, Arch has a
-built-in @code{build config} layer that splits the checkout process into
-two steps. First, an initial bootstrap checkout is performed to
-retrieve a set of build-config files. Second, one of these files is
-used to figure out which archives/modules should be used to populate
-subdirectories of the initial checkout.
-
-Builders which use Arch and Bazaar therefore have a static archive
-@code{url}, and a default ``branch'' (which is a string that specifies
-a complete category--branch--version triple). Each build can have its
-own branch (the category--branch--version string) to override the
-default, as well as a revision number (which is turned into a
---patch-NN suffix when performing the checkout).
-
-@uref{http://abridgegame.org/darcs/, Darcs} doesn't really have the
-notion of a single master repository. Nor does it really have
-branches. In Darcs, each working directory is also a repository, and
-there are operations to push and pull patches from one of these
-@code{repositories} to another. For the Buildbot's purposes, all you
-need to do is specify the URL of a repository that you want to build
-from. The build slave will then pull the latest patches from that
-repository and build them. Multiple branches are implemented by using
-multiple repositories (possibly living on the same server).
-
-Builders which use Darcs therefore have a static @code{repourl} which
-specifies the location of the repository. If branches are being used,
-the source Step is instead configured with a @code{baseURL} and a
-@code{defaultBranch}, and the two strings are simply concatenated
-together to obtain the repository's URL. Each build then has a
-specific branch which replaces @code{defaultBranch}, or just uses the
-default one. Instead of a revision number, each build can have a
-``context'', which is a string that records all the patches that are
-present in a given tree (this is the output of @command{darcs changes
---context}, and is considerably less concise than, e.g. Subversion's
-revision number, but the patch-reordering flexibility of Darcs makes
-it impossible to provide a shorter useful specification).
-
-@uref{http://selenic.com/mercurial, Mercurial} is like Darcs, in that
-each branch is stored in a separate repository. The @code{repourl},
-@code{baseURL}, and @code{defaultBranch} arguments are all handled the
-same way as with Darcs. The ``revision'', however, is the hash
-identifier returned by @command{hg identify}.
-
-
-@node Attributes of Changes, , How Different VC Systems Specify Sources, Version Control Systems
-@subsection Attributes of Changes
-
-@heading Who
-
-Each Change has a @code{who} attribute, which specifies which
-developer is responsible for the change. This is a string which comes
-from a namespace controlled by the VC repository. Frequently this
-means it is a username on the host which runs the repository, but not
-all VC systems require this (Arch, for example, uses a fully-qualified
-@code{Arch ID}, which looks like an email address, as does Darcs).
-Each StatusNotifier will map the @code{who} attribute into something
-appropriate for their particular means of communication: an email
-address, an IRC handle, etc.
-
-@heading Files
-
-It also has a list of @code{files}, which are just the tree-relative
-filenames of any files that were added, deleted, or modified for this
-Change. These filenames are used by the @code{isFileImportant}
-function (in the Scheduler) to decide whether it is worth triggering a
-new build or not, e.g. the function could use
-@code{filename.endswith(".c")} to only run a build if a C file were
-checked in. Certain BuildSteps can also use the list of changed files
-to run a more targeted series of tests, e.g. the
-@code{step_twisted.Trial} step can run just the unit tests that
-provide coverage for the modified .py files instead of running the
-full test suite.
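-
-As a sketch of such a function (the Scheduler and Builder names are
-invented; the @code{fileIsImportant=} argument is described in the
-Scheduler documentation):
-
-@example
-from buildbot import scheduler
-
-def fileIsImportant(change):
-    # only trigger a build when a C source file was touched
-    for name in change.files:
-        if name.endswith(".c"):
-            return True
-    return False
-
-quick = scheduler.Scheduler("quick", None, 60, ["quick-linux"],
-                            fileIsImportant=fileIsImportant)
-@end example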
-
-@heading Comments
-
-The Change also has a @code{comments} attribute, which is a string
-containing any checkin comments.
-
-@heading Revision
-
-Each Change can have a @code{revision} attribute, which describes how
-to get a tree with a specific state: a tree which includes this Change
-(and all that came before it) but none that come after it. If this
-information is unavailable, the @code{.revision} attribute will be
-@code{None}. These revisions are provided by the ChangeSource, and
-consumed by the @code{computeSourceRevision} method in the appropriate
-@code{step.Source} class.
-
-@table @samp
-@item CVS
-@code{revision} is an int, seconds since the epoch
-@item SVN
-@code{revision} is an int, a transaction number (r%d)
-@item Darcs
-@code{revision} is a large string, the output of @code{darcs changes --context}
-@item Mercurial
-@code{revision} is a short string (a hash ID), the output of @code{hg identify}
-@item Arch/Bazaar
-@code{revision} is the full revision ID (ending in --patch-%d)
-@item P4
-@code{revision} is an int, the transaction number
-@end table
-
-@heading Branches
-
-The Change might also have a @code{branch} attribute. This indicates
-that all of the Change's files are in the same named branch. The
-Schedulers get to decide whether the branch should be built or not.
-
-For VC systems like CVS, Arch, and Monotone, the @code{branch} name is
-unrelated to the filename (that is, the branch name and the filename
-inhabit unrelated namespaces). For SVN, branches are expressed as
-subdirectories of the repository, so the file's ``svnurl'' is a
-combination of some base URL, the branch name, and the filename within
-the branch. (In a sense, the branch name and the filename inhabit the
-same namespace). Darcs branches are subdirectories of a base URL just
-like SVN. Mercurial branches are the same as Darcs.
-
-@table @samp
-@item CVS
-branch='warner-newfeature', files=['src/foo.c']
-@item SVN
-branch='branches/warner-newfeature', files=['src/foo.c']
-@item Darcs
-branch='warner-newfeature', files=['src/foo.c']
-@item Mercurial
-branch='warner-newfeature', files=['src/foo.c']
-@item Arch/Bazaar
-branch='buildbot--usebranches--0', files=['buildbot/master.py']
-@end table
-
-@heading Links
-
-@c TODO: who is using 'links'? how is it being used?
-
-Finally, the Change might have a @code{links} list, which is intended
-to provide a list of URLs to a @emph{viewcvs}-style web page that
-provides more detail for this Change, perhaps including the full file
-diffs.
-
-
-@node Schedulers, BuildSet, Version Control Systems, Concepts
-@section Schedulers
-
-@cindex Scheduler
-
-Each Buildmaster has a set of @code{Scheduler} objects, each of which
-gets a copy of every incoming Change. The Schedulers are responsible
-for deciding when Builds should be run. Some Buildbot installations
-might have a single Scheduler, while others may have several, each for
-a different purpose.
-
-For example, a ``quick'' scheduler might exist to give immediate
-feedback to developers, hoping to catch obvious problems in the code
-that can be detected quickly. These typically do not run the full test
-suite, nor do they run on a wide variety of platforms. They also
-usually do a VC update rather than performing a brand-new checkout
-each time. You could have a ``quick'' scheduler which uses a 30-second
-timeout and feeds a single ``quick'' Builder that uses a VC
-@code{mode='update'} setting, as sketched below.
-
-A separate ``full'' scheduler would run more comprehensive tests a
-little while later, to catch more subtle problems. This scheduler
-would have a longer tree-stable-timer, maybe 30 minutes, and would
-feed multiple Builders (with a @code{mode=} of @code{'copy'},
-@code{'clobber'}, or @code{'export'}).
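-
-A rough sketch of the Scheduler half of this arrangement (the Builder
-names are invented; the full configuration syntax is described in the
-Configuration chapter):
-
-@example
-from buildbot import scheduler
-quick = scheduler.Scheduler("quick", None, 30, ["quick-linux"])
-full = scheduler.Scheduler("full", None, 30*60,
-                           ["full-linux", "full-osx"])
-c['schedulers'] = [quick, full]
-@end example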
-
-The @code{tree-stable-timer} and @code{isFileImportant} decisions are
-made by the Scheduler. Dependencies are also implemented here.
-Periodic builds (those which are run every N seconds rather than after
-new Changes arrive) are triggered by a special @code{Periodic}
-Scheduler subclass. The default Scheduler class can also be told to
-watch for specific branches, ignoring Changes on other branches. This
-may be useful if you have a trunk and a few release branches which
-should be tracked, but you don't want to have the Buildbot pay
-attention to several dozen private user branches.
-
-Some Schedulers may trigger builds for reasons other than recent
-Changes. For example, a Scheduler subclass could connect to a
-remote buildmaster and watch for builds of a library to succeed before
-triggering a local build that uses that library.
-
-Each Scheduler creates and submits @code{BuildSet} objects to the
-@code{BuildMaster}, which is then responsible for making sure the
-individual @code{BuildRequests} are delivered to the target
-@code{Builders}.
-
-@code{Scheduler} instances are activated by placing them in the
-@code{c['schedulers']} list in the buildmaster config file. Each
-Scheduler has a unique name.
-
-
-@node BuildSet, BuildRequest, Schedulers, Concepts
-@section BuildSet
-
-@cindex BuildSet
-
-A @code{BuildSet} is the name given to a set of Builds that all
-compile/test the same version of the tree on multiple Builders. In
-general, all these component Builds will perform the same sequence of
-Steps, using the same source code, but on different platforms or
-against a different set of libraries.
-
-The @code{BuildSet} is tracked as a single unit, which fails if any of
-the component Builds have failed, and therefore can succeed only if
-@emph{all} of the component Builds have succeeded. There are two kinds
-of status notification messages that can be emitted for a BuildSet:
-the @code{firstFailure} type (which fires as soon as we know the
-BuildSet will fail), and the @code{Finished} type (which fires once
-the BuildSet has completely finished, regardless of whether the
-overall set passed or failed).
-
-A @code{BuildSet} is created with a @emph{source stamp} tuple of
-(branch, revision, changes, patch), some of which may be None, and a
-list of Builders on which it is to be run. They are then given to the
-BuildMaster, which is responsible for creating a separate
-@code{BuildRequest} for each Builder.
-
-There are a couple of different likely values for the
-@code{SourceStamp}:
-
-@table @code
-@item (revision=None, changes=[CHANGES], patch=None)
-This is a @code{SourceStamp} used when a series of Changes have
-triggered a build. The VC step will attempt to check out a tree that
-contains CHANGES (and any changes that occurred before CHANGES, but
-not any that occurred after them).
-
-@item (revision=None, changes=None, patch=None)
-This builds the most recent code on the default branch. This is the
-sort of @code{SourceStamp} that would be used on a Build that was
-triggered by a user request, or a Periodic scheduler. It is also
-possible to configure the VC Source Step to always check out the
-latest sources rather than paying attention to the Changes in the
-SourceStamp, which will result in the same behavior as this.
-
-@item (branch=BRANCH, revision=None, changes=None, patch=None)
-This builds the most recent code on the given BRANCH. Again, this is
-generally triggered by a user request or Periodic build.
-
-@item (revision=REV, changes=None, patch=(LEVEL, DIFF))
-This checks out the tree at the given revision REV, then applies a
-patch (using @code{patch -pLEVEL <DIFF}). The @ref{try} feature uses
-this kind of @code{SourceStamp}. If @code{patch} is None, the patching
-step is bypassed.
-
-@end table
-
-The buildmaster is responsible for turning the @code{BuildSet} into a
-set of @code{BuildRequest} objects and queueing them on the
-appropriate Builders.
-
-
-@node BuildRequest, Builder, BuildSet, Concepts
-@section BuildRequest
-
-@cindex BuildRequest
-
-A @code{BuildRequest} is a request to build a specific set of sources
-on a single specific Builder. Each Builder runs the
-@code{BuildRequest} as soon as it can (i.e. when an associated
-buildslave becomes free).
-
-The @code{BuildRequest} contains the @code{SourceStamp} specification.
-The actual process of running the build (the series of Steps that will
-be executed) is implemented by the @code{Build} object. In the future
-this might be changed, to have the @code{Build} define @emph{what}
-gets built, and a separate @code{BuildProcess} (provided by the
-Builder) to define @emph{how} it gets built.
-
-The @code{BuildRequest} may be mergeable with other compatible
-@code{BuildRequest}s. Builds that are triggered by incoming Changes
-will generally be mergeable. Builds that are triggered by user
-requests are generally not, unless they are multiple requests to build
-the @emph{latest sources} of the same branch.
-
-@node Builder, Users, BuildRequest, Concepts
-@section Builder
-
-@cindex Builder
-
-The @code{Builder} is a long-lived object which controls all Builds of
-a given type. Each one is created when the config file is first
-parsed, and lives forever (or rather until it is removed from the
-config file). It mediates the connections to the buildslaves that do
-all the work, and is responsible for creating the @code{Build} objects
-that decide @emph{how} a build is performed (i.e., which steps are
-executed in what order).
-
-Each @code{Builder} gets a unique name, and the path name of a
-directory where it gets to do all its work (there is a
-buildmaster-side directory for keeping status information, as well as
-a buildslave-side directory where the actual checkout/compile/test
-commands are executed). It also gets a @code{BuildFactory}, which is
-responsible for creating new @code{Build} instances: because the
-@code{Build} instance is what actually performs each build, choosing
-the @code{BuildFactory} is the way to specify what happens each time a
-build is done.
-
-Each @code{Builder} is associated with one or more @code{BuildSlaves}.
-A @code{Builder} which is used to perform OS-X builds (as opposed to
-Linux or Solaris builds) should naturally be associated with an
-OS-X-based buildslave.
-
-
-@node Users, , Builder, Concepts
-@section Users
-
-@cindex Users
-
-Buildbot has a somewhat limited awareness of @emph{users}. It assumes
-the world consists of a set of developers, each of whom can be
-described by a couple of simple attributes. These developers make
-changes to the source code, causing builds which may succeed or fail.
-
-Each developer is primarily known through the source control system. Each
-Change object that arrives is tagged with a @code{who} field that
-typically gives the account name (on the repository machine) of the user
-responsible for that change. This string is the primary key by which the
-User is known, and is displayed on the HTML status pages and in each Build's
-``blamelist''.
-
-To do more with the User than just refer to them, this username needs to
-be mapped into an address of some sort. The responsibility for this mapping
-is left up to the status module which needs the address. The core code knows
-nothing about email addresses or IRC nicknames, just user names.
-
-@menu
-* Doing Things With Users::
-* Email Addresses::
-* IRC Nicknames::
-* Live Status Clients::
-@end menu
-
-@node Doing Things With Users, Email Addresses, Users, Users
-@subsection Doing Things With Users
-
-Each Change has a single User who is responsible for that Change. Most
-Builds have a set of Changes: the Build represents the first time these
-Changes have been built and tested by the Buildbot. The build has a
-``blamelist'' that consists of a simple union of the Users responsible
-for all the Build's Changes.
-
-The Build provides (through the IBuildStatus interface) a list of Users
-who are ``involved'' in the build. For now this is equal to the
-blamelist, but in the future it will be expanded to include a ``build
-sheriff'' (a person who is ``on duty'' at that time and responsible for
-watching over all builds that occur during their shift), as well as
-per-module owners who simply want to keep watch over their domain (chosen by
-subdirectory or a regexp matched against the filenames pulled out of the
-Changes). The Involved Users are those who probably have an interest in the
-results of any given build.
-
-In the future, Buildbot will acquire the concept of ``Problems'',
-which last longer than builds and have beginnings and ends. For example, a
-test case which passed in one build and then failed in the next is a
-Problem. The Problem lasts until the test case starts passing again, at
-which point the Problem is said to be ``resolved''.
-
-If there appears to be a code change that went into the tree at the
-same time as the test started failing, that Change is marked as being
-responsible for the Problem, and the user who made the change is added
-to the Problem's ``Guilty'' list. In addition to this user, there may
-be others who share responsibility for the Problem (module owners,
-sponsoring developers). In addition to the Responsible Users, there
-may be a set of Interested Users, who take an interest in the fate of
-the Problem.
-
-Problems therefore have sets of Users who may want to be kept aware of
-the condition of the problem as it changes over time. If configured, the
-Buildbot can pester everyone on the Responsible list with increasing
-harshness until the problem is resolved, with the most harshness reserved
-for the Guilty parties themselves. The Interested Users may merely be told
-when the problem starts and stops, as they are not actually responsible for
-fixing anything.
-
-@node Email Addresses, IRC Nicknames, Doing Things With Users, Users
-@subsection Email Addresses
-
-The @code{buildbot.status.mail.MailNotifier} class provides a
-status target which can send email about the results of each build. It
-accepts a static list of email addresses to which each message should be
-delivered, but it can also be configured to send mail to the Build's
-Interested Users. To do this, it needs a way to convert User names into
-email addresses.
-
-For many VC systems, the User Name is actually an account name on the
-system which hosts the repository. As such, turning the name into an
-email address is a simple matter of appending
-``@@repositoryhost.com''. Some projects use other kinds of mappings
-(for example the preferred email address may be at ``project.org''
-despite the repository host being named ``cvs.project.org''), and some
-VC systems have full separation between the concept of a user and that
-of an account on the repository host (like Perforce). Some systems
-(like Arch) put a full contact email address in every change.
-
-To convert these names to addresses, the MailNotifier uses an EmailLookup
-object. This provides a @code{getAddress} method which accepts a name
-and (eventually) returns an address. By default, @code{MailNotifier}
-uses an EmailLookup which simply appends a static string,
-configurable when the notifier is created. To create more complex behaviors
-(perhaps using an LDAP lookup, or using ``finger'' on a central host to
-determine a preferred address for the developer), provide a different object
-as the @code{lookup} argument.
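-
-As a hedged sketch (the class name and domain are invented, and it
-assumes the lookup object should declare the
-@code{buildbot.interfaces.IEmailLookup} interface), a custom lookup
-might look like:
-
-@example
-from zope.interface import implements
-from buildbot import interfaces
-from buildbot.status import mail
-
-class ProjectLookup:
-    implements(interfaces.IEmailLookup)
-    def getAddress(self, name):
-        # map a repository username to the project's preferred address
-        return name + "@@project.org"
-
-m = mail.MailNotifier(fromaddr="buildbot@@example.org",
-                      sendToInterestedUsers=True,
-                      lookup=ProjectLookup())
-c['status'].append(m)
-@end example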
-
-In the future, when the Problem mechanism has been set up, the Buildbot
-will need to send mail to arbitrary Users. It will do this by locating a
-MailNotifier-like object among all the buildmaster's status targets, and
-asking it to send messages to various Users. This means the User-to-address
-mapping only has to be set up once, in your MailNotifier, and every email
-message the buildbot emits will take advantage of it.
-
-@node IRC Nicknames, Live Status Clients, Email Addresses, Users
-@subsection IRC Nicknames
-
-Like MailNotifier, the @code{buildbot.status.words.IRC} class
-provides a status target which can announce the results of each build. It
-also provides an interactive interface by responding to online queries
-posted in the channel or sent as private messages.
-
-In the future, the buildbot can be configured to map User names to IRC
-nicknames, to watch for the recent presence of these nicknames, and to
-deliver build status messages to the interested parties. Like
-@code{MailNotifier} does for email addresses, the @code{IRC} object
-will have an @code{IRCLookup} which is responsible for nicknames. The
-mapping can be set up statically, or it can be updated by online users
-themselves (by claiming a username with some kind of ``buildbot: i am
-user warner'' commands).
-
-Once the mapping is established, the rest of the buildbot can ask the
-@code{IRC} object to send messages to various users. It can report on
-the likelihood that the user saw the given message (based upon how long the
-user has been inactive on the channel), which might prompt the Problem
-Hassler logic to send them an email message instead.
-
-@node Live Status Clients, , IRC Nicknames, Users
-@subsection Live Status Clients
-
-The Buildbot also offers a PB-based status client interface which can
-display real-time build status in a GUI panel on the developer's desktop.
-This interface is normally anonymous, but it could be configured to let the
-buildmaster know @emph{which} developer is using the status client. The
-status client could then be used as a message-delivery service, providing an
-alternative way to deliver low-latency high-interruption messages to the
-developer (like ``hey, you broke the build'').
-
-
-@node Configuration, Getting Source Code Changes, Concepts, Top
-@chapter Configuration
-
-@cindex Configuration
-
-The buildbot's behavior is defined by the ``config file'', which
-normally lives in the @file{master.cfg} file in the buildmaster's base
-directory (but this can be changed with an option to the
-@code{buildbot master} command). This file completely specifies which
-Builders are to be run, which slaves they should use, how Changes
-should be tracked, and where the status information is to be sent. The
-buildmaster's @file{buildbot.tac} file names the base directory;
-everything else comes from the config file.
-
-A sample config file was installed for you when you created the
-buildmaster, but you will need to edit it before your buildbot will do
-anything useful.
-
-This chapter gives an overview of the format of this file and the
-various sections in it. You will need to read the later chapters to
-understand how to fill in each section properly.
-
-@menu
-* Config File Format::
-* Loading the Config File::
-* Defining the Project::
-* Listing Change Sources and Schedulers::
-* Setting the slaveport::
-* Buildslave Specifiers::
-* Defining Builders::
-* Defining Status Targets::
-* Debug options::
-@end menu
-
-@node Config File Format, Loading the Config File, Configuration, Configuration
-@section Config File Format
-
-The config file is, fundamentally, just a piece of Python code which
-defines a dictionary named @code{BuildmasterConfig}, with a number of
-keys that are treated specially. You don't need to know Python to do
-basic configuration, though; you can just copy the syntax of the
-sample file. If you @emph{are} comfortable writing Python code,
-however, you can use all the power of a full programming language to
-achieve more complicated configurations.
-
-The @code{BuildmasterConfig} name is the only one which matters: all
-other names defined during the execution of the file are discarded.
-When parsing the config file, the Buildmaster generally compares the
-old configuration with the new one and performs the minimum set of
-actions necessary to bring the buildbot up to date: Builders which are
-not changed are left untouched, and Builders which are modified get to
-keep their old event history.
-
-Basic Python syntax: comments start with a hash character (``#''),
-tuples are defined with @code{(parentheses, pairs)}, arrays are
-defined with @code{[square, brackets]}; tuples and arrays are mostly
-interchangeable. Dictionaries (data structures which map ``keys'' to
-``values'') are defined with curly braces: @code{@{'key1': 'value1',
-'key2': 'value2'@} }. Function calls (and object instantiation) can use
-named parameters, like @code{w = html.Waterfall(http_port=8010)}.
-
-The config file starts with a series of @code{import} statements,
-which make various kinds of Steps and Status targets available for
-later use. The main @code{BuildmasterConfig} dictionary is created,
-then it is populated with a variety of keys. These keys are broken
-roughly into the following sections, each of which is documented in
-the rest of this chapter:
-
-@itemize @bullet
-@item
-Project Definitions
-@item
-Change Sources / Schedulers
-@item
-Slaveport
-@item
-Buildslave Configuration
-@item
-Builders / Interlocks
-@item
-Status Targets
-@item
-Debug options
-@end itemize
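-
-Putting these sections together, a skeleton @file{master.cfg} (every
-value here is just a placeholder) looks roughly like this:
-
-@example
-# master.cfg -- sketch of the overall structure
-c = BuildmasterConfig = @{@}
-
-c['projectName'] = "MyProject"
-c['projectURL'] = "http://myproject.example.org/"
-c['buildbotURL'] = "http://buildbot.example.org:8010/"
-
-c['sources'] = []      # ChangeSource instances
-c['schedulers'] = []   # Scheduler instances
-c['slavePortnum'] = 9989
-c['bots'] = []         # (slavename, password) tuples
-c['builders'] = []     # Builder specification dictionaries
-c['status'] = []       # status targets
-# c['debugPassword'] = "debugpw"   # optional debug options
-@end example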
-
-The config file can use a few names which are placed into its namespace:
-
-@table @code
-@item basedir
-the base directory for the buildmaster. This string has not been
-expanded, so it may start with a tilde. It needs to be expanded before
-use. The config file is located in
-@code{os.path.expanduser(os.path.join(basedir, 'master.cfg'))}
-
-@end table
-
-
-@node Loading the Config File, Defining the Project, Config File Format, Configuration
-@section Loading the Config File
-
-The config file is only read at specific points in time. It is first
-read when the buildmaster is launched. Once it is running, there are
-various ways to ask it to reload the config file. If you are on the
-system hosting the buildmaster, you can send a @code{SIGHUP} signal to
-it: the @command{buildbot} tool has a shortcut for this:
-
-@example
-buildbot sighup @var{BASEDIR}
-@end example
-
-The debug tool (@code{buildbot debugclient --master HOST:PORT}) has a
-``Reload .cfg'' button which will also trigger a reload. In the
-future, there will be other ways to accomplish this step (probably a
-password-protected button on the web page, as well as a privileged IRC
-command).
-
-
-@node Defining the Project, Listing Change Sources and Schedulers, Loading the Config File, Configuration
-@section Defining the Project
-
-There are a couple of basic settings that you use to tell the buildbot
-what project it is working on. This information is used by status
-reporters to let users find out more about the codebase being
-exercised by this particular Buildbot installation.
-
-@example
-c['projectName'] = "Buildbot"
-c['projectURL'] = "http://buildbot.sourceforge.net/"
-c['buildbotURL'] = "http://localhost:8010/"
-@end example
-
-@cindex c['projectName']
-@code{projectName} is a short string that will be used to describe the
-project that this buildbot is working on. For example, it is used as
-the title of the waterfall HTML page.
-
-@cindex c['projectURL']
-@code{projectURL} is a string that gives a URL for the project as a
-whole. HTML status displays will show @code{projectName} as a link to
-@code{projectURL}, to provide a link from buildbot HTML pages to your
-project's home page.
-
-@cindex c['buildbotURL']
-The @code{buildbotURL} string should point to the location where the
-buildbot's internal web server (usually the @code{html.Waterfall}
-page) is visible. This typically uses the port number set when you
-create the @code{Waterfall} object: the buildbot needs your help to
-figure out a suitable externally-visible host name.
-
-When status notices are sent to users (either by email or over IRC),
-@code{buildbotURL} will be used to create a URL to the specific build
-or problem that they are being notified about. It will also be made
-available to queriers (over IRC) who want to find out where to get
-more information about this buildbot.
-
-
-@node Listing Change Sources and Schedulers, Setting the slaveport, Defining the Project, Configuration
-@section Listing Change Sources and Schedulers
-
-@cindex c['sources']
-The @code{c['sources']} key is a list of ChangeSource
-instances@footnote{To be precise, it is a list of objects which all
-implement the @code{buildbot.interfaces.IChangeSource} Interface}.
-This defines how the buildmaster learns about source code changes.
-More information about what goes here is available in @xref{Getting
-Source Code Changes}.
-
-@example
-import buildbot.changes.pb
-c['sources'] = [buildbot.changes.pb.PBChangeSource()]
-@end example
-
-@cindex c['schedulers']
-@code{c['schedulers']} is a list of Scheduler instances, each of which
-causes builds to be started on a particular set of Builders. The two
-basic Scheduler classes you are likely to start with are
-@code{Scheduler} and @code{Periodic}, but you can write a customized
-subclass to implement more complicated build scheduling.
-
-The docstring for @code{buildbot.scheduler.Scheduler} is the best
-place to see all the options that can be used. Type @code{pydoc
-buildbot.scheduler.Scheduler} to see it, or look in
-@file{buildbot/scheduler.py} directly.
-
-The basic Scheduler takes four arguments:
-
-@table @code
-@item name
-Each Scheduler must have a unique name. This is only used in status
-displays.
-
-@item branch
-This Scheduler will pay attention to a single branch, ignoring Changes
-that occur on other branches. Setting @code{branch} equal to the
-special value of @code{None} means it should only pay attention to the
-default branch. Note that @code{None} is a keyword, not a string, so
-you want to use @code{None} and not @code{"None"}.
-
-@item treeStableTimer
-The Scheduler will wait for this many seconds before starting the
-build. If new changes are made during this interval, the timer will be
-restarted, so in effect the build will only be started after this many
-seconds of inactivity following the last change.
-
-@item builderNames
-When the tree-stable-timer finally expires, builds will be started on
-these Builders. Each Builder gets a unique name: these strings must
-match.
-
-@end table
-
-@example
-from buildbot import scheduler
-quick = scheduler.Scheduler("quick", None, 60,
- ["quick-linux", "quick-netbsd"])
-full = scheduler.Scheduler("full", None, 5*60,
- ["full-linux", "full-netbsd", "full-OSX"])
-nightly = scheduler.Periodic("nightly", ["full-solaris"], 24*60*60)
-c['schedulers'] = [quick, full, nightly]
-@end example
-
-In this example, the two ``quick'' builds are triggered 60 seconds
-after the tree has been changed. The ``full'' builds do not run quite
-so quickly (they wait 5 minutes), so hopefully if the quick builds
-fail due to a missing file or really simple typo, the developer can
-discover and fix the problem before the full builds are started. Both
-Schedulers only pay attention to the default branch: any changes on
-other branches are ignored by these Schedulers. Each Scheduler
-triggers a different set of Builders, referenced by name.
-
-The third Scheduler in this example just runs the full Solaris build
-once per day. (Note that this Scheduler only lets you control the time
-between builds, not the absolute time-of-day of each Build, so this
-could easily wind up being a ``daily'' or ``every afternoon'' scheduler,
-depending upon when it was first activated.)
-
-@menu
-* Scheduler Types::
-* Build Dependencies::
-@end menu
-
-@node Scheduler Types, Build Dependencies, Listing Change Sources and Schedulers, Listing Change Sources and Schedulers
-@subsection Scheduler Types
-
-Here is a brief catalog of the available Scheduler types. All these
-Schedulers are classes in @code{buildbot.scheduler}, and the
-docstrings there are the best source of documentation on the arguments
-taken by each one.
-
-@table @code
-@item Scheduler
-This is the default Scheduler class. It follows exactly one branch,
-and starts a configurable tree-stable-timer after each change on that
-branch. When the timer expires, it starts a build on some set of
-Builders. The Scheduler accepts a @code{fileIsImportant} function
-which can be used to ignore some Changes if they do not affect any
-``important'' files.
-
-@item AnyBranchScheduler
-This scheduler uses a tree-stable-timer like the default one, but
-follows multiple branches at once. Each branch gets a separate timer.
-
-@item Dependent
-This scheduler watches an ``upstream'' Builder. When that Builder
-successfully builds a particular set of Changes, it triggers builds of
-the same code on a configured set of ``downstream'' builders. The next
-section (@pxref{Build Dependencies}) describes this scheduler in more
-detail.
-
-@item Periodic
-This simple scheduler just triggers a build every N seconds.
-
-@item Nightly
-This is a highly configurable periodic build scheduler, which triggers
-a build at particular times of day, week, month, or year. The
-configuration syntax is very similar to the well-known @code{crontab}
-format, in which you provide values for minute, hour, day, and month
-(some of which can be wildcards), and a build is triggered whenever
-the current time matches the given constraints. This can run a build
-every night, every morning, every weekend, alternate Thursdays, on
-your boss's birthday, etc. A brief sketch appears after this table.
-
-@item Try_Jobdir / Try_Userpass
-This scheduler allows developers to use the @code{buildbot try}
-command to trigger builds of code they have not yet committed. See
-@ref{try} for complete details.
-
-@end table
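-
-A brief sketch of the @code{Nightly} scheduler mentioned above (the
-Scheduler name is invented, and the exact argument names should be
-checked against the class's docstring):
-
-@example
-from buildbot import scheduler
-# start a build of the "full-solaris" Builder every day at 03:00
-nightly = scheduler.Nightly("nightly-solaris", ["full-solaris"],
-                            hour=3, minute=0)
-c['schedulers'].append(nightly)
-@end example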
-
-@node Build Dependencies, , Scheduler Types, Listing Change Sources and Schedulers
-@subsection Build Dependencies
-
-@cindex Dependent
-@cindex Dependencies
-
-It is common to wind up with one kind of build which should only be
-performed if the same source code was successfully handled by some
-other kind of build first. An example might be a packaging step: you
-might only want to produce .deb or RPM packages from a tree that was
-known to compile successfully and pass all unit tests. You could put
-the packaging step in the same Build as the compile and testing steps,
-but there might be other reasons to not do this (in particular you
-might have several Builders worth of compiles/tests, but only wish to
-do the packaging once). Another example is if you want to skip the
-``full'' builds after a failing ``quick'' build of the same source
-code. Or, if one Build creates a product (like a compiled library)
-that is used by some other Builder, you'd want to make sure the
-consuming Build is run @emph{after} the producing one.
-
-You can use @code{Dependencies} to express this relationship to the
-Buildbot. There is a special kind of Scheduler named
-@code{scheduler.Dependent} that will watch an ``upstream'' Scheduler
-for builds to complete successfully (on all of its Builders). Each
-time that happens, the same source code (i.e. the same
-@code{SourceStamp}) will be used to start a new set of builds, on a
-different set of Builders. This ``downstream'' scheduler doesn't pay
-attention to Changes at all, it only pays attention to the upstream
-scheduler.
-
-If the SourceStamp fails on any of the Builders in the upstream set,
-the downstream builds will not fire.
-
-@example
-from buildbot import scheduler
-tests = scheduler.Scheduler("tests", None, 5*60,
- ["full-linux", "full-netbsd", "full-OSX"])
-package = scheduler.Dependent("package",
- tests, # upstream scheduler
- ["make-tarball", "make-deb", "make-rpm"])
-c['schedulers'] = [tests, package]
-@end example
-
-Note that @code{Dependent}'s upstream scheduler argument is given as a
-@code{Scheduler} @emph{instance}, not a name. This makes it impossible
-to create circular dependencies in the config file.
-
-
-@node Setting the slaveport, Buildslave Specifiers, Listing Change Sources and Schedulers, Configuration
-@section Setting the slaveport
-
-@cindex c['slavePortnum']
-
-The buildmaster will listen on a TCP port of your choosing for
-connections from buildslaves. It can also use this port for
-connections from remote Change Sources, status clients, and debug
-tools. This port should be visible to the outside world, and you'll
-need to tell your buildslave admins about your choice.
-
-It does not matter which port you pick, as long as it is externally
-visible; however, you should probably use something larger than 1024,
-since most operating systems don't allow non-root processes to bind to
-low-numbered ports. If your buildmaster is behind a firewall or a NAT
-box of some sort, you may have to configure your firewall to permit
-inbound connections to this port.
-
-@example
-c['slavePortnum'] = 10000
-@end example
-
-@code{c['slavePortnum']} is a @emph{strports} specification string,
-defined in the @code{twisted.application.strports} module (try
-@command{pydoc twisted.application.strports} to get documentation on
-the format). This means that you can have the buildmaster listen on a
-localhost-only port by doing:
-
-@example
-c['slavePortnum'] = "tcp:10000:interface=127.0.0.1"
-@end example
-
-This might be useful if you only run buildslaves on the same machine,
-and they are all configured to contact the buildmaster at
-@code{localhost:10000}.
-
-
-@node Buildslave Specifiers, Defining Builders, Setting the slaveport, Configuration
-@section Buildslave Specifiers
-
-@cindex c['bots']
-
-The @code{c['bots']} key is a list of known buildslaves. Each
-buildslave is defined by a tuple of (slavename, slavepassword). These
-are the same two values that need to be provided to the buildslave
-administrator when they create the buildslave.
-
-@example
-c['bots'] = [('bot-solaris', 'solarispasswd'),
- ('bot-bsd', 'bsdpasswd'),
- ]
-@end example
-
-The slavenames must be unique, of course. The password exists to
-prevent evildoers from interfering with the buildbot by inserting
-their own (broken) buildslaves into the system and thus displacing the
-real ones.
-
-Buildslaves with an unrecognized slavename or a non-matching password
-will be rejected when they attempt to connect, and a message
-describing the problem will be put in the log file (see @ref{Logfiles}).
-
-
-@node Defining Builders, Defining Status Targets, Buildslave Specifiers, Configuration
-@section Defining Builders
-
-@cindex c['builders']
-
-The @code{c['builders']} key is a list of dictionaries which specify
-the Builders. The Buildmaster runs a collection of Builders, each of
-which handles a single type of build (e.g. full versus quick), on a
-single build slave. A Buildbot which makes sure that the latest code
-(``HEAD'') compiles correctly across four separate architectures will
-have four Builders, each performing the same build but on different
-slaves (one per platform).
-
-Each Builder gets a separate column in the waterfall display. In
-general, each Builder runs independently (although various kinds of
-interlocks can cause one Builder to have an effect on another).
-
-Each Builder specification dictionary has several required keys:
-
-@table @code
-@item name
-This specifies the Builder's name, which is used in status
-reports.
-
-@item slavename
-This specifies which buildslave will be used by this Builder.
-@code{slavename} must appear in the @code{c['bots']} list. Each
-buildslave can accommodate multiple Builders.
-
-@item slavenames
-If you provide @code{slavenames} instead of @code{slavename}, you can
-give a list of buildslaves which are capable of running this Builder.
-If multiple buildslaves are available for any given Builder, you will
-have some measure of redundancy: in case one slave goes offline, the
-others can still keep the Builder working. In addition, multiple
-buildslaves will allow multiple simultaneous builds for the same
-Builder, which might be useful if you have a lot of forced or ``try''
-builds taking place.
-
-If you use this feature, it is important to make sure that the
-buildslaves are all, in fact, capable of running the given build. The
-slave hosts should be configured similarly, otherwise you will spend a
-lot of time trying (unsuccessfully) to reproduce a failure that only
-occurs on some of the buildslaves and not the others. Different
-platforms, operating systems, versions of major programs or libraries,
-all these things mean you should use separate Builders.
-
-@item builddir
-This specifies the name of a subdirectory (under the base directory)
-in which everything related to this builder will be placed. On the
-buildmaster, this holds build status information. On the buildslave,
-this is where checkouts, compiles, and tests are run.
-
-@item factory
-This is a @code{buildbot.process.factory.BuildFactory} instance which
-controls how the build is performed. Full details appear in their own
-chapter, @xref{Build Process}. Parameters like the location of the CVS
-repository and the compile-time options used for the build are
-generally provided as arguments to the factory's constructor.
-
-@end table
-
-Other optional keys may be set on each Builder:
-
-@table @code
-
-@item category
-If provided, this is a string that identifies a category for the
-builder to be a part of. Status clients can limit themselves to a
-subset of the available categories. A common use for this is to add
-new builders to your setup (for a new module, or for a new buildslave)
-that do not work correctly yet, before integrating them with the
-active builders. You can put these new builders in a test
-category, make your main status clients ignore them, and have only
-private status clients pick them up. As soon as they work, you can
-move them over to the active category.
-
-@end table
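-
-Putting these keys together, a hedged example of a single Builder
-specification (the names, repository location, and commands are all
-invented; the slavename would have to appear in @code{c['bots']}):
-
-@example
-from buildbot.process import step, factory
-from buildbot.process.factory import s
-
-f = factory.BuildFactory([
-    s(step.CVS, cvsroot=":pserver:anon@@cvs.example.org:/cvsroot",
-      cvsmodule="project", mode="copy"),
-    s(step.Compile, command=["make", "all"]),
-    s(step.Test, command=["make", "check"]),
-    ])
-
-b1 = @{'name': "full-linux",
-      'slavename': "bot-linux",
-      'builddir': "full-linux",
-      'factory': f,
-      @}
-c['builders'] = [b1]
-@end example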
-
-
-@node Defining Status Targets, Debug options, Defining Builders, Configuration
-@section Defining Status Targets
-
-The Buildmaster has a variety of ways to present build status to
-various users. Each such delivery method is a ``Status Target'' object
-in the configuration's @code{status} list. To add status targets, you
-just append more objects to this list:
-
-@cindex c['status']
-
-@example
-c['status'] = []
-
-from buildbot.status import html
-c['status'].append(html.Waterfall(http_port=8010))
-
-from buildbot.status import mail
-m = mail.MailNotifier(fromaddr="buildbot@@localhost",
- extraRecipients=["builds@@lists.example.com"],
- sendToInterestedUsers=False)
-c['status'].append(m)
-
-from buildbot.status import words
-c['status'].append(words.IRC(host="irc.example.com", nick="bb",
- channels=["#example"]))
-@end example
-
-Status delivery has its own chapter, @xref{Status Delivery}, in which
-all the built-in status targets are documented.
-
-
-@node Debug options, , Defining Status Targets, Configuration
-@section Debug options
-
-
-@cindex c['debugPassword']
-If you set @code{c['debugPassword']}, then you can connect to the
-buildmaster with the diagnostic tool launched by @code{buildbot
-debugclient MASTER:PORT}. From this tool, you can reload the config
-file, manually force builds, and inject changes, which may be useful
-for testing your buildmaster without actually committing changes to
-your repository (or before you have the Change Sources set up). The
-debug tool uses the same port number as the slaves do:
-@code{c['slavePortnum']}, and is authenticated with this password.
-
-@example
-c['debugPassword'] = "debugpassword"
-@end example
-
-@cindex c['manhole']
-If you set @code{c['manhole']} to an instance of the
-@code{buildbot.master.Manhole} class, you can telnet into the
-buildmaster and get an interactive Python shell, which may be useful
-for debugging buildbot internals. It is probably only useful for
-buildbot developers. It exposes full access to the buildmaster's
-account (including the ability to modify and delete files), so it
-should not be enabled with a weak or easily guessable password.
-
-The @code{Manhole} instance can be configured to listen on a specific
-port. You may wish to have this listening port bind to the loopback
-interface (sometimes known as ``lo0'', ``localhost'', or 127.0.0.1) to
-restrict access to clients which are running on the same host.
-
-@example
-from buildbot.master import Manhole
-c['manhole'] = Manhole("tcp:9999:interface=127.0.0.1", "admin", "password")
-@end example
-
-To have the @code{Manhole} listen on all interfaces, use
-@code{"tcp:9999"}. This port specification uses
-@code{twisted.application.strports}, so you can make it listen on SSL
-or even UNIX-domain sockets if you want.
-
-
-@node Getting Source Code Changes, Build Process, Configuration, Top
-@chapter Getting Source Code Changes
-
-The most common way to use the Buildbot is centered around the idea of
-@code{Source Trees}: a directory tree filled with source code of some form
-which can be compiled and/or tested. Some projects use languages that don't
-involve any compilation step: nevertheless there may be a @code{build} phase
-where files are copied or rearranged into a form that is suitable for
-installation. Some projects do not have unit tests, and the Buildbot is
-merely helping to make sure that the sources can compile correctly. But in
-all of these cases, the thing-being-tested is a single source tree.
-
-A Version Control System maintains a source tree, and tells the
-buildmaster when it changes. The first step of each Build is typically
-to acquire a copy of some version of this tree.
-
-This chapter describes how the Buildbot learns about what Changes have
-occurred. For more information on VC systems and Changes, see
-@ref{Version Control Systems}.
-
-
-@menu
-* Change Sources::
-@end menu
-
-
-
-@node Change Sources, , Getting Source Code Changes, Getting Source Code Changes
-@section Change Sources
-
-@c TODO: rework this, the one-buildmaster-one-tree thing isn't quite
-@c so narrow-minded anymore
-
-Each Buildmaster watches a single source tree. Changes can be provided
-by a variety of ChangeSource types; however, any given project will
-typically have only a single ChangeSource active. This section
-provides a description of all available ChangeSource types and
-explains how to set up each of them.
-
-There are a variety of ChangeSources available, some of which are
-meant to be used in conjunction with other tools to deliver Change
-events from the VC repository to the buildmaster.
-
-@itemize @bullet
-
-@item CVSToys
-This ChangeSource opens a TCP connection from the buildmaster to a
-waiting FreshCVS daemon that lives on the repository machine, and
-subscribes to hear about Changes.
-
-@item MaildirSource
-This one watches a local maildir-format inbox for email sent out by
-the repository when a change is made. When a message arrives, it is
-parsed to create the Change object. A variety of parsing functions are
-available to accommodate different email-sending tools.
-
-@item PBChangeSource
-This ChangeSource listens on a local TCP socket for inbound
-connections from a separate tool. Usually, this tool would be run on
-the VC repository machine in a commit hook. It is expected to connect
-to the TCP socket and send a Change message over the network
-connection. The @command{buildbot sendchange} command is one example
-of a tool that knows how to send these messages, so you can write a
-commit script for your VC system that calls it to deliver the Change.
-There are other tools in the contrib/ directory that use the same
-protocol.
-
-@end itemize
-
-As a quick guide, here is a list of VC systems and the ChangeSources
-that might be useful with them. All of these ChangeSources are in the
-@code{buildbot.changes} module.
-
-@table @code
-@item CVS
-
-@itemize @bullet
-@item freshcvs.FreshCVSSource (connected via TCP to the freshcvs daemon)
-@item mail.FCMaildirSource (watching for email sent by a freshcvs daemon)
-@item mail.BonsaiMaildirSource (watching for email sent by Bonsai)
-@item mail.SyncmailMaildirSource (watching for email sent by syncmail)
-@item pb.PBChangeSource (listening for connections from @code{buildbot
-sendchange} run in a loginfo script)
-@item pb.PBChangeSource (listening for connections from a long-running
-@code{contrib/viewcvspoll.py} polling process which examines the ViewCVS
-database directly)
-@end itemize
-
-@item SVN
-@itemize @bullet
-@item pb.PBChangeSource (listening for connections from
-@code{contrib/svn_buildbot.py} run in a postcommit script)
-@item pb.PBChangeSource (listening for connections from a long-running
-@code{contrib/svn_watcher.py} or @code{contrib/svnpoller.py} polling
-process)
-@end itemize
-
-@item Darcs
-@itemize @bullet
-@item pb.PBChangeSource (listening for connections from @code{buildbot
-sendchange} in a commit script)
-@end itemize
-
-@item Mercurial
-@itemize @bullet
-@item pb.PBChangeSource (listening for connections from
-@code{contrib/hg_buildbot.py} run in an 'incoming' hook)
-@end itemize
-
-@item Arch/Bazaar
-@itemize @bullet
-@item pb.PBChangeSource (listening for connections from
-@code{contrib/arch_buildbot.py} run in a commit hook)
-@end itemize
-
-@end table
-
-All VC systems can be driven by a PBChangeSource and the
-@code{buildbot sendchange} tool run from some form of commit script.
-If you write an email parsing function, they can also all be driven by
-a suitable @code{MaildirSource}.
-
-
-@menu
-* Choosing ChangeSources::
-* CVSToys - PBService::
-* CVSToys - mail notification::
-* Other mail notification ChangeSources::
-* PBChangeSource::
-@end menu
-
-@node Choosing ChangeSources, CVSToys - PBService, Change Sources, Change Sources
-@subsection Choosing ChangeSources
-
-The @code{master.cfg} configuration file has a dictionary key named
-@code{BuildmasterConfig['sources']}, which holds a list of
-@code{IChangeSource} objects. The config file will typically create an
-object from one of the classes described below and stuff it into the
-list.
-
-@example
-s = FreshCVSSourceNewcred(host="host", port=4519,
- user="alice", passwd="secret",
- prefix="Twisted")
-BuildmasterConfig['sources'] = [s]
-@end example
-
-Each source tree has a nominal @code{top}. Each Change has a list of
-filenames, which are all relative to this top location. The
-ChangeSource is responsible for doing whatever is necessary to
-accomplish this. Most sources have a @code{prefix} argument: a partial
-pathname which is stripped from the front of all filenames provided to
-that @code{ChangeSource}. Files which are outside this sub-tree are
-ignored by the changesource: it does not generate Changes for those
-files.
-
-
-@node CVSToys - PBService, CVSToys - mail notification, Choosing ChangeSources, Change Sources
-@subsection CVSToys - PBService
-
-The @uref{http://purl.net/net/CVSToys, CVSToys} package provides a
-server which runs on the machine that hosts the CVS repository it
-watches. It has a variety of ways to distribute commit notifications,
-and offers a flexible regexp-based way to filter out uninteresting
-changes. One of the notification options is named @code{PBService} and
-works by listening on a TCP port for clients. These clients subscribe
-to hear about commit notifications.
-
-The buildmaster has a CVSToys-compatible @code{PBService} client built
-in. There are two versions of it, one for old versions of CVSToys
-(1.0.9 and earlier) which used the @code{oldcred} authentication
-framework, and one for newer versions (1.0.10 and later) which use
-@code{newcred}. Both are classes in the
-@code{buildbot.changes.freshcvs} package.
-
-@code{FreshCVSSourceNewcred} objects are created with the following
-parameters:
-
-@table @samp
-
-@item @code{host} and @code{port}
-these specify where the CVSToys server can be reached
-
-@item @code{user} and @code{passwd}
-these specify the login information for the CVSToys server
-(@code{freshcvs}). These must match the server's values, which are
-defined in the @code{freshCfg} configuration file (which lives in the
-CVSROOT directory of the repository).
-
-@item @code{prefix}
-this is the prefix to be found and stripped from filenames delivered
-by the CVSToys server. Most projects live in sub-directories of the
-main repository, as siblings of the CVSROOT sub-directory, so
-typically this prefix is set to that top sub-directory name.
-
-@end table
-
-@heading Example
-
-To set up the freshCVS server, add a statement like the following to
-your @file{freshCfg} file:
-
-@example
-pb = ConfigurationSet([
- (None, None, None, PBService(userpass=('foo', 'bar'), port=4519)),
- ])
-@end example
-
-This will announce all changes to a client which connects to port 4519
-using a username of 'foo' and a password of 'bar'.
-
-Then add a clause like this to your buildmaster's @file{master.cfg}:
-
-@example
-BuildmasterConfig['sources'] = [FreshCVSSource("cvs.example.com", 4519,
- "foo", "bar",
- prefix="glib/")]
-@end example
-
-where "cvs.example.com" is the host that is running the FreshCVS daemon, and
-"glib" is the top-level directory (relative to the repository's root) where
-all your source code lives. Most sites keep one or more projects in the
-same repository (along with CVSROOT/ to hold admin files like loginfo and
-freshCfg); the prefix= argument tells the buildmaster to ignore everything
-outside that directory, and to strip that common prefix from all pathnames
-it handles.
-
-
-
-@node CVSToys - mail notification, Other mail notification ChangeSources, CVSToys - PBService, Change Sources
-@subsection CVSToys - mail notification
-
-CVSToys also provides a @code{MailNotification} action which will send
-email to a list of recipients for each commit. This tends to work
-better than using @code{/bin/mail} from within the CVSROOT/loginfo
-file directly, as CVSToys will batch together all files changed during
-the same CVS invocation, and can provide more information (like
-creating a ViewCVS URL for each file changed).
-
-The Buildbot's @code{FCMaildirSource} is a ChangeSource which knows
-how to parse these CVSToys messages and turn them into Change objects.
-It watches a Maildir for new messages. The usual installation
-process looks like:
-
-@enumerate
-@item
-Create a mailing list, @code{projectname-commits}.
-@item
-In CVSToys' freshCfg file, use a @code{MailNotification} action to
-send commit mail to this mailing list.
-@item
-Subscribe the buildbot user to the mailing list.
-@item
-Configure your .qmail or .forward file to deliver these messages into
-a maildir.
-@item
-In the Buildbot's master.cfg file, use a @code{FCMaildirSource} to
-watch the maildir for commit messages.
-@end enumerate
-
-The @code{FCMaildirSource} is created with two parameters: the
-directory name of the maildir root, and the prefix to strip.
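-
-A minimal sketch (the maildir path and prefix are invented):
-
-@example
-from buildbot.changes.mail import FCMaildirSource
-c['sources'].append(FCMaildirSource("/home/buildbot/Maildir",
-                                    prefix="project"))
-@end example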
-
-@node Other mail notification ChangeSources, PBChangeSource, CVSToys - mail notification, Change Sources
-@subsection Other mail notification ChangeSources
-
-There are other types of maildir-watching ChangeSources, which only
-differ in the function used to parse the message body.
-
-@code{SyncmailMaildirSource} knows how to parse the message format
-used in mail sent by Syncmail.
-
-@code{BonsaiMaildirSource} parses messages sent out by Bonsai.
-
-@node PBChangeSource, , Other mail notification ChangeSources, Change Sources
-@subsection PBChangeSource
-
-The last kind of ChangeSource actually listens on a TCP port for
-clients to connect and push change notices @emph{into} the
-Buildmaster. This is used by the built-in @code{buildbot sendchange}
-notification tool, as well as the VC-specific
-@file{contrib/svn_buildbot.py} and @file{contrib/arch_buildbot.py}
-tools. These tools are run by the repository (in a commit hook
-script), and connect to the buildmaster directly each time a file is
-committed. This is also useful for creating new kinds of change sources
-that work on a @code{push} model instead of some kind of subscription
-scheme, for example a script which is run out of an email .forward
-file.
-
-This ChangeSource can be configured to listen on its own TCP port, or
-it can share the port that the buildmaster is already using for the
-buildslaves to connect. (This is possible because the
-@code{PBChangeSource} uses the same protocol as the buildslaves, and
-they can be distinguished by the @code{username} attribute used when
-the initial connection is established). It might be useful to have it
-listen on a different port if, for example, you wanted to establish
-different firewall rules for that port. You could allow only the SVN
-repository machine access to the @code{PBChangeSource} port, while
-allowing only the buildslave machines access to the slave port. Or you
-could just expose one port and run everything over it. @emph{Note:
-this feature is not yet implemented, the PBChangeSource will always
-share the slave port and will always have a @code{user} name of
-@code{change}, and a passwd of @code{changepw}. These limitations will
-be removed in the future.}.
-
-
-The @code{PBChangeSource} is created with the following
-arguments:
-
-@table @samp
-@item @code{port}
-which port to listen on. If @code{None} (which is the default), it
-shares the port used for buildslave connections. @emph{Not
-Implemented, always set to @code{None}}.
-
-@item @code{user} and @code{passwd}
-the user/passwd account information that the client program must use
-to connect. Defaults to @code{change} and @code{changepw}. @emph{Not
-Implemented, @code{user} is currently always set to @code{change},
-@code{passwd} is always set to @code{changepw}}.
-
-@item @code{prefix}
-the prefix to be found and stripped from filenames delivered over the
-connection.
-@end table
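-
-Given the current limitations described above, a minimal
-@file{master.cfg} entry might look like the following sketch (the
-prefix is purely illustrative):
-
-@example
-from buildbot.changes.pb import PBChangeSource
-
-c['sources'] = [PBChangeSource(prefix="trunk")]
-@end example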
-
-
-@node Build Process, Status Delivery, Getting Source Code Changes, Top
-@chapter Build Process
-
-A @code{Build} object is responsible for actually performing a build.
-It gets access to a remote @code{SlaveBuilder} where it may run
-commands, and a @code{BuildStatus} object where it must emit status
-events. The @code{Build} is created by the Builder's
-@code{BuildFactory}.
-
-The default @code{Build} class is made up of a fixed sequence of
-@code{BuildSteps}, executed one after another until all are complete
-(or one of them indicates that the build should be halted early). The
-default @code{BuildFactory} creates instances of this @code{Build}
-class with a list of @code{BuildSteps}, so the basic way to configure
-the build is to provide a list of @code{BuildSteps} to your
-@code{BuildFactory}.
-
-More complicated @code{Build} subclasses can make other decisions:
-execute some steps only if certain files were changed, or if certain
-previous steps passed or failed. The base class has been written to
-allow users to express basic control flow without writing code, but
-you can always subclass and customize to achieve more specialized
-behavior.
-
-@menu
-* Build Steps::
-* Interlocks::
-* Build Factories::
-@end menu
-
-@node Build Steps, Interlocks, Build Process, Build Process
-@section Build Steps
-
-@code{BuildStep}s are usually specified in the buildmaster's
-configuration file, in a list of ``step specifications'' that is used
-to create the @code{BuildFactory}. These ``step specifications'' are
-not actual steps, but rather a tuple of the @code{BuildStep} subclass
-to be created and a dictionary of arguments. (The actual
-@code{BuildStep} instances are not created until the Build is started,
-so that each Build gets an independent copy of each BuildStep.) There
-is a convenience function named ``@code{s}'' in the
-@code{buildbot.process.factory} module for creating these
-specification tuples. It allows you to create a
-@code{BuildFactory}-ready list like this:
-
-@example
-from buildbot.process import step, factory
-from buildbot.process.factory import s
-
-steps = [s(step.SVN, svnurl="http://svn.example.org/Trunk/"),
- s(step.ShellCommand, command=["make", "all"]),
- s(step.ShellCommand, command=["make", "test"]),
- ]
-f = factory.BuildFactory(steps)
-@end example
-
-The rest of this section lists all the standard BuildStep objects
-available for use in a Build, and the parameters which can be used to
-control each.
-
-@menu
-* Common Parameters::
-* Source Checkout::
-* ShellCommand::
-* Simple ShellCommand Subclasses::
-@end menu
-
-@node Common Parameters, Source Checkout, Build Steps, Build Steps
-@subsection Common Parameters
-
-The standard @code{Build} runs a series of @code{BuildStep}s in order,
-only stopping when it runs out of steps or if one of them requests
-that the build be halted. It collects status information from each one
-to create an overall build status (of SUCCESS, WARNINGS, or FAILURE).
-
-All BuildSteps accept some common parameters. Some of these control
-how their individual status affects the overall build. Others are used
-to specify which @code{Locks} (@pxref{Interlocks}) should be
-acquired before allowing the step to run.
-
-Arguments common to all @code{BuildStep} subclasses:
-
-
-@table @code
-@item name
-the name used to describe the step on the status display. It is also
-used to give a name to any LogFiles created by this step.
-
-@item haltOnFailure
-if True, a FAILURE of this build step will cause the build to halt
-immediately with an overall result of FAILURE.
-
-@item flunkOnWarnings
-when True, a WARNINGS or FAILURE of this build step will mark the
-overall build as FAILURE. The remaining steps will still be executed.
-
-@item flunkOnFailure
-when True, a FAILURE of this build step will mark the overall build as
-a FAILURE. The remaining steps will still be executed.
-
-@item warnOnWarnings
-when True, a WARNINGS or FAILURE of this build step will mark the
-overall build as having WARNINGS. The remaining steps will still be
-executed.
-
-@item warnOnFailure
-when True, a FAILURE of this build step will mark the overall build as
-having WARNINGS. The remaining steps will still be executed.
-
-@item locks
-a list of Locks (instances of @code{buildbot.locks.SlaveLock} or
-@code{buildbot.locks.MasterLock}) that should be acquired before
-starting this Step. The Locks will be released when the step is
-complete. Note that this is a list of actual Lock instances, not
-names. Also note that all Locks must have unique names.
-
-@end table
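-
-For example, using the @code{s} convenience function shown earlier, a
-documentation-building step which should not stop the build or mark it
-as failed, but should flag warnings, might be written roughly like this
-(the command is only a placeholder):
-
-@example
-s(step.ShellCommand, command=["make", "docs"],
-  name="docs",
-  haltOnFailure=False, flunkOnFailure=False, warnOnFailure=True)
-@end example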
-
-
-@node Source Checkout, ShellCommand, Common Parameters, Build Steps
-@subsection Source Checkout
-
-The first step of any build is typically to acquire the source code
-from which the build will be performed. There are several classes to
-handle this, one for each of the different source control systems that
-Buildbot knows about. For a description of how Buildbot treats source
-control in general, see @ref{Version Control Systems}.
-
-All source checkout steps accept some common parameters to control how
-they get the sources and where they should be placed. The remaining
-per-VC-system parameters are mostly to specify where exactly the
-sources are coming from.
-
-@table @code
-@item mode
-
-a string describing the kind of VC operation that is desired. Defaults
-to @code{update}.
-
-@table @code
-@item update
-specifies that the CVS checkout/update should be performed directly
-into the workdir. Each build is performed in the same directory,
-allowing for incremental builds. This minimizes disk space, bandwidth,
-and CPU time. However, it may encounter problems if the build process
-does not handle dependencies properly (sometimes you must do a ``clean
-build'' to make sure everything gets compiled), or if source files are
-deleted but generated files can influence test behavior (e.g. python's
-.pyc files), or when source directories are deleted but generated
-files prevent CVS from removing them. Builds ought to be correct
-regardless of whether they are done ``from scratch'' or incrementally,
-but it is useful to test both kinds: this mode exercises the
-incremental-build style.
-
-@item copy
-specifies that the CVS workspace should be maintained in a separate
-directory (called the 'copydir'), using checkout or update as
-necessary. For each build, a new workdir is created with a copy of the
-source tree (rm -rf workdir; cp -r copydir workdir). This doubles the
-disk space required, but keeps the bandwidth low (update instead of a
-full checkout). A full 'clean' build is performed each time. This
-avoids any generated-file build problems, but is still occasionally
-vulnerable to CVS problems such as a repository being manually
-rearranged, causing CVS errors on update which are not an issue with a
-full checkout.
-
-@c TODO: something is screwy about this, revisit. Is it the source
-@c directory or the working directory that is deleted each time?
-
-@item clobber
-specifies that the working directory should be deleted each time,
-necessitating a full checkout for each build. This ensures a clean
-build off a complete checkout, avoiding any of the problems described
-above. This mode exercises the ``from-scratch'' build style.
-
-@item export
-this is like @code{clobber}, except that the 'cvs export' command is
-used to create the working directory. This command removes all CVS
-metadata files (the CVS/ directories) from the tree, which is
-sometimes useful for creating source tarballs (to avoid including the
-metadata in the tar file).
-@end table
-
-@item workdir
-like all Steps, this indicates the directory where the build will take
-place. Source Steps are special in that they perform some operations
-outside of the workdir (like creating the workdir itself).
-
-@item alwaysUseLatest
-if True, bypass the usual ``update to the last Change'' behavior, and
-always update to the latest changes instead.
-
-@item retry
-If set, this specifies a tuple of @code{(delay, repeats)} which means
-that when a full VC checkout fails, it should be retried up to
-@var{repeats} times, waiting @var{delay} seconds between attempts. If
-you don't provide this, it defaults to @code{None}, which means VC
-operations should not be retried. This is provided to make life easier
-for buildslaves which are stuck behind poor network connections.
-
-@end table
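-
-For example, a checkout step which updates in place and retries
-transient network failures might be sketched as follows (the repository
-URL is fictitious, and the retry values are arbitrary):
-
-@example
-s(step.SVN, svnurl="http://svn.example.org/Trunk/",
-  mode="update", retry=(10, 2))
-@end example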
-
-
-My habit as a developer is to do a @code{cvs update} and @code{make} each
-morning. Problems can occur, either because of bad code being checked in, or
-by incomplete dependencies causing a partial rebuild to fail where a
-complete from-scratch build might succeed. A quick Builder which emulates
-this incremental-build behavior would use the @code{mode='update'}
-setting.
-
-On the other hand, other kinds of dependency problems can cause a clean
-build to fail where a partial build might succeed. This frequently results
-from a link step that depends upon an object file that was removed from a
-later version of the tree: in the partial tree, the object file is still
-around (even though the Makefiles no longer know how to create it).
-
-``Official'' builds (traceable builds performed from a known set of
-source revisions) are always done as clean builds, to make sure they are
-not influenced by any uncontrolled factors (like leftover files from a
-previous build). A ``full'' Builder which behaves this way would want
-to use the @code{mode='clobber'} setting.
-
-Each VC system has a corresponding source checkout class: their
-arguments are described on the following pages.
-
-
-@menu
-* CVS::
-* SVN::
-* Darcs::
-* Mercurial::
-* Arch::
-* Bazaar::
-* P4Sync::
-@end menu
-
-@node CVS, SVN, Source Checkout, Source Checkout
-@subsubsection CVS
-
-@cindex CVS Checkout
-
-The @code{CVS} build step performs a @uref{http://www.nongnu.org/cvs/,
-CVS} checkout or update. It takes the following arguments:
-
-@table @code
-@item cvsroot
-(required): specify the CVSROOT value, which points to a CVS
-repository, probably on a remote machine. For example, the cvsroot
-value you would use to get a copy of the Buildbot source code is
-@code{:pserver:anonymous@@cvs.sourceforge.net:/cvsroot/buildbot}
-
-@item cvsmodule
-(required): specify the cvs @code{module}, which is generally a
-subdirectory of the CVSROOT. The cvsmodule for the Buildbot source
-code is @code{buildbot}.
-
-@item branch
-a string which will be used in a @code{-r} argument. This is most
-useful for specifying a branch to work on. Defaults to @code{HEAD}.
-
-@item global_options
-a list of flags to be put before the verb in the CVS command.
-
-@item checkoutDelay
-if set, the number of seconds to put between the timestamp of the last
-known Change and the value used for the @code{-D} option. Defaults to
-half of the parent Build's treeStableTimer.
-
-@end table
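-
-For example, a CVS checkout of the Buildbot source itself (using the
-cvsroot and cvsmodule values quoted above) might be specified as:
-
-@example
-s(step.CVS,
-  cvsroot=":pserver:anonymous@@cvs.sourceforge.net:/cvsroot/buildbot",
-  cvsmodule="buildbot",
-  mode="copy")
-@end example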
-
-
-@node SVN, Darcs, CVS, Source Checkout
-@subsubsection SVN
-
-@cindex SVN Checkout
-
-The @code{SVN} build step performs a
-@uref{http://subversion.tigris.org, Subversion} checkout or update.
-There are two basic ways of setting up the checkout step, depending
-upon whether you are using multiple branches or not.
-
-If all of your builds use the same branch, then you should create the
-@code{SVN} step with the @code{svnurl} argument:
-
-@table @code
-@item svnurl
-(required): this specifies the @code{URL} argument that will be given
-to the @code{svn checkout} command. It dictates both where the
-repository is located and which sub-tree should be extracted. In this
-respect, it is like a combination of the CVS @code{cvsroot} and
-@code{cvsmodule} arguments. For example, if you are using a remote
-Subversion repository which is accessible through HTTP at a URL of
-@code{http://svn.example.com/repos}, and you wanted to check out the
-@code{trunk/calc} sub-tree, you would use
-@code{svnurl="http://svn.example.com/repos/trunk/calc"} as an argument
-to your @code{SVN} step.
-@end table
-
-If, on the other hand, you are building from multiple branches, then
-you should create the @code{SVN} step with the @code{baseURL} and
-@code{defaultBranch} arguments instead:
-
-@table @code
-@item baseURL
-(required): this specifies the base repository URL, to which a branch
-name will be appended. It should probably end in a slash.
-
-@item defaultBranch
-this specifies the name of the branch to use when a Build does not
-provide one of its own. This will be appended to @code{baseURL} to
-create the string that will be passed to the @code{svn checkout}
-command.
-@end table
-
-If you are using branches, you must also make sure your
-@code{ChangeSource} will report the correct branch names.
-
-@heading branch example
-
-Let's suppose that the ``MyProject'' repository uses branches for the
-trunk, for various users' individual development efforts, and for
-several new features that will require some amount of work (involving
-multiple developers) before they are ready to merge onto the trunk.
-Such a repository might be organized as follows:
-
-@example
-svn://svn.example.org/MyProject/trunk
-svn://svn.example.org/MyProject/branches/User1/foo
-svn://svn.example.org/MyProject/branches/User1/bar
-svn://svn.example.org/MyProject/branches/User2/baz
-svn://svn.example.org/MyProject/features/newthing
-svn://svn.example.org/MyProject/features/otherthing
-@end example
-
-Further assume that we want the Buildbot to run tests against the
-trunk and against all the feature branches (i.e., do a
-checkout/compile/build of branch X when a file has been changed on
-branch X, when X is in the set [trunk, features/newthing,
-features/otherthing]). We do not want the Buildbot to automatically
-build any of the user branches, but it should be willing to build a
-user branch when explicitly requested (most likely by the user who
-owns that branch).
-
-There are three things that need to be set up to accommodate this
-system. The first is a ChangeSource that is capable of identifying the
-branch which owns any given file. This depends upon a user-supplied
-function, in an external program that runs in the SVN commit hook and
-connects to the buildmaster's @code{PBChangeSource} over a TCP
-connection. (you can use the ``@code{buildbot sendchange}'' utility
-for this purpose, but you will still need an external program to
-decide what value should be passed to the @code{--branch=} argument).
-For example, a change to a file with the SVN url of
-``svn://svn.example.org/MyProject/features/newthing/src/foo.c'' should
-be broken down into a Change instance with
-@code{branch='features/newthing'} and @code{file='src/foo.c'}.
-
-The second piece is an @code{AnyBranchScheduler} which will pay
-attention to the desired branches. It will not pay attention to the
-user branches, so it will not automatically start builds in response
-to changes there. The AnyBranchScheduler class requires you to
-explicitly list all the branches you want it to use, but it would not
-be difficult to write a subclass which used
-@code{branch.startswith('features/')} to remove the need for this
-explicit list. Or, if you want to build user branches too, you can use
-AnyBranchScheduler with @code{branches=None} to indicate that you want
-it to pay attention to all branches.
-
-The third piece is an @code{SVN} checkout step that is configured to
-handle the branches correctly, with a @code{baseURL} value that
-matches the way the ChangeSource splits each file's URL into base,
-branch, and file.
-
-@example
-from buildbot.changes.pb import PBChangeSource
-from buildbot.scheduler import AnyBranchScheduler
-from buildbot.process import step, factory
-from buildbot.process.factory import s
-
-c['sources'] = [PBChangeSource()]
-s1 = AnyBranchScheduler('main',
- ['trunk', 'features/newthing', 'features/otherthing'],
- 10*60, ['test-i386', 'test-ppc'])
-c['schedulers'] = [s1]
-source = s(step.SVN, mode='update',
- baseURL='svn://svn.example.org/MyProject/',
- defaultBranch='trunk')
-f = factory.BuildFactory([source,
- s(step.Compile, command="make all"),
- s(step.Test, command="make test")])
-c['builders'] = [
- @{'name':'test-i386', 'slavename':'bot-i386', 'builddir':'test-i386',
- 'factory':f @},
- @{'name':'test-ppc', 'slavename':'bot-ppc', 'builddir':'test-ppc',
- 'factory':f @},
- ]
-@end example
-
-In this example, when a change arrives with a @code{branch} attribute
-of ``trunk'', the resulting build will have an SVN step that
-concatenates ``svn://svn.example.org/MyProject/'' (the baseURL) with
-``trunk'' (the branch name) to get the correct svn command. If the
-``newthing'' branch has a change to ``src/foo.c'', then the SVN step
-will concatenate ``svn://svn.example.org/MyProject/'' with
-``features/newthing'' to get the svnurl for checkout.
-
-@node Darcs, Mercurial, SVN, Source Checkout
-@subsubsection Darcs
-
-@cindex Darcs Checkout
-
-The @code{Darcs} build step performs a
-@uref{http://abridgegame.org/darcs/, Darcs} checkout or update.
-
-Like @ref{SVN}, this step can either be configured to always check
-out a specific tree, or set up to pull from a particular branch that
-gets specified separately for each build. Also like SVN, the
-repository URL given to Darcs is created by concatenating a
-@code{baseURL} with the branch name, and if no particular branch is
-requested, it uses a @code{defaultBranch}. The only difference in
-usage is that each potential Darcs repository URL must point to a
-fully-fledged repository, whereas SVN URLs usually point to sub-trees
-of the main Subversion repository. In other words, doing an SVN
-checkout of @code{baseURL} is legal, but silly, since you'd probably
-wind up with a copy of every single branch in the whole repository.
-Doing a Darcs checkout of @code{baseURL} is just plain wrong, since
-the parent directory of a collection of Darcs repositories is not
-itself a valid repository.
-
-The Darcs step takes the following arguments:
-
-@table @code
-@item repourl
-(required unless @code{baseURL} is provided): the URL at which the
-Darcs source repository is available.
-
-@item baseURL
-(required unless @code{repourl} is provided): the base repository URL,
-to which a branch name will be appended. It should probably end in a
-slash.
-
-@item defaultBranch
-(allowed if and only if @code{baseURL} is provided): this specifies
-the name of the branch to use when a Build does not provide one of its
-own. This will be appended to @code{baseURL} to create the string that
-will be passed to the @code{darcs get} command.
-@end table
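-
-For example, a branch-aware Darcs checkout might be sketched as follows
-(the base URL and branch name are made up):
-
-@example
-s(step.Darcs, mode="copy",
-  baseURL="http://darcs.example.org/MyProject/",
-  defaultBranch="mainline")
-@end example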
-
-@node Mercurial, Arch, Darcs, Source Checkout
-@subsubsection Mercurial
-
-@cindex Mercurial Checkout
-
-The @code{Mercurial} build step performs a
-@uref{http://selenic.com/mercurial, Mercurial} (aka ``hg'') checkout
-or update.
-
-Branches are handled just as in @ref{Darcs}.
-
-The Mercurial step takes the following arguments:
-
-@table @code
-@item repourl
-(required unless @code{baseURL} is provided): the URL at which the
-Mercurial source repository is available.
-
-@item baseURL
-(required unless @code{repourl} is provided): the base repository URL,
-to which a branch name will be appended. It should probably end in a
-slash.
-
-@item defaultBranch
-(allowed if and only if @code{baseURL} is provided): this specifies
-the name of the branch to use when a Build does not provide one of its
-own. This will be appended to @code{baseURL} to create the string that
-will be passed to the @code{hg clone} command.
-@end table
-
-
-@node Arch, Bazaar, Mercurial, Source Checkout
-@subsubsection Arch
-
-@cindex Arch Checkout
-
-The @code{Arch} build step performs an @uref{http://gnuarch.org/,
-Arch} checkout or update using the @code{tla} client. It takes the
-following arguments:
-
-@table @code
-@item url
-(required): this specifies the URL at which the Arch source archive is
-available.
-
-@item version
-(required): this specifies which ``development line'' (like a branch)
-should be used. This provides the default branch name, but individual
-builds may specify a different one.
-
-@item archive
-(optional): Each repository knows its own archive name. If this
-parameter is provided, it must match the repository's archive name.
-The parameter is accepted for compatibility with the @code{Bazaar}
-step, below.
-
-@end table
-
-@node Bazaar, P4Sync, Arch, Source Checkout
-@subsubsection Bazaar
-
-@cindex Bazaar Checkout
-
-@code{Bazaar} is an alternate implementation of the Arch VC system,
-which uses a client named @code{baz}. The checkout semantics are just
-different enough from @code{tla} that there is a separate BuildStep for
-it.
-
-It takes exactly the same arguments as @code{Arch}, except that the
-@code{archive=} parameter is required. (baz does not emit the archive
-name when you do @code{baz register-archive}, so we must provide it
-ourselves).
-
-
-@node P4Sync, , Bazaar, Source Checkout
-@subsubsection P4Sync
-
-@cindex Perforce Update
-
-The @code{P4Sync} build step performs a
-@uref{http://www.perforce.com/, Perforce} update. It is a temporary
-facility: a more complete P4 checkout step (named @code{P4}) will
-eventually replace it. This step requires significant manual setup on
-each build slave. It takes the following arguments.
-
-@table @code
-@item p4port
-(required): the host:port string describing how to get to the P4 Depot
-(repository), used as the P4PORT environment variable for all p4
-commands
-@end table
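-
-Assuming the manual slave-side setup mentioned above has been done, the
-step itself might be as simple as this sketch (the host and port are
-placeholders):
-
-@example
-s(step.P4Sync, p4port="perforce.example.com:1666")
-@end example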
-
-@node ShellCommand, Simple ShellCommand Subclasses, Source Checkout, Build Steps
-@subsection ShellCommand
-
-This is a useful base class for just about everything you might want
-to do during a build (except for the initial source checkout). It runs
-a single command in a child shell on the buildslave. All stdout/stderr
-is recorded into a LogFile. The step finishes with a status of FAILURE
-if the command's exit code is non-zero, otherwise it has a status of
-SUCCESS.
-
-The preferred way to specify the command is with a list of argv strings,
-since this allows for spaces in filenames and avoids doing any fragile
-shell-escaping. You can also specify the command with a single string, in
-which case the string is given to '/bin/sh -c COMMAND' for parsing.
-
-All ShellCommands are run by default in the ``workdir'', which
-defaults to the ``@file{build}'' subdirectory of the slave builder's
-base directory. The absolute path of the workdir will thus be the
-slave's basedir (set as an option to @code{buildbot slave},
-@pxref{Creating a buildslave}) plus the builder's basedir (set as the
-@code{'builddir'} key of the builder specification in master.cfg) plus
-the workdir itself (a class-level attribute of the BuildFactory, defaults to
-``@file{build}'').
-
-@code{ShellCommand} arguments:
-
-@table @code
-@item command
-a list of strings (preferred) or single string (discouraged) which
-specifies the command to be run
-
-@item env
-a dictionary of environment strings which will be added to the child
-command's environment.
-
-@item want_stdout
-if False, stdout from the child process is discarded rather than being
-sent to the buildmaster for inclusion in the step's LogFile.
-
-@item want_stderr
-like @code{want_stdout} but for stderr. Note that commands run through
-a PTY do not have separate stdout/stderr streams: both are merged into
-stdout.
-
-@item timeout
-if the command fails to produce any output for this many seconds, it
-is assumed to be locked up and will be killed.
-
-@item description
-This will be used to describe the command (on the Waterfall display)
-while the command is still running. It should be a single
-imperfect-tense verb, like ``compiling'' or ``testing''.
-
-@item descriptionDone
-This will be used to describe the command once it has finished. A
-simple noun like ``compile'' or ``tests'' should be used.
-
-If neither @code{description} nor @code{descriptionDone} are set, the
-actual command arguments will be used to construct the description.
-This may be a bit too wide to fit comfortably on the Waterfall
-display.
-
-@end table
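-
-Putting several of these arguments together, a long-running test step
-with a custom environment might look roughly like the following sketch
-(the command, environment variable, and timeout are only
-illustrations):
-
-@example
-s(step.ShellCommand,
-  command=["make", "check"],
-  env=@{'LANG': 'C'@},
-  timeout=3600,
-  description=["testing"],
-  descriptionDone=["tests"])
-@end example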
-
-@node Simple ShellCommand Subclasses, , ShellCommand, Build Steps
-@subsection Simple ShellCommand Subclasses
-
-Several subclasses of ShellCommand are provided as starting points for
-common build steps. These are all very simple: they just override a few
-parameters so you don't have to specify them yourself, making the master.cfg
-file less verbose.
-
-@menu
-* Configure::
-* Compile::
-* Test::
-* Writing New BuildSteps::
-* Build Properties::
-@end menu
-
-@node Configure, Compile, Simple ShellCommand Subclasses, Simple ShellCommand Subclasses
-@subsubsection Configure
-
-This is intended to handle the @code{./configure} step from
-autoconf-style projects, or the @code{perl Makefile.PL} step from perl
-MakeMaker.pm-style modules. The default command is @code{./configure}
-but you can change this by providing a @code{command=} parameter.
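-
-For example (the flag is just an illustration):
-
-@example
-s(step.Configure, command=["./configure", "--without-x"])
-@end example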
-
-@node Compile, Test, Configure, Simple ShellCommand Subclasses
-@subsubsection Compile
-
-This is meant to handle compiling or building a project written in C. The
-default command is @code{make all}. When the compile is finished, the
-log file is scanned for GCC error/warning messages and a summary log is
-created with any problems that were seen (TODO: the summary is not yet
-created).
-
-@node Test, Writing New BuildSteps, Compile, Simple ShellCommand Subclasses
-@subsubsection Test
-
-This is meant to handle unit tests. The default command is @code{make
-test}, and the @code{warnOnFailure} flag is set.
-
-
-
-
-
-@node Writing New BuildSteps, Build Properties, Test, Simple ShellCommand Subclasses
-@subsubsection Writing New BuildSteps
-
-While it is a good idea to keep your build process self-contained in
-the source code tree, sometimes it is convenient to put more
-intelligence into your Buildbot configuration. One way to do this is
-to write a custom BuildStep. Once written, this Step can be used in
-the @file{master.cfg} file.
-
-The best reason for writing a custom BuildStep is to better parse the
-results of the command being run. For example, a BuildStep that knows
-about JUnit could look at the logfiles to determine which tests had
-been run, how many passed and how many failed, and then report more
-detailed information than a simple @code{rc==0} -based ``good/bad''
-decision.
-
-TODO: add more description of BuildSteps.
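-
-As a rough sketch of the idea (this is not a complete recipe; the step
-name, command, and failure policy here are all hypothetical), a
-subclass can override @code{evaluateCommand} to decide the step's
-result from something other than the bare exit code:
-
-@example
-from buildbot.process.step import ShellCommand
-from buildbot.status.builder import SUCCESS, FAILURE
-
-class JUnitTest(ShellCommand):
-    name = "junit"
-    command = ["ant", "test"]
-
-    def evaluateCommand(self, cmd):
-        # cmd is the completed RemoteCommand; cmd.rc is its exit code.
-        # A more thorough step would also scan the stdio log here and
-        # count how many test cases passed or failed before deciding.
-        if cmd.rc != 0:
-            return FAILURE
-        return SUCCESS
-@end example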
-
-
-@node Build Properties, , Writing New BuildSteps, Simple ShellCommand Subclasses
-@subsubsection Build Properties
-
-@cindex build properties
-
-Each build has a set of ``Build Properties'', which can be used by its
-BuildStep to modify their actions. For example, the SVN revision
-number of the source code being built is available as a build
-property, and a ShellCommand step could incorporate this number into a
-command which creates a numbered release tarball.
-
-Some build properties are set when the build starts, such as the
-SourceStamp information. Other properties can be set by BuildSteps as
-they run, for example the various Source steps will set the
-@code{got_revision} property to the source revision that was actually
-checked out (which can be useful when the SourceStamp in use merely
-requested the ``latest revision'': @code{got_revision} will tell you
-what was actually built).
-
-In custom BuildSteps, you can get and set the build properties with
-the @code{getProperty}/@code{setProperty} methods. Each takes a string
-for the name of the property, and returns or accepts an
-arbitrary@footnote{Build properties are serialized along with the
-build results, so they must be serializable. For this reason, the
-value of any build property should be simple inert data: strings,
-numbers, lists, tuples, and dictionaries. They should not contain
-class instances.} object. For example:
-
-@example
-class MakeTarball(step.ShellCommand):
- def start(self):
- self.setCommand(["tar", "czf",
- "build-%s.tar.gz" % self.getProperty("revision"),
- "source"])
- step.ShellCommand.start(self)
-@end example
-
-@cindex WithProperties
-
-You can use build properties in ShellCommands by using the
-@code{WithProperties} wrapper when setting the arguments of the
-ShellCommand. This interpolates the named build properties into the
-generated shell command.
-
-@example
-from buildbot.process.step import ShellCommand, WithProperties
-
-s(ShellCommand,
- command=["tar", "czf",
- WithProperties("build-%s.tar.gz", "revision"),
- "source"],
- )
-@end example
-
-If this BuildStep were used in a tree obtained from Subversion, it
-would create a tarball with a name like @file{build-1234.tar.gz}.
-
-The @code{WithProperties} function does @code{printf}-style string
-interpolation, using strings obtained by calling
-@code{build.getProperty(propname)}. Note that for every @code{%s} (or
-@code{%d}, etc), you must have exactly one additional argument to
-indicate which build property you want to insert.
-
-
-You can also use python dictionary-style string interpolation by using
-the @code{%(propname)s} syntax. In this form, the property name goes
-in the parentheses, and WithProperties takes @emph{no} additional
-arguments:
-
-@example
-s(ShellCommand,
- command=["tar", "czf",
- WithProperties("build-%(revision)s.tar.gz"),
- "source"],
- )
-@end example
-
-Don't forget the extra ``s'' after the closing parenthesis! This is
-the cause of many confusing errors.
-
-Note that, like python, you can either do positional-argument
-interpolation @emph{or} keyword-argument interpolation, not both. Thus
-you cannot use a string like
-@code{WithProperties("foo-%(revision)s-%s", "branch")}.
-
-At the moment, the only way to set build properties is by writing a
-custom BuildStep.
-
-@heading Common Build Properties
-
-The following build properties are set when the build is started, and
-are available to all steps.
-
-@table @code
-@item branch
-
-This comes from the build's SourceStamp, and describes which branch is
-being checked out. This will be @code{None} (which interpolates into
-@code{WithProperties} as an empty string) if the build is on the
-default branch, which is generally the trunk. Otherwise it will be a
-string like ``branches/beta1.4''. The exact syntax depends upon the VC
-system being used.
-
-@item revision
-
-This also comes from the SourceStamp, and is the revision of the
-source code tree that was requested from the VC system. When a build
-is requested of a specific revision (as is generally the case when the
-build is triggered by Changes), this will contain the revision
-specification. The syntax depends upon the VC system in use: for SVN
-it is an integer, for Mercurial it is a short string, for Darcs it is
-a rather large string, etc.
-
-If the ``force build'' button was pressed, the revision will be
-@code{None}, which means to use the most recent revision available.
-This is a ``trunk build''. This will be interpolated as an empty
-string.
-
-@item got_revision
-
-This is set when a Source step checks out the source tree, and
-provides the revision that was actually obtained from the VC system.
-In general this should be the same as @code{revision}, except for
-trunk builds, where @code{got_revision} indicates what revision was
-current when the checkout was performed. This can be used to rebuild
-the same source code later.
-
-Note that for some VC systems (Darcs in particular), the revision is a
-large string containing newlines, and is not suitable for
-interpolation into a filename.
-
-@item buildername
-
-This is a string that indicates which Builder the build was a part of.
-The combination of buildername and buildnumber uniquely identify a
-build.
-
-@item buildnumber
-
-Each build gets a number, scoped to the Builder (so the first build
-performed on any given Builder will have a build number of 0). This
-integer property contains the build's number.
-
-@item slavename
-
-This is a string which identifies which buildslave the build is
-running on.
-
-@end table
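-
-For example, these properties can be combined with
-@code{WithProperties} (using the same imports as the earlier examples)
-to give each product a unique name; the command here is purely
-illustrative:
-
-@example
-s(ShellCommand,
-  command=["tar", "czf",
-           WithProperties("%(buildername)s-%(buildnumber)s.tar.gz"),
-           "build"],
-  )
-@end example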
-
-
-@node Interlocks, Build Factories, Build Steps, Build Process
-@section Interlocks
-
-@cindex locks
-
-For various reasons, you may want to prevent certain Steps (or perhaps
-entire Builds) from running simultaneously. Limited CPU speed or
-network bandwidth to the VC server, problems with simultaneous access
-to a database server used by unit tests, or multiple Builds which
-access shared state may all require some kind of interlock to prevent
-corruption, confusion, or resource overload.
-
-@code{Locks} are the mechanism used to express these kinds of
-constraints on when Builds or Steps can be run. There are two kinds of
-@code{Locks}, each with their own scope: @code{SlaveLock}s are scoped
-to a single buildslave, while @code{MasterLock} instances are scoped
-to the buildbot as a whole. Each @code{Lock} is created with a unique
-name.
-
-To use a lock, simply include it in the @code{locks=} argument of the
-@code{BuildStep} object that should obtain the lock before it runs.
-This argument accepts a list of @code{Lock} objects: the Step will
-acquire all of them before it runs.
-
-To claim a lock for the whole Build, add a @code{'locks'} key to the
-builder specification dictionary with the same list of @code{Lock}
-objects. (This is the dictionary that has the @code{'name'},
-@code{'slavename'}, @code{'builddir'}, and @code{'factory'} keys). The
-@code{Build} object also accepts a @code{locks=} argument, but unless
-you are writing your own @code{BuildFactory} subclass then it will be
-easier to set the locks in the builder dictionary.
-
-Note that there are no partial-acquire or partial-release semantics:
-this prevents deadlocks caused by two Steps each waiting for a lock
-held by the other@footnote{Also note that a clever buildmaster admin
-could still create the opportunity for deadlock: Build A obtains Lock
-1, inside which Step A.two tries to acquire Lock 2 at the Step level.
-Meanwhile Build B obtains Lock 2, and has a Step B.two which wants to
-acquire Lock 1 at the Step level. Don't Do That.}. This also means
-that waiting to acquire a @code{Lock} can take an arbitrarily long
-time: if the buildmaster is very busy, a Step or Build which requires
-only one @code{Lock} may starve another that is waiting for that
-@code{Lock} plus some others.
-
-
-In the following example, we run the same build on three different
-platforms. The unit-test steps of these builds all use a common
-database server, and would interfere with each other if allowed to run
-simultaneously. The @code{Lock} prevents more than one of these builds
-from happening at the same time.
-
-@example
-from buildbot import locks
-from buildbot.process import step, factory
-from buildbot.process.factory import s
-
-db_lock = locks.MasterLock("database")
-steps = [s(step.SVN, svnurl="http://example.org/svn/Trunk"),
- s(step.ShellCommand, command="make all"),
- s(step.ShellCommand, command="make test", locks=[db_lock]),
- ]
-f = factory.BuildFactory(steps)
-b1 = @{'name': 'full1', 'slavename': 'bot-1', 'builddir': 'f1', 'factory': f@}
-b2 = @{'name': 'full2', 'slavename': 'bot-2', 'builddir': 'f2', 'factory': f@}
-b3 = @{'name': 'full3', 'slavename': 'bot-3', 'builddir': 'f3', 'factory': f@}
-c['builders'] = [b1, b2, b3]
-@end example
-
-In the next example, we have one buildslave hosting three separate
-Builders (each running tests against a different version of Python).
-The machine which hosts this buildslave is not particularly fast, so
-we want to prevent the builds from all happening at the same time. We
-use a @code{SlaveLock} because the builds happening on the slow slave
-do not affect builds running on other slaves, and we use the lock on
-the build as a whole because the slave is so slow that even multiple
-SVN checkouts would be taxing.
-
-@example
-from buildbot import locks
-from buildbot.process import step, factory
-from buildbot.process.factory import s
-
-slow_lock = locks.SlaveLock("cpu")
-source = s(step.SVN, svnurl="http://example.org/svn/Trunk")
-f22 = factory.Trial(source, trialpython=["python2.2"])
-f23 = factory.Trial(source, trialpython=["python2.3"])
-f24 = factory.Trial(source, trialpython=["python2.4"])
-b1 = @{'name': 'p22', 'slavename': 'bot-1', 'builddir': 'p22', 'factory': f22,
-     'locks': [slow_lock] @}
-b2 = @{'name': 'p23', 'slavename': 'bot-1', 'builddir': 'p23', 'factory': f23,
-     'locks': [slow_lock] @}
-b3 = @{'name': 'p24', 'slavename': 'bot-1', 'builddir': 'p24', 'factory': f24,
-     'locks': [slow_lock] @}
-c['builders'] = [b1, b2, b3]
-@end example
-
-In the last example, we use two Locks at the same time. In this case,
-we're concerned about both of the previous constraints, but we'll say
-that only the tests are computationally intensive, and that they have
-been split into those which use the database and those which do not.
-In addition, two of the Builds run on a fast machine which does not
-need to worry about the cpu lock, but which still must be prevented
-from simultaneous database access.
-
-@example
-from buildbot import locks
-from buildbot.process import step, factory
-from buildbot.process.factory import s
-
-db_lock = locks.MasterLock("database")
-cpu_lock = locks.SlaveLock("cpu")
-slow_steps = [s(step.SVN, svnurl="http://example.org/svn/Trunk"),
- s(step.ShellCommand, command="make all", locks=[cpu_lock]),
- s(step.ShellCommand, command="make test", locks=[cpu_lock]),
- s(step.ShellCommand, command="make db-test",
- locks=[db_lock, cpu_lock]),
- ]
-slow_factory = factory.BuildFactory(slow_steps)
-fast_steps = [s(step.SVN, svnurl="http://example.org/svn/Trunk"),
- s(step.ShellCommand, command="make all", locks=[]),
- s(step.ShellCommand, command="make test", locks=[]),
- s(step.ShellCommand, command="make db-test",
- locks=[db_lock]),
- ]
-fast_factory = factory.BuildFactory(fast_steps)
-b1 = @{'name': 'full1', 'slavename': 'bot-slow', 'builddir': 'full1',
-      'factory': slow_factory@}
-b2 = @{'name': 'full2', 'slavename': 'bot-slow', 'builddir': 'full2',
-      'factory': slow_factory@}
-b3 = @{'name': 'full3', 'slavename': 'bot-fast', 'builddir': 'full3',
-      'factory': fast_factory@}
-b4 = @{'name': 'full4', 'slavename': 'bot-fast', 'builddir': 'full4',
-      'factory': fast_factory@}
-c['builders'] = [b1, b2, b3, b4]
-@end example
-
-As a final note, remember that a unit test system which breaks when
-multiple people run it at the same time is fragile and should be
-fixed. Asking your human developers to serialize themselves when
-running unit tests will just discourage them from running the unit
-tests at all. Find a way to fix this: change the database tests to
-create a new (uniquely-named) user or table for each test run, don't
-use fixed listening TCP ports for network tests (instead listen on
-port 0 to let the kernel choose a port for you and then query the
-socket to find out what port was allocated). @code{MasterLock}s can be
-used to accommodate broken test systems like this, but are really
-intended for other purposes: build processes that store or retrieve
-products in shared directories, or which do things that human
-developers would not (or which might slow down or break in ways that
-require human attention to deal with).
-
-@code{SlaveLock}s can be used to keep automated performance tests
-from interfering with each other, when there are multiple Builders all
-using the same buildslave. But they can't prevent other users from
-running CPU-intensive jobs on that host while the tests are running.
-
-@node Build Factories, , Interlocks, Build Process
-@section Build Factories
-
-
-Each Builder is equipped with a ``build factory'', which is
-responsible for producing the actual @code{Build} objects that perform
-each build. This factory is created in the configuration file, and
-attached to a Builder through the @code{factory} element of its
-dictionary.
-
-The standard @code{BuildFactory} object creates @code{Build} objects
-by default. These Builds will each execute a collection of BuildSteps
-in a fixed sequence. Each step can affect the results of the build,
-but in general there is little intelligence to tie the different steps
-together. You can create subclasses of @code{Build} to implement more
-sophisticated build processes, and then use a subclass of
-@code{BuildFactory} (or simply set the @code{buildClass} attribute) to
-create instances of your new Build subclass.
-
-
-@menu
-* BuildStep Objects::
-* BuildFactory::
-* Process-Specific build factories::
-@end menu
-
-@node BuildStep Objects, BuildFactory, Build Factories, Build Factories
-@subsection BuildStep Objects
-
-The steps used by these builds are all subclasses of @code{BuildStep}.
-The standard ones provided with Buildbot are documented later,
-@xref{Build Steps}. You can also write your own subclasses to use in
-builds.
-
-The basic behavior for a @code{BuildStep} is to:
-
-@itemize @bullet
-@item
-run for a while, then stop
-@item
-possibly invoke some RemoteCommands on the attached build slave
-@item
-possibly produce a set of log files
-@item
-finish with a status described by one of four values defined in
-buildbot.status.builder: SUCCESS, WARNINGS, FAILURE, SKIPPED
-@item
-provide a list of short strings to describe the step
-@item
-define a color (generally green, orange, or red) with which the
-step should be displayed
-@end itemize
-
-
-More sophisticated steps may produce additional information and
-provide it to later build steps, or store it in the factory to provide
-to later builds.
-
-
-@node BuildFactory, Process-Specific build factories, BuildStep Objects, Build Factories
-@subsection BuildFactory
-
-The default @code{BuildFactory}, provided in the
-@code{buildbot.process.factory} module, is constructed with a list of
-``BuildStep specifications'': a list of @code{(step_class, kwargs)}
-tuples, one per step. When asked to create a Build, it loads the list of
-steps into the new Build object. When the Build is actually started,
-these step specifications are used to create the actual set of
-BuildSteps, which are then executed one at a time. For example, a
-build which consists of a CVS checkout followed by a @code{make build}
-would be constructed as follows:
-
-@example
-from buildbot.process import step, factory
-from buildbot.process.factory import s
-# s is a convenience function, defined with:
-# def s(steptype, **kwargs): return (steptype, kwargs)
-
-f = factory.BuildFactory([s(step.CVS,
- cvsroot=CVSROOT, cvsmodule="project",
- mode="update"),
- s(step.Compile, command=["make", "build"])])
-@end example
-
-Each step can affect the build process in the following ways:
-
-@itemize @bullet
-@item
-If the step's @code{haltOnFailure} attribute is True, then a failure
-in the step (i.e. if it completes with a result of FAILURE) will cause
-the whole build to be terminated immediately: no further steps will be
-executed. This is useful for setup steps upon which the rest of the
-build depends: if the CVS checkout or @code{./configure} process
-fails, there is no point in trying to compile or test the resulting
-tree.
-
-@item
-If the @code{flunkOnFailure} or @code{flunkOnWarnings} flag is set,
-then a result of FAILURE or WARNINGS will mark the build as a whole as
-FAILED. However, the remaining steps will still be executed. This is
-appropriate for things like multiple testing steps: a failure in any
-one of them will indicate that the build has failed; however, it is
-still useful to run them all to completion.
-
-@item
-Similarly, if the @code{warnOnFailure} or @code{warnOnWarnings} flag
-is set, then a result of FAILURE or WARNINGS will mark the build as
-having WARNINGS, and the remaining steps will still be executed. This
-may be appropriate for certain kinds of optional build or test steps.
-For example, a failure experienced while building documentation files
-should be made visible with a WARNINGS result but not be serious
-enough to warrant marking the whole build with a FAILURE.
-
-@end itemize
-
-In addition, each Step produces its own results, may create logfiles,
-etc. However only the flags described above have any effect on the
-build as a whole.
-
-The pre-defined BuildSteps like @code{CVS} and @code{Compile} have
-reasonably appropriate flags set on them already. For example, without
-a source tree there is no point in continuing the build, so the
-@code{CVS} class has the @code{haltOnFailure} flag set to True. Look
-in @file{buildbot/process/step.py} to see how the other Steps are
-marked.
-
-Each Step is created with an additional @code{workdir} argument that
-indicates where its actions should take place. This is specified as a
-subdirectory of the slave builder's base directory, with a default
-value of @code{build}. This is only implemented as a step argument (as
-opposed to simply being a part of the base directory) because the
-CVS/SVN steps need to perform their checkouts from the parent
-directory.
-
-@menu
-* BuildFactory Attributes::
-* Quick builds::
-@end menu
-
-@node BuildFactory Attributes, Quick builds, BuildFactory, BuildFactory
-@subsubsection BuildFactory Attributes
-
-Some attributes from the BuildFactory are copied into each Build.
-
-@cindex treeStableTimer
-
-@table @code
-@item useProgress
-(defaults to True): if True, the buildmaster keeps track of how long
-each step takes, so it can provide estimates of how long future builds
-will take. If builds are not expected to take a consistent amount of
-time (such as incremental builds in which a random set of files are
-recompiled or tested each time), this should be set to False to
-inhibit progress-tracking.
-
-@end table
-
-
-@node Quick builds, , BuildFactory Attributes, BuildFactory
-@subsubsection Quick builds
-
-The difference between a ``full build'' and a ``quick build'' is that
-quick builds are generally done incrementally, starting with the tree
-where the previous build was performed. That simply means that the
-source-checkout step should be given a @code{mode='update'} flag, to
-do the source update in-place.
-
-In addition to that, the @code{useProgress} flag should be set to
-False. Incremental builds will (or at least they ought to) compile as
-few files as necessary, so they will take an unpredictable amount of
-time to run. Therefore it would be misleading to claim to predict how
-long the build will take.
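-
-A quick-build factory might therefore be sketched like this (the
-repository URL and commands are placeholders):
-
-@example
-from buildbot.process import step, factory
-from buildbot.process.factory import s
-
-quick = factory.BuildFactory([
-    s(step.SVN, svnurl="http://svn.example.org/Trunk/", mode="update"),
-    s(step.ShellCommand, command=["make", "all"]),
-    s(step.ShellCommand, command=["make", "test"]),
-    ])
-quick.useProgress = False
-@end example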
-
-
-@node Process-Specific build factories, , BuildFactory, Build Factories
-@subsection Process-Specific build factories
-
-Many projects use one of a few popular build frameworks to simplify
-the creation and maintenance of Makefiles or other compilation
-structures. Buildbot provides several pre-configured BuildFactory
-subclasses which let you build these projects with a minimum of fuss.
-
-@menu
-* GNUAutoconf::
-* CPAN::
-* Python distutils::
-* Python/Twisted/trial projects::
-@end menu
-
-@node GNUAutoconf, CPAN, Process-Specific build factories, Process-Specific build factories
-@subsubsection GNUAutoconf
-
-@uref{http://www.gnu.org/software/autoconf/, GNU Autoconf} is a
-software portability tool, intended to make it possible to write
-programs in C (and other languages) which will run on a variety of
-UNIX-like systems. Most GNU software is built using autoconf. It is
-frequently used in combination with GNU automake. These tools both
-encourage a build process which usually looks like this:
-
-@example
-% CONFIG_ENV=foo ./configure --with-flags
-% make all
-% make check
-# make install
-@end example
-
-(except of course the Buildbot always skips the @code{make install}
-part).
-
-The Buildbot's @code{buildbot.process.factory.GNUAutoconf} factory is
-designed to build projects which use GNU autoconf and/or automake. The
-configuration environment variables, the configure flags, and command
-lines used for the compile and test are all configurable; in general
-the default values will be suitable.
-
-Example:
-
-@example
-# use the s() convenience function defined earlier
-f = factory.GNUAutoconf(source=s(step.SVN, svnurl=URL, mode="copy"),
-                        configureFlags=["--disable-nls"])
-@end example
-
-Required Arguments:
-
-@table @code
-@item source
-This argument must be a step specification tuple that provides a
-BuildStep to generate the source tree.
-@end table
-
-Optional Arguments:
-
-@table @code
-@item configure
-The command used to configure the tree. Defaults to
-@code{./configure}. Accepts either a string or a list of shell argv
-elements.
-
-@item configureEnv
-The environment used for the initial configuration step. This accepts
-a dictionary which will be merged into the buildslave's normal
-environment. This is commonly used to provide things like
-@code{CFLAGS="-O2 -g"} (to control the optimization and debugging
-flags used during the compile).
-Defaults to an empty dictionary.
-
-@item configureFlags
-A list of flags to be appended to the argument list of the configure
-command. This is commonly used to enable or disable specific features
-of the autoconf-controlled package, like @code{["--without-x"]} to
-disable windowing support. Defaults to an empty list.
-
-@item compile
-this is a shell command or list of argv values which is used to
-actually compile the tree. It defaults to @code{make all}. If set to
-None, the compile step is skipped.
-
-@item test
-this is a shell command or list of argv values which is used to run
-the tree's self-tests. It defaults to @code{make check}. If set to
-None, the test step is skipped.
-
-@end table
-
-
-@node CPAN, Python distutils, GNUAutoconf, Process-Specific build factories
-@subsubsection CPAN
-
-Most Perl modules available from the @uref{http://www.cpan.org/, CPAN}
-archive use the @code{MakeMaker} module to provide configuration,
-build, and test services. The standard build routine for these modules
-looks like:
-
-@example
-% perl Makefile.PL
-% make
-% make test
-# make install
-@end example
-
-(except again Buildbot skips the install step)
-
-Buildbot provides a @code{CPAN} factory to compile and test these
-projects.
-
-
-Arguments:
-@table @code
-@item source
-(required): A step specification tuple, like that used by GNUAutoconf.
-
-@item perl
-A string which specifies the @code{perl} executable to use. Defaults
-to just @code{perl}.
-
-@end table
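-
-For example (the repository URL and perl path are placeholders):
-
-@example
-f = factory.CPAN(source=s(step.SVN, svnurl=URL, mode="copy"),
-                 perl="/usr/bin/perl")
-@end example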
-
-
-@node Python distutils, Python/Twisted/trial projects, CPAN, Process-Specific build factories
-@subsubsection Python distutils
-
-Most Python modules use the @code{distutils} package to provide
-configuration and build services. The standard build process looks
-like:
-
-@example
-% python ./setup.py build
-% python ./setup.py install
-@end example
-
-Unfortunately, although Python provides a standard unit-test framework
-named @code{unittest}, to the best of my knowledge @code{distutils}
-does not provide a standardized target to run such unit tests. (please
-let me know if I'm wrong, and I will update this factory).
-
-The @code{Distutils} factory provides support for running the build
-part of this process. It accepts the same @code{source=} parameter as
-the other build factories.
-
-
-Arguments:
-@table @code
-@item source
-(required): A step specification tuple, like that used by GNUAutoconf.
-
-@item python
-A string which specifies the @code{python} executable to use. Defaults
-to just @code{python}.
-
-@item test
-Provides a shell command which runs unit tests. This accepts either a
-string or a list. The default value is None, which disables the test
-step (since there is no common default command to run unit tests in
-distutils modules).
-
-@end table
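-
-For example, a sketch which also runs a project-specific test script
-(the repository URL and test command are placeholders):
-
-@example
-f = factory.Distutils(source=s(step.SVN, svnurl=URL, mode="copy"),
-                      python="python2.3",
-                      test=["python2.3", "./run_tests.py"])
-@end example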
-
-
-@node Python/Twisted/trial projects, , Python distutils, Process-Specific build factories
-@subsubsection Python/Twisted/trial projects
-
-Twisted provides a unit test tool named @code{trial} which provides a
-few improvements over Python's built-in @code{unittest} module. Many
-python projects which use Twisted for their networking or application
-services also use trial for their unit tests. These modules are
-usually built and tested with something like the following:
-
-@example
-% python ./setup.py build
-% PYTHONPATH=build/lib.linux-i686-2.3 trial -v PROJECTNAME.test
-% python ./setup.py install
-@end example
-
-Unfortunately, the @file{build/lib} directory into which the
-built/copied .py files are placed is actually architecture-dependent,
-and I do not yet know of a simple way to calculate its value. For many
-projects it is sufficient to import their libraries ``in place'' from
-the tree's base directory (@code{PYTHONPATH=.}).
-
-In addition, the @var{PROJECTNAME} value where the test files are
-located is project-dependent: it is usually just the project's
-top-level library directory, as common practice suggests the unit test
-files are put in the @code{test} sub-module. This value cannot be
-guessed, so the @code{Trial} class must be told where to find the test
-files.
-
-The @code{Trial} class provides support for building and testing
-projects which use distutils and trial. If the test module name is
-specified, trial will be invoked. The library path used for testing
-can also be set.
-
-One advantage of trial is that the Buildbot happens to know how to
-parse trial output, letting it identify which tests passed and which
-ones failed. The Buildbot can then provide fine-grained reports about
-how many tests have failed, when individual tests fail when they had
-been passing previously, etc.
-
-Another feature of trial is that you can give it a series of source
-.py files, and it will search them for special @code{test-case-name}
-tags that indicate which test cases provide coverage for that file.
-Trial can then run just the appropriate tests. This is useful for
-quick builds, where you want to only run the test cases that cover the
-changed functionality.
-
-Arguments:
-@table @code
-@item source
-(required): A step specification tuple, like that used by GNUAutoconf.
-
-@item buildpython
-A list (argv array) of strings which specifies the @code{python}
-executable to use when building the package. Defaults to just
-@code{['python']}. It may be useful to add flags here, to suppress
-warnings during compilation of extension modules. This list is
-extended with @code{['./setup.py', 'build']} and then executed in a
-ShellCommand.
-
-@item testpath
-Provides a directory to add to @code{PYTHONPATH} when running the unit
-tests, if tests are being run. Defaults to @code{.} to include the
-project files in-place. The generated build library is frequently
-architecture-dependent, but may simply be @file{build/lib} for
-pure-python modules.
-
-@item trialpython
-Another list of strings used to build the command that actually runs
-trial. This is prepended to the contents of the @code{trial} argument
-below. It may be useful to add @code{-W} flags here to suppress
-warnings that occur while tests are being run. Defaults to an empty
-list, meaning @code{trial} will be run without an explicit
-interpreter, which is generally what you want if you're using
-@file{/usr/bin/trial} instead of, say, the @file{./bin/trial} that
-lives in the Twisted source tree.
-
-@item trial
-provides the name of the @code{trial} command. It is occasionally
-useful to use an alternate executable, such as @code{trial2.2} which
-might run the tests under an older version of Python. Defaults to
-@code{trial}.
-
-@item tests
-Provides a module name or names which contain the unit tests for this
-project. Accepts a string, typically @code{PROJECTNAME.test}, or a
-list of strings. Defaults to None, indicating that no tests should be
-run. You must either set this or @code{useTestCaseNames} to do anything
-useful with the Trial factory.
-
-@item useTestCaseNames
-Tells the Step to provide the names of all changed .py files to trial,
-so it can look for test-case-name tags and run just the matching test
-cases. Suitable for use in quick builds. Defaults to False.
-
-@item randomly
-If @code{True}, tells Trial (with the @code{--random=0} argument) to
-run the test cases in random order, which sometimes catches subtle
-inter-test dependency bugs. Defaults to @code{False}.
-
-@item recurse
-If @code{True}, tells Trial (with the @code{--recurse} argument) to
-look in all subdirectories for additional test cases. It isn't clear
-to me how this works, but it may be useful to deal with the
-unknown-PROJECTNAME problem described above, and is currently used in
-the Twisted buildbot to accommodate the fact that test cases are now
-distributed through multiple twisted.SUBPROJECT.test directories.
-
-@end table
-
-Unless one of @code{tests} or @code{useTestCaseNames}
-is set, no tests will be run.
-
-Some quick examples follow. Most of these examples assume that the
-target python code (the ``code under test'') can be reached directly
-from the root of the target tree, rather than being in a @file{lib/}
-subdirectory.
-
-@example
-# Trial(source, tests="toplevel.test") does:
-# python ./setup.py build
-# PYTHONPATH=. trial -to toplevel.test
-
-# Trial(source, tests=["toplevel.test", "other.test"]) does:
-# python ./setup.py build
-# PYTHONPATH=. trial -to toplevel.test other.test
-
-# Trial(source, useTestCaseNames=True) does:
-# python ./setup.py build
-# PYTHONPATH=. trial -to --testmodule=foo/bar.py.. (from Changes)
-
-# Trial(source, buildpython=["python2.3", "-Wall"], tests="foo.tests") does:
-# python2.3 -Wall ./setup.py build
-# PYTHONPATH=. trial -to foo.tests
-
-# Trial(source, trialpython="python2.3", trial="/usr/bin/trial",
-# tests="foo.tests") does:
-#  python ./setup.py build
-# PYTHONPATH=. python2.3 /usr/bin/trial -to foo.tests
-
-# For running trial out of the tree being tested (only useful when the
-# tree being built is Twisted itself):
-# Trial(source, trialpython=["python2.3", "-Wall"], trial="./bin/trial",
-# tests="foo.tests") does:
-# python2.3 -Wall ./setup.py build
-# PYTHONPATH=. python2.3 -Wall ./bin/trial -to foo.tests
-@end example
-
-If the output directory of @code{./setup.py build} is known, you can
-pull the python code from the built location instead of the source
-directories. This should be able to handle variations in where the
-source comes from, as well as accommodating binary extension modules:
-
-@example
-# Trial(source,tests="toplevel.test",testpath='build/lib.linux-i686-2.3')
-# does:
-# python ./setup.py build
-# PYTHONPATH=build/lib.linux-i686-2.3 trial -to toplevel.test
-@end example
-
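-Putting this together, a sketch of a complete Trial factory (the SVN
-URL, test module name, and Python version below are placeholders, not
-defaults):
-
-@example
-from buildbot.process.step import SVN
-from buildbot.process.factory import s, Trial
-
-f = Trial(s(SVN, svnurl="http://svn.example.org/MyProject/trunk",
-            mode="copy"),
-          buildpython=["python2.4"],
-          testpath=".",
-          tests="myproject.test",
-          randomly=True)
-@end example
-
-The resulting factory is then referenced from a builder specification
-just like GNUAutoconf or any of the other factories.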
-
-@node Status Delivery, Command-line tool, Build Process, Top
-@chapter Status Delivery
-
-More details are available in the docstrings for each class; use
-@code{pydoc buildbot.status.html.Waterfall} to see them. Most status
-delivery objects take a @code{categories=} argument, which can contain
-a list of ``category'' names: in this case, it will only show status
-for Builders that are in one of the named categories.
-
-(implementor's note: each of these objects should be a
-service.MultiService which will be attached to the BuildMaster object
-when the configuration is processed. They should use
-@code{self.parent.getStatus()} to get access to the top-level IStatus
-object, either inside @code{startService} or later. They may call
-@code{status.subscribe()} in @code{startService} to receive
-notifications of builder events, in which case they must define
-@code{builderAdded} and related methods. See the docstrings in
-@file{buildbot/interfaces.py} for full details.)
-
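-As a minimal sketch only (the class name and method bodies here are
-illustrative; see @file{buildbot/interfaces.py} for the authoritative
-method signatures):
-
-@example
-from twisted.application import service
-
-class MyStatusTarget(service.MultiService):
-    # attached as a child of the BuildMaster, so self.parent is set
-    # by the time startService runs
-    def startService(self):
-        service.MultiService.startService(self)
-        status = self.parent.getStatus()  # top-level IStatus object
-        status.subscribe(self)  # now we must define builderAdded, etc.
-
-    def builderAdded(self, name, builder):
-        # return an IStatusReceiver (e.g. self) to hear about this
-        # builder's events, or None to ignore it
-        return self
-
-    def buildFinished(self, builderName, build, results):
-        print "build finished on", builderName
-@end example
-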
-@menu
-* HTML Waterfall::
-* IRC Bot::
-* PBListener::
-@end menu
-
-@node HTML Waterfall, IRC Bot, Status Delivery, Status Delivery
-@subsection HTML Waterfall
-
-@cindex Waterfall
-
-@example
-from buildbot.status import html
-w = html.Waterfall(http_port=8080)
-c['status'].append(w)
-@end example
-
-The @code{buildbot.status.html.Waterfall} status target creates an
-HTML ``waterfall display'', which shows a time-based chart of events.
-This display provides detailed information about all steps of all
-recent builds, and provides hyperlinks to look at individual build
-logs and source changes. The @code{http_port} argument, if provided,
-gives a strports specification for the port that the web server
-should listen on. This can be a simple port number, or a string like
-@code{tcp:8080:interface=127.0.0.1} (to limit connections to the
-loopback interface, and therefore to clients running on the same
-host)@footnote{It may even be possible to provide SSL access by using
-a specification like
-@code{"ssl:12345:privateKey=mykey.pem:certKey=cert.pem"}, but this is
-completely untested}.
-
-If instead (or in addition) you provide the @code{distrib_port}
-argument, a twisted.web distributed server will be started either on a
-TCP port (if @code{distrib_port} is like @code{"tcp:12345"}) or more
-likely on a UNIX socket (if @code{distrib_port} is like
-@code{"unix:/path/to/socket"}).
-
-The @code{distrib_port} option means that, on a host with a
-suitably-configured twisted-web server, you do not need to consume a
-separate TCP port for the buildmaster's status web page. When the web
-server is constructed with @code{mktap web --user}, URLs that point to
-@code{http://host/~username/} are dispatched to a sub-server that is
-listening on a UNIX socket at @code{~username/.twistd-web-pb}. On
-such a system, it is convenient to create a dedicated @code{buildbot}
-user, then set @code{distrib_port} to
-@code{"unix:"+os.path.expanduser("~/.twistd-web-pb")}. This
-configuration will make the HTML status page available at
-@code{http://host/~buildbot/} . Suitable URL remapping can make it
-appear at @code{http://host/buildbot/}, and the right virtual host
-setup can even place it at @code{http://buildbot.host/} .
-
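-A configuration along those lines (assuming the dedicated
-@code{buildbot} account described above):
-
-@example
-import os.path
-from buildbot.status import html
-
-p = "unix:" + os.path.expanduser("~/.twistd-web-pb")
-c['status'].append(html.Waterfall(distrib_port=p))
-@end example
-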
-Other arguments:
-
-@table @code
-@item allowForce
-If set to True (the default), then the web page will provide a ``Force
-Build'' button that allows visitors to manually trigger builds. This
-is useful for developers to re-run builds that have failed because of
-intermittent problems in the test suite, or because of libraries that
-were not installed at the time of the previous build. You may not wish
-to allow strangers to cause a build to run: in that case, set this to
-False to remove these buttons.
-
-@item favicon
-If set to a string, this will be interpreted as a filename containing
-a ``favicon'': a small image that contains an icon for the web site.
-This is returned to browsers that request the @code{favicon.ico} file,
-and should point to a .png or .ico image file. The default value uses
-the buildbot/buildbot.png image (a small hex nut) contained in the
-buildbot distribution. You can set this to None to avoid using a
-favicon at all.
-
-@item robots_txt
-If set to a string, this will be interpreted as a filename containing
-the contents of ``robots.txt''. Many search engine spiders request
-this file before indexing the site. Setting it to a file which
-contains:
-@example
-User-agent: *
-Disallow: /
-@end example
-will prevent most search engines from trawling the (voluminous)
-generated status pages.
-
-@end table
-
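-Combining these arguments, a sketch (the favicon and robots.txt
-filenames are placeholders):
-
-@example
-from buildbot.status import html
-w = html.Waterfall(http_port=8080,
-                   allowForce=False,
-                   favicon="mylogo.png",
-                   robots_txt="robots.txt")
-c['status'].append(w)
-@end example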
-
-@node IRC Bot, PBListener, HTML Waterfall, Status Delivery
-@subsection IRC Bot
-
-@cindex IRC
-
-The @code{buildbot.status.words.IRC} status target creates an IRC bot
-which will attach to certain channels and be available for status
-queries. It can also be asked to announce builds as they occur, or be
-told to shut up.
-
-@example
-from buildbot.status import words
-irc = words.IRC("irc.example.org", "botnickname",
- channels=["channel1", "channel2"],
- password="mysecretpassword")
-c['status'].append(irc)
-@end example
-
-Take a look at the docstring for @code{words.IRC} for more details on
-configuring this service. The @code{password} argument, if provided,
-will be sent to Nickserv to claim the nickname: some IRC servers will
-not allow clients to send private messages until they have logged in
-with a password.
-
-To use the service, you address messages at the buildbot, either
-normally (@code{botnickname: status}) or with private messages
-(@code{/msg botnickname status}). The buildbot will respond in kind.
-
-Some of the commands currently available:
-
-@table @code
-
-@item list builders
-Emit a list of all configured builders
-@item status BUILDER
-Announce the status of a specific Builder: what it is doing right now.
-@item status all
-Announce the status of all Builders
-@item watch BUILDER
-If the given Builder is currently running, wait until the Build is
-finished and then announce the results.
-@item last BUILDER
-Return the results of the last build to run on the given Builder.
-
-@item help COMMAND
-Describe a command. Use @code{help commands} to get a list of known
-commands.
-@item source
-Announce the URL of the Buildbot's home page.
-@item version
-Announce the version of this Buildbot.
-@end table
-
-If the @code{allowForce=True} option was used, some additional commands
-will be available:
-
-@table @code
-@item force build BUILDER REASON
-Tell the given Builder to start a build of the latest code. The user
-requesting the build and REASON are recorded in the Build status. The
-buildbot will announce the build's status when it finishes.
-
-@item stop build BUILDER REASON
-Terminate any running build in the given Builder. REASON will be added
-to the build status to explain why it was stopped. You might use this
-if you committed a bug, corrected it right away, and don't want to
-wait for the first build (which is destined to fail) to complete
-before starting the second (hopefully fixed) build.
-@end table
-
-@node PBListener, , IRC Bot, Status Delivery
-@subsection PBListener
-
-@cindex PBListener
-
-@example
-import buildbot.status.client
-pbl = buildbot.status.client.PBListener(port=int, user=str,
- passwd=str)
-c['status'].append(pbl)
-@end example
-
-This sets up a PB listener on the given TCP port, to which a PB-based
-status client can connect and retrieve status information.
-@code{buildbot statusgui} (@pxref{statusgui}) is an example of such a
-status client. The @code{port} argument can also be a strports
-specification string.
-
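-A concrete (purely illustrative) configuration might look like:
-
-@example
-from buildbot.status import client
-pbl = client.PBListener(port=8010, user="statusclient",
-                        passwd="clientpw")
-c['status'].append(pbl)
-@end example
-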
-@node Command-line tool, Resources, Status Delivery, Top
-@chapter Command-line tool
-
-The @command{buildbot} command-line tool can be used to start or stop a
-buildmaster or buildslave, and to interact with a running buildmaster.
-Some of its subcommands are intended for buildmaster admins, while
-some are for developers who are editing the code that the buildbot is
-monitoring.
-
-@menu
-* Administrator Tools::
-* Developer Tools::
-* Other Tools::
-* .buildbot config directory::
-@end menu
-
-@node Administrator Tools, Developer Tools, Command-line tool, Command-line tool
-@section Administrator Tools
-
-The following @command{buildbot} sub-commands are intended for
-buildmaster administrators:
-
-@heading master
-
-This creates a new directory and populates it with files that allow it
-to be used as a buildmaster's base directory.
-
-@example
-buildbot master BASEDIR
-@end example
-
-@heading slave
-
-This creates a new directory and populates it with files that let it
-be used as a buildslave's base directory. You must provide several
-arguments, which are used to create the initial @file{buildbot.tac}
-file.
-
-@example
-buildbot slave @var{BASEDIR} @var{MASTERHOST}:@var{PORT} @var{SLAVENAME} @var{PASSWORD}
-@end example
-
-@heading start
-
-This starts a buildmaster or buildslave which was already created in
-the given base directory. The daemon is launched in the background,
-with events logged to a file named @file{twistd.log}.
-
-@example
-buildbot start BASEDIR
-@end example
-
-@heading stop
-
-This terminates the daemon (either buildmaster or buildslave) running
-in the given directory.
-
-@example
-buildbot stop BASEDIR
-@end example
-
-@heading sighup
-
-This sends a SIGHUP to the buildmaster running in the given directory,
-which causes it to re-read its @file{master.cfg} file.
-
-@example
-buildbot sighup BASEDIR
-@end example
-
-@node Developer Tools, Other Tools, Administrator Tools, Command-line tool
-@section Developer Tools
-
-These tools are provided for use by the developers who are working on
-the code that the buildbot is monitoring.
-
-@menu
-* statuslog::
-* statusgui::
-* try::
-@end menu
-
-@node statuslog, statusgui, Developer Tools, Developer Tools
-@subsection statuslog
-
-@example
-buildbot statuslog --master @var{MASTERHOST}:@var{PORT}
-@end example
-
-This command starts a simple text-based status client, one which just
-prints out a new line each time an event occurs on the buildmaster.
-
-The @option{--master} option provides the location of the
-@code{client.PBListener} status port, used to deliver build
-information to realtime status clients. The option is always in the
-form of a string, with hostname and port number separated by a colon
-(@code{HOSTNAME:PORTNUM}). Note that this port is @emph{not} the same
-as the slaveport (although a future version may allow the same port
-number to be used for both purposes).
-
-The @option{--master} option can also be provided by the
-@code{masterstatus} name in @file{.buildbot/options} (@pxref{.buildbot
-config directory}).
-
-@node statusgui, try, statuslog, Developer Tools
-@subsection statusgui
-
-@cindex statusgui
-
-If you have set up a PBListener (@pxref{PBListener}), you will be able
-to monitor your Buildbot using a simple Gtk+ application invoked with
-the @code{buildbot statusgui} command:
-
-@example
-buildbot statusgui --master @var{MASTERHOST}:@var{PORT}
-@end example
-
-This command starts a simple Gtk+-based status client, which contains
-a few boxes for each Builder that change color as events occur. It
-uses the same @option{--master} argument as the @command{buildbot
-statuslog} command (@pxref{statuslog}).
-
-@node try, , statusgui, Developer Tools
-@subsection try
-
-This lets a developer ask the question ``What would happen if I
-committed this patch right now?''. It runs the unit test suite (across
-multiple build platforms) on the developer's current code, allowing
-them to make sure they will not break the tree when they finally
-commit their changes.
-
-The @command{buildbot try} command is meant to be run from within a
-developer's local tree, and starts by figuring out the base revision
-of that tree (what revision was current the last time the tree was
-updated), and a patch that can be applied to that revision of the tree
-to make it match the developer's copy. This (revision, patch) pair is
-then sent to the buildmaster, which runs a build with that
-SourceStamp. If you want, the tool will emit status messages as the
-builds run, and will not terminate until the first failure has been
-detected (or the last success).
-
-For this command to work, several pieces must be in place:
-
-
-@heading TryScheduler
-
-The buildmaster must have a @code{scheduler.Try} instance in
-the config file's @code{c['schedulers']} list. This lets the
-administrator control who may initiate these ``trial'' builds, which
-branches are eligible for trial builds, and which Builders should be
-used for them.
-
-The @code{TryScheduler} has various means to accept build requests:
-all of them enforce more security than the usual buildmaster ports do.
-Any source code being built can be used to compromise the buildslave
-accounts, but in general that code must be checked out from the VC
-repository first, so only people with commit privileges can get
-control of the buildslaves. The usual force-build control channels can
-waste buildslave time but do not allow arbitrary commands to be
-executed by people who don't have those commit privileges. However,
-the source code patch that is provided with the trial build does not
-have to go through the VC system first, so it is important to make
-sure these builds cannot be abused by a non-committer to acquire as
-much control over the buildslaves as a committer has. Ideally, only
-developers who have commit access to the VC repository would be able
-to start trial builds, but unfortunately the buildmaster does not, in
-general, have access to the VC system's user list.
-
-As a result, the @code{TryScheduler} requires a bit more
-configuration. There are currently two ways to set this up:
-
-@table @strong
-@item jobdir (ssh)
-
-This approach creates a command queue directory, called the
-``jobdir'', in the buildmaster's working directory. The buildmaster
-admin sets the ownership and permissions of this directory to only
-grant write access to the desired set of developers, all of whom must
-have accounts on the machine. The @code{buildbot try} command creates
-a special file containing the source stamp information and drops it in
-the jobdir, just like a standard maildir. When the buildmaster notices
-the new file, it unpacks the information inside and starts the builds.
-
-The config file entries used by 'buildbot try' either specify a local
-queuedir (for which write and mv are used) or a remote one (using scp
-and ssh).
-
-The advantage of this scheme is that it is quite secure, the
-disadvantage is that it requires fiddling outside the buildmaster
-config (to set the permissions on the jobdir correctly). If the
-buildmaster machine happens to also house the VC repository, then it
-can be fairly easy to keep the VC userlist in sync with the
-trial-build userlist. If they are on different machines, this will be
-much more of a hassle. It may also involve granting developer accounts
-on a machine that would not otherwise require them.
-
-To implement this, the @command{buildbot try} command invokes
-@code{ssh -l username host buildbot tryserver ARGS}, passing the
-patch contents over stdin. The arguments must include the inlet
-directory and the revision information.
-
-@item user+password (PB)
-
-In this approach, each developer gets a username/password pair, which
-are all listed in the buildmaster's configuration file. When the
-developer runs @code{buildbot try}, their machine connects to the
-buildmaster via PB and authenticates themselves using that username
-and password, then sends a PB command to start the trial build.
-
-The advantage of this scheme is that the entire configuration is
-performed inside the buildmaster's config file. The disadvantages are
-that it is less secure (while the ``cred'' authentication system does
-not expose the password in plaintext over the wire, it does not offer
-most of the other security properties that SSH does). In addition, the
-buildmaster admin is responsible for maintaining the username/password
-list, adding and deleting entries as developers come and go.
-
-@end table
-
-
-For example, to set up the ``jobdir'' style of trial build, using a
-command queue directory of @file{MASTERDIR/jobdir} (and assuming that
-all your project developers were members of the @code{developers} unix
-group), you would first create that directory (with @command{mkdir
-MASTERDIR/jobdir MASTERDIR/jobdir/new MASTERDIR/jobdir/cur
-MASTERDIR/jobdir/tmp; chgrp developers MASTERDIR/jobdir
-MASTERDIR/jobdir/*; chmod g+rwx,o-rwx MASTERDIR/jobdir
-MASTERDIR/jobdir/*}), and then use the following scheduler in the
-buildmaster's config file:
-
-@example
-from buildbot.scheduler import Try_Jobdir
-s = Try_Jobdir("try1", ["full-linux", "full-netbsd", "full-OSX"],
- jobdir="jobdir")
-c['schedulers'] = [s]
-@end example
-
-Note that you must create the jobdir before telling the buildmaster to
-use this configuration, otherwise you will get an error. Also remember
-that the buildmaster must be able to read and write to the jobdir as
-well. Be sure to watch the @file{twistd.log} file (@pxref{Logfiles})
-as you start using the jobdir, to make sure the buildmaster is happy
-with it.
-
-To use the username/password form of authentication, create a
-@code{Try_Userpass} instance instead. It takes the same
-@code{builderNames} argument as the @code{Try_Jobdir} form, but
-accepts an additional @code{port} argument (to specify the TCP port to
-listen on) and a @code{userpass} list of username/password pairs to
-accept. Remember to use good passwords for this: the security of the
-buildslave accounts depends upon it:
-
-@example
-from buildbot.scheduler import Try_Userpass
-s = Try_Userpass("try2", ["full-linux", "full-netbsd", "full-OSX"],
- port=8031, userpass=[("alice","pw1"), ("bob", "pw2")] )
-c['schedulers'] = [s]
-@end example
-
-Like most places in the buildbot, the @code{port} argument takes a
-strports specification. See @code{twisted.application.strports} for
-details.
-
-
-@heading locating the master
-
-The @command{try} command needs to be told how to connect to the
-@code{TryScheduler}, and must know which of the authentication
-approaches described above is in use by the buildmaster. You specify
-the approach by using @option{--connect=ssh} or @option{--connect=pb}
-(or @code{try_connect = 'ssh'} or @code{try_connect = 'pb'} in
-@file{.buildbot/options}).
-
-For the PB approach, the command must be given a @option{--master}
-argument (in the form HOST:PORT) that points to TCP port that you
-picked in the @code{Try_Userpass} scheduler. It also takes a
-@option{--username} and @option{--passwd} pair of arguments that match
-one of the entries in the buildmaster's @code{userpass} list. These
-arguments can also be provided as @code{try_master},
-@code{try_username}, and @code{try_password} entries in the
-@file{.buildbot/options} file.
-
-For the SSH approach, the command must be given @option{--tryhost},
-@option{--username}, and optionally @option{--password} (TODO:
-really?) to get to the buildmaster host. It must also be given
-@option{--trydir}, which points to the inlet directory configured
-above. The trydir can be relative to the user's home directory, but
-most of the time you will use an explicit path like
-@file{~buildbot/project/trydir}. These arguments can be provided in
-@file{.buildbot/options} as @code{try_host}, @code{try_username},
-@code{try_password}, and @code{try_dir}.
-
-In addition, the SSH approach needs to connect to a PBListener status
-port, so it can retrieve and report the results of the build (the PB
-approach uses the existing connection to retrieve status information,
-so this step is not necessary). This requires a @option{--master}
-argument, or a @code{masterstatus} entry in @file{.buildbot/options},
-in the form of a HOSTNAME:PORT string.
-
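-For example, a @file{.buildbot/options} file for the PB approach
-might contain (host, port, and credentials are placeholders matching
-the @code{Try_Userpass} example above):
-
-@example
-try_connect = 'pb'
-try_master = 'buildmaster.example.org:8031'
-try_username = 'alice'
-try_password = 'pw1'
-@end example
-
-while the SSH approach would use something like:
-
-@example
-try_connect = 'ssh'
-try_host = 'buildmaster.example.org'
-try_username = 'alice'
-try_dir = '~buildbot/project/trydir'
-masterstatus = 'buildmaster.example.org:12345'
-@end example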
-
-@heading choosing the Builders
-
-A trial build is performed on multiple Builders at the same time, and
-the developer gets to choose which Builders are used (limited to a set
-selected by the buildmaster admin with the TryScheduler's
-@code{builderNames=} argument). The set you choose will depend upon
-what your goals are: if you are concerned about cross-platform
-compatibility, you should use multiple Builders, one from each
-platform of interest. You might use just one builder if that platform
-has libraries or other facilities that allow better test coverage than
-what you can accomplish on your own machine, or faster test runs.
-
-The set of Builders to use can be specified with multiple
-@option{--builder} arguments on the command line. It can also be
-specified with a single @code{try_builders} option in
-@file{.buildbot/options} that uses a list of strings to specify all
-the Builder names:
-
-@example
-try_builders = ["full-OSX", "full-win32", "full-linux"]
-@end example
-
-@heading specifying the VC system
-
-The @command{try} command also needs to know how to take the
-developer's current tree and extract the (revision, patch)
-source-stamp pair. Each VC system uses a different process, so you
-start by telling the @command{try} command which VC system you are
-using, with an argument like @option{--vc=cvs} or @option{--vc=tla}.
-This can also be provided as @code{try_vc} in
-@file{.buildbot/options}.
-
-The following names are recognized: @code{cvs} @code{svn} @code{baz}
-@code{tla} @code{hg} @code{darcs}
-
-
-@heading finding the top of the tree
-
-Some VC systems (notably CVS and SVN) track each directory
-more-or-less independently, which means the @command{try} command
-needs to move up to the top of the project tree before it will be able
-to construct a proper full-tree patch. To accomplish this, the
-@command{try} command will crawl up through the parent directories
-until it finds a marker file. The default name for this marker file is
-@file{.buildbot-top}, so when you are using CVS or SVN you should
-@code{touch .buildbot-top} from the top of your tree before running
-@command{buildbot try}. Alternatively, you can use a filename like
-@file{ChangeLog} or @file{README}, since many projects put one of
-these files in their top-most directory (and nowhere else). To set
-this filename, use @option{--try-topfile=ChangeLog}, or set it in the
-options file with @code{try_topfile = 'ChangeLog'}.
-
-You can also manually set the top of the tree with
-@option{--try-topdir=~/trees/mytree}, or @code{try_topdir =
-'~/trees/mytree'}. If you use @code{try_topdir} in a
-@file{.buildbot/options} file, you will need a separate options file
-for each tree you use, so it may be more convenient to use the
-@code{try_topfile} approach instead.
-
-Other VC systems which work on full projects instead of individual
-directories (tla, baz, darcs, monotone, mercurial) do not require
-@command{try} to know the top directory, so the @option{--try-topfile}
-and @option{--try-topdir} arguments will be ignored.
-@c is this true? I think I currently require topdirs all the time.
-
-If the @command{try} command cannot find the top directory, it will
-abort with an error message.
-
-@heading determining the branch name
-
-Some VC systems record the branch information in a way that ``try''
-can locate it, in particular Arch (both @command{tla} and
-@command{baz}). For the others, if you are using something other than
-the default branch, you will have to tell the buildbot which branch
-your tree is using. You can do this with either the @option{--branch}
-argument, or a @code{try_branch} entry in the
-@file{.buildbot/options} file.
-
-@heading determining the revision and patch
-
-Each VC system has a separate approach for determining the tree's base
-revision and computing a patch.
-
-@table @code
-
-@item CVS
-
-@command{try} pretends that the tree is up to date. It converts the
-current time into a @code{-D} time specification, uses it as the base
-revision, and computes the diff between the upstream tree as of that
-point in time versus the current contents. This works, more or less,
-but requires that the local clock be in reasonably good sync with the
-repository.
-
-@item SVN
-@command{try} does a @code{svn status -u} to find the latest
-repository revision number (emitted on the last line in the ``Status
-against revision: NN'' message). It then performs an @code{svn diff
--rNN} to find out how your tree differs from the repository version,
-and sends the resulting patch to the buildmaster. If your tree is not
-up to date, this will result in the ``try'' tree being created with
-the latest revision, then @emph{backwards} patches applied to bring it
-``back'' to the version you actually checked out (plus your actual
-code changes), but this will still result in the correct tree being
-used for the build.
-
-@item baz
-@command{try} does a @code{baz tree-id} to determine the
-fully-qualified version and patch identifier for the tree
-(ARCHIVE/VERSION--patch-NN), and uses the VERSION--patch-NN component
-as the base revision. It then does a @code{baz diff} to obtain the
-patch.
-
-@item tla
-@command{try} does a @code{tla tree-version} to get the
-fully-qualified version identifier (ARCHIVE/VERSION), then takes the
-first line of @code{tla logs --reverse} to figure out the base
-revision. Then it does @code{tla changes --diffs} to obtain the patch.
-
-@item Darcs
-@code{darcs changes --context} emits a text file that contains a list
-of all patches back to and including the last tag that was made. This text
-file (plus the location of a repository that contains all these
-patches) is sufficient to re-create the tree. Therefore the contents
-of this ``context'' file @emph{are} the revision stamp for a
-Darcs-controlled source tree.
-
-So @command{try} does a @code{darcs changes --context} to determine
-what your tree's base revision is, and then does a @code{darcs diff
--u} to compute the patch relative to that revision.
-
-@item Mercurial
-@code{hg identify} emits a short revision ID (basically a truncated
-SHA1 hash of the current revision's contents), which is used as the
-base revision. @code{hg diff} then provides the patch relative to that
-revision. For @command{try} to work, your working directory must only
-have patches that are available from the same remotely-available
-repository that the build process' @code{step.Mercurial} will use.
-
-@c TODO: monotone, git
-@end table
-
-@heading waiting for results
-
-If you provide the @option{--wait} option (or @code{try_wait = True}
-in @file{.buildbot/options}), the @command{buildbot try} command will
-wait until your changes have either been proven good or bad before
-exiting. Unless you use the @option{--quiet} option (or
-@code{try_quiet=True}), it will emit a progress message every 60
-seconds until the builds have completed.
-
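-Putting the pieces together, a complete (illustrative) invocation from
-the top of a Subversion tree might look like this, with the host name
-and credentials being placeholders:
-
-@example
-% cd ~/trees/mytree
-% buildbot try --connect=pb \
-    --master=buildmaster.example.org:8031 \
-    --username=alice --passwd=pw1 \
-    --vc=svn --builder=full-linux --builder=full-OSX --wait
-@end example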
-
-@node Other Tools, .buildbot config directory, Developer Tools, Command-line tool
-@section Other Tools
-
-These tools are generally used by buildmaster administrators.
-
-@menu
-* sendchange::
-* debugclient::
-@end menu
-
-@node sendchange, debugclient, Other Tools, Other Tools
-@subsection sendchange
-
-This command is used to tell the buildmaster about source changes. It
-is intended to be used from within a commit script, installed on the
-VC server.
-
-@example
-buildbot sendchange --master @var{MASTERHOST}:@var{PORT} --username @var{USER} @var{FILENAMES..}
-@end example
-
-There are other (optional) arguments which can influence the
-@code{Change} that gets submitted:
-
-@table @code
-@item --branch
-This provides the (string) branch specifier. If omitted, it defaults
-to None, indicating the ``default branch''. All files included in this
-Change must be on the same branch.
-
-@item --revision_number
-This provides a (numeric) revision number for the change, used for VC systems
-that use numeric transaction numbers (like Subversion).
-
-@item --revision
-This provides a (string) revision specifier, for VC systems that use
-strings (Arch would use something like patch-42 etc).
-
-@item --revision_file
-This provides a filename which will be opened and the contents used as
-the revision specifier. This is specifically for Darcs, which uses the
-output of @command{darcs changes --context} as a revision specifier.
-This context file can be a couple of kilobytes long, spanning a
-couple of lines per patch, and would be a hassle to pass as a
-command-line argument.
-
-@item --comments
-This provides the change comments as a single argument. You may want
-to use @option{--logfile} instead.
-
-@item --logfile
-This instructs the tool to read the change comments from the given
-file. If you use @code{-} as the filename, the tool will read the
-change comments from stdin.
-@end table
-
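-For example, a post-commit hook might invoke something along these
-lines (the host, port, and revision number are placeholders):
-
-@example
-buildbot sendchange --master buildmaster.example.org:9989 \
-    --username alice --branch trunk --revision_number 1234 \
-    --logfile /tmp/commit-message lib/foo.py lib/bar.py
-@end example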
-
-@node debugclient, , sendchange, Other Tools
-@subsection debugclient
-
-@example
-buildbot debugclient --master @var{MASTERHOST}:@var{PORT} --passwd @var{DEBUGPW}
-@end example
-
-This launches a small Gtk+/Glade-based debug tool, connecting to the
-buildmaster's ``debug port''. This debug port shares the same port
-number as the slaveport (@pxref{Setting the slaveport}), but the
-@code{debugPort} is only enabled if you set a debug password in the
-buildmaster's config file (@pxref{Debug options}). The
-@option{--passwd} option must match the @code{c['debugPassword']}
-value.
-
-@option{--master} can also be provided in @file{.debug/options} by the
-@code{master} key. @option{--passwd} can be provided by the
-@code{debugPassword} key.
-
-The @code{Connect} button must be pressed before any of the other
-buttons will be active. This establishes the connection to the
-buildmaster. The other sections of the tool are as follows:
-
-@table @code
-@item Reload .cfg
-Forces the buildmaster to reload its @file{master.cfg} file. This is
-equivalent to sending a SIGHUP to the buildmaster, but can be done
-remotely through the debug port. Note that it is a good idea to be
-watching the buildmaster's @file{twistd.log} as you reload the config
-file, as any errors which are detected in the config file will be
-announced there.
-
-@item Rebuild .py
-(not yet implemented). The idea here is to use Twisted's ``rebuild''
-facilities to replace the buildmaster's running code with a new
-version. Even if this worked, it would only be used by buildbot
-developers.
-
-@item poke IRC
-This locates a @code{words.IRC} status target and causes it to emit a
-message on all the channels to which it is currently connected. This
-was used to debug a problem in which the buildmaster lost the
-connection to the IRC server and did not attempt to reconnect.
-
-@item Commit
-This allows you to inject a Change, just as if a real one had been
-delivered by whatever VC hook you are using. You can set the name of
-the committed file and the name of the user who is doing the commit.
-Optionally, you can also set a revision for the change. If the
-revision you provide looks like a number, it will be sent as an
-integer, otherwise it will be sent as a string.
-
-@item Force Build
-This lets you force a Builder (selected by name) to start a build of
-the current source tree.
-
-@item Currently
-(obsolete). This was used to manually set the status of the given
-Builder, but the status-assignment code was changed in an incompatible
-way and these buttons are no longer meaningful.
-
-@end table
-
-
-@node .buildbot config directory, , Other Tools, Command-line tool
-@section .buildbot config directory
-
-Many of the @command{buildbot} tools must be told how to contact the
-buildmaster that they interact with. This specification can be
-provided as a command-line argument, but most of the time it will be
-easier to set them in an ``options'' file. The @command{buildbot}
-command will look for a special directory named @file{.buildbot},
-starting from the current directory (where the command was run) and
-crawling upwards, eventually looking in the user's home directory. It
-will look for a file named @file{options} in this directory, and will
-evaluate it as a python script, looking for certain names to be set.
-You can just put simple @code{name = 'value'} pairs in this file to
-set the options.
-
-For a description of the names used in this file, please see the
-documentation for the individual @command{buildbot} sub-commands. The
-following is a brief sample of what this file's contents could be.
-
-@example
-# for status-reading tools
-masterstatus = 'buildbot.example.org:12345'
-# for 'sendchange' or the debug port
-master = 'buildbot.example.org:18990'
-debugPassword = 'eiv7Po'
-@end example
-
-@table @code
-@item masterstatus
-Location of the @code{client.PBListener} status port, used by
-@command{statuslog} and @command{statusgui}.
-
-@item master
-Location of the @code{debugPort} (for @command{debugclient}). Also the
-location of the @code{pb.PBChangeSource} (for @command{sendchange}).
-Usually shares the slaveport, but a future version may make it
-possible to have these listen on a separate port number.
-
-@item debugPassword
-Must match the value of @code{c['debugPassword']}, used to protect the
-debug port, for the @command{debugclient} command.
-
-@item username
-Provides a default username for the @command{sendchange} command.
-
-@end table
-
-
-
-@node Resources, Developer's Appendix, Command-line tool, Top
-@chapter Resources
-
-The Buildbot's home page is at @uref{http://buildbot.sourceforge.net/}
-
-For configuration questions and general discussion, please use the
-@code{buildbot-devel} mailing list. The subscription instructions and
-archives are available at
-@uref{http://lists.sourceforge.net/lists/listinfo/buildbot-devel}
-
-@node Developer's Appendix, Index, Resources, Top
-@unnumbered Developer's Appendix
-
-This appendix contains random notes about the implementation of the
-Buildbot, and is likely to only be of use to people intending to
-extend the Buildbot's internals.
-
-The buildmaster consists of a tree of Service objects, which is shaped
-as follows:
-
-@example
-BuildMaster
- ChangeMaster (in .change_svc)
- [IChangeSource instances]
- [IScheduler instances] (in .schedulers)
- BotMaster (in .botmaster)
- [IStatusTarget instances] (in .statusTargets)
-@end example
-
-The BotMaster has a collection of Builder objects as values of its
-@code{.builders} dictionary.
-
-
-@node Index, , Developer's Appendix, Top
-@unnumbered Index
-
-@printindex cp
-
-@bye
-
diff --git a/buildbot/buildbot-source/docs/epyrun b/buildbot/buildbot-source/docs/epyrun
deleted file mode 100644
index db60b5a28..000000000
--- a/buildbot/buildbot-source/docs/epyrun
+++ /dev/null
@@ -1,195 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import os
-
-from twisted.python import reflect
-from twisted.internet import reactor
-
-# epydoc
-import epydoc
-assert epydoc.__version__[0] == '2', "You need epydoc 2.x!"
-from epydoc.cli import cli
-
-class FakeModule:
-
- def __init__(self, name, level):
- self.__level = level
- self.__name__ = name
-
- def __repr__(self):
- return '<Fake %s>' % self.__name__
- __str__ = __repr__
-
- def __nonzero__(self):
- return 1
-
- def __call__(self, *args, **kw):
- pass #print 'Called:', args
-
- def __getattr__(self, attr):
- if self.__level == 0:
- raise AttributeError
- return FakeModule(self.__name__+'.'+attr, self.__level-1)
-
- def __cmp__(self, other):
-        if not hasattr(other, '__name__'):
- return -1
- return cmp(self.__name__, other.__name__)
-
-
-def fakeOut(modname):
- modpath = modname.split('.')
- prevmod = None
- for m in range(len(modpath)):
- mp = '.'.join(modpath[:m+1])
- nm = FakeModule(mp, 4)
- if prevmod:
- setattr(prevmod, modpath[m], nm)
- sys.modules[mp] = nm
- prevmod = nm
-
-#fakeOut("twisted")
-
-# HACK: Another "only doc what we tell you". We don't want epydoc to
-# automatically recurse into subdirectories: "twisted"'s presence was
-# causing "twisted/test" to be docced, even thought we explicitly
-# didn't put any twisted/test in our modnames.
-
-from epydoc import imports
-orig_find_modules = imports.find_modules
-
-import re
-
-def find_modules(dirname):
- if not os.path.isdir(dirname): return []
- found_init = 0
- modules = {}
- dirs = []
-
- # Search for directories & modules, and check for __init__.py.
- # Don't include duplicates (like foo.py and foo.pyc), and give
- # precedance to the .py files.
- for file in os.listdir(dirname):
- filepath = os.path.join(dirname, file)
- if os.path.isdir(filepath): dirs.append(filepath)
- elif not re.match(r'\w+.py.?', file):
- continue # Ignore things like ".#foo.py" or "a-b.py"
- elif file[-3:] == '.py':
- modules[file] = os.path.join(dirname, file)
- if file == '__init__.py': found_init = 1
- elif file[-4:-1] == '.py':
- modules.setdefault(file[:-1], file)
- if file[:-1] == '__init__.py': found_init = 1
- modules = modules.values()
-
- # If there was no __init__.py, then this isn't a package
- # directory; return nothing.
- if not found_init: return []
-
- # Recurse to the child directories.
- # **twisted** here's the change: commented next line out
- #for d in dirs: modules += find_modules(d)
- return modules
-
-imports.find_modules = find_modules
-
-
-
-# Now, set up the list of modules for epydoc to document
-modnames = []
-def addMod(arg, path, files):
- for fn in files:
- file = os.path.join(path, fn).replace('%s__init__'%os.sep, '')
- if file[-3:] == '.py' and not file.count('%stest%s' % (os.sep,os.sep)):
- modName = file[:-3].replace(os.sep,'.')
- try:
- #print 'pre-loading', modName
- reflect.namedModule(modName)
- except ImportError, e:
- print 'import error:', modName, e
- except Exception, e:
- print 'other error:', modName, e
- else:
- modnames.append(modName)
-
-document_all = True # are we doing a full build?
-names = ['buildbot/'] #default, may be overriden below
-
-#get list of modules/pkgs on cmd-line
-try:
- i = sys.argv.index("--modules")
-except:
- pass
-else:
- names = sys.argv[i+1:]
- document_all = False
- sys.argv[i:] = []
- #sanity check on names
- for i in range(len(names)):
- try:
- j = names[i].rindex('buildbot/')
- except:
- raise SystemExit, 'You can only specify buildbot modules or packages'
- else:
- #strip off any leading directories before the 'twisted/'
- #dir. this makes it easy to specify full paths, such as
- #from TwistedEmacs
- names[i] = names[i][j:]
-
-    old_out_dir = "html"
-    tmp_dir = "epydocs-tmp"  # scratch output dir (name assumed)
-    #if -o was specified, we need to change it to point to a tmp dir
-    #otherwise add our own -o option
- try:
- i = sys.argv.index('-o')
- old_out_dir = sys.argv[i+1]
- try:
- os.mkdir(tmp_dir)
- except OSError:
- pass
- sys.argv[i+1] = tmp_dir
- except ValueError:
- sys.argv[1:1] = ['-o', tmp_dir]
-
-osrv = sys.argv
-sys.argv=["IGNORE"]
-
-for name in names:
- if name.endswith(".py"):
- # turn it in to a python module name
- name = name[:-3].replace(os.sep, ".")
- try:
- reflect.namedModule(name)
- except ImportError:
- print 'import error:', name
- except:
- print 'other error:', name
- else:
- modnames.append(name)
- else: #assume it's a dir
- os.path.walk(name, addMod, None)
-
-sys.argv = osrv
-
-if 'buildbot.test' in modnames:
- modnames.remove('buildbot.test')
-##if 'twisted' in modnames:
-## modnames.remove('twisted')
-
-sys.argv.extend(modnames)
-
-import buildbot
-
-
-sys.argv[1:1] = [
- '-n', 'BuildBot %s' % buildbot.version,
- '-u', 'http://buildbot.sourceforge.net/', '--no-private']
-
-# Make it easy to profile epyrun
-if 0:
- import profile
- profile.run('cli()', 'epyrun.prof')
-else:
- cli()
-
-print 'Done!'
diff --git a/buildbot/buildbot-source/docs/examples/glib_master.cfg b/buildbot/buildbot-source/docs/examples/glib_master.cfg
deleted file mode 100644
index e595ea0e9..000000000
--- a/buildbot/buildbot-source/docs/examples/glib_master.cfg
+++ /dev/null
@@ -1,55 +0,0 @@
-#! /usr/bin/python
-
-from buildbot.changes.freshcvs import FreshCVSSource
-from buildbot.process.step import CVS
-from buildbot.process.factory import GNUAutoconf, s
-from buildbot.status import html
-
-c = {}
-
-c['bots'] = [["bot1", "sekrit"]]
-
-c['sources'] = [FreshCVSSource("localhost", 4519,
- "foo", "bar",
- prefix="glib")]
-#c['sources'] = []
-c['builders'] = []
-
-repository = "/usr/home/warner/stuff/Projects/BuildBot/fakerep"
-cvsmodule = "glib"
-
-f1 = GNUAutoconf(s(CVS, cvsroot=repository, cvsmodule=cvsmodule,
- mode="update"),
- #configure="./configure --disable-shared",
- #configureEnv={'CFLAGS': '-O0'},
- configure=None)
-f1.useProgress = False
-
-b1 = {'name': "glib-quick",
- 'slavename': "bot1",
- 'builddir': "glib-quick",
- 'factory': f1,
- }
-c['builders'].append(b1)
-
-f2 = GNUAutoconf(s(CVS, cvsroot=repository, cvsmodule=cvsmodule,
- mode="copy"),
- configure="./configure --disable-shared",
- configureEnv={'CFLAGS': '-O0'},
- )
-
-b2 = {'name': "glib-full",
- 'slavename': "bot1",
- 'builddir': "glib-full",
- 'factory': f2,
- }
-c['builders'].append(b2)
-
-#c['irc'] = {("localhost", 6667): ('buildbot', ["private"])}
-
-c['slavePortnum'] = 8007
-
-c['status'] = [html.Waterfall(http_port=8080)]
-c['debugPassword'] = "asdf"
-
-BuildmasterConfig = c
diff --git a/buildbot/buildbot-source/docs/examples/hello.cfg b/buildbot/buildbot-source/docs/examples/hello.cfg
deleted file mode 100644
index b0f469bb5..000000000
--- a/buildbot/buildbot-source/docs/examples/hello.cfg
+++ /dev/null
@@ -1,102 +0,0 @@
-#! /usr/bin/python
-
-from buildbot import master
-from buildbot.process import factory, step
-from buildbot.status import html, client
-from buildbot.changes.pb import PBChangeSource
-s = factory.s
-
-BuildmasterConfig = c = {}
-
-c['bots'] = [["bot1", "sekrit"]]
-
-c['sources'] = []
-c['sources'].append(PBChangeSource(prefix="trunk"))
-c['builders'] = []
-
-if 1:
- steps = [
- s(step.CVS,
- cvsroot="/usr/home/warner/stuff/Projects/BuildBot/demo/Repository",
- cvsmodule="hello",
- mode="clobber",
- checkoutDelay=6,
- alwaysUseLatest=True,
- ),
- s(step.Configure),
- s(step.Compile),
- s(step.Test, command=["make", "check"]),
- ]
- b1 = {"name": "cvs-hello",
- "slavename": "bot1",
- "builddir": "cvs-hello",
- "factory": factory.BuildFactory(steps),
- }
- c['builders'].append(b1)
-
-if 1:
- svnrep="file:///usr/home/warner/stuff/Projects/BuildBot/demo/SVN-Repository"
- steps = [
- s(step.SVN,
- svnurl=svnrep+"/hello",
- mode="update",
- ),
- s(step.Configure),
- s(step.Compile),
- s(step.Test, command=["make", "check"]),
- ]
- b1 = {"name": "svn-hello",
- "slavename": "bot1",
- "builddir": "svn-hello",
- "factory": factory.BuildFactory(steps),
- }
- c['builders'].append(b1)
-
-if 1:
- steps = [
- s(step.Darcs,
- repourl="http://localhost/~warner/hello-darcs",
- mode="copy",
- ),
- s(step.Configure, command=["/bin/sh", "./configure"]),
- s(step.Compile),
- s(step.Test, command=["make", "check"]),
- ]
- b1 = {"name": "darcs-hello",
- "slavename": "bot1",
- "builddir": "darcs-hello",
- "factory": factory.BuildFactory(steps),
- }
- c['builders'].append(b1)
-
-if 1:
- steps = [
- s(step.Arch,
- url="http://localhost/~warner/hello-arch",
- version="gnu-hello--release--2.1.1",
- mode="copy",
- ),
- s(step.Configure),
- s(step.Compile),
- s(step.Test, command=["make", "check"]),
- ]
- b1 = {"name": "arch-hello",
- "slavename": "bot1",
- "builddir": "arch-hello",
- "factory": factory.BuildFactory(steps),
- }
- c['builders'].append(b1)
-
-
-c['projectName'] = "Hello"
-c['projectURL'] = "http://www.hello.example.com"
-c['buildbotURL'] = "http://localhost:8080"
-
-c['slavePortnum'] = 8007
-c['debugPassword'] = "asdf"
-c['manhole'] = master.Manhole(9900, "username", "password")
-
-c['status'] = [html.Waterfall(http_port=8080),
- client.PBListener(port=8008),
- ]
-
diff --git a/buildbot/buildbot-source/docs/examples/twisted_master.cfg b/buildbot/buildbot-source/docs/examples/twisted_master.cfg
deleted file mode 100644
index 979e8292e..000000000
--- a/buildbot/buildbot-source/docs/examples/twisted_master.cfg
+++ /dev/null
@@ -1,267 +0,0 @@
-#! /usr/bin/python
-
-# This configuration file is described in $BUILDBOT/docs/config.xhtml
-
-# This is used (with online=True) to run the Twisted Buildbot at
-# http://www.twistedmatrix.com/buildbot/ . Passwords and other secret
-# information are loaded from a neighboring file called 'private.py'.
-
-import sys
-sys.path.append('/home/buildbot/BuildBot/support-master')
-
-import os.path
-
-from buildbot import master
-from buildbot.changes.pb import PBChangeSource
-from buildbot.scheduler import Scheduler, Try_Userpass
-from buildbot.process import step
-from buildbot.process.factory import s
-from buildbot.process.process_twisted import \
- QuickTwistedBuildFactory, \
- FullTwistedBuildFactory, \
- TwistedDebsBuildFactory, \
- TwistedReactorsBuildFactory
-from buildbot.status import html, words, client, mail
-
-import private # holds passwords
-reload(private) # make it possible to change the contents without a restart
-
-BuildmasterConfig = c = {}
-
-# I set really=False when testing this configuration at home
-really = True
-usePBChangeSource = True
-
-
-c['bots'] = []
-for bot in private.bot_passwords.keys():
- c['bots'].append((bot, private.bot_passwords[bot]))
-
-c['sources'] = []
-
-# the Twisted buildbot currently uses the contrib/svn_buildbot.py script.
-# This makes a TCP connection to the ChangeMaster service to push Changes
-# into the build master. The script is invoked by
-# /svn/Twisted/hooks/post-commit, so it will only be run for things inside
-# the Twisted repository. However, the standard SVN practice is to put the
-# actual trunk in a subdirectory named "trunk/" (to leave room for
-# "branches/" and "tags/"). We want to only pay attention to the trunk, so
-# we use "trunk" as a prefix for the ChangeSource. This also strips off that
-# prefix, so that the Builders all see sensible pathnames (which means they
-# can do things like ignore the sandbox properly).
-
-source = PBChangeSource(prefix="trunk")
-c['sources'].append(source)
-
-
-## configure the builders
-
-if 0:
- # always build on trunk
- svnurl = "svn://svn.twistedmatrix.com/svn/Twisted/trunk"
- source_update = s(step.SVN, svnurl=svnurl, mode="update")
- source_copy = s(step.SVN, svnurl=svnurl, mode="copy")
- source_export = s(step.SVN, svnurl=svnurl, mode="export")
-else:
- # for build-on-branch, we use these instead
- baseURL = "svn://svn.twistedmatrix.com/svn/Twisted/"
- defaultBranch = "trunk"
- source_update = s(step.SVN, baseURL=baseURL, defaultBranch=defaultBranch,
- mode="update")
- source_copy = s(step.SVN, baseURL=baseURL, defaultBranch=defaultBranch,
- mode="copy")
- source_export = s(step.SVN, baseURL=baseURL, defaultBranch=defaultBranch,
- mode="export")
-
-
-builders = []
-
-
-b1 = {'name': "quick",
- 'slavename': "bot1",
- 'builddir': "quick",
- 'factory': QuickTwistedBuildFactory(source_update,
- python=["python2.3", "python2.4"]),
- }
-builders.append(b1)
-
-b23compile_opts = [
- "-Wignore::PendingDeprecationWarning:distutils.command.build_py",
- "-Wignore::PendingDeprecationWarning:distutils.command.build_ext",
- ]
-b23 = {'name': "full-2.3",
- 'slavename': "bot-exarkun-boson",
- 'builddir': "full2.3",
- 'factory': FullTwistedBuildFactory(source_copy,
- python=["python2.3", "-Wall"],
- # use -Werror soon
- compileOpts=b23compile_opts,
- processDocs=1,
- runTestsRandomly=1),
- }
-builders.append(b23)
-
-b24compile_opts = [
- "-Wignore::PendingDeprecationWarning:distutils.command.build_py",
- "-Wignore::PendingDeprecationWarning:distutils.command.build_ext",
- ]
-b24 = {'name': "full-2.4",
- 'slavenames': ["bot-exarkun"],
- 'builddir': "full2.4",
- 'factory': FullTwistedBuildFactory(source_copy,
- python=["python2.4", "-Wall"],
- # use -Werror soon
- compileOpts=b24compile_opts,
- runTestsRandomly=1),
- }
-builders.append(b24)
-
-# debuild is offline while we figure out how to build 2.0 .debs from SVN
-# b3 = {'name': "debuild",
-# 'slavename': "bot2",
-# 'builddir': "debuild",
-# 'factory': TwistedDebsBuildFactory(source_export,
-# python="python2.4"),
-# }
-# builders.append(b3)
-
-reactors = ['gtk2', 'gtk', 'qt', 'poll']
-b4 = {'name': "reactors",
- 'slavename': "bot2",
- 'builddir': "reactors",
- 'factory': TwistedReactorsBuildFactory(source_copy,
- python="python2.4",
- reactors=reactors),
- }
-builders.append(b4)
-
-# jml's machine is specifically for testing Qt
-bqt = {'name': "Qt",
- 'slavename': "bot-jml-qt",
- 'builddir': "qt",
- 'factory': TwistedReactorsBuildFactory(source_copy,
- python="python2.4",
- reactors=['qt']),
- }
-builders.append(bqt)
-
-jf = TwistedReactorsBuildFactory(source_copy,
- python="python2.4", reactors=["default"])
-jf.steps.insert(0, s(step.ShellCommand, workdir=".",
- command=["ktrace", "rm", "-rf", "Twisted"]))
-b24osx = {'name': "OS-X",
- 'slavename': "bot-jerub",
- 'builddir': "OSX-full2.4",
-# 'factory': TwistedReactorsBuildFactory(source_copy,
-# python="python2.4",
-# reactors=["default"],
-# ),
- 'factory': jf,
- }
-builders.append(b24osx)
-
-b24w32_select = {
- 'name': "win32-select",
- 'slavename': "bot-win32-select",
- 'builddir': "W32-full2.4-select",
- 'factory': TwistedReactorsBuildFactory(source_copy,
- python="python",
- compileOpts2=["-c","mingw32"],
- reactors=["default"]),
- }
-builders.append(b24w32_select)
-
-
-b24w32_win32er = {
- 'name': "win32-win32er",
- 'slavename': "bot-win32-win32er",
- 'builddir': "W32-full2.4-win32er",
- 'factory': TwistedReactorsBuildFactory(source_copy,
- python="python",
- compileOpts2=["-c","mingw32"],
- reactors=["win32"]),
- }
-builders.append(b24w32_win32er)
-
-
-b24w32_iocp = {
- 'name': "win32-iocp",
- 'slavename': "bot-win32-iocp",
- 'builddir': "W32-full2.4-iocp",
- 'factory': TwistedReactorsBuildFactory(source_copy,
- python="python",
- compileOpts2=["-c","mingw32"],
- reactors=["iocp"]),
- }
-builders.append(b24w32_iocp)
-
-
-b24freebsd = {'name': "freebsd",
- 'slavename': "bot-landonf",
- 'builddir': "freebsd-full2.4",
- 'factory':
- TwistedReactorsBuildFactory(source_copy,
- python="python2.4",
- reactors=["default",
- "kqueue",
- ]),
- }
-builders.append(b24freebsd)
-
-# b24threadless = {'name': 'threadless',
-# 'slavename': 'bot-threadless',
-# 'builddir': 'debian-threadless-2.4',
-# 'factory': TwistedReactorsBuildFactory(source_copy,
-# python='python',
-# reactors=['default'])}
-# builders.append(b24threadless)
-
-c['builders'] = builders
-
-# now set up the schedulers. We do this after setting up c['builders'] so we
-# can auto-generate a list of all of them.
-all_builders = [b['name'] for b in c['builders']]
-all_builders.sort()
-all_builders.remove("quick")
-
-## configure the schedulers
-s_quick = Scheduler(name="quick", branch=None, treeStableTimer=30,
- builderNames=["quick"])
-s_all = Scheduler(name="all", branch=None, treeStableTimer=5*60,
- builderNames=all_builders)
-s_try = Try_Userpass("try", all_builders, port=9989,
- userpass=private.try_users)
-
-c['schedulers'] = [s_quick, s_all, s_try]
-
-
-
-# configure other status things
-
-c['slavePortnum'] = 9987
-c['status'] = []
-if really:
- p = os.path.expanduser("~/.twistd-web-pb")
- c['status'].append(html.Waterfall(distrib_port=p))
-else:
- c['status'].append(html.Waterfall(http_port=9988))
-if really:
- c['status'].append(words.IRC(host="irc.us.freenode.net",
- nick='buildbot',
- channels=["twisted"]))
-
-c['debugPassword'] = private.debugPassword
-#c['interlocks'] = [("do-deb", ["full-2.2"], ["debuild"])]
-if hasattr(private, "manhole"):
- c['manhole'] = master.Manhole(*private.manhole)
-c['status'].append(client.PBListener(9936))
-m = mail.MailNotifier(fromaddr="buildbot@twistedmatrix.com",
- builders=["quick", "full-2.3"],
- sendToInterestedUsers=True,
- extraRecipients=["warner@lothar.com"],
- mode="problem",
- )
-c['status'].append(m)
-c['projectName'] = "Twisted"
-c['projectURL'] = "http://twistedmatrix.com/"
-c['buildbotURL'] = "http://twistedmatrix.com/buildbot/"
diff --git a/buildbot/buildbot-source/docs/gen-reference b/buildbot/buildbot-source/docs/gen-reference
deleted file mode 100644
index 1094c1674..000000000
--- a/buildbot/buildbot-source/docs/gen-reference
+++ /dev/null
@@ -1 +0,0 @@
-cd .. && python docs/epyrun -o docs/reference
diff --git a/buildbot/buildbot-source/setup.py b/buildbot/buildbot-source/setup.py
deleted file mode 100644
index 37ae3a374..000000000
--- a/buildbot/buildbot-source/setup.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#! /usr/bin/python
-
-import sys
-from distutils.core import setup
-from buildbot import version
-
-# Path: twisted!cvstoys!buildbot
-from distutils.command.install_data import install_data
-class install_data_twisted(install_data):
- """make sure data files are installed in package.
- this is evil.
- copied from Twisted/setup.py.
- """
- def finalize_options(self):
- self.set_undefined_options('install',
- ('install_lib', 'install_dir')
- )
- install_data.finalize_options(self)
-
-long_description="""
-The BuildBot is a system to automate the compile/test cycle required by
-most software projects to validate code changes. By automatically
-rebuilding and testing the tree each time something has changed, build
-problems are pinpointed quickly, before other developers are
-inconvenienced by the failure. The guilty developer can be identified
-and harassed without human intervention. By running the builds on a
-variety of platforms, developers who do not have the facilities to test
-their changes everywhere before checkin will at least know shortly
-afterwards whether they have broken the build or not. Warning counts,
-lint checks, image size, compile time, and other build parameters can
-be tracked over time, are more visible, and are therefore easier to
-improve.
-"""
-
-scripts = ["bin/buildbot"]
-if sys.platform == "win32":
- scripts.append("contrib/windows/buildbot.bat")
-
-setup(name="buildbot",
- version=version,
- description="BuildBot build automation system",
- long_description=long_description,
- author="Brian Warner",
- author_email="warner-buildbot@lothar.com",
- url="http://buildbot.sourceforge.net/",
- license="GNU GPL",
- packages=["buildbot",
- "buildbot.status",
- "buildbot.changes",
- "buildbot.process",
- "buildbot.clients",
- "buildbot.slave",
- "buildbot.scripts",
- "buildbot.test"],
- data_files=[("buildbot", ["buildbot/buildbot.png"]),
- ("buildbot/clients", ["buildbot/clients/debug.glade"]),
- ("buildbot/status", ["buildbot/status/classic.css"]),
- ("buildbot/scripts", ["buildbot/scripts/sample.cfg"]),],
- scripts = scripts,
- cmdclass={'install_data': install_data_twisted},
- )
-
-# Local Variables:
-# fill-column: 71
-# End: