author     Jeremy Allison <jra@samba.org>    2010-03-31 10:01:03 -0700
committer  Jeremy Allison <jra@samba.org>    2010-03-31 10:01:03 -0700
commit     2e839a636b2ea3f4d8dfcf5a8e99d9725787ba61
tree       42f219978f7d07d8fa196cb9ebd9db7be971450d
parent     f58d02dbeeeba037ee79fba93a707e959e90ffa3
parent     6f30b9a6ff57ca6112e6319c64c411d2bf09be79
Merge branch 'master' of ssh://git.samba.org/data/git/samba
-rw-r--r--  lib/subunit/Apache-2.0  202
-rw-r--r--  lib/subunit/BSD  26
-rw-r--r--  lib/subunit/COPYING  36
-rw-r--r--  lib/subunit/INSTALL  25
-rw-r--r--  lib/subunit/Makefile.am  136
-rw-r--r--  lib/subunit/NEWS  174
-rw-r--r--  lib/subunit/README  217
-rw-r--r--  lib/subunit/c++/README  50
-rw-r--r--  lib/subunit/c++/SubunitTestProgressListener.cpp  63
-rw-r--r--  lib/subunit/c++/SubunitTestProgressListener.h  56
-rw-r--r--  lib/subunit/c/README  68
-rw-r--r--  lib/subunit/c/include/subunit/child.h  79
-rw-r--r--  lib/subunit/c/lib/child.c  82
-rw-r--r--  lib/subunit/c/tests/test_child.c  192
-rw-r--r--  lib/subunit/configure.ac  75
-rwxr-xr-x  lib/subunit/filters/subunit-filter  105
-rwxr-xr-x  lib/subunit/filters/subunit-ls  93
-rwxr-xr-x  lib/subunit/filters/subunit-notify  65
-rwxr-xr-x  lib/subunit/filters/subunit-stats  41
-rwxr-xr-x  lib/subunit/filters/subunit-tags  26
-rwxr-xr-x  lib/subunit/filters/subunit2gtk  259
-rwxr-xr-x  lib/subunit/filters/subunit2junitxml  65
-rwxr-xr-x  lib/subunit/filters/subunit2pyunit  48
-rw-r--r--  lib/subunit/libcppunit_subunit.pc.in  11
-rw-r--r--  lib/subunit/libsubunit.pc.in  11
-rwxr-xr-x  lib/subunit/perl/Makefile.PL.in  20
-rw-r--r--  lib/subunit/perl/lib/Subunit.pm  162
-rw-r--r--  lib/subunit/perl/lib/Subunit/Diff.pm  85
-rwxr-xr-x  lib/subunit/perl/subunit-diff  31
-rwxr-xr-x  lib/subunit/runtests.py  138
-rw-r--r--  lib/subunit/shell/README  62
-rw-r--r--  lib/subunit/shell/share/subunit.sh  56
-rwxr-xr-x  lib/subunit/shell/tests/test_function_output.sh  97
-rwxr-xr-x  lib/subunit/shell/tests/test_source_library.sh  108
-rwxr-xr-x  lib/subunit/update.sh  16
-rw-r--r--  lib/testtools/HACKING  139
-rw-r--r--  lib/testtools/LICENSE  19
-rw-r--r--  lib/testtools/MANIFEST.in  9
-rw-r--r--  lib/testtools/MANUAL  213
-rw-r--r--  lib/testtools/Makefile  28
-rw-r--r--  lib/testtools/NEWS  191
-rw-r--r--  lib/testtools/README  54
-rwxr-xr-x  lib/testtools/setup.py  25
-rw-r--r--  lib/testtools/testtools/__init__.py (renamed from lib/subunit/python/testtools/__init__.py)  0
-rw-r--r--  lib/testtools/testtools/content.py (renamed from lib/subunit/python/testtools/content.py)  0
-rw-r--r--  lib/testtools/testtools/content_type.py (renamed from lib/subunit/python/testtools/content_type.py)  0
-rw-r--r--  lib/testtools/testtools/matchers.py (renamed from lib/subunit/python/testtools/matchers.py)  0
-rwxr-xr-x  lib/testtools/testtools/run.py (renamed from lib/subunit/python/testtools/run.py)  0
-rw-r--r--  lib/testtools/testtools/runtest.py (renamed from lib/subunit/python/testtools/runtest.py)  0
-rw-r--r--  lib/testtools/testtools/testcase.py (renamed from lib/subunit/python/testtools/testcase.py)  0
-rw-r--r--  lib/testtools/testtools/testresult/__init__.py (renamed from lib/subunit/python/testtools/testresult/__init__.py)  0
-rw-r--r--  lib/testtools/testtools/testresult/doubles.py (renamed from lib/subunit/python/testtools/testresult/doubles.py)  0
-rw-r--r--  lib/testtools/testtools/testresult/real.py (renamed from lib/subunit/python/testtools/testresult/real.py)  0
-rw-r--r--  lib/testtools/testtools/tests/__init__.py (renamed from lib/subunit/python/testtools/tests/__init__.py)  0
-rw-r--r--  lib/testtools/testtools/tests/helpers.py (renamed from lib/subunit/python/testtools/tests/helpers.py)  0
-rw-r--r--  lib/testtools/testtools/tests/test_content.py (renamed from lib/subunit/python/testtools/tests/test_content.py)  0
-rw-r--r--  lib/testtools/testtools/tests/test_content_type.py (renamed from lib/subunit/python/testtools/tests/test_content_type.py)  0
-rw-r--r--  lib/testtools/testtools/tests/test_matchers.py (renamed from lib/subunit/python/testtools/tests/test_matchers.py)  0
-rw-r--r--  lib/testtools/testtools/tests/test_runtest.py (renamed from lib/subunit/python/testtools/tests/test_runtest.py)  0
-rw-r--r--  lib/testtools/testtools/tests/test_testresult.py (renamed from lib/subunit/python/testtools/tests/test_testresult.py)  0
-rw-r--r--  lib/testtools/testtools/tests/test_testsuite.py (renamed from lib/subunit/python/testtools/tests/test_testsuite.py)  0
-rw-r--r--  lib/testtools/testtools/tests/test_testtools.py (renamed from lib/subunit/python/testtools/tests/test_testtools.py)  0
-rw-r--r--  lib/testtools/testtools/testsuite.py (renamed from lib/subunit/python/testtools/testsuite.py)  0
-rw-r--r--  lib/testtools/testtools/utils.py (renamed from lib/subunit/python/testtools/utils.py)  0
-rw-r--r--  lib/torture/subunit.c  24
-rw-r--r--  lib/torture/torture.c  21
-rw-r--r--  lib/torture/torture.h  9
-rwxr-xr-x  lib/update-external.sh  16
-rw-r--r--  selftest/Subunit.pm  29
-rwxr-xr-x  selftest/filter-subunit  49
-rwxr-xr-x  selftest/filter-subunit.pl  100
-rwxr-xr-x  selftest/format-subunit  40
-rwxr-xr-x  selftest/selftest.pl  6
-rw-r--r--  selftest/subunithelper.py  164
-rw-r--r--  source3/Makefile.in  2
-rw-r--r--  source3/smbd/process.c  14
-rwxr-xr-x  source4/lib/ldb/tests/python/deletetest.py  1
-rwxr-xr-x  source4/lib/ldb/tests/python/urgent_replication.py  8
-rwxr-xr-x  source4/script/installmisc.sh  9
-rw-r--r--  source4/scripting/python/config.mk  4
-rwxr-xr-x  source4/scripting/python/samba_external/missing.py  13
-rw-r--r--  source4/selftest/config.mk  2
-rw-r--r--  source4/selftest/knownfail  86
-rwxr-xr-x  source4/selftest/tests.sh  14
-rw-r--r--  source4/torture/basic/denytest.c  123
-rw-r--r--  source4/torture/raw/open.c  27
-rwxr-xr-x  source4/torture/smbtorture.c  8
87 files changed, 4124 insertions, 303 deletions
diff --git a/lib/subunit/Apache-2.0 b/lib/subunit/Apache-2.0
new file mode 100644
index 0000000000..d645695673
--- /dev/null
+++ b/lib/subunit/Apache-2.0
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/lib/subunit/BSD b/lib/subunit/BSD
new file mode 100644
index 0000000000..fa130cd529
--- /dev/null
+++ b/lib/subunit/BSD
@@ -0,0 +1,26 @@
+Copyright (c) Robert Collins and Subunit contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+3. Neither the name of Robert Collins nor the names of Subunit contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY ROBERT COLLINS AND SUBUNIT CONTRIBUTORS ``AS IS''
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
diff --git a/lib/subunit/COPYING b/lib/subunit/COPYING
new file mode 100644
index 0000000000..3ba50f8e08
--- /dev/null
+++ b/lib/subunit/COPYING
@@ -0,0 +1,36 @@
+Subunit is licensed under two licenses, the Apache License, Version 2.0 or the
+3-clause BSD License. You may use this project under either of these licenses
+- choose the one that works best for you.
+
+We require contributions to be licensed under both licenses. The primary
+difference between them is that the Apache license takes care of potential
+issues with Patents and other intellectual property concerns. This is
+important to Subunit as Subunit wants to be license compatible in a very
+broad manner to allow reuse and incorporation into other projects.
+
+Generally every source file in Subunit needs a license grant under both these
+licenses. As the code is shipped as a single unit, a brief form is used:
+----
+Copyright (c) [yyyy][,yyyy]* [name or 'Subunit Contributors']
+
+Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+license at the users choice. A copy of both licenses are available in the
+project source as Apache-2.0 and BSD. You may not use this file except in
+compliance with one of these two licences.
+
+Unless required by applicable law or agreed to in writing, software
+distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+license you chose for the specific language governing permissions and
+limitations under that license.
+----
+
+Code that has been incorporated into Subunit from other projects will
+naturally be under its own license, and will retain that license.
+
+A known list of such code is maintained here:
+* The python/iso8601 module by Michael Twomey, distributed under an MIT style
+ licence - see python/iso8601/LICENSE for details.
+* The runtests.py and python/subunit/tests/TestUtil.py modules are GPL test
+  support modules. They are not installed by Subunit - they are only ever
+ used on the build machine. Copyright 2004 Canonical Limited.
diff --git a/lib/subunit/INSTALL b/lib/subunit/INSTALL
new file mode 100644
index 0000000000..79cf7c18d0
--- /dev/null
+++ b/lib/subunit/INSTALL
@@ -0,0 +1,25 @@
+To install subunit
+------------------
+
+Bootstrap::
+ autoreconf -vi
+Configure::
+ ./configure
+Install::
+ make install
+
+Dependencies
+------------
+
+* Python for the filters
+* 'testtools' (On Debian and Ubuntu systems the 'python-testtools' package,
+ the testtools package on pypi, or https://launchpad.net/testtools) for
+ the extended test API which permits attachments. Version 0.9.2 or newer is
+ required. Of particular note, http://testtools.python-hosting.com/ is not
+ the testtools you want.
+* A C compiler for the C bindings
+* Perl for the Perl tools (including subunit-diff)
+* Check to run the subunit test suite.
+* python-gtk2 if you wish to use subunit2gtk
+* python-junitxml if you wish to use subunit2junitxml
+* pkg-config for configure detection of supporting libraries.
diff --git a/lib/subunit/Makefile.am b/lib/subunit/Makefile.am
new file mode 100644
index 0000000000..716fa0fe21
--- /dev/null
+++ b/lib/subunit/Makefile.am
@@ -0,0 +1,136 @@
+EXTRA_DIST = \
+ .bzrignore \
+ Apache-2.0 \
+ BSD \
+ INSTALL \
+ Makefile.am \
+ NEWS \
+ README \
+ c++/README \
+ c/README \
+ c/check-subunit-0.9.3.patch \
+ c/check-subunit-0.9.5.patch \
+ c/check-subunit-0.9.6.patch \
+ perl/Makefile.PL.in \
+ perl/lib/Subunit.pm \
+ perl/lib/Subunit/Diff.pm \
+ perl/subunit-diff \
+ python/iso8601/LICENSE \
+ python/iso8601/README \
+ python/iso8601/README.subunit \
+ python/iso8601/setup.py \
+ python/iso8601/test_iso8601.py \
+ python/subunit/tests/TestUtil.py \
+ python/subunit/tests/__init__.py \
+ python/subunit/tests/sample-script.py \
+ python/subunit/tests/sample-two-script.py \
+ python/subunit/tests/test_chunked.py \
+ python/subunit/tests/test_details.py \
+ python/subunit/tests/test_progress_model.py \
+ python/subunit/tests/test_subunit_filter.py \
+ python/subunit/tests/test_subunit_stats.py \
+ python/subunit/tests/test_subunit_tags.py \
+ python/subunit/tests/test_tap2subunit.py \
+ python/subunit/tests/test_test_protocol.py \
+ python/subunit/tests/test_test_results.py \
+ runtests.py \
+ shell/README \
+ shell/share/subunit.sh \
+ shell/subunit-ui.patch \
+ shell/tests/test_function_output.sh \
+ shell/tests/test_source_library.sh
+
+ACLOCAL_AMFLAGS = -I m4
+
+include_subunitdir = $(includedir)/subunit
+
+dist_bin_SCRIPTS = \
+ filters/subunit-filter \
+ filters/subunit-ls \
+ filters/subunit-stats \
+ filters/subunit-tags \
+ filters/subunit2gtk \
+ filters/subunit2junitxml \
+ filters/subunit2pyunit \
+ filters/tap2subunit
+
+TESTS_ENVIRONMENT = SHELL_SHARE='$(top_srcdir)/shell/share/' PYTHONPATH='$(abs_top_srcdir)/python':${PYTHONPATH}
+TESTS = runtests.py $(check_PROGRAMS)
+
+## install libsubunit.pc
+pcdatadir = $(libdir)/pkgconfig
+pcdata_DATA = \
+ libsubunit.pc \
+ libcppunit_subunit.pc
+
+pkgpython_PYTHON = \
+ python/subunit/__init__.py \
+ python/subunit/chunked.py \
+ python/subunit/details.py \
+ python/subunit/iso8601.py \
+ python/subunit/progress_model.py \
+ python/subunit/run.py \
+ python/subunit/test_results.py
+
+lib_LTLIBRARIES = libsubunit.la
+lib_LTLIBRARIES += libcppunit_subunit.la
+
+include_subunit_HEADERS = \
+ c/include/subunit/child.h \
+ c++/SubunitTestProgressListener.h
+
+check_PROGRAMS = \
+ c/tests/test_child
+
+check_SCRIPTS = \
+ runtests.py
+
+libsubunit_la_SOURCES = \
+ c/lib/child.c \
+ c/include/subunit/child.h
+
+libcppunit_subunit_la_SOURCES = \
+ c++/SubunitTestProgressListener.cpp \
+ c++/SubunitTestProgressListener.h
+
+tests_LDADD = @CHECK_LIBS@ $(top_builddir)/libsubunit.la
+c_tests_test_child_CFLAGS = -I$(top_srcdir)/c/include $(SUBUNIT_CFLAGS) @CHECK_CFLAGS@
+c_tests_test_child_LDADD = $(tests_LDADD)
+
+
+all-local: perl/Makefile
+ $(MAKE) -C perl all
+
+check-local: perl/Makefile
+ $(MAKE) -C perl check
+
+clean-local:
+ find . -type f -name "*.pyc" -exec rm {} ';'
+ rm -f perl/Makefile
+
+# Remove perl dir for VPATH builds.
+distclean-local:
+ -rmdir perl > /dev/null
+ -rm perl/Makefile.PL > /dev/null
+
+install-exec-local: perl/Makefile
+ $(MAKE) -C perl install
+
+mostlyclean-local:
+ rm -rf perl/blib
+ rm -rf perl/pm_to_blib
+
+# 'uninstall' perl files during distcheck
+uninstall-local:
+ if [ "_inst" = `basename ${prefix}` ]; then \
+ $(MAKE) -C perl uninstall_distcheck; \
+ rm -f "$(DESTDIR)$(bindir)"/subunit-diff; \
+ fi
+
+# The default for MakeMaker; can be overridden by exporting
+INSTALLDIRS ?= site
+
+perl/Makefile: perl/Makefile.PL
+ mkdir -p perl
+ cd perl && perl Makefile.PL INSTALLDIRS=${INSTALLDIRS}
+ -rm perl/Makefile.old > /dev/null
diff --git a/lib/subunit/NEWS b/lib/subunit/NEWS
new file mode 100644
index 0000000000..7c933c8f6e
--- /dev/null
+++ b/lib/subunit/NEWS
@@ -0,0 +1,174 @@
+---------------------
+subunit release notes
+---------------------
+
+NEXT (In development)
+---------------------
+
+BUG FIXES
+~~~~~~~~~
+
+* Fix incorrect reference to subunit_test_failf in c/README.
+ (Brad Hards, #524341)
+
+* Fix incorrect ordering of tags method parameters in TestResultDecorator. This
+ is purely cosmetic as the parameters are passed down with no interpretation.
+ (Robert Collins, #537611)
+
+0.0.5
+-----
+
+BUG FIXES
+~~~~~~~~~
+
+* make check was failing if subunit wasn't installed due to a missing include
+ path for the test program test_child.
+
+* make distcheck was failing due to a missing $(top_srcdir) rune.
+
+IMPROVEMENTS
+~~~~~~~~~~~~
+
+* New filter `subunit-notify` that will show a notification window with test
+ statistics when the test run finishes.
+
+* subunit.run will now pipe its output to the command in the
+ SUBUNIT_FORMATTER environment variable, if set.
+
+0.0.4
+-----
+
+BUG FIXES
+~~~~~~~~~
+
+* subunit2junitxml -f required a value; this is now fixed and -f acts as a
+ boolean switch with no parameter.
+
+* Building with autoconf 2.65 is now supported.
+
+
+0.0.3
+-----
+
+ CHANGES:
+
+ * License change, by unanimous agreement of contributors to BSD/Apache
+ License Version 2.0. This makes Subunit compatible with more testing
+ frameworks.
+
+ IMPROVEMENTS:
+
+ * CPPUnit is now directly supported: subunit builds a cppunit listener
+ ``libcppunit-subunit``.
+
+ * In the python API ``addExpectedFailure`` and ``addUnexpectedSuccess``
+ from python 2.7/3.1 are now supported. ``addExpectedFailure`` is
+ serialised as ``xfail``, and ``addUnexpectedSuccess`` as ``success``.
+ The ``ProtocolTestCase`` parser now calls outcomes using an extended
+      API that permits attaching arbitrary MIME resources such as text files,
+      log entries and so on. This extended API is being developed with the
+ Python testing community, and is in flux. ``TestResult`` objects that
+ do not support the API will be detected and transparently downgraded
+ back to the regular Python unittest API.
+
+ * INSTALLDIRS can be set to control the perl MakeMaker 'INSTALLDIRS'
+      variable when installing.
+
+ * Multipart test outcomes are tentatively supported; the exact protocol
+ for them, both serialiser and object is not yet finalised. Testers and
+ early adopters are sought. As part of this and also in an attempt to
+      provide a more precise focus on the wire protocol and toolchain,
+ Subunit now depends on testtools (http://launchpad.net/testtools)
+ release 0.9.0 or newer.
+
+ * subunit2junitxml supports a new option, --forward which causes it
+ to forward the raw subunit stream in a similar manner to tee. This
+      is used with the -o option to both write an XML report and get some
+ other subunit filter to process the stream.
+
+ * The C library now has ``subunit_test_skip``.
+
+ BUG FIXES:
+
+ * Install progress_model.py correctly.
+
+ * Non-gcc builds will no longer try to use gcc specific flags.
+ (Thanks trondn-norbye)
+
+ API CHANGES:
+
+ INTERNALS:
+
+0.0.2
+-----
+
+ CHANGES:
+
+ IMPROVEMENTS:
+
+ * A number of filters now support ``--no-passthrough`` to cause all
+ non-subunit content to be discarded. This is useful when precise control
+ over what is output is required - such as with subunit2junitxml.
+
+ * A small perl parser is now included, and a new ``subunit-diff`` tool
+ using that is included. (Jelmer Vernooij)
+
+ * Subunit streams can now include optional, incremental lookahead
+ information about progress. This allows reporters to make estimates
+ about completion, when such information is available. See the README
+ under ``progress`` for more details.
+
+ * ``subunit-filter`` now supports regex filtering via ``--with`` and
+     ``--without`` options. (Martin Pool)
+
+ * ``subunit2gtk`` has been added, a filter that shows a GTK summary of a
+ test stream.
+
+ * ``subunit2pyunit`` has a --progress flag which will cause the bzrlib
+ test reporter to be used, which has a textual progress bar. This requires
+ a recent bzrlib as a minor bugfix was required in bzrlib to support this.
+
+ * ``subunit2junitxml`` has been added. This filter converts a subunit
+ stream to a single JUnit style XML stream using the pyjunitxml
+ python library.
+
+ * The shell functions support skipping via ``subunit_skip_test`` now.
+
+ BUG FIXES:
+
+ * ``xfail`` outcomes are now passed to python TestResult's via
+ addExpectedFailure if it is present on the TestResult. Python 2.6 and
+ earlier which do not have this function will have ``xfail`` outcomes
+ passed through as success outcomes as earlier versions of subunit did.
+
+ API CHANGES:
+
+ * tags are no longer passed around in python via the ``TestCase.tags``
+ attribute. Instead ``TestResult.tags(new_tags, gone_tags)`` is called,
+ and like in the protocol, if called while a test is active only applies
+ to that test. (Robert Collins)
+
+ * ``TestResultFilter`` takes a new optional constructor parameter
+ ``filter_predicate``. (Martin Pool)
+
+ * When a progress: directive is encountered in a subunit stream, the
+ python bindings now call the ``progress(offset, whence)`` method on
+ ``TestResult``.
+
+ * When a time: directive is encountered in a subunit stream, the python
+ bindings now call the ``time(seconds)`` method on ``TestResult``.
+
+ INTERNALS:
+
+ * (python) Added ``subunit.test_results.AutoTimingTestResultDecorator``. Most
+ users of subunit will want to wrap their ``TestProtocolClient`` objects
+ in this decorator to get test timing data for performance analysis.
+
+ * (python) ExecTestCase supports passing arguments to test scripts.
+
+ * (python) New helper ``subunit.test_results.HookedTestResultDecorator``
+ which can be used to call some code on every event, without having to
+ implement all the event methods.
+
+ * (python) ``TestProtocolClient.time(a_datetime)`` has been added which
+ causes a timestamp to be output to the stream.
diff --git a/lib/subunit/README b/lib/subunit/README
index c657992c7a..9740d013a5 100644
--- a/lib/subunit/README
+++ b/lib/subunit/README
@@ -1,7 +1,212 @@
-This directory contains some helper code for the Subunit protocol. It is
-a partial import of the code from the upstream subunit project, which can
-be found at https://launchpad.net/subunit.
-To update the snapshot, run update.sh in this directory. When making changes
-here, please also submit them upstream - otherwise they'll be gone by the
-next time we import subunit.
+ subunit: A streaming protocol for test results
+ Copyright (C) 2005-2009 Robert Collins <robertc@robertcollins.net>
+
+ Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+ license at the users choice. A copy of both licenses are available in the
+ project source as Apache-2.0 and BSD. You may not use this file except in
+ compliance with one of these two licences.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ license you chose for the specific language governing permissions and
+ limitations under that license.
+
+ See the COPYING file for full details on the licensing of Subunit.
+
+ subunit reuses iso8601 by Michael Twomey, distributed under an MIT style
+ licence - see python/iso8601/LICENSE for details.
+
+Subunit
+-------
+
+Subunit is a streaming protocol for test results. The protocol is human
+readable and easily generated and parsed. By design all the components of
+the protocol conceptually fit into the xUnit TestCase->TestResult interaction.
+
+Subunit comes with command line filters to process a subunit stream and
+language bindings for python, C, C++ and shell. Bindings are easy to write
+for other languages.
+
+A number of useful things can be done easily with subunit:
+ * Test aggregation: Tests run separately can be combined and then
+ reported/displayed together. For instance, tests from different languages
+ can be shown as a seamless whole.
+ * Test archiving: A test run may be recorded and replayed later.
+ * Test isolation: Tests that may crash or otherwise interact badly with each
+    other can be run separately and then aggregated, rather than interfering
+ with each other.
+ * Grid testing: subunit can act as the necessary serialisation and
+    deserialisation to get test runs on distributed machines to be reported in
+ real time.
+
+Subunit supplies the following filters:
+ * tap2subunit - convert perl's TestAnythingProtocol to subunit.
+ * subunit2pyunit - convert a subunit stream to pyunit test results.
+ * subunit2gtk - show a subunit stream in GTK.
+ * subunit2junitxml - convert a subunit stream to JUnit's XML format.
+ * subunit-diff - compare two subunit streams.
+ * subunit-filter - filter out tests from a subunit stream.
+ * subunit-ls - list info about tests present in a subunit stream.
+ * subunit-stats - generate a summary of a subunit stream.
+ * subunit-tags - add or remove tags from a stream.
+
+Integration with other tools
+----------------------------
+
+Subunit's language bindings act as integration with various test runners like
+'check', 'cppunit', and Python's 'unittest'. Beyond that, a small amount of glue
+(typically a few lines) will allow Subunit to be used in more sophisticated
+ways.
+
+Python
+======
+
+Subunit has excellent Python support: most of the filters and tools are written
+in python and there are facilities for using Subunit to increase test isolation
+seamlessly within a test suite.
+
+One simple way to run an existing python test suite and have it output subunit
+is the module ``subunit.run``::
+
+ $ python -m subunit.run mypackage.tests.test_suite
+
+For more information on the Python support Subunit offers, please see
+``pydoc subunit``, or the source in ``python/subunit/__init__.py``.
+
+C
+=
+
+Subunit has C bindings to emit the protocol, and comes with a patch for 'check'
+which has been nominally accepted by the 'check' developers. See 'c/README' for
+more details.
+
+C++
+===
+
+The C library is includable and usable directly from C++. A TestListener for
+CPPUnit is included in the Subunit distribution. See 'c++/README' for details.
+
+shell
+=====
+
+Similar to C, the shell bindings consist of simple functions to output protocol
+elements, and a patch for adding subunit output to the 'ShUnit' shell test
+runner. See 'shell/README' for details.
+
+Filter recipes
+--------------
+
+To ignore some failing tests whose root cause is already known::
+
+ subunit-filter --without 'AttributeError.*flavor'
+
+
+The protocol
+------------
+
+Sample subunit wire contents
+----------------------------
+
+The following::
+ test: test foo works
+ success: test foo works.
+ test: tar a file.
+ failure: tar a file. [
+ ..
+ ].. space is eaten.
+ foo.c:34 WARNING foo is not defined.
+ ]
+ a writeln to stdout
+
+When run through subunit2pyunit::
+ .F
+ a writeln to stdout
+
+ ========================
+ FAILURE: tar a file.
+ -------------------
+ ..
+ ].. space is eaten.
+ foo.c:34 WARNING foo is not defined.
+
+
+Subunit protocol description
+============================
+
+This description is being ported to an EBNF style. Currently it is only partly
+in that style, but it should be fairly clear all the same. When in doubt, refer
+to the source (and ideally help fix up the description!). Generally the
+protocol is line oriented and consists of either directives and their
+parameters, or - outside a DETAILS region - unexpected lines which are not
+interpreted by the parser and should be forwarded unaltered.
+
+test|testing|test:|testing: test label
+success|success:|successful|successful: test label
+success|success:|successful|successful: test label DETAILS
+failure: test label
+failure: test label DETAILS
+error: test label
+error: test label DETAILS
+skip[:] test label
+skip[:] test label DETAILS
+xfail[:] test label
+xfail[:] test label DETAILS
+progress: [+|-]X
+progress: push
+progress: pop
+tags: [-]TAG ...
+time: YYYY-MM-DD HH:MM:SSZ
+
+DETAILS ::= BRACKETED | MULTIPART
+BRACKETED ::= '[' CR lines ']' CR
+MULTIPART ::= '[ multipart' CR PART* ']' CR
+PART ::= PART_TYPE CR NAME CR PART_BYTES CR
+PART_TYPE ::= Content-Type: type/sub-type(;parameter=value,parameter=value)
+PART_BYTES ::= (DIGITS CR LF BYTE{DIGITS})* '0' CR LF
+
+unexpected output on stdout -> stdout.
+exit w/0 or last test completing -> error
+
+Tags given outside a test are applied to all following tests
+Tags given after a test: line and before the result line for the same test
+apply only to that test, and inherit the current global tags.
+A '-' before a tag is used to remove tags - e.g. to prevent a global tag
+applying to a single test, or to cancel a global tag.
+
+The progress directive is used to provide progress information about a stream
+so that stream consumer can provide completion estimates, progress bars and so
+on. Stream generators that know how many tests will be present in the stream
+should output "progress: COUNT". Stream filters that add tests should output
+"progress: +COUNT", and those that remove tests should output
+"progress: -COUNT". An absolute count should reset the progress indicators in
+use - it indicates that two separate streams from different generators have
+been trivially concatenated together, and there is no knowledge of how many
+more complete streams are incoming. Smart concatenation could scan each stream
+for their count and sum them, or alternatively translate absolute counts into
+relative counts inline. It is recommended that outputters avoid absolute counts
+unless necessary. The push and pop directives are used to provide local regions
+for progress reporting. This fits with hierarchically operating test
+environments - such as those that organise tests into suites - the top-most
+runner can report on the number of suites, and each suite can surround its output
+with a (push, pop) pair. Interpreters should interpret a pop as also advancing
+the progress of the restored level by one step. Encountering progress
+directives between the start and end of a test pair indicates that a previous
+test was interrupted and did not cleanly terminate: it should be implicitly
+closed with an error (the same as when a stream ends with no closing test
+directive for the most recently started test).
+
+The time directive acts as a clock event - it sets the time for all future
+events. The value should be a valid ISO8601 time.
+
+The skip result is used to indicate a test that was found by the runner but not
+fully executed due to some policy or dependency issue. This is represented in
+python using the addSkip interface that testtools
+(https://edge.launchpad.net/testtools) defines. When communicating with a
+non-skip-aware test result, the test is reported as an error.
+The xfail result is used to indicate a test that was expected to fail and did
+fail in the expected manner. As this is a normal condition for such tests it is
+represented as a successful test in Python.
+In future, skip and xfail results will be represented semantically in Python,
+but some discussion is underway on the right way to do this.
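
For illustration of the grammar above, here is a minimal sketch of a stream producer
written against the C bindings that live under lib/subunit/c in this tree (the test
names are made up, and the include/link settings are assumed to come from
libsubunit.pc)::

    #include "subunit/child.h"

    int main(void)
    {
        /* Emits "test: addition works" followed by "success: addition works". */
        subunit_test_start("addition works");
        if (2 + 2 == 4)
            subunit_test_pass("addition works");
        else
            subunit_test_fail("addition works", "2 + 2 did not equal 4\n");

        /* A failure with details produces the bracketed block described above. */
        subunit_test_start("tar a file");
        subunit_test_fail("tar a file", "foo.c:34 WARNING foo is not defined.\n");
        return 0;
    }

Run on its own, such a binary writes the stream to stdout, where any of the filters
listed earlier can consume it.
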
diff --git a/lib/subunit/c++/README b/lib/subunit/c++/README
new file mode 100644
index 0000000000..7b8184400e
--- /dev/null
+++ b/lib/subunit/c++/README
@@ -0,0 +1,50 @@
+#
+# subunit C++ bindings.
+# Copyright (C) 2006 Robert Collins <robertc@robertcollins.net>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+
+Currently there are no native C++ bindings for subunit. However the C library
+can be used from C++ safely. A CPPUnit listener is built as part of Subunit to
+allow CPPUnit users to simply get Subunit output.
+
+To use the listener, use pkg-config (or your preferred replacement) to get the
+cflags and link settings from libcppunit_subunit.pc.
+
+In your test driver main, use SubunitTestProgressListener, as shown in this
+example main::
+
+ {
+ // Create the event manager and test controller
+ CPPUNIT_NS::TestResult controller;
+
+	// Add a listener that collects test results
+ // so we can get the overall status.
+ // note this isn't needed for subunit...
+ CPPUNIT_NS::TestResultCollector result;
+ controller.addListener( &result );
+
+	// Add a listener that prints test activity in subunit format.
+ CPPUNIT_NS::SubunitTestProgressListener progress;
+ controller.addListener( &progress );
+
+ // Add the top suite to the test runner
+ CPPUNIT_NS::TestRunner runner;
+ runner.addTest( CPPUNIT_NS::TestFactoryRegistry::getRegistry().makeTest() );
+ runner.run( controller );
+
+ return result.wasSuccessful() ? 0 : 1;
+ }
diff --git a/lib/subunit/c++/SubunitTestProgressListener.cpp b/lib/subunit/c++/SubunitTestProgressListener.cpp
new file mode 100644
index 0000000000..76cd9e1194
--- /dev/null
+++ b/lib/subunit/c++/SubunitTestProgressListener.cpp
@@ -0,0 +1,63 @@
+/* Subunit test listener for cppunit (http://cppunit.sourceforge.net).
+ * Copyright (C) 2006 Robert Collins <robertc@robertcollins.net>
+ *
+ * Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+ * license at the users choice. A copy of both licenses are available in the
+ * project source as Apache-2.0 and BSD. You may not use this file except in
+ * compliance with one of these two licences.
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under these licenses is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the license you chose for the specific language governing permissions
+ * and limitations under that license.
+ */
+
+#include <cppunit/Exception.h>
+#include <cppunit/Test.h>
+#include <cppunit/TestFailure.h>
+#include <cppunit/TextOutputter.h>
+#include <iostream>
+
+// Have to be able to import the public interface without config.h.
+#include "SubunitTestProgressListener.h"
+#include "config.h"
+#include "subunit/child.h"
+
+
+CPPUNIT_NS_BEGIN
+
+
+void
+SubunitTestProgressListener::startTest( Test *test )
+{
+ subunit_test_start(test->getName().c_str());
+ last_test_failed = false;
+}
+
+void
+SubunitTestProgressListener::addFailure( const TestFailure &failure )
+{
+ std::ostringstream capture_stream;
+ TextOutputter outputter(NULL, capture_stream);
+ outputter.printFailureLocation(failure.sourceLine());
+ outputter.printFailureDetail(failure.thrownException());
+
+ if (failure.isError())
+ subunit_test_error(failure.failedTestName().c_str(),
+ capture_stream.str().c_str());
+ else
+ subunit_test_fail(failure.failedTestName().c_str(),
+ capture_stream.str().c_str());
+ last_test_failed = true;
+}
+
+void
+SubunitTestProgressListener::endTest( Test *test)
+{
+ if (!last_test_failed)
+ subunit_test_pass(test->getName().c_str());
+}
+
+
+CPPUNIT_NS_END
diff --git a/lib/subunit/c++/SubunitTestProgressListener.h b/lib/subunit/c++/SubunitTestProgressListener.h
new file mode 100644
index 0000000000..5206d833c7
--- /dev/null
+++ b/lib/subunit/c++/SubunitTestProgressListener.h
@@ -0,0 +1,56 @@
+/* Subunit test listener for cppunit (http://cppunit.sourceforge.net).
+ * Copyright (C) 2006 Robert Collins <robertc@robertcollins.net>
+ *
+ * Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+ * license at the users choice. A copy of both licenses are available in the
+ * project source as Apache-2.0 and BSD. You may not use this file except in
+ * compliance with one of these two licences.
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under these licenses is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the license you chose for the specific language governing permissions
+ * and limitations under that license.
+ */
+#ifndef CPPUNIT_SUBUNITTESTPROGRESSLISTENER_H
+#define CPPUNIT_SUBUNITTESTPROGRESSLISTENER_H
+
+#include <cppunit/TestListener.h>
+
+
+CPPUNIT_NS_BEGIN
+
+
+/*!
+ * \brief TestListener that outputs subunit
+ * (http://www.robertcollins.net/unittest/subunit) compatible output.
+ * \ingroup TrackingTestExecution
+ */
+class CPPUNIT_API SubunitTestProgressListener : public TestListener
+{
+public:
+
+ SubunitTestProgressListener() {}
+
+ void startTest( Test *test );
+
+ void addFailure( const TestFailure &failure );
+
+ void endTest( Test *test );
+
+private:
+ /// Prevents the use of the copy constructor.
+ SubunitTestProgressListener( const SubunitTestProgressListener &copy );
+
+ /// Prevents the use of the copy operator.
+ void operator =( const SubunitTestProgressListener &copy );
+
+private:
+ int last_test_failed;
+};
+
+
+CPPUNIT_NS_END
+
+#endif // CPPUNIT_SUBUNITTESTPROGRESSLISTENER_H
+
diff --git a/lib/subunit/c/README b/lib/subunit/c/README
new file mode 100644
index 0000000000..b62fd45395
--- /dev/null
+++ b/lib/subunit/c/README
@@ -0,0 +1,68 @@
+#
+# subunit C bindings.
+# Copyright (C) 2006 Robert Collins <robertc@robertcollins.net>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+
+This subtree contains an implementation of the subunit child protocol.
+Currently I have no plans to write a test runner in C, so I have not written
+an implementation of the parent protocol. [but will happily accept patches].
+This implementation is built using SCons and tested via 'check'.
+See the tests/ directory for the test programs.
+You can use `make check` or `scons check` to run the tests.
+
+The C protocol consists of a handful of functions which you can use to output
+test metadata trivially. See include/subunit/child.h and lib/child.c for details.
+
+However, this is not a test runner - subunit provides no support for [for
+instance] managing assertions, cleaning up on errors etc. You can look at
+'check' (http://check.sourceforge.net/) or
+'gunit' (https://garage.maemo.org/projects/gunit) for C unit test
+frameworks.
+There is a patch for 'check' (check-subunit-*.patch) in this source tree.
+It's also available as request ID #1470750 in the sourceforge request tracker
+http://sourceforge.net/tracker/index.php. The 'check' developers have indicated
+they will merge this during the current release cycle.
+
+If you are a test environment maintainer - either homegrown, or 'check' or
+'gunit' or some other - you will want to know how the subunit calls should be used.
+Here is what a manually written test using the bindings might look like:
+
+
+void
+a_test(void) {
+ int result;
+ subunit_test_start("test name");
+    /* determine if the test passes or fails */
+ result = SOME_VALUE;
+ if (!result) {
+ subunit_test_pass("test name");
+ } else {
+        subunit_test_fail("test name",
+            "Something went wrong running something:\n"
+            "exited with result: '1'");
+ }
+}
+
+Which when run with a subunit test runner will generate something like:
+test name ... ok
+
+on success, and:
+
+test name ... FAIL
+
+======================================================================
+FAIL: test name
+----------------------------------------------------------------------
+RemoteError:
+Something went wrong running something:
+exited with result: '1'
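
Note that subunit_test_fail takes a single pre-formatted error string (see the
declarations in c/include/subunit/child.h in the next hunk) rather than printf-style
arguments, so a dynamic message has to be built first. A minimal sketch, with the
buffer size and helper name chosen for illustration::

    #include <stdio.h>
    #include "subunit/child.h"

    static void
    report_failure(const char *name, int result)
    {
        char error[256];

        /* Format the message, then hand it to the two-argument call. */
        snprintf(error, sizeof(error),
                 "Something went wrong running something:\n"
                 "exited with result: '%d'", result);
        subunit_test_fail(name, error);
    }
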
diff --git a/lib/subunit/c/include/subunit/child.h b/lib/subunit/c/include/subunit/child.h
new file mode 100644
index 0000000000..0a4e60127b
--- /dev/null
+++ b/lib/subunit/c/include/subunit/child.h
@@ -0,0 +1,79 @@
+/**
+ *
+ * subunit C bindings.
+ * Copyright (C) 2006 Robert Collins <robertc@robertcollins.net>
+ *
+ * Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+ * license at the users choice. A copy of both licenses are available in the
+ * project source as Apache-2.0 and BSD. You may not use this file except in
+ * compliance with one of these two licences.
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under these licenses is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the license you chose for the specific language governing permissions
+ * and limitations under that license.
+ **/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/**
+ * subunit_test_start:
+ *
+ * Report that a test is starting.
+ * @name: test case name
+ */
+extern void subunit_test_start(char const * const name);
+
+
+/**
+ * subunit_test_pass:
+ *
+ * Report that a test has passed.
+ *
+ * @name: test case name
+ */
+extern void subunit_test_pass(char const * const name);
+
+
+/**
+ * subunit_test_fail:
+ *
+ * Report that a test has failed.
+ * @name: test case name
+ * @error: a string describing the error.
+ */
+extern void subunit_test_fail(char const * const name, char const * const error);
+
+
+/**
+ * subunit_test_error:
+ *
+ * Report that a test has errored. An error is an unintentional failure - i.e.
+ * a segfault rather than a failed assertion.
+ * @name: test case name
+ * @error: a string describing the error.
+ */
+extern void subunit_test_error(char const * const name,
+ char const * const error);
+
+
+/**
+ * subunit_test_skip:
+ *
+ * Report that a test has been skipped. A skip is a test that has not run to
+ * conclusion but hasn't given an error either - its result is unknown.
+ * @name: test case name
+ * @reason: a string describing the reason for the skip.
+ */
+extern void subunit_test_skip(char const * const name,
+ char const * const reason);
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/lib/subunit/c/lib/child.c b/lib/subunit/c/lib/child.c
new file mode 100644
index 0000000000..2b59747c0e
--- /dev/null
+++ b/lib/subunit/c/lib/child.c
@@ -0,0 +1,82 @@
+/**
+ *
+ * subunit C child-side bindings: report on tests being run.
+ * Copyright (C) 2006 Robert Collins <robertc@robertcollins.net>
+ *
+ * Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+ * license at the users choice. A copy of both licenses are available in the
+ * project source as Apache-2.0 and BSD. You may not use this file except in
+ * compliance with one of these two licences.
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under these licenses is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the license you chose for the specific language governing permissions
+ * and limitations under that license.
+ **/
+
+#include <stdio.h>
+#include <string.h>
+#include "subunit/child.h"
+
+/* Write details about a test event. It is the caller's responsibility to ensure
+ * that details are only provided for events the protocol expects details on.
+ * @event: The event - e.g. 'skip'
+ * @name: The test name/id.
+ * @details: The details of the event, may be NULL if no details are present.
+ */
+static void
+subunit_send_event(char const * const event, char const * const name,
+ char const * const details)
+{
+ if (NULL == details) {
+ fprintf(stdout, "%s: %s\n", event, name);
+ } else {
+ fprintf(stdout, "%s: %s [\n", event, name);
+ fprintf(stdout, "%s", details);
+ if (details[strlen(details) - 1] != '\n')
+ fprintf(stdout, "\n");
+ fprintf(stdout, "]\n");
+ }
+ fflush(stdout);
+}
+
+/* these functions all flush to ensure that the test runner knows the action
+ * that has been taken even if the subsequent test etc takes a long time or
+ * never completes (i.e. a segfault).
+ */
+
+void
+subunit_test_start(char const * const name)
+{
+ subunit_send_event("test", name, NULL);
+}
+
+
+void
+subunit_test_pass(char const * const name)
+{
+ /* TODO: add success details as an option */
+ subunit_send_event("success", name, NULL);
+}
+
+
+void
+subunit_test_fail(char const * const name, char const * const error)
+{
+ subunit_send_event("failure", name, error);
+}
+
+
+void
+subunit_test_error(char const * const name, char const * const error)
+{
+ subunit_send_event("error", name, error);
+}
+
+
+void
+subunit_test_skip(char const * const name, char const * const reason)
+{
+ subunit_send_event("skip", name, reason);
+}
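
To make the newline handling in subunit_send_event concrete, here is a small sketch
of a skip whose reason string lacks a trailing newline; the expected stream is shown
in the comment (the test name is illustrative)::

    #include "subunit/child.h"

    int main(void)
    {
        /* Produces:
         *   test: dns lookup
         *   skip: dns lookup [
         *   no network in this environment
         *   ]
         * A newline is appended before the closing ']' because the reason
         * string does not end with one.
         */
        subunit_test_start("dns lookup");
        subunit_test_skip("dns lookup", "no network in this environment");
        return 0;
    }
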
diff --git a/lib/subunit/c/tests/test_child.c b/lib/subunit/c/tests/test_child.c
new file mode 100644
index 0000000000..6399eeb645
--- /dev/null
+++ b/lib/subunit/c/tests/test_child.c
@@ -0,0 +1,192 @@
+/**
+ *
+ * subunit C bindings.
+ * Copyright (C) 2006 Robert Collins <robertc@robertcollins.net>
+ *
+ * Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+ * license at the users choice. A copy of both licenses are available in the
+ * project source as Apache-2.0 and BSD. You may not use this file except in
+ * compliance with one of these two licences.
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under these licenses is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the license you chose for the specific language governing permissions
+ * and limitations under that license.
+ **/
+
+#include <stdlib.h>
+#include <unistd.h>
+#include <string.h>
+#include <check.h>
+
+#include "subunit/child.h"
+
+/**
+ * Helper function to capture stdout, run some call, and check what
+ * was written.
+ * @expected the expected stdout content
+ * @function the function to call.
+ **/
+static void
+test_stdout_function(char const * expected,
+ void (*function)(void))
+{
+ /* test that the start function emits a correct test: line. */
+ int bytecount;
+ int old_stdout;
+ int new_stdout[2];
+ char buffer[100];
+ /* we need a socketpair to capture stdout in */
+ fail_if(pipe(new_stdout), "Failed to create a socketpair.");
+ /* backup stdout so we can replace it */
+ old_stdout = dup(1);
+ if (old_stdout == -1) {
+ close(new_stdout[0]);
+ close(new_stdout[1]);
+ fail("Failed to backup stdout before replacing.");
+ }
+ /* redirect stdout so we can analyse it */
+ if (dup2(new_stdout[1], 1) != 1) {
+ close(old_stdout);
+ close(new_stdout[0]);
+ close(new_stdout[1]);
+ fail("Failed to redirect stdout");
+ }
+	/* yes this can block. It's a test case with < 100 bytes of output.
+ * DEAL.
+ */
+ function();
+ /* restore stdout now */
+ if (dup2(old_stdout, 1) != 1) {
+ close(old_stdout);
+ close(new_stdout[0]);
+ close(new_stdout[1]);
+ fail("Failed to restore stdout");
+ }
+ /* and we dont need the write side any more */
+ if (close(new_stdout[1])) {
+ close(new_stdout[0]);
+ fail("Failed to close write side of socketpair.");
+ }
+ /* get the output */
+	bytecount = read(new_stdout[0], buffer, sizeof(buffer) - 1);
+ if (0 > bytecount) {
+ close(new_stdout[0]);
+ fail("Failed to read captured output.");
+ }
+ buffer[bytecount]='\0';
+ /* and we don't need the read side any more */
+ fail_if(close(new_stdout[0]), "Failed to close read side of pipe.");
+ /* compare with expected outcome */
+ fail_if(strcmp(expected, buffer), "Did not get expected output [%s], got [%s]", expected, buffer);
+}
+
+
+static void
+call_test_start(void)
+{
+ subunit_test_start("test case");
+}
+
+
+START_TEST (test_start)
+{
+ test_stdout_function("test: test case\n", call_test_start);
+}
+END_TEST
+
+
+static void
+call_test_pass(void)
+{
+ subunit_test_pass("test case");
+}
+
+
+START_TEST (test_pass)
+{
+ test_stdout_function("success: test case\n", call_test_pass);
+}
+END_TEST
+
+
+static void
+call_test_fail(void)
+{
+ subunit_test_fail("test case", "Multiple lines\n of error\n");
+}
+
+
+START_TEST (test_fail)
+{
+ test_stdout_function("failure: test case [\n"
+ "Multiple lines\n"
+ " of error\n"
+ "]\n",
+ call_test_fail);
+}
+END_TEST
+
+
+static void
+call_test_error(void)
+{
+ subunit_test_error("test case", "Multiple lines\n of output\n");
+}
+
+
+START_TEST (test_error)
+{
+ test_stdout_function("error: test case [\n"
+ "Multiple lines\n"
+ " of output\n"
+ "]\n",
+ call_test_error);
+}
+END_TEST
+
+
+static void
+call_test_skip(void)
+{
+ subunit_test_skip("test case", "Multiple lines\n of output\n");
+}
+
+
+START_TEST (test_skip)
+{
+ test_stdout_function("skip: test case [\n"
+ "Multiple lines\n"
+ " of output\n"
+ "]\n",
+ call_test_skip);
+}
+END_TEST
+
+static Suite *
+child_suite(void)
+{
+ Suite *s = suite_create("subunit_child");
+ TCase *tc_core = tcase_create("Core");
+ suite_add_tcase (s, tc_core);
+ tcase_add_test (tc_core, test_start);
+ tcase_add_test (tc_core, test_pass);
+ tcase_add_test (tc_core, test_fail);
+ tcase_add_test (tc_core, test_error);
+ tcase_add_test (tc_core, test_skip);
+ return s;
+}
+
+
+int
+main(void)
+{
+ int nf;
+ Suite *s = child_suite();
+ SRunner *sr = srunner_create(s);
+ srunner_run_all(sr, CK_NORMAL);
+ nf = srunner_ntests_failed(sr);
+ srunner_free(sr);
+ return (nf == 0) ? EXIT_SUCCESS : EXIT_FAILURE;
+}
diff --git a/lib/subunit/configure.ac b/lib/subunit/configure.ac
new file mode 100644
index 0000000000..496aea5719
--- /dev/null
+++ b/lib/subunit/configure.ac
@@ -0,0 +1,75 @@
+m4_define([SUBUNIT_MAJOR_VERSION], [0])
+m4_define([SUBUNIT_MINOR_VERSION], [0])
+m4_define([SUBUNIT_MICRO_VERSION], [5])
+m4_define([SUBUNIT_VERSION],
+m4_defn([SUBUNIT_MAJOR_VERSION]).m4_defn([SUBUNIT_MINOR_VERSION]).m4_defn([SUBUNIT_MICRO_VERSION]))
+AC_PREREQ([2.59])
+AC_INIT([subunit], [SUBUNIT_VERSION], [subunit-dev@lists.launchpad.net])
+AC_CONFIG_SRCDIR([c/lib/child.c])
+AM_INIT_AUTOMAKE([-Wall -Werror foreign subdir-objects])
+AC_CONFIG_MACRO_DIR([m4])
+[SUBUNIT_MAJOR_VERSION]=SUBUNIT_MAJOR_VERSION
+[SUBUNIT_MINOR_VERSION]=SUBUNIT_MINOR_VERSION
+[SUBUNIT_MICRO_VERSION]=SUBUNIT_MICRO_VERSION
+[SUBUNIT_VERSION]=SUBUNIT_VERSION
+AC_SUBST([SUBUNIT_MAJOR_VERSION])
+AC_SUBST([SUBUNIT_MINOR_VERSION])
+AC_SUBST([SUBUNIT_MICRO_VERSION])
+AC_SUBST([SUBUNIT_VERSION])
+AC_USE_SYSTEM_EXTENSIONS
+AC_PROG_CC
+AC_PROG_CXX
+AM_PROG_CC_C_O
+AC_PROG_INSTALL
+AC_PROG_LN_S
+AC_PROG_LIBTOOL
+AM_PATH_PYTHON
+
+AS_IF([test "$GCC" = "yes"],
+ [
+ SUBUNIT_CFLAGS="-Wall -Werror -Wextra -Wstrict-prototypes "
+ SUBUNIT_CFLAGS="$SUBUNIT_CFLAGS -Wmissing-prototypes -Wwrite-strings "
+ SUBUNIT_CFLAGS="$SUBUNIT_CFLAGS -Wno-variadic-macros "
+ SUBUNIT_CXXFLAGS="-Wall -Werror -Wextra -Wwrite-strings -Wno-variadic-macros"
+ ])
+
+AM_CFLAGS="$SUBUNIT_CFLAGS -I\$(top_srcdir)/c/include"
+AM_CXXFLAGS="$SUBUNIT_CXXFLAGS -I\$(top_srcdir)/c/include"
+AC_SUBST(AM_CFLAGS)
+AC_SUBST(AM_CXXFLAGS)
+
+# Checks for libraries.
+
+# Checks for header files.
+AC_CHECK_HEADERS([stdlib.h])
+
+# Checks for typedefs, structures, and compiler characteristics.
+AC_C_CONST
+AC_TYPE_PID_T
+AC_TYPE_SIZE_T
+AC_HEADER_TIME
+AC_STRUCT_TM
+
+AC_CHECK_SIZEOF(int, 4)
+AC_CHECK_SIZEOF(short, 2)
+AC_CHECK_SIZEOF(long, 4)
+
+# Checks for library functions.
+AC_FUNC_MALLOC
+AC_FUNC_REALLOC
+
+# Easier memory management.
+# C unit testing.
+PKG_CHECK_MODULES([CHECK], [check >= 0.9.4])
+# C++ unit testing.
+PKG_CHECK_MODULES([CPPUNIT], [cppunit])
+
+# Output files
+AC_CONFIG_HEADERS([config.h])
+
+AC_CONFIG_FILES([libsubunit.pc
+ libcppunit_subunit.pc
+ Makefile
+ perl/Makefile.PL
+ ])
+AC_OUTPUT
diff --git a/lib/subunit/filters/subunit-filter b/lib/subunit/filters/subunit-filter
new file mode 100755
index 0000000000..c06a03a827
--- /dev/null
+++ b/lib/subunit/filters/subunit-filter
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# subunit: extensions to python unittest to get test results from subprocesses.
+# Copyright (C) 2008 Robert Collins <robertc@robertcollins.net>
+# (C) 2009 Martin Pool
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+"""Filter a subunit stream to include/exclude tests.
+
+The default is to strip successful tests.
+
+Tests can be filtered by Python regular expressions with --with and --without,
+which match both the test name and the error text (if any). The result
+contains tests which match any of the --with expressions and none of the
+--without expressions. For case-insensitive matching prepend '(?i)'.
+Remember to quote shell metacharacters.
+"""
+
+from optparse import OptionParser
+import sys
+import unittest
+import re
+
+from subunit import (
+ DiscardStream,
+ ProtocolTestCase,
+ TestProtocolClient,
+ )
+from subunit.test_results import TestResultFilter
+
+parser = OptionParser(description=__doc__)
+parser.add_option("--error", action="store_false",
+ help="include errors", default=False, dest="error")
+parser.add_option("-e", "--no-error", action="store_true",
+ help="exclude errors", dest="error")
+parser.add_option("--failure", action="store_false",
+ help="include failures", default=False, dest="failure")
+parser.add_option("-f", "--no-failure", action="store_true",
+ help="include failures", dest="failure")
+parser.add_option("--no-passthrough", action="store_true",
+ help="Hide all non subunit input.", default=False, dest="no_passthrough")
+parser.add_option("-s", "--success", action="store_false",
+ help="include successes", dest="success")
+parser.add_option("--no-skip", action="store_true",
+ help="exclude skips", dest="skip")
+parser.add_option("--no-success", action="store_true",
+ help="exclude successes", default=True, dest="success")
+parser.add_option("-m", "--with", type=str,
+ help="regexp to include (case-sensitive by default)",
+ action="append", dest="with_regexps")
+parser.add_option("--without", type=str,
+ help="regexp to exclude (case-sensitive by default)",
+ action="append", dest="without_regexps")
+
+(options, args) = parser.parse_args()
+
+
+def _compile_re_from_list(l):
+ return re.compile("|".join(l), re.MULTILINE)
+
+
+def _make_regexp_filter(with_regexps, without_regexps):
+ """Make a callback that checks tests against regexps.
+
+ with_regexps and without_regexps are each either a list of regexp strings,
+ or None.
+ """
+ with_re = with_regexps and _compile_re_from_list(with_regexps)
+ without_re = without_regexps and _compile_re_from_list(without_regexps)
+
+ def check_regexps(test, outcome, err, details):
+ """Check if this test and error match the regexp filters."""
+ test_str = str(test) + outcome + str(err) + str(details)
+ if with_re and not with_re.search(test_str):
+ return False
+ if without_re and without_re.search(test_str):
+ return False
+ return True
+ return check_regexps
+
+
+regexp_filter = _make_regexp_filter(options.with_regexps,
+ options.without_regexps)
+result = TestProtocolClient(sys.stdout)
+result = TestResultFilter(result, filter_error=options.error,
+ filter_failure=options.failure, filter_success=options.success,
+ filter_skip=options.skip,
+ filter_predicate=regexp_filter)
+if options.no_passthrough:
+ passthrough_stream = DiscardStream()
+else:
+ passthrough_stream = None
+test = ProtocolTestCase(sys.stdin, passthrough=passthrough_stream)
+test.run(result)
+sys.exit(0)
diff --git a/lib/subunit/filters/subunit-ls b/lib/subunit/filters/subunit-ls
new file mode 100755
index 0000000000..15ec4b01e6
--- /dev/null
+++ b/lib/subunit/filters/subunit-ls
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+# subunit: extensions to python unittest to get test results from subprocesses.
+# Copyright (C) 2008 Robert Collins <robertc@robertcollins.net>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+"""List tests in a subunit stream."""
+
+from optparse import OptionParser
+import sys
+import unittest
+
+from subunit import DiscardStream, ProtocolTestCase
+
+class TestIdPrintingResult(unittest.TestResult):
+
+ def __init__(self, stream, show_times=False):
+ """Create a TestIdPrintingResult object outputting to stream."""
+ unittest.TestResult.__init__(self)
+ self._stream = stream
+ self.failed_tests = 0
+ self.__time = 0
+ self.show_times = show_times
+ self._test = None
+ self._test_duration = 0
+
+ def addError(self, test, err):
+ self.failed_tests += 1
+ self._test = test
+
+ def addFailure(self, test, err):
+ self.failed_tests += 1
+ self._test = test
+
+ def addSuccess(self, test):
+ self._test = test
+
+ def reportTest(self, test, duration):
+ if self.show_times:
+ seconds = duration.seconds
+ seconds += duration.days * 3600 * 24
+ seconds += duration.microseconds / 1000000.0
+ self._stream.write(test.id() + ' %0.3f\n' % seconds)
+ else:
+ self._stream.write(test.id() + '\n')
+
+ def startTest(self, test):
+ self._start_time = self._time()
+
+ def stopTest(self, test):
+ test_duration = self._time() - self._start_time
+ self.reportTest(self._test, test_duration)
+
+ def time(self, time):
+ self.__time = time
+
+ def _time(self):
+ return self.__time
+
+ def wasSuccessful(self):
+ "Tells whether or not this result was a success"
+ return self.failed_tests == 0
+
+
+parser = OptionParser(description=__doc__)
+parser.add_option("--times", action="store_true",
+ help="list the time each test took (requires a timestamped stream)",
+ default=False)
+parser.add_option("--no-passthrough", action="store_true",
+ help="Hide all non subunit input.", default=False, dest="no_passthrough")
+(options, args) = parser.parse_args()
+result = TestIdPrintingResult(sys.stdout, options.times)
+if options.no_passthrough:
+ passthrough_stream = DiscardStream()
+else:
+ passthrough_stream = None
+test = ProtocolTestCase(sys.stdin, passthrough=passthrough_stream)
+test.run(result)
+if result.wasSuccessful():
+ exit_code = 0
+else:
+ exit_code = 1
+sys.exit(exit_code)
diff --git a/lib/subunit/filters/subunit-notify b/lib/subunit/filters/subunit-notify
new file mode 100755
index 0000000000..758e7fc8ff
--- /dev/null
+++ b/lib/subunit/filters/subunit-notify
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# subunit: extensions to python unittest to get test results from subprocesses.
+# Copyright (C) 2010 Jelmer Vernooij <jelmer@samba.org>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+"""Notify the user of a finished test run."""
+
+from optparse import OptionParser
+import sys
+
+import pygtk
+pygtk.require('2.0')
+import pynotify
+
+from subunit import DiscardStream, ProtocolTestCase, TestResultStats
+
+if not pynotify.init("Subunit-notify"):
+ sys.exit(1)
+
+parser = OptionParser(description=__doc__)
+parser.add_option("--no-passthrough", action="store_true",
+ help="Hide all non subunit input.", default=False, dest="no_passthrough")
+parser.add_option("-f", "--forward", action="store_true", default=False,
+ help="Forward subunit stream on stdout.")
+(options, args) = parser.parse_args()
+result = TestResultStats(sys.stdout)
+if options.no_passthrough:
+ passthrough_stream = DiscardStream()
+else:
+ passthrough_stream = None
+if options.forward:
+ forward_stream = sys.stdout
+else:
+ forward_stream = None
+test = ProtocolTestCase(sys.stdin, passthrough=passthrough_stream,
+ forward=forward_stream)
+test.run(result)
+if result.failed_tests > 0:
+ summary = "Test run failed"
+else:
+ summary = "Test run successful"
+body = "Total tests: %d; Passed: %d; Failed: %d" % (
+ result.total_tests,
+ result.passed_tests,
+ result.failed_tests,
+ )
+nw = pynotify.Notification(summary, body)
+nw.show()
+
+if result.wasSuccessful():
+ exit_code = 0
+else:
+ exit_code = 1
+sys.exit(exit_code)
diff --git a/lib/subunit/filters/subunit-stats b/lib/subunit/filters/subunit-stats
new file mode 100755
index 0000000000..4734988fc2
--- /dev/null
+++ b/lib/subunit/filters/subunit-stats
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+# subunit: extensions to python unittest to get test results from subprocesses.
+# Copyright (C) 2009 Robert Collins <robertc@robertcollins.net>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+"""Filter a subunit stream to get aggregate statistics."""
+
+from optparse import OptionParser
+import sys
+import unittest
+
+from subunit import DiscardStream, ProtocolTestCase, TestResultStats
+
+parser = OptionParser(description=__doc__)
+parser.add_option("--no-passthrough", action="store_true",
+ help="Hide all non subunit input.", default=False, dest="no_passthrough")
+(options, args) = parser.parse_args()
+result = TestResultStats(sys.stdout)
+if options.no_passthrough:
+ passthrough_stream = DiscardStream()
+else:
+ passthrough_stream = None
+test = ProtocolTestCase(sys.stdin, passthrough=passthrough_stream)
+test.run(result)
+result.formatStats()
+if result.wasSuccessful():
+ exit_code = 0
+else:
+ exit_code = 1
+sys.exit(exit_code)
diff --git a/lib/subunit/filters/subunit-tags b/lib/subunit/filters/subunit-tags
new file mode 100755
index 0000000000..edbbfce480
--- /dev/null
+++ b/lib/subunit/filters/subunit-tags
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# subunit: extensions to python unittest to get test results from subprocesses.
+# Copyright (C) 2009 Robert Collins <robertc@robertcollins.net>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+"""A filter to change tags on a subunit stream.
+
+subunit-tags foo -> adds foo
+subunit-tags foo -bar -> adds foo and removes bar
+"""
+
+import sys
+
+from subunit import tag_stream
+sys.exit(tag_stream(sys.stdin, sys.stdout, sys.argv[1:]))
diff --git a/lib/subunit/filters/subunit2gtk b/lib/subunit/filters/subunit2gtk
new file mode 100755
index 0000000000..c2cb2de3ce
--- /dev/null
+++ b/lib/subunit/filters/subunit2gtk
@@ -0,0 +1,259 @@
+#!/usr/bin/env python
+# subunit: extensions to python unittest to get test results from subprocesses.
+# Copyright (C) 2009 Robert Collins <robertc@robertcollins.net>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+### The GTK progress bar __init__ function is derived from the pygtk tutorial:
+# The PyGTK Tutorial is Copyright (C) 2001-2005 John Finlay.
+#
+# The GTK Tutorial is Copyright (C) 1997 Ian Main.
+#
+# Copyright (C) 1998-1999 Tony Gale.
+#
+# Permission is granted to make and distribute verbatim copies of this manual
+# provided the copyright notice and this permission notice are preserved on all
+# copies.
+#
+# Permission is granted to copy and distribute modified versions of this
+# document under the conditions for verbatim copying, provided that this
+# copyright notice is included exactly as in the original, and that the entire
+# resulting derived work is distributed under the terms of a permission notice
+# identical to this one.
+#
+# Permission is granted to copy and distribute translations of this document
+# into another language, under the above conditions for modified versions.
+#
+# If you are intending to incorporate this document into a published work,
+# please contact the maintainer, and we will make an effort to ensure that you
+# have the most up to date information available.
+#
+# There is no guarantee that this document lives up to its intended purpose.
+# This is simply provided as a free resource. As such, the authors and
+# maintainers of the information provided within can not make any guarantee
+# that the information is even accurate.
+
+"""Display a subunit stream in a gtk progress window."""
+
+import sys
+import unittest
+
+import pygtk
+pygtk.require('2.0')
+import gtk, gtk.gdk, gobject
+
+from subunit import (
+ PROGRESS_POP,
+ PROGRESS_PUSH,
+ PROGRESS_SET,
+ TestProtocolServer,
+ )
+from subunit.progress_model import ProgressModel
+
+
+class GTKTestResult(unittest.TestResult):
+
+ def __init__(self):
+ super(GTKTestResult, self).__init__()
+ # Instance variables (in addition to TestResult)
+ self.window = None
+ self.run_label = None
+ self.ok_label = None
+ self.not_ok_label = None
+ self.total_tests = None
+
+ self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
+ self.window.set_resizable(True)
+
+ self.window.connect("destroy", gtk.main_quit)
+ self.window.set_title("Tests...")
+ self.window.set_border_width(0)
+
+ vbox = gtk.VBox(False, 5)
+ vbox.set_border_width(10)
+ self.window.add(vbox)
+ vbox.show()
+
+ # Create a centering alignment object
+ align = gtk.Alignment(0.5, 0.5, 0, 0)
+ vbox.pack_start(align, False, False, 5)
+ align.show()
+
+ # Create the ProgressBar
+ self.pbar = gtk.ProgressBar()
+ align.add(self.pbar)
+ self.pbar.set_text("Running")
+ self.pbar.show()
+ self.progress_model = ProgressModel()
+
+ separator = gtk.HSeparator()
+ vbox.pack_start(separator, False, False, 0)
+ separator.show()
+
+ # rows, columns, homogeneous
+ table = gtk.Table(2, 3, False)
+ vbox.pack_start(table, False, True, 0)
+ table.show()
+ # Show summary details about the run. Could use an expander.
+ label = gtk.Label("Run:")
+ table.attach(label, 0, 1, 1, 2, gtk.EXPAND | gtk.FILL,
+ gtk.EXPAND | gtk.FILL, 5, 5)
+ label.show()
+ self.run_label = gtk.Label("N/A")
+ table.attach(self.run_label, 1, 2, 1, 2, gtk.EXPAND | gtk.FILL,
+ gtk.EXPAND | gtk.FILL, 5, 5)
+ self.run_label.show()
+
+ label = gtk.Label("OK:")
+ table.attach(label, 0, 1, 2, 3, gtk.EXPAND | gtk.FILL,
+ gtk.EXPAND | gtk.FILL, 5, 5)
+ label.show()
+ self.ok_label = gtk.Label("N/A")
+ table.attach(self.ok_label, 1, 2, 2, 3, gtk.EXPAND | gtk.FILL,
+ gtk.EXPAND | gtk.FILL, 5, 5)
+ self.ok_label.show()
+
+ label = gtk.Label("Not OK:")
+ table.attach(label, 0, 1, 3, 4, gtk.EXPAND | gtk.FILL,
+ gtk.EXPAND | gtk.FILL, 5, 5)
+ label.show()
+ self.not_ok_label = gtk.Label("N/A")
+ table.attach(self.not_ok_label, 1, 2, 3, 4, gtk.EXPAND | gtk.FILL,
+ gtk.EXPAND | gtk.FILL, 5, 5)
+ self.not_ok_label.show()
+
+ self.window.show()
+ # For the demo.
+ self.window.set_keep_above(True)
+ self.window.present()
+
+ def stopTest(self, test):
+ super(GTKTestResult, self).stopTest(test)
+ self.progress_model.advance()
+ if self.progress_model.width() == 0:
+ self.pbar.pulse()
+ else:
+ pos = self.progress_model.pos()
+ width = self.progress_model.width()
+ percentage = (pos / float(width))
+ self.pbar.set_fraction(percentage)
+
+ def stopTestRun(self):
+ try:
+ super(GTKTestResult, self).stopTestRun()
+ except AttributeError:
+ pass
+ self.pbar.set_text('Finished')
+
+ def addError(self, test, err):
+ super(GTKTestResult, self).addError(test, err)
+ self.update_counts()
+
+ def addFailure(self, test, err):
+ super(GTKTestResult, self).addFailure(test, err)
+ self.update_counts()
+
+ def addSuccess(self, test):
+ super(GTKTestResult, self).addSuccess(test)
+ self.update_counts()
+
+ def addSkip(self, test, reason):
+ # addSkip is new in Python 2.7/3.1
+ addSkip = getattr(super(GTKTestResult, self), 'addSkip', None)
+ if callable(addSkip):
+ addSkip(test, reason)
+ self.update_counts()
+
+ def addExpectedFailure(self, test, err):
+ # addExpectedFailure is new in Python 2.7/3.1
+ addExpectedFailure = getattr(super(GTKTestResult, self),
+ 'addExpectedFailure', None)
+ if callable(addExpectedFailure):
+ addExpectedFailure(test, err)
+ self.update_counts()
+
+ def addUnexpectedSuccess(self, test):
+ # addUnexpectedSuccess is new in Python 2.7/3.1
+ addUnexpectedSuccess = getattr(super(GTKTestResult, self),
+ 'addUnexpectedSuccess', None)
+ if callable(addUnexpectedSuccess):
+ addUnexpectedSuccess(test)
+ self.update_counts()
+
+ def progress(self, offset, whence):
+ if whence == PROGRESS_PUSH:
+ self.progress_model.push()
+ elif whence == PROGRESS_POP:
+ self.progress_model.pop()
+ elif whence == PROGRESS_SET:
+ self.total_tests = offset
+ self.progress_model.set_width(offset)
+ else:
+ self.total_tests += offset
+ self.progress_model.adjust_width(offset)
+
+ def time(self, a_datetime):
+ # We don't try to estimate completion yet.
+ pass
+
+ def update_counts(self):
+ self.run_label.set_text(str(self.testsRun))
+ bad = len(self.failures + self.errors)
+ self.ok_label.set_text(str(self.testsRun - bad))
+ self.not_ok_label.set_text(str(bad))
+
+
+class GIOProtocolTestCase(object):
+
+ def __init__(self, stream, result, on_finish):
+ self.stream = stream
+ self.schedule_read()
+ self.hup_id = gobject.io_add_watch(stream, gobject.IO_HUP, self.hup)
+ self.protocol = TestProtocolServer(result)
+ self.on_finish = on_finish
+
+ def read(self, source, condition, all=False):
+ # NB: this readline() call actually blocks.
+ line = source.readline()
+ if not line:
+ self.protocol.lostConnection()
+ self.on_finish()
+ return False
+ self.protocol.lineReceived(line)
+ # schedule more IO shortly - if we say we're willing to do it
+ # immediately we starve things.
+ if not all:
+ source_id = gobject.timeout_add(1, self.schedule_read)
+ return False
+ else:
+ return True
+
+ def schedule_read(self):
+ self.read_id = gobject.io_add_watch(self.stream, gobject.IO_IN, self.read)
+
+ def hup(self, source, condition):
+ while self.read(source, condition, all=True): pass
+ self.protocol.lostConnection()
+ gobject.source_remove(self.read_id)
+ self.on_finish()
+ return False
+
+
+result = GTKTestResult()
+test = GIOProtocolTestCase(sys.stdin, result, result.stopTestRun)
+gtk.main()
+if result.wasSuccessful():
+ exit_code = 0
+else:
+ exit_code = 1
+sys.exit(exit_code)
diff --git a/lib/subunit/filters/subunit2junitxml b/lib/subunit/filters/subunit2junitxml
new file mode 100755
index 0000000000..bea795d2bd
--- /dev/null
+++ b/lib/subunit/filters/subunit2junitxml
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# subunit: extensions to python unittest to get test results from subprocesses.
+# Copyright (C) 2009 Robert Collins <robertc@robertcollins.net>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+"""Filter a subunit stream to get aggregate statistics."""
+
+from optparse import OptionParser
+import sys
+import unittest
+
+from subunit import DiscardStream, ProtocolTestCase
+try:
+ from junitxml import JUnitXmlResult
+except ImportError:
+ sys.stderr.write("python-junitxml (https://launchpad.net/pyjunitxml or "
+ "http://pypi.python.org/pypi/junitxml) is required for this filter.")
+ raise
+
+parser = OptionParser(description=__doc__)
+parser.add_option("--no-passthrough", action="store_true",
+ help="Hide all non subunit input.", default=False, dest="no_passthrough")
+parser.add_option("-o", "--output-to",
+ help="Output the XML to this path rather than stdout.")
+parser.add_option("-f", "--forward", action="store_true", default=False,
+ help="Forward subunit stream on stdout.")
+(options, args) = parser.parse_args()
+if options.output_to is None:
+ output_to = sys.stdout
+else:
+ output_to = file(options.output_to, 'wb')
+try:
+ result = JUnitXmlResult(output_to)
+ if options.no_passthrough:
+ passthrough_stream = DiscardStream()
+ else:
+ passthrough_stream = None
+ if options.forward:
+ forward_stream = sys.stdout
+ else:
+ forward_stream = None
+ test = ProtocolTestCase(sys.stdin, passthrough=passthrough_stream,
+ forward=forward_stream)
+ result.startTestRun()
+ test.run(result)
+ result.stopTestRun()
+finally:
+ if options.output_to is not None:
+ output_to.close()
+if result.wasSuccessful():
+ exit_code = 0
+else:
+ exit_code = 1
+sys.exit(exit_code)
diff --git a/lib/subunit/filters/subunit2pyunit b/lib/subunit/filters/subunit2pyunit
new file mode 100755
index 0000000000..83a23d14d1
--- /dev/null
+++ b/lib/subunit/filters/subunit2pyunit
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# subunit: extensions to python unittest to get test results from subprocesses.
+# Copyright (C) 2009 Robert Collins <robertc@robertcollins.net>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+"""Display a subunit stream through python's unittest test runner."""
+
+from optparse import OptionParser
+import sys
+import unittest
+
+from subunit import DiscardStream, ProtocolTestCase, TestProtocolServer
+
+parser = OptionParser(description=__doc__)
+parser.add_option("--no-passthrough", action="store_true",
+ help="Hide all non subunit input.", default=False, dest="no_passthrough")
+parser.add_option("--progress", action="store_true",
+ help="Use bzrlib's test reporter (requires bzrlib)",
+ default=False)
+(options, args) = parser.parse_args()
+if options.no_passthrough:
+ passthrough_stream = DiscardStream()
+else:
+ passthrough_stream = None
+test = ProtocolTestCase(sys.stdin, passthrough=passthrough_stream)
+if options.progress:
+ from bzrlib.tests import TextTestRunner
+ from bzrlib import ui
+ ui.ui_factory = ui.make_ui_for_terminal(None, sys.stdout, sys.stderr)
+ runner = TextTestRunner()
+else:
+ runner = unittest.TextTestRunner(verbosity=2)
+if runner.run(test).wasSuccessful():
+ exit_code = 0
+else:
+ exit_code = 1
+sys.exit(exit_code)
diff --git a/lib/subunit/libcppunit_subunit.pc.in b/lib/subunit/libcppunit_subunit.pc.in
new file mode 100644
index 0000000000..98982c78ae
--- /dev/null
+++ b/lib/subunit/libcppunit_subunit.pc.in
@@ -0,0 +1,11 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
+libdir=@libdir@
+includedir=@includedir@
+
+Name: cppunit subunit listener
+Description: Subunit output listener for the CPPUnit test library.
+URL: http://launchpad.net/subunit
+Version: @VERSION@
+Libs: -L${libdir} -lsubunit
+Cflags: -I${includedir}
diff --git a/lib/subunit/libsubunit.pc.in b/lib/subunit/libsubunit.pc.in
new file mode 100644
index 0000000000..67564148e8
--- /dev/null
+++ b/lib/subunit/libsubunit.pc.in
@@ -0,0 +1,11 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
+libdir=@libdir@
+includedir=@includedir@
+
+Name: subunit
+Description: Subunit test protocol library.
+URL: http://launchpad.net/subunit
+Version: @VERSION@
+Libs: -L${libdir} -lsubunit
+Cflags: -I${includedir}
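+
+# Illustrative consumer build line (file names are examples only):
+#   cc mytests.c $(pkg-config --cflags --libs libsubunit) -o mytests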
diff --git a/lib/subunit/perl/Makefile.PL.in b/lib/subunit/perl/Makefile.PL.in
new file mode 100755
index 0000000000..26e1c181f0
--- /dev/null
+++ b/lib/subunit/perl/Makefile.PL.in
@@ -0,0 +1,20 @@
+use ExtUtils::MakeMaker;
+WriteMakefile(
+ 'INSTALL_BASE' => '@prefix@',
+ 'NAME' => 'Subunit',
+ 'VERSION' => '@SUBUNIT_VERSION@',
+ 'test' => { 'TESTS' => 'tests/*.pl' },
+ 'PMLIBDIRS' => [ 'lib' ],
+ 'EXE_FILES' => [ '@abs_srcdir@/subunit-diff' ],
+);
+sub MY::postamble {
+<<'EOT';
+check: # test
+
+uninstall_distcheck:
+ rm -fr $(DESTINSTALLARCHLIB)
+
+VPATH = @srcdir@
+.PHONY: uninstall_distcheck
+EOT
+}
diff --git a/lib/subunit/perl/lib/Subunit.pm b/lib/subunit/perl/lib/Subunit.pm
new file mode 100644
index 0000000000..05206748e2
--- /dev/null
+++ b/lib/subunit/perl/lib/Subunit.pm
@@ -0,0 +1,162 @@
+# Perl module for parsing and generating the Subunit protocol
+# Copyright (C) 2008-2009 Jelmer Vernooij <jelmer@samba.org>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+
+package Subunit;
+use POSIX;
+
+require Exporter;
+@ISA = qw(Exporter);
+@EXPORT_OK = qw(parse_results $VERSION);
+
+use vars qw ( $VERSION );
+
+$VERSION = '0.0.2';
+
+use strict;
+
+sub parse_results($$$)
+{
+ my ($msg_ops, $statistics, $fh) = @_;
+ my $expected_fail = 0;
+ my $unexpected_fail = 0;
+ my $unexpected_err = 0;
+ my $open_tests = [];
+
+ while(<$fh>) {
+ if (/^test: (.+)\n/) {
+ $msg_ops->control_msg($_);
+ $msg_ops->start_test($1);
+ push (@$open_tests, $1);
+ } elsif (/^time: (\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)Z\n/) {
+ $msg_ops->report_time(mktime($6, $5, $4, $3, $2-1, $1-1900));
+ } elsif (/^(success|successful|failure|fail|skip|knownfail|error|xfail): (.*?)( \[)?([ \t]*)\n/) {
+ $msg_ops->control_msg($_);
+ my $result = $1;
+ my $testname = $2;
+ my $reason = undef;
+ if ($3) {
+ $reason = "";
+ # reason may be specified in next lines
+ my $terminated = 0;
+ while(<$fh>) {
+ $msg_ops->control_msg($_);
+ if ($_ eq "]\n") { $terminated = 1; last; } else { $reason .= $_; }
+ }
+
+ unless ($terminated) {
+ $statistics->{TESTS_ERROR}++;
+ $msg_ops->end_test($testname, "error", 1, "reason ($result) interrupted");
+ return 1;
+ }
+ }
+ if ($result eq "success" or $result eq "successful") {
+ pop(@$open_tests); #FIXME: Check that popped value == $testname
+ $statistics->{TESTS_EXPECTED_OK}++;
+ $msg_ops->end_test($testname, $result, 0, $reason);
+ } elsif ($result eq "xfail" or $result eq "knownfail") {
+ pop(@$open_tests); #FIXME: Check that popped value == $testname
+ $statistics->{TESTS_EXPECTED_FAIL}++;
+ $msg_ops->end_test($testname, $result, 0, $reason);
+ $expected_fail++;
+ } elsif ($result eq "failure" or $result eq "fail") {
+ pop(@$open_tests); #FIXME: Check that popped value == $testname
+ $statistics->{TESTS_UNEXPECTED_FAIL}++;
+ $msg_ops->end_test($testname, $result, 1, $reason);
+ $unexpected_fail++;
+ } elsif ($result eq "skip") {
+ $statistics->{TESTS_SKIP}++;
+ my $last = pop(@$open_tests);
+ if (defined($last) and $last ne $testname) {
+ push (@$open_tests, $testname);
+ }
+ $msg_ops->end_test($testname, $result, 0, $reason);
+ } elsif ($result eq "error") {
+ $statistics->{TESTS_ERROR}++;
+ pop(@$open_tests); #FIXME: Check that popped value == $testname
+ $msg_ops->end_test($testname, $result, 1, $reason);
+ $unexpected_err++;
+ }
+ } else {
+ $msg_ops->output_msg($_);
+ }
+ }
+
+ while ($#$open_tests+1 > 0) {
+ $msg_ops->end_test(pop(@$open_tests), "error", 1,
+ "was started but never finished!");
+ $statistics->{TESTS_ERROR}++;
+ $unexpected_err++;
+ }
+
+ return 1 if $unexpected_err > 0;
+ return 1 if $unexpected_fail > 0;
+ return 0;
+}
+
+sub start_test($)
+{
+ my ($testname) = @_;
+ print "test: $testname\n";
+}
+
+sub end_test($$;$)
+{
+ my $name = shift;
+ my $result = shift;
+ my $reason = shift;
+ if ($reason) {
+ print "$result: $name [\n";
+ print "$reason";
+ print "]\n";
+ } else {
+ print "$result: $name\n";
+ }
+}
+
+sub skip_test($;$)
+{
+ my $name = shift;
+ my $reason = shift;
+ end_test($name, "skip", $reason);
+}
+
+sub fail_test($;$)
+{
+ my $name = shift;
+ my $reason = shift;
+ end_test($name, "fail", $reason);
+}
+
+sub success_test($;$)
+{
+ my $name = shift;
+ my $reason = shift;
+ end_test($name, "success", $reason);
+}
+
+sub xfail_test($;$)
+{
+ my $name = shift;
+ my $reason = shift;
+ end_test($name, "xfail", $reason);
+}
+
+sub report_time($)
+{
+ my ($time) = @_;
+ my ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday, $isdst) = localtime($time);
+ printf "time: %04d-%02d-%02d %02d:%02d:%02dZ\n", $year+1900, $mon+1, $mday, $hour, $min, $sec;
+}
+
+1;
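+
+# Illustrative use of the emitter helpers above (the test name is arbitrary):
+#
+#   use Subunit;
+#   Subunit::start_test("t1");
+#   Subunit::fail_test("t1", "assertion failed\n");  # emits a fail block for t1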
diff --git a/lib/subunit/perl/lib/Subunit/Diff.pm b/lib/subunit/perl/lib/Subunit/Diff.pm
new file mode 100644
index 0000000000..e7841c3b00
--- /dev/null
+++ b/lib/subunit/perl/lib/Subunit/Diff.pm
@@ -0,0 +1,85 @@
+#!/usr/bin/perl
+# Diff two subunit streams
+# Copyright (C) Jelmer Vernooij <jelmer@samba.org>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+
+package Subunit::Diff;
+
+use strict;
+
+use Subunit qw(parse_results);
+
+sub control_msg() { }
+sub report_time($$) { }
+
+sub output_msg($$)
+{
+ my ($self, $msg) = @_;
+
+ # No output for now, perhaps later diff this as well ?
+}
+
+sub start_test($$)
+{
+ my ($self, $testname) = @_;
+}
+
+sub end_test($$$$$)
+{
+ my ($self, $testname, $result, $unexpected, $reason) = @_;
+
+ $self->{$testname} = $result;
+}
+
+sub new {
+ my ($class) = @_;
+
+ my $self = {
+ };
+ bless($self, $class);
+}
+
+sub from_file($)
+{
+ my ($path) = @_;
+ my $statistics = {
+ TESTS_UNEXPECTED_OK => 0,
+ TESTS_EXPECTED_OK => 0,
+ TESTS_UNEXPECTED_FAIL => 0,
+ TESTS_EXPECTED_FAIL => 0,
+ TESTS_ERROR => 0,
+ TESTS_SKIP => 0,
+ };
+
+ my $ret = new Subunit::Diff();
+ open(IN, $path) or return;
+ parse_results($ret, $statistics, *IN);
+ close(IN);
+ return $ret;
+}
+
+sub diff($$)
+{
+ my ($old, $new) = @_;
+ my $ret = {};
+
+ foreach my $testname (keys %$old) {
+ if ($new->{$testname} ne $old->{$testname}) {
+ $ret->{$testname} = [$old->{$testname}, $new->{$testname}];
+ }
+ }
+
+ return $ret;
+}
+
+1;
diff --git a/lib/subunit/perl/subunit-diff b/lib/subunit/perl/subunit-diff
new file mode 100755
index 0000000000..581e832ae3
--- /dev/null
+++ b/lib/subunit/perl/subunit-diff
@@ -0,0 +1,31 @@
+#!/usr/bin/perl
+# Diff two subunit streams
+# Copyright (C) Jelmer Vernooij <jelmer@samba.org>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+
+use Getopt::Long;
+use strict;
+use FindBin qw($RealBin $Script);
+use lib "$RealBin/lib";
+use Subunit::Diff;
+
+my $old = Subunit::Diff::from_file($ARGV[0]);
+my $new = Subunit::Diff::from_file($ARGV[1]);
+
+my $ret = Subunit::Diff::diff($old, $new);
+
+foreach my $e (sort(keys %$ret)) {
+ printf "%s: %s -> %s\n", $e, $ret->{$e}[0], $ret->{$e}[1];
+}
+
+0;
diff --git a/lib/subunit/runtests.py b/lib/subunit/runtests.py
new file mode 100755
index 0000000000..8ecc6cd3fb
--- /dev/null
+++ b/lib/subunit/runtests.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+# -*- Mode: python -*-
+#
+# Copyright (C) 2004 Canonical.com
+# Author: Robert Collins <robert.collins@canonical.com>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+
+import unittest
+from subunit.tests.TestUtil import TestVisitor, TestSuite
+import subunit
+import sys
+import os
+import shutil
+import logging
+
+class ParameterisableTextTestRunner(unittest.TextTestRunner):
+ """I am a TextTestRunner whose result class is
+ parameterisable without further subclassing"""
+ def __init__(self, **args):
+ unittest.TextTestRunner.__init__(self, **args)
+ self._resultFactory=None
+ def resultFactory(self, *args):
+ """set or retrieve the result factory"""
+ if args:
+ self._resultFactory=args[0]
+ return self
+ if self._resultFactory is None:
+ self._resultFactory=unittest._TextTestResult
+ return self._resultFactory
+
+ def _makeResult(self):
+ return self.resultFactory()(self.stream, self.descriptions, self.verbosity)
+
+
+class EarlyStoppingTextTestResult(unittest._TextTestResult):
+ """I am a TextTestResult that can optionally stop at the first failure
+ or error"""
+
+ def addError(self, test, err):
+ unittest._TextTestResult.addError(self, test, err)
+ if self.stopOnError():
+ self.stop()
+
+ def addFailure(self, test, err):
+ unittest._TextTestResult.addFailure(self, test, err)
+ if self.stopOnFailure():
+ self.stop()
+
+ def stopOnError(self, *args):
+ """should this result indicate an abort when an error occurs?
+ TODO parameterise this"""
+ return True
+
+ def stopOnFailure(self, *args):
+ """should this result indicate an abort when a failure occurs?
+ TODO parameterise this"""
+ return True
+
+
+def earlyStopFactory(*args, **kwargs):
+ """return an early-stopping text test result"""
+ result=EarlyStoppingTextTestResult(*args, **kwargs)
+ return result
+
+
+class ShellTests(subunit.ExecTestCase):
+
+ def test_sourcing(self):
+ """./shell/tests/test_source_library.sh"""
+
+ def test_functions(self):
+ """./shell/tests/test_function_output.sh"""
+
+
+def test_suite():
+ result = TestSuite()
+ result.addTest(subunit.test_suite())
+ result.addTest(ShellTests('test_sourcing'))
+ result.addTest(ShellTests('test_functions'))
+ return result
+
+
+class filteringVisitor(TestVisitor):
+ """I accrue all the testCases I visit that pass a regexp filter on id
+ into my suite
+ """
+
+ def __init__(self, filter):
+ import re
+ TestVisitor.__init__(self)
+ self._suite=None
+ self.filter=re.compile(filter)
+
+ def suite(self):
+ """answer the suite we are building"""
+ if self._suite is None:
+ self._suite=TestSuite()
+ return self._suite
+
+ def visitCase(self, aCase):
+ if self.filter.match(aCase.id()):
+ self.suite().addTest(aCase)
+
+
+def main(argv):
+ """To parameterise what tests are run, run this script like so:
+ python runtests.py REGEX
+ e.g.
+ python runtests.py .*Protocol.*
+ to run all tests with Protocol in their id."""
+ if len(argv) > 1:
+ pattern = argv[1]
+ else:
+ pattern = ".*"
+ visitor = filteringVisitor(pattern)
+ test_suite().visit(visitor)
+ runner = ParameterisableTextTestRunner(verbosity=2)
+ runner.resultFactory(unittest._TextTestResult)
+ if not runner.run(visitor.suite()).wasSuccessful():
+ return 1
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/lib/subunit/shell/README b/lib/subunit/shell/README
new file mode 100644
index 0000000000..af894a2bd3
--- /dev/null
+++ b/lib/subunit/shell/README
@@ -0,0 +1,62 @@
+#
+# subunit shell bindings.
+# Copyright (C) 2006 Robert Collins <robertc@robertcollins.net>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+This tree contains shell bindings to the subunit protocol. They are written
+entirely in shell, and unit tested in shell. See the tests/ directory for the
+test scripts. You can use `make check` to run the tests. There is a trivial
+python test_shell.py which uses the pyunit gui to expose the test results in a
+compact form.
+
+The shell bindings consist of five functions which you can use to output test
+metadata trivially. See share/subunit.sh for the functions and comments.
+
+However, this is not a full test environment; it's support code for reporting to
+subunit. You can look at ShUnit (http://shunit.sourceforge.net) for 'proper'
+shell based xUnit functionality. There is a patch for ShUnit 1.3
+(subunit-ui.patch) in the subunit source tree. I hope to have that integrated
+upstream in the near future. I will delete the copy of the patch in the subunit
+tree a release or two later.
+
+If you maintain a test environment - whether homegrown, ShUnit or some other
+framework - you will need to see how the subunit calls should be used. Here is
+what a manually written test using the bindings might look like:
+
+
+subunit_start_test "test name"
+# determine if test passes or fails
+result=$(something)
+if [ $result == 0 ]; then
+ subunit_pass_test "test name"
+else
+ subunit_fail_test "test name" <<END
+Something went wrong running something:
+exited with result: '$result'
+END
+fi
+
+Which when run with a subunit test runner will generate something like:
+test name ... ok
+
+on success, and:
+
+test name ... FAIL
+
+======================================================================
+FAIL: test name
+----------------------------------------------------------------------
+RemoteError:
+Something went wrong running something:
+exited with result: '1'
diff --git a/lib/subunit/shell/share/subunit.sh b/lib/subunit/shell/share/subunit.sh
new file mode 100644
index 0000000000..82737276b8
--- /dev/null
+++ b/lib/subunit/shell/share/subunit.sh
@@ -0,0 +1,56 @@
+#
+# subunit.sh: shell functions to report test status via the subunit protocol.
+# Copyright (C) 2006 Robert Collins <robertc@robertcollins.net>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+subunit_start_test () {
+ # emit the current protocol start-marker for test $1
+ echo "test: $1"
+}
+
+
+subunit_pass_test () {
+ # emit the current protocol test passed marker for test $1
+ echo "success: $1"
+}
+
+
+subunit_fail_test () {
+ # emit the current protocol fail-marker for test $1, and emit stdin as
+ # the error text.
+ # we use stdin because the failure message can be arbitrarily long, and this
+ # makes it convenient to write in scripts (using <<END syntax).
+ echo "failure: $1 ["
+ cat -
+ echo "]"
+}
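+# For example (illustrative test name and message):
+#   subunit_fail_test "demo" <<END
+#   expected 3, got 4
+#   END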
+
+
+subunit_error_test () {
+ # emit the current protocol error-marker for test $1, and emit stdin as
+ # the error text.
+ # we use stdin because the failure message can be arbitrarily long, and this
+ # makes it convenient to write in scripts (using <<END syntax).
+ echo "error: $1 ["
+ cat -
+ echo "]"
+}
+
+
+subunit_skip_test () {
+ # emit the current protocol test skipped marker for test $1
+ echo "skip: $1"
+}
+
+
diff --git a/lib/subunit/shell/tests/test_function_output.sh b/lib/subunit/shell/tests/test_function_output.sh
new file mode 100755
index 0000000000..b78eee6946
--- /dev/null
+++ b/lib/subunit/shell/tests/test_function_output.sh
@@ -0,0 +1,97 @@
+#!/bin/bash
+# subunit shell bindings.
+# Copyright (C) 2006 Robert Collins <robertc@robertcollins.net>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+
+# this script tests the output of the methods. As each is tested we start using
+# it.
+# So the first test manually implements the entire protocol, the next uses the
+# start method and so on.
+# it is assumed that we are running from the 'shell' tree root in the source
+# of subunit, and that the library sourcing tests have all passed - if they
+# have not, this test script may well fail strangely.
+
+# import the library.
+. ${SHELL_SHARE}subunit.sh
+
+echo 'test: subunit_start_test output'
+func_output=$(subunit_start_test "foo bar")
+func_status=$?
+if [ $func_status == 0 -a "x$func_output" = "xtest: foo bar" ]; then
+ echo 'success: subunit_start_test output'
+else
+ echo 'failure: subunit_start_test output ['
+ echo 'got an error code or incorrect output:'
+ echo "exit: $func_status"
+ echo "output: '$func_output'"
+ echo ']' ;
+fi
+
+subunit_start_test "subunit_pass_test output"
+func_output=$(subunit_pass_test "foo bar")
+func_status=$?
+if [ $func_status == 0 -a "x$func_output" = "xsuccess: foo bar" ]; then
+ subunit_pass_test "subunit_pass_test output"
+else
+ echo 'failure: subunit_pass_test output ['
+ echo 'got an error code or incorrect output:'
+ echo "exit: $func_status"
+ echo "output: '$func_output'"
+ echo ']' ;
+fi
+
+subunit_start_test "subunit_fail_test output"
+func_output=$(subunit_fail_test "foo bar" <<END
+something
+ wrong
+here
+END
+)
+func_status=$?
+if [ $func_status == 0 -a "x$func_output" = "xfailure: foo bar [
+something
+ wrong
+here
+]" ]; then
+ subunit_pass_test "subunit_fail_test output"
+else
+ echo 'failure: subunit_fail_test output ['
+ echo 'got an error code or incorrect output:'
+ echo "exit: $func_status"
+ echo "output: '$func_output'"
+ echo ']' ;
+fi
+
+subunit_start_test "subunit_error_test output"
+func_output=$(subunit_error_test "foo bar" <<END
+something
+ died
+here
+END
+)
+func_status=$?
+if [ $func_status == 0 -a "x$func_output" = "xerror: foo bar [
+something
+ died
+here
+]" ]; then
+ subunit_pass_test "subunit_error_test output"
+else
+ subunit_fail_test "subunit_error_test output" <<END
+got an error code or incorrect output:
+exit: $func_status
+output: '$func_output'
+END
+fi
diff --git a/lib/subunit/shell/tests/test_source_library.sh b/lib/subunit/shell/tests/test_source_library.sh
new file mode 100755
index 0000000000..699f1281bc
--- /dev/null
+++ b/lib/subunit/shell/tests/test_source_library.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+# subunit shell bindings.
+# Copyright (C) 2006 Robert Collins <robertc@robertcollins.net>
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+#
+
+
+# this script tests that we can source the subunit shell bindings successfully.
+# It manually implements the control protocol so that it does not depend on the
+# bindings being complete yet.
+
+# we expect to be run from the tree root.
+
+echo 'test: shell bindings can be sourced'
+# if any output occurs, this has failed to source cleanly
+source_output=$(. ${SHELL_SHARE}subunit.sh 2>&1)
+if [ $? == 0 -a "x$source_output" = "x" ]; then
+ echo 'success: shell bindings can be sourced'
+else
+ echo 'failure: shell bindings can be sourced ['
+ echo 'got an error code or output during sourcing.:'
+ echo $source_output
+ echo ']' ;
+fi
+
+# now source it for real
+. ${SHELL_SHARE}subunit.sh
+
+# we should have a start_test function
+echo 'test: subunit_start_test exists'
+found_type=$(type -t subunit_start_test)
+status=$?
+if [ $status == 0 -a "x$found_type" = "xfunction" ]; then
+ echo 'success: subunit_start_test exists'
+else
+ echo 'failure: subunit_start_test exists ['
+ echo 'subunit_start_test is not a function:'
+ echo "type -t status: $status"
+ echo "output: $found_type"
+ echo ']' ;
+fi
+
+# we should have a pass_test function
+echo 'test: subunit_pass_test exists'
+found_type=$(type -t subunit_pass_test)
+status=$?
+if [ $status == 0 -a "x$found_type" = "xfunction" ]; then
+ echo 'success: subunit_pass_test exists'
+else
+ echo 'failure: subunit_pass_test exists ['
+ echo 'subunit_pass_test is not a function:'
+ echo "type -t status: $status"
+ echo "output: $found_type"
+ echo ']' ;
+fi
+
+# we should have a fail_test function
+echo 'test: subunit_fail_test exists'
+found_type=$(type -t subunit_fail_test)
+status=$?
+if [ $status == 0 -a "x$found_type" = "xfunction" ]; then
+ echo 'success: subunit_fail_test exists'
+else
+ echo 'failure: subunit_fail_test exists ['
+ echo 'subunit_fail_test is not a function:'
+ echo "type -t status: $status"
+ echo "output: $found_type"
+ echo ']' ;
+fi
+
+# we should have an error_test function
+echo 'test: subunit_error_test exists'
+found_type=$(type -t subunit_error_test)
+status=$?
+if [ $status == 0 -a "x$found_type" = "xfunction" ]; then
+ echo 'success: subunit_error_test exists'
+else
+ echo 'failure: subunit_error_test exists ['
+ echo 'subunit_error_test is not a function:'
+ echo "type -t status: $status"
+ echo "output: $found_type"
+ echo ']' ;
+fi
+
+# we should have a skip_test function
+echo 'test: subunit_skip_test exists'
+found_type=$(type -t subunit_skip_test)
+status=$?
+if [ $status == 0 -a "x$found_type" = "xfunction" ]; then
+ echo 'success: subunit_skip_test exists'
+else
+ echo 'failure: subunit_skip_test exists ['
+ echo 'subunit_skip_test is not a function:'
+ echo "type -t status: $status"
+ echo "output: $found_type"
+ echo ']' ;
+fi
+
diff --git a/lib/subunit/update.sh b/lib/subunit/update.sh
deleted file mode 100755
index f8265b188c..0000000000
--- a/lib/subunit/update.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-# Pull in a new snapshot of Subunit from the upstream bzr branch
-
-TARGETDIR="`dirname $0`"
-WORKDIR="`mktemp -d`"
-bzr export "$WORKDIR/subunit" lp:subunit
-bzr export "$WORKDIR/testtools" lp:testtools
-
-for p in python/ filters/tap2subunit;
-do
- rsync -avz --delete "$WORKDIR/subunit/$p" "$TARGETDIR/$p"
-done
-
-rsync -avz --delete "$WORKDIR/testtools/testtools/" "$TARGETDIR/python/testtools/"
-
-rm -rf "$WORKDIR"
diff --git a/lib/testtools/HACKING b/lib/testtools/HACKING
new file mode 100644
index 0000000000..8fe323cadd
--- /dev/null
+++ b/lib/testtools/HACKING
@@ -0,0 +1,139 @@
+===================================
+Notes for contributing to testtools
+===================================
+
+Coding style
+------------
+
+In general, follow PEP 8 <http://www.python.org/dev/peps/pep-0008/>.
+
+For consistency with the standard library's ``unittest`` module, method names
+are generally ``camelCase``.
+
+testtools supports Python 2.4 and later, so avoid any 2.5-only features like
+the ``with`` statement.
+
+
+Copyright assignment
+--------------------
+
+Part of testtools' raison d'être is to provide Python with improvements to the
+testing code it ships. For that reason we require all non-trivial
+contributions to meet one of the following rules:
+
+ - be inapplicable for inclusion in Python.
+ - be able to be included in Python without further contact with the
+ contributor.
+ - be copyright assigned to Jonathan M. Lange.
+
+Please pick one of these and specify it when contributing code to testtools.
+
+
+Licensing
+---------
+
+All code that is not copyright assigned to Jonathan M. Lange (see Copyright
+Assignment above) needs to be licensed under the MIT license that testtools
+uses, so that testtools can ship it.
+
+
+Testing
+-------
+
+Please write tests for every feature. This project ought to be a model
+example of well-tested Python code!
+
+Take particular care to make sure the *intent* of each test is clear.
+
+You can run tests with ``make check``, or by running ``./run-tests`` directly.
+
+
+Source layout
+-------------
+
+The top-level directory contains the ``testtools/`` package directory, and
+miscellaneous files like README and setup.py.
+
+The ``testtools/`` directory is the Python package itself. It is separated
+into submodules for internal clarity, but all public APIs should be “promoted”
+into the top-level package by importing them in ``testtools/__init__.py``.
+Users of testtools should never import a submodule; submodules are just
+implementation details.
+
+Tests belong in ``testtools/tests/``.
+
+
+Committing to trunk
+-------------------
+
+Testtools is maintained using bzr, with its trunk at lp:testtools. This gives
+every contributor the ability to commit their work to their own branches.
+However, permission must be granted to allow contributors to commit to the trunk
+branch.
+
+Commit access to trunk is obtained by joining the testtools-devs Launchpad
+team. Membership in this team is contingent on obeying the testtools
+contribution policy, including assigning copyright of all the work one creates
+and places in trunk to Jonathan Lange.
+
+
+Code Review
+-----------
+
+All code must be reviewed before landing on trunk. The process is to create a
+branch in launchpad, and submit it for merging to lp:testtools. It will then
+be reviewed before it can be merged to trunk. It will be reviewed by someone
+who is:
+
+ * not the author
+ * a committer (member of the testtools-devs team)
+
+As a special exception, while the testtools committers team is small and prone
+to blocking, a merge request from a committer that has not been reviewed after
+24 hours may be merged by that committer. When the team is larger this policy
+will be revisited.
+
+Code reviewers should look for the quality of what is being submitted,
+including conformance with this HACKING file.
+
+Changes which all users should be made aware of should be documented in NEWS.
+
+
+NEWS management
+---------------
+
+The file NEWS is structured as a sorted list of releases. Each release can have
+a free-form description and one or more sections with bullet-point items.
+Sections in use today are 'Improvements' and 'Changes'. To ease merging between
+branches, the bullet points are kept alphabetically sorted. The release NEXT is
+permanently present at the top of the list.
+
+
+Release tasks
+-------------
+
+In no particular order:
+
+* Choose a version number.
+
+* Ensure __init__ has that version.
+
+* Add a version number to NEWS immediately below NEXT.
+
+* Possibly write a blurb into NEWS.
+
+* Replace any additional references to NEXT with the version being released.
+
+* Create a source distribution and upload to pypi ('make release').
+
+* Upload to Launchpad as well.
+
+* If a new series has been created (e.g. 0.10.0), make the series on Launchpad.
+
+* Merge or push the release branch to trunk.
+
+* Make a new milestone for the *next release*. We don't really know how we want
+ to handle these yet, so this is a suggestion, not actual practice:
+
+ * during release we rename NEXT to $version.
+
+ * we call new milestones NEXT.
diff --git a/lib/testtools/LICENSE b/lib/testtools/LICENSE
new file mode 100644
index 0000000000..bdc733fe04
--- /dev/null
+++ b/lib/testtools/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2008 Jonathan M. Lange <jml@mumak.net> and the testtools authors.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/lib/testtools/MANIFEST.in b/lib/testtools/MANIFEST.in
new file mode 100644
index 0000000000..3296ee4c0e
--- /dev/null
+++ b/lib/testtools/MANIFEST.in
@@ -0,0 +1,9 @@
+include LICENSE
+include HACKING
+include Makefile
+include MANIFEST.in
+include MANUAL
+include NEWS
+include README
+include run-tests
+include .bzrignore
diff --git a/lib/testtools/MANUAL b/lib/testtools/MANUAL
new file mode 100644
index 0000000000..a040c2860d
--- /dev/null
+++ b/lib/testtools/MANUAL
@@ -0,0 +1,213 @@
+======
+Manual
+======
+
+Introduction
+------------
+
+This document provides an overview of the features provided by testtools. Refer
+to the API docs (i.e. docstrings) for full details on a particular feature.
+
+Extensions to TestCase
+----------------------
+
+Controlling test execution
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Testtools supports two ways to control how tests are executed. The simplest
+is to add a new (exception class, handler) pair to self.exception_handlers::
+
+ >>> self.exception_handlers.insert(-1, (ExceptionClass, handler)).
+
+Having done this, if any of setUp, tearDown, or the test method raises
+ExceptionClass, handler will be called with the test case, test result and the
+raised exception.
+
+Secondly, the whole execution of the test can be altered by overriding
+__init__ to pass in runTest=RunTestFactory. The default factory is
+testtools.runtest.RunTest, which calls case._run_setup, case._run_test_method
+and finally case._run_teardown. Other ways to control which RunTest is used
+may be added in future.
+
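+For example, a minimal sketch (CustomRunTest and MyTest are illustrative
+names; this assumes RunTest may simply be subclassed and passed in via the
+runTest parameter described above)::
+
+    from testtools import TestCase
+    from testtools.runtest import RunTest
+
+    class CustomRunTest(RunTest):
+        # Customise the execution hooks here; this subclass just inherits the
+        # default behaviour.
+        pass
+
+    class MyTest(TestCase):
+        def __init__(self, *args, **kwargs):
+            kwargs['runTest'] = CustomRunTest
+            super(MyTest, self).__init__(*args, **kwargs)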
+
+TestCase.addCleanup
+~~~~~~~~~~~~~~~~~~~
+
+addCleanup is a robust way to arrange for a cleanup function to be called
+after tearDown (consistent with Python 2.7's cleanup feature). This is a
+powerful and simple alternative to putting cleanup logic in a try/finally
+block or tearDown method. e.g.::
+
+    def test_foo(self):
+        foo.lock()
+        self.addCleanup(foo.unlock)
+        ...
+
+
+TestCase.addOnException
+~~~~~~~~~~~~~~~~~~~~~~~
+
+addOnException adds an exception handler that will be called from the test
+framework when it detects an exception from your test code. The handler is
+given the exc_info for the exception, and can use this opportunity to attach
+more data (via the addDetail API), among other uses.
+
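+A minimal sketch (the handler and attribute names are illustrative)::
+
+    from testtools import TestCase
+
+    class MyTest(TestCase):
+
+        def setUp(self):
+            super(MyTest, self).setUp()
+            self.observed_errors = []
+            # The handler receives the (type, value, traceback) tuple.
+            self.addOnException(self.observed_errors.append)
+
+        def test_something(self):
+            # If this raises, the exc_info tuple is appended to
+            # self.observed_errors before the outcome is reported.
+            ...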
+
+TestCase.skip
+~~~~~~~~~~~~~
+
+``skip`` is a simple way to have a test stop running and be reported as a
+skipped test, rather than a success/error/failure. This is an alternative to
+convoluted logic during test loading, permitting later and more localized
+decisions about the appropriateness of running a test. Many reasons exist to
+skip a test - for instance when a dependency is missing, or if the test is
+expensive and should not be run while on laptop battery power, or if the test
+is testing an incomplete feature (this is sometimes called a TODO). Using this
+feature when running your test suite with a TestResult object that is missing
+the ``addSkip`` method will result in the ``addError`` method being invoked
+instead.
+
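+For example (feature_available is a stand-in for whatever check applies)::
+
+    def test_needs_feature(self):
+        if not feature_available():
+            self.skip("feature not available on this platform")
+        ...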
+
+New assertion methods
+~~~~~~~~~~~~~~~~~~~~~
+
+testtools adds several assertion methods:
+
+ * assertIn
+ * assertNotIn
+ * assertIs
+ * assertIsNot
+ * assertIsInstance
+ * assertThat
+
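+A few illustrative uses (the names being asserted about are made up)::
+
+    self.assertIn('jml', committers)
+    self.assertNotIn('anonymous', committers)
+    self.assertIs(None, cache_lookup('missing-key'))
+    self.assertIsInstance(result, dict)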
+
+Improved assertRaises
+~~~~~~~~~~~~~~~~~~~~~
+
+TestCase.assertRaises returns the caught exception. This is useful for
+asserting more things about the exception than just the type::
+
+ error = self.assertRaises(UnauthorisedError, thing.frobnicate)
+ self.assertEqual('bob', error.username)
+ self.assertEqual('User bob cannot frobnicate', str(error))
+
+
+TestCase.assertThat
+~~~~~~~~~~~~~~~~~~~
+
+assertThat is a clean way to write complex assertions without tying them to
+the TestCase inheritance hierarchy (and thus making them easier to reuse).
+
+assertThat takes an object to be matched, and a matcher, and fails if the
+matcher does not match the matchee.
+
+See pydoc testtools.Matcher for the protocol that matchers need to implement.
+
+testtools includes some matchers in testtools.matchers.
+python -c 'import testtools.matchers; print testtools.matchers.__all__' will
+list those matchers.
+
+An example using the DocTestMatches matcher, which uses doctest's example
+matching logic::
+
+    def test_foo(self):
+        self.assertThat([1,2,3,4], DocTestMatches('[1, 2, 3, 4]'))
+
+
+Creation methods
+~~~~~~~~~~~~~~~~
+
+testtools.TestCase implements creation methods called ``getUniqueString`` and
+``getUniqueInteger``. See pages 419-423 of *xUnit Test Patterns* by Meszaros
+for a detailed discussion of creation methods.
+
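+For example (a sketch; the code under test is imaginary)::
+
+    def test_rename_user(self):
+        name = self.getUniqueString()
+        uid = self.getUniqueInteger()
+        user = make_user(name, uid)
+        ...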
+
+Test renaming
+~~~~~~~~~~~~~
+
+``testtools.clone_test_with_new_id`` is a function to copy a test case
+instance to one with a new name. This is helpful for implementing test
+parameterization.
+
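+A small sketch (MyTest, the backend names and the suite construction are
+illustrative)::
+
+    import unittest
+    from testtools import clone_test_with_new_id
+
+    base = MyTest('test_login')
+    suite = unittest.TestSuite()
+    for backend in ('ldap', 'tdb'):
+        new_id = '%s(%s)' % (base.id(), backend)
+        suite.addTest(clone_test_with_new_id(base, new_id))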
+
+Extensions to TestResult
+------------------------
+
+TestResult.addSkip
+~~~~~~~~~~~~~~~~~~
+
+This method is called on result objects when a test skips. The
+``testtools.TestResult`` class records skips in its ``skip_reasons`` instance
+dict. These can be reported on in much the same way as successful tests.
+
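+For example, assuming ``skip_reasons`` maps each reason to the tests skipped
+for it (a sketch; MySkippingTest is illustrative)::
+
+    from testtools import TestResult
+
+    result = TestResult()
+    MySkippingTest('test_optional_feature').run(result)
+    for reason, tests in result.skip_reasons.items():
+        print reason, [test.id() for test in tests]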
+
+TestResult.time
+~~~~~~~~~~~~~~~
+
+This method controls the time used by a TestResult, permitting accurate
+timing of test results gathered on different machines or in different threads.
+See pydoc testtools.TestResult.time for more details.
+
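+A sketch of the intended use, assuming ``time`` accepts a datetime to fix the
+clock and None to return to reading the system clock::
+
+    import datetime
+    from testtools import TestResult
+
+    result = TestResult()
+    result.time(datetime.datetime.utcnow())   # use this timestamp from now on
+    ...
+    result.time(None)                         # revert to the real clock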
+
+ThreadsafeForwardingResult
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+A TestResult which forwards activity to another test result, but synchronises
+on a semaphore to ensure that all the activity for a single test arrives in a
+batch. This allows simple TestResults which do not expect concurrent test
+reporting to be fed the activity from multiple test threads, or processes.
+
+Note that when you provide multiple errors for a single test, the target sees
+each error as a distinct complete test.
+
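+A minimal sketch, assuming ThreadsafeForwardingResult is exported from the
+top-level package and its constructor takes the target result and a
+semaphore::
+
+    import threading
+    from testtools import TestResult, ThreadsafeForwardingResult
+
+    final_result = TestResult()
+    semaphore = threading.Semaphore(1)
+    # Give each worker thread its own forwarding result wrapping final_result.
+    per_thread_result = ThreadsafeForwardingResult(final_result, semaphore)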
+
+TextTestResult
+~~~~~~~~~~~~~~
+
+A TestResult that provides a text UI very similar to the Python standard
+library UI. Key differences are that it supports the extended outcomes and
+details API, and is completely encapsulated into the result object, permitting
+it to be used without a 'TestRunner' object. Not all the Python 2.7 outcomes
+are displayed (yet). It is also a 'quiet' result with no dots or verbose mode.
+These limitations will be corrected soon.
+
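+Because it is self-contained, it can be driven directly (a sketch; ``suite``
+stands for any test or suite)::
+
+    import sys
+    from testtools import TextTestResult
+
+    result = TextTestResult(sys.stdout)
+    result.startTestRun()
+    try:
+        suite.run(result)
+    finally:
+        result.stopTestRun()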
+
+Test Doubles
+~~~~~~~~~~~~
+
+In testtools.testresult.doubles there are three test doubles that testtools
+uses for its own testing: Python26TestResult, Python27TestResult,
+ExtendedTestResult. Each of these implements a single variation of the
+TestResult API and logs activity to a list, self._events. They are made
+available for the convenience of people writing their own extensions.
+
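+For instance, a sketch of checking that an extension reports a test start
+(run_my_extension is illustrative, and the event tuple layout shown in the
+comment is an assumption)::
+
+    from testtools.testresult.doubles import ExtendedTestResult
+
+    def test_my_extension_reports_start(self):
+        result = ExtendedTestResult()
+        run_my_extension(result)
+        # Each recorded event is a tuple whose first element names the call.
+        self.assertEqual('startTest', result._events[0][0])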
+
+startTestRun and stopTestRun
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Python 2.7 added hooks 'startTestRun' and 'stopTestRun' which are called
+before and after the entire test run. 'stopTestRun' is particularly useful for
+test results that wish to produce summary output.
+
+testtools.TestResult provides empty startTestRun and stopTestRun methods, and
+the default testtools runner will call these methods appropriately.
+
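+A small sketch of a result that prints a summary at the end of the run::
+
+    from testtools import TestResult
+
+    class SummaryResult(TestResult):
+        def stopTestRun(self):
+            super(SummaryResult, self).stopTestRun()
+            print '%d tests run' % self.testsRun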
+
+Extensions to TestSuite
+-----------------------
+
+ConcurrentTestSuite
+~~~~~~~~~~~~~~~~~~~
+
+A TestSuite for parallel testing. This is used in conjunction with a helper
+that runs a single suite in some parallel fashion (for instance by forking,
+handing off to a subprocess, to a compute cloud, or simply using threads).
+ConcurrentTestSuite uses the helper to get a number of separate runnable
+objects, each with a run(result) method, and runs them all in threads, using a
+ThreadsafeForwardingResult to coalesce their activity.
+
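+A sketch of the shape of such a helper (make_full_suite and the use of plain
+threads are illustrative)::
+
+    import unittest
+    from testtools import ConcurrentTestSuite, TestResult
+
+    def split_suite(suite):
+        # Return one runnable suite per direct child; each will get its own
+        # thread and its own ThreadsafeForwardingResult.
+        return [unittest.TestSuite([child]) for child in suite]
+
+    concurrent_suite = ConcurrentTestSuite(make_full_suite(), split_suite)
+    concurrent_suite.run(TestResult())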
+
+Running tests
+-------------
+
+Testtools provides a convenient way to run a test suite using the testtools
+result object: python -m testtools.run testspec [testspec...].
diff --git a/lib/testtools/Makefile b/lib/testtools/Makefile
new file mode 100644
index 0000000000..5e232e3394
--- /dev/null
+++ b/lib/testtools/Makefile
@@ -0,0 +1,28 @@
+# See README for copyright and licensing details.
+
+PYTHON=python
+SOURCES=$(shell find testtools -name "*.py")
+
+check:
+ PYTHONPATH=$(PWD) $(PYTHON) -m testtools.run testtools.tests.test_suite
+
+TAGS: ${SOURCES}
+ ctags -e -R testtools/
+
+tags: ${SOURCES}
+ ctags -R testtools/
+
+clean:
+ rm -f TAGS tags
+ find testtools -name "*.pyc" -exec rm '{}' \;
+
+release:
+ ./setup.py sdist upload --sign
+
+apidocs:
+ pydoctor --make-html --add-package testtools \
+ --docformat=restructuredtext --project-name=testtools \
+ --project-url=https://launchpad.net/testtools
+
+
+.PHONY: check clean release apidocs
diff --git a/lib/testtools/NEWS b/lib/testtools/NEWS
new file mode 100644
index 0000000000..90d7fc492a
--- /dev/null
+++ b/lib/testtools/NEWS
@@ -0,0 +1,191 @@
+testtools NEWS
+++++++++++++++
+
+NEXT
+~~~~
+
+Improvements
+------------
+
+* New matcher "Annotate" that adds a simple string message to another matcher,
+ much like the optional 'message' parameter to the standard library assertFoo
+ methods.
+
+* New matchers "Not" and "MatchesAll". "Not" will invert another matcher, and
+ "MatchesAll" needs a successful match for all of its arguments.
+
+* On Python 2.4, where types.FunctionType cannot be deepcopied, testtools will
+ now monkeypatch copy._deepcopy_dispatch using the same trivial patch that
+ added such support to Python 2.5. The monkey patch is triggered by the
+ absence of FunctionType from the dispatch dict rather than a version check.
+ Bug #498030.
+
+* On Windows the test 'test_now_datetime_now' should now work reliably.
+
+* TestCase.getUniqueInteger and TestCase.getUniqueString now have docstrings.
+
+* TestCase.getUniqueString now takes an optional prefix parameter, so you can
+ now use it in circumstances that forbid strings with '.'s, and such like.
+
+* testtools.testcase.clone_test_with_new_id now uses copy.copy, rather than
+ copy.deepcopy. Tests that need a deeper copy should use the copy protocol to
+ control how they are copied. Bug #498869.
+
+* The backtrace test result output tests should now pass on Windows and other
+ systems where os.sep is not '/'.
+
+
+0.9.2
+~~~~~
+
+Python 3 support, more matchers and better consistency with Python 2.7 --
+you'd think that would be enough for a point release. Well, we here on the
+testtools project think that you deserve more.
+
+We've added a hook so that user code can be called just-in-time whenever there
+is an exception, and we've also factored out the "run" logic of test cases so
+that new outcomes can be added without fiddling with the actual flow of logic.
+
+It might sound like small potatoes, but it's changes like these that will
+bring about the end of test frameworks.
+
+
+Improvements
+------------
+
+* Failures in setUp and tearDown are now reported as failures, not as errors.
+
+* Cleanups now run after tearDown to be consistent with Python 2.7's cleanup
+ feature.
+
+* ExtendedToOriginalDecorator now passes unrecognised attributes through
+ to the decorated result object, permitting other extensions to the
+ TestCase -> TestResult protocol to work.
+
+* It is now possible to trigger code just-in-time after an exception causes
+ a test outcome such as failure or skip. See the testtools MANUAL or
+ ``pydoc testtools.TestCase.addOnException``. (bug #469092)
+
+* New matcher Equals which performs a simple equality test.
+
+* New matcher MatchesAny which looks for a match of any of its arguments.
+
+* TestCase no longer breaks if a TestSkipped exception is raised with no
+ parameters.
+
+* TestCase.run now clones test cases before they are run and runs the clone.
+ This reduces memory footprint in large test runs - state accumulated on
+ test objects during their setup and execution gets freed when the test case
+ has finished running, unless the TestResult object keeps a reference.
+ NOTE: As test cloning uses deepcopy, this can potentially interfere if
+ a test suite has shared state (such as the testscenarios or testresources
+ projects use). Use the __deepcopy__ hook to control the copying of such
+ objects so that the shared references stay shared.
+
+* Testtools now accepts contributions without copyright assignment under some
+ circumstances. See HACKING for details.
+
+* Testtools now provides a convenient way to run a test suite using the
+ testtools result object: python -m testtools.run testspec [testspec...].
+
+* Testtools now works on Python 3, thanks to Benjamin Peterson.
+
+* Test execution now uses a separate class, testtools.RunTest to run single
+ tests. This can be customised and extended in a more consistent fashion than
+ the previous run method idiom. See pydoc for more information.
+
+* The test doubles that testtools itself uses are now available as part of
+ the testtools API in testtools.testresult.doubles.
+
+* TracebackContent now sets utf8 as the charset encoding, rather than not
+ setting one and encoding with the default encoder.
+
+* With Python 2.7, testtools.TestSkipped will be the unittest.case.SkipTest
+ exception class, making skips compatible with code that manually raises the
+ standard library exception. (bug #490109)
+
+Changes
+-------
+
+* TestCase.getUniqueInteger is now implemented using itertools.count. Thanks
+ to Benjamin Peterson for the patch. (bug #490111)
+
+
+0.9.1
+~~~~~
+
+The new matcher API introduced in 0.9.0 had a small flaw where the matchee
+would be evaluated twice to get a description of the mismatch. This could lead
+to bugs if the act of matching caused side effects to occur in the matchee.
+Since having such side effects isn't desirable, we have changed the API now
+before it has become widespread.
+
+Changes
+-------
+
+* Matcher API changed to avoid evaluating matchee twice. Please consult
+ the API documentation.
+
+* TestCase.getUniqueString now uses the test id, not the test method name,
+ which works nicer with parameterised tests.
+
+Improvements
+------------
+
+* Python2.4 is now supported again.
+
+
+0.9.0
+~~~~~
+
+This release of testtools is perhaps the most interesting and exciting one
+it's ever had. We've continued in bringing together the best practices of unit
+testing from across a raft of different Python projects, but we've also
+extended our mission to incorporating unit testing concepts from other
+languages and from our own research, led by Robert Collins.
+
+We now support skipping and expected failures. We'll make sure that you
+up-call setUp and tearDown, avoiding unexpected testing weirdnesses. We're
+now compatible with the Python 2.5, 2.6 and 2.7 unittest library.
+
+All in all, if you are serious about unit testing and want to get the best
+thinking from the whole Python community, you should get this release.
+
+Improvements
+------------
+
+* A new TestResult API has been added for attaching details to test outcomes.
+ This API is currently experimental, but is being prepared with the intent
+ of becoming an upstream Python API. For more details see pydoc
+ testtools.TestResult and the TestCase addDetail / getDetails methods.
+
+* assertThat has been added to TestCase. This new assertion supports
+ a hamcrest-inspired matching protocol. See pydoc testtools.Matcher for
+ details about writing matchers, and testtools.matchers for the included
+ matchers. See http://code.google.com/p/hamcrest/.
+
+* Compatible with Python 2.6 and Python 2.7.
+
+* Failing to upcall in setUp or tearDown will now cause a test failure.
+ While the base methods do nothing, failing to upcall is usually a problem
+ in deeper hierarchies, and checking that the root method is called is a
+ simple way to catch this common bug.
+
+* New TestResult decorator ExtendedToOriginalDecorator which handles
+ downgrading extended API calls like addSkip to older result objects that
+ do not support them. This is used internally to make testtools simpler but
+ can also be used to simplify other code built on or for use with testtools.
+
+* New TextTestResult supporting the extended APIs that testtools provides.
+
+* Nose will no longer find 'runTest' tests in classes derived from
+ testtools.testcase.TestCase (bug #312257).
+
+* Supports the Python 2.7/3.1 addUnexpectedSuccess and addExpectedFailure
+ TestResult methods, with a support function 'knownFailure' to let tests
+ trigger these outcomes.
+
+* When using the skip feature with TestResult objects that do not support it
+ a test success will now be reported. Previously an error was reported but
+ production experience has shown that this is too disruptive for projects that
+ are using skips: they cannot get a clean run on down-level result objects.
diff --git a/lib/testtools/README b/lib/testtools/README
new file mode 100644
index 0000000000..5e3dd07cd6
--- /dev/null
+++ b/lib/testtools/README
@@ -0,0 +1,54 @@
+=========
+testtools
+=========
+
+testtools is a set of extensions to the Python standard library's unit testing
+framework.
+
+These extensions have been derived from years of experience with unit testing
+in Python and come from many different sources.
+
+Licensing
+---------
+
+This project is distributed under the MIT license and copyright is owned by
+Jonathan M. Lange. See LICENSE for details.
+
+
+Dependencies
+------------
+
+ * Python 2.4+ or 3.0+
+
+
+Bug reports and patches
+-----------------------
+
+Please report bugs using Launchpad at <https://bugs.launchpad.net/testtools>.
+Patches can also be submitted via Launchpad, or mailed to the author. You can
+mail the author directly at jml@mumak.net.
+
+There's no mailing list for this project yet, however the testing-in-python
+mailing list may be a useful resource:
+
+ * Address: testing-in-python@lists.idyll.org
+ * Subscription link: http://lists.idyll.org/listinfo/testing-in-python
+
+
+History
+-------
+
+testtools used to be called 'pyunit3k'. The name was changed to avoid
+conflating the library with the Python 3.0 release (commonly referred to as
+'py3k').
+
+
+Thanks
+------
+
+ * Canonical Ltd
+ * Bazaar
+ * Twisted Matrix Labs
+ * Robert Collins
+ * Andrew Bennetts
+ * Benjamin Peterson
diff --git a/lib/testtools/setup.py b/lib/testtools/setup.py
new file mode 100755
index 0000000000..d7ed46f79f
--- /dev/null
+++ b/lib/testtools/setup.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+"""Distutils installer for testtools."""
+
+from distutils.core import setup
+import testtools
+version = '.'.join(str(component) for component in testtools.__version__[0:3])
+phase = testtools.__version__[3]
+if phase != 'final':
+ import bzrlib.workingtree
+ t = bzrlib.workingtree.WorkingTree.open_containing(__file__)[0]
+ if phase == 'alpha':
+ # No idea what the next version will be
+ version = 'next-%s' % t.branch.revno()
+ else:
+ # Preserve the version number but give it a revno prefix
+ version = version + '~%s' % t.branch.revno()
+
+setup(name='testtools',
+ author='Jonathan M. Lange',
+ author_email='jml+testtools@mumak.net',
+ url='https://launchpad.net/testtools',
+ description=('Extensions to the Python standard library unit testing '
+ 'framework'),
+ version=version,
+ packages=['testtools', 'testtools.testresult', 'testtools.tests'])
diff --git a/lib/subunit/python/testtools/__init__.py b/lib/testtools/testtools/__init__.py
index 0504d661d4..0504d661d4 100644
--- a/lib/subunit/python/testtools/__init__.py
+++ b/lib/testtools/testtools/__init__.py
diff --git a/lib/subunit/python/testtools/content.py b/lib/testtools/testtools/content.py
index 353e3f0f46..353e3f0f46 100644
--- a/lib/subunit/python/testtools/content.py
+++ b/lib/testtools/testtools/content.py
diff --git a/lib/subunit/python/testtools/content_type.py b/lib/testtools/testtools/content_type.py
index aded81b732..aded81b732 100644
--- a/lib/subunit/python/testtools/content_type.py
+++ b/lib/testtools/testtools/content_type.py
diff --git a/lib/subunit/python/testtools/matchers.py b/lib/testtools/testtools/matchers.py
index 039c84b7c7..039c84b7c7 100644
--- a/lib/subunit/python/testtools/matchers.py
+++ b/lib/testtools/testtools/matchers.py
diff --git a/lib/subunit/python/testtools/run.py b/lib/testtools/testtools/run.py
index c4f461ecfb..c4f461ecfb 100755
--- a/lib/subunit/python/testtools/run.py
+++ b/lib/testtools/testtools/run.py
diff --git a/lib/subunit/python/testtools/runtest.py b/lib/testtools/testtools/runtest.py
index 053e2205a7..053e2205a7 100644
--- a/lib/subunit/python/testtools/runtest.py
+++ b/lib/testtools/testtools/runtest.py
diff --git a/lib/subunit/python/testtools/testcase.py b/lib/testtools/testtools/testcase.py
index fd70141e6d..fd70141e6d 100644
--- a/lib/subunit/python/testtools/testcase.py
+++ b/lib/testtools/testtools/testcase.py
diff --git a/lib/subunit/python/testtools/testresult/__init__.py b/lib/testtools/testtools/testresult/__init__.py
index 2ee3d25293..2ee3d25293 100644
--- a/lib/subunit/python/testtools/testresult/__init__.py
+++ b/lib/testtools/testtools/testresult/__init__.py
diff --git a/lib/subunit/python/testtools/testresult/doubles.py b/lib/testtools/testtools/testresult/doubles.py
index d231c919c2..d231c919c2 100644
--- a/lib/subunit/python/testtools/testresult/doubles.py
+++ b/lib/testtools/testtools/testresult/doubles.py
diff --git a/lib/subunit/python/testtools/testresult/real.py b/lib/testtools/testtools/testresult/real.py
index 8c8a3edd6e..8c8a3edd6e 100644
--- a/lib/subunit/python/testtools/testresult/real.py
+++ b/lib/testtools/testtools/testresult/real.py
diff --git a/lib/subunit/python/testtools/tests/__init__.py b/lib/testtools/testtools/tests/__init__.py
index 2cceba91e2..2cceba91e2 100644
--- a/lib/subunit/python/testtools/tests/__init__.py
+++ b/lib/testtools/testtools/tests/__init__.py
diff --git a/lib/subunit/python/testtools/tests/helpers.py b/lib/testtools/testtools/tests/helpers.py
index c4cf10c736..c4cf10c736 100644
--- a/lib/subunit/python/testtools/tests/helpers.py
+++ b/lib/testtools/testtools/tests/helpers.py
diff --git a/lib/subunit/python/testtools/tests/test_content.py b/lib/testtools/testtools/tests/test_content.py
index 1159362036..1159362036 100644
--- a/lib/subunit/python/testtools/tests/test_content.py
+++ b/lib/testtools/testtools/tests/test_content.py
diff --git a/lib/subunit/python/testtools/tests/test_content_type.py b/lib/testtools/testtools/tests/test_content_type.py
index dbefc21dec..dbefc21dec 100644
--- a/lib/subunit/python/testtools/tests/test_content_type.py
+++ b/lib/testtools/testtools/tests/test_content_type.py
diff --git a/lib/subunit/python/testtools/tests/test_matchers.py b/lib/testtools/testtools/tests/test_matchers.py
index 74b1ebc56a..74b1ebc56a 100644
--- a/lib/subunit/python/testtools/tests/test_matchers.py
+++ b/lib/testtools/testtools/tests/test_matchers.py
diff --git a/lib/subunit/python/testtools/tests/test_runtest.py b/lib/testtools/testtools/tests/test_runtest.py
index 5c46ad1784..5c46ad1784 100644
--- a/lib/subunit/python/testtools/tests/test_runtest.py
+++ b/lib/testtools/testtools/tests/test_runtest.py
diff --git a/lib/subunit/python/testtools/tests/test_testresult.py b/lib/testtools/testtools/tests/test_testresult.py
index df15b91244..df15b91244 100644
--- a/lib/subunit/python/testtools/tests/test_testresult.py
+++ b/lib/testtools/testtools/tests/test_testresult.py
diff --git a/lib/subunit/python/testtools/tests/test_testsuite.py b/lib/testtools/testtools/tests/test_testsuite.py
index 3f2f02758f..3f2f02758f 100644
--- a/lib/subunit/python/testtools/tests/test_testsuite.py
+++ b/lib/testtools/testtools/tests/test_testsuite.py
diff --git a/lib/subunit/python/testtools/tests/test_testtools.py b/lib/testtools/testtools/tests/test_testtools.py
index af1fd794c3..af1fd794c3 100644
--- a/lib/subunit/python/testtools/tests/test_testtools.py
+++ b/lib/testtools/testtools/tests/test_testtools.py
diff --git a/lib/subunit/python/testtools/testsuite.py b/lib/testtools/testtools/testsuite.py
index 26b193799b..26b193799b 100644
--- a/lib/subunit/python/testtools/testsuite.py
+++ b/lib/testtools/testtools/testsuite.py
diff --git a/lib/subunit/python/testtools/utils.py b/lib/testtools/testtools/utils.py
index c0845b610c..c0845b610c 100644
--- a/lib/subunit/python/testtools/utils.py
+++ b/lib/testtools/testtools/utils.py
diff --git a/lib/torture/subunit.c b/lib/torture/subunit.c
index 832f11fafc..86b3dd0e60 100644
--- a/lib/torture/subunit.c
+++ b/lib/torture/subunit.c
@@ -81,10 +81,32 @@ static void subunit_warning(struct torture_context *test,
fprintf(stderr, "WARNING!: %s\n", comment);
}
+static void subunit_progress(struct torture_context *tctx, int offset, enum torture_progress_whence whence)
+{
+ switch (whence) {
+ case TORTURE_PROGRESS_SET:
+ printf("progress: %d\n", offset);
+ break;
+ case TORTURE_PROGRESS_CUR:
+ printf("progress: %+-d\n", offset);
+ break;
+ case TORTURE_PROGRESS_POP:
+ printf("progress: pop\n");
+ break;
+ case TORTURE_PROGRESS_PUSH:
+ printf("progress: push\n");
+ break;
+ default:
+ fprintf(stderr, "Invalid call to progress()\n");
+ break;
+ }
+}
+
const struct torture_ui_ops torture_subunit_ui_ops = {
.comment = subunit_comment,
.warning = subunit_warning,
.test_start = subunit_test_start,
.test_result = subunit_test_result,
- .suite_start = subunit_suite_start
+ .suite_start = subunit_suite_start,
+ .progress = subunit_progress,
};
diff --git a/lib/torture/torture.c b/lib/torture/torture.c
index 672726ebf2..9adf6816b8 100644
--- a/lib/torture/torture.c
+++ b/lib/torture/torture.c
@@ -245,6 +245,23 @@ struct torture_tcase *torture_suite_add_tcase(struct torture_suite *suite,
return tcase;
}
+int torture_suite_children_count(const struct torture_suite *suite)
+{
+ int ret = 0;
+ struct torture_tcase *tcase;
+ struct torture_test *test;
+ struct torture_suite *tsuite;
+ for (tcase = suite->testcases; tcase; tcase = tcase->next) {
+ for (test = tcase->tests; test; test = test->next) {
+ ret++;
+ }
+ }
+ for (tsuite = suite->children; tsuite; tsuite = tsuite->next) {
+ ret ++;
+ }
+ return ret;
+}
+
/**
* Run a torture test suite.
*/
@@ -259,6 +276,8 @@ bool torture_run_suite(struct torture_context *context,
if (context->results->ui_ops->suite_start)
context->results->ui_ops->suite_start(context, suite);
+ context->results->ui_ops->progress(context,
+ torture_suite_children_count(suite), TORTURE_PROGRESS_SET);
old_testname = context->active_testname;
if (old_testname != NULL)
context->active_testname = talloc_asprintf(context, "%s-%s",
@@ -271,7 +290,9 @@ bool torture_run_suite(struct torture_context *context,
}
for (tsuite = suite->children; tsuite; tsuite = tsuite->next) {
+ context->results->ui_ops->progress(context, 0, TORTURE_PROGRESS_PUSH);
ret &= torture_run_suite(context, tsuite);
+ context->results->ui_ops->progress(context, 0, TORTURE_PROGRESS_POP);
}
talloc_free(context->active_testname);
diff --git a/lib/torture/torture.h b/lib/torture/torture.h
index ad3668fbe8..cb2c02ba3e 100644
--- a/lib/torture/torture.h
+++ b/lib/torture/torture.h
@@ -34,6 +34,13 @@ enum torture_result {
TORTURE_SKIP=3
};
+enum torture_progress_whence {
+ TORTURE_PROGRESS_SET,
+ TORTURE_PROGRESS_CUR,
+ TORTURE_PROGRESS_POP,
+ TORTURE_PROGRESS_PUSH,
+};
+
/*
* These callbacks should be implemented by any backend that wishes
* to listen to reports from the torture tests.
@@ -52,6 +59,7 @@ struct torture_ui_ops
struct torture_test *);
void (*test_result) (struct torture_context *,
enum torture_result, const char *reason);
+ void (*progress) (struct torture_context *, int offset, enum torture_progress_whence whence);
};
void torture_ui_test_start(struct torture_context *context,
@@ -465,6 +473,7 @@ struct torture_test *torture_tcase_add_simple_test(struct torture_tcase *tcase,
bool torture_suite_init_tcase(struct torture_suite *suite,
struct torture_tcase *tcase,
const char *name);
+int torture_suite_children_count(const struct torture_suite *suite);
struct torture_context *torture_context_init(struct tevent_context *event_ctx, struct torture_results *results);
diff --git a/lib/update-external.sh b/lib/update-external.sh
new file mode 100755
index 0000000000..53748d8810
--- /dev/null
+++ b/lib/update-external.sh
@@ -0,0 +1,16 @@
+#!/bin/sh
+# Pull in a new snapshot of external projects that are included in
+# our source tree for users that don't have them installed on their system
+
+TARGETDIR="`dirname $0`"
+WORKDIR="`mktemp -d`"
+
+echo "Updating subunit..."
+bzr export "$WORKDIR/subunit" lp:subunit
+rsync -avz --delete "$WORKDIR/subunit/" "$TARGETDIR/subunit/"
+
+echo "Updating testtools..."
+bzr export "$WORKDIR/testtools" lp:testtools
+rsync -avz --delete "$WORKDIR/testtools/" "$TARGETDIR/testtools/"
+
+rm -rf "$WORKDIR"
diff --git a/selftest/Subunit.pm b/selftest/Subunit.pm
index 2a9fc0e48b..42a9ad08aa 100644
--- a/selftest/Subunit.pm
+++ b/selftest/Subunit.pm
@@ -95,8 +95,6 @@ sub parse_results($$$)
}
} elsif (/^testsuite: (.*)\n/) {
$msg_ops->start_testsuite($1);
- } elsif (/^testsuite-count: (\d+)\n/) {
- $msg_ops->testsuite_count($1);
} else {
$msg_ops->output_msg($_);
}
@@ -176,6 +174,27 @@ sub report_time($)
printf "time: %04d-%02d-%02d %02d:%02d:%02d\n", $year+1900, $mon+1, $mday, $hour, $min, $sec;
}
+sub progress_pop()
+{
+ print "progress: pop\n";
+}
+
+sub progress_push()
+{
+ print "progress: push\n";
+}
+
+sub progress($;$)
+{
+ my ($count, $whence) = @_;
+
+ unless(defined($whence)) {
+ $whence = "";
+ }
+
+ print "progress: $whence$count\n";
+}
+
# The following are Samba extensions:
sub start_testsuite($)
@@ -208,10 +227,4 @@ sub end_testsuite($$;$)
}
}
-sub testsuite_count($)
-{
- my ($count) = @_;
- print "testsuite-count: $count\n";
-}
-
1;
diff --git a/selftest/filter-subunit b/selftest/filter-subunit
new file mode 100755
index 0000000000..605a89840a
--- /dev/null
+++ b/selftest/filter-subunit
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# Filter a subunit stream
+# Copyright (C) Jelmer Vernooij <jelmer@samba.org>
+# Published under the GNU GPL, v3 or later
+
+import optparse
+import os
+import sys
+import signal
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../lib/subunit/python"))
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../lib/testtools"))
+
+import subunithelper
+
+parser = optparse.OptionParser("filter-subunit [options] < instream > outstream")
+parser.add_option("--expected-failures", type="string",
+ help="File containing list of regexes matching tests to consider known "
+ "failures")
+parser.add_option("--strip-passed-output", action="store_true",
+ help="Whether to strip output from tests that passed")
+
+parser.add_option("--prefix", type="string",
+ help="Add prefix to all test names")
+
+opts, args = parser.parse_args()
+
+if opts.expected_failures:
+ expected_failures = list(subunithelper.read_test_regexes(opts.expected_failures))
+else:
+ expected_failures = []
+
+statistics = {
+ 'TESTS_UNEXPECTED_OK': 0,
+ 'TESTS_EXPECTED_OK': 0,
+ 'TESTS_UNEXPECTED_FAIL': 0,
+ 'TESTS_EXPECTED_FAIL': 0,
+ 'TESTS_ERROR': 0,
+ 'TESTS_SKIP': 0,
+}
+
+def handle_sigint(sig, stack):
+ sys.exit(0)
+signal.signal(signal.SIGINT, handle_sigint)
+
+msg_ops = subunithelper.FilterOps(opts.prefix, expected_failures,
+ opts.strip_passed_output)
+
+sys.exit(subunithelper.parse_results(msg_ops, statistics, sys.stdin))
diff --git a/selftest/filter-subunit.pl b/selftest/filter-subunit.pl
deleted file mode 100755
index 5e87ef49f6..0000000000
--- a/selftest/filter-subunit.pl
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/perl
-# Filter a subunit stream
-# Copyright (C) Jelmer Vernooij <jelmer@samba.org>
-# Published under the GNU GPL, v3 or later
-
-=pod
-
-=head1 NAME
-
-filter-subunit - Filter a subunit stream
-
-=head1 SYNOPSIS
-
-filter-subunit --help
-
-filter-subunit --prefix=PREFIX --known-failures=FILE < in-stream > out-stream
-
-=head1 DESCRIPTION
-
-Simple Subunit stream filter that will change failures to known failures
-based on a list of regular expressions.
-
-=head1 OPTIONS
-
-=over 4
-
-=item I<--prefix>
-
-Add the specified prefix to all test names.
-
-=item I<--expected-failures>
-
-Specify a file containing a list of tests that are expected to fail. Failures
-for these tests will be counted as successes, successes will be counted as
-failures.
-
-The format for the file is, one entry per line:
-
-TESTSUITE-NAME.TEST-NAME
-
-The reason for a test can also be specified, by adding a hash sign (#) and the reason
-after the test name.
-
-=head1 LICENSE
-
-selftest is licensed under the GNU General Public License L<http://www.gnu.org/licenses/gpl.html>.
-
-
-=head1 AUTHOR
-
-Jelmer Vernooij
-
-=cut
-
-use Getopt::Long;
-use strict;
-use FindBin qw($RealBin $Script);
-use lib "$RealBin";
-use Subunit qw(parse_results);
-use Subunit::Filter;
-
-my $opt_expected_failures = undef;
-my $opt_help = 0;
-my $opt_prefix = undef;
-my $opt_strip_ok_output = 0;
-my @expected_failures = ();
-
-my $result = GetOptions(
- 'expected-failures=s' => \$opt_expected_failures,
- 'strip-passed-output' => \$opt_strip_ok_output,
- 'prefix=s' => \$opt_prefix,
- 'help' => \$opt_help,
- );
-exit(1) if (not $result);
-
-if ($opt_help) {
- print "Usage: filter-subunit [--prefix=PREFIX] [--expected-failures=FILE]... < instream > outstream\n";
- exit(0);
-}
-
-if (defined($opt_expected_failures)) {
- @expected_failures = Subunit::Filter::read_test_regexes($opt_expected_failures);
-}
-
-# we want unbuffered output
-$| = 1;
-
-my $statistics = {
- TESTS_UNEXPECTED_OK => 0,
- TESTS_EXPECTED_OK => 0,
- TESTS_UNEXPECTED_FAIL => 0,
- TESTS_EXPECTED_FAIL => 0,
- TESTS_ERROR => 0,
- TESTS_SKIP => 0,
-};
-
-my $msg_ops = new Subunit::Filter($opt_prefix, \@expected_failures,
- $opt_strip_ok_output);
-
-exit(parse_results($msg_ops, $statistics, *STDIN));
diff --git a/selftest/format-subunit b/selftest/format-subunit
index 3747082839..54949df97a 100755
--- a/selftest/format-subunit
+++ b/selftest/format-subunit
@@ -6,9 +6,14 @@
import optparse
import os
+import signal
import sys
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../lib/subunit/python"))
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../lib/testtools"))
+
import subunithelper
+import subunit
class PlainFormatter(object):
@@ -25,10 +30,19 @@ class PlainFormatter(object):
self.summaryfile = summaryfile
self.index = 0
self.name = None
+ self._progress_level = 0
self.totalsuites = totaltests
- def testsuite_count(self, count):
- self.totalsuites = count
+ def progress(self, offset, whence):
+ if whence == subunit.PROGRESS_POP:
+ self._progress_level -= 1
+ elif whence == subunit.PROGRESS_PUSH:
+ self._progress_level += 1
+ elif whence == subunit.PROGRESS_SET:
+ if self._progress_level == 0:
+ self.totalsuites = offset
+ elif whence == subunit.PROGRESS_CUR:
+ raise NotImplementedError
def report_time(self, time):
if self.start_time is None:
@@ -71,7 +85,7 @@ class PlainFormatter(object):
def end_testsuite(self, name, result, reason):
out = ""
- unexpected = 0
+ unexpected = False
if not name in self.test_output:
print "no output for name[%s]" % name
@@ -79,11 +93,13 @@ class PlainFormatter(object):
if result in ("success", "xfail"):
self.suites_ok+=1
else:
- self.output_msg("ERROR: Testsuite[%s]\nREASON: %s\n" % (name, reason or ''))
+ self.output_msg("ERROR: Testsuite[%s]\n" % name)
+ if reason is not None:
+ self.output_msg("REASON: %s\n" % (reason,))
self.suitesfailed.append(name)
if self.immediate and not self.verbose:
out += self.test_output[name]
- unexpected = 1
+ unexpected = True
if not self.immediate:
if not unexpected:
@@ -96,7 +112,7 @@ class PlainFormatter(object):
def start_test(self, testname):
pass
- def end_test(self, testname, result, unexpected, reason):
+ def end_test(self, testname, result, unexpected, reason=None):
if not unexpected:
self.test_output[self.name] = ""
if not self.immediate:
@@ -107,11 +123,9 @@ class PlainFormatter(object):
'success': '.'}.get(result, "?(%s)" % result))
return
- if reason is None:
- reason = ''
- reason = reason.strip()
-
- self.test_output[self.name] += "UNEXPECTED(%s): %s\nREASON: %s\n" % (result, testname, reason)
+ self.test_output[self.name] += "UNEXPECTED(%s): %s\n" % (result, testname)
+ if reason is not None:
+ self.test_output[self.name] += "REASON: %s\n" % (reason.strip(),)
if self.immediate and not self.verbose:
print self.test_output[self.name]
@@ -188,6 +202,10 @@ statistics = {
'TESTS_SKIP': 0,
}
+def handle_sigint(sig, stack):
+ sys.exit(0)
+signal.signal(signal.SIGINT, handle_sigint)
+
msg_ops = PlainFormatter(os.path.join(opts.prefix, "summary"), opts.verbose,
opts.immediate, statistics)
diff --git a/selftest/selftest.pl b/selftest/selftest.pl
index 7bbad62bbf..634db92c92 100755
--- a/selftest/selftest.pl
+++ b/selftest/selftest.pl
@@ -229,6 +229,7 @@ sub run_testsuite($$$$$)
my $pcap_file = setup_pcap($name);
Subunit::start_testsuite($name);
+ Subunit::progress_push();
Subunit::report_time(time());
open(RESULTS, "$cmd 2>&1|");
@@ -249,6 +250,7 @@ sub run_testsuite($$$$$)
unless (close(RESULTS)) {
if ($!) {
+ Subunit::progress_pop();
Subunit::end_testsuite($name, "error", "Unable to run $cmd: $!");
return 0;
} else {
@@ -257,6 +259,7 @@ sub run_testsuite($$$$$)
}
if ($ret & 127) {
+ Subunit::progress_pop();
Subunit::end_testsuite($name, "error", sprintf("Testsuite died with signal %d, %s coredump", ($ret & 127), ($ret & 128) ? "with": "without"));
return 0;
}
@@ -271,6 +274,7 @@ sub run_testsuite($$$$$)
my $exitcode = $ret >> 8;
Subunit::report_time(time());
+ Subunit::progress_pop();
if ($exitcode == 0) {
Subunit::end_testsuite($name, "success");
} else {
@@ -684,7 +688,7 @@ foreach my $fn (@testlists) {
}
}
-Subunit::testsuite_count($#available+1);
+Subunit::progress($#available+1);
Subunit::report_time(time());
foreach (@available) {
diff --git a/selftest/subunithelper.py b/selftest/subunithelper.py
index 517bbe2c90..8659f984d8 100644
--- a/selftest/subunithelper.py
+++ b/selftest/subunithelper.py
@@ -18,6 +18,8 @@
__all__ = ['parse_results']
import re
+import sys
+import subunit
import time
VALID_RESULTS = ['success', 'successful', 'failure', 'fail', 'skip', 'knownfail', 'error', 'xfail', 'skip-testsuite', 'testsuite-failure', 'testsuite-xfail', 'testsuite-success', 'testsuite-error']
@@ -101,8 +103,16 @@ def parse_results(msg_ops, statistics, fh):
msg_ops.end_testsuite(testname, "error", reason)
elif l.startswith("testsuite: "):
msg_ops.start_testsuite(l.split(":", 1)[1].strip())
- elif l.startswith("testsuite-count: "):
- msg_ops.testsuite_count(int(l.split(":", 1)[1].strip()))
+ elif l.startswith("progress: "):
+ arg = l.split(":", 1)[1].strip()
+ if arg == "pop":
+ msg_ops.progress(None, subunit.PROGRESS_POP)
+ elif arg == "push":
+ msg_ops.progress(None, subunit.PROGRESS_PUSH)
+ elif arg[0] in '+-':
+ msg_ops.progress(int(arg), subunit.PROGRESS_CUR)
+ else:
+ msg_ops.progress(int(arg), subunit.PROGRESS_SET)
else:
msg_ops.output_msg(l)
@@ -144,8 +154,21 @@ class SubunitOps(object):
self.end_test(name, "xfail", reason)
def report_time(self, t):
- (sec, min, hour, mday, mon, year, wday, yday, isdst) = time.localtimet(t)
- print "time: %04d-%02d-%02d %02d:%02d:%02d" % (year+1900, mon+1, mday, hour, min, sec)
+ (year, mon, mday, hour, min, sec, wday, yday, isdst) = time.localtime(t)
+ print "time: %04d-%02d-%02d %02d:%02d:%02d" % (year, mon, mday, hour, min, sec)
+
+ def progress(self, offset, whence):
+ if whence == subunit.PROGRESS_CUR and offset > -1:
+ prefix = "+"
+ elif whence == subunit.PROGRESS_PUSH:
+ prefix = ""
+ offset = "push"
+ elif whence == subunit.PROGRESS_POP:
+ prefix = ""
+ offset = "pop"
+ else:
+ prefix = ""
+ print "progress: %s%s" % (prefix, offset)
# The following are Samba extensions:
def start_testsuite(self, name):
@@ -159,11 +182,136 @@ class SubunitOps(object):
def end_testsuite(self, name, result, reason=None):
if reason:
- print "testsuite-$result: %s [" % name
+ print "testsuite-%s: %s [" % (result, name)
print "%s" % reason
print "]"
else:
- print "testsuite-$result: %s" % name
+ print "testsuite-%s: %s" % (result, name)
+
+
+def read_test_regexes(name):
+ f = open(name, 'r')
+ try:
+ for l in f:
+ l = l.strip()
+ if l == "" or l[0] == "#":
+ continue
+ if "#" in l:
+ (regex, reason) = l.split("#", 1)
+ yield (regex.strip(), reason.strip())
+ else:
+ yield l, None
+ finally:
+ f.close()
+
+
+def find_in_list(regexes, fullname):
+ for regex, reason in regexes:
+ if re.match(regex, fullname):
+ if reason is None:
+ return ""
+ return reason
+ return None
+
+
+class FilterOps(object):
+
+ def control_msg(self, msg):
+ pass # We regenerate control messages, so ignore this
+
+ def report_time(self, time):
+ self._ops.report_time(time)
+
+ def progress(self, delta, whence):
+ self._ops.progress(delta, whence)
+
+ def output_msg(self, msg):
+ if self.output is None:
+ sys.stdout.write(msg)
+ else:
+ self.output+=msg
+
+ def start_test(self, testname):
+ if self.prefix is not None:
+ testname = self.prefix + testname
+
+ if self.strip_ok_output:
+ self.output = ""
+
+ self._ops.start_test(testname)
+
+ def end_test(self, testname, result, unexpected, reason):
+ if self.prefix is not None:
+ testname = self.prefix + testname
+
+ if result in ("fail", "failure") and not unexpected:
+ result = "xfail"
+ self.xfail_added+=1
+ self.total_xfail+=1
+ xfail_reason = find_in_list(self.expected_failures, testname)
+ if xfail_reason is not None and result in ("fail", "failure"):
+ result = "xfail"
+ self.xfail_added+=1
+ self.total_xfail+=1
+ reason += xfail_reason
+
+ if result in ("fail", "failure"):
+ self.fail_added+=1
+ self.total_fail+=1
+
+ if result == "error":
+ self.error_added+=1
+ self.total_error+=1
+
+ if self.strip_ok_output:
+ if result not in ("success", "xfail", "skip"):
+ print self.output
+ self.output = None
+
+ self._ops.end_test(testname, result, reason)
+
+ def skip_testsuite(self, name, reason=None):
+ self._ops.skip_testsuite(name, reason)
+
+ def start_testsuite(self, name):
+ self._ops.start_testsuite(name)
+
+ self.error_added = 0
+ self.fail_added = 0
+ self.xfail_added = 0
+
+ def end_testsuite(self, name, result, reason=None):
+ xfail = False
+
+ if self.xfail_added > 0:
+ xfail = True
+ if self.fail_added > 0 or self.error_added > 0:
+ xfail = False
+
+ if xfail and result in ("fail", "failure"):
+ result = "xfail"
+
+ if self.fail_added > 0 and result != "failure":
+ result = "failure"
+ if reason is None:
+ reason = "Subunit/Filter Reason"
+ reason += "\n failures[%d]" % self.fail_added
+
+ if self.error_added > 0 and result != "error":
+ result = "error"
+ if reason is None:
+ reason = "Subunit/Filter Reason"
+ reason += "\n errors[%d]" % self.error_added
+
+ self._ops.end_testsuite(name, result, reason)
- def testsuite_count(self, count):
- print "testsuite-count: %d" % count
+ def __init__(self, prefix, expected_failures, strip_ok_output):
+ self._ops = SubunitOps()
+ self.output = None
+ self.prefix = prefix
+ self.expected_failures = expected_failures
+ self.strip_ok_output = strip_ok_output
+ self.xfail_added = 0
+ self.total_xfail = 0
+ self.total_error = 0
+ self.total_fail = 0
diff --git a/source3/Makefile.in b/source3/Makefile.in
index 97b3275e46..8cc6c66b23 100644
--- a/source3/Makefile.in
+++ b/source3/Makefile.in
@@ -3251,7 +3251,7 @@ selftest:: all torture timelimit
--testlist="$(srcdir)/selftest/tests.sh|" \
--exclude=$(srcdir)/selftest/skip \
--socket-wrapper $(TESTS) | \
- $(PERL) $(selftestdir)/filter-subunit.pl \
+ $(PYTHON) $(selftestdir)/filter-subunit \
--expected-failures=$(srcdir)/selftest/knownfail | \
$(PYTHON) $(selftestdir)/format-subunit --immediate
diff --git a/source3/smbd/process.c b/source3/smbd/process.c
index 6068816ad9..dd120f9bd2 100644
--- a/source3/smbd/process.c
+++ b/source3/smbd/process.c
@@ -2349,9 +2349,21 @@ static bool keepalive_fn(const struct timeval *now, void *private_data)
static bool deadtime_fn(const struct timeval *now, void *private_data)
{
struct smbd_server_connection *sconn = smbd_server_conn;
+
+ if (sconn->allow_smb2) {
+ /* TODO: implement real idle check */
+ if (sconn->smb2.sessions.list) {
+ return true;
+ }
+ DEBUG( 2, ( "Closing idle SMB2 connection\n" ) );
+ messaging_send(smbd_messaging_context(), procid_self(),
+ MSG_SHUTDOWN, &data_blob_null);
+ return false;
+ }
+
if ((conn_num_open(sconn) == 0)
|| (conn_idle_all(sconn, now->tv_sec))) {
- DEBUG( 2, ( "Closing idle connection\n" ) );
+ DEBUG( 2, ( "Closing idle SMB1 connection\n" ) );
messaging_send(smbd_messaging_context(), procid_self(),
MSG_SHUTDOWN, &data_blob_null);
return False;
diff --git a/source4/lib/ldb/tests/python/deletetest.py b/source4/lib/ldb/tests/python/deletetest.py
index 13bec7efae..eff92c5f33 100755
--- a/source4/lib/ldb/tests/python/deletetest.py
+++ b/source4/lib/ldb/tests/python/deletetest.py
@@ -8,6 +8,7 @@ import os
sys.path.append("bin/python")
sys.path.append("../lib/subunit/python")
+sys.path.append("../lib/testtools")
import samba.getopt as options
diff --git a/source4/lib/ldb/tests/python/urgent_replication.py b/source4/lib/ldb/tests/python/urgent_replication.py
index b8df072bf3..28b3a5fa78 100755
--- a/source4/lib/ldb/tests/python/urgent_replication.py
+++ b/source4/lib/ldb/tests/python/urgent_replication.py
@@ -2,16 +2,13 @@
# -*- coding: utf-8 -*-
# This is a port of the original in testprogs/ejs/ldap.js
-import getopt
import optparse
import sys
-import time
-import random
-import base64
import os
sys.path.append("bin/python")
sys.path.append("../lib/subunit/python")
+sys.path.append("../lib/testtools")
import samba.getopt as options
@@ -26,9 +23,6 @@ from samba import glue
from subunit.run import SubunitTestRunner
import unittest
-from samba.ndr import ndr_pack, ndr_unpack
-from samba.dcerpc import security
-
parser = optparse.OptionParser("urgent_replication [options] <host>")
sambaopts = options.SambaOptions(parser)
parser.add_option_group(sambaopts)
diff --git a/source4/script/installmisc.sh b/source4/script/installmisc.sh
index eba5996e13..6a53b988f5 100755
--- a/source4/script/installmisc.sh
+++ b/source4/script/installmisc.sh
@@ -82,12 +82,13 @@ cp setup/provision.smb.conf.member $SETUPDIR || exit 1
cp setup/provision.smb.conf.standalone $SETUPDIR || exit 1
echo "Installing external python libraries"
-mkdir -p $PYTHONDIR/samba_external || exit 1
+mkdir -p $DESTDIR$PYTHONDIR || exit 1
for p in $($PYTHON scripting/python/samba_external/missing.py);
do
- echo "Installing missing python library $p"
- mkdir -p $PYTHONDIR/samba_external/$p
- cp -r ../lib/$p/* $PYTHONDIR/samba_external/$p/ || exit 1
+ package=`basename $p`
+ echo "Installing missing python package $package"
+ mkdir -p $DESTDIR$PYTHONDIR/$package
+ cp -r ../lib/$p/* $DESTDIR$PYTHONDIR/$package/ || exit 1
done
echo "Installing stuff in $PRIVATEDIR"
diff --git a/source4/scripting/python/config.mk b/source4/scripting/python/config.mk
index d36e136346..db35669e1a 100644
--- a/source4/scripting/python/config.mk
+++ b/source4/scripting/python/config.mk
@@ -23,9 +23,9 @@ python_glue_OBJ_FILES = $(pyscriptsrcdir)/pyglue.o
$(python_glue_OBJ_FILES): CFLAGS+=-I$(ldbsrcdir)
-_PY_FILES = $(shell find $(pyscriptsrcdir)/samba ../lib/subunit/python -type f -name "*.py")
+_PY_FILES = $(shell find $(pyscriptsrcdir)/samba -type f -name "*.py")
-$(eval $(foreach pyfile, $(_PY_FILES),$(call python_py_module_template,$(patsubst $(pyscriptsrcdir)/%,%,$(subst ../lib/subunit/python,,$(pyfile))),$(pyfile))))
+$(eval $(foreach pyfile, $(_PY_FILES),$(call python_py_module_template,$(patsubst $(pyscriptsrcdir)/%,%,$(pyfile)),$(pyfile))))
PYDOCTOR = pydoctor
PYDOCTOR_OPTIONS = --project-name Samba --project-url http://www.samba.org/ \
diff --git a/source4/scripting/python/samba_external/missing.py b/source4/scripting/python/samba_external/missing.py
index 50bda65a21..d3dd2b9290 100755
--- a/source4/scripting/python/samba_external/missing.py
+++ b/source4/scripting/python/samba_external/missing.py
@@ -1,12 +1,17 @@
#!/usr/bin/python
# work out what python external libraries we need to install
+external_libs = {
+ "dns.resolver": "dnspython/dns",
+ "subunit": "subunit/python/subunit",
+ "testtools": "testtools/testtools"}
list = []
-try:
- import dns.resolver
-except:
- list.append("dnspython")
+for module, package in external_libs.iteritems():
+ try:
+ __import__(module)
+ except ImportError:
+ list.append(package)
print ' '.join(list)
diff --git a/source4/selftest/config.mk b/source4/selftest/config.mk
index 6057de68f9..c4c5e190a9 100644
--- a/source4/selftest/config.mk
+++ b/source4/selftest/config.mk
@@ -13,7 +13,7 @@ ST_DONE_TEST = @test -f $(selftest_prefix)/st_done || { echo "SELFTEST FAILED";
SELFTEST_NOSLOW_OPTS = --exclude=$(srcdir)/selftest/slow
SELFTEST_QUICK_OPTS = $(SELFTEST_NOSLOW_OPTS) --quick --include=$(srcdir)/selftest/quick
-FILTER_XFAIL = $(PERL) $(selftestdir)/filter-subunit.pl --expected-failures=$(srcdir)/selftest/knownfail
+FILTER_XFAIL = $(PYTHON) $(selftestdir)/filter-subunit --expected-failures=$(srcdir)/selftest/knownfail
SUBUNIT_FORMATTER ?= $(PYTHON) $(selftestdir)/format-subunit --prefix=${selftest_prefix} --immediate
FORMAT_TEST_OUTPUT = $(FILTER_XFAIL) | $(SUBUNIT_FORMATTER)
diff --git a/source4/selftest/knownfail b/source4/selftest/knownfail
index a6b7161019..60606ace56 100644
--- a/source4/selftest/knownfail
+++ b/source4/selftest/knownfail
@@ -3,63 +3,63 @@
#
# "make test" will not report failures for tests listed here and will consider
# a successful run for any of these tests an error.
-local.resolve.*.async
-local.iconv.*.next_codepoint()
-base.delete.*.deltest17
-base.delete.*.deltest20a
-base.delete.*.deltest20b
-raw.rename.*.osxrename
-raw.rename.*.directory rename
-rpc.winreg.*security
+samba4.local.resolve.*.async
+samba4.local.iconv.*.next_codepoint()
+samba4..*base.delete.*.deltest17
+samba4..*base.delete.*.deltest20a
+samba4..*base.delete.*.deltest20b
+samba4.raw.rename.*.osxrename
+samba4.raw.rename.*.directory rename
+samba4.rpc.winreg.*security
samba4.local.registry.(dir|ldb).check hive security
samba4.local.registry.local.security
-rpc.wkssvc
-rpc.handles.*.lsarpc-shared
-rpc.handles.*.mixed-shared
-rpc.epmapper.*.Insert
-rpc.epmapper.*.InqObject
-rpc.drsuapi.*
-rpc.lsalookup
-rpc.cracknames
-rpc.netlogon.*.LogonUasLogon
-rpc.netlogon.*.LogonUasLogoff
-rpc.netlogon.*.DatabaseSync
-rpc.netlogon.*.DatabaseSync2
-rpc.netlogon.*.LogonControl
-rpc.netlogon.*.LogonControl2
-rpc.netlogon.*.DsrEnumerateDomainTrusts
-rpc.netlogon.*.NetrEnumerateTrustedDomains
-rpc.netlogon.*.NetrEnumerateTrustedDomainsEx
-rpc.netlogon.*.DsrGetDcSiteCoverageW
-rpc.netlogon.*.DsRAddressToSitenamesW
-rpc.netlogon.*.DsRAddressToSitenamesExW
-rpc.netlogon.*.GetPassword
-rpc.netlogon.*.GetTrustPasswords
-rpc.netlogon.*.DatabaseRedo
-rpc.netlogon.*.ServerGetTrustInfo
-rpc.netlogon.*.GetDomainInfo # Also fails against W2K8 (but in a different way)
+samba4.rpc.wkssvc
+samba4.rpc.handles.*.lsarpc-shared
+samba4.rpc.handles.*.mixed-shared
+samba4.rpc.epmapper.*.Insert
+samba4.rpc.epmapper.*.InqObject
+samba4.rpc.drsuapi.*
+samba4.rpc.lsalookup
+samba4.rpc.cracknames
+samba4.rpc.netlogon.*.LogonUasLogon
+samba4.rpc.netlogon.*.LogonUasLogoff
+samba4.rpc.netlogon.*.DatabaseSync
+samba4.rpc.netlogon.*.DatabaseSync2
+samba4.rpc.netlogon.*.LogonControl
+samba4.rpc.netlogon.*.LogonControl2
+samba4.rpc.netlogon.*.DsrEnumerateDomainTrusts
+samba4.rpc.netlogon.*.NetrEnumerateTrustedDomains
+samba4.rpc.netlogon.*.NetrEnumerateTrustedDomainsEx
+samba4.rpc.netlogon.*.DsrGetDcSiteCoverageW
+samba4.rpc.netlogon.*.DsRAddressToSitenamesW
+samba4.rpc.netlogon.*.DsRAddressToSitenamesExW
+samba4.rpc.netlogon.*.GetPassword
+samba4.rpc.netlogon.*.GetTrustPasswords
+samba4.rpc.netlogon.*.DatabaseRedo
+samba4.rpc.netlogon.*.ServerGetTrustInfo
+samba4.rpc.netlogon.*.GetDomainInfo # Also fails against W2K8 (but in a different way)
samba4.rpc.samr.passwords.pwdlastset # Not provided by Samba 4 yet
samba4.rpc.samr.passwords.badpwdcount # Not provided by Samba 4 yet
samba4.rpc.samr.passwords.lockout
samba4.rpc.samr.users.privileges
samba4.rpc.spoolss # Not provided by Samba 4 yet
-base.charset.*.Testing partial surrogate
+samba4.base.charset.*.Testing partial surrogate
.*net.api.delshare.* # DelShare isn't implemented yet
^samba4.net.api.become.dc.*$ # TODO: this should work!!!
-rap.*netservergetinfo
-smb2.persistent.handles1
+samba4.rap.*netservergetinfo
+samba4.smb2.persistent.handles1
samba4.winbind.struct.*.SHOW_SEQUENCE # Not yet working in winbind
samba4.winbind.struct.*.GETPWENT # Not yet working in winbind
samba4.winbind.struct.*.SETPWENT # Not yet working in winbind
samba4.winbind.struct.*.LOOKUP_NAME_SID # Not yet working in winbind
samba4.winbind.struct.*.LIST_GROUPS
-^samba4.*base.delaywrite.*update of write time and SMBwrite truncate$
-^samba4.*base.delaywrite.*update of write time and SMBwrite truncate expand$
-^samba4.*base.delaywrite.*delayed update of write time 3a$
-^samba4.*base.delaywrite.*delayed update of write time 3c$
-^samba4.*base.delaywrite.*update of write time using SET_END_OF_FILE$
-^samba4.*base.delaywrite.*update of write time using SET_ALLOCATION_SIZE$
-^samba4.ldap.python \(dc\).Test add_ldif\(\) with BASE64 security descriptor input using WRONG domain SID$
+samba4.*base.delaywrite.*update of write time and SMBwrite truncate$
+samba4.*base.delaywrite.*update of write time and SMBwrite truncate expand$
+samba4.*base.delaywrite.*delayed update of write time 3a$
+samba4.*base.delaywrite.*delayed update of write time 3c$
+samba4.*base.delaywrite.*update of write time using SET_END_OF_FILE$
+samba4.*base.delaywrite.*update of write time using SET_ALLOCATION_SIZE$
+samba4.ldap.python \(dc\).Test add_ldif\(\) with BASE64 security descriptor input using WRONG domain SID$
# some operations don't work over the CIFS NTVFS backend yet (eg. root_fid)
samba4.ntvfs.cifs.base.createx_access
samba4.ntvfs.cifs.base.createx_sharemodes_dir
diff --git a/source4/selftest/tests.sh b/source4/selftest/tests.sh
index f832b4f15e..bf6127f4a5 100755
--- a/source4/selftest/tests.sh
+++ b/source4/selftest/tests.sh
@@ -85,7 +85,7 @@ smb4torture="$samba4bindir/smbtorture${EXEEXT}"
if which tap2subunit 2>/dev/null; then
TAP2SUBUNIT=tap2subunit
else
- TAP2SUBUNIT="PYTHONPATH=$samba4srcdir/../lib/subunit/python $PYTHON $samba4srcdir/../lib/subunit/filters/tap2subunit"
+ TAP2SUBUNIT="PYTHONPATH=$samba4srcdir/../lib/subunit/python:$samba4srcdir/../lib/testtools $PYTHON $samba4srcdir/../lib/subunit/filters/tap2subunit"
fi
$smb4torture -V
@@ -466,15 +466,15 @@ plantest "samba3sam.python" none PYTHONPATH="$PYTHONPATH:$samba4srcdir/dsdb/samd
plantest "subunit.python" none $SUBUNITRUN subunit
plantest "rpcecho.python" dc:local $SUBUNITRUN samba.tests.dcerpc.rpcecho
plantest "winreg.python" dc:local $SUBUNITRUN -U\$USERNAME%\$PASSWORD samba.tests.dcerpc.registry
-plantest "ldap.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python" $PYTHON $samba4srcdir/lib/ldb/tests/python/ldap.py $CONFIGURATION \$SERVER -U\$USERNAME%\$PASSWORD -W \$DOMAIN
-plantest "urgent_replication.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python" $PYTHON $samba4srcdir/lib/ldb/tests/python/urgent_replication.py \$PREFIX_ABS/dc/private/sam.ldb
-plantest "ldap_schema.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python" $PYTHON $samba4srcdir/lib/ldb/tests/python/ldap_schema.py $CONFIGURATION \$SERVER -U\$USERNAME%\$PASSWORD -W \$DOMAIN
+plantest "ldap.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python:../lib/testtools" $PYTHON $samba4srcdir/lib/ldb/tests/python/ldap.py $CONFIGURATION \$SERVER -U\$USERNAME%\$PASSWORD -W \$DOMAIN
+plantest "urgent_replication.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python:../lib/testtools" $PYTHON $samba4srcdir/lib/ldb/tests/python/urgent_replication.py \$PREFIX_ABS/dc/private/sam.ldb
+plantest "ldap_schema.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python:../lib/testtools" $PYTHON $samba4srcdir/lib/ldb/tests/python/ldap_schema.py $CONFIGURATION \$SERVER -U\$USERNAME%\$PASSWORD -W \$DOMAIN
plantest "ldap.possibleInferiors.python" dc $PYTHON $samba4srcdir/dsdb/samdb/ldb_modules/tests/possibleinferiors.py $CONFIGURATION ldap://\$SERVER -U\$USERNAME%\$PASSWORD -W \$DOMAIN
-plantest "ldap.secdesc.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python" $PYTHON $samba4srcdir/lib/ldb/tests/python/sec_descriptor.py $CONFIGURATION \$SERVER -U\$USERNAME%\$PASSWORD -W \$DOMAIN
-plantest "ldap.acl.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python" $PYTHON $samba4srcdir/lib/ldb/tests/python/acl.py $CONFIGURATION \$SERVER -U\$USERNAME%\$PASSWORD -W \$DOMAIN
+plantest "ldap.secdesc.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python:../lib/testtools" $PYTHON $samba4srcdir/lib/ldb/tests/python/sec_descriptor.py $CONFIGURATION \$SERVER -U\$USERNAME%\$PASSWORD -W \$DOMAIN
+plantest "ldap.acl.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python:../lib/testtools" $PYTHON $samba4srcdir/lib/ldb/tests/python/acl.py $CONFIGURATION \$SERVER -U\$USERNAME%\$PASSWORD -W \$DOMAIN
plantest "xattr.python" none $SUBUNITRUN samba.tests.xattr
plantest "ntacls.python" none $SUBUNITRUN samba.tests.ntacls
-plantest "deletetest.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python" $PYTHON $samba4srcdir/lib/ldb/tests/python/deletetest.py $CONFIGURATION \$SERVER -U\$USERNAME%\$PASSWORD -W \$DOMAIN
+plantest "deletetest.python" dc PYTHONPATH="$PYTHONPATH:../lib/subunit/python:../lib/testtools" $PYTHON $samba4srcdir/lib/ldb/tests/python/deletetest.py $CONFIGURATION \$SERVER -U\$USERNAME%\$PASSWORD -W \$DOMAIN
plantest "blackbox.samba3dump" none $PYTHON $samba4srcdir/scripting/bin/samba3dump $samba4srcdir/../testdata/samba3
rm -rf $PREFIX/upgrade
plantest "blackbox.upgrade" none $PYTHON $samba4srcdir/setup/upgrade_from_s3 $CONFIGURATION --targetdir=$PREFIX/upgrade $samba4srcdir/../testdata/samba3 ../testdata/samba3/smb.conf
diff --git a/source4/torture/basic/denytest.c b/source4/torture/basic/denytest.c
index 81e90f88c8..8a5c24ca79 100644
--- a/source4/torture/basic/denytest.c
+++ b/source4/torture/basic/denytest.c
@@ -2095,13 +2095,6 @@ static int cxd_find_known(struct createx_data *cxd)
return -1;
}
-#define FILL_NTCREATEX(_struct, _init...) \
- do { \
- (_struct)->generic.level = RAW_OPEN_NTCREATEX; \
- (_struct)->ntcreatex.in \
- = (__typeof__((_struct)->ntcreatex.in)) {_init}; \
- } while (0)
-
#define CREATEX_NAME "\\createx_dir"
static bool createx_make_dir(struct torture_context *tctx,
@@ -2124,15 +2117,16 @@ static bool createx_make_file(struct torture_context *tctx,
bool ret = true;
NTSTATUS status;
- FILL_NTCREATEX(&open_parms,
- .flags = 0,
- .access_mask = SEC_RIGHTS_FILE_ALL,
- .file_attr = FILE_ATTRIBUTE_NORMAL,
- .share_access = 0,
- .open_disposition = NTCREATEX_DISP_CREATE,
- .create_options = 0,
- .fname = fname
- );
+ ZERO_STRUCT(open_parms);
+ open_parms.generic.level = RAW_OPEN_NTCREATEX;
+ open_parms.ntcreatex.in.flags = 0;
+ open_parms.ntcreatex.in.access_mask = SEC_RIGHTS_FILE_ALL;
+ open_parms.ntcreatex.in.file_attr = FILE_ATTRIBUTE_NORMAL;
+ open_parms.ntcreatex.in.share_access = 0;
+ open_parms.ntcreatex.in.open_disposition = NTCREATEX_DISP_CREATE;
+ open_parms.ntcreatex.in.create_options = 0;
+ open_parms.ntcreatex.in.fname = fname;
+
status = smb_raw_open(tree, mem_ctx, &open_parms);
CHECK_STATUS(status, NT_STATUS_OK);
@@ -2146,30 +2140,30 @@ static bool createx_make_file(struct torture_context *tctx,
static void createx_fill_dir(union smb_open *open_parms, int accessmode,
int sharemode, const char *fname)
{
- FILL_NTCREATEX(open_parms,
- .flags = 0,
- .access_mask = accessmode,
- .file_attr = FILE_ATTRIBUTE_DIRECTORY,
- .share_access = sharemode,
- .open_disposition = NTCREATEX_DISP_OPEN_IF,
- .create_options = NTCREATEX_OPTIONS_DIRECTORY,
- .fname = fname
- );
+ ZERO_STRUCTP(open_parms);
+ open_parms->generic.level = RAW_OPEN_NTCREATEX;
+ open_parms->ntcreatex.in.flags = 0;
+ open_parms->ntcreatex.in.access_mask = accessmode;
+ open_parms->ntcreatex.in.file_attr = FILE_ATTRIBUTE_DIRECTORY;
+ open_parms->ntcreatex.in.share_access = sharemode;
+ open_parms->ntcreatex.in.open_disposition = NTCREATEX_DISP_OPEN_IF;
+ open_parms->ntcreatex.in.create_options = NTCREATEX_OPTIONS_DIRECTORY;
+ open_parms->ntcreatex.in.fname = fname;
}
static void createx_fill_file(union smb_open *open_parms, int accessmode,
int sharemode, const char *fname)
{
- FILL_NTCREATEX(open_parms,
- .flags = 0,
- .access_mask = accessmode,
- .file_attr = FILE_ATTRIBUTE_NORMAL,
- .share_access = sharemode,
- .open_disposition = NTCREATEX_DISP_OPEN_IF,
- .create_options = 0,
- .fname = fname,
- .root_fid = { .fnum = 0 }
- );
+ ZERO_STRUCTP(open_parms);
+ open_parms->generic.level = RAW_OPEN_NTCREATEX;
+ open_parms->ntcreatex.in.flags = 0;
+ open_parms->ntcreatex.in.access_mask = accessmode;
+ open_parms->ntcreatex.in.file_attr = FILE_ATTRIBUTE_NORMAL;
+ open_parms->ntcreatex.in.share_access = sharemode;
+ open_parms->ntcreatex.in.open_disposition = NTCREATEX_DISP_OPEN_IF;
+ open_parms->ntcreatex.in.create_options = 0;
+ open_parms->ntcreatex.in.fname = fname;
+ open_parms->ntcreatex.in.root_fid.fnum = 0;
}
static int data_file_fd = -1;
@@ -2184,15 +2178,16 @@ static bool createx_test_dir(struct torture_context *tctx,
union smb_open open_parms;
/* bypass original handle to guarantee creation */
- FILL_NTCREATEX(&open_parms,
- .flags = 0,
- .access_mask = SEC_RIGHTS_FILE_ALL,
- .file_attr = FILE_ATTRIBUTE_NORMAL,
- .share_access = 0,
- .open_disposition = NTCREATEX_DISP_CREATE,
- .create_options = 0,
- .fname = CREATEX_NAME "\\" KNOWN
- );
+ ZERO_STRUCT(open_parms);
+ open_parms.generic.level = RAW_OPEN_NTCREATEX;
+ open_parms.ntcreatex.in.flags = 0;
+ open_parms.ntcreatex.in.access_mask = SEC_RIGHTS_FILE_ALL;
+ open_parms.ntcreatex.in.file_attr = FILE_ATTRIBUTE_NORMAL;
+ open_parms.ntcreatex.in.share_access = 0;
+ open_parms.ntcreatex.in.open_disposition = NTCREATEX_DISP_CREATE;
+ open_parms.ntcreatex.in.create_options = 0;
+ open_parms.ntcreatex.in.fname = CREATEX_NAME "\\" KNOWN;
+
status = smb_raw_open(tree, mem_ctx, &open_parms);
CHECK_STATUS(status, NT_STATUS_OK);
smbcli_close(tree, open_parms.ntcreatex.out.file.fnum);
@@ -2200,32 +2195,32 @@ static bool createx_test_dir(struct torture_context *tctx,
result[CXD_DIR_ENUMERATE] = NT_STATUS_OK;
/* try to create a child */
- FILL_NTCREATEX(&open_parms,
- .flags = 0,
- .access_mask = SEC_RIGHTS_FILE_ALL,
- .file_attr = FILE_ATTRIBUTE_NORMAL,
- .share_access = 0,
- .open_disposition = NTCREATEX_DISP_CREATE,
- .create_options = 0,
- .fname = CHILD,
- .root_fid = { .fnum = fnum }
- );
+ ZERO_STRUCT(open_parms);
+ open_parms.generic.level = RAW_OPEN_NTCREATEX;
+ open_parms.ntcreatex.in.flags = 0;
+ open_parms.ntcreatex.in.access_mask = SEC_RIGHTS_FILE_ALL;
+ open_parms.ntcreatex.in.file_attr = FILE_ATTRIBUTE_NORMAL;
+ open_parms.ntcreatex.in.share_access = 0;
+ open_parms.ntcreatex.in.open_disposition = NTCREATEX_DISP_CREATE;
+ open_parms.ntcreatex.in.create_options = 0;
+ open_parms.ntcreatex.in.fname = CHILD;
+ open_parms.ntcreatex.in.root_fid.fnum = fnum;
result[CXD_DIR_CREATE_CHILD] =
smb_raw_open(tree, mem_ctx, &open_parms);
smbcli_close(tree, open_parms.ntcreatex.out.file.fnum);
/* try to traverse dir to known good file */
- FILL_NTCREATEX(&open_parms,
- .flags = 0,
- .access_mask = SEC_RIGHTS_FILE_ALL,
- .file_attr = FILE_ATTRIBUTE_NORMAL,
- .share_access = 0,
- .open_disposition = NTCREATEX_DISP_OPEN,
- .create_options = 0,
- .fname = KNOWN,
- .root_fid = {.fnum = fnum}
- );
+ ZERO_STRUCT(open_parms);
+ open_parms.generic.level = RAW_OPEN_NTCREATEX;
+ open_parms.ntcreatex.in.flags = 0;
+ open_parms.ntcreatex.in.access_mask = SEC_RIGHTS_FILE_ALL;
+ open_parms.ntcreatex.in.file_attr = FILE_ATTRIBUTE_NORMAL;
+ open_parms.ntcreatex.in.share_access = 0;
+ open_parms.ntcreatex.in.open_disposition = NTCREATEX_DISP_OPEN;
+ open_parms.ntcreatex.in.create_options = 0;
+ open_parms.ntcreatex.in.fname = KNOWN;
+ open_parms.ntcreatex.in.root_fid.fnum = fnum;
result[CXD_DIR_TRAVERSE] =
smb_raw_open(tree, mem_ctx, &open_parms);
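
The denytest.c hunks above drop the FILL_NTCREATEX() macro, which relied on GNU C extensions (a named variadic macro parameter "_init...", __typeof__, and designated initializers inside a compound literal), and open-code every call site as ZERO_STRUCT()/ZERO_STRUCTP() followed by plain assignments. Purely as an illustration of the same portable idiom, and not something this patch adds, the repeated assignments could also be gathered into an ordinary helper function; the sketch below uses a hypothetical toy_open structure with an abbreviated field list instead of the real union smb_open:

/* Sketch only: toy_open is a stand-in for union smb_open and lists just a
 * few of the real ntcreatex.in fields.  No compiler extensions needed. */
#include <string.h>

struct toy_open {
	int level;				/* stands in for generic.level */
	struct {
		unsigned int flags;
		unsigned int access_mask;
		unsigned int open_disposition;
		const char *fname;
	} in;
};

static void fill_ntcreatex_portable(struct toy_open *op,
				    unsigned int access_mask,
				    unsigned int open_disposition,
				    const char *fname)
{
	memset(op, 0, sizeof(*op));		/* what ZERO_STRUCTP() boils down to */
	op->level = 1;				/* placeholder for RAW_OPEN_NTCREATEX */
	op->in.flags = 0;			/* already zero; kept for readability */
	op->in.access_mask = access_mask;
	op->in.open_disposition = open_disposition;
	op->in.fname = fname;
}

A caller would then pass the per-test access mask, disposition and name in one line, keeping the portability gain of the patch without repeating the full assignment block at every call site.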
diff --git a/source4/torture/raw/open.c b/source4/torture/raw/open.c
index e37fd8e09a..42f693e78c 100644
--- a/source4/torture/raw/open.c
+++ b/source4/torture/raw/open.c
@@ -1787,17 +1787,9 @@ done:
return ret;
}
-#define FILL_NTCREATEX(_struct, _init...) \
- do { \
- (_struct)->generic.level = RAW_OPEN_NTCREATEX; \
- (_struct)->ntcreatex.in \
- = (typeof((_struct)->ntcreatex.in)) {_init};\
- } while (0)
-
static bool test_ntcreatex_opendisp_dir(struct torture_context *tctx,
struct smbcli_state *cli)
{
- union smb_open io;
const char *dname = BASEDIR "\\torture_ntcreatex_opendisp_dir";
NTSTATUS status;
bool ret = true;
@@ -1822,20 +1814,21 @@ static bool test_ntcreatex_opendisp_dir(struct torture_context *tctx,
{ 6, true, NT_STATUS_INVALID_PARAMETER },
{ 6, false, NT_STATUS_INVALID_PARAMETER },
};
+ union smb_open io;
+
+ ZERO_STRUCT(io);
+ io.generic.level = RAW_OPEN_NTCREATEX;
+ io.ntcreatex.in.flags = NTCREATEX_FLAGS_EXTENDED;
+ io.ntcreatex.in.access_mask = SEC_FLAG_MAXIMUM_ALLOWED;
+ io.ntcreatex.in.file_attr = FILE_ATTRIBUTE_DIRECTORY;
+ io.ntcreatex.in.share_access = NTCREATEX_SHARE_ACCESS_READ | NTCREATEX_SHARE_ACCESS_WRITE;
+ io.ntcreatex.in.create_options = NTCREATEX_OPTIONS_DIRECTORY;
+ io.ntcreatex.in.fname = dname;
if (!torture_setup_dir(cli, BASEDIR)) {
return false;
}
- FILL_NTCREATEX(&io,
- .flags = NTCREATEX_FLAGS_EXTENDED,
- .access_mask = SEC_FLAG_MAXIMUM_ALLOWED,
- .file_attr = FILE_ATTRIBUTE_DIRECTORY,
- .share_access = NTCREATEX_SHARE_ACCESS_READ | NTCREATEX_SHARE_ACCESS_WRITE,
- .create_options = NTCREATEX_OPTIONS_DIRECTORY,
- .fname = dname,
- );
-
smbcli_rmdir(cli->tree, dname);
smbcli_unlink(cli->tree, dname);
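
One subtle difference between the two idioms, worth noting even though it is harmless here: the old compound-literal assignment replaced only ntcreatex.in, while ZERO_STRUCT()/ZERO_STRUCTP() clear the entire union, including any stale ntcreatex.out left over from a previous call. The tests above only read ntcreatex.out after smb_raw_open() has filled it in, so behaviour is unchanged. A minimal sketch with a hypothetical stand-in union (not the real union smb_open) shows the contrast; the old form builds only under GCC/Clang:

#include <string.h>

union toy_smb_open {
	struct { int level; } generic;
	struct {
		int level;			/* aliases generic.level, as in Samba */
		struct { unsigned int flags; const char *fname; } in;
		struct { int fnum; } out;
	} ntcreatex;
};

/* Old style: resets 'in' via a compound literal, leaves 'out' untouched. */
static void reset_in_only(union toy_smb_open *io, const char *fname)
{
	io->generic.level = 1;
	io->ntcreatex.in = (__typeof__(io->ntcreatex.in)) { .fname = fname };
}

/* New style: zero the whole union first, then assign the needed fields. */
static void reset_whole_union(union toy_smb_open *io, const char *fname)
{
	memset(io, 0, sizeof(*io));		/* ZERO_STRUCT()-like */
	io->generic.level = 1;
	io->ntcreatex.in.flags = 0;
	io->ntcreatex.in.fname = fname;
}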
diff --git a/source4/torture/smbtorture.c b/source4/torture/smbtorture.c
index 53e860a144..2aa340ee36 100644
--- a/source4/torture/smbtorture.c
+++ b/source4/torture/smbtorture.c
@@ -354,12 +354,18 @@ static void simple_warning(struct torture_context *test,
fprintf(stderr, "WARNING: %s\n", comment);
}
+static void simple_progress(struct torture_context *test,
+ int offset, enum torture_progress_whence whence)
+{
+}
+
const static struct torture_ui_ops std_ui_ops = {
.comment = simple_comment,
.warning = simple_warning,
.suite_start = simple_suite_start,
.suite_finish = simple_suite_finish,
- .test_result = simple_test_result
+ .test_result = simple_test_result,
+ .progress = simple_progress,
};
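
The smbtorture.c hunk gives the standard console UI a no-op simple_progress() so that std_ui_ops now fills in the .progress hook of struct torture_ui_ops (the hook itself is presumably introduced elsewhere in this patch). As a purely hypothetical sketch, built only from the signature visible above and relying on the file's existing includes and declarations, a UI that wanted to surface these events could log the raw arguments instead; the enum values are printed as integers because their names are not shown in this diff:

/* Hypothetical alternative, not part of the patch. */
static void verbose_progress(struct torture_context *test,
			     int offset, enum torture_progress_whence whence)
{
	fprintf(stderr, "progress: offset=%d whence=%d\n", offset, (int)whence);
}

Plugging it in would just mean setting .progress = verbose_progress in a torture_ui_ops initializer, mirroring the std_ui_ops change above.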