author     Lorry Tar Creator <lorry-tar-importer@lorry>  2017-06-27 06:07:23 +0000
committer  Lorry Tar Creator <lorry-tar-importer@lorry>  2017-06-27 06:07:23 +0000
commit     1bf1084f2b10c3b47fd1a588d85d21ed0eb41d0c (patch)
tree       46dcd36c86e7fbc6e5df36deb463b33e9967a6f7 /Source/ThirdParty/gtest
parent     32761a6cee1d0dee366b885b7b9c777e67885688 (diff)
download   WebKitGtk-tarball-master.tar.gz
Diffstat (limited to 'Source/ThirdParty/gtest')
-rw-r--r--  Source/ThirdParty/gtest/CHANGES  98
-rwxr-xr-x  Source/ThirdParty/gtest/CMakeLists.txt  53
-rw-r--r--  Source/ThirdParty/gtest/CONTRIBUTORS  36
-rw-r--r--  Source/ThirdParty/gtest/COPYING  28
-rw-r--r--  Source/ThirdParty/gtest/GNUmakefile.am  57
-rw-r--r--  Source/ThirdParty/gtest/README  417
-rw-r--r--  Source/ThirdParty/gtest/README.WebKit  16
-rw-r--r--  Source/ThirdParty/gtest/codegear/gtest.cbproj  138
-rw-r--r--  Source/ThirdParty/gtest/codegear/gtest.groupproj  54
-rw-r--r--  Source/ThirdParty/gtest/codegear/gtest_all.cc  38
-rw-r--r--  Source/ThirdParty/gtest/codegear/gtest_link.cc  40
-rw-r--r--  Source/ThirdParty/gtest/codegear/gtest_main.cbproj  82
-rw-r--r--  Source/ThirdParty/gtest/codegear/gtest_unittest.cbproj  88
-rw-r--r--  Source/ThirdParty/gtest/configure.ac  67
-rw-r--r--  Source/ThirdParty/gtest/include/gtest/gtest-param-test.h.pump  457
-rw-r--r--  Source/ThirdParty/gtest/include/gtest/internal/gtest-param-util-generated.h.pump  301
-rw-r--r--  Source/ThirdParty/gtest/include/gtest/internal/gtest-tuple.h.pump  336
-rw-r--r--  Source/ThirdParty/gtest/include/gtest/internal/gtest-type-util.h.pump  287
-rw-r--r--  Source/ThirdParty/gtest/m4/acx_pthread.m4  363
-rw-r--r--  Source/ThirdParty/gtest/m4/gtest.m4  74
-rw-r--r--  Source/ThirdParty/gtest/msvc/gtest-md.vcxproj.filters  71
-rwxr-xr-x  Source/ThirdParty/gtest/run_tests.py  60
-rw-r--r--  Source/ThirdParty/gtest/samples/prime_tables.h  123
-rw-r--r--  Source/ThirdParty/gtest/samples/sample1.cc  68
-rw-r--r--  Source/ThirdParty/gtest/samples/sample1.h  43
-rw-r--r--  Source/ThirdParty/gtest/samples/sample10_unittest.cc  145
-rw-r--r--  Source/ThirdParty/gtest/samples/sample1_unittest.cc  153
-rw-r--r--  Source/ThirdParty/gtest/samples/sample2.cc  56
-rw-r--r--  Source/ThirdParty/gtest/samples/sample2.h  86
-rw-r--r--  Source/ThirdParty/gtest/samples/sample2_unittest.cc  109
-rw-r--r--  Source/ThirdParty/gtest/samples/sample3-inl.h  173
-rw-r--r--  Source/ThirdParty/gtest/samples/sample3_unittest.cc  151
-rw-r--r--  Source/ThirdParty/gtest/samples/sample4.cc  46
-rw-r--r--  Source/ThirdParty/gtest/samples/sample4.h  53
-rw-r--r--  Source/ThirdParty/gtest/samples/sample4_unittest.cc  45
-rw-r--r--  Source/ThirdParty/gtest/samples/sample5_unittest.cc  199
-rw-r--r--  Source/ThirdParty/gtest/samples/sample6_unittest.cc  224
-rw-r--r--  Source/ThirdParty/gtest/samples/sample7_unittest.cc  132
-rw-r--r--  Source/ThirdParty/gtest/samples/sample8_unittest.cc  173
-rw-r--r--  Source/ThirdParty/gtest/samples/sample9_unittest.cc  160
-rwxr-xr-x  Source/ThirdParty/gtest/scripts/fuse_gtest_files.py  250
-rwxr-xr-x  Source/ThirdParty/gtest/scripts/gen_gtest_pred_impl.py  733
-rwxr-xr-x  Source/ThirdParty/gtest/scripts/gtest-config.in  274
-rwxr-xr-x  Source/ThirdParty/gtest/scripts/pump.py  835
-rwxr-xr-x  Source/ThirdParty/gtest/scripts/upload.py  1387
-rwxr-xr-x  Source/ThirdParty/gtest/scripts/upload_gtest.py  78
-rw-r--r--  Source/ThirdParty/gtest/src/gtest-death-test.cc  1161
-rw-r--r--  Source/ThirdParty/gtest/src/gtest-filepath.cc  380
-rw-r--r--  Source/ThirdParty/gtest/src/gtest-internal-inl.h  1073
-rw-r--r--  Source/ThirdParty/gtest/src/gtest-port.cc  711
-rw-r--r--  Source/ThirdParty/gtest/src/gtest-test-part.cc  110
-rw-r--r--  Source/ThirdParty/gtest/src/gtest-typed-test.cc  110
-rw-r--r--  Source/ThirdParty/gtest/src/gtest.cc  4704
-rw-r--r--  Source/ThirdParty/gtest/src/gtest_main.cc  39
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-death-test_test.cc  1230
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-filepath_test.cc  690
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-linked_ptr_test.cc  154
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-listener_test.cc  313
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-message_test.cc  167
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-options_test.cc  212
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-param-test2_test.cc  65
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-param-test_test.cc  835
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-param-test_test.h  55
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-port_test.cc  1018
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-test-part_test.cc  208
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-tuple_test.cc  320
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-typed-test2_test.cc  45
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-typed-test_test.cc  360
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-typed-test_test.h  66
-rw-r--r--  Source/ThirdParty/gtest/test/gtest-unittest-api_test.cc  343
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_all_test.cc  48
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_break_on_failure_unittest.py  218
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_break_on_failure_unittest_.cc  86
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_color_test.py  130
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_color_test_.cc  71
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_env_var_test.py  105
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_env_var_test_.cc  126
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_environment_test.cc  186
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_filter_unittest.py  633
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_filter_unittest_.cc  140
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_help_test.py  169
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_help_test_.cc  46
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_list_tests_unittest.py  177
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_list_tests_unittest_.cc  85
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_main_unittest.cc  45
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_nc.cc  234
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_nc_test.py  106
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_no_test_unittest.cc  54
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_output_test.py  327
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_output_test_.cc  1135
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_output_test_golden_lin.txt  696
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_output_test_golden_win.txt  605
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_pred_impl_unittest.cc  2432
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_prod_test.cc  57
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_repeat_test.cc  253
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_shuffle_test.py  325
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_shuffle_test_.cc  104
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_sole_header_test.cc  57
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_stress_test.cc  257
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_test_utils.py  309
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_throw_on_failure_ex_test.cc  92
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_throw_on_failure_test.py  171
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_throw_on_failure_test_.cc  56
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_uninitialized_test.py  70
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_uninitialized_test_.cc  43
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_unittest.cc  6718
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_xml_outfile1_test_.cc  49
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_xml_outfile2_test_.cc  49
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_xml_outfiles_test.py  132
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_xml_output_unittest.py  224
-rw-r--r--  Source/ThirdParty/gtest/test/gtest_xml_output_unittest_.cc  145
-rwxr-xr-x  Source/ThirdParty/gtest/test/gtest_xml_test_utils.py  172
-rw-r--r--  Source/ThirdParty/gtest/test/production.cc  36
-rw-r--r--  Source/ThirdParty/gtest/test/production.h  55
-rwxr-xr-x  Source/ThirdParty/gtest/test/run_tests_util.py  466
-rwxr-xr-x  Source/ThirdParty/gtest/test/run_tests_util_test.py  676
-rwxr-xr-x  Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/runtests.sh  62
-rw-r--r--  Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget.cc  63
-rw-r--r--  Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget.h  59
-rw-r--r--  Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget_test.cc  68
-rwxr-xr-x  Source/ThirdParty/gtest/xcode/Scripts/runtests.sh  65
-rw-r--r--  Source/ThirdParty/gtest/xcode/Scripts/versiongenerate.py  100
122 files changed, 41956 insertions, 57 deletions
diff --git a/Source/ThirdParty/gtest/CHANGES b/Source/ThirdParty/gtest/CHANGES
new file mode 100644
index 000000000..e574415ee
--- /dev/null
+++ b/Source/ThirdParty/gtest/CHANGES
@@ -0,0 +1,98 @@
+Changes for 1.5.0:
+
+ * New feature: assertions can be safely called in multiple threads
+ where the pthreads library is available.
+ * New feature: predicates used inside EXPECT_TRUE() and friends
+ can now generate custom failure messages.
+ * New feature: Google Test can now be compiled as a DLL.
+ * New feature: fused source files are included.
+ * New feature: prints help when encountering unrecognized Google Test flags.
+ * Experimental feature: CMake build script (requires CMake 2.6.4+).
+ * Experimental feature: the Pump script for meta programming.
+ * double values streamed to an assertion are printed with enough precision
+ to differentiate any two different values.
+ * Google Test now works on Solaris and AIX.
+ * Build and test script improvements.
+ * Bug fixes and implementation clean-ups.
+
+ Potentially breaking changes:
+
+ * Stopped supporting VC++ 7.1 with exceptions disabled.
+ * Dropped support for 'make install'.
+
+Changes for 1.4.0:
+
+ * New feature: the event listener API
+ * New feature: test shuffling
+ * New feature: the XML report format is closer to junitreport and can
+ be parsed by Hudson now.
+ * New feature: when a test runs under Visual Studio, its failures are
+ integrated in the IDE.
+ * New feature: /MD(d) versions of VC++ projects.
+ * New feature: elapsed time for the tests is printed by default.
+ * New feature: comes with a TR1 tuple implementation such that Boost
+ is no longer needed for Combine().
+ * New feature: EXPECT_DEATH_IF_SUPPORTED macro and friends.
+ * New feature: the Xcode project can now produce static gtest
+ libraries in addition to a framework.
+ * Compatibility fixes for Solaris, Cygwin, minGW, Windows Mobile,
+ Symbian, gcc, and C++Builder.
+ * Bug fixes and implementation clean-ups.
+
+Changes for 1.3.0:
+
+ * New feature: death tests on Windows, Cygwin, and Mac.
+ * New feature: ability to use Google Test assertions in other testing
+ frameworks.
+ * New feature: ability to run disabled tests via
+ --gtest_also_run_disabled_tests.
+ * New feature: the --help flag for printing the usage.
+ * New feature: access to Google Test flag values in user code.
+ * New feature: a script that packs Google Test into one .h and one
+ .cc file for easy deployment.
+ * New feature: support for distributing test functions to multiple
+ machines (requires support from the test runner).
+ * Bug fixes and implementation clean-ups.
+
+Changes for 1.2.1:
+
+ * Compatibility fixes for Linux IA-64 and IBM z/OS.
+ * Added support for using Boost and other TR1 implementations.
+ * Changes to the build scripts to support upcoming release of Google C++
+ Mocking Framework.
+ * Added Makefile to the distribution package.
+ * Improved build instructions in README.
+
+Changes for 1.2.0:
+
+ * New feature: value-parameterized tests.
+ * New feature: the ASSERT/EXPECT_(NON)FATAL_FAILURE(_ON_ALL_THREADS)
+ macros.
+ * Changed the XML report format to match JUnit/Ant's.
+ * Added tests to the Xcode project.
+ * Added scons/SConscript for building with SCons.
+ * Added src/gtest-all.cc for building Google Test from a single file.
+ * Fixed compatibility with Solaris and z/OS.
+ * Enabled running Python tests on systems with python 2.3 installed,
+ e.g. Mac OS X 10.4.
+ * Bug fixes.
+
+Changes for 1.1.0:
+
+ * New feature: type-parameterized tests.
+ * New feature: exception assertions.
+ * New feature: printing elapsed time of tests.
+ * Improved the robustness of death tests.
+ * Added an Xcode project and samples.
+ * Adjusted the output format on Windows to be understandable by Visual Studio.
+ * Minor bug fixes.
+
+Changes for 1.0.1:
+
+ * Added project files for Visual Studio 7.1.
+ * Fixed issues with compiling on Mac OS X.
+ * Fixed issues with compiling on Cygwin.
+
+Changes for 1.0.0:
+
+ * Initial Open Source release of Google Test
diff --git a/Source/ThirdParty/gtest/CMakeLists.txt b/Source/ThirdParty/gtest/CMakeLists.txt
new file mode 100755
index 000000000..451c58db0
--- /dev/null
+++ b/Source/ThirdParty/gtest/CMakeLists.txt
@@ -0,0 +1,53 @@
+# This is an alternative CMakeLists.txt to the experimental one
+# shipped with the original package. It builds only what is essential
+# for our tests. The WebKit version of gtest needs access to some
+# headers from WTF and JSC for configuration.
+
+set(GTEST_DIR "${THIRDPARTY_DIR}/gtest")
+
+set(GTEST_INCLUDE_DIRECTORIES
+ "${GTEST_DIR}"
+ "${GTEST_DIR}/include"
+ "${JAVASCRIPTCORE_DIR}"
+ "${DERIVED_SOURCES_JAVASCRIPTCORE_DIR}"
+)
+
+set(GTEST_SOURCES
+ ${GTEST_DIR}/src/gtest.cc
+ ${GTEST_DIR}/src/gtest-death-test.cc
+ ${GTEST_DIR}/src/gtest-filepath.cc
+ ${GTEST_DIR}/src/gtest_main.cc
+ ${GTEST_DIR}/src/gtest-port.cc
+ ${GTEST_DIR}/src/gtest-test-part.cc
+ ${GTEST_DIR}/src/gtest-typed-test.cc
+)
+
+if (WIN32)
+ # MSVC 2015 requires this definition for INTMAX_MAX to be defined.
+ add_definitions(-D__STDC_LIMIT_MACROS -DGTEST_CREATE_SHARED_LIBRARY=0)
+ # gtest crashes on exit on Windows if it is a dll.
+ add_library(gtest STATIC ${GTEST_SOURCES})
+else ()
+ add_definitions(-DGTEST_CREATE_SHARED_LIBRARY=1)
+ add_library(gtest SHARED ${GTEST_SOURCES})
+endif ()
+
+include_directories(${GTEST_INCLUDE_DIRECTORIES})
+add_definitions(-DGTEST_HAS_RTTI=0)
+
+# FIXME: This works around compatibility problems in the old version of the third-party
+# googletest source code checkout. It should be removed once we upgrade to a newer version.
+if (COMPILER_IS_CLANG)
+ add_definitions(-DGTEST_HAS_TR1_TUPLE=0)
+endif ()
+
+target_link_libraries(gtest
+ WTF
+)
+
+if (CMAKE_USE_PTHREADS_INIT)
+ target_link_libraries(gtest ${CMAKE_THREAD_LIBS_INIT})
+ add_definitions(-DGTEST_HAS_PTHREAD=1)
+else ()
+ add_definitions(-DGTEST_HAS_PTHREAD=0)
+endif ()
diff --git a/Source/ThirdParty/gtest/CONTRIBUTORS b/Source/ThirdParty/gtest/CONTRIBUTORS
new file mode 100644
index 000000000..0934ae130
--- /dev/null
+++ b/Source/ThirdParty/gtest/CONTRIBUTORS
@@ -0,0 +1,36 @@
+# This file contains a list of people who've made non-trivial
+# contribution to the Google C++ Testing Framework project. People
+# who commit code to the project are encouraged to add their names
+# here. Please keep the list sorted by first names.
+
+Ajay Joshi <jaj@google.com>
+Balázs Dán <balazs.dan@gmail.com>
+Bharat Mediratta <bharat@menalto.com>
+Chandler Carruth <chandlerc@google.com>
+Chris Prince <cprince@google.com>
+Chris Taylor <taylorc@google.com>
+Dan Egnor <egnor@google.com>
+Eric Roman <eroman@chromium.org>
+Hady Zalek <hady.zalek@gmail.com>
+Jeffrey Yasskin <jyasskin@google.com>
+Jói Sigurðsson <joi@google.com>
+Keir Mierle <mierle@gmail.com>
+Keith Ray <keith.ray@gmail.com>
+Kenton Varda <kenton@google.com>
+Manuel Klimek <klimek@google.com>
+Markus Heule <markus.heule@gmail.com>
+Mika Raento <mikie@iki.fi>
+Miklós Fazekas <mfazekas@szemafor.com>
+Patrick Hanna <phanna@google.com>
+Patrick Riley <pfr@google.com>
+Peter Kaminski <piotrk@google.com>
+Preston Jackson <preston.a.jackson@gmail.com>
+Rainer Klaffenboeck <rainer.klaffenboeck@dynatrace.com>
+Russ Cox <rsc@google.com>
+Russ Rufer <russ@pentad.com>
+Sean Mcafee <eefacm@gmail.com>
+Sigurður Ásgeirsson <siggi@google.com>
+Tracy Bialik <tracy@pentad.com>
+Vadim Berman <vadimb@google.com>
+Vlad Losev <vladl@google.com>
+Zhanyong Wan <wan@google.com>
diff --git a/Source/ThirdParty/gtest/COPYING b/Source/ThirdParty/gtest/COPYING
new file mode 100644
index 000000000..1941a11f8
--- /dev/null
+++ b/Source/ThirdParty/gtest/COPYING
@@ -0,0 +1,28 @@
+Copyright 2008, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/Source/ThirdParty/gtest/GNUmakefile.am b/Source/ThirdParty/gtest/GNUmakefile.am
deleted file mode 100644
index 1e70168cf..000000000
--- a/Source/ThirdParty/gtest/GNUmakefile.am
+++ /dev/null
@@ -1,57 +0,0 @@
-
-noinst_LTLIBRARIES += \
- Libraries/libgtest.la
-
-Libraries_libgtest_la_SOURCES = \
- Source/ThirdParty/gtest/include/gtest/gtest.h \
- Source/ThirdParty/gtest/include/gtest/gtest-death-test.h \
- Source/ThirdParty/gtest/include/gtest/gtest-message.h \
- Source/ThirdParty/gtest/include/gtest/gtest-param-test.h \
- Source/ThirdParty/gtest/include/gtest/gtest_pred_impl.h \
- Source/ThirdParty/gtest/include/gtest/gtest_prod.h \
- Source/ThirdParty/gtest/include/gtest/gtest-spi.h \
- Source/ThirdParty/gtest/include/gtest/gtest-test-part.h \
- Source/ThirdParty/gtest/include/gtest/gtest-typed-test.h \
- Source/ThirdParty/gtest/include/gtest/internal/gtest-death-test-internal.h \
- Source/ThirdParty/gtest/include/gtest/internal/gtest-filepath.h \
- Source/ThirdParty/gtest/include/gtest/internal/gtest-internal.h \
- Source/ThirdParty/gtest/include/gtest/internal/gtest-linked_ptr.h \
- Source/ThirdParty/gtest/include/gtest/internal/gtest-param-util-generated.h \
- Source/ThirdParty/gtest/include/gtest/internal/gtest-param-util.h \
- Source/ThirdParty/gtest/include/gtest/internal/gtest-port.h \
- Source/ThirdParty/gtest/include/gtest/internal/gtest-string.h \
- Source/ThirdParty/gtest/include/gtest/internal/gtest-tuple.h \
- Source/ThirdParty/gtest/include/gtest/internal/gtest-type-util.h \
- Source/ThirdParty/gtest/src/gtest-all.cc
-
-# Use -isystem gcc flag so that gcc considers gtest headers as system headers.
-# We need this to avoid a lot of compile warnings due to -Wundef.
-# See http://code.google.com/p/googletest/issues/detail?id=258
-Libraries_libgtest_la_CPPFLAGS = \
- $(global_cppflags) \
- $(javascriptcore_cppflags) \
- -isystem $(srcdir)/Source/ThirdParty/gtest \
- -isystem $(srcdir)/Source/ThirdParty/gtest/include \
- -I$(srcdir)/Source/WTF \
- -I$(srcdir)/Source/ThirdParty/gtest \
- -I$(srcdir)/Source/ThirdParty/gtest/include
-
-if TARGET_WIN32
-Libraries_libgtest_la_CXXFLAGS = -DGTEST_HAS_PTHREAD=0
-else
-Libraries_libgtest_la_CXXFLAGS = -DGTEST_HAS_PTHREAD=1
-Libraries_libgtest_la_LIBADD = \
- -lpthread
-endif
-
-# gtest source files that we don't compile directly. They are
-# #included by gtest-all.cc.
-EXTRA_DIST += \
- Source/ThirdParty/gtest/src/gtest.cc \
- Source/ThirdParty/gtest/src/gtest-death-test.cc \
- Source/ThirdParty/gtest/src/gtest-filepath.cc \
- Source/ThirdParty/gtest/src/gtest-internal-inl.h \
- Source/ThirdParty/gtest/src/gtest-port.cc \
- Source/ThirdParty/gtest/src/gtest-test-part.cc \
- Source/ThirdParty/gtest/src/gtest-typed-test.cc
-
diff --git a/Source/ThirdParty/gtest/README b/Source/ThirdParty/gtest/README
new file mode 100644
index 000000000..ec6119002
--- /dev/null
+++ b/Source/ThirdParty/gtest/README
@@ -0,0 +1,417 @@
+Google C++ Testing Framework
+============================
+
+http://code.google.com/p/googletest/
+
+Overview
+--------
+
+Google's framework for writing C++ tests on a variety of platforms
+(Linux, Mac OS X, Windows, Windows CE, Symbian, etc). Based on the
+xUnit architecture. Supports automatic test discovery, a rich set of
+assertions, user-defined assertions, death tests, fatal and non-fatal
+failures, various options for running the tests, and XML test report
+generation.
+
+Please see the project page above for more information as well as the
+mailing list for questions, discussions, and development. There is
+also an IRC channel, #gtest, on OFTC (irc.oftc.net). Please
+join us!
+
+Requirements for End Users
+--------------------------
+
+Google Test is designed to have fairly minimal requirements to build
+and use with your projects, but there are some. Currently, we support
+Linux, Windows, Mac OS X, and Cygwin. We will also make our best
+effort to support other platforms (e.g. Solaris, AIX, and z/OS).
+However, since core members of the Google Test project have no access
+to these platforms, Google Test may have outstanding issues there. If
+you notice any problems on your platform, please notify
+googletestframework@googlegroups.com. Patches for fixing them are
+even more welcome!
+
+### Linux Requirements ###
+
+These are the base requirements to build and use Google Test from a source
+package (as described below):
+ * GNU-compatible Make or gmake
+ * POSIX-standard shell
+ * POSIX(-2) Regular Expressions (regex.h)
+ * A C++98-standard-compliant compiler
+
+### Windows Requirements ###
+
+ * Microsoft Visual C++ 7.1 or newer
+
+### Cygwin Requirements ###
+
+ * Cygwin 1.5.25-14 or newer
+
+### Mac OS X Requirements ###
+
+ * Mac OS X 10.4 Tiger or newer
+ * Developer Tools Installed
+
+Also, you'll need CMake 2.6.4 or higher if you want to build the
+samples using the provided CMake script, regardless of the platform.
+
+Requirements for Contributors
+-----------------------------
+
+We welcome patches. If you plan to contribute a patch, you need to
+build Google Test and its own tests from an SVN checkout (described
+below), which has further requirements:
+
+ * Python version 2.3 or newer (for running some of the tests and
+ re-generating certain source files from templates)
+ * CMake 2.6.4 or newer
+
+Getting the Source
+------------------
+
+There are two primary ways of getting Google Test's source code: you
+can download a stable source release in your preferred archive format,
+or directly check out the source from our Subversion (SVN) repository.
+The SVN checkout requires a few extra steps and some extra software
+packages on your system, but lets you track the latest development and
+make patches much more easily, so we highly encourage it.
+
+### Source Package ###
+
+Google Test is released in versioned source packages which can be
+downloaded from the download page [1]. Several different archive
+formats are provided, but the only difference is the tools used to
+manipulate them, and the size of the resulting file. Download
+whichever you are most comfortable with.
+
+ [1] http://code.google.com/p/googletest/downloads/list
+
+Once the package is downloaded, expand it using whichever tools you
+prefer for that type. This will result in a new directory with the
+name "gtest-X.Y.Z" which contains all of the source code. Here are
+some examples on Linux:
+
+ tar -xvzf gtest-X.Y.Z.tar.gz
+ tar -xvjf gtest-X.Y.Z.tar.bz2
+ unzip gtest-X.Y.Z.zip
+
+### SVN Checkout ###
+
+To check out the main branch (also known as the "trunk") of Google
+Test, run the following Subversion command:
+
+ svn checkout http://googletest.googlecode.com/svn/trunk/ gtest-svn
+
+Setting up the Build
+--------------------
+
+To build Google Test and your tests that use it, you need to tell your
+build system where to find its headers and source files. The exact
+way to do it depends on which build system you use, and is usually
+straightforward.
+
+### Generic Build Instructions ###
+
+Suppose you put Google Test in directory ${GTEST_DIR}. To build it,
+create a library build target (or a project as called by Visual Studio
+and Xcode) to compile
+
+ ${GTEST_DIR}/src/gtest-all.cc
+
+with
+
+ ${GTEST_DIR}/include and ${GTEST_DIR}
+
+in the header search path. Assuming a Linux-like system and gcc,
+something like the following will do:
+
+ g++ -I${GTEST_DIR}/include -I${GTEST_DIR} -c ${GTEST_DIR}/src/gtest-all.cc
+ ar -rv libgtest.a gtest-all.o
+
+Next, you should compile your test source file with
+${GTEST_DIR}/include in the header search path, and link it with gtest
+and any other necessary libraries:
+
+ g++ -I${GTEST_DIR}/include path/to/your_test.cc libgtest.a -o your_test
+
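+As a purely illustrative sketch (the file name your_test.cc, the Add()
+function, and the test names below are made up for this example and are
+not part of Google Test), such a test file might look like:
+
+  #include <gtest/gtest.h>
+
+  // Hypothetical function under test.
+  int Add(int a, int b) { return a + b; }
+
+  TEST(AddTest, HandlesSmallValues) {
+    EXPECT_EQ(4, Add(2, 2));  // Non-fatal assertion.
+    ASSERT_GT(Add(1, 2), 0);  // Fatal assertion: aborts this test on failure.
+  }
+
+  // The command above links only libgtest.a (not gtest_main), so the
+  // test file supplies its own main().
+  int main(int argc, char **argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+  }
+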
+As an example, the make/ directory contains a Makefile that you can
+use to build Google Test on systems where GNU make is available
+(e.g. Linux, Mac OS X, and Cygwin). It doesn't try to build Google
+Test's own tests. Instead, it just builds the Google Test library and
+a sample test. You can use it as a starting point for your own build
+script.
+
+If the default settings are correct for your environment, the
+following commands should succeed:
+
+ cd ${GTEST_DIR}/make
+ make
+ ./sample1_unittest
+
+If you see errors, try to tweak the contents of make/Makefile to make
+them go away. There are instructions in make/Makefile on how to do
+it.
+
+### Using CMake ###
+
+Google Test comes with a CMake build script (CMakeLists.txt) that can
+be used on a wide range of platforms ("C" stands for cross-platform.).
+If you don't have CMake installed already, you can download it for
+free from http://www.cmake.org/.
+
+CMake works by generating native makefiles or build projects that can
+be used in the compiler environment of your choice. The typical
+workflow starts with:
+
+ mkdir mybuild # Create a directory to hold the build output.
+ cd mybuild
+ cmake ${GTEST_DIR} # Generate native build scripts.
+
+If you want to build Google Test's samples, you should replace the
+last command with
+
+ cmake -Dbuild_gtest_samples=ON ${GTEST_DIR}
+
+If you are on a *nix system, you should now see a Makefile in the
+current directory. Just type 'make' to build gtest.
+
+If you use Windows and have Visual Studio installed, a gtest.sln file
+and several .vcproj files will be created. You can then build them
+using Visual Studio.
+
+On Mac OS X with Xcode installed, a .xcodeproj file will be generated.
+
+### Legacy Build Scripts ###
+
+Before settling on CMake, we had been providing hand-maintained build
+projects/scripts for Visual Studio, Xcode, and Autotools. While we
+continue to provide them for convenience, they are not actively
+maintained any more. We highly recommend that you follow the
+instructions in the previous two sections to integrate Google Test
+with your existing build system.
+
+If you still need to use the legacy build scripts, here's how:
+
+The msvc\ folder contains two solutions with Visual C++ projects.
+Open the gtest.sln or gtest-md.sln file using Visual Studio, and you
+are ready to build Google Test the same way you build any Visual
+Studio project. Files that have names ending with -md use DLL
+versions of Microsoft runtime libraries (the /MD or the /MDd compiler
+option). Files without that suffix use static versions of the runtime
+libraries (the /MT or the /MTd option). Please note that one must use
+the same option to compile both gtest and the test code. If you use
+Visual Studio 2005 or above, we recommend the -md version as /MD is
+the default for new projects in these versions of Visual Studio.
+
+On Mac OS X, open the gtest.xcodeproj in the xcode/ folder using
+Xcode. Build the "gtest" target. The universal binary framework will
+end up in your selected build directory (selected in the Xcode
+"Preferences..." -> "Building" pane and defaults to xcode/build).
+Alternatively, at the command line, enter:
+
+ xcodebuild
+
+This will build the "Release" configuration of gtest.framework in your
+default build location. See the "xcodebuild" man page for more
+information about building different configurations and building in
+different locations.
+
+Tweaking Google Test
+--------------------
+
+Google Test can be used in diverse environments. The default
+configuration may not work (or may not work well) out of the box in
+some environments. However, you can easily tweak Google Test by
+defining control macros on the compiler command line. Generally,
+these macros are named like GTEST_XYZ and you define them to either 1
+or 0 to enable or disable a certain feature.
+
+We list the most frequently used macros below. For a complete list,
+see file include/gtest/internal/gtest-port.h.
+
+### Choosing a TR1 Tuple Library ###
+
+Some Google Test features require the C++ Technical Report 1 (TR1)
+tuple library, which is not yet available with all compilers. The
+good news is that Google Test implements a subset of TR1 tuple that's
+enough for its own need, and will automatically use this when the
+compiler doesn't provide TR1 tuple.
+
+Usually you don't need to care about which tuple library Google Test
+uses. However, if your project already uses TR1 tuple, you need to
+tell Google Test to use the same TR1 tuple library the rest of your
+project uses, or the two tuple implementations will clash. To do
+that, add
+
+ -DGTEST_USE_OWN_TR1_TUPLE=0
+
+to the compiler flags while compiling Google Test and your tests. If
+you want to force Google Test to use its own tuple library, just add
+
+ -DGTEST_USE_OWN_TR1_TUPLE=1
+
+to the compiler flags instead.
+
+If you don't want Google Test to use tuple at all, add
+
+ -DGTEST_HAS_TR1_TUPLE=0
+
+and all features using tuple will be disabled.
+
+### Multi-threaded Tests ###
+
+Google Test is thread-safe where the pthread library is available.
+After #include <gtest/gtest.h>, you can check the GTEST_IS_THREADSAFE
+macro to see whether this is the case (yes if the macro is #defined to
+1, no if it's undefined).
+
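+As a small illustration (the test name below is hypothetical, not part
+of Google Test), a test could report which configuration it was built
+with:
+
+  #include <cstdio>
+  #include <gtest/gtest.h>
+
+  TEST(PortTest, ReportsThreadSafety) {
+  #if GTEST_IS_THREADSAFE
+    // pthread was detected: assertions may be used from multiple threads.
+    std::printf("This build of Google Test is thread-safe.\n");
+  #else
+    // No pthread support: keep all assertions on one thread.
+    std::printf("This build of Google Test is NOT thread-safe.\n");
+  #endif
+  }
+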
+If Google Test doesn't correctly detect whether pthread is available
+in your environment, you can force it with
+
+ -DGTEST_HAS_PTHREAD=1
+
+or
+
+ -DGTEST_HAS_PTHREAD=0
+
+When Google Test uses pthread, you may need to add flags to your
+compiler and/or linker to select the pthread library, or you'll get
+link errors. If you use the CMake script or the deprecated Autotools
+script, this is taken care of for you. If you use your own build
+script, you'll need to read your compiler and linker's manual to
+figure out what flags to add.
+
+### As a Shared Library (DLL) ###
+
+Google Test is compact, so most users can build and link it as a
+static library for simplicity. You can choose to use Google Test
+as a shared library (known as a DLL on Windows) if you prefer.
+
+To compile gtest as a shared library, add
+
+ -DGTEST_CREATE_SHARED_LIBRARY=1
+
+to the compiler flags. You'll also need to tell the linker to produce
+a shared library instead - consult your linker's manual for how to do
+it.
+
+To compile your tests that use the gtest shared library, add
+
+ -DGTEST_LINKED_AS_SHARED_LIBRARY=1
+
+to the compiler flags.
+
+### Avoiding Macro Name Clashes ###
+
+In C++, macros don't obey namespaces. Therefore two libraries that
+both define a macro of the same name will clash if you #include both
+definitions. In case a Google Test macro clashes with another
+library, you can force Google Test to rename its macro to avoid the
+conflict.
+
+Specifically, if both Google Test and some other code define macro
+FOO, you can add
+
+ -DGTEST_DONT_DEFINE_FOO=1
+
+to the compiler flags to tell Google Test to change the macro's name
+from FOO to GTEST_FOO. Currently FOO can be FAIL, SUCCEED, or TEST.
+For example, with -DGTEST_DONT_DEFINE_TEST=1, you'll need to write
+
+ GTEST_TEST(SomeTest, DoesThis) { ... }
+
+instead of
+
+ TEST(SomeTest, DoesThis) { ... }
+
+in order to define a test.
+
+Upgrading from an Earlier Version
+---------------------------------
+
+We strive to keep Google Test releases backward compatible.
+Sometimes, though, we have to make some breaking changes for the
+users' long-term benefits. This section describes what you'll need to
+do if you are upgrading from an earlier version of Google Test.
+
+### Upgrading from 1.3.0 or Earlier ###
+
+You may need to explicitly enable or disable Google Test's own TR1
+tuple library. See the instructions in section "Choosing a TR1 Tuple
+Library".
+
+### Upgrading from 1.4.0 or Earlier ###
+
+The Autotools build script (configure + make) is no longer officially
+supported. You are encouraged to migrate to your own build system or
+use CMake. If you still need to use Autotools, you can find
+instructions in the README file from Google Test 1.4.0.
+
+On platforms where the pthread library is available, Google Test uses
+it in order to be thread-safe. See the "Multi-threaded Tests" section
+for what this means to your build script.
+
+If you use Microsoft Visual C++ 7.1 with exceptions disabled, Google
+Test will no longer compile. This should affect very few people, as a
+large portion of STL (including <string>) doesn't compile in this mode
+anyway. We decided to stop supporting it in order to greatly simplify
+Google Test's implementation.
+
+Developing Google Test
+----------------------
+
+This section discusses how to make your own changes to Google Test.
+
+### Testing Google Test Itself ###
+
+To make sure your changes work as intended and don't break existing
+functionality, you'll want to compile and run Google Test's own tests.
+For that you can use CMake:
+
+ mkdir mybuild
+ cd mybuild
+ cmake -Dbuild_all_gtest_tests=ON ${GTEST_DIR}
+
+Make sure you have Python installed, as some of Google Test's tests
+are written in Python. If the cmake command complains about not being
+able to find Python ("Could NOT find PythonInterp (missing:
+PYTHON_EXECUTABLE)"), try telling it explicitly where your Python
+executable can be found:
+
+ cmake -DPYTHON_EXECUTABLE=path/to/python -Dbuild_all_gtest_tests=ON \
+ ${GTEST_DIR}
+
+Next, you can build Google Test and all of its own tests. On *nix,
+this is usually done by 'make'. To run the tests, do
+
+ make test
+
+All tests should pass.
+
+### Regenerating Source Files ###
+
+Some of Google Test's source files are generated from templates (not
+in the C++ sense) using a script. A template file is named FOO.pump,
+where FOO is the name of the file it will generate. For example, the
+file include/gtest/internal/gtest-type-util.h.pump is used to generate
+gtest-type-util.h in the same directory.
+
+Normally you don't need to worry about regenerating the source files,
+unless you need to modify them. In that case, you should modify the
+corresponding .pump files instead and run the pump.py Python script to
+regenerate them. You can find pump.py in the scripts/ directory.
+Read the Pump manual [2] for how to use it.
+
+ [2] http://code.google.com/p/googletest/wiki/PumpManual
+
+### Contributing a Patch ###
+
+We welcome patches. Please read the Google Test developer's guide [3]
+for how you can contribute. In particular, make sure you have signed
+the Contributor License Agreement, or we won't be able to accept the
+patch.
+
+ [3] http://code.google.com/p/googletest/wiki/GoogleTestDevGuide
+
+Happy testing!
diff --git a/Source/ThirdParty/gtest/README.WebKit b/Source/ThirdParty/gtest/README.WebKit
new file mode 100644
index 000000000..6a318ff62
--- /dev/null
+++ b/Source/ThirdParty/gtest/README.WebKit
@@ -0,0 +1,16 @@
+GTest v.1.5.0
+
+http://googletest.googlecode.com/svn/tags/release-1.5.0/
+
+2016-01-02 Ting-Wei Lan (lantw44@gmail.com)
+ Remove usage of environ global variable to fix the build on FreeBSD.
+
+2012-01-04 Dmitry Lomov (dslomov@google.com) David Kilzer (ddkilzer@apple.com)
+ - WebKit port of gtest uses FastMalloc "new" and "delete" operators from JavaScriptCore.
+ - The gtest.framework target now links to JavaScriptCore.framework.
+
+2011-12-22 David Kilzer (ddkilzer@apple.com)
+ Added Production configuration to gtest Xcode project.
+
+2011-05-06 Dmitry Lomov (dslomov@google.com)
+ gtest-md.vcproj upgraded to VS 2005 (8.0) format to match the rest of WebKit
diff --git a/Source/ThirdParty/gtest/codegear/gtest.cbproj b/Source/ThirdParty/gtest/codegear/gtest.cbproj
new file mode 100644
index 000000000..95c3054b8
--- /dev/null
+++ b/Source/ThirdParty/gtest/codegear/gtest.cbproj
@@ -0,0 +1,138 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <PropertyGroup>
+ <ProjectGuid>{bca37a72-5b07-46cf-b44e-89f8e06451a2}</ProjectGuid>
+ <Config Condition="'$(Config)'==''">Release</Config>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Config)'=='Base' or '$(Base)'!=''">
+ <Base>true</Base>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Config)'=='Debug' or '$(Cfg_1)'!=''">
+ <Base>true</Base>
+ <Cfg_1>true</Cfg_1>
+ <CfgParent>Base</CfgParent>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Config)'=='Release' or '$(Cfg_2)'!=''">
+ <Base>true</Base>
+ <Cfg_2>true</Cfg_2>
+ <CfgParent>Base</CfgParent>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Base)'!=''">
+ <BCC_OptimizeForSpeed>true</BCC_OptimizeForSpeed>
+ <OutputExt>lib</OutputExt>
+ <DCC_CBuilderOutput>JPHNE</DCC_CBuilderOutput>
+ <Defines>NO_STRICT</Defines>
+ <DynamicRTL>true</DynamicRTL>
+ <UsePackages>true</UsePackages>
+ <ProjectType>CppStaticLibrary</ProjectType>
+ <BCC_CPPCompileAlways>true</BCC_CPPCompileAlways>
+ <PackageImports>rtl.bpi;vcl.bpi;bcbie.bpi;vclx.bpi;vclactnband.bpi;xmlrtl.bpi;bcbsmp.bpi;dbrtl.bpi;vcldb.bpi;bdertl.bpi;vcldbx.bpi;dsnap.bpi;dsnapcon.bpi;vclib.bpi;ibxpress.bpi;adortl.bpi;dbxcds.bpi;dbexpress.bpi;DbxCommonDriver.bpi;websnap.bpi;vclie.bpi;webdsnap.bpi;inet.bpi;inetdbbde.bpi;inetdbxpress.bpi;soaprtl.bpi;Rave75VCL.bpi;teeUI.bpi;tee.bpi;teedb.bpi;IndyCore.bpi;IndySystem.bpi;IndyProtocols.bpi;IntrawebDB_90_100.bpi;Intraweb_90_100.bpi;dclZipForged11.bpi;vclZipForged11.bpi;GR32_BDS2006.bpi;GR32_DSGN_BDS2006.bpi;Jcl.bpi;JclVcl.bpi;JvCoreD11R.bpi;JvSystemD11R.bpi;JvStdCtrlsD11R.bpi;JvAppFrmD11R.bpi;JvBandsD11R.bpi;JvDBD11R.bpi;JvDlgsD11R.bpi;JvBDED11R.bpi;JvCmpD11R.bpi;JvCryptD11R.bpi;JvCtrlsD11R.bpi;JvCustomD11R.bpi;JvDockingD11R.bpi;JvDotNetCtrlsD11R.bpi;JvEDID11R.bpi;JvGlobusD11R.bpi;JvHMID11R.bpi;JvInterpreterD11R.bpi;JvJansD11R.bpi;JvManagedThreadsD11R.bpi;JvMMD11R.bpi;JvNetD11R.bpi;JvPageCompsD11R.bpi;JvPluginD11R.bpi;JvPrintPreviewD11R.bpi;JvRuntimeDesignD11R.bpi;JvTimeFrameworkD11R.bpi;JvValidatorsD11R.bpi;JvWizardD11R.bpi;JvXPCtrlsD11R.bpi;VclSmp.bpi;CExceptionExpert11.bpi</PackageImports>
+ <BCC_wpar>false</BCC_wpar>
+ <IncludePath>$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\src;..\include;..</IncludePath>
+ <AllPackageLibs>rtl.lib;vcl.lib</AllPackageLibs>
+ <TLIB_PageSize>32</TLIB_PageSize>
+ <ILINK_LibraryPath>$(BDS)\lib;$(BDS)\lib\obj;$(BDS)\lib\psdk</ILINK_LibraryPath>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Cfg_1)'!=''">
+ <BCC_OptimizeForSpeed>false</BCC_OptimizeForSpeed>
+ <DCC_Optimize>false</DCC_Optimize>
+ <DCC_DebugInfoInExe>true</DCC_DebugInfoInExe>
+ <Defines>_DEBUG;$(Defines)</Defines>
+ <ILINK_FullDebugInfo>true</ILINK_FullDebugInfo>
+ <BCC_InlineFunctionExpansion>false</BCC_InlineFunctionExpansion>
+ <ILINK_DisableIncrementalLinking>true</ILINK_DisableIncrementalLinking>
+ <BCC_UseRegisterVariables>None</BCC_UseRegisterVariables>
+ <DCC_Define>DEBUG</DCC_Define>
+ <BCC_DebugLineNumbers>true</BCC_DebugLineNumbers>
+ <IntermediateOutputDir>Debug</IntermediateOutputDir>
+ <TASM_DisplaySourceLines>true</TASM_DisplaySourceLines>
+ <BCC_StackFrames>true</BCC_StackFrames>
+ <BCC_DisableOptimizations>true</BCC_DisableOptimizations>
+ <ILINK_LibraryPath>$(BDS)\lib\debug;$(ILINK_LibraryPath)</ILINK_LibraryPath>
+ <TASM_Debugging>Full</TASM_Debugging>
+ <BCC_SourceDebuggingOn>true</BCC_SourceDebuggingOn>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Cfg_2)'!=''">
+ <Defines>NDEBUG;$(Defines)</Defines>
+ <IntermediateOutputDir>Release</IntermediateOutputDir>
+ <ILINK_LibraryPath>$(BDS)\lib\release;$(ILINK_LibraryPath)</ILINK_LibraryPath>
+ <TASM_Debugging>None</TASM_Debugging>
+ </PropertyGroup>
+ <ProjectExtensions>
+ <Borland.Personality>CPlusPlusBuilder.Personality</Borland.Personality>
+ <Borland.ProjectType>CppStaticLibrary</Borland.ProjectType>
+ <BorlandProject>
+<BorlandProject><CPlusPlusBuilder.Personality><VersionInfo><VersionInfo Name="IncludeVerInfo">False</VersionInfo><VersionInfo Name="AutoIncBuild">False</VersionInfo><VersionInfo Name="MajorVer">1</VersionInfo><VersionInfo Name="MinorVer">0</VersionInfo><VersionInfo Name="Release">0</VersionInfo><VersionInfo Name="Build">0</VersionInfo><VersionInfo Name="Debug">False</VersionInfo><VersionInfo Name="PreRelease">False</VersionInfo><VersionInfo Name="Special">False</VersionInfo><VersionInfo Name="Private">False</VersionInfo><VersionInfo Name="DLL">False</VersionInfo><VersionInfo Name="Locale">1033</VersionInfo><VersionInfo Name="CodePage">1252</VersionInfo></VersionInfo><VersionInfoKeys><VersionInfoKeys Name="CompanyName"></VersionInfoKeys><VersionInfoKeys Name="FileDescription"></VersionInfoKeys><VersionInfoKeys Name="FileVersion">1.0.0.0</VersionInfoKeys><VersionInfoKeys Name="InternalName"></VersionInfoKeys><VersionInfoKeys Name="LegalCopyright"></VersionInfoKeys><VersionInfoKeys Name="LegalTrademarks"></VersionInfoKeys><VersionInfoKeys Name="OriginalFilename"></VersionInfoKeys><VersionInfoKeys Name="ProductName"></VersionInfoKeys><VersionInfoKeys Name="ProductVersion">1.0.0.0</VersionInfoKeys><VersionInfoKeys Name="Comments"></VersionInfoKeys></VersionInfoKeys><Debugging><Debugging Name="DebugSourceDirs"></Debugging></Debugging><Parameters><Parameters Name="RunParams"></Parameters><Parameters Name="Launcher"></Parameters><Parameters Name="UseLauncher">False</Parameters><Parameters Name="DebugCWD"></Parameters><Parameters Name="HostApplication"></Parameters><Parameters Name="RemoteHost"></Parameters><Parameters Name="RemotePath"></Parameters><Parameters Name="RemoteParams"></Parameters><Parameters Name="RemoteLauncher"></Parameters><Parameters Name="UseRemoteLauncher">False</Parameters><Parameters Name="RemoteCWD"></Parameters><Parameters Name="RemoteDebug">False</Parameters><Parameters Name="Debug Symbols Search Path"></Parameters><Parameters Name="LoadAllSymbols">True</Parameters><Parameters Name="LoadUnspecifiedSymbols">False</Parameters></Parameters><Excluded_Packages>
+
+
+ <Excluded_Packages Name="$(BDS)\bin\bcboffice2k100.bpl">CodeGear C++Builder Office 2000 Servers Package</Excluded_Packages>
+ <Excluded_Packages Name="$(BDS)\bin\bcbofficexp100.bpl">CodeGear C++Builder Office XP Servers Package</Excluded_Packages>
+ </Excluded_Packages><Linker><Linker Name="LibPrefix"></Linker><Linker Name="LibSuffix"></Linker><Linker Name="LibVersion"></Linker></Linker><ProjectProperties><ProjectProperties Name="AutoShowDeps">False</ProjectProperties><ProjectProperties Name="ManagePaths">True</ProjectProperties><ProjectProperties Name="VerifyPackages">True</ProjectProperties></ProjectProperties><HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Count">3</HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Item0">$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\src;..\include;..</HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Item1">$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\src;..\include;..</HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Item2">$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\src;..\src;..\include</HistoryLists_hlIncludePath></HistoryLists_hlIncludePath><HistoryLists_hlILINK_LibraryPath><HistoryLists_hlILINK_LibraryPath Name="Count">1</HistoryLists_hlILINK_LibraryPath><HistoryLists_hlILINK_LibraryPath Name="Item0">$(BDS)\lib;$(BDS)\lib\obj;$(BDS)\lib\psdk</HistoryLists_hlILINK_LibraryPath></HistoryLists_hlILINK_LibraryPath><HistoryLists_hlDefines><HistoryLists_hlDefines Name="Count">1</HistoryLists_hlDefines><HistoryLists_hlDefines Name="Item0">NO_STRICT</HistoryLists_hlDefines></HistoryLists_hlDefines><HistoryLists_hlTLIB_PageSize><HistoryLists_hlTLIB_PageSize Name="Count">1</HistoryLists_hlTLIB_PageSize><HistoryLists_hlTLIB_PageSize Name="Item0">32</HistoryLists_hlTLIB_PageSize><HistoryLists_hlTLIB_PageSize Name="Item1">16</HistoryLists_hlTLIB_PageSize></HistoryLists_hlTLIB_PageSize></CPlusPlusBuilder.Personality></BorlandProject></BorlandProject>
+ </ProjectExtensions>
+ <Import Project="$(MSBuildBinPath)\Borland.Cpp.Targets" />
+ <ItemGroup>
+ <None Include="..\include\gtest\gtest-death-test.h">
+ <BuildOrder>3</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\gtest-message.h">
+ <BuildOrder>4</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\gtest-param-test.h">
+ <BuildOrder>5</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\gtest-spi.h">
+ <BuildOrder>6</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\gtest-test-part.h">
+ <BuildOrder>7</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\gtest-typed-test.h">
+ <BuildOrder>8</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\gtest.h">
+ <BuildOrder>0</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\gtest_pred_impl.h">
+ <BuildOrder>1</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\gtest_prod.h">
+ <BuildOrder>2</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\internal\gtest-death-test-internal.h">
+ <BuildOrder>9</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\internal\gtest-filepath.h">
+ <BuildOrder>10</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\internal\gtest-internal.h">
+ <BuildOrder>11</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\internal\gtest-linked_ptr.h">
+ <BuildOrder>12</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\internal\gtest-param-util-generated.h">
+ <BuildOrder>14</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\internal\gtest-param-util.h">
+ <BuildOrder>13</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\internal\gtest-port.h">
+ <BuildOrder>15</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\internal\gtest-string.h">
+ <BuildOrder>16</BuildOrder>
+ </None>
+ <None Include="..\include\gtest\internal\gtest-type-util.h">
+ <BuildOrder>17</BuildOrder>
+ </None>
+ <CppCompile Include="gtest_all.cc">
+ <BuildOrder>18</BuildOrder>
+ </CppCompile>
+ <BuildConfiguration Include="Debug">
+ <Key>Cfg_1</Key>
+ </BuildConfiguration>
+ <BuildConfiguration Include="Release">
+ <Key>Cfg_2</Key>
+ </BuildConfiguration>
+ </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/Source/ThirdParty/gtest/codegear/gtest.groupproj b/Source/ThirdParty/gtest/codegear/gtest.groupproj
new file mode 100644
index 000000000..faf31cab6
--- /dev/null
+++ b/Source/ThirdParty/gtest/codegear/gtest.groupproj
@@ -0,0 +1,54 @@
+<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <PropertyGroup>
+ <ProjectGuid>{c1d923e0-6cba-4332-9b6f-3420acbf5091}</ProjectGuid>
+ </PropertyGroup>
+ <ItemGroup />
+ <ItemGroup>
+ <Projects Include="gtest.cbproj" />
+ <Projects Include="gtest_main.cbproj" />
+ <Projects Include="gtest_unittest.cbproj" />
+ </ItemGroup>
+ <ProjectExtensions>
+ <Borland.Personality>Default.Personality</Borland.Personality>
+ <Borland.ProjectType />
+ <BorlandProject>
+<BorlandProject xmlns=""><Default.Personality></Default.Personality></BorlandProject></BorlandProject>
+ </ProjectExtensions>
+ <Target Name="gtest">
+ <MSBuild Projects="gtest.cbproj" Targets="" />
+ </Target>
+ <Target Name="gtest:Clean">
+ <MSBuild Projects="gtest.cbproj" Targets="Clean" />
+ </Target>
+ <Target Name="gtest:Make">
+ <MSBuild Projects="gtest.cbproj" Targets="Make" />
+ </Target>
+ <Target Name="gtest_main">
+ <MSBuild Projects="gtest_main.cbproj" Targets="" />
+ </Target>
+ <Target Name="gtest_main:Clean">
+ <MSBuild Projects="gtest_main.cbproj" Targets="Clean" />
+ </Target>
+ <Target Name="gtest_main:Make">
+ <MSBuild Projects="gtest_main.cbproj" Targets="Make" />
+ </Target>
+ <Target Name="gtest_unittest">
+ <MSBuild Projects="gtest_unittest.cbproj" Targets="" />
+ </Target>
+ <Target Name="gtest_unittest:Clean">
+ <MSBuild Projects="gtest_unittest.cbproj" Targets="Clean" />
+ </Target>
+ <Target Name="gtest_unittest:Make">
+ <MSBuild Projects="gtest_unittest.cbproj" Targets="Make" />
+ </Target>
+ <Target Name="Build">
+ <CallTarget Targets="gtest;gtest_main;gtest_unittest" />
+ </Target>
+ <Target Name="Clean">
+ <CallTarget Targets="gtest:Clean;gtest_main:Clean;gtest_unittest:Clean" />
+ </Target>
+ <Target Name="Make">
+ <CallTarget Targets="gtest:Make;gtest_main:Make;gtest_unittest:Make" />
+ </Target>
+ <Import Condition="Exists('$(MSBuildBinPath)\Borland.Group.Targets')" Project="$(MSBuildBinPath)\Borland.Group.Targets" />
+</Project>
\ No newline at end of file
diff --git a/Source/ThirdParty/gtest/codegear/gtest_all.cc b/Source/ThirdParty/gtest/codegear/gtest_all.cc
new file mode 100644
index 000000000..ba7ad68ad
--- /dev/null
+++ b/Source/ThirdParty/gtest/codegear/gtest_all.cc
@@ -0,0 +1,38 @@
+// Copyright 2009, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: Josh Kelley (joshkel@gmail.com)
+//
+// Google C++ Testing Framework (Google Test)
+//
+// C++Builder's IDE cannot build a static library from files with hyphens
+// in their name. See http://qc.codegear.com/wc/qcmain.aspx?d=70977 .
+// This file serves as a workaround.
+
+#include "src/gtest-all.cc"
diff --git a/Source/ThirdParty/gtest/codegear/gtest_link.cc b/Source/ThirdParty/gtest/codegear/gtest_link.cc
new file mode 100644
index 000000000..b955ebf2f
--- /dev/null
+++ b/Source/ThirdParty/gtest/codegear/gtest_link.cc
@@ -0,0 +1,40 @@
+// Copyright 2009, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: Josh Kelley (joshkel@gmail.com)
+//
+// Google C++ Testing Framework (Google Test)
+//
+// Links gtest.lib and gtest_main.lib into the current project in C++Builder.
+// This means that these libraries can't be renamed, but it's the only way to
+// ensure that Debug versus Release test builds are linked against the
+// appropriate Debug or Release build of the libraries.
+
+#pragma link "gtest.lib"
+#pragma link "gtest_main.lib"
diff --git a/Source/ThirdParty/gtest/codegear/gtest_main.cbproj b/Source/ThirdParty/gtest/codegear/gtest_main.cbproj
new file mode 100644
index 000000000..d76ce1398
--- /dev/null
+++ b/Source/ThirdParty/gtest/codegear/gtest_main.cbproj
@@ -0,0 +1,82 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <PropertyGroup>
+ <ProjectGuid>{bca37a72-5b07-46cf-b44e-89f8e06451a2}</ProjectGuid>
+ <Config Condition="'$(Config)'==''">Release</Config>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Config)'=='Base' or '$(Base)'!=''">
+ <Base>true</Base>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Config)'=='Debug' or '$(Cfg_1)'!=''">
+ <Base>true</Base>
+ <Cfg_1>true</Cfg_1>
+ <CfgParent>Base</CfgParent>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Config)'=='Release' or '$(Cfg_2)'!=''">
+ <Base>true</Base>
+ <Cfg_2>true</Cfg_2>
+ <CfgParent>Base</CfgParent>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Base)'!=''">
+ <BCC_OptimizeForSpeed>true</BCC_OptimizeForSpeed>
+ <OutputExt>lib</OutputExt>
+ <DCC_CBuilderOutput>JPHNE</DCC_CBuilderOutput>
+ <Defines>NO_STRICT</Defines>
+ <DynamicRTL>true</DynamicRTL>
+ <UsePackages>true</UsePackages>
+ <ProjectType>CppStaticLibrary</ProjectType>
+ <BCC_CPPCompileAlways>true</BCC_CPPCompileAlways>
+ <PackageImports>rtl.bpi;vcl.bpi;bcbie.bpi;vclx.bpi;vclactnband.bpi;xmlrtl.bpi;bcbsmp.bpi;dbrtl.bpi;vcldb.bpi;bdertl.bpi;vcldbx.bpi;dsnap.bpi;dsnapcon.bpi;vclib.bpi;ibxpress.bpi;adortl.bpi;dbxcds.bpi;dbexpress.bpi;DbxCommonDriver.bpi;websnap.bpi;vclie.bpi;webdsnap.bpi;inet.bpi;inetdbbde.bpi;inetdbxpress.bpi;soaprtl.bpi;Rave75VCL.bpi;teeUI.bpi;tee.bpi;teedb.bpi;IndyCore.bpi;IndySystem.bpi;IndyProtocols.bpi;IntrawebDB_90_100.bpi;Intraweb_90_100.bpi;dclZipForged11.bpi;vclZipForged11.bpi;GR32_BDS2006.bpi;GR32_DSGN_BDS2006.bpi;Jcl.bpi;JclVcl.bpi;JvCoreD11R.bpi;JvSystemD11R.bpi;JvStdCtrlsD11R.bpi;JvAppFrmD11R.bpi;JvBandsD11R.bpi;JvDBD11R.bpi;JvDlgsD11R.bpi;JvBDED11R.bpi;JvCmpD11R.bpi;JvCryptD11R.bpi;JvCtrlsD11R.bpi;JvCustomD11R.bpi;JvDockingD11R.bpi;JvDotNetCtrlsD11R.bpi;JvEDID11R.bpi;JvGlobusD11R.bpi;JvHMID11R.bpi;JvInterpreterD11R.bpi;JvJansD11R.bpi;JvManagedThreadsD11R.bpi;JvMMD11R.bpi;JvNetD11R.bpi;JvPageCompsD11R.bpi;JvPluginD11R.bpi;JvPrintPreviewD11R.bpi;JvRuntimeDesignD11R.bpi;JvTimeFrameworkD11R.bpi;JvValidatorsD11R.bpi;JvWizardD11R.bpi;JvXPCtrlsD11R.bpi;VclSmp.bpi;CExceptionExpert11.bpi</PackageImports>
+ <BCC_wpar>false</BCC_wpar>
+ <IncludePath>$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\src;..\include;..</IncludePath>
+ <AllPackageLibs>rtl.lib;vcl.lib</AllPackageLibs>
+ <TLIB_PageSize>32</TLIB_PageSize>
+ <ILINK_LibraryPath>$(BDS)\lib;$(BDS)\lib\obj;$(BDS)\lib\psdk</ILINK_LibraryPath>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Cfg_1)'!=''">
+ <BCC_OptimizeForSpeed>false</BCC_OptimizeForSpeed>
+ <DCC_Optimize>false</DCC_Optimize>
+ <DCC_DebugInfoInExe>true</DCC_DebugInfoInExe>
+ <Defines>_DEBUG;$(Defines)</Defines>
+ <ILINK_FullDebugInfo>true</ILINK_FullDebugInfo>
+ <BCC_InlineFunctionExpansion>false</BCC_InlineFunctionExpansion>
+ <ILINK_DisableIncrementalLinking>true</ILINK_DisableIncrementalLinking>
+ <BCC_UseRegisterVariables>None</BCC_UseRegisterVariables>
+ <DCC_Define>DEBUG</DCC_Define>
+ <BCC_DebugLineNumbers>true</BCC_DebugLineNumbers>
+ <IntermediateOutputDir>Debug</IntermediateOutputDir>
+ <TASM_DisplaySourceLines>true</TASM_DisplaySourceLines>
+ <BCC_StackFrames>true</BCC_StackFrames>
+ <BCC_DisableOptimizations>true</BCC_DisableOptimizations>
+ <ILINK_LibraryPath>$(BDS)\lib\debug;$(ILINK_LibraryPath)</ILINK_LibraryPath>
+ <TASM_Debugging>Full</TASM_Debugging>
+ <BCC_SourceDebuggingOn>true</BCC_SourceDebuggingOn>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Cfg_2)'!=''">
+ <Defines>NDEBUG;$(Defines)</Defines>
+ <IntermediateOutputDir>Release</IntermediateOutputDir>
+ <ILINK_LibraryPath>$(BDS)\lib\release;$(ILINK_LibraryPath)</ILINK_LibraryPath>
+ <TASM_Debugging>None</TASM_Debugging>
+ </PropertyGroup>
+ <ProjectExtensions>
+ <Borland.Personality>CPlusPlusBuilder.Personality</Borland.Personality>
+ <Borland.ProjectType>CppStaticLibrary</Borland.ProjectType>
+ <BorlandProject>
+<BorlandProject><CPlusPlusBuilder.Personality><VersionInfo><VersionInfo Name="IncludeVerInfo">False</VersionInfo><VersionInfo Name="AutoIncBuild">False</VersionInfo><VersionInfo Name="MajorVer">1</VersionInfo><VersionInfo Name="MinorVer">0</VersionInfo><VersionInfo Name="Release">0</VersionInfo><VersionInfo Name="Build">0</VersionInfo><VersionInfo Name="Debug">False</VersionInfo><VersionInfo Name="PreRelease">False</VersionInfo><VersionInfo Name="Special">False</VersionInfo><VersionInfo Name="Private">False</VersionInfo><VersionInfo Name="DLL">False</VersionInfo><VersionInfo Name="Locale">1033</VersionInfo><VersionInfo Name="CodePage">1252</VersionInfo></VersionInfo><VersionInfoKeys><VersionInfoKeys Name="CompanyName"></VersionInfoKeys><VersionInfoKeys Name="FileDescription"></VersionInfoKeys><VersionInfoKeys Name="FileVersion">1.0.0.0</VersionInfoKeys><VersionInfoKeys Name="InternalName"></VersionInfoKeys><VersionInfoKeys Name="LegalCopyright"></VersionInfoKeys><VersionInfoKeys Name="LegalTrademarks"></VersionInfoKeys><VersionInfoKeys Name="OriginalFilename"></VersionInfoKeys><VersionInfoKeys Name="ProductName"></VersionInfoKeys><VersionInfoKeys Name="ProductVersion">1.0.0.0</VersionInfoKeys><VersionInfoKeys Name="Comments"></VersionInfoKeys></VersionInfoKeys><Debugging><Debugging Name="DebugSourceDirs"></Debugging></Debugging><Parameters><Parameters Name="RunParams"></Parameters><Parameters Name="Launcher"></Parameters><Parameters Name="UseLauncher">False</Parameters><Parameters Name="DebugCWD"></Parameters><Parameters Name="HostApplication"></Parameters><Parameters Name="RemoteHost"></Parameters><Parameters Name="RemotePath"></Parameters><Parameters Name="RemoteParams"></Parameters><Parameters Name="RemoteLauncher"></Parameters><Parameters Name="UseRemoteLauncher">False</Parameters><Parameters Name="RemoteCWD"></Parameters><Parameters Name="RemoteDebug">False</Parameters><Parameters Name="Debug Symbols Search Path"></Parameters><Parameters Name="LoadAllSymbols">True</Parameters><Parameters Name="LoadUnspecifiedSymbols">False</Parameters></Parameters><Excluded_Packages>
+ <Excluded_Packages Name="$(BDS)\bin\bcboffice2k100.bpl">CodeGear C++Builder Office 2000 Servers Package</Excluded_Packages>
+ <Excluded_Packages Name="$(BDS)\bin\bcbofficexp100.bpl">CodeGear C++Builder Office XP Servers Package</Excluded_Packages>
+ </Excluded_Packages><Linker><Linker Name="LibPrefix"></Linker><Linker Name="LibSuffix"></Linker><Linker Name="LibVersion"></Linker></Linker><ProjectProperties><ProjectProperties Name="AutoShowDeps">False</ProjectProperties><ProjectProperties Name="ManagePaths">True</ProjectProperties><ProjectProperties Name="VerifyPackages">True</ProjectProperties></ProjectProperties><HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Count">3</HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Item0">$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\src;..\include;..</HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Item1">$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\src;..\include;..</HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Item2">$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\src;..\src;..\include</HistoryLists_hlIncludePath></HistoryLists_hlIncludePath><HistoryLists_hlILINK_LibraryPath><HistoryLists_hlILINK_LibraryPath Name="Count">1</HistoryLists_hlILINK_LibraryPath><HistoryLists_hlILINK_LibraryPath Name="Item0">$(BDS)\lib;$(BDS)\lib\obj;$(BDS)\lib\psdk</HistoryLists_hlILINK_LibraryPath></HistoryLists_hlILINK_LibraryPath><HistoryLists_hlDefines><HistoryLists_hlDefines Name="Count">1</HistoryLists_hlDefines><HistoryLists_hlDefines Name="Item0">NO_STRICT</HistoryLists_hlDefines></HistoryLists_hlDefines><HistoryLists_hlTLIB_PageSize><HistoryLists_hlTLIB_PageSize Name="Count">1</HistoryLists_hlTLIB_PageSize><HistoryLists_hlTLIB_PageSize Name="Item0">32</HistoryLists_hlTLIB_PageSize><HistoryLists_hlTLIB_PageSize Name="Item1">16</HistoryLists_hlTLIB_PageSize></HistoryLists_hlTLIB_PageSize></CPlusPlusBuilder.Personality></BorlandProject></BorlandProject>
+ </ProjectExtensions>
+ <Import Project="$(MSBuildBinPath)\Borland.Cpp.Targets" />
+ <ItemGroup>
+ <CppCompile Include="..\src\gtest_main.cc">
+ <BuildOrder>0</BuildOrder>
+ </CppCompile>
+ <BuildConfiguration Include="Debug">
+ <Key>Cfg_1</Key>
+ </BuildConfiguration>
+ <BuildConfiguration Include="Release">
+ <Key>Cfg_2</Key>
+ </BuildConfiguration>
+ </ItemGroup>
+</Project>
diff --git a/Source/ThirdParty/gtest/codegear/gtest_unittest.cbproj b/Source/ThirdParty/gtest/codegear/gtest_unittest.cbproj
new file mode 100644
index 000000000..dc5db8e4d
--- /dev/null
+++ b/Source/ThirdParty/gtest/codegear/gtest_unittest.cbproj
@@ -0,0 +1,88 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <PropertyGroup>
+ <ProjectGuid>{eea63393-5ac5-4b9c-8909-d75fef2daa41}</ProjectGuid>
+ <Config Condition="'$(Config)'==''">Release</Config>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Config)'=='Base' or '$(Base)'!=''">
+ <Base>true</Base>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Config)'=='Debug' or '$(Cfg_1)'!=''">
+ <Base>true</Base>
+ <Cfg_1>true</Cfg_1>
+ <CfgParent>Base</CfgParent>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Config)'=='Release' or '$(Cfg_2)'!=''">
+ <Base>true</Base>
+ <Cfg_2>true</Cfg_2>
+ <CfgParent>Base</CfgParent>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Base)'!=''">
+ <OutputExt>exe</OutputExt>
+ <BCC_OptimizeForSpeed>true</BCC_OptimizeForSpeed>
+ <Defines>NO_STRICT</Defines>
+ <DCC_CBuilderOutput>JPHNE</DCC_CBuilderOutput>
+ <DynamicRTL>true</DynamicRTL>
+ <ILINK_ObjectSearchPath>..\test</ILINK_ObjectSearchPath>
+ <UsePackages>true</UsePackages>
+ <ProjectType>CppConsoleApplication</ProjectType>
+ <NoVCL>true</NoVCL>
+ <BCC_CPPCompileAlways>true</BCC_CPPCompileAlways>
+ <PackageImports>rtl.bpi;vcl.bpi;bcbie.bpi;vclx.bpi;vclactnband.bpi;xmlrtl.bpi;bcbsmp.bpi;dbrtl.bpi;vcldb.bpi;bdertl.bpi;vcldbx.bpi;dsnap.bpi;dsnapcon.bpi;vclib.bpi;ibxpress.bpi;adortl.bpi;dbxcds.bpi;dbexpress.bpi;DbxCommonDriver.bpi;websnap.bpi;vclie.bpi;webdsnap.bpi;inet.bpi;inetdbbde.bpi;inetdbxpress.bpi;soaprtl.bpi;Rave75VCL.bpi;teeUI.bpi;tee.bpi;teedb.bpi;IndyCore.bpi;IndySystem.bpi;IndyProtocols.bpi;IntrawebDB_90_100.bpi;Intraweb_90_100.bpi;Jcl.bpi;JclVcl.bpi;JvCoreD11R.bpi;JvSystemD11R.bpi;JvStdCtrlsD11R.bpi;JvAppFrmD11R.bpi;JvBandsD11R.bpi;JvDBD11R.bpi;JvDlgsD11R.bpi;JvBDED11R.bpi;JvCmpD11R.bpi;JvCryptD11R.bpi;JvCtrlsD11R.bpi;JvCustomD11R.bpi;JvDockingD11R.bpi;JvDotNetCtrlsD11R.bpi;JvEDID11R.bpi;JvGlobusD11R.bpi;JvHMID11R.bpi;JvInterpreterD11R.bpi;JvJansD11R.bpi;JvManagedThreadsD11R.bpi;JvMMD11R.bpi;JvNetD11R.bpi;JvPageCompsD11R.bpi;JvPluginD11R.bpi;JvPrintPreviewD11R.bpi;JvRuntimeDesignD11R.bpi;JvTimeFrameworkD11R.bpi;JvValidatorsD11R.bpi;JvWizardD11R.bpi;JvXPCtrlsD11R.bpi;VclSmp.bpi</PackageImports>
+ <BCC_wpar>false</BCC_wpar>
+ <IncludePath>$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\include;..\test;..</IncludePath>
+ <ILINK_LibraryPath>$(BDS)\lib;$(BDS)\lib\obj;$(BDS)\lib\psdk;..\test</ILINK_LibraryPath>
+ <Multithreaded>true</Multithreaded>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Cfg_1)'!=''">
+ <BCC_OptimizeForSpeed>false</BCC_OptimizeForSpeed>
+ <DCC_Optimize>false</DCC_Optimize>
+ <DCC_DebugInfoInExe>true</DCC_DebugInfoInExe>
+ <Defines>_DEBUG;$(Defines)</Defines>
+ <ILINK_FullDebugInfo>true</ILINK_FullDebugInfo>
+ <BCC_InlineFunctionExpansion>false</BCC_InlineFunctionExpansion>
+ <ILINK_DisableIncrementalLinking>true</ILINK_DisableIncrementalLinking>
+ <BCC_UseRegisterVariables>None</BCC_UseRegisterVariables>
+ <DCC_Define>DEBUG</DCC_Define>
+ <BCC_DebugLineNumbers>true</BCC_DebugLineNumbers>
+ <IntermediateOutputDir>Debug</IntermediateOutputDir>
+ <TASM_DisplaySourceLines>true</TASM_DisplaySourceLines>
+ <BCC_StackFrames>true</BCC_StackFrames>
+ <BCC_DisableOptimizations>true</BCC_DisableOptimizations>
+ <ILINK_LibraryPath>$(BDS)\lib\debug;$(ILINK_LibraryPath)</ILINK_LibraryPath>
+ <TASM_Debugging>Full</TASM_Debugging>
+ <BCC_SourceDebuggingOn>true</BCC_SourceDebuggingOn>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Cfg_2)'!=''">
+ <Defines>NDEBUG;$(Defines)</Defines>
+ <IntermediateOutputDir>Release</IntermediateOutputDir>
+ <ILINK_LibraryPath>$(BDS)\lib\release;$(ILINK_LibraryPath)</ILINK_LibraryPath>
+ <TASM_Debugging>None</TASM_Debugging>
+ </PropertyGroup>
+ <ProjectExtensions>
+ <Borland.Personality>CPlusPlusBuilder.Personality</Borland.Personality>
+ <Borland.ProjectType>CppConsoleApplication</Borland.ProjectType>
+ <BorlandProject>
+<BorlandProject><CPlusPlusBuilder.Personality><VersionInfo><VersionInfo Name="IncludeVerInfo">False</VersionInfo><VersionInfo Name="AutoIncBuild">False</VersionInfo><VersionInfo Name="MajorVer">1</VersionInfo><VersionInfo Name="MinorVer">0</VersionInfo><VersionInfo Name="Release">0</VersionInfo><VersionInfo Name="Build">0</VersionInfo><VersionInfo Name="Debug">False</VersionInfo><VersionInfo Name="PreRelease">False</VersionInfo><VersionInfo Name="Special">False</VersionInfo><VersionInfo Name="Private">False</VersionInfo><VersionInfo Name="DLL">False</VersionInfo><VersionInfo Name="Locale">1033</VersionInfo><VersionInfo Name="CodePage">1252</VersionInfo></VersionInfo><VersionInfoKeys><VersionInfoKeys Name="CompanyName"></VersionInfoKeys><VersionInfoKeys Name="FileDescription"></VersionInfoKeys><VersionInfoKeys Name="FileVersion">1.0.0.0</VersionInfoKeys><VersionInfoKeys Name="InternalName"></VersionInfoKeys><VersionInfoKeys Name="LegalCopyright"></VersionInfoKeys><VersionInfoKeys Name="LegalTrademarks"></VersionInfoKeys><VersionInfoKeys Name="OriginalFilename"></VersionInfoKeys><VersionInfoKeys Name="ProductName"></VersionInfoKeys><VersionInfoKeys Name="ProductVersion">1.0.0.0</VersionInfoKeys><VersionInfoKeys Name="Comments"></VersionInfoKeys></VersionInfoKeys><Debugging><Debugging Name="DebugSourceDirs"></Debugging></Debugging><Parameters><Parameters Name="RunParams"></Parameters><Parameters Name="Launcher"></Parameters><Parameters Name="UseLauncher">False</Parameters><Parameters Name="DebugCWD"></Parameters><Parameters Name="HostApplication"></Parameters><Parameters Name="RemoteHost"></Parameters><Parameters Name="RemotePath"></Parameters><Parameters Name="RemoteParams"></Parameters><Parameters Name="RemoteLauncher"></Parameters><Parameters Name="UseRemoteLauncher">False</Parameters><Parameters Name="RemoteCWD"></Parameters><Parameters Name="RemoteDebug">False</Parameters><Parameters Name="Debug Symbols Search Path"></Parameters><Parameters Name="LoadAllSymbols">True</Parameters><Parameters Name="LoadUnspecifiedSymbols">False</Parameters></Parameters><Excluded_Packages>
+
+
+ <Excluded_Packages Name="$(BDS)\bin\bcboffice2k100.bpl">CodeGear C++Builder Office 2000 Servers Package</Excluded_Packages>
+ <Excluded_Packages Name="$(BDS)\bin\bcbofficexp100.bpl">CodeGear C++Builder Office XP Servers Package</Excluded_Packages>
+ </Excluded_Packages><Linker><Linker Name="LibPrefix"></Linker><Linker Name="LibSuffix"></Linker><Linker Name="LibVersion"></Linker></Linker><ProjectProperties><ProjectProperties Name="AutoShowDeps">False</ProjectProperties><ProjectProperties Name="ManagePaths">True</ProjectProperties><ProjectProperties Name="VerifyPackages">True</ProjectProperties></ProjectProperties><HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Count">3</HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Item0">$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\include;..\test;..</HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Item1">$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\include;..\test</HistoryLists_hlIncludePath><HistoryLists_hlIncludePath Name="Item2">$(BDS)\include;$(BDS)\include\dinkumware;$(BDS)\include\vcl;..\include</HistoryLists_hlIncludePath></HistoryLists_hlIncludePath><HistoryLists_hlILINK_LibraryPath><HistoryLists_hlILINK_LibraryPath Name="Count">1</HistoryLists_hlILINK_LibraryPath><HistoryLists_hlILINK_LibraryPath Name="Item0">$(BDS)\lib;$(BDS)\lib\obj;$(BDS)\lib\psdk;..\test</HistoryLists_hlILINK_LibraryPath><HistoryLists_hlILINK_LibraryPath Name="Item1">$(BDS)\lib;$(BDS)\lib\obj;$(BDS)\lib\psdk;..\test</HistoryLists_hlILINK_LibraryPath><HistoryLists_hlILINK_LibraryPath Name="Item2">$(BDS)\lib;$(BDS)\lib\obj;$(BDS)\lib\psdk;$(OUTPUTDIR);..\test</HistoryLists_hlILINK_LibraryPath></HistoryLists_hlILINK_LibraryPath><HistoryLists_hlDefines><HistoryLists_hlDefines Name="Count">2</HistoryLists_hlDefines><HistoryLists_hlDefines Name="Item0">NO_STRICT</HistoryLists_hlDefines><HistoryLists_hlDefines Name="Item1">STRICT</HistoryLists_hlDefines></HistoryLists_hlDefines></CPlusPlusBuilder.Personality></BorlandProject></BorlandProject>
+ </ProjectExtensions>
+ <Import Project="$(MSBuildBinPath)\Borland.Cpp.Targets" />
+ <ItemGroup>
+ <CppCompile Include="..\test\gtest_unittest.cc">
+ <BuildOrder>0</BuildOrder>
+ </CppCompile>
+ <CppCompile Include="gtest_link.cc">
+ <BuildOrder>1</BuildOrder>
+ </CppCompile>
+ <BuildConfiguration Include="Debug">
+ <Key>Cfg_1</Key>
+ </BuildConfiguration>
+ <BuildConfiguration Include="Release">
+ <Key>Cfg_2</Key>
+ </BuildConfiguration>
+ </ItemGroup>
+</Project> \ No newline at end of file
diff --git a/Source/ThirdParty/gtest/configure.ac b/Source/ThirdParty/gtest/configure.ac
new file mode 100644
index 000000000..1b9123746
--- /dev/null
+++ b/Source/ThirdParty/gtest/configure.ac
@@ -0,0 +1,67 @@
+m4_include(m4/acx_pthread.m4)
+
+# At this point, the Xcode project assumes the version string will be three
+# integers separated by periods and surrounded by square brackets (e.g.
+# "[1.0.1]"). It also asumes that there won't be any closing parenthesis
+# between "AC_INIT(" and the closing ")" including comments and strings.
+AC_INIT([Google C++ Testing Framework],
+ [1.5.0],
+ [googletestframework@googlegroups.com],
+ [gtest])
+
+# Provide various options to initialize the Autoconf and configure processes.
+AC_PREREQ([2.59])
+AC_CONFIG_SRCDIR([./COPYING])
+AC_CONFIG_AUX_DIR([build-aux])
+AC_CONFIG_HEADERS([build-aux/config.h])
+AC_CONFIG_FILES([Makefile])
+AC_CONFIG_FILES([scripts/gtest-config], [chmod +x scripts/gtest-config])
+
+# Initialize Automake with various options. We require at least v1.9, prevent
+# pedantic complaints about package files, and enable various distribution
+# targets.
+AM_INIT_AUTOMAKE([1.9 dist-bzip2 dist-zip foreign subdir-objects])
+
+# Check for programs used in building Google Test.
+AC_PROG_CC
+AC_PROG_CXX
+AC_LANG([C++])
+AC_PROG_LIBTOOL
+
+# TODO(chandlerc@google.com): Currently we aren't running the Python tests
+# against the interpreter detected by AM_PATH_PYTHON, and so we condition
+# HAVE_PYTHON by requiring "python" to be in the PATH, and that interpreter's
+# version to be >= 2.3. This will allow the scripts to use a "/usr/bin/env"
+# hashbang.
+PYTHON= # We *do not* allow the user to specify a python interpreter
+AC_PATH_PROG([PYTHON],[python],[:])
+AS_IF([test "$PYTHON" != ":"],
+ [AM_PYTHON_CHECK_VERSION([$PYTHON],[2.3],[:],[PYTHON=":"])])
+AM_CONDITIONAL([HAVE_PYTHON],[test "$PYTHON" != ":"])
+
+# Configure pthreads.
+AC_ARG_WITH([pthreads],
+ [AS_HELP_STRING([--with-pthreads],
+ [use pthreads (default is yes)])],
+ [with_pthreads=$withval],
+ [with_pthreads=check])
+
+have_pthreads=no
+AS_IF([test "x$with_pthreads" != "xno"],
+ [ACX_PTHREAD(
+ [],
+ [AS_IF([test "x$with_pthreads" != "xcheck"],
+ [AC_MSG_FAILURE(
+ [--with-pthreads was specified, but unable to be used])])])
+ have_pthreads="$acx_pthread_ok"])
+AM_CONDITIONAL([HAVE_PTHREADS],[test "x$have_pthreads" = "xyes"])
+AC_SUBST(PTHREAD_CFLAGS)
+AC_SUBST(PTHREAD_LIBS)
+
+# TODO(chandlerc@google.com) Check for the necessary system headers.
+
+# TODO(chandlerc@google.com) Check the types, structures, and other compiler
+# and architecture characteristics.
+
+# Output the generated files. No further autoconf macros may be used.
+AC_OUTPUT
diff --git a/Source/ThirdParty/gtest/include/gtest/gtest-param-test.h.pump b/Source/ThirdParty/gtest/include/gtest/gtest-param-test.h.pump
new file mode 100644
index 000000000..a23118827
--- /dev/null
+++ b/Source/ThirdParty/gtest/include/gtest/gtest-param-test.h.pump
@@ -0,0 +1,457 @@
+$$ -*- mode: c++; -*-
+$var n = 50 $$ Maximum length of Values arguments we want to support.
+$var maxtuple = 10 $$ Maximum number of Combine arguments we want to support.
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: vladl@google.com (Vlad Losev)
+//
+// Macros and functions for implementing parameterized tests
+// in Google C++ Testing Framework (Google Test)
+//
+// This file is generated by a SCRIPT. DO NOT EDIT BY HAND!
+//
+#ifndef GTEST_INCLUDE_GTEST_GTEST_PARAM_TEST_H_
+#define GTEST_INCLUDE_GTEST_GTEST_PARAM_TEST_H_
+
+
+// Value-parameterized tests allow you to test your code with different
+// parameters without writing multiple copies of the same test.
+//
+// Here is how you use value-parameterized tests:
+
+#if 0
+
+// To write value-parameterized tests, first you should define a fixture
+// class. It must be derived from testing::TestWithParam<T>, where T is
+// the type of your parameter values. TestWithParam<T> is itself derived
+// from testing::Test. T can be any copyable type. If it's a raw pointer,
+// you are responsible for managing the lifespan of the pointed-to values.
+
+class FooTest : public ::testing::TestWithParam<const char*> {
+ // You can implement all the usual class fixture members here.
+};
+
+// Then, use the TEST_P macro to define as many parameterized tests
+// for this fixture as you want. The _P suffix is for "parameterized"
+// or "pattern", whichever you prefer to think.
+
+TEST_P(FooTest, DoesBlah) {
+ // Inside a test, access the test parameter with the GetParam() method
+ // of the TestWithParam<T> class:
+ EXPECT_TRUE(foo.Blah(GetParam()));
+ ...
+}
+
+TEST_P(FooTest, HasBlahBlah) {
+ ...
+}
+
+// Finally, you can use INSTANTIATE_TEST_CASE_P to instantiate the test
+// case with any set of parameters you want. Google Test defines a number
+// of functions for generating test parameters. They return what we call
+// (surprise!) parameter generators. Here is a summary of them, which
+// are all in the testing namespace:
+//
+//
+// Range(begin, end [, step]) - Yields values {begin, begin+step,
+// begin+step+step, ...}. The values do not
+// include end. step defaults to 1.
+// Values(v1, v2, ..., vN) - Yields values {v1, v2, ..., vN}.
+// ValuesIn(container) - Yields values from a C-style array, an STL
+// ValuesIn(begin,end) container, or an iterator range [begin, end).
+// Bool() - Yields sequence {false, true}.
+// Combine(g1, g2, ..., gN) - Yields all combinations (the Cartesian product
+// for the math savvy) of the values generated
+// by the N generators.
+//
+// For more details, see comments at the definitions of these functions below
+// in this file.
+//
+// The following statement will instantiate tests from the FooTest test case
+// each with parameter values "meeny", "miny", and "moe".
+
+INSTANTIATE_TEST_CASE_P(InstantiationName,
+ FooTest,
+ Values("meeny", "miny", "moe"));
+
+// To distinguish different instances of the pattern (yes, you
+// can instantiate it more than once), the first argument to the
+// INSTANTIATE_TEST_CASE_P macro is a prefix that will be added to the
+// actual test case name. Remember to pick unique prefixes for different
+// instantiations. The tests from the instantiation above will have
+// these names:
+//
+// * InstantiationName/FooTest.DoesBlah/0 for "meeny"
+// * InstantiationName/FooTest.DoesBlah/1 for "miny"
+// * InstantiationName/FooTest.DoesBlah/2 for "moe"
+// * InstantiationName/FooTest.HasBlahBlah/0 for "meeny"
+// * InstantiationName/FooTest.HasBlahBlah/1 for "miny"
+// * InstantiationName/FooTest.HasBlahBlah/2 for "moe"
+//
+// You can use these names in --gtest_filter.
+//
+// This statement will instantiate all tests from FooTest again, each
+// with parameter values "cat" and "dog":
+
+const char* pets[] = {"cat", "dog"};
+INSTANTIATE_TEST_CASE_P(AnotherInstantiationName, FooTest, ValuesIn(pets));
+
+// The tests from the instantiation above will have these names:
+//
+// * AnotherInstantiationName/FooTest.DoesBlah/0 for "cat"
+// * AnotherInstantiationName/FooTest.DoesBlah/1 for "dog"
+// * AnotherInstantiationName/FooTest.HasBlahBlah/0 for "cat"
+// * AnotherInstantiationName/FooTest.HasBlahBlah/1 for "dog"
+//
+// Please note that INSTANTIATE_TEST_CASE_P will instantiate all tests
+// in the given test case, whether their definitions come before or
+// AFTER the INSTANTIATE_TEST_CASE_P statement.
+//
+// Please also note that generator expressions are evaluated in
+// RUN_ALL_TESTS(), after main() has started. This allows the parameter
+// list to be computed based on command line parameters.
+//
+// You can see samples/sample7_unittest.cc and samples/sample8_unittest.cc
+// for more examples.
+//
+// In the future, we plan to publish the API for defining new parameter
+// generators. But for now this interface remains part of the internal
+// implementation and is subject to change.
+
+#endif // 0
+
+#include <gtest/internal/gtest-port.h>
+
+#if !GTEST_OS_SYMBIAN
+#include <utility>
+#endif
+
+// scripts/fuse_gtest.py depends on gtest's own header being #included
+// *unconditionally*. Therefore these #includes cannot be moved
+// inside #if GTEST_HAS_PARAM_TEST.
+#include <gtest/internal/gtest-internal.h>
+#include <gtest/internal/gtest-param-util.h>
+#include <gtest/internal/gtest-param-util-generated.h>
+
+#if GTEST_HAS_PARAM_TEST
+
+namespace testing {
+
+// Functions producing parameter generators.
+//
+// Google Test uses these generators to produce parameters for value-
+// parameterized tests. When a parameterized test case is instantiated
+// with a particular generator, Google Test creates and runs tests
+// for each element in the sequence produced by the generator.
+//
+// In the following sample, tests from test case FooTest are each
+// instantiated three times with parameter values 3, 5, and 8:
+//
+// class FooTest : public TestWithParam<int> { ... };
+//
+// TEST_P(FooTest, TestThis) {
+// }
+// TEST_P(FooTest, TestThat) {
+// }
+// INSTANTIATE_TEST_CASE_P(TestSequence, FooTest, Values(3, 5, 8));
+//
+
+// Range() returns generators providing sequences of values in a range.
+//
+// Synopsis:
+// Range(start, end)
+// - returns a generator producing a sequence of values {start, start+1,
+// start+2, ..., }.
+// Range(start, end, step)
+// - returns a generator producing a sequence of values {start, start+step,
+// start+step+step, ..., }.
+// Notes:
+// * The generated sequences never include end. For example, Range(1, 5)
+// returns a generator producing a sequence {1, 2, 3, 4}. Range(1, 9, 2)
+// returns a generator producing {1, 3, 5, 7}.
+// * start and end must have the same type. That type may be any integral or
+// floating-point type or a user defined type satisfying these conditions:
+// * It must be assignable (have operator=() defined).
+// * It must have operator+() (operator+(int-compatible type) for
+// two-operand version).
+// * It must have operator<() defined.
+// Elements in the resulting sequences will also have that type.
+// * Condition start < end must be satisfied in order for resulting sequences
+// to contain any elements.
+//
+template <typename T, typename IncrementT>
+internal::ParamGenerator<T> Range(T start, T end, IncrementT step) {
+ return internal::ParamGenerator<T>(
+ new internal::RangeGenerator<T, IncrementT>(start, end, step));
+}
+
+template <typename T>
+internal::ParamGenerator<T> Range(T start, T end) {
+ return Range(start, end, 1);
+}
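+
+// For example, assuming a fixture FooTest derived from TestWithParam<int>
+// (as in the overview above), the following instantiates its tests with
+// the even values 0, 2, 4, 6, and 8:
+//
+//   INSTANTIATE_TEST_CASE_P(EvenValues, FooTest, Range(0, 10, 2));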
+
+// The ValuesIn() function allows generating tests with parameters coming
+// from a container.
+//
+// Synopsis:
+// ValuesIn(const T (&array)[N])
+// - returns a generator producing sequences with elements from
+// a C-style array.
+// ValuesIn(const Container& container)
+// - returns a generator producing sequences with elements from
+// an STL-style container.
+// ValuesIn(Iterator begin, Iterator end)
+// - returns a generator producing sequences with elements from
+// a range [begin, end) defined by a pair of STL-style iterators. These
+// iterators can also be plain C pointers.
+//
+// Please note that ValuesIn copies the values from the containers
+// passed in and keeps them to generate tests in RUN_ALL_TESTS().
+//
+// Examples:
+//
+// This instantiates tests from test case StringTest
+// each with C-string values of "foo", "bar", and "baz":
+//
+// const char* strings[] = {"foo", "bar", "baz"};
+// INSTANTIATE_TEST_CASE_P(StringSequence, StringTest, ValuesIn(strings));
+//
+// This instantiates tests from test case StlStringTest
+// each with STL strings with values "a" and "b":
+//
+// ::std::vector< ::std::string> GetParameterStrings() {
+// ::std::vector< ::std::string> v;
+// v.push_back("a");
+// v.push_back("b");
+// return v;
+// }
+//
+// INSTANTIATE_TEST_CASE_P(CharSequence,
+// StlStringTest,
+// ValuesIn(GetParameterStrings()));
+//
+//
+// This will also instantiate tests from CharTest
+// each with parameter values 'a' and 'b':
+//
+// ::std::list<char> GetParameterChars() {
+// ::std::list<char> list;
+// list.push_back('a');
+// list.push_back('b');
+// return list;
+// }
+// ::std::list<char> l = GetParameterChars();
+// INSTANTIATE_TEST_CASE_P(CharSequence2,
+// CharTest,
+// ValuesIn(l.begin(), l.end()));
+//
+template <typename ForwardIterator>
+internal::ParamGenerator<
+ typename ::std::iterator_traits<ForwardIterator>::value_type> ValuesIn(
+ ForwardIterator begin,
+ ForwardIterator end) {
+ typedef typename ::std::iterator_traits<ForwardIterator>::value_type
+ ParamType;
+ return internal::ParamGenerator<ParamType>(
+ new internal::ValuesInIteratorRangeGenerator<ParamType>(begin, end));
+}
+
+template <typename T, size_t N>
+internal::ParamGenerator<T> ValuesIn(const T (&array)[N]) {
+ return ValuesIn(array, array + N);
+}
+
+template <class Container>
+internal::ParamGenerator<typename Container::value_type> ValuesIn(
+ const Container& container) {
+ return ValuesIn(container.begin(), container.end());
+}
+
+// Values() allows generating tests from an explicitly specified list of
+// parameters.
+//
+// Synopsis:
+// Values(T v1, T v2, ..., T vN)
+// - returns a generator producing sequences with elements v1, v2, ..., vN.
+//
+// For example, this instantiates tests from test case BarTest each
+// with values "one", "two", and "three":
+//
+// INSTANTIATE_TEST_CASE_P(NumSequence, BarTest, Values("one", "two", "three"));
+//
+// This instantiates tests from test case BazTest each with values 1, 2, 3.5.
+// The exact type of values will depend on the type of parameter in BazTest.
+//
+// INSTANTIATE_TEST_CASE_P(FloatingNumbers, BazTest, Values(1, 2, 3.5));
+//
+// Currently, Values() supports from 1 to $n parameters.
+//
+$range i 1..n
+$for i [[
+$range j 1..i
+
+template <$for j, [[typename T$j]]>
+internal::ValueArray$i<$for j, [[T$j]]> Values($for j, [[T$j v$j]]) {
+ return internal::ValueArray$i<$for j, [[T$j]]>($for j, [[v$j]]);
+}
+
+]]
+
+// Bool() allows generating tests with parameters from the set {false, true}.
+//
+// Synopsis:
+// Bool()
+// - returns a generator producing sequences with elements {false, true}.
+//
+// It is useful when testing code that depends on Boolean flags. Combinations
+// of multiple flags can be tested when several Bool()'s are combined using
+// the Combine() function.
+//
+// In the following example all tests in the test case FlagDependentTest
+// will be instantiated twice with parameters false and true.
+//
+// class FlagDependentTest : public testing::TestWithParam<bool> {
+// virtual void SetUp() {
+// external_flag = GetParam();
+// }
+// }
+// INSTANTIATE_TEST_CASE_P(BoolSequence, FlagDependentTest, Bool());
+//
+inline internal::ParamGenerator<bool> Bool() {
+ return Values(false, true);
+}
+
+#if GTEST_HAS_COMBINE
+// Combine() allows the user to combine two or more sequences to produce
+// values of a Cartesian product of those sequences' elements.
+//
+// Synopsis:
+// Combine(gen1, gen2, ..., genN)
+// - returns a generator producing sequences with elements coming from
+// the Cartesian product of elements from the sequences generated by
+// gen1, gen2, ..., genN. The sequence elements will have a type of
+// tuple<T1, T2, ..., TN> where T1, T2, ..., TN are the types
+//     of elements from sequences produced by gen1, gen2, ..., genN.
+//
+// Combine can have up to $maxtuple arguments. This number is currently limited
+// by the maximum number of elements in the tuple implementation used by Google
+// Test.
+//
+// Example:
+//
+// This will instantiate tests in test case AnimalTest each one with
+// the parameter values tuple("cat", BLACK), tuple("cat", WHITE),
+// tuple("dog", BLACK), and tuple("dog", WHITE):
+//
+// enum Color { BLACK, GRAY, WHITE };
+// class AnimalTest
+// : public testing::TestWithParam<tuple<const char*, Color> > {...};
+//
+// TEST_P(AnimalTest, AnimalLooksNice) {...}
+//
+// INSTANTIATE_TEST_CASE_P(AnimalVariations, AnimalTest,
+// Combine(Values("cat", "dog"),
+// Values(BLACK, WHITE)));
+//
+// This will instantiate tests in FlagDependentTest with all variations of two
+// Boolean flags:
+//
+// class FlagDependentTest
+//     : public testing::TestWithParam<tuple<bool, bool> > {
+// virtual void SetUp() {
+// // Assigns external_flag_1 and external_flag_2 values from the tuple.
+// tie(external_flag_1, external_flag_2) = GetParam();
+// }
+// };
+//
+// TEST_P(FlagDependentTest, TestFeature1) {
+// // Test your code using external_flag_1 and external_flag_2 here.
+// }
+// INSTANTIATE_TEST_CASE_P(TwoBoolSequence, FlagDependentTest,
+// Combine(Bool(), Bool()));
+//
+$range i 2..maxtuple
+$for i [[
+$range j 1..i
+
+template <$for j, [[typename Generator$j]]>
+internal::CartesianProductHolder$i<$for j, [[Generator$j]]> Combine(
+ $for j, [[const Generator$j& g$j]]) {
+ return internal::CartesianProductHolder$i<$for j, [[Generator$j]]>(
+ $for j, [[g$j]]);
+}
+
+]]
+#endif // GTEST_HAS_COMBINE
+
+
+
+#define TEST_P(test_case_name, test_name) \
+ class GTEST_TEST_CLASS_NAME_(test_case_name, test_name) \
+ : public test_case_name { \
+ public: \
+ GTEST_TEST_CLASS_NAME_(test_case_name, test_name)() {} \
+ virtual void TestBody(); \
+ private: \
+ static int AddToRegistry() { \
+ ::testing::UnitTest::GetInstance()->parameterized_test_registry(). \
+ GetTestCasePatternHolder<test_case_name>(\
+ #test_case_name, __FILE__, __LINE__)->AddTestPattern(\
+ #test_case_name, \
+ #test_name, \
+ new ::testing::internal::TestMetaFactory< \
+ GTEST_TEST_CLASS_NAME_(test_case_name, test_name)>()); \
+ return 0; \
+ } \
+ static int gtest_registering_dummy_; \
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(\
+ GTEST_TEST_CLASS_NAME_(test_case_name, test_name)); \
+ }; \
+ int GTEST_TEST_CLASS_NAME_(test_case_name, \
+ test_name)::gtest_registering_dummy_ = \
+ GTEST_TEST_CLASS_NAME_(test_case_name, test_name)::AddToRegistry(); \
+ void GTEST_TEST_CLASS_NAME_(test_case_name, test_name)::TestBody()
+
+#define INSTANTIATE_TEST_CASE_P(prefix, test_case_name, generator) \
+ ::testing::internal::ParamGenerator<test_case_name::ParamType> \
+ gtest_##prefix##test_case_name##_EvalGenerator_() { return generator; } \
+ int gtest_##prefix##test_case_name##_dummy_ = \
+ ::testing::UnitTest::GetInstance()->parameterized_test_registry(). \
+ GetTestCasePatternHolder<test_case_name>(\
+ #test_case_name, __FILE__, __LINE__)->AddTestCaseInstantiation(\
+ #prefix, \
+ &gtest_##prefix##test_case_name##_EvalGenerator_, \
+ __FILE__, __LINE__)
+
+} // namespace testing
+
+#endif // GTEST_HAS_PARAM_TEST
+
+#endif // GTEST_INCLUDE_GTEST_GTEST_PARAM_TEST_H_
diff --git a/Source/ThirdParty/gtest/include/gtest/internal/gtest-param-util-generated.h.pump b/Source/ThirdParty/gtest/include/gtest/internal/gtest-param-util-generated.h.pump
new file mode 100644
index 000000000..baedfbc2d
--- /dev/null
+++ b/Source/ThirdParty/gtest/include/gtest/internal/gtest-param-util-generated.h.pump
@@ -0,0 +1,301 @@
+$$ -*- mode: c++; -*-
+$var n = 50 $$ Maximum length of Values arguments we want to support.
+$var maxtuple = 10 $$ Maximum number of Combine arguments we want to support.
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: vladl@google.com (Vlad Losev)
+
+// Type and function utilities for implementing parameterized tests.
+// This file is generated by a SCRIPT. DO NOT EDIT BY HAND!
+//
+// Currently Google Test supports at most $n arguments in Values,
+// and at most $maxtuple arguments in Combine. Please contact
+// googletestframework@googlegroups.com if you need more.
+// Please note that the number of arguments to Combine is limited
+// by the maximum arity of the implementation of tr1::tuple which is
+// currently set at $maxtuple.
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_GENERATED_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_GENERATED_H_
+
+// scripts/fuse_gtest.py depends on gtest's own header being #included
+// *unconditionally*. Therefore these #includes cannot be moved
+// inside #if GTEST_HAS_PARAM_TEST.
+#include <gtest/internal/gtest-param-util.h>
+#include <gtest/internal/gtest-port.h>
+
+#if GTEST_HAS_PARAM_TEST
+
+namespace testing {
+
+// Forward declarations of ValuesIn(), which is implemented in
+// include/gtest/gtest-param-test.h.
+template <typename ForwardIterator>
+internal::ParamGenerator<
+ typename ::std::iterator_traits<ForwardIterator>::value_type> ValuesIn(
+ ForwardIterator begin, ForwardIterator end);
+
+template <typename T, size_t N>
+internal::ParamGenerator<T> ValuesIn(const T (&array)[N]);
+
+template <class Container>
+internal::ParamGenerator<typename Container::value_type> ValuesIn(
+ const Container& container);
+
+namespace internal {
+
+// Used in the Values() function to provide polymorphic capabilities.
+template <typename T1>
+class ValueArray1 {
+ public:
+ explicit ValueArray1(T1 v1) : v1_(v1) {}
+
+ template <typename T>
+ operator ParamGenerator<T>() const { return ValuesIn(&v1_, &v1_ + 1); }
+
+ private:
+ // No implementation - assignment is unsupported.
+ void operator=(const ValueArray1& other);
+
+ const T1 v1_;
+};
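+
+// For example, the ValueArray3 object returned by Values(1, 5, 8) converts
+// implicitly to ParamGenerator<int> or to ParamGenerator<double>, so one
+// Values() call can parameterize fixtures with different parameter types:
+//
+//   ParamGenerator<int> gen = Values(1, 5, 8);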
+
+$range i 2..n
+$for i [[
+$range j 1..i
+
+template <$for j, [[typename T$j]]>
+class ValueArray$i {
+ public:
+ ValueArray$i($for j, [[T$j v$j]]) : $for j, [[v$(j)_(v$j)]] {}
+
+ template <typename T>
+ operator ParamGenerator<T>() const {
+ const T array[] = {$for j, [[v$(j)_]]};
+ return ValuesIn(array);
+ }
+
+ private:
+ // No implementation - assignment is unsupported.
+ void operator=(const ValueArray$i& other);
+
+$for j [[
+
+ const T$j v$(j)_;
+]]
+
+};
+
+]]
+
+#if GTEST_HAS_COMBINE
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// Generates values from the Cartesian product of values produced
+// by the argument generators.
+//
+$range i 2..maxtuple
+$for i [[
+$range j 1..i
+$range k 2..i
+
+template <$for j, [[typename T$j]]>
+class CartesianProductGenerator$i
+ : public ParamGeneratorInterface< ::std::tr1::tuple<$for j, [[T$j]]> > {
+ public:
+ typedef ::std::tr1::tuple<$for j, [[T$j]]> ParamType;
+
+ CartesianProductGenerator$i($for j, [[const ParamGenerator<T$j>& g$j]])
+ : $for j, [[g$(j)_(g$j)]] {}
+ virtual ~CartesianProductGenerator$i() {}
+
+ virtual ParamIteratorInterface<ParamType>* Begin() const {
+ return new Iterator(this, $for j, [[g$(j)_, g$(j)_.begin()]]);
+ }
+ virtual ParamIteratorInterface<ParamType>* End() const {
+ return new Iterator(this, $for j, [[g$(j)_, g$(j)_.end()]]);
+ }
+
+ private:
+ class Iterator : public ParamIteratorInterface<ParamType> {
+ public:
+ Iterator(const ParamGeneratorInterface<ParamType>* base, $for j, [[
+
+ const ParamGenerator<T$j>& g$j,
+ const typename ParamGenerator<T$j>::iterator& current$(j)]])
+ : base_(base),
+$for j, [[
+
+ begin$(j)_(g$j.begin()), end$(j)_(g$j.end()), current$(j)_(current$j)
+]] {
+ ComputeCurrentValue();
+ }
+ virtual ~Iterator() {}
+
+ virtual const ParamGeneratorInterface<ParamType>* BaseGenerator() const {
+ return base_;
+ }
+    // Advance should not be called on beyond-the-end iterators,
+    // so no component iterator may be beyond the end of its range, either.
+ virtual void Advance() {
+ assert(!AtEnd());
+ ++current$(i)_;
+
+$for k [[
+ if (current$(i+2-k)_ == end$(i+2-k)_) {
+ current$(i+2-k)_ = begin$(i+2-k)_;
+ ++current$(i+2-k-1)_;
+ }
+
+]]
+ ComputeCurrentValue();
+ }
+ virtual ParamIteratorInterface<ParamType>* Clone() const {
+ return new Iterator(*this);
+ }
+ virtual const ParamType* Current() const { return &current_value_; }
+ virtual bool Equals(const ParamIteratorInterface<ParamType>& other) const {
+ // Having the same base generator guarantees that the other
+ // iterator is of the same type and we can downcast.
+ GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+ << "The program attempted to compare iterators "
+ << "from different generators." << std::endl;
+ const Iterator* typed_other =
+ CheckedDowncastToActualType<const Iterator>(&other);
+ // We must report iterators equal if they both point beyond their
+ // respective ranges. That can happen in a variety of fashions,
+ // so we have to consult AtEnd().
+ return (AtEnd() && typed_other->AtEnd()) ||
+ ($for j && [[
+
+ current$(j)_ == typed_other->current$(j)_
+]]);
+ }
+
+ private:
+ Iterator(const Iterator& other)
+ : base_(other.base_), $for j, [[
+
+ begin$(j)_(other.begin$(j)_),
+ end$(j)_(other.end$(j)_),
+ current$(j)_(other.current$(j)_)
+]] {
+ ComputeCurrentValue();
+ }
+
+ void ComputeCurrentValue() {
+ if (!AtEnd())
+ current_value_ = ParamType($for j, [[*current$(j)_]]);
+ }
+ bool AtEnd() const {
+      // We must report the iterator as being past the end of the range when
+      // any of the component iterators has reached the end of its range.
+ return
+$for j || [[
+
+ current$(j)_ == end$(j)_
+]];
+ }
+
+ // No implementation - assignment is unsupported.
+ void operator=(const Iterator& other);
+
+ const ParamGeneratorInterface<ParamType>* const base_;
+ // begin[i]_ and end[i]_ define the i-th range that Iterator traverses.
+ // current[i]_ is the actual traversing iterator.
+$for j [[
+
+ const typename ParamGenerator<T$j>::iterator begin$(j)_;
+ const typename ParamGenerator<T$j>::iterator end$(j)_;
+ typename ParamGenerator<T$j>::iterator current$(j)_;
+]]
+
+ ParamType current_value_;
+ }; // class CartesianProductGenerator$i::Iterator
+
+ // No implementation - assignment is unsupported.
+ void operator=(const CartesianProductGenerator$i& other);
+
+
+$for j [[
+ const ParamGenerator<T$j> g$(j)_;
+
+]]
+}; // class CartesianProductGenerator$i
+
+
+]]
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// Helper classes providing Combine() with polymorphic features. They allow
+// casting CartesianProductGeneratorN<T> to ParamGenerator<U> if T is
+// convertible to U.
+//
+$range i 2..maxtuple
+$for i [[
+$range j 1..i
+
+template <$for j, [[class Generator$j]]>
+class CartesianProductHolder$i {
+ public:
+CartesianProductHolder$i($for j, [[const Generator$j& g$j]])
+ : $for j, [[g$(j)_(g$j)]] {}
+ template <$for j, [[typename T$j]]>
+ operator ParamGenerator< ::std::tr1::tuple<$for j, [[T$j]]> >() const {
+ return ParamGenerator< ::std::tr1::tuple<$for j, [[T$j]]> >(
+ new CartesianProductGenerator$i<$for j, [[T$j]]>(
+$for j,[[
+
+ static_cast<ParamGenerator<T$j> >(g$(j)_)
+]]));
+ }
+
+ private:
+ // No implementation - assignment is unsupported.
+ void operator=(const CartesianProductHolder$i& other);
+
+
+$for j [[
+ const Generator$j g$(j)_;
+
+]]
+}; // class CartesianProductHolder$i
+
+]]
+
+#endif // GTEST_HAS_COMBINE
+
+} // namespace internal
+} // namespace testing
+
+#endif // GTEST_HAS_PARAM_TEST
+
+#endif // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_GENERATED_H_
diff --git a/Source/ThirdParty/gtest/include/gtest/internal/gtest-tuple.h.pump b/Source/ThirdParty/gtest/include/gtest/internal/gtest-tuple.h.pump
new file mode 100644
index 000000000..85ebc8062
--- /dev/null
+++ b/Source/ThirdParty/gtest/include/gtest/internal/gtest-tuple.h.pump
@@ -0,0 +1,336 @@
+$$ -*- mode: c++; -*-
+$var n = 10 $$ Maximum number of tuple fields we want to support.
+$$ This meta comment fixes auto-indentation in Emacs. }}
+// Copyright 2009 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// Implements a subset of TR1 tuple needed by Google Test and Google Mock.
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TUPLE_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TUPLE_H_
+
+#include <utility> // For ::std::pair.
+
+// The compiler used in Symbian has a bug that prevents us from declaring the
+// tuple template as a friend (it complains that tuple is redefined). This
+// hack bypasses the bug by declaring the members that should otherwise be
+// private as public.
+// Sun Studio versions < 12 also have the above bug.
+#if defined(__SYMBIAN32__) || (defined(__SUNPRO_CC) && __SUNPRO_CC < 0x590)
+#define GTEST_DECLARE_TUPLE_AS_FRIEND_ public:
+#else
+#define GTEST_DECLARE_TUPLE_AS_FRIEND_ \
+ template <GTEST_$(n)_TYPENAMES_(U)> friend class tuple; \
+ private:
+#endif
+
+
+$range i 0..n-1
+$range j 0..n
+$range k 1..n
+// GTEST_n_TUPLE_(T) is the type of an n-tuple.
+#define GTEST_0_TUPLE_(T) tuple<>
+
+$for k [[
+$range m 0..k-1
+$range m2 k..n-1
+#define GTEST_$(k)_TUPLE_(T) tuple<$for m, [[T##$m]]$for m2 [[, void]]>
+
+]]
+
+// GTEST_n_TYPENAMES_(T) declares a list of n typenames.
+
+$for j [[
+$range m 0..j-1
+#define GTEST_$(j)_TYPENAMES_(T) $for m, [[typename T##$m]]
+
+
+]]
+
+// In theory, defining stuff in the ::std namespace is undefined
+// behavior. We can do this as we are playing the role of a standard
+// library vendor.
+namespace std {
+namespace tr1 {
+
+template <$for i, [[typename T$i = void]]>
+class tuple;
+
+// Anything in namespace gtest_internal is Google Test's INTERNAL
+// IMPLEMENTATION DETAIL and MUST NOT BE USED DIRECTLY in user code.
+namespace gtest_internal {
+
+// ByRef<T>::type is T if T is a reference; otherwise it's const T&.
+template <typename T>
+struct ByRef { typedef const T& type; }; // NOLINT
+template <typename T>
+struct ByRef<T&> { typedef T& type; }; // NOLINT
+
+// A handy wrapper for ByRef.
+#define GTEST_BY_REF_(T) typename ::std::tr1::gtest_internal::ByRef<T>::type
+
+// AddRef<T>::type is T if T is a reference; otherwise it's T&. This
+// is the same as tr1::add_reference<T>::type.
+template <typename T>
+struct AddRef { typedef T& type; }; // NOLINT
+template <typename T>
+struct AddRef<T&> { typedef T& type; }; // NOLINT
+
+// A handy wrapper for AddRef.
+#define GTEST_ADD_REF_(T) typename ::std::tr1::gtest_internal::AddRef<T>::type
+
+// A helper for implementing get<k>().
+template <int k> class Get;
+
+// A helper for implementing tuple_element<k, T>. kIndexValid is true
+// iff k < the number of fields in tuple type T.
+template <bool kIndexValid, int kIndex, class Tuple>
+struct TupleElement;
+
+
+$for i [[
+template <GTEST_$(n)_TYPENAMES_(T)>
+struct TupleElement<true, $i, GTEST_$(n)_TUPLE_(T)> [[]]
+{ typedef T$i type; };
+
+
+]]
+} // namespace gtest_internal
+
+template <>
+class tuple<> {
+ public:
+ tuple() {}
+ tuple(const tuple& /* t */) {}
+ tuple& operator=(const tuple& /* t */) { return *this; }
+};
+
+
+$for k [[
+$range m 0..k-1
+template <GTEST_$(k)_TYPENAMES_(T)>
+class $if k < n [[GTEST_$(k)_TUPLE_(T)]] $else [[tuple]] {
+ public:
+ template <int k> friend class gtest_internal::Get;
+
+ tuple() : $for m, [[f$(m)_()]] {}
+
+ explicit tuple($for m, [[GTEST_BY_REF_(T$m) f$m]]) : [[]]
+$for m, [[f$(m)_(f$m)]] {}
+
+ tuple(const tuple& t) : $for m, [[f$(m)_(t.f$(m)_)]] {}
+
+ template <GTEST_$(k)_TYPENAMES_(U)>
+ tuple(const GTEST_$(k)_TUPLE_(U)& t) : $for m, [[f$(m)_(t.f$(m)_)]] {}
+
+$if k == 2 [[
+ template <typename U0, typename U1>
+ tuple(const ::std::pair<U0, U1>& p) : f0_(p.first), f1_(p.second) {}
+
+]]
+
+ tuple& operator=(const tuple& t) { return CopyFrom(t); }
+
+ template <GTEST_$(k)_TYPENAMES_(U)>
+ tuple& operator=(const GTEST_$(k)_TUPLE_(U)& t) {
+ return CopyFrom(t);
+ }
+
+$if k == 2 [[
+ template <typename U0, typename U1>
+ tuple& operator=(const ::std::pair<U0, U1>& p) {
+ f0_ = p.first;
+ f1_ = p.second;
+ return *this;
+ }
+
+]]
+
+ GTEST_DECLARE_TUPLE_AS_FRIEND_
+
+ template <GTEST_$(k)_TYPENAMES_(U)>
+ tuple& CopyFrom(const GTEST_$(k)_TUPLE_(U)& t) {
+
+$for m [[
+ f$(m)_ = t.f$(m)_;
+
+]]
+ return *this;
+ }
+
+
+$for m [[
+ T$m f$(m)_;
+
+]]
+};
+
+
+]]
+// 6.1.3.2 Tuple creation functions.
+
+// Known limitations: we don't support passing an
+// std::tr1::reference_wrapper<T> to make_tuple(). And we don't
+// implement tie().
+
+inline tuple<> make_tuple() { return tuple<>(); }
+
+$for k [[
+$range m 0..k-1
+
+template <GTEST_$(k)_TYPENAMES_(T)>
+inline GTEST_$(k)_TUPLE_(T) make_tuple($for m, [[const T$m& f$m]]) {
+ return GTEST_$(k)_TUPLE_(T)($for m, [[f$m]]);
+}
+
+]]
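+
+// For example, given int i = 42 and char c = 'x', make_tuple(i, c) builds a
+// 2-tuple whose field types are deduced from the arguments:
+//
+//   ::std::tr1::tuple<int, char> t = make_tuple(i, c);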
+
+// 6.1.3.3 Tuple helper classes.
+
+template <typename Tuple> struct tuple_size;
+
+
+$for j [[
+template <GTEST_$(j)_TYPENAMES_(T)>
+struct tuple_size<GTEST_$(j)_TUPLE_(T)> { static const int value = $j; };
+
+
+]]
+template <int k, class Tuple>
+struct tuple_element {
+ typedef typename gtest_internal::TupleElement<
+ k < (tuple_size<Tuple>::value), k, Tuple>::type type;
+};
+
+#define GTEST_TUPLE_ELEMENT_(k, Tuple) typename tuple_element<k, Tuple >::type
+
+// 6.1.3.4 Element access.
+
+namespace gtest_internal {
+
+
+$for i [[
+template <>
+class Get<$i> {
+ public:
+ template <class Tuple>
+ static GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_($i, Tuple))
+ Field(Tuple& t) { return t.f$(i)_; } // NOLINT
+
+ template <class Tuple>
+ static GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_($i, Tuple))
+ ConstField(const Tuple& t) { return t.f$(i)_; }
+};
+
+
+]]
+} // namespace gtest_internal
+
+template <int k, GTEST_$(n)_TYPENAMES_(T)>
+GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(k, GTEST_$(n)_TUPLE_(T)))
+get(GTEST_$(n)_TUPLE_(T)& t) {
+ return gtest_internal::Get<k>::Field(t);
+}
+
+template <int k, GTEST_$(n)_TYPENAMES_(T)>
+GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(k, GTEST_$(n)_TUPLE_(T)))
+get(const GTEST_$(n)_TUPLE_(T)& t) {
+ return gtest_internal::Get<k>::ConstField(t);
+}
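+
+// For example, with the tuple t from the make_tuple() example above,
+// get<0>(t) returns 42 and get<1>(t) returns 'x'. The non-const overload
+// returns a reference, so get<0>(t) = 0 modifies the first field in place.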
+
+// 6.1.3.5 Relational operators
+
+// We only implement == and !=, as we don't have a need for the rest yet.
+
+namespace gtest_internal {
+
+// SameSizeTuplePrefixComparator<k, k>::Eq(t1, t2) returns true if the
+// first k fields of t1 equal the first k fields of t2.
+// SameSizeTuplePrefixComparator<k1, k2> would be a compiler error if
+// k1 != k2.
+template <int kSize1, int kSize2>
+struct SameSizeTuplePrefixComparator;
+
+template <>
+struct SameSizeTuplePrefixComparator<0, 0> {
+ template <class Tuple1, class Tuple2>
+ static bool Eq(const Tuple1& /* t1 */, const Tuple2& /* t2 */) {
+ return true;
+ }
+};
+
+template <int k>
+struct SameSizeTuplePrefixComparator<k, k> {
+ template <class Tuple1, class Tuple2>
+ static bool Eq(const Tuple1& t1, const Tuple2& t2) {
+ return SameSizeTuplePrefixComparator<k - 1, k - 1>::Eq(t1, t2) &&
+ ::std::tr1::get<k - 1>(t1) == ::std::tr1::get<k - 1>(t2);
+ }
+};
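+
+// For example, for two 2-tuples t1 and t2, Eq() unrolls at compile time to
+//
+//   true && get<0>(t1) == get<0>(t2) && get<1>(t1) == get<1>(t2)
+//
+// with the <0, 0> specialization above terminating the recursion.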
+
+} // namespace gtest_internal
+
+template <GTEST_$(n)_TYPENAMES_(T), GTEST_$(n)_TYPENAMES_(U)>
+inline bool operator==(const GTEST_$(n)_TUPLE_(T)& t,
+ const GTEST_$(n)_TUPLE_(U)& u) {
+ return gtest_internal::SameSizeTuplePrefixComparator<
+ tuple_size<GTEST_$(n)_TUPLE_(T)>::value,
+ tuple_size<GTEST_$(n)_TUPLE_(U)>::value>::Eq(t, u);
+}
+
+template <GTEST_$(n)_TYPENAMES_(T), GTEST_$(n)_TYPENAMES_(U)>
+inline bool operator!=(const GTEST_$(n)_TUPLE_(T)& t,
+ const GTEST_$(n)_TUPLE_(U)& u) { return !(t == u); }
+
+// 6.1.4 Pairs.
+// Unimplemented.
+
+} // namespace tr1
+} // namespace std
+
+
+$for j [[
+#undef GTEST_$(j)_TUPLE_
+
+]]
+
+
+$for j [[
+#undef GTEST_$(j)_TYPENAMES_
+
+]]
+
+#undef GTEST_DECLARE_TUPLE_AS_FRIEND_
+#undef GTEST_BY_REF_
+#undef GTEST_ADD_REF_
+#undef GTEST_TUPLE_ELEMENT_
+
+#endif // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TUPLE_H_
diff --git a/Source/ThirdParty/gtest/include/gtest/internal/gtest-type-util.h.pump b/Source/ThirdParty/gtest/include/gtest/internal/gtest-type-util.h.pump
new file mode 100644
index 000000000..5aed1e554
--- /dev/null
+++ b/Source/ThirdParty/gtest/include/gtest/internal/gtest-type-util.h.pump
@@ -0,0 +1,287 @@
+$$ -*- mode: c++; -*-
+$var n = 50 $$ Maximum length of type lists we want to support.
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// Type utilities needed for implementing typed and type-parameterized
+// tests. This file is generated by a SCRIPT. DO NOT EDIT BY HAND!
+//
+// Currently we support at most $n types in a list, and at most $n
+// type-parameterized tests in one type-parameterized test case.
+// Please contact googletestframework@googlegroups.com if you need
+// more.
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TYPE_UTIL_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TYPE_UTIL_H_
+
+#include <gtest/internal/gtest-port.h>
+#include <gtest/internal/gtest-string.h>
+
+#if GTEST_HAS_TYPED_TEST || GTEST_HAS_TYPED_TEST_P
+
+// #ifdef __GNUC__ is too general here. It is possible to use gcc without using
+// libstdc++ (which is where cxxabi.h comes from).
+#ifdef __GLIBCXX__
+#include <cxxabi.h>
+#endif // __GLIBCXX__
+
+namespace testing {
+namespace internal {
+
+// AssertTypeEq<T1, T2>::type is defined iff T1 and T2 are the same
+// type. This can be used as a compile-time assertion to ensure that
+// two types are equal.
+
+template <typename T1, typename T2>
+struct AssertTypeEq;
+
+template <typename T>
+struct AssertTypeEq<T, T> {
+ typedef bool type;
+};
+
+// GetTypeName<T>() returns a human-readable name of type T.
+template <typename T>
+String GetTypeName() {
+#if GTEST_HAS_RTTI
+
+ const char* const name = typeid(T).name();
+#ifdef __GLIBCXX__
+ int status = 0;
+ // gcc's implementation of typeid(T).name() mangles the type name,
+ // so we have to demangle it.
+ char* const readable_name = abi::__cxa_demangle(name, 0, 0, &status);
+ const String name_str(status == 0 ? readable_name : name);
+ free(readable_name);
+ return name_str;
+#else
+ return name;
+#endif // __GLIBCXX__
+
+#else
+ return "<type>";
+#endif // GTEST_HAS_RTTI
+}
+
+// A unique type used as the default value for the arguments of class
+// template Types. This allows us to simulate variadic templates
+// (e.g. Types<int>, Types<int, double>, etc.), which C++ doesn't
+// support directly.
+struct None {};
+
+// The following family of struct and struct templates are used to
+// represent type lists. In particular, TypesN<T1, T2, ..., TN>
+// represents a type list with N types (T1, T2, ..., and TN) in it.
+// Except for Types0, every struct in the family has two member types:
+// Head for the first type in the list, and Tail for the rest of the
+// list.
+
+// The empty type list.
+struct Types0 {};
+
+// Type lists of length 1, 2, 3, and so on.
+
+template <typename T1>
+struct Types1 {
+ typedef T1 Head;
+ typedef Types0 Tail;
+};
+
+$range i 2..n
+
+$for i [[
+$range j 1..i
+$range k 2..i
+template <$for j, [[typename T$j]]>
+struct Types$i {
+ typedef T1 Head;
+ typedef Types$(i-1)<$for k, [[T$k]]> Tail;
+};
+
+
+]]
+
+} // namespace internal
+
+// We don't want to require the users to write TypesN<...> directly,
+// as that would require them to count the length. Types<...> is much
+// easier to write, but generates horrible messages when there is a
+// compiler error, as gcc insists on printing out each template
+// argument, even if it has the default value (this means Types<int>
+// will appear as Types<int, None, None, ..., None> in the compiler
+// errors).
+//
+// Our solution is to combine the best part of the two approaches: a
+// user would write Types<T1, ..., TN>, and Google Test will translate
+// that to TypesN<T1, ..., TN> internally to make error messages
+// readable. The translation is done by the 'type' member of the
+// Types template.
+
+$range i 1..n
+template <$for i, [[typename T$i = internal::None]]>
+struct Types {
+ typedef internal::Types$n<$for i, [[T$i]]> type;
+};
+
+template <>
+struct Types<$for i, [[internal::None]]> {
+ typedef internal::Types0 type;
+};
+
+$range i 1..n-1
+$for i [[
+$range j 1..i
+$range k i+1..n
+template <$for j, [[typename T$j]]>
+struct Types<$for j, [[T$j]]$for k[[, internal::None]]> {
+ typedef internal::Types$i<$for j, [[T$j]]> type;
+};
+
+]]
+
+namespace internal {
+
+#define GTEST_TEMPLATE_ template <typename T> class
+
+// The template "selector" struct TemplateSel<Tmpl> is used to
+// represent Tmpl, which must be a class template with one type
+// parameter, as a type. TemplateSel<Tmpl>::Bind<T>::type is defined
+// as the type Tmpl<T>. This allows us to actually instantiate the
+// template "selected" by TemplateSel<Tmpl>.
+//
+// This trick is necessary for simulating typedef for class templates,
+// which C++ doesn't support directly.
+template <GTEST_TEMPLATE_ Tmpl>
+struct TemplateSel {
+ template <typename T>
+ struct Bind {
+ typedef Tmpl<T> type;
+ };
+};
+
+#define GTEST_BIND_(TmplSel, T) \
+ TmplSel::template Bind<T>::type
+
+// A unique struct template used as the default value for the
+// arguments of class template Templates. This allows us to simulate
+// variadic templates (e.g. Templates<int>, Templates<int, double>,
+// etc.), which C++ doesn't support directly.
+template <typename T>
+struct NoneT {};
+
+// The following family of struct and struct templates are used to
+// represent template lists. In particular, TemplatesN<T1, T2, ...,
+// TN> represents a list of N templates (T1, T2, ..., and TN). Except
+// for Templates0, every struct in the family has two member types:
+// Head for the selector of the first template in the list, and Tail
+// for the rest of the list.
+
+// The empty template list.
+struct Templates0 {};
+
+// Template lists of length 1, 2, 3, and so on.
+
+template <GTEST_TEMPLATE_ T1>
+struct Templates1 {
+ typedef TemplateSel<T1> Head;
+ typedef Templates0 Tail;
+};
+
+$range i 2..n
+
+$for i [[
+$range j 1..i
+$range k 2..i
+template <$for j, [[GTEST_TEMPLATE_ T$j]]>
+struct Templates$i {
+ typedef TemplateSel<T1> Head;
+ typedef Templates$(i-1)<$for k, [[T$k]]> Tail;
+};
+
+
+]]
+
+// We don't want to require the users to write TemplatesN<...> directly,
+// as that would require them to count the length. Templates<...> is much
+// easier to write, but generates horrible messages when there is a
+// compiler error, as gcc insists on printing out each template
+// argument, even if it has the default value (this means Templates<list>
+// will appear as Templates<list, NoneT, NoneT, ..., NoneT> in the compiler
+// errors).
+//
+// Our solution is to combine the best part of the two approaches: a
+// user would write Templates<T1, ..., TN>, and Google Test will translate
+// that to TemplatesN<T1, ..., TN> internally to make error messages
+// readable. The translation is done by the 'type' member of the
+// Templates template.
+
+$range i 1..n
+template <$for i, [[GTEST_TEMPLATE_ T$i = NoneT]]>
+struct Templates {
+ typedef Templates$n<$for i, [[T$i]]> type;
+};
+
+template <>
+struct Templates<$for i, [[NoneT]]> {
+ typedef Templates0 type;
+};
+
+$range i 1..n-1
+$for i [[
+$range j 1..i
+$range k i+1..n
+template <$for j, [[GTEST_TEMPLATE_ T$j]]>
+struct Templates<$for j, [[T$j]]$for k[[, NoneT]]> {
+ typedef Templates$i<$for j, [[T$j]]> type;
+};
+
+]]
+
+// The TypeList template makes it possible to use either a single type
+// or a Types<...> list in TYPED_TEST_CASE() and
+// INSTANTIATE_TYPED_TEST_CASE_P().
+
+template <typename T>
+struct TypeList { typedef Types1<T> type; };
+
+
+$range i 1..n
+template <$for i, [[typename T$i]]>
+struct TypeList<Types<$for i, [[T$i]]> > {
+ typedef typename Types<$for i, [[T$i]]>::type type;
+};
+
+} // namespace internal
+} // namespace testing
+
+#endif // GTEST_HAS_TYPED_TEST || GTEST_HAS_TYPED_TEST_P
+
+#endif // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TYPE_UTIL_H_
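The Types<...>-to-TypesN<...> translation above is what the public typed-test macros rely on. As a rough sketch of how it is consumed (the fixture and type-list names below are invented for illustration and are not part of this patch), a typed test in gtest's 1.x API looks like this:

#include <gtest/gtest.h>
#include <list>
#include <vector>

// Hypothetical fixture; any default-constructible container type works here.
template <typename Container>
class ContainerTest : public ::testing::Test {};

// Types<...> counts its arguments internally (via its nested 'type' member),
// so the user never has to spell out Types2<...> by hand.
typedef ::testing::Types<std::vector<int>, std::list<int> > MyContainers;
TYPED_TEST_CASE(ContainerTest, MyContainers);

TYPED_TEST(ContainerTest, StartsEmpty) {
  TypeParam container;  // TypeParam is the current type from the list.
  EXPECT_TRUE(container.empty());
}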
diff --git a/Source/ThirdParty/gtest/m4/acx_pthread.m4 b/Source/ThirdParty/gtest/m4/acx_pthread.m4
new file mode 100644
index 000000000..2cf20de14
--- /dev/null
+++ b/Source/ThirdParty/gtest/m4/acx_pthread.m4
@@ -0,0 +1,363 @@
+# This was retrieved from
+# http://svn.0pointer.de/viewvc/trunk/common/acx_pthread.m4?revision=1277&root=avahi
+# See also (perhaps for new versions?)
+# http://svn.0pointer.de/viewvc/trunk/common/acx_pthread.m4?root=avahi
+#
+# We've rewritten the inconsistency check code (from avahi), to work
+# more broadly. In particular, it no longer assumes ld accepts -zdefs.
+# This caused a restructuring of the code, but the functionality has only
+# changed a little.
+
+dnl @synopsis ACX_PTHREAD([ACTION-IF-FOUND[, ACTION-IF-NOT-FOUND]])
+dnl
+dnl @summary figure out how to build C programs using POSIX threads
+dnl
+dnl This macro figures out how to build C programs using POSIX threads.
+dnl It sets the PTHREAD_LIBS output variable to the threads library and
+dnl linker flags, and the PTHREAD_CFLAGS output variable to any special
+dnl C compiler flags that are needed. (The user can also force certain
+dnl compiler flags/libs to be tested by setting these environment
+dnl variables.)
+dnl
+dnl Also sets PTHREAD_CC to any special C compiler that is needed for
+dnl multi-threaded programs (defaults to the value of CC otherwise).
+dnl (This is necessary on AIX to use the special cc_r compiler alias.)
+dnl
+dnl NOTE: You are assumed to not only compile your program with these
+dnl flags, but also link it with them as well. e.g. you should link
+dnl with $PTHREAD_CC $CFLAGS $PTHREAD_CFLAGS $LDFLAGS ... $PTHREAD_LIBS
+dnl $LIBS
+dnl
+dnl If you are only building threads programs, you may wish to use
+dnl these variables in your default LIBS, CFLAGS, and CC:
+dnl
+dnl LIBS="$PTHREAD_LIBS $LIBS"
+dnl CFLAGS="$CFLAGS $PTHREAD_CFLAGS"
+dnl CC="$PTHREAD_CC"
+dnl
+dnl In addition, if the PTHREAD_CREATE_JOINABLE thread-attribute
+dnl constant has a nonstandard name, defines PTHREAD_CREATE_JOINABLE to
+dnl that name (e.g. PTHREAD_CREATE_UNDETACHED on AIX).
+dnl
+dnl ACTION-IF-FOUND is a list of shell commands to run if a threads
+dnl library is found, and ACTION-IF-NOT-FOUND is a list of commands to
+dnl run if it is not found. If ACTION-IF-FOUND is not specified, the
+dnl default action will define HAVE_PTHREAD.
+dnl
+dnl Please let the authors know if this macro fails on any platform, or
+dnl if you have any other suggestions or comments. This macro was based
+dnl on work by SGJ on autoconf scripts for FFTW (www.fftw.org) (with
+dnl help from M. Frigo), as well as ac_pthread and hb_pthread macros
+dnl posted by Alejandro Forero Cuervo to the autoconf macro repository.
+dnl We are also grateful for the helpful feedback of numerous users.
+dnl
+dnl @category InstalledPackages
+dnl @author Steven G. Johnson <stevenj@alum.mit.edu>
+dnl @version 2006-05-29
+dnl @license GPLWithACException
+dnl
+dnl Checks for GCC shared/pthread inconsistency based on work by
+dnl Marcin Owsiany <marcin@owsiany.pl>
+
+
+AC_DEFUN([ACX_PTHREAD], [
+AC_REQUIRE([AC_CANONICAL_HOST])
+AC_LANG_SAVE
+AC_LANG_C
+acx_pthread_ok=no
+
+# We used to check for pthread.h first, but this fails if pthread.h
+# requires special compiler flags (e.g. on Tru64 or Sequent).
+# It gets checked for in the link test anyway.
+
+# First of all, check if the user has set any of the PTHREAD_LIBS,
+# etcetera environment variables, and if threads linking works using
+# them:
+if test x"$PTHREAD_LIBS$PTHREAD_CFLAGS" != x; then
+ save_CFLAGS="$CFLAGS"
+ CFLAGS="$CFLAGS $PTHREAD_CFLAGS"
+ save_LIBS="$LIBS"
+ LIBS="$PTHREAD_LIBS $LIBS"
+ AC_MSG_CHECKING([for pthread_join in LIBS=$PTHREAD_LIBS with CFLAGS=$PTHREAD_CFLAGS])
+ AC_TRY_LINK_FUNC(pthread_join, acx_pthread_ok=yes)
+ AC_MSG_RESULT($acx_pthread_ok)
+ if test x"$acx_pthread_ok" = xno; then
+ PTHREAD_LIBS=""
+ PTHREAD_CFLAGS=""
+ fi
+ LIBS="$save_LIBS"
+ CFLAGS="$save_CFLAGS"
+fi
+
+# We must check for the threads library under a number of different
+# names; the ordering is very important because some systems
+# (e.g. DEC) have both -lpthread and -lpthreads, where one of the
+# libraries is broken (non-POSIX).
+
+# Create a list of thread flags to try. Items starting with a "-" are
+# C compiler flags, and other items are library names, except for "none"
+# which indicates that we try without any flags at all, and "pthread-config"
+# which is a program returning the flags for the Pth emulation library.
+
+acx_pthread_flags="pthreads none -Kthread -kthread lthread -pthread -pthreads -mthreads pthread --thread-safe -mt pthread-config"
+
+# The ordering *is* (sometimes) important. Some notes on the
+# individual items follow:
+
+# pthreads: AIX (must check this before -lpthread)
+# none: in case threads are in libc; should be tried before -Kthread and
+# other compiler flags to prevent continual compiler warnings
+# -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h)
+# -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able)
+# lthread: LinuxThreads port on FreeBSD (also preferred to -pthread)
+# -pthread: Linux/gcc (kernel threads), BSD/gcc (userland threads)
+# -pthreads: Solaris/gcc
+# -mthreads: Mingw32/gcc, Lynx/gcc
+# -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it
+# doesn't hurt to check since this sometimes defines pthreads too;
+# also defines -D_REENTRANT)
+# ... -mt is also the pthreads flag for HP/aCC
+# pthread: Linux, etcetera
+# --thread-safe: KAI C++
+# pthread-config: use pthread-config program (for GNU Pth library)
+
+case "${host_cpu}-${host_os}" in
+ *solaris*)
+
+ # On Solaris (at least, for some versions), libc contains stubbed
+ # (non-functional) versions of the pthreads routines, so link-based
+ # tests will erroneously succeed. (We need to link with -pthreads/-mt/
+ # -lpthread.) (The stubs are missing pthread_cleanup_push, or rather
+ # a function called by this macro, so we could check for that, but
+ # who knows whether they'll stub that too in a future libc.) So,
+ # we'll just look for -pthreads and -lpthread first:
+
+ acx_pthread_flags="-pthreads pthread -mt -pthread $acx_pthread_flags"
+ ;;
+esac
+
+if test x"$acx_pthread_ok" = xno; then
+for flag in $acx_pthread_flags; do
+
+ case $flag in
+ none)
+ AC_MSG_CHECKING([whether pthreads work without any flags])
+ ;;
+
+ -*)
+ AC_MSG_CHECKING([whether pthreads work with $flag])
+ PTHREAD_CFLAGS="$flag"
+ ;;
+
+ pthread-config)
+ AC_CHECK_PROG(acx_pthread_config, pthread-config, yes, no)
+ if test x"$acx_pthread_config" = xno; then continue; fi
+ PTHREAD_CFLAGS="`pthread-config --cflags`"
+ PTHREAD_LIBS="`pthread-config --ldflags` `pthread-config --libs`"
+ ;;
+
+ *)
+ AC_MSG_CHECKING([for the pthreads library -l$flag])
+ PTHREAD_LIBS="-l$flag"
+ ;;
+ esac
+
+ save_LIBS="$LIBS"
+ save_CFLAGS="$CFLAGS"
+ LIBS="$PTHREAD_LIBS $LIBS"
+ CFLAGS="$CFLAGS $PTHREAD_CFLAGS"
+
+ # Check for various functions. We must include pthread.h,
+ # since some functions may be macros. (On the Sequent, we
+ # need a special flag -Kthread to make this header compile.)
+ # We check for pthread_join because it is in -lpthread on IRIX
+ # while pthread_create is in libc. We check for pthread_attr_init
+ # due to DEC craziness with -lpthreads. We check for
+ # pthread_cleanup_push because it is one of the few pthread
+ # functions on Solaris that doesn't have a non-functional libc stub.
+ # We try pthread_create on general principles.
+ AC_TRY_LINK([#include <pthread.h>],
+ [pthread_t th; pthread_join(th, 0);
+ pthread_attr_init(0); pthread_cleanup_push(0, 0);
+ pthread_create(0,0,0,0); pthread_cleanup_pop(0); ],
+ [acx_pthread_ok=yes])
+
+ LIBS="$save_LIBS"
+ CFLAGS="$save_CFLAGS"
+
+ AC_MSG_RESULT($acx_pthread_ok)
+ if test "x$acx_pthread_ok" = xyes; then
+ break;
+ fi
+
+ PTHREAD_LIBS=""
+ PTHREAD_CFLAGS=""
+done
+fi
+
+# Various other checks:
+if test "x$acx_pthread_ok" = xyes; then
+ save_LIBS="$LIBS"
+ LIBS="$PTHREAD_LIBS $LIBS"
+ save_CFLAGS="$CFLAGS"
+ CFLAGS="$CFLAGS $PTHREAD_CFLAGS"
+
+ # Detect AIX lossage: JOINABLE attribute is called UNDETACHED.
+ AC_MSG_CHECKING([for joinable pthread attribute])
+ attr_name=unknown
+ for attr in PTHREAD_CREATE_JOINABLE PTHREAD_CREATE_UNDETACHED; do
+ AC_TRY_LINK([#include <pthread.h>], [int attr=$attr; return attr;],
+ [attr_name=$attr; break])
+ done
+ AC_MSG_RESULT($attr_name)
+ if test "$attr_name" != PTHREAD_CREATE_JOINABLE; then
+ AC_DEFINE_UNQUOTED(PTHREAD_CREATE_JOINABLE, $attr_name,
+ [Define to necessary symbol if this constant
+ uses a non-standard name on your system.])
+ fi
+
+ AC_MSG_CHECKING([if more special flags are required for pthreads])
+ flag=no
+ case "${host_cpu}-${host_os}" in
+ *-aix* | *-freebsd* | *-darwin*) flag="-D_THREAD_SAFE";;
+ *solaris* | *-osf* | *-hpux*) flag="-D_REENTRANT";;
+ esac
+ AC_MSG_RESULT(${flag})
+ if test "x$flag" != xno; then
+ PTHREAD_CFLAGS="$flag $PTHREAD_CFLAGS"
+ fi
+
+ LIBS="$save_LIBS"
+ CFLAGS="$save_CFLAGS"
+ # More AIX lossage: must compile with xlc_r or cc_r
+ if test x"$GCC" != xyes; then
+ AC_CHECK_PROGS(PTHREAD_CC, xlc_r cc_r, ${CC})
+ else
+ PTHREAD_CC=$CC
+ fi
+
+ # The next part tries to detect GCC inconsistency with -shared on some
+ # architectures and systems. The problem is that in certain
+ # configurations, when -shared is specified, GCC "forgets" to
+ # internally use various flags which are still necessary.
+
+ #
+ # Prepare the flags
+ #
+ save_CFLAGS="$CFLAGS"
+ save_LIBS="$LIBS"
+ save_CC="$CC"
+
+ # Try with the flags determined by the earlier checks.
+ #
+ # -Wl,-z,defs forces link-time symbol resolution, so that the
+ # linking checks with -shared actually have any value
+ #
+ # FIXME: -fPIC is required for -shared on many architectures,
+ # so we specify it here, but the right way would probably be to
+ # properly detect whether it is actually required.
+ CFLAGS="-shared -fPIC -Wl,-z,defs $CFLAGS $PTHREAD_CFLAGS"
+ LIBS="$PTHREAD_LIBS $LIBS"
+ CC="$PTHREAD_CC"
+
+ # In order not to create several levels of indentation, we test
+ # the value of "$done" until we find the cure or run out of ideas.
+ done="no"
+
+ # First, make sure the CFLAGS we added are actually accepted by our
+ # compiler. If not (and OS X's ld, for instance, does not accept -z),
+ # then we can't do this test.
+ if test x"$done" = xno; then
+ AC_MSG_CHECKING([whether to check for GCC pthread/shared inconsistencies])
+ AC_TRY_LINK(,, , [done=yes])
+
+ if test "x$done" = xyes ; then
+ AC_MSG_RESULT([no])
+ else
+ AC_MSG_RESULT([yes])
+ fi
+ fi
+
+ if test x"$done" = xno; then
+ AC_MSG_CHECKING([whether -pthread is sufficient with -shared])
+ AC_TRY_LINK([#include <pthread.h>],
+ [pthread_t th; pthread_join(th, 0);
+ pthread_attr_init(0); pthread_cleanup_push(0, 0);
+ pthread_create(0,0,0,0); pthread_cleanup_pop(0); ],
+ [done=yes])
+
+ if test "x$done" = xyes; then
+ AC_MSG_RESULT([yes])
+ else
+ AC_MSG_RESULT([no])
+ fi
+ fi
+
+ #
+ # Linux gcc on some architectures such as mips/mipsel forgets
+ # about -lpthread
+ #
+ if test x"$done" = xno; then
+ AC_MSG_CHECKING([whether -lpthread fixes that])
+ LIBS="-lpthread $PTHREAD_LIBS $save_LIBS"
+ AC_TRY_LINK([#include <pthread.h>],
+ [pthread_t th; pthread_join(th, 0);
+ pthread_attr_init(0); pthread_cleanup_push(0, 0);
+ pthread_create(0,0,0,0); pthread_cleanup_pop(0); ],
+ [done=yes])
+
+ if test "x$done" = xyes; then
+ AC_MSG_RESULT([yes])
+ PTHREAD_LIBS="-lpthread $PTHREAD_LIBS"
+ else
+ AC_MSG_RESULT([no])
+ fi
+ fi
+ #
+ # FreeBSD 4.10 gcc forgets to use -lc_r instead of -lc
+ #
+ if test x"$done" = xno; then
+ AC_MSG_CHECKING([whether -lc_r fixes that])
+ LIBS="-lc_r $PTHREAD_LIBS $save_LIBS"
+ AC_TRY_LINK([#include <pthread.h>],
+ [pthread_t th; pthread_join(th, 0);
+ pthread_attr_init(0); pthread_cleanup_push(0, 0);
+ pthread_create(0,0,0,0); pthread_cleanup_pop(0); ],
+ [done=yes])
+
+ if test "x$done" = xyes; then
+ AC_MSG_RESULT([yes])
+ PTHREAD_LIBS="-lc_r $PTHREAD_LIBS"
+ else
+ AC_MSG_RESULT([no])
+ fi
+ fi
+ if test x"$done" = xno; then
+ # OK, we have run out of ideas
+ AC_MSG_WARN([Impossible to determine how to use pthreads with shared libraries])
+
+ # so it's not safe to assume that we may use pthreads
+ acx_pthread_ok=no
+ fi
+
+ CFLAGS="$save_CFLAGS"
+ LIBS="$save_LIBS"
+ CC="$save_CC"
+else
+ PTHREAD_CC="$CC"
+fi
+
+AC_SUBST(PTHREAD_LIBS)
+AC_SUBST(PTHREAD_CFLAGS)
+AC_SUBST(PTHREAD_CC)
+
+# Finally, execute ACTION-IF-FOUND/ACTION-IF-NOT-FOUND:
+if test x"$acx_pthread_ok" = xyes; then
+ ifelse([$1],,AC_DEFINE(HAVE_PTHREAD,1,[Define if you have POSIX threads libraries and header files.]),[$1])
+ :
+else
+ acx_pthread_ok=no
+ $2
+fi
+AC_LANG_RESTORE
+])dnl ACX_PTHREAD
diff --git a/Source/ThirdParty/gtest/m4/gtest.m4 b/Source/ThirdParty/gtest/m4/gtest.m4
new file mode 100644
index 000000000..6598ba75a
--- /dev/null
+++ b/Source/ThirdParty/gtest/m4/gtest.m4
@@ -0,0 +1,74 @@
+dnl GTEST_LIB_CHECK([minimum version [,
+dnl action if found [,action if not found]]])
+dnl
+dnl Check for the presence of the Google Test library, optionally at a minimum
+dnl version, and indicate a viable version with the HAVE_GTEST flag. It defines
+dnl standard variables for substitution including GTEST_CPPFLAGS,
+dnl GTEST_CXXFLAGS, GTEST_LDFLAGS, and GTEST_LIBS. It also defines
+dnl GTEST_VERSION as the version of Google Test found. Finally, it provides
+dnl optional custom action slots in the event GTEST is found or not.
+AC_DEFUN([GTEST_LIB_CHECK],
+[
+dnl Provide a flag to enable or disable Google Test usage.
+AC_ARG_ENABLE([gtest],
+ [AS_HELP_STRING([--enable-gtest],
+ [Enable tests using the Google C++ Testing Framework.
+ (Default is enabled.)])],
+ [],
+ [enable_gtest=])
+AC_ARG_VAR([GTEST_CONFIG],
+ [The exact path of Google Test's 'gtest-config' script.])
+AC_ARG_VAR([GTEST_CPPFLAGS],
+ [C-like preprocessor flags for Google Test.])
+AC_ARG_VAR([GTEST_CXXFLAGS],
+ [C++ compile flags for Google Test.])
+AC_ARG_VAR([GTEST_LDFLAGS],
+ [Linker path and option flags for Google Test.])
+AC_ARG_VAR([GTEST_LIBS],
+ [Library linking flags for Google Test.])
+AC_ARG_VAR([GTEST_VERSION],
+ [The version of Google Test available.])
+HAVE_GTEST="no"
+AS_IF([test "x${enable_gtest}" != "xno"],
+ [AC_MSG_CHECKING([for 'gtest-config'])
+ AS_IF([test "x${enable_gtest}" != "xyes"],
+ [AS_IF([test -x "${enable_gtest}/scripts/gtest-config"],
+ [GTEST_CONFIG="${enable_gtest}/scripts/gtest-config"],
+ [GTEST_CONFIG="${enable_gtest}/bin/gtest-config"])
+ AS_IF([test -x "${GTEST_CONFIG}"], [],
+ [AC_MSG_RESULT([no])
+ AC_MSG_ERROR([dnl
+Unable to locate either a built or installed Google Test.
+The specific location '${enable_gtest}' was provided for a built or installed
+Google Test, but no 'gtest-config' script could be found at this location.])
+ ])],
+ [AC_PATH_PROG([GTEST_CONFIG], [gtest-config])])
+ AS_IF([test -x "${GTEST_CONFIG}"],
+ [AC_MSG_RESULT([${GTEST_CONFIG}])
+ m4_ifval([$1],
+ [_gtest_min_version="--min-version=$1"
+ AC_MSG_CHECKING([for Google Test at least version >= $1])],
+ [_gtest_min_version="--min-version=0"
+ AC_MSG_CHECKING([for Google Test])])
+ AS_IF([${GTEST_CONFIG} ${_gtest_min_version}],
+ [AC_MSG_RESULT([yes])
+ HAVE_GTEST='yes'],
+ [AC_MSG_RESULT([no])])],
+ [AC_MSG_RESULT([no])])
+ AS_IF([test "x${HAVE_GTEST}" = "xyes"],
+ [GTEST_CPPFLAGS=`${GTEST_CONFIG} --cppflags`
+ GTEST_CXXFLAGS=`${GTEST_CONFIG} --cxxflags`
+ GTEST_LDFLAGS=`${GTEST_CONFIG} --ldflags`
+ GTEST_LIBS=`${GTEST_CONFIG} --libs`
+ GTEST_VERSION=`${GTEST_CONFIG} --version`
+ AC_DEFINE([HAVE_GTEST],[1],[Defined when Google Test is available.])],
+ [AS_IF([test "x${enable_gtest}" = "xyes"],
+ [AC_MSG_ERROR([dnl
+Google Test was enabled, but no viable version could be found.])
+ ])])])
+AC_SUBST([HAVE_GTEST])
+AM_CONDITIONAL([HAVE_GTEST],[test "x$HAVE_GTEST" = "xyes"])
+AS_IF([test "x$HAVE_GTEST" = "xyes"],
+ [m4_ifval([$2], [$2])],
+ [m4_ifval([$3], [$3])])
+])
diff --git a/Source/ThirdParty/gtest/msvc/gtest-md.vcxproj.filters b/Source/ThirdParty/gtest/msvc/gtest-md.vcxproj.filters
new file mode 100644
index 000000000..037d1c9b4
--- /dev/null
+++ b/Source/ThirdParty/gtest/msvc/gtest-md.vcxproj.filters
@@ -0,0 +1,71 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup>
+ <Filter Include="Source Files">
+ <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
+ <Extensions>cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
+ </Filter>
+ <Filter Include="Header Files">
+ <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
+ <Extensions>h;hpp;hxx;hm;inl;inc;xsd</Extensions>
+ </Filter>
+ </ItemGroup>
+ <ItemGroup>
+ <ClCompile Include="..\src\gtest-death-test.cc">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="..\src\gtest-filepath.cc">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="..\src\gtest-port.cc">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="..\src\gtest-test-part.cc">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="..\src\gtest-typed-test.cc">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="..\src\gtest.cc">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ </ItemGroup>
+ <ItemGroup>
+ <ClInclude Include="..\include\gtest\internal\gtest-death-test-internal.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\include\gtest\gtest-death-test.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\include\gtest\internal\gtest-filepath.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\src\gtest-internal-inl.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\include\gtest\internal\gtest-internal.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\include\gtest\gtest-message.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\include\gtest\internal\gtest-port.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\include\gtest\gtest-spi.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\include\gtest\internal\gtest-string.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\include\gtest\gtest.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\include\gtest\gtest_pred_impl.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\include\gtest\gtest_prod.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/Source/ThirdParty/gtest/run_tests.py b/Source/ThirdParty/gtest/run_tests.py
new file mode 100755
index 000000000..e1084056e
--- /dev/null
+++ b/Source/ThirdParty/gtest/run_tests.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+#
+# Copyright 2008, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Runs the specified tests for Google Test.
+
+This script requires Python 2.3 or higher. To learn the usage, run it
+with -h.
+"""
+
+import os
+import sys
+
+SCRIPT_DIR = os.path.dirname(__file__) or '.'
+
+sys.path.append(os.path.join(SCRIPT_DIR, 'test'))
+import run_tests_util
+
+
+def _Main():
+ """Runs all tests for Google Test."""
+
+ options, args = run_tests_util.ParseArgs('gtest')
+ test_runner = run_tests_util.TestRunner(script_dir=SCRIPT_DIR)
+ tests = test_runner.GetTestsToRun(args,
+ options.configurations,
+ options.built_configurations)
+ if not tests:
+ sys.exit(1) # Incorrect parameters given, abort execution.
+
+ sys.exit(test_runner.RunTests(tests[0], tests[1]))
+
+if __name__ == '__main__':
+ _Main()
diff --git a/Source/ThirdParty/gtest/samples/prime_tables.h b/Source/ThirdParty/gtest/samples/prime_tables.h
new file mode 100644
index 000000000..92ce16a01
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/prime_tables.h
@@ -0,0 +1,123 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+// Author: vladl@google.com (Vlad Losev)
+
+// This provides the PrimeTable interface, which determines whether a number
+// is prime and computes the next prime number. This interface is used
+// in Google Test samples demonstrating the use of parameterized tests.
+
+#ifndef GTEST_SAMPLES_PRIME_TABLES_H_
+#define GTEST_SAMPLES_PRIME_TABLES_H_
+
+#include <algorithm>
+
+// The prime table interface.
+class PrimeTable {
+ public:
+ virtual ~PrimeTable() {}
+
+ // Returns true iff n is a prime number.
+ virtual bool IsPrime(int n) const = 0;
+
+ // Returns the smallest prime number greater than p; or returns -1
+ // if the next prime is beyond the capacity of the table.
+ virtual int GetNextPrime(int p) const = 0;
+};
+
+// Implementation #1 calculates the primes on-the-fly.
+class OnTheFlyPrimeTable : public PrimeTable {
+ public:
+ virtual bool IsPrime(int n) const {
+ if (n <= 1) return false;
+
+ for (int i = 2; i*i <= n; i++) {
+ // n is divisible by an integer other than 1 and itself.
+ if ((n % i) == 0) return false;
+ }
+
+ return true;
+ }
+
+ virtual int GetNextPrime(int p) const {
+ for (int n = p + 1; n > 0; n++) {
+ if (IsPrime(n)) return n;
+ }
+
+ return -1;
+ }
+};
+
+// Implementation #2 pre-calculates the primes and stores the result
+// in an array.
+class PreCalculatedPrimeTable : public PrimeTable {
+ public:
+ // 'max' specifies the maximum number the prime table holds.
+ explicit PreCalculatedPrimeTable(int max)
+ : is_prime_size_(max + 1), is_prime_(new bool[max + 1]) {
+ CalculatePrimesUpTo(max);
+ }
+ virtual ~PreCalculatedPrimeTable() { delete[] is_prime_; }
+
+ virtual bool IsPrime(int n) const {
+ return 0 <= n && n < is_prime_size_ && is_prime_[n];
+ }
+
+ virtual int GetNextPrime(int p) const {
+ for (int n = p + 1; n < is_prime_size_; n++) {
+ if (is_prime_[n]) return n;
+ }
+
+ return -1;
+ }
+
+ private:
+ void CalculatePrimesUpTo(int max) {
+ ::std::fill(is_prime_, is_prime_ + is_prime_size_, true);
+ is_prime_[0] = is_prime_[1] = false;
+
+ for (int i = 2; i <= max; i++) {
+ if (!is_prime_[i]) continue;
+
+ // Marks all multiples of i (except i itself) as non-prime.
+ for (int j = 2*i; j <= max; j += i) {
+ is_prime_[j] = false;
+ }
+ }
+ }
+
+ const int is_prime_size_;
+ bool* const is_prime_;
+
+ // Disables compiler warning "assignment operator could not be generated."
+ void operator=(const PreCalculatedPrimeTable& rhs);
+};
+
+#endif // GTEST_SAMPLES_PRIME_TABLES_H_
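As a minimal, hypothetical usage sketch (not one of the shipped samples; the test name is invented), both implementations can be exercised through the common PrimeTable interface, which is what the later parameterized-test samples do more systematically:

#include "prime_tables.h"
#include <gtest/gtest.h>

// Checks that the on-the-fly and pre-calculated tables agree on a small range.
TEST(PrimeTableSketch, ImplementationsAgree) {
  OnTheFlyPrimeTable on_the_fly;
  PreCalculatedPrimeTable pre_calculated(100);  // holds primality up to 100

  for (int n = 0; n <= 100; n++) {
    EXPECT_EQ(on_the_fly.IsPrime(n), pre_calculated.IsPrime(n)) << "n = " << n;
  }
  EXPECT_EQ(7, on_the_fly.GetNextPrime(5));
  EXPECT_EQ(7, pre_calculated.GetNextPrime(5));
}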
diff --git a/Source/ThirdParty/gtest/samples/sample1.cc b/Source/ThirdParty/gtest/samples/sample1.cc
new file mode 100644
index 000000000..f171e2609
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample1.cc
@@ -0,0 +1,68 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// A sample program demonstrating using Google C++ testing framework.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include "sample1.h"
+
+// Returns n! (the factorial of n). For negative n, n! is defined to be 1.
+int Factorial(int n) {
+ int result = 1;
+ for (int i = 1; i <= n; i++) {
+ result *= i;
+ }
+
+ return result;
+}
+
+// Returns true iff n is a prime number.
+bool IsPrime(int n) {
+ // Trivial case 1: small numbers
+ if (n <= 1) return false;
+
+ // Trivial case 2: even numbers
+ if (n % 2 == 0) return n == 2;
+
+ // Now, we have that n is odd and n >= 3.
+
+ // Try to divide n by every odd number i, starting from 3
+ for (int i = 3; ; i += 2) {
+    // We only have to try i up to the square root of n
+ if (i > n/i) break;
+
+ // Now, we have i <= n/i < n.
+ // If n is divisible by i, n is not prime.
+ if (n % i == 0) return false;
+ }
+
+ // n has no integer factor in the range (1, n), and thus is prime.
+ return true;
+}
diff --git a/Source/ThirdParty/gtest/samples/sample1.h b/Source/ThirdParty/gtest/samples/sample1.h
new file mode 100644
index 000000000..3dfeb98c4
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample1.h
@@ -0,0 +1,43 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// A sample program demonstrating using Google C++ testing framework.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#ifndef GTEST_SAMPLES_SAMPLE1_H_
+#define GTEST_SAMPLES_SAMPLE1_H_
+
+// Returns n! (the factorial of n). For negative n, n! is defined to be 1.
+int Factorial(int n);
+
+// Returns true iff n is a prime number.
+bool IsPrime(int n);
+
+#endif // GTEST_SAMPLES_SAMPLE1_H_
diff --git a/Source/ThirdParty/gtest/samples/sample10_unittest.cc b/Source/ThirdParty/gtest/samples/sample10_unittest.cc
new file mode 100644
index 000000000..3ad6fd653
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample10_unittest.cc
@@ -0,0 +1,145 @@
+// Copyright 2009 Google Inc. All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: vladl@google.com (Vlad Losev)
+
+// This sample shows how to use Google Test listener API to implement
+// a primitive leak checker.
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>  // for strcmp(), used to parse --check_for_leaks below.
+
+#include <gtest/gtest.h>
+
+using ::testing::EmptyTestEventListener;
+using ::testing::InitGoogleTest;
+using ::testing::Test;
+using ::testing::TestCase;
+using ::testing::TestEventListeners;
+using ::testing::TestInfo;
+using ::testing::TestPartResult;
+using ::testing::UnitTest;
+
+namespace {
+
+// We will track memory used by this class.
+class Water {
+ public:
+ // Normal Water declarations go here.
+
+ // operator new and operator delete help us control water allocation.
+ void* operator new(size_t allocation_size) {
+ allocated_++;
+ return malloc(allocation_size);
+ }
+
+ void operator delete(void* block, size_t /* allocation_size */) {
+ allocated_--;
+ free(block);
+ }
+
+ static int allocated() { return allocated_; }
+
+ private:
+ static int allocated_;
+};
+
+int Water::allocated_ = 0;
+
+// This event listener monitors how many Water objects are created and
+// destroyed by each test, and reports a failure if a test leaks some Water
+// objects. It does this by comparing the number of live Water objects at
+// the beginning of a test and at the end of a test.
+class LeakChecker : public EmptyTestEventListener {
+ private:
+ // Called before a test starts.
+ virtual void OnTestStart(const TestInfo& /* test_info */) {
+ initially_allocated_ = Water::allocated();
+ }
+
+ // Called after a test ends.
+ virtual void OnTestEnd(const TestInfo& /* test_info */) {
+ int difference = Water::allocated() - initially_allocated_;
+
+ // You can generate a failure in any event handler except
+ // OnTestPartResult. Just use an appropriate Google Test assertion to do
+ // it.
+ EXPECT_TRUE(difference <= 0)
+ << "Leaked " << difference << " unit(s) of Water!";
+ }
+
+ int initially_allocated_;
+};
+
+TEST(ListenersTest, DoesNotLeak) {
+ Water* water = new Water;
+ delete water;
+}
+
+// This should fail when the --check_for_leaks command line flag is
+// specified.
+TEST(ListenersTest, LeaksWater) {
+ Water* water = new Water;
+ EXPECT_TRUE(water != NULL);
+}
+
+} // namespace
+
+int main(int argc, char **argv) {
+ InitGoogleTest(&argc, argv);
+
+ bool check_for_leaks = false;
+  if (argc > 1 && strcmp(argv[1], "--check_for_leaks") == 0)
+ check_for_leaks = true;
+ else
+ printf("%s\n", "Run this program with --check_for_leaks to enable "
+ "custom leak checking in the tests.");
+
+ // If we are given the --check_for_leaks command line flag, installs the
+ // leak checker.
+ if (check_for_leaks) {
+ TestEventListeners& listeners = UnitTest::GetInstance()->listeners();
+
+ // Adds the leak checker to the end of the test event listener list,
+ // after the default text output printer and the default XML report
+ // generator.
+ //
+ // The order is important - it ensures that failures generated in the
+ // leak checker's OnTestEnd() method are processed by the text and XML
+ // printers *before* their OnTestEnd() methods are called, such that
+ // they are attributed to the right test. Remember that a listener
+ // receives an OnXyzStart event *after* listeners preceding it in the
+ // list received that event, and receives an OnXyzEnd event *before*
+ // listeners preceding it.
+ //
+ // We don't need to worry about deleting the new listener later, as
+ // Google Test will do it.
+ listeners.Append(new LeakChecker);
+ }
+ return RUN_ALL_TESTS();
+}
diff --git a/Source/ThirdParty/gtest/samples/sample1_unittest.cc b/Source/ThirdParty/gtest/samples/sample1_unittest.cc
new file mode 100644
index 000000000..01eb5462f
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample1_unittest.cc
@@ -0,0 +1,153 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// A sample program demonstrating using Google C++ testing framework.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+
+// This sample shows how to write a simple unit test for a function,
+// using Google C++ testing framework.
+//
+// Writing a unit test using Google C++ testing framework is as easy as 1-2-3:
+
+
+// Step 1. Include necessary header files such that the stuff your
+// test logic needs is declared.
+//
+// Don't forget gtest.h, which declares the testing framework.
+
+#include <limits.h>
+#include "sample1.h"
+#include <gtest/gtest.h>
+
+
+// Step 2. Use the TEST macro to define your tests.
+//
+// TEST has two parameters: the test case name and the test name.
+// After using the macro, you should define your test logic between a
+// pair of braces. You can use a bunch of macros to indicate the
+// success or failure of a test. EXPECT_TRUE and EXPECT_EQ are
+// examples of such macros. For a complete list, see gtest.h.
+//
+// <TechnicalDetails>
+//
+// In Google Test, tests are grouped into test cases. This is how we
+// keep test code organized. You should put logically related tests
+// into the same test case.
+//
+// The test case name and the test name should both be valid C++
+// identifiers. And you should not use underscore (_) in the names.
+//
+// Google Test guarantees that each test you define is run exactly
+// once, but it makes no guarantee on the order the tests are
+// executed. Therefore, you should write your tests in such a way
+// that their results don't depend on their order.
+//
+// </TechnicalDetails>
+
+
+// Tests Factorial().
+
+// Tests factorial of negative numbers.
+TEST(FactorialTest, Negative) {
+ // This test is named "Negative", and belongs to the "FactorialTest"
+ // test case.
+ EXPECT_EQ(1, Factorial(-5));
+ EXPECT_EQ(1, Factorial(-1));
+ EXPECT_TRUE(Factorial(-10) > 0);
+
+ // <TechnicalDetails>
+ //
+ // EXPECT_EQ(expected, actual) is the same as
+ //
+ // EXPECT_TRUE((expected) == (actual))
+ //
+ // except that it will print both the expected value and the actual
+ // value when the assertion fails. This is very helpful for
+ // debugging. Therefore in this case EXPECT_EQ is preferred.
+ //
+ // On the other hand, EXPECT_TRUE accepts any Boolean expression,
+ // and is thus more general.
+ //
+ // </TechnicalDetails>
+}
+
+// Tests factorial of 0.
+TEST(FactorialTest, Zero) {
+ EXPECT_EQ(1, Factorial(0));
+}
+
+// Tests factorial of positive numbers.
+TEST(FactorialTest, Positive) {
+ EXPECT_EQ(1, Factorial(1));
+ EXPECT_EQ(2, Factorial(2));
+ EXPECT_EQ(6, Factorial(3));
+ EXPECT_EQ(40320, Factorial(8));
+}
+
+
+// Tests IsPrime()
+
+// Tests negative input.
+TEST(IsPrimeTest, Negative) {
+ // This test belongs to the IsPrimeTest test case.
+
+ EXPECT_FALSE(IsPrime(-1));
+ EXPECT_FALSE(IsPrime(-2));
+ EXPECT_FALSE(IsPrime(INT_MIN));
+}
+
+// Tests some trivial cases.
+TEST(IsPrimeTest, Trivial) {
+ EXPECT_FALSE(IsPrime(0));
+ EXPECT_FALSE(IsPrime(1));
+ EXPECT_TRUE(IsPrime(2));
+ EXPECT_TRUE(IsPrime(3));
+}
+
+// Tests positive input.
+TEST(IsPrimeTest, Positive) {
+ EXPECT_FALSE(IsPrime(4));
+ EXPECT_TRUE(IsPrime(5));
+ EXPECT_FALSE(IsPrime(6));
+ EXPECT_TRUE(IsPrime(23));
+}
+
+// Step 3. Call RUN_ALL_TESTS() in main().
+//
+// We do this by linking in the src/gtest_main.cc file, which consists of
+// a main() function which calls RUN_ALL_TESTS() for us.
+//
+// This runs all the tests you've defined, prints the result, and
+// returns 0 if successful, or 1 otherwise.
+//
+// Did you notice that we didn't register the tests? The
+// RUN_ALL_TESTS() macro magically knows about all the tests we
+// defined. Isn't this convenient?
diff --git a/Source/ThirdParty/gtest/samples/sample2.cc b/Source/ThirdParty/gtest/samples/sample2.cc
new file mode 100644
index 000000000..5f763b9bd
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample2.cc
@@ -0,0 +1,56 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// A sample program demonstrating using Google C++ testing framework.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include "sample2.h"
+
+#include <string.h>
+
+// Clones a 0-terminated C string, allocating memory using new.
+const char* MyString::CloneCString(const char* a_c_string) {
+ if (a_c_string == NULL) return NULL;
+
+ const size_t len = strlen(a_c_string);
+ char* const clone = new char[ len + 1 ];
+ memcpy(clone, a_c_string, len + 1);
+
+ return clone;
+}
+
+// Sets the 0-terminated C string this MyString object
+// represents.
+void MyString::Set(const char* a_c_string) {
+  // Makes sure this works when a_c_string == c_string_
+ const char* const temp = MyString::CloneCString(a_c_string);
+ delete[] c_string_;
+ c_string_ = temp;
+}
diff --git a/Source/ThirdParty/gtest/samples/sample2.h b/Source/ThirdParty/gtest/samples/sample2.h
new file mode 100644
index 000000000..5b57e6082
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample2.h
@@ -0,0 +1,86 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// A sample program demonstrating using Google C++ testing framework.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#ifndef GTEST_SAMPLES_SAMPLE2_H_
+#define GTEST_SAMPLES_SAMPLE2_H_
+
+#include <string.h>
+
+
+// A simple string class.
+class MyString {
+ private:
+ const char* c_string_;
+ const MyString& operator=(const MyString& rhs);
+
+ public:
+
+ // Clones a 0-terminated C string, allocating memory using new.
+ static const char* CloneCString(const char* a_c_string);
+
+ ////////////////////////////////////////////////////////////
+ //
+ // C'tors
+
+ // The default c'tor constructs a NULL string.
+ MyString() : c_string_(NULL) {}
+
+ // Constructs a MyString by cloning a 0-terminated C string.
+ explicit MyString(const char* a_c_string) : c_string_(NULL) {
+ Set(a_c_string);
+ }
+
+ // Copy c'tor
+ MyString(const MyString& string) : c_string_(NULL) {
+ Set(string.c_string_);
+ }
+
+ ////////////////////////////////////////////////////////////
+ //
+ // D'tor. MyString is intended to be a final class, so the d'tor
+ // doesn't need to be virtual.
+ ~MyString() { delete[] c_string_; }
+
+ // Gets the 0-terminated C string this MyString object represents.
+ const char* c_string() const { return c_string_; }
+
+ size_t Length() const {
+ return c_string_ == NULL ? 0 : strlen(c_string_);
+ }
+
+ // Sets the 0-terminated C string this MyString object represents.
+ void Set(const char* c_string);
+};
+
+
+#endif // GTEST_SAMPLES_SAMPLE2_H_
diff --git a/Source/ThirdParty/gtest/samples/sample2_unittest.cc b/Source/ThirdParty/gtest/samples/sample2_unittest.cc
new file mode 100644
index 000000000..32232d98c
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample2_unittest.cc
@@ -0,0 +1,109 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// A sample program demonstrating using Google C++ testing framework.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+
+// This sample shows how to write a more complex unit test for a class
+// that has multiple member functions.
+//
+// Usually, it's a good idea to have one test for each method in your
+// class. You don't have to do that exactly, but it helps to keep
+// your tests organized. You may also throw in additional tests as
+// needed.
+
+#include "sample2.h"
+#include <gtest/gtest.h>
+
+// In this example, we test the MyString class (a simple string).
+
+// Tests the default c'tor.
+TEST(MyString, DefaultConstructor) {
+ const MyString s;
+
+ // Asserts that s.c_string() returns NULL.
+ //
+ // <TechnicalDetails>
+ //
+ // If we write NULL instead of
+ //
+ // static_cast<const char *>(NULL)
+ //
+ // in this assertion, it will generate a warning on gcc 3.4. The
+ // reason is that EXPECT_EQ needs to know the types of its
+ // arguments in order to print them when it fails. Since NULL is
+ // #defined as 0, the compiler will use the formatter function for
+ // int to print it. However, gcc thinks that NULL should be used as
+ // a pointer, not an int, and therefore complains.
+ //
+ // The root of the problem is C++'s lack of distinction between the
+ // integer number 0 and the null pointer constant. Unfortunately,
+ // we have to live with this fact.
+ //
+ // </TechnicalDetails>
+ EXPECT_STREQ(NULL, s.c_string());
+
+ EXPECT_EQ(0u, s.Length());
+}
+
+const char kHelloString[] = "Hello, world!";
+
+// Tests the c'tor that accepts a C string.
+TEST(MyString, ConstructorFromCString) {
+ const MyString s(kHelloString);
+ EXPECT_TRUE(strcmp(s.c_string(), kHelloString) == 0);
+ EXPECT_EQ(sizeof(kHelloString)/sizeof(kHelloString[0]) - 1,
+ s.Length());
+}
+
+// Tests the copy c'tor.
+TEST(MyString, CopyConstructor) {
+ const MyString s1(kHelloString);
+ const MyString s2 = s1;
+ EXPECT_TRUE(strcmp(s2.c_string(), kHelloString) == 0);
+}
+
+// Tests the Set method.
+TEST(MyString, Set) {
+ MyString s;
+
+ s.Set(kHelloString);
+ EXPECT_TRUE(strcmp(s.c_string(), kHelloString) == 0);
+
+ // Set should work when the input pointer is the same as the one
+ // already in the MyString object.
+ s.Set(s.c_string());
+ EXPECT_TRUE(strcmp(s.c_string(), kHelloString) == 0);
+
+ // Can we set the MyString to NULL?
+ s.Set(NULL);
+ EXPECT_STREQ(NULL, s.c_string());
+}
diff --git a/Source/ThirdParty/gtest/samples/sample3-inl.h b/Source/ThirdParty/gtest/samples/sample3-inl.h
new file mode 100644
index 000000000..46369a076
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample3-inl.h
@@ -0,0 +1,173 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// A sample program demonstrating using Google C++ testing framework.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#ifndef GTEST_SAMPLES_SAMPLE3_INL_H_
+#define GTEST_SAMPLES_SAMPLE3_INL_H_
+
+#include <stddef.h>
+
+
+// Queue is a simple queue implemented as a singly-linked list.
+//
+// The element type must support the copy constructor.
+template <typename E> // E is the element type
+class Queue;
+
+// QueueNode is a node in a Queue, which consists of an element of
+// type E and a pointer to the next node.
+template <typename E> // E is the element type
+class QueueNode {
+ friend class Queue<E>;
+
+ public:
+ // Gets the element in this node.
+ const E& element() const { return element_; }
+
+ // Gets the next node in the queue.
+ QueueNode* next() { return next_; }
+ const QueueNode* next() const { return next_; }
+
+ private:
+ // Creates a node with a given element value. The next pointer is
+ // set to NULL.
+ QueueNode(const E& an_element) : element_(an_element), next_(NULL) {}
+
+ // We disable the default assignment operator and copy c'tor.
+ const QueueNode& operator = (const QueueNode&);
+ QueueNode(const QueueNode&);
+
+ E element_;
+ QueueNode* next_;
+};
+
+template <typename E> // E is the element type.
+class Queue {
+ public:
+
+ // Creates an empty queue.
+ Queue() : head_(NULL), last_(NULL), size_(0) {}
+
+ // D'tor. Clears the queue.
+ ~Queue() { Clear(); }
+
+ // Clears the queue.
+ void Clear() {
+ if (size_ > 0) {
+ // 1. Deletes every node.
+ QueueNode<E>* node = head_;
+ QueueNode<E>* next = node->next();
+ for (; ;) {
+ delete node;
+ node = next;
+ if (node == NULL) break;
+ next = node->next();
+ }
+
+ // 2. Resets the member variables.
+ head_ = last_ = NULL;
+ size_ = 0;
+ }
+ }
+
+ // Gets the number of elements.
+ size_t Size() const { return size_; }
+
+ // Gets the first element of the queue, or NULL if the queue is empty.
+ QueueNode<E>* Head() { return head_; }
+ const QueueNode<E>* Head() const { return head_; }
+
+ // Gets the last element of the queue, or NULL if the queue is empty.
+ QueueNode<E>* Last() { return last_; }
+ const QueueNode<E>* Last() const { return last_; }
+
+ // Adds an element to the end of the queue. A copy of the element is
+ // created using the copy constructor, and then stored in the queue.
+  // Changes made to the element in the queue don't affect the source
+ // object, and vice versa.
+ void Enqueue(const E& element) {
+ QueueNode<E>* new_node = new QueueNode<E>(element);
+
+ if (size_ == 0) {
+ head_ = last_ = new_node;
+ size_ = 1;
+ } else {
+ last_->next_ = new_node;
+ last_ = new_node;
+ size_++;
+ }
+ }
+
+ // Removes the head of the queue and returns it. Returns NULL if
+ // the queue is empty.
+ E* Dequeue() {
+ if (size_ == 0) {
+ return NULL;
+ }
+
+ const QueueNode<E>* const old_head = head_;
+ head_ = head_->next_;
+ size_--;
+ if (size_ == 0) {
+ last_ = NULL;
+ }
+
+ E* element = new E(old_head->element());
+ delete old_head;
+
+ return element;
+ }
+
+  // Applies a function/functor to each element of the queue, and
+ // returns the result in a new queue. The original queue is not
+ // affected.
+ template <typename F>
+ Queue* Map(F function) const {
+ Queue* new_queue = new Queue();
+ for (const QueueNode<E>* node = head_; node != NULL; node = node->next_) {
+ new_queue->Enqueue(function(node->element()));
+ }
+
+ return new_queue;
+ }
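+
+  // Illustrative usage sketch (not part of the upstream sample): given a
+  // Queue<int> q and a function with signature int Double(int n),
+  //
+  //   Queue<int>* doubled = q.Map(Double);
+  //
+  // returns a brand-new queue that the caller owns and must delete;
+  // q itself is left unchanged.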
+
+ private:
+ QueueNode<E>* head_; // The first node of the queue.
+ QueueNode<E>* last_; // The last node of the queue.
+ size_t size_; // The number of elements in the queue.
+
+ // We disallow copying a queue.
+ Queue(const Queue&);
+ const Queue& operator = (const Queue&);
+};
+
+#endif // GTEST_SAMPLES_SAMPLE3_INL_H_
diff --git a/Source/ThirdParty/gtest/samples/sample3_unittest.cc b/Source/ThirdParty/gtest/samples/sample3_unittest.cc
new file mode 100644
index 000000000..34c1ca865
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample3_unittest.cc
@@ -0,0 +1,151 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// A sample program demonstrating using Google C++ testing framework.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+
+// In this example, we use a more advanced feature of Google Test called
+// test fixture.
+//
+// A test fixture is a place to hold objects and functions shared by
+// all tests in a test case. Using a test fixture avoids duplicating
+// the test code necessary to initialize and cleanup those common
+// objects for each test. It is also useful for defining sub-routines
+// that your tests need to invoke a lot.
+//
+// <TechnicalDetails>
+//
+// The tests share the test fixture in the sense of code sharing, not
+// data sharing. Each test is given its own fresh copy of the
+// fixture.  You should not expect data modified by one test to be
+// passed on to another test; sharing data between tests is a bad idea.
+//
+// The reason for this design is that tests should be independent and
+// repeatable. In particular, a test should not fail as the result of
+// another test's failure. If one test depends on info produced by
+// another test, then the two tests should really be one big test.
+//
+// The macros for indicating the success/failure of a test
+// (EXPECT_TRUE, FAIL, etc) need to know what the current test is
+// (when Google Test prints the test result, it tells you which test
+// each failure belongs to). Technically, these macros invoke a
+// member function of the Test class. Therefore, you cannot use them
+// in a global function. That's why you should put test sub-routines
+// in a test fixture.
+//
+// </TechnicalDetails>
+
+#include "sample3-inl.h"
+#include <gtest/gtest.h>
+
+// To use a test fixture, derive a class from testing::Test.
+class QueueTest : public testing::Test {
+ protected:  // You should make the members protected so that they can
+             // be accessed from sub-classes.
+
+ // virtual void SetUp() will be called before each test is run. You
+  // should define it if you need to initialize the variables.
+ // Otherwise, this can be skipped.
+ virtual void SetUp() {
+ q1_.Enqueue(1);
+ q2_.Enqueue(2);
+ q2_.Enqueue(3);
+ }
+
+ // virtual void TearDown() will be called after each test is run.
+ // You should define it if there is cleanup work to do. Otherwise,
+ // you don't have to provide it.
+ //
+ // virtual void TearDown() {
+ // }
+
+ // A helper function that some test uses.
+ static int Double(int n) {
+ return 2*n;
+ }
+
+ // A helper function for testing Queue::Map().
+ void MapTester(const Queue<int> * q) {
+ // Creates a new queue, where each element is twice as big as the
+ // corresponding one in q.
+ const Queue<int> * const new_q = q->Map(Double);
+
+ // Verifies that the new queue has the same size as q.
+ ASSERT_EQ(q->Size(), new_q->Size());
+
+ // Verifies the relationship between the elements of the two queues.
+ for ( const QueueNode<int> * n1 = q->Head(), * n2 = new_q->Head();
+ n1 != NULL; n1 = n1->next(), n2 = n2->next() ) {
+ EXPECT_EQ(2 * n1->element(), n2->element());
+ }
+
+ delete new_q;
+ }
+
+ // Declares the variables your tests want to use.
+ Queue<int> q0_;
+ Queue<int> q1_;
+ Queue<int> q2_;
+};
+
+// When you have a test fixture, you define a test using TEST_F
+// instead of TEST.
+
+// Tests the default c'tor.
+TEST_F(QueueTest, DefaultConstructor) {
+ // You can access data in the test fixture here.
+ EXPECT_EQ(0u, q0_.Size());
+}
+
+// Tests Dequeue().
+TEST_F(QueueTest, Dequeue) {
+ int * n = q0_.Dequeue();
+ EXPECT_TRUE(n == NULL);
+
+ n = q1_.Dequeue();
+ ASSERT_TRUE(n != NULL);
+ EXPECT_EQ(1, *n);
+ EXPECT_EQ(0u, q1_.Size());
+ delete n;
+
+ n = q2_.Dequeue();
+ ASSERT_TRUE(n != NULL);
+ EXPECT_EQ(2, *n);
+ EXPECT_EQ(1u, q2_.Size());
+ delete n;
+}
+
+// Tests the Queue::Map() function.
+TEST_F(QueueTest, Map) {
+ MapTester(&q0_);
+ MapTester(&q1_);
+ MapTester(&q2_);
+}
diff --git a/Source/ThirdParty/gtest/samples/sample4.cc b/Source/ThirdParty/gtest/samples/sample4.cc
new file mode 100644
index 000000000..ae44bda6f
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample4.cc
@@ -0,0 +1,46 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// A sample program demonstrating using Google C++ testing framework.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include <stdio.h>
+
+#include "sample4.h"
+
+// Returns the current counter value, and increments it.
+int Counter::Increment() {
+ return counter_++;
+}
+
+// Prints the current counter value to STDOUT.
+void Counter::Print() const {
+ printf("%d", counter_);
+}
diff --git a/Source/ThirdParty/gtest/samples/sample4.h b/Source/ThirdParty/gtest/samples/sample4.h
new file mode 100644
index 000000000..cd60f0dd2
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample4.h
@@ -0,0 +1,53 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// A sample program demonstrating using Google C++ testing framework.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#ifndef GTEST_SAMPLES_SAMPLE4_H_
+#define GTEST_SAMPLES_SAMPLE4_H_
+
+// A simple monotonic counter.
+class Counter {
+ private:
+ int counter_;
+
+ public:
+ // Creates a counter that starts at 0.
+ Counter() : counter_(0) {}
+
+ // Returns the current counter value, and increments it.
+ int Increment();
+
+ // Prints the current counter value to STDOUT.
+ void Print() const;
+};
+
+#endif // GTEST_SAMPLES_SAMPLE4_H_
diff --git a/Source/ThirdParty/gtest/samples/sample4_unittest.cc b/Source/ThirdParty/gtest/samples/sample4_unittest.cc
new file mode 100644
index 000000000..b4fb3736a
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample4_unittest.cc
@@ -0,0 +1,45 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include <gtest/gtest.h>
+#include "sample4.h"
+
+// Tests the Increment() method.
+TEST(Counter, Increment) {
+ Counter c;
+
+ // EXPECT_EQ() evaluates its arguments exactly once, so they
+ // can have side effects.
+
+ EXPECT_EQ(0, c.Increment());
+ EXPECT_EQ(1, c.Increment());
+ EXPECT_EQ(2, c.Increment());
+}
diff --git a/Source/ThirdParty/gtest/samples/sample5_unittest.cc b/Source/ThirdParty/gtest/samples/sample5_unittest.cc
new file mode 100644
index 000000000..49dae7c69
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample5_unittest.cc
@@ -0,0 +1,199 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// This sample teaches how to reuse a test fixture in multiple test
+// cases by deriving sub-fixtures from it.
+//
+// When you define a test fixture, you specify the name of the test
+// case that will use this fixture. Therefore, a test fixture can
+// be used by only one test case.
+//
+// Sometimes, more than one test case may want to use the same or
+// slightly different test fixtures. For example, you may want to
+// make sure that all tests for a GUI library don't leak important
+// system resources like fonts and brushes. In Google Test, you do
+// this by putting the shared logic in a super (as in "super class")
+// test fixture, and then have each test case use a fixture derived
+// from this super fixture.
+
+#include <limits.h>
+#include <time.h>
+#include "sample3-inl.h"
+#include <gtest/gtest.h>
+#include "sample1.h"
+
+// In this sample, we want to ensure that every test finishes within
+// ~5 seconds. If a test takes longer to run, we consider it a
+// failure.
+//
+// We put the code for timing a test in a test fixture called
+// "QuickTest". QuickTest is intended to be the super fixture that
+// other fixtures derive from; therefore, there is no test case named
+// "QuickTest".  This is OK.
+//
+// Later, we will derive multiple test fixtures from QuickTest.
+class QuickTest : public testing::Test {
+ protected:
+ // Remember that SetUp() is run immediately before a test starts.
+ // This is a good place to record the start time.
+ virtual void SetUp() {
+ start_time_ = time(NULL);
+ }
+
+ // TearDown() is invoked immediately after a test finishes. Here we
+ // check if the test was too slow.
+ virtual void TearDown() {
+ // Gets the time when the test finishes
+ const time_t end_time = time(NULL);
+
+ // Asserts that the test took no more than ~5 seconds. Did you
+ // know that you can use assertions in SetUp() and TearDown() as
+ // well?
+ EXPECT_TRUE(end_time - start_time_ <= 5) << "The test took too long.";
+ }
+
+ // The UTC time (in seconds) when the test starts
+ time_t start_time_;
+};
+
+
+// We derive a fixture named IntegerFunctionTest from the QuickTest
+// fixture. All tests using this fixture will be automatically
+// required to be quick.
+class IntegerFunctionTest : public QuickTest {
+  // We don't need any more logic than is already in the QuickTest fixture.
+ // Therefore the body is empty.
+};
+
+
+// Now we can write tests in the IntegerFunctionTest test case.
+
+// Tests Factorial()
+TEST_F(IntegerFunctionTest, Factorial) {
+ // Tests factorial of negative numbers.
+ EXPECT_EQ(1, Factorial(-5));
+ EXPECT_EQ(1, Factorial(-1));
+ EXPECT_TRUE(Factorial(-10) > 0);
+
+ // Tests factorial of 0.
+ EXPECT_EQ(1, Factorial(0));
+
+ // Tests factorial of positive numbers.
+ EXPECT_EQ(1, Factorial(1));
+ EXPECT_EQ(2, Factorial(2));
+ EXPECT_EQ(6, Factorial(3));
+ EXPECT_EQ(40320, Factorial(8));
+}
+
+
+// Tests IsPrime()
+TEST_F(IntegerFunctionTest, IsPrime) {
+ // Tests negative input.
+ EXPECT_TRUE(!IsPrime(-1));
+ EXPECT_TRUE(!IsPrime(-2));
+ EXPECT_TRUE(!IsPrime(INT_MIN));
+
+ // Tests some trivial cases.
+ EXPECT_TRUE(!IsPrime(0));
+ EXPECT_TRUE(!IsPrime(1));
+ EXPECT_TRUE(IsPrime(2));
+ EXPECT_TRUE(IsPrime(3));
+
+ // Tests positive input.
+ EXPECT_TRUE(!IsPrime(4));
+ EXPECT_TRUE(IsPrime(5));
+ EXPECT_TRUE(!IsPrime(6));
+ EXPECT_TRUE(IsPrime(23));
+}
+
+
+// The next test case (named "QueueTest") also needs to be quick, so
+// we derive another fixture from QuickTest.
+//
+// The QueueTest test fixture has some logic and shared objects in
+// addition to what's in QuickTest already. We define the additional
+// stuff inside the body of the test fixture, as usual.
+class QueueTest : public QuickTest {
+ protected:
+ virtual void SetUp() {
+ // First, we need to set up the super fixture (QuickTest).
+ QuickTest::SetUp();
+
+ // Second, some additional setup for this fixture.
+ q1_.Enqueue(1);
+ q2_.Enqueue(2);
+ q2_.Enqueue(3);
+ }
+
+ // By default, TearDown() inherits the behavior of
+ // QuickTest::TearDown(). As we have no additional cleaning work
+ // for QueueTest, we omit it here.
+ //
+ // virtual void TearDown() {
+ // QuickTest::TearDown();
+ // }
+
+ Queue<int> q0_;
+ Queue<int> q1_;
+ Queue<int> q2_;
+};
+
+
+// Now, let's write tests using the QueueTest fixture.
+
+// Tests the default constructor.
+TEST_F(QueueTest, DefaultConstructor) {
+ EXPECT_EQ(0u, q0_.Size());
+}
+
+// Tests Dequeue().
+TEST_F(QueueTest, Dequeue) {
+ int* n = q0_.Dequeue();
+ EXPECT_TRUE(n == NULL);
+
+ n = q1_.Dequeue();
+ EXPECT_TRUE(n != NULL);
+ EXPECT_EQ(1, *n);
+ EXPECT_EQ(0u, q1_.Size());
+ delete n;
+
+ n = q2_.Dequeue();
+ EXPECT_TRUE(n != NULL);
+ EXPECT_EQ(2, *n);
+ EXPECT_EQ(1u, q2_.Size());
+ delete n;
+}
+
+// If necessary, you can derive further test fixtures from a derived
+// fixture itself. For example, you can derive another fixture from
+// QueueTest. Google Test imposes no limit on how deep the hierarchy
+// can be.  In practice, however, you probably don't want it to be so
+// deep as to be confusing.
diff --git a/Source/ThirdParty/gtest/samples/sample6_unittest.cc b/Source/ThirdParty/gtest/samples/sample6_unittest.cc
new file mode 100644
index 000000000..dd0df31f0
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample6_unittest.cc
@@ -0,0 +1,224 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// This sample shows how to test common properties of multiple
+// implementations of the same interface (aka interface tests).
+
+// The interface and its implementations are in this header.
+#include "prime_tables.h"
+
+#include <gtest/gtest.h>
+
+// First, we define some factory functions for creating instances of
+// the implementations. You may be able to skip this step if all your
+// implementations can be constructed the same way.
+
+template <class T>
+PrimeTable* CreatePrimeTable();
+
+template <>
+PrimeTable* CreatePrimeTable<OnTheFlyPrimeTable>() {
+ return new OnTheFlyPrimeTable;
+}
+
+template <>
+PrimeTable* CreatePrimeTable<PreCalculatedPrimeTable>() {
+ return new PreCalculatedPrimeTable(10000);
+}
+
+// Then we define a test fixture class template.
+template <class T>
+class PrimeTableTest : public testing::Test {
+ protected:
+ // The ctor calls the factory function to create a prime table
+ // implemented by T.
+ PrimeTableTest() : table_(CreatePrimeTable<T>()) {}
+
+ virtual ~PrimeTableTest() { delete table_; }
+
+ // Note that we test an implementation via the base interface
+ // instead of the actual implementation class. This is important
+ // for keeping the tests close to the real world scenario, where the
+ // implementation is invoked via the base interface. It avoids
+  // gotchas where the implementation class has a method that shadows
+ // a method with the same name (but slightly different argument
+ // types) in the base interface, for example.
+ PrimeTable* const table_;
+};
+
+#if GTEST_HAS_TYPED_TEST
+
+using testing::Types;
+
+// Google Test offers two ways for reusing tests for different types.
+// The first is called "typed tests". You should use it if you
+// already know *all* the types you are gonna exercise when you write
+// the tests.
+
+// To write a typed test case, first use
+//
+// TYPED_TEST_CASE(TestCaseName, TypeList);
+//
+// to declare it and specify the type parameters. As with TEST_F,
+// TestCaseName must match the test fixture name.
+
+// The list of types we want to test.
+typedef Types<OnTheFlyPrimeTable, PreCalculatedPrimeTable> Implementations;
+
+TYPED_TEST_CASE(PrimeTableTest, Implementations);
+
+// Then use TYPED_TEST(TestCaseName, TestName) to define a typed test,
+// similar to TEST_F.
+TYPED_TEST(PrimeTableTest, ReturnsFalseForNonPrimes) {
+ // Inside the test body, you can refer to the type parameter by
+ // TypeParam, and refer to the fixture class by TestFixture. We
+ // don't need them in this example.
+
+ // Since we are in the template world, C++ requires explicitly
+ // writing 'this->' when referring to members of the fixture class.
+ // This is something you have to learn to live with.
+ EXPECT_FALSE(this->table_->IsPrime(-5));
+ EXPECT_FALSE(this->table_->IsPrime(0));
+ EXPECT_FALSE(this->table_->IsPrime(1));
+ EXPECT_FALSE(this->table_->IsPrime(4));
+ EXPECT_FALSE(this->table_->IsPrime(6));
+ EXPECT_FALSE(this->table_->IsPrime(100));
+}
+
+TYPED_TEST(PrimeTableTest, ReturnsTrueForPrimes) {
+ EXPECT_TRUE(this->table_->IsPrime(2));
+ EXPECT_TRUE(this->table_->IsPrime(3));
+ EXPECT_TRUE(this->table_->IsPrime(5));
+ EXPECT_TRUE(this->table_->IsPrime(7));
+ EXPECT_TRUE(this->table_->IsPrime(11));
+ EXPECT_TRUE(this->table_->IsPrime(131));
+}
+
+TYPED_TEST(PrimeTableTest, CanGetNextPrime) {
+ EXPECT_EQ(2, this->table_->GetNextPrime(0));
+ EXPECT_EQ(3, this->table_->GetNextPrime(2));
+ EXPECT_EQ(5, this->table_->GetNextPrime(3));
+ EXPECT_EQ(7, this->table_->GetNextPrime(5));
+ EXPECT_EQ(11, this->table_->GetNextPrime(7));
+ EXPECT_EQ(131, this->table_->GetNextPrime(128));
+}
+
+// That's it! Google Test will repeat each TYPED_TEST for each type
+// in the type list specified in TYPED_TEST_CASE. Sit back and be
+// happy that you don't have to define them multiple times.
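+//
+// (Illustrative note, not part of the upstream sample: the generated tests
+// are named after the position of each type in the type list, e.g.
+// PrimeTableTest/0.CanGetNextPrime runs with OnTheFlyPrimeTable and
+// PrimeTableTest/1.CanGetNextPrime with PreCalculatedPrimeTable.)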
+
+#endif // GTEST_HAS_TYPED_TEST
+
+#if GTEST_HAS_TYPED_TEST_P
+
+using testing::Types;
+
+// Sometimes, however, you don't yet know all the types that you want
+// to test when you write the tests. For example, if you are the
+// author of an interface and expect other people to implement it, you
+// might want to write a set of tests to make sure each implementation
+// conforms to some basic requirements, but you don't know what
+// implementations will be written in the future.
+//
+// How can you write the tests without committing to the type
+// parameters? That's what "type-parameterized tests" can do for you.
+// It is a bit more involved than typed tests, but in return you get a
+// test pattern that can be reused in many contexts, which is a big
+// win. Here's how you do it:
+
+// First, define a test fixture class template. Here we just reuse
+// the PrimeTableTest fixture defined earlier:
+
+template <class T>
+class PrimeTableTest2 : public PrimeTableTest<T> {
+};
+
+// Then, declare the test case. The argument is the name of the test
+// fixture, and also the name of the test case (as usual). The _P
+// suffix is for "parameterized" or "pattern".
+TYPED_TEST_CASE_P(PrimeTableTest2);
+
+// Next, use TYPED_TEST_P(TestCaseName, TestName) to define a test,
+// similar to what you do with TEST_F.
+TYPED_TEST_P(PrimeTableTest2, ReturnsFalseForNonPrimes) {
+ EXPECT_FALSE(this->table_->IsPrime(-5));
+ EXPECT_FALSE(this->table_->IsPrime(0));
+ EXPECT_FALSE(this->table_->IsPrime(1));
+ EXPECT_FALSE(this->table_->IsPrime(4));
+ EXPECT_FALSE(this->table_->IsPrime(6));
+ EXPECT_FALSE(this->table_->IsPrime(100));
+}
+
+TYPED_TEST_P(PrimeTableTest2, ReturnsTrueForPrimes) {
+ EXPECT_TRUE(this->table_->IsPrime(2));
+ EXPECT_TRUE(this->table_->IsPrime(3));
+ EXPECT_TRUE(this->table_->IsPrime(5));
+ EXPECT_TRUE(this->table_->IsPrime(7));
+ EXPECT_TRUE(this->table_->IsPrime(11));
+ EXPECT_TRUE(this->table_->IsPrime(131));
+}
+
+TYPED_TEST_P(PrimeTableTest2, CanGetNextPrime) {
+ EXPECT_EQ(2, this->table_->GetNextPrime(0));
+ EXPECT_EQ(3, this->table_->GetNextPrime(2));
+ EXPECT_EQ(5, this->table_->GetNextPrime(3));
+ EXPECT_EQ(7, this->table_->GetNextPrime(5));
+ EXPECT_EQ(11, this->table_->GetNextPrime(7));
+ EXPECT_EQ(131, this->table_->GetNextPrime(128));
+}
+
+// Type-parameterized tests involve one extra step: you have to
+// enumerate the tests you defined:
+REGISTER_TYPED_TEST_CASE_P(
+ PrimeTableTest2, // The first argument is the test case name.
+ // The rest of the arguments are the test names.
+ ReturnsFalseForNonPrimes, ReturnsTrueForPrimes, CanGetNextPrime);
+
+// At this point the test pattern is done. However, you don't have
+// any real tests yet, as you haven't said which types you want to run
+// the tests with.
+
+// To turn the abstract test pattern into real tests, you instantiate
+// it with a list of types. Usually the test pattern will be defined
+// in a .h file, and anyone can #include and instantiate it. You can
+// even instantiate it more than once in the same program. To tell
+// different instances apart, you give each of them a name, which will
+// become part of the test case name and can be used in test filters.
+
+// The list of types we want to test. Note that it doesn't have to be
+// defined at the time we write the TYPED_TEST_P()s.
+typedef Types<OnTheFlyPrimeTable, PreCalculatedPrimeTable>
+ PrimeTableImplementations;
+INSTANTIATE_TYPED_TEST_CASE_P(OnTheFlyAndPreCalculated, // Instance name
+ PrimeTableTest2, // Test case name
+ PrimeTableImplementations); // Type list
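+
+// Illustrative note (not part of the upstream sample): the instance name
+// becomes a prefix of the generated test case names, e.g.
+// OnTheFlyAndPreCalculated/PrimeTableTest2/0.CanGetNextPrime, so this
+// instantiation can be selected on the command line with something like
+// --gtest_filter='OnTheFlyAndPreCalculated/*'.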
+
+#endif // GTEST_HAS_TYPED_TEST_P
diff --git a/Source/ThirdParty/gtest/samples/sample7_unittest.cc b/Source/ThirdParty/gtest/samples/sample7_unittest.cc
new file mode 100644
index 000000000..f4552827e
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample7_unittest.cc
@@ -0,0 +1,132 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: vladl@google.com (Vlad Losev)
+
+// This sample shows how to test common properties of multiple
+// implementations of an interface (aka interface tests) using
+// value-parameterized tests. Each test in the test case receives as its
+// parameter a factory that creates the implementation under test.
+
+// The interface and its implementations are in this header.
+#include "prime_tables.h"
+
+#include <gtest/gtest.h>
+
+#if GTEST_HAS_PARAM_TEST
+
+using ::testing::TestWithParam;
+using ::testing::Values;
+
+// As a general rule, tested objects should not be reused between tests.
+// Also, the constructors and destructors of tested objects can have
+// side effects. Thus you should create and destroy them for each test.
+// In this sample we will define a simple factory function for PrimeTable
+// objects. We will instantiate objects in the test's SetUp() method and
+// delete them in its TearDown() method.
+typedef PrimeTable* CreatePrimeTableFunc();
+
+PrimeTable* CreateOnTheFlyPrimeTable() {
+ return new OnTheFlyPrimeTable();
+}
+
+template <size_t max_precalculated>
+PrimeTable* CreatePreCalculatedPrimeTable() {
+ return new PreCalculatedPrimeTable(max_precalculated);
+}
+
+// Inside the test body, the fixture constructor, SetUp(), and TearDown(),
+// you can refer to the test parameter by GetParam().
+// In this case, the test parameter is a factory function pointer, which we
+// use in the fixture's SetUp() to create and store an instance of
+// PrimeTable.
+class PrimeTableTest : public TestWithParam<CreatePrimeTableFunc*> {
+ public:
+ virtual ~PrimeTableTest() { delete table_; }
+ virtual void SetUp() { table_ = (*GetParam())(); }
+ virtual void TearDown() {
+ delete table_;
+ table_ = NULL;
+ }
+
+ protected:
+ PrimeTable* table_;
+};
+
+TEST_P(PrimeTableTest, ReturnsFalseForNonPrimes) {
+ EXPECT_FALSE(table_->IsPrime(-5));
+ EXPECT_FALSE(table_->IsPrime(0));
+ EXPECT_FALSE(table_->IsPrime(1));
+ EXPECT_FALSE(table_->IsPrime(4));
+ EXPECT_FALSE(table_->IsPrime(6));
+ EXPECT_FALSE(table_->IsPrime(100));
+}
+
+TEST_P(PrimeTableTest, ReturnsTrueForPrimes) {
+ EXPECT_TRUE(table_->IsPrime(2));
+ EXPECT_TRUE(table_->IsPrime(3));
+ EXPECT_TRUE(table_->IsPrime(5));
+ EXPECT_TRUE(table_->IsPrime(7));
+ EXPECT_TRUE(table_->IsPrime(11));
+ EXPECT_TRUE(table_->IsPrime(131));
+}
+
+TEST_P(PrimeTableTest, CanGetNextPrime) {
+ EXPECT_EQ(2, table_->GetNextPrime(0));
+ EXPECT_EQ(3, table_->GetNextPrime(2));
+ EXPECT_EQ(5, table_->GetNextPrime(3));
+ EXPECT_EQ(7, table_->GetNextPrime(5));
+ EXPECT_EQ(11, table_->GetNextPrime(7));
+ EXPECT_EQ(131, table_->GetNextPrime(128));
+}
+
+// In order to run value-parameterized tests, you need to instantiate them,
+// or bind them to a list of values which will be used as test parameters.
+// You can instantiate them in a different translation module, or even
+// instantiate them several times.
+//
+// Here, we instantiate our tests with a list of two PrimeTable object
+// factory functions:
+INSTANTIATE_TEST_CASE_P(
+ OnTheFlyAndPreCalculated,
+ PrimeTableTest,
+ Values(&CreateOnTheFlyPrimeTable, &CreatePreCalculatedPrimeTable<1000>));
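+
+// Illustrative note (not part of the upstream sample): each TEST_P above is
+// run once per value in the list, under names such as
+// OnTheFlyAndPreCalculated/PrimeTableTest.CanGetNextPrime/0 for the
+// on-the-fly factory and .../1 for the pre-calculated one.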
+
+#else
+
+// Google Test may not support value-parameterized tests with some
+// compilers. If we use conditional compilation to compile out all
+// code referring to the gtest_main library, the MSVC linker will not link
+// that library at all and consequently complain about a missing entry
+// point defined in that library (fatal error LNK1561: entry point
+// must be defined). This dummy test keeps gtest_main linked in.
+TEST(DummyTest, ValueParameterizedTestsAreNotSupportedOnThisPlatform) {}
+
+#endif // GTEST_HAS_PARAM_TEST
diff --git a/Source/ThirdParty/gtest/samples/sample8_unittest.cc b/Source/ThirdParty/gtest/samples/sample8_unittest.cc
new file mode 100644
index 000000000..ccf61d92b
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample8_unittest.cc
@@ -0,0 +1,173 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: vladl@google.com (Vlad Losev)
+
+// This sample shows how to test code relying on some global flag variables.
+// Combine() helps with generating all possible combinations of such flags,
+// and each test is given one combination as a parameter.
+
+// Use class definitions to test from this header.
+#include "prime_tables.h"
+
+#include <gtest/gtest.h>
+
+#if GTEST_HAS_COMBINE
+
+// Suppose we want to introduce a new, improved implementation of PrimeTable
+// which combines the speed of PreCalculatedPrimeTable with the versatility of
+// OnTheFlyPrimeTable (see prime_tables.h). Internally it instantiates both
+// PreCalculatedPrimeTable and OnTheFlyPrimeTable and uses whichever is more
+// appropriate under the circumstances. In low-memory conditions, however, it
+// can be told not to instantiate a PreCalculatedPrimeTable at all and to use
+// only OnTheFlyPrimeTable.
+class HybridPrimeTable : public PrimeTable {
+ public:
+ HybridPrimeTable(bool force_on_the_fly, int max_precalculated)
+ : on_the_fly_impl_(new OnTheFlyPrimeTable),
+ precalc_impl_(force_on_the_fly ? NULL :
+ new PreCalculatedPrimeTable(max_precalculated)),
+ max_precalculated_(max_precalculated) {}
+ virtual ~HybridPrimeTable() {
+ delete on_the_fly_impl_;
+ delete precalc_impl_;
+ }
+
+ virtual bool IsPrime(int n) const {
+ if (precalc_impl_ != NULL && n < max_precalculated_)
+ return precalc_impl_->IsPrime(n);
+ else
+ return on_the_fly_impl_->IsPrime(n);
+ }
+
+ virtual int GetNextPrime(int p) const {
+ int next_prime = -1;
+ if (precalc_impl_ != NULL && p < max_precalculated_)
+ next_prime = precalc_impl_->GetNextPrime(p);
+
+ return next_prime != -1 ? next_prime : on_the_fly_impl_->GetNextPrime(p);
+ }
+
+ private:
+ OnTheFlyPrimeTable* on_the_fly_impl_;
+ PreCalculatedPrimeTable* precalc_impl_;
+ int max_precalculated_;
+};
+
+using ::testing::TestWithParam;
+using ::testing::Bool;
+using ::testing::Values;
+using ::testing::Combine;
+
+// To test all code paths for HybridPrimeTable we must test it with numbers
+// both within and outside PreCalculatedPrimeTable's capacity and also with
+// PreCalculatedPrimeTable disabled. We do this by defining a fixture that will
+// accept different combinations of parameters for instantiating a
+// HybridPrimeTable instance.
+class PrimeTableTest : public TestWithParam< ::std::tr1::tuple<bool, int> > {
+ protected:
+ virtual void SetUp() {
+ // This can be written as
+ //
+ // bool force_on_the_fly;
+ // int max_precalculated;
+ // tie(force_on_the_fly, max_precalculated) = GetParam();
+ //
+ // once the Google C++ Style Guide allows use of ::std::tr1::tie.
+ //
+ bool force_on_the_fly = ::std::tr1::get<0>(GetParam());
+ int max_precalculated = ::std::tr1::get<1>(GetParam());
+ table_ = new HybridPrimeTable(force_on_the_fly, max_precalculated);
+ }
+ virtual void TearDown() {
+ delete table_;
+ table_ = NULL;
+ }
+ HybridPrimeTable* table_;
+};
+
+TEST_P(PrimeTableTest, ReturnsFalseForNonPrimes) {
+  // Inside the test body, you can refer to the test parameter by GetParam().
+  // In this case, the test parameter is a tuple of a bool and an int; the
+  // fixture's SetUp() has already unpacked it and used it to construct the
+  // HybridPrimeTable pointed to by table_, which we can use directly here.
+  // You could also save GetParam() itself in SetUp() or the constructor and
+  // use that saved copy in the tests.
+
+ EXPECT_FALSE(table_->IsPrime(-5));
+ EXPECT_FALSE(table_->IsPrime(0));
+ EXPECT_FALSE(table_->IsPrime(1));
+ EXPECT_FALSE(table_->IsPrime(4));
+ EXPECT_FALSE(table_->IsPrime(6));
+ EXPECT_FALSE(table_->IsPrime(100));
+}
+
+TEST_P(PrimeTableTest, ReturnsTrueForPrimes) {
+ EXPECT_TRUE(table_->IsPrime(2));
+ EXPECT_TRUE(table_->IsPrime(3));
+ EXPECT_TRUE(table_->IsPrime(5));
+ EXPECT_TRUE(table_->IsPrime(7));
+ EXPECT_TRUE(table_->IsPrime(11));
+ EXPECT_TRUE(table_->IsPrime(131));
+}
+
+TEST_P(PrimeTableTest, CanGetNextPrime) {
+ EXPECT_EQ(2, table_->GetNextPrime(0));
+ EXPECT_EQ(3, table_->GetNextPrime(2));
+ EXPECT_EQ(5, table_->GetNextPrime(3));
+ EXPECT_EQ(7, table_->GetNextPrime(5));
+ EXPECT_EQ(11, table_->GetNextPrime(7));
+ EXPECT_EQ(131, table_->GetNextPrime(128));
+}
+
+// In order to run value-parameterized tests, you need to instantiate them,
+// or bind them to a list of values which will be used as test parameters.
+// You can instantiate them in a different translation module, or even
+// instantiate them several times.
+//
+// Here, we instantiate our tests with a list of parameters. We must combine
+// all variations of the boolean flag suppressing PreCalculatedPrimeTable with
+// some meaningful values for the precalculation limit. We choose a small value
+// (1), and a value that puts some of the tested numbers beyond the capacity of
+// the PreCalculatedPrimeTable instance and leaves some inside it (10). Combine
+// will produce all possible combinations.
+INSTANTIATE_TEST_CASE_P(MeaningfulTestParameters,
+ PrimeTableTest,
+ Combine(Bool(), Values(1, 10)));
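+
+// Illustrative note (not part of the upstream sample): Combine(Bool(),
+// Values(1, 10)) expands to the four parameter tuples (false, 1), (false, 10),
+// (true, 1) and (true, 10), so every TEST_P above runs four times, once per
+// HybridPrimeTable configuration.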
+
+#else
+
+// Google Test may not support Combine() with some compilers. If we
+// use conditional compilation to compile out all code referring to
+// the gtest_main library, the MSVC linker will not link that library at
+// all and consequently complain about a missing entry point defined in
+// that library (fatal error LNK1561: entry point must be
+// defined). This dummy test keeps gtest_main linked in.
+TEST(DummyTest, CombineIsNotSupportedOnThisPlatform) {}
+
+#endif // GTEST_HAS_COMBINE
diff --git a/Source/ThirdParty/gtest/samples/sample9_unittest.cc b/Source/ThirdParty/gtest/samples/sample9_unittest.cc
new file mode 100644
index 000000000..d828ef4d8
--- /dev/null
+++ b/Source/ThirdParty/gtest/samples/sample9_unittest.cc
@@ -0,0 +1,160 @@
+// Copyright 2009 Google Inc. All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: vladl@google.com (Vlad Losev)
+
+// This sample shows how to use the Google Test listener API to implement
+// an alternative console output and how to use the UnitTest reflection API
+// to enumerate test cases and tests and to inspect their results.
+
+#include <stdio.h>
+
+#include <gtest/gtest.h>
+
+using ::testing::EmptyTestEventListener;
+using ::testing::InitGoogleTest;
+using ::testing::Test;
+using ::testing::TestCase;
+using ::testing::TestEventListeners;
+using ::testing::TestInfo;
+using ::testing::TestPartResult;
+using ::testing::UnitTest;
+
+namespace {
+
+// Provides an alternative output mode which produces a minimal amount of
+// information about tests.
+class TersePrinter : public EmptyTestEventListener {
+ private:
+ // Called before any test activity starts.
+ virtual void OnTestProgramStart(const UnitTest& /* unit_test */) {}
+
+ // Called after all test activities have ended.
+ virtual void OnTestProgramEnd(const UnitTest& unit_test) {
+ fprintf(stdout, "TEST %s\n", unit_test.Passed() ? "PASSED" : "FAILED");
+ fflush(stdout);
+ }
+
+ // Called before a test starts.
+ virtual void OnTestStart(const TestInfo& test_info) {
+ fprintf(stdout,
+ "*** Test %s.%s starting.\n",
+ test_info.test_case_name(),
+ test_info.name());
+ fflush(stdout);
+ }
+
+ // Called after a failed assertion or a SUCCESS().
+ virtual void OnTestPartResult(const TestPartResult& test_part_result) {
+ fprintf(stdout,
+ "%s in %s:%d\n%s\n",
+ test_part_result.failed() ? "*** Failure" : "Success",
+ test_part_result.file_name(),
+ test_part_result.line_number(),
+ test_part_result.summary());
+ fflush(stdout);
+ }
+
+ // Called after a test ends.
+ virtual void OnTestEnd(const TestInfo& test_info) {
+ fprintf(stdout,
+ "*** Test %s.%s ending.\n",
+ test_info.test_case_name(),
+ test_info.name());
+ fflush(stdout);
+ }
+}; // class TersePrinter
+
+TEST(CustomOutputTest, PrintsMessage) {
+ printf("Printing something from the test body...\n");
+}
+
+TEST(CustomOutputTest, Succeeds) {
+ SUCCEED() << "SUCCEED() has been invoked from here";
+}
+
+TEST(CustomOutputTest, Fails) {
+ EXPECT_EQ(1, 2)
+ << "This test fails in order to demonstrate alternative failure messages";
+}
+
+} // namespace
+
+int main(int argc, char **argv) {
+ InitGoogleTest(&argc, argv);
+
+ bool terse_output = false;
+ if (argc > 1 && strcmp(argv[1], "--terse_output") == 0 )
+ terse_output = true;
+ else
+ printf("%s\n", "Run this program with --terse_output to change the way "
+ "it prints its output.");
+
+ UnitTest& unit_test = *UnitTest::GetInstance();
+
+  // If we are given the --terse_output command line flag, suppress the
+  // standard output and attach our own result printer.
+ if (terse_output) {
+ TestEventListeners& listeners = unit_test.listeners();
+
+ // Removes the default console output listener from the list so it will
+ // not receive events from Google Test and won't print any output. Since
+ // this operation transfers ownership of the listener to the caller we
+ // have to delete it as well.
+ delete listeners.Release(listeners.default_result_printer());
+
+ // Adds the custom output listener to the list. It will now receive
+ // events from Google Test and print the alternative output. We don't
+ // have to worry about deleting it since Google Test assumes ownership
+ // over it after adding it to the list.
+ listeners.Append(new TersePrinter);
+ }
+ int ret_val = RUN_ALL_TESTS();
+
+ // This is an example of using the UnitTest reflection API to inspect test
+ // results. Here we discount failures from the tests we expected to fail.
+ int unexpectedly_failed_tests = 0;
+ for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
+ const TestCase& test_case = *unit_test.GetTestCase(i);
+ for (int j = 0; j < test_case.total_test_count(); ++j) {
+ const TestInfo& test_info = *test_case.GetTestInfo(j);
+      // Counts failed tests that were not meant to fail (those whose
+      // name is not 'Fails').
+ if (test_info.result()->Failed() &&
+ strcmp(test_info.name(), "Fails") != 0) {
+ unexpectedly_failed_tests++;
+ }
+ }
+ }
+
+  // Tests that were meant to fail should not affect the test program's outcome.
+ if (unexpectedly_failed_tests == 0)
+ ret_val = 0;
+
+ return ret_val;
+}
diff --git a/Source/ThirdParty/gtest/scripts/fuse_gtest_files.py b/Source/ThirdParty/gtest/scripts/fuse_gtest_files.py
new file mode 100755
index 000000000..148444ca7
--- /dev/null
+++ b/Source/ThirdParty/gtest/scripts/fuse_gtest_files.py
@@ -0,0 +1,250 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""fuse_gtest_files.py v0.2.0
+Fuses Google Test source code into a .h file and a .cc file.
+
+SYNOPSIS
+ fuse_gtest_files.py [GTEST_ROOT_DIR] OUTPUT_DIR
+
+ Scans GTEST_ROOT_DIR for Google Test source code, and generates
+ two files: OUTPUT_DIR/gtest/gtest.h and OUTPUT_DIR/gtest/gtest-all.cc.
+ Then you can build your tests by adding OUTPUT_DIR to the include
+ search path and linking with OUTPUT_DIR/gtest/gtest-all.cc. These
+ two files contain everything you need to use Google Test. Hence
+ you can "install" Google Test by copying them to wherever you want.
+
+ GTEST_ROOT_DIR can be omitted and defaults to the parent
+ directory of the directory holding this script.
+
+EXAMPLES
+ ./fuse_gtest_files.py fused_gtest
+ ./fuse_gtest_files.py path/to/unpacked/gtest fused_gtest
+
+This tool is experimental. In particular, it assumes that there is no
+conditional inclusion of Google Test headers. Please report any
+problems to googletestframework@googlegroups.com. You can read
+http://code.google.com/p/googletest/wiki/GoogleTestAdvancedGuide for
+more information.
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import re
+import sets
+import sys
+
+# We assume that this file is in the scripts/ directory in the Google
+# Test root directory.
+DEFAULT_GTEST_ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')
+
+# Regex for matching '#include <gtest/...>'.
+INCLUDE_GTEST_FILE_REGEX = re.compile(r'^\s*#\s*include\s*<(gtest/.+)>')
+
+# Regex for matching '#include "src/..."'.
+INCLUDE_SRC_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(src/.+)"')
+
+# Where to find the source seed files.
+GTEST_H_SEED = 'include/gtest/gtest.h'
+GTEST_SPI_H_SEED = 'include/gtest/gtest-spi.h'
+GTEST_ALL_CC_SEED = 'src/gtest-all.cc'
+
+# Where to put the generated files.
+GTEST_H_OUTPUT = 'gtest/gtest.h'
+GTEST_ALL_CC_OUTPUT = 'gtest/gtest-all.cc'
+
+
+def VerifyFileExists(directory, relative_path):
+ """Verifies that the given file exists; aborts on failure.
+
+ relative_path is the file path relative to the given directory.
+ """
+
+ if not os.path.isfile(os.path.join(directory, relative_path)):
+ print 'ERROR: Cannot find %s in directory %s.' % (relative_path,
+ directory)
+ print ('Please either specify a valid project root directory '
+ 'or omit it on the command line.')
+ sys.exit(1)
+
+
+def ValidateGTestRootDir(gtest_root):
+ """Makes sure gtest_root points to a valid gtest root directory.
+
+ The function aborts the program on failure.
+ """
+
+ VerifyFileExists(gtest_root, GTEST_H_SEED)
+ VerifyFileExists(gtest_root, GTEST_ALL_CC_SEED)
+
+
+def VerifyOutputFile(output_dir, relative_path):
+ """Verifies that the given output file path is valid.
+
+ relative_path is relative to the output_dir directory.
+ """
+
+ # Makes sure the output file either doesn't exist or can be overwritten.
+ output_file = os.path.join(output_dir, relative_path)
+ if os.path.exists(output_file):
+ # TODO(wan@google.com): The following user-interaction doesn't
+ # work with automated processes. We should provide a way for the
+ # Makefile to force overwriting the files.
+ print ('%s already exists in directory %s - overwrite it? (y/N) ' %
+ (relative_path, output_dir))
+ answer = sys.stdin.readline().strip()
+ if answer not in ['y', 'Y']:
+ print 'ABORTED.'
+ sys.exit(1)
+
+ # Makes sure the directory holding the output file exists; creates
+ # it and all its ancestors if necessary.
+ parent_directory = os.path.dirname(output_file)
+ if not os.path.isdir(parent_directory):
+ os.makedirs(parent_directory)
+
+
+def ValidateOutputDir(output_dir):
+ """Makes sure output_dir points to a valid output directory.
+
+ The function aborts the program on failure.
+ """
+
+ VerifyOutputFile(output_dir, GTEST_H_OUTPUT)
+ VerifyOutputFile(output_dir, GTEST_ALL_CC_OUTPUT)
+
+
+def FuseGTestH(gtest_root, output_dir):
+ """Scans folder gtest_root to generate gtest/gtest.h in output_dir."""
+
+ output_file = file(os.path.join(output_dir, GTEST_H_OUTPUT), 'w')
+ processed_files = sets.Set() # Holds all gtest headers we've processed.
+
+ def ProcessFile(gtest_header_path):
+ """Processes the given gtest header file."""
+
+ # We don't process the same header twice.
+ if gtest_header_path in processed_files:
+ return
+
+ processed_files.add(gtest_header_path)
+
+ # Reads each line in the given gtest header.
+ for line in file(os.path.join(gtest_root, gtest_header_path), 'r'):
+ m = INCLUDE_GTEST_FILE_REGEX.match(line)
+ if m:
+ # It's '#include <gtest/...>' - let's process it recursively.
+ ProcessFile('include/' + m.group(1))
+ else:
+ # Otherwise we copy the line unchanged to the output file.
+ output_file.write(line)
+
+ ProcessFile(GTEST_H_SEED)
+ output_file.close()
+
+
+def FuseGTestAllCcToFile(gtest_root, output_file):
+ """Scans folder gtest_root to generate gtest/gtest-all.cc in output_file."""
+
+ processed_files = sets.Set()
+
+ def ProcessFile(gtest_source_file):
+ """Processes the given gtest source file."""
+
+ # We don't process the same #included file twice.
+ if gtest_source_file in processed_files:
+ return
+
+ processed_files.add(gtest_source_file)
+
+ # Reads each line in the given gtest source file.
+ for line in file(os.path.join(gtest_root, gtest_source_file), 'r'):
+ m = INCLUDE_GTEST_FILE_REGEX.match(line)
+ if m:
+ if 'include/' + m.group(1) == GTEST_SPI_H_SEED:
+ # It's '#include <gtest/gtest-spi.h>'. This file is not
+ # #included by <gtest/gtest.h>, so we need to process it.
+ ProcessFile(GTEST_SPI_H_SEED)
+ else:
+ # It's '#include <gtest/foo.h>' where foo is not gtest-spi.
+ # We treat it as '#include <gtest/gtest.h>', as all other
+ # gtest headers are being fused into gtest.h and cannot be
+ # #included directly.
+
+ # There is no need to #include <gtest/gtest.h> more than once.
+ if not GTEST_H_SEED in processed_files:
+ processed_files.add(GTEST_H_SEED)
+ output_file.write('#include <%s>\n' % (GTEST_H_OUTPUT,))
+ else:
+ m = INCLUDE_SRC_FILE_REGEX.match(line)
+ if m:
+ # It's '#include "src/foo"' - let's process it recursively.
+ ProcessFile(m.group(1))
+ else:
+ output_file.write(line)
+
+ ProcessFile(GTEST_ALL_CC_SEED)
+
+
+def FuseGTestAllCc(gtest_root, output_dir):
+ """Scans folder gtest_root to generate gtest/gtest-all.cc in output_dir."""
+
+ output_file = file(os.path.join(output_dir, GTEST_ALL_CC_OUTPUT), 'w')
+ FuseGTestAllCcToFile(gtest_root, output_file)
+ output_file.close()
+
+
+def FuseGTest(gtest_root, output_dir):
+ """Fuses gtest.h and gtest-all.cc."""
+
+ ValidateGTestRootDir(gtest_root)
+ ValidateOutputDir(output_dir)
+
+ FuseGTestH(gtest_root, output_dir)
+ FuseGTestAllCc(gtest_root, output_dir)
+
+
+def main():
+ argc = len(sys.argv)
+ if argc == 2:
+ # fuse_gtest_files.py OUTPUT_DIR
+ FuseGTest(DEFAULT_GTEST_ROOT_DIR, sys.argv[1])
+ elif argc == 3:
+ # fuse_gtest_files.py GTEST_ROOT_DIR OUTPUT_DIR
+ FuseGTest(sys.argv[1], sys.argv[2])
+ else:
+ print __doc__
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/Source/ThirdParty/gtest/scripts/gen_gtest_pred_impl.py b/Source/ThirdParty/gtest/scripts/gen_gtest_pred_impl.py
new file mode 100755
index 000000000..8307134ad
--- /dev/null
+++ b/Source/ThirdParty/gtest/scripts/gen_gtest_pred_impl.py
@@ -0,0 +1,733 @@
+#!/usr/bin/env python
+#
+# Copyright 2006, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""gen_gtest_pred_impl.py v0.1
+
+Generates the implementation of Google Test predicate assertions and
+accompanying tests.
+
+Usage:
+
+ gen_gtest_pred_impl.py MAX_ARITY
+
+where MAX_ARITY is a positive integer.
+
+The command generates the implementation of up to MAX_ARITY-ary
+predicate assertions and writes it to include/gtest/gtest_pred_impl.h
+under the gtest root directory.  It also generates the accompanying
+unit test in test/gtest_pred_impl_unittest.cc under the same root.
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import sys
+import time
+
+# Where this script is.
+SCRIPT_DIR = os.path.dirname(sys.argv[0])
+
+# Where to store the generated header.
+HEADER = os.path.join(SCRIPT_DIR, '../include/gtest/gtest_pred_impl.h')
+
+# Where to store the generated unit test.
+UNIT_TEST = os.path.join(SCRIPT_DIR, '../test/gtest_pred_impl_unittest.cc')
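
Running the script is therefore equivalent to the sketch below (using
GenerateHeader and GenerateUnitTest defined later in this file; note that
UnitTestPreamble reads sys.argv[1] for its banner, so the arity is also placed
on argv):

  import sys

  sys.argv = ['gen_gtest_pred_impl.py', '5']
  n = int(sys.argv[1])
  GenerateHeader(n)      # rewrites HEADER
  GenerateUnitTest(n)    # rewrites UNIT_TEST
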
+
+
+def HeaderPreamble(n):
+ """Returns the preamble for the header file.
+
+ Args:
+ n: the maximum arity of the predicate macros to be generated.
+ """
+
+ # A map that defines the values used in the preamble template.
+ DEFS = {
+ 'today' : time.strftime('%m/%d/%Y'),
+ 'year' : time.strftime('%Y'),
+ 'command' : '%s %s' % (os.path.basename(sys.argv[0]), n),
+ 'n' : n
+ }
+
+ return (
+"""// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This file is AUTOMATICALLY GENERATED on %(today)s by command
+// '%(command)s'. DO NOT EDIT BY HAND!
+//
+// Implements a family of generic predicate assertion macros.
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
+#define GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
+
+// Makes sure this header is not included before gtest.h.
+#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
+#error Do not include gtest_pred_impl.h directly. Include gtest.h instead.
+#endif // GTEST_INCLUDE_GTEST_GTEST_H_
+
+// This header implements a family of generic predicate assertion
+// macros:
+//
+// ASSERT_PRED_FORMAT1(pred_format, v1)
+// ASSERT_PRED_FORMAT2(pred_format, v1, v2)
+// ...
+//
+// where pred_format is a function or functor that takes n (in the
+// case of ASSERT_PRED_FORMATn) values and their source expression
+// text, and returns a testing::AssertionResult. See the definition
+// of ASSERT_EQ in gtest.h for an example.
+//
+// If you don't care about formatting, you can use the more
+// restrictive version:
+//
+// ASSERT_PRED1(pred, v1)
+// ASSERT_PRED2(pred, v1, v2)
+// ...
+//
+// where pred is an n-ary function or functor that returns bool,
+// and the values v1, v2, ..., must support the << operator for
+// streaming to std::ostream.
+//
+// We also define the EXPECT_* variations.
+//
+// For now we only support predicates whose arity is at most %(n)s.
+// Please email googletestframework@googlegroups.com if you need
+// support for higher arities.
+
+// GTEST_ASSERT_ is the basic statement to which all of the assertions
+// in this file reduce. Don't use this in your code.
+
+#define GTEST_ASSERT_(expression, on_failure) \\
+ GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\
+ if (const ::testing::AssertionResult gtest_ar = (expression)) \\
+ ; \\
+ else \\
+ on_failure(gtest_ar.failure_message())
+""" % DEFS)
+
+
+def Arity(n):
+ """Returns the English name of the given arity."""
+
+ if n < 0:
+ return None
+ elif n <= 3:
+ return ['nullary', 'unary', 'binary', 'ternary'][n]
+ else:
+ return '%s-ary' % n
+
+
+def Title(word):
+ """Returns the given word in title case. The difference between
+ this and string's title() method is that Title('4-ary') is '4-ary'
+ while '4-ary'.title() is '4-Ary'."""
+
+ return word[0].upper() + word[1:]
+
+
+def OneTo(n):
+ """Returns the list [1, 2, 3, ..., n]."""
+
+ return range(1, n + 1)
+
+
+def Iter(n, format, sep=''):
+ """Given a positive integer n, a format string that contains 0 or
+ more '%s' format specs, and optionally a separator string, returns
+ the join of n strings, each formatted with the format string on an
+ iterator ranged from 1 to n.
+
+ Example:
+
+ Iter(3, 'v%s', sep=', ') returns 'v1, v2, v3'.
+ """
+
+ # How many '%s' specs are in format?
+ spec_count = len(format.split('%s')) - 1
+ return sep.join([format % (spec_count * (i,)) for i in OneTo(n)])
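
A few concrete values of these helpers, matching their docstrings; these are
exactly the fragments later substituted into the C++ templates as 'vs',
'types', and so on:

  print(Arity(2))                             # binary
  print(Title(Arity(4)))                      # 4-ary
  print(list(OneTo(3)))                       # [1, 2, 3]
  print(Iter(3, 'v%s', sep=', '))             # v1, v2, v3
  print(Iter(2, 'typename T%s', sep=', '))    # typename T1, typename T2
  print(Iter(2, 'const T%s& v%s', sep=', '))  # const T1& v1, const T2& v2
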
+
+
+def ImplementationForArity(n):
+ """Returns the implementation of n-ary predicate assertions."""
+
+  # A map that defines the values used in the implementation template.
+ DEFS = {
+ 'n' : str(n),
+ 'vs' : Iter(n, 'v%s', sep=', '),
+ 'vts' : Iter(n, '#v%s', sep=', '),
+ 'arity' : Arity(n),
+ 'Arity' : Title(Arity(n))
+ }
+
+ impl = """
+
+// Helper function for implementing {EXPECT|ASSERT}_PRED%(n)s. Don't use
+// this in your code.
+template <typename Pred""" % DEFS
+
+ impl += Iter(n, """,
+ typename T%s""")
+
+ impl += """>
+AssertionResult AssertPred%(n)sHelper(const char* pred_text""" % DEFS
+
+ impl += Iter(n, """,
+ const char* e%s""")
+
+ impl += """,
+ Pred pred"""
+
+ impl += Iter(n, """,
+ const T%s& v%s""")
+
+ impl += """) {
+ if (pred(%(vs)s)) return AssertionSuccess();
+
+ Message msg;
+""" % DEFS
+
+ impl += ' msg << pred_text << "("'
+
+ impl += Iter(n, """
+ << e%s""", sep=' << ", "')
+
+ impl += ' << ") evaluates to false, where"'
+
+ impl += Iter(n, """
+ << "\\n" << e%s << " evaluates to " << v%s""")
+
+ impl += """;
+ return AssertionFailure(msg);
+}
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
+// Don't use this in your code.
+#define GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, on_failure)\\
+ GTEST_ASSERT_(pred_format(%(vts)s, %(vs)s),\\
+ on_failure)
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED%(n)s. Don't use
+// this in your code.
+#define GTEST_PRED%(n)s_(pred, %(vs)s, on_failure)\\
+ GTEST_ASSERT_(::testing::AssertPred%(n)sHelper(#pred""" % DEFS
+
+ impl += Iter(n, """, \\
+ #v%s""")
+
+ impl += """, \\
+ pred"""
+
+ impl += Iter(n, """, \\
+ v%s""")
+
+ impl += """), on_failure)
+
+// %(Arity)s predicate assertion macros.
+#define EXPECT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
+ GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_NONFATAL_FAILURE_)
+#define EXPECT_PRED%(n)s(pred, %(vs)s) \\
+ GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_NONFATAL_FAILURE_)
+#define ASSERT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
+ GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_FATAL_FAILURE_)
+#define ASSERT_PRED%(n)s(pred, %(vs)s) \\
+ GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_FATAL_FAILURE_)
+
+""" % DEFS
+
+ return impl
+
+
+def HeaderPostamble():
+ """Returns the postamble for the header file."""
+
+ return """
+
+#endif // GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
+"""
+
+
+def GenerateFile(path, content):
+  """Given a file path and a content string, overwrites the file at that
+  path with the given content."""
+
+ print 'Updating file %s . . .' % path
+
+ f = file(path, 'w+')
+ print >>f, content,
+ f.close()
+
+ print 'File %s has been updated.' % path
+
+
+def GenerateHeader(n):
+ """Given the maximum arity n, updates the header file that implements
+ the predicate assertions."""
+
+ GenerateFile(HEADER,
+ HeaderPreamble(n)
+ + ''.join([ImplementationForArity(i) for i in OneTo(n)])
+ + HeaderPostamble())
+
+
+def UnitTestPreamble():
+ """Returns the preamble for the unit test file."""
+
+ # A map that defines the values used in the preamble template.
+ DEFS = {
+ 'today' : time.strftime('%m/%d/%Y'),
+ 'year' : time.strftime('%Y'),
+ 'command' : '%s %s' % (os.path.basename(sys.argv[0]), sys.argv[1]),
+ }
+
+ return (
+"""// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This file is AUTOMATICALLY GENERATED on %(today)s by command
+// '%(command)s'. DO NOT EDIT BY HAND!
+
+// Regression test for gtest_pred_impl.h
+//
+// This file is generated by a script and quite long. If you intend to
+// learn how Google Test works by reading its unit tests, read
+// gtest_unittest.cc instead.
+//
+// This is intended as a regression test for the Google Test predicate
+// assertions. We compile it as part of the gtest_unittest target
+// only to keep the implementation tidy and compact, as it is quite
+// involved to set up the stage for testing Google Test using Google
+// Test itself.
+//
+// Currently, gtest_unittest takes ~11 seconds to run in the testing
+// daemon. In the future, if it grows too large and needs much more
+// time to finish, we should consider separating this file into a
+// stand-alone regression test.
+
+#include <iostream>
+
+#include <gtest/gtest.h>
+#include <gtest/gtest-spi.h>
+
+// A user-defined data type.
+struct Bool {
+ explicit Bool(int val) : value(val != 0) {}
+
+ bool operator>(int n) const { return value > Bool(n).value; }
+
+ Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); }
+
+ bool operator==(const Bool& rhs) const { return value == rhs.value; }
+
+ bool value;
+};
+
+// Enables Bool to be used in assertions.
+std::ostream& operator<<(std::ostream& os, const Bool& x) {
+ return os << (x.value ? "true" : "false");
+}
+
+""" % DEFS)
+
+
+def TestsForArity(n):
+ """Returns the tests for n-ary predicate assertions."""
+
+ # A map that defines the values used in the template for the tests.
+ DEFS = {
+ 'n' : n,
+ 'es' : Iter(n, 'e%s', sep=', '),
+ 'vs' : Iter(n, 'v%s', sep=', '),
+ 'vts' : Iter(n, '#v%s', sep=', '),
+ 'tvs' : Iter(n, 'T%s v%s', sep=', '),
+ 'int_vs' : Iter(n, 'int v%s', sep=', '),
+ 'Bool_vs' : Iter(n, 'Bool v%s', sep=', '),
+ 'types' : Iter(n, 'typename T%s', sep=', '),
+ 'v_sum' : Iter(n, 'v%s', sep=' + '),
+ 'arity' : Arity(n),
+ 'Arity' : Title(Arity(n)),
+ }
+
+ tests = (
+"""// Sample functions/functors for testing %(arity)s predicate assertions.
+
+// A %(arity)s predicate function.
+template <%(types)s>
+bool PredFunction%(n)s(%(tvs)s) {
+ return %(v_sum)s > 0;
+}
+
+// The following two functions are needed to circumvent a bug in
+// gcc 2.95.3, which sometimes has problem with the above template
+// function.
+bool PredFunction%(n)sInt(%(int_vs)s) {
+ return %(v_sum)s > 0;
+}
+bool PredFunction%(n)sBool(%(Bool_vs)s) {
+ return %(v_sum)s > 0;
+}
+""" % DEFS)
+
+ tests += """
+// A %(arity)s predicate functor.
+struct PredFunctor%(n)s {
+ template <%(types)s>
+ bool operator()(""" % DEFS
+
+ tests += Iter(n, 'const T%s& v%s', sep=""",
+ """)
+
+ tests += """) {
+ return %(v_sum)s > 0;
+ }
+};
+""" % DEFS
+
+ tests += """
+// A %(arity)s predicate-formatter function.
+template <%(types)s>
+testing::AssertionResult PredFormatFunction%(n)s(""" % DEFS
+
+ tests += Iter(n, 'const char* e%s', sep=""",
+ """)
+
+ tests += Iter(n, """,
+ const T%s& v%s""")
+
+ tests += """) {
+ if (PredFunction%(n)s(%(vs)s))
+ return testing::AssertionSuccess();
+
+ testing::Message msg;
+ msg << """ % DEFS
+
+ tests += Iter(n, 'e%s', sep=' << " + " << ')
+
+ tests += """
+ << " is expected to be positive, but evaluates to "
+ << %(v_sum)s << ".";
+ return testing::AssertionFailure(msg);
+}
+""" % DEFS
+
+ tests += """
+// A %(arity)s predicate-formatter functor.
+struct PredFormatFunctor%(n)s {
+ template <%(types)s>
+ testing::AssertionResult operator()(""" % DEFS
+
+ tests += Iter(n, 'const char* e%s', sep=""",
+ """)
+
+ tests += Iter(n, """,
+ const T%s& v%s""")
+
+ tests += """) const {
+ return PredFormatFunction%(n)s(%(es)s, %(vs)s);
+ }
+};
+""" % DEFS
+
+ tests += """
+// Tests for {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
+
+class Predicate%(n)sTest : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ expected_to_finish_ = true;
+ finished_ = false;""" % DEFS
+
+ tests += """
+ """ + Iter(n, 'n%s_ = ') + """0;
+ }
+"""
+
+ tests += """
+ virtual void TearDown() {
+ // Verifies that each of the predicate's arguments was evaluated
+ // exactly once."""
+
+ tests += ''.join(["""
+ EXPECT_EQ(1, n%s_) <<
+ "The predicate assertion didn't evaluate argument %s "
+ "exactly once.";""" % (i, i + 1) for i in OneTo(n)])
+
+ tests += """
+
+ // Verifies that the control flow in the test function is expected.
+ if (expected_to_finish_ && !finished_) {
+      FAIL() << "The predicate assertion unexpectedly aborted the test.";
+ } else if (!expected_to_finish_ && finished_) {
+ FAIL() << "The failed predicate assertion didn't abort the test "
+ "as expected.";
+ }
+ }
+
+ // true iff the test function is expected to run to finish.
+ static bool expected_to_finish_;
+
+ // true iff the test function did run to finish.
+ static bool finished_;
+""" % DEFS
+
+ tests += Iter(n, """
+ static int n%s_;""")
+
+ tests += """
+};
+
+bool Predicate%(n)sTest::expected_to_finish_;
+bool Predicate%(n)sTest::finished_;
+""" % DEFS
+
+ tests += Iter(n, """int Predicate%%(n)sTest::n%s_;
+""") % DEFS
+
+ tests += """
+typedef Predicate%(n)sTest EXPECT_PRED_FORMAT%(n)sTest;
+typedef Predicate%(n)sTest ASSERT_PRED_FORMAT%(n)sTest;
+typedef Predicate%(n)sTest EXPECT_PRED%(n)sTest;
+typedef Predicate%(n)sTest ASSERT_PRED%(n)sTest;
+""" % DEFS
+
+ def GenTest(use_format, use_assert, expect_failure,
+ use_functor, use_user_type):
+ """Returns the test for a predicate assertion macro.
+
+ Args:
+ use_format: true iff the assertion is a *_PRED_FORMAT*.
+ use_assert: true iff the assertion is a ASSERT_*.
+ expect_failure: true iff the assertion is expected to fail.
+ use_functor: true iff the first argument of the assertion is
+ a functor (as opposed to a function)
+ use_user_type: true iff the predicate functor/function takes
+ argument(s) of a user-defined type.
+
+ Example:
+
+ GenTest(1, 0, 0, 1, 0) returns a test that tests the behavior
+ of a successful EXPECT_PRED_FORMATn() that takes a functor
+ whose arguments have built-in types."""
+
+ if use_assert:
+ assrt = 'ASSERT' # 'assert' is reserved, so we cannot use
+ # that identifier here.
+ else:
+ assrt = 'EXPECT'
+
+ assertion = assrt + '_PRED'
+
+ if use_format:
+ pred_format = 'PredFormat'
+ assertion += '_FORMAT'
+ else:
+ pred_format = 'Pred'
+
+ assertion += '%(n)s' % DEFS
+
+ if use_functor:
+ pred_format_type = 'functor'
+ pred_format += 'Functor%(n)s()'
+ else:
+ pred_format_type = 'function'
+ pred_format += 'Function%(n)s'
+ if not use_format:
+ if use_user_type:
+ pred_format += 'Bool'
+ else:
+ pred_format += 'Int'
+
+ test_name = pred_format_type.title()
+
+ if use_user_type:
+ arg_type = 'user-defined type (Bool)'
+ test_name += 'OnUserType'
+ if expect_failure:
+ arg = 'Bool(n%s_++)'
+ else:
+ arg = 'Bool(++n%s_)'
+ else:
+ arg_type = 'built-in type (int)'
+ test_name += 'OnBuiltInType'
+ if expect_failure:
+ arg = 'n%s_++'
+ else:
+ arg = '++n%s_'
+
+ if expect_failure:
+ successful_or_failed = 'failed'
+ expected_or_not = 'expected.'
+ test_name += 'Failure'
+ else:
+ successful_or_failed = 'successful'
+ expected_or_not = 'UNEXPECTED!'
+ test_name += 'Success'
+
+ # A map that defines the values used in the test template.
+ defs = DEFS.copy()
+ defs.update({
+ 'assert' : assrt,
+ 'assertion' : assertion,
+ 'test_name' : test_name,
+ 'pf_type' : pred_format_type,
+ 'pf' : pred_format,
+ 'arg_type' : arg_type,
+ 'arg' : arg,
+ 'successful' : successful_or_failed,
+ 'expected' : expected_or_not,
+ })
+
+ test = """
+// Tests a %(successful)s %(assertion)s where the
+// predicate-formatter is a %(pf_type)s on a %(arg_type)s.
+TEST_F(%(assertion)sTest, %(test_name)s) {""" % defs
+
+ indent = (len(assertion) + 3)*' '
+ extra_indent = ''
+
+ if expect_failure:
+ extra_indent = ' '
+ if use_assert:
+ test += """
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT"""
+ else:
+ test += """
+ EXPECT_NONFATAL_FAILURE({ // NOLINT"""
+
+ test += '\n' + extra_indent + """ %(assertion)s(%(pf)s""" % defs
+
+ test = test % defs
+ test += Iter(n, ',\n' + indent + extra_indent + '%(arg)s' % defs)
+ test += ');\n' + extra_indent + ' finished_ = true;\n'
+
+ if expect_failure:
+ test += ' }, "");\n'
+
+ test += '}\n'
+ return test
+
+  # Generates tests for all 2**5 = 32 combinations of the five flags.
+ tests += ''.join([GenTest(use_format, use_assert, expect_failure,
+ use_functor, use_user_type)
+ for use_format in [0, 1]
+ for use_assert in [0, 1]
+ for expect_failure in [0, 1]
+ for use_functor in [0, 1]
+ for use_user_type in [0, 1]
+ ])
+
+ return tests
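
As a sanity check on the comment above: the comprehension enumerates every
combination of the five boolean flags, so each arity contributes 2**5 = 32
generated TEST_F bodies (standalone sketch):

  combos = [(f, a, e, fu, u)
            for f in (0, 1) for a in (0, 1) for e in (0, 1)
            for fu in (0, 1) for u in (0, 1)]
  print(len(combos))  # 32
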
+
+
+def UnitTestPostamble():
+ """Returns the postamble for the tests."""
+
+ return ''
+
+
+def GenerateUnitTest(n):
+  """Generates the unit test for predicate assertions of arity up to n."""
+
+ GenerateFile(UNIT_TEST,
+ UnitTestPreamble()
+ + ''.join([TestsForArity(i) for i in OneTo(n)])
+ + UnitTestPostamble())
+
+
+def _Main():
+ """The entry point of the script. Generates the header file and its
+ unit test."""
+
+ if len(sys.argv) != 2:
+ print __doc__
+ print 'Author: ' + __author__
+ sys.exit(1)
+
+ n = int(sys.argv[1])
+ GenerateHeader(n)
+ GenerateUnitTest(n)
+
+
+if __name__ == '__main__':
+ _Main()
diff --git a/Source/ThirdParty/gtest/scripts/gtest-config.in b/Source/ThirdParty/gtest/scripts/gtest-config.in
new file mode 100755
index 000000000..9c7263854
--- /dev/null
+++ b/Source/ThirdParty/gtest/scripts/gtest-config.in
@@ -0,0 +1,274 @@
+#!/bin/sh
+
+# These variables are automatically filled in by the configure script.
+name="@PACKAGE_TARNAME@"
+version="@PACKAGE_VERSION@"
+
+show_usage()
+{
+ echo "Usage: gtest-config [OPTIONS...]"
+}
+
+show_help()
+{
+ show_usage
+ cat <<\EOF
+
+The `gtest-config' script provides access to the necessary compile and linking
+flags to connect with Google C++ Testing Framework, both in a build prior to
+installation, and on the system proper after installation. The installation
+overrides may be issued in combination with any other queries, but will only
+affect installation queries if called on a built but not installed gtest. The
+installation queries may not be issued with any other types of queries, and
+only one installation query may be made at a time. The version queries and
+compiler flag queries may be combined as desired but not mixed. Different
+version queries are always combined with logical "and" semantics, and only the
+last of any particular query is used while all previous ones are ignored. All
+versions must be specified as a sequence of numbers separated by periods.
+Compiler flag queries output the union of the sets of flags when combined.
+
+ Examples:
+ gtest-config --min-version=1.0 || echo "Insufficient Google Test version."
+
+ g++ $(gtest-config --cppflags --cxxflags) -o foo.o -c foo.cpp
+ g++ $(gtest-config --ldflags --libs) -o foo foo.o
+
+ # When using a built but not installed Google Test:
+ g++ $(../../my_gtest_build/scripts/gtest-config ...) ...
+
+ # When using an installed Google Test, but with installation overrides:
+ export GTEST_PREFIX="/opt"
+ g++ $(gtest-config --libdir="/opt/lib64" ...) ...
+
+ Help:
+ --usage brief usage information
+ --help display this help message
+
+ Installation Overrides:
+ --prefix=<dir> overrides the installation prefix
+ --exec-prefix=<dir> overrides the executable installation prefix
+ --libdir=<dir> overrides the library installation prefix
+ --includedir=<dir> overrides the header file installation prefix
+
+ Installation Queries:
+ --prefix installation prefix
+ --exec-prefix executable installation prefix
+ --libdir library installation directory
+ --includedir header file installation directory
+ --version the version of the Google Test installation
+
+ Version Queries:
+ --min-version=VERSION return 0 if the version is at least VERSION
+ --exact-version=VERSION return 0 if the version is exactly VERSION
+ --max-version=VERSION return 0 if the version is at most VERSION
+
+ Compilation Flag Queries:
+ --cppflags compile flags specific to the C-like preprocessors
+ --cxxflags compile flags appropriate for C++ programs
+ --ldflags linker flags
+ --libs libraries for linking
+
+EOF
+}
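
For example, a Python build helper could shell out to this script using only
the flag queries documented above (a sketch; it assumes gtest-config is on
PATH or that a full path is substituted):

  import subprocess

  def gtest_config(*queries):
      """Runs gtest-config with the given queries and returns its output tokens."""
      out = subprocess.check_output(['gtest-config'] + list(queries))
      return out.decode().split()

  compile_args = gtest_config('--cppflags', '--cxxflags')
  link_args = gtest_config('--ldflags', '--libs')
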
+
+# This function bounds our version with a min and a max. It uses some clever
+# POSIX-compliant variable expansion to portably do all the work in the shell
+# and avoid any dependency on a particular "sed" or "awk" implementation.
+# Note that it will only ever compare the first 3 components of versions.
+# Further components will be cleanly stripped off. All versions must be
+# unadorned, so "v1.0" will *not* work. The minimum version must be in $1, and
+# the max in $2. TODO(chandlerc@google.com): If this ever breaks, we should
+# investigate expanding this via autom4te from AS_VERSION_COMPARE rather than
+# continuing to maintain our own shell version.
+check_versions()
+{
+ major_version=${version%%.*}
+ minor_version="0"
+ point_version="0"
+ if test "${version#*.}" != "${version}"; then
+ minor_version=${version#*.}
+ minor_version=${minor_version%%.*}
+ fi
+ if test "${version#*.*.}" != "${version}"; then
+ point_version=${version#*.*.}
+ point_version=${point_version%%.*}
+ fi
+
+ min_version="$1"
+ min_major_version=${min_version%%.*}
+ min_minor_version="0"
+ min_point_version="0"
+ if test "${min_version#*.}" != "${min_version}"; then
+ min_minor_version=${min_version#*.}
+ min_minor_version=${min_minor_version%%.*}
+ fi
+ if test "${min_version#*.*.}" != "${min_version}"; then
+ min_point_version=${min_version#*.*.}
+ min_point_version=${min_point_version%%.*}
+ fi
+
+ max_version="$2"
+ max_major_version=${max_version%%.*}
+ max_minor_version="0"
+ max_point_version="0"
+ if test "${max_version#*.}" != "${max_version}"; then
+ max_minor_version=${max_version#*.}
+ max_minor_version=${max_minor_version%%.*}
+ fi
+ if test "${max_version#*.*.}" != "${max_version}"; then
+ max_point_version=${max_version#*.*.}
+ max_point_version=${max_point_version%%.*}
+ fi
+
+ test $(($major_version)) -lt $(($min_major_version)) && exit 1
+ if test $(($major_version)) -eq $(($min_major_version)); then
+ test $(($minor_version)) -lt $(($min_minor_version)) && exit 1
+ if test $(($minor_version)) -eq $(($min_minor_version)); then
+ test $(($point_version)) -lt $(($min_point_version)) && exit 1
+ fi
+ fi
+
+ test $(($major_version)) -gt $(($max_major_version)) && exit 1
+ if test $(($major_version)) -eq $(($max_major_version)); then
+ test $(($minor_version)) -gt $(($max_minor_version)) && exit 1
+ if test $(($minor_version)) -eq $(($max_minor_version)); then
+ test $(($point_version)) -gt $(($max_point_version)) && exit 1
+ fi
+ fi
+
+ exit 0
+}
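
The same three-component bound check expressed in Python, as a readability aid
(a sketch mirroring check_versions: missing components default to 0, extra
components are stripped, and adorned strings such as "v1.0" are rejected):

  def parse3(version):
      parts = (version.split('.') + ['0', '0', '0'])[:3]
      return tuple(int(p) for p in parts)   # e.g. '1.4' -> (1, 4, 0)

  def version_in_range(version, min_version, max_version):
      return parse3(min_version) <= parse3(version) <= parse3(max_version)

  print(version_in_range('1.4.0', '1.0', '9999.9999.9999'))  # True
  print(version_in_range('0.9', '1.0', '9999.9999.9999'))    # False
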
+
+# Show the usage line when no arguments are specified.
+if test $# -eq 0; then
+ show_usage
+ exit 1
+fi
+
+while test $# -gt 0; do
+ case $1 in
+ --usage) show_usage; exit 0;;
+ --help) show_help; exit 0;;
+
+ # Installation overrides
+ --prefix=*) GTEST_PREFIX=${1#--prefix=};;
+ --exec-prefix=*) GTEST_EXEC_PREFIX=${1#--exec-prefix=};;
+ --libdir=*) GTEST_LIBDIR=${1#--libdir=};;
+ --includedir=*) GTEST_INCLUDEDIR=${1#--includedir=};;
+
+ # Installation queries
+ --prefix|--exec-prefix|--libdir|--includedir|--version)
+ if test -n "${do_query}"; then
+ show_usage
+ exit 1
+ fi
+ do_query=${1#--}
+ ;;
+
+ # Version checking
+ --min-version=*)
+ do_check_versions=yes
+ min_version=${1#--min-version=}
+ ;;
+ --max-version=*)
+ do_check_versions=yes
+ max_version=${1#--max-version=}
+ ;;
+ --exact-version=*)
+ do_check_versions=yes
+ exact_version=${1#--exact-version=}
+ ;;
+
+ # Compiler flag output
+ --cppflags) echo_cppflags=yes;;
+ --cxxflags) echo_cxxflags=yes;;
+ --ldflags) echo_ldflags=yes;;
+ --libs) echo_libs=yes;;
+
+ # Everything else is an error
+ *) show_usage; exit 1;;
+ esac
+ shift
+done
+
+# These have defaults filled in by the configure script but can also be
+# overridden by environment variables or command line parameters.
+prefix="${GTEST_PREFIX:-@prefix@}"
+exec_prefix="${GTEST_EXEC_PREFIX:-@exec_prefix@}"
+libdir="${GTEST_LIBDIR:-@libdir@}"
+includedir="${GTEST_INCLUDEDIR:-@includedir@}"
+
+# We try to detect whether our binary is located at its installed location. If
+# it's not, we provide variables pointing to the source and build tree rather
+# than to the install tree. This allows building against a just-built gtest
+# rather than an installed gtest.
+bindir="@bindir@"
+this_relative_bindir=`dirname $0`
+this_bindir=`cd ${this_relative_bindir}; pwd -P`
+if test "${this_bindir}" = "${this_bindir%${bindir}}"; then
+ # The path to the script doesn't end in the bindir sequence from Autoconf,
+  # so assume that we are in a build tree.
+ build_dir=`dirname ${this_bindir}`
+ src_dir=`cd ${this_bindir}/@top_srcdir@; pwd -P`
+
+ # TODO(chandlerc@google.com): This is a dangerous dependency on libtool, we
+ # should work to remove it, and/or remove libtool altogether, replacing it
+ # with direct references to the library and a link path.
+ gtest_libs="${build_dir}/lib/libgtest.la @PTHREAD_CFLAGS@ @PTHREAD_LIBS@"
+ gtest_ldflags=""
+
+ # We provide hooks to include from either the source or build dir, where the
+ # build dir is always preferred. This will potentially allow us to write
+ # build rules for generated headers and have them automatically be preferred
+ # over provided versions.
+ gtest_cppflags="-I${build_dir}/include -I${src_dir}/include"
+ gtest_cxxflags="@PTHREAD_CFLAGS@"
+else
+ # We're using an installed gtest, although it may be staged under some
+  # prefix. Assume (as our own libraries do) that the prefix can be resolved
+  # and that the libraries are present in the dynamic link paths.
+ gtest_ldflags="-L${libdir}"
+ gtest_libs="-l${name} @PTHREAD_CFLAGS@ @PTHREAD_LIBS@"
+ gtest_cppflags="-I${includedir}"
+ gtest_cxxflags="@PTHREAD_CFLAGS@"
+fi
+
+# Do an installation query if requested.
+if test -n "$do_query"; then
+ case $do_query in
+ prefix) echo $prefix; exit 0;;
+ exec-prefix) echo $exec_prefix; exit 0;;
+ libdir) echo $libdir; exit 0;;
+ includedir) echo $includedir; exit 0;;
+ version) echo $version; exit 0;;
+ *) show_usage; exit 1;;
+ esac
+fi
+
+# Do a version check if requested.
+if test "$do_check_versions" = "yes"; then
+ # Make sure we didn't receive a bad combination of parameters.
+ test "$echo_cppflags" = "yes" && show_usage && exit 1
+ test "$echo_cxxflags" = "yes" && show_usage && exit 1
+ test "$echo_ldflags" = "yes" && show_usage && exit 1
+ test "$echo_libs" = "yes" && show_usage && exit 1
+
+ if test "$exact_version" != ""; then
+ check_versions $exact_version $exact_version
+ # unreachable
+ else
+ check_versions ${min_version:-0.0.0} ${max_version:-9999.9999.9999}
+ # unreachable
+ fi
+fi
+
+# Do the output in the correct order so that these can be used in-line of
+# a compiler invocation.
+output=""
+test "$echo_cppflags" = "yes" && output="$output $gtest_cppflags"
+test "$echo_cxxflags" = "yes" && output="$output $gtest_cxxflags"
+test "$echo_ldflags" = "yes" && output="$output $gtest_ldflags"
+test "$echo_libs" = "yes" && output="$output $gtest_libs"
+echo $output
+
+exit 0
diff --git a/Source/ThirdParty/gtest/scripts/pump.py b/Source/ThirdParty/gtest/scripts/pump.py
new file mode 100755
index 000000000..f15c1b6ce
--- /dev/null
+++ b/Source/ThirdParty/gtest/scripts/pump.py
@@ -0,0 +1,835 @@
+#!/usr/bin/env python
+#
+# Copyright 2008, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""pump v0.1 - Pretty Useful for Meta Programming.
+
+A tool for preprocessor meta programming. Useful for generating
+repetitive boilerplate code. Especially useful for writing C++
+classes, functions, macros, and templates that need to work with
+a variable number of arguments.
+
+USAGE:
+ pump.py SOURCE_FILE
+
+EXAMPLES:
+ pump.py foo.cc.pump
+ Converts foo.cc.pump to foo.cc.
+
+GRAMMAR:
+ CODE ::= ATOMIC_CODE*
+ ATOMIC_CODE ::= $var ID = EXPRESSION
+ | $var ID = [[ CODE ]]
+ | $range ID EXPRESSION..EXPRESSION
+ | $for ID SEPARATOR [[ CODE ]]
+ | $($)
+ | $ID
+ | $(EXPRESSION)
+ | $if EXPRESSION [[ CODE ]] ELSE_BRANCH
+ | [[ CODE ]]
+ | RAW_CODE
+ SEPARATOR ::= RAW_CODE | EMPTY
+ ELSE_BRANCH ::= $else [[ CODE ]]
+ | $elif EXPRESSION [[ CODE ]] ELSE_BRANCH
+ | EMPTY
+ EXPRESSION has Python syntax.
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import re
+import sys
+
+
+TOKEN_TABLE = [
+ (re.compile(r'\$var\s+'), '$var'),
+ (re.compile(r'\$elif\s+'), '$elif'),
+ (re.compile(r'\$else\s+'), '$else'),
+ (re.compile(r'\$for\s+'), '$for'),
+ (re.compile(r'\$if\s+'), '$if'),
+ (re.compile(r'\$range\s+'), '$range'),
+ (re.compile(r'\$[_A-Za-z]\w*'), '$id'),
+ (re.compile(r'\$\(\$\)'), '$($)'),
+ (re.compile(r'\$\$.*'), '$$'),
+ (re.compile(r'\$'), '$'),
+ (re.compile(r'\[\[\n?'), '[['),
+ (re.compile(r'\]\]\n?'), ']]'),
+ ]
+
+
+class Cursor:
+ """Represents a position (line and column) in a text file."""
+
+ def __init__(self, line=-1, column=-1):
+ self.line = line
+ self.column = column
+
+ def __eq__(self, rhs):
+ return self.line == rhs.line and self.column == rhs.column
+
+ def __ne__(self, rhs):
+ return not self == rhs
+
+ def __lt__(self, rhs):
+ return self.line < rhs.line or (
+ self.line == rhs.line and self.column < rhs.column)
+
+ def __le__(self, rhs):
+ return self < rhs or self == rhs
+
+ def __gt__(self, rhs):
+ return rhs < self
+
+ def __ge__(self, rhs):
+ return rhs <= self
+
+ def __str__(self):
+ if self == Eof():
+ return 'EOF'
+ else:
+ return '%s(%s)' % (self.line + 1, self.column)
+
+ def __add__(self, offset):
+ return Cursor(self.line, self.column + offset)
+
+ def __sub__(self, offset):
+ return Cursor(self.line, self.column - offset)
+
+ def Clone(self):
+ """Returns a copy of self."""
+
+ return Cursor(self.line, self.column)
+
+
+# Special cursor to indicate the end-of-file.
+def Eof():
+ """Returns the special cursor to denote the end-of-file."""
+ return Cursor(-1, -1)
+
+
+class Token:
+ """Represents a token in a Pump source file."""
+
+ def __init__(self, start=None, end=None, value=None, token_type=None):
+ if start is None:
+ self.start = Eof()
+ else:
+ self.start = start
+ if end is None:
+ self.end = Eof()
+ else:
+ self.end = end
+ self.value = value
+ self.token_type = token_type
+
+ def __str__(self):
+ return 'Token @%s: \'%s\' type=%s' % (
+ self.start, self.value, self.token_type)
+
+ def Clone(self):
+ """Returns a copy of self."""
+
+ return Token(self.start.Clone(), self.end.Clone(), self.value,
+ self.token_type)
+
+
+def StartsWith(lines, pos, string):
+ """Returns True iff the given position in lines starts with 'string'."""
+
+ return lines[pos.line][pos.column:].startswith(string)
+
+
+def FindFirstInLine(line, token_table):
+ best_match_start = -1
+ for (regex, token_type) in token_table:
+ m = regex.search(line)
+ if m:
+      # This regex matches the line; keep the earliest match seen so far.
+ if best_match_start < 0 or m.start() < best_match_start:
+ best_match_start = m.start()
+ best_match_length = m.end() - m.start()
+ best_match_token_type = token_type
+
+ if best_match_start < 0:
+ return None
+
+ return (best_match_start, best_match_length, best_match_token_type)
+
+
+def FindFirst(lines, token_table, cursor):
+  """Finds the first occurrence in lines of any token in token_table."""
+
+ start = cursor.Clone()
+ cur_line_number = cursor.line
+ for line in lines[start.line:]:
+ if cur_line_number == start.line:
+ line = line[start.column:]
+ m = FindFirstInLine(line, token_table)
+ if m:
+ # We found a regex in line.
+ (start_column, length, token_type) = m
+ if cur_line_number == start.line:
+ start_column += start.column
+ found_start = Cursor(cur_line_number, start_column)
+ found_end = found_start + length
+ return MakeToken(lines, found_start, found_end, token_type)
+ cur_line_number += 1
+  # We failed to find any token in the remaining lines.
+ return None
+
+
+def SubString(lines, start, end):
+ """Returns a substring in lines."""
+
+ if end == Eof():
+ end = Cursor(len(lines) - 1, len(lines[-1]))
+
+ if start >= end:
+ return ''
+
+ if start.line == end.line:
+ return lines[start.line][start.column:end.column]
+
+ result_lines = ([lines[start.line][start.column:]] +
+ lines[start.line + 1:end.line] +
+ [lines[end.line][:end.column]])
+ return ''.join(result_lines)
+
+
+def MakeToken(lines, start, end, token_type):
+ """Creates a new instance of Token."""
+
+ return Token(start, end, SubString(lines, start, end), token_type)
+
+
+def ParseToken(lines, pos, regex, token_type):
+ line = lines[pos.line][pos.column:]
+ m = regex.search(line)
+ if m and not m.start():
+ return MakeToken(lines, pos, pos + m.end(), token_type)
+ else:
+ print 'ERROR: %s expected at %s.' % (token_type, pos)
+ sys.exit(1)
+
+
+ID_REGEX = re.compile(r'[_A-Za-z]\w*')
+EQ_REGEX = re.compile(r'=')
+REST_OF_LINE_REGEX = re.compile(r'.*?(?=$|\$\$)')
+OPTIONAL_WHITE_SPACES_REGEX = re.compile(r'\s*')
+WHITE_SPACE_REGEX = re.compile(r'\s')
+DOT_DOT_REGEX = re.compile(r'\.\.')
+
+
+def Skip(lines, pos, regex):
+ line = lines[pos.line][pos.column:]
+ m = re.search(regex, line)
+ if m and not m.start():
+ return pos + m.end()
+ else:
+ return pos
+
+
+def SkipUntil(lines, pos, regex, token_type):
+ line = lines[pos.line][pos.column:]
+ m = re.search(regex, line)
+ if m:
+ return pos + m.start()
+ else:
+ print ('ERROR: %s expected on line %s after column %s.' %
+ (token_type, pos.line + 1, pos.column))
+ sys.exit(1)
+
+
+def ParseExpTokenInParens(lines, pos):
+ def ParseInParens(pos):
+ pos = Skip(lines, pos, OPTIONAL_WHITE_SPACES_REGEX)
+ pos = Skip(lines, pos, r'\(')
+ pos = Parse(pos)
+ pos = Skip(lines, pos, r'\)')
+ return pos
+
+ def Parse(pos):
+ pos = SkipUntil(lines, pos, r'\(|\)', ')')
+ if SubString(lines, pos, pos + 1) == '(':
+ pos = Parse(pos + 1)
+ pos = Skip(lines, pos, r'\)')
+ return Parse(pos)
+ else:
+ return pos
+
+ start = pos.Clone()
+ pos = ParseInParens(pos)
+ return MakeToken(lines, start, pos, 'exp')
+
+
+def RStripNewLineFromToken(token):
+ if token.value.endswith('\n'):
+ return Token(token.start, token.end, token.value[:-1], token.token_type)
+ else:
+ return token
+
+
+def TokenizeLines(lines, pos):
+ while True:
+ found = FindFirst(lines, TOKEN_TABLE, pos)
+ if not found:
+ yield MakeToken(lines, pos, Eof(), 'code')
+ return
+
+ if found.start == pos:
+ prev_token = None
+ prev_token_rstripped = None
+ else:
+ prev_token = MakeToken(lines, pos, found.start, 'code')
+ prev_token_rstripped = RStripNewLineFromToken(prev_token)
+
+ if found.token_type == '$$': # A meta comment.
+ if prev_token_rstripped:
+ yield prev_token_rstripped
+ pos = Cursor(found.end.line + 1, 0)
+ elif found.token_type == '$var':
+ if prev_token_rstripped:
+ yield prev_token_rstripped
+ yield found
+ id_token = ParseToken(lines, found.end, ID_REGEX, 'id')
+ yield id_token
+ pos = Skip(lines, id_token.end, OPTIONAL_WHITE_SPACES_REGEX)
+
+ eq_token = ParseToken(lines, pos, EQ_REGEX, '=')
+ yield eq_token
+ pos = Skip(lines, eq_token.end, r'\s*')
+
+ if SubString(lines, pos, pos + 2) != '[[':
+ exp_token = ParseToken(lines, pos, REST_OF_LINE_REGEX, 'exp')
+ yield exp_token
+ pos = Cursor(exp_token.end.line + 1, 0)
+ elif found.token_type == '$for':
+ if prev_token_rstripped:
+ yield prev_token_rstripped
+ yield found
+ id_token = ParseToken(lines, found.end, ID_REGEX, 'id')
+ yield id_token
+ pos = Skip(lines, id_token.end, WHITE_SPACE_REGEX)
+ elif found.token_type == '$range':
+ if prev_token_rstripped:
+ yield prev_token_rstripped
+ yield found
+ id_token = ParseToken(lines, found.end, ID_REGEX, 'id')
+ yield id_token
+ pos = Skip(lines, id_token.end, OPTIONAL_WHITE_SPACES_REGEX)
+
+ dots_pos = SkipUntil(lines, pos, DOT_DOT_REGEX, '..')
+ yield MakeToken(lines, pos, dots_pos, 'exp')
+ yield MakeToken(lines, dots_pos, dots_pos + 2, '..')
+ pos = dots_pos + 2
+ new_pos = Cursor(pos.line + 1, 0)
+ yield MakeToken(lines, pos, new_pos, 'exp')
+ pos = new_pos
+ elif found.token_type == '$':
+ if prev_token:
+ yield prev_token
+ yield found
+ exp_token = ParseExpTokenInParens(lines, found.end)
+ yield exp_token
+ pos = exp_token.end
+ elif (found.token_type == ']]' or found.token_type == '$if' or
+ found.token_type == '$elif' or found.token_type == '$else'):
+ if prev_token_rstripped:
+ yield prev_token_rstripped
+ yield found
+ pos = found.end
+ else:
+ if prev_token:
+ yield prev_token
+ yield found
+ pos = found.end
+
+
+def Tokenize(s):
+ lines = s.splitlines(True)
+ return TokenizeLines(lines, Cursor(0, 0))
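
A tiny illustration of the tokenizer (a sketch; the token boundaries follow
from TOKEN_TABLE above):

  src = '$range i 1..3\nFoo($i);\n'
  print([t.token_type for t in Tokenize(src)])
  # roughly: ['$range', 'id', 'exp', '..', 'exp', 'code', '$id', 'code']
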
+
+
+class CodeNode:
+ def __init__(self, atomic_code_list=None):
+ self.atomic_code = atomic_code_list
+
+
+class VarNode:
+ def __init__(self, identifier=None, atomic_code=None):
+ self.identifier = identifier
+ self.atomic_code = atomic_code
+
+
+class RangeNode:
+ def __init__(self, identifier=None, exp1=None, exp2=None):
+ self.identifier = identifier
+ self.exp1 = exp1
+ self.exp2 = exp2
+
+
+class ForNode:
+ def __init__(self, identifier=None, sep=None, code=None):
+ self.identifier = identifier
+ self.sep = sep
+ self.code = code
+
+
+class ElseNode:
+ def __init__(self, else_branch=None):
+ self.else_branch = else_branch
+
+
+class IfNode:
+ def __init__(self, exp=None, then_branch=None, else_branch=None):
+ self.exp = exp
+ self.then_branch = then_branch
+ self.else_branch = else_branch
+
+
+class RawCodeNode:
+ def __init__(self, token=None):
+ self.raw_code = token
+
+
+class LiteralDollarNode:
+ def __init__(self, token):
+ self.token = token
+
+
+class ExpNode:
+ def __init__(self, token, python_exp):
+ self.token = token
+ self.python_exp = python_exp
+
+
+def PopFront(a_list):
+ head = a_list[0]
+ a_list[:1] = []
+ return head
+
+
+def PushFront(a_list, elem):
+ a_list[:0] = [elem]
+
+
+def PopToken(a_list, token_type=None):
+ token = PopFront(a_list)
+ if token_type is not None and token.token_type != token_type:
+ print 'ERROR: %s expected at %s' % (token_type, token.start)
+ print 'ERROR: %s found instead' % (token,)
+ sys.exit(1)
+
+ return token
+
+
+def PeekToken(a_list):
+ if not a_list:
+ return None
+
+ return a_list[0]
+
+
+def ParseExpNode(token):
+ python_exp = re.sub(r'([_A-Za-z]\w*)', r'self.GetValue("\1")', token.value)
+ return ExpNode(token, python_exp)
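
ParseExpNode is where meta expressions become Python: every identifier in the
expression is rewritten into a lookup on the environment, so Env.EvalExp later
sees the current variable values. The rewrite in isolation (standalone sketch):

  import re

  print(re.sub(r'([_A-Za-z]\w*)', r'self.GetValue("\1")', 'i + j*2'))
  # -> self.GetValue("i") + self.GetValue("j")*2
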
+
+
+def ParseElseNode(tokens):
+ def Pop(token_type=None):
+ return PopToken(tokens, token_type)
+
+ next = PeekToken(tokens)
+ if not next:
+ return None
+ if next.token_type == '$else':
+ Pop('$else')
+ Pop('[[')
+ code_node = ParseCodeNode(tokens)
+ Pop(']]')
+ return code_node
+ elif next.token_type == '$elif':
+ Pop('$elif')
+ exp = Pop('code')
+ Pop('[[')
+ code_node = ParseCodeNode(tokens)
+ Pop(']]')
+ inner_else_node = ParseElseNode(tokens)
+ return CodeNode([IfNode(ParseExpNode(exp), code_node, inner_else_node)])
+ elif not next.value.strip():
+ Pop('code')
+ return ParseElseNode(tokens)
+ else:
+ return None
+
+
+def ParseAtomicCodeNode(tokens):
+ def Pop(token_type=None):
+ return PopToken(tokens, token_type)
+
+ head = PopFront(tokens)
+ t = head.token_type
+ if t == 'code':
+ return RawCodeNode(head)
+ elif t == '$var':
+ id_token = Pop('id')
+ Pop('=')
+ next = PeekToken(tokens)
+ if next.token_type == 'exp':
+ exp_token = Pop()
+ return VarNode(id_token, ParseExpNode(exp_token))
+ Pop('[[')
+ code_node = ParseCodeNode(tokens)
+ Pop(']]')
+ return VarNode(id_token, code_node)
+ elif t == '$for':
+ id_token = Pop('id')
+ next_token = PeekToken(tokens)
+ if next_token.token_type == 'code':
+ sep_token = next_token
+ Pop('code')
+ else:
+ sep_token = None
+ Pop('[[')
+ code_node = ParseCodeNode(tokens)
+ Pop(']]')
+ return ForNode(id_token, sep_token, code_node)
+ elif t == '$if':
+ exp_token = Pop('code')
+ Pop('[[')
+ code_node = ParseCodeNode(tokens)
+ Pop(']]')
+ else_node = ParseElseNode(tokens)
+ return IfNode(ParseExpNode(exp_token), code_node, else_node)
+ elif t == '$range':
+ id_token = Pop('id')
+ exp1_token = Pop('exp')
+ Pop('..')
+ exp2_token = Pop('exp')
+ return RangeNode(id_token, ParseExpNode(exp1_token),
+ ParseExpNode(exp2_token))
+ elif t == '$id':
+ return ParseExpNode(Token(head.start + 1, head.end, head.value[1:], 'id'))
+ elif t == '$($)':
+ return LiteralDollarNode(head)
+ elif t == '$':
+ exp_token = Pop('exp')
+ return ParseExpNode(exp_token)
+ elif t == '[[':
+ code_node = ParseCodeNode(tokens)
+ Pop(']]')
+ return code_node
+ else:
+ PushFront(tokens, head)
+ return None
+
+
+def ParseCodeNode(tokens):
+ atomic_code_list = []
+ while True:
+ if not tokens:
+ break
+ atomic_code_node = ParseAtomicCodeNode(tokens)
+ if atomic_code_node:
+ atomic_code_list.append(atomic_code_node)
+ else:
+ break
+ return CodeNode(atomic_code_list)
+
+
+def Convert(file_path):
+ s = file(file_path, 'r').read()
+ tokens = []
+ for token in Tokenize(s):
+ tokens.append(token)
+ code_node = ParseCodeNode(tokens)
+ return code_node
+
+
+class Env:
+ def __init__(self):
+ self.variables = []
+ self.ranges = []
+
+ def Clone(self):
+ clone = Env()
+ clone.variables = self.variables[:]
+ clone.ranges = self.ranges[:]
+ return clone
+
+ def PushVariable(self, var, value):
+ # If value looks like an int, store it as an int.
+ try:
+ int_value = int(value)
+ if ('%s' % int_value) == value:
+ value = int_value
+ except Exception:
+ pass
+ self.variables[:0] = [(var, value)]
+
+ def PopVariable(self):
+ self.variables[:1] = []
+
+ def PushRange(self, var, lower, upper):
+ self.ranges[:0] = [(var, lower, upper)]
+
+ def PopRange(self):
+ self.ranges[:1] = []
+
+ def GetValue(self, identifier):
+ for (var, value) in self.variables:
+ if identifier == var:
+ return value
+
+ print 'ERROR: meta variable %s is undefined.' % (identifier,)
+ sys.exit(1)
+
+ def EvalExp(self, exp):
+ try:
+ result = eval(exp.python_exp)
+ except Exception, e:
+ print 'ERROR: caught exception %s: %s' % (e.__class__.__name__, e)
+ print ('ERROR: failed to evaluate meta expression %s at %s' %
+ (exp.python_exp, exp.token.start))
+ sys.exit(1)
+ return result
+
+ def GetRange(self, identifier):
+ for (var, lower, upper) in self.ranges:
+ if identifier == var:
+ return (lower, upper)
+
+ print 'ERROR: range %s is undefined.' % (identifier,)
+ sys.exit(1)
+
+
+class Output:
+ def __init__(self):
+ self.string = ''
+
+ def GetLastLine(self):
+ index = self.string.rfind('\n')
+ if index < 0:
+ return ''
+
+ return self.string[index + 1:]
+
+ def Append(self, s):
+ self.string += s
+
+
+def RunAtomicCode(env, node, output):
+ if isinstance(node, VarNode):
+ identifier = node.identifier.value.strip()
+ result = Output()
+ RunAtomicCode(env.Clone(), node.atomic_code, result)
+ value = result.string
+ env.PushVariable(identifier, value)
+ elif isinstance(node, RangeNode):
+ identifier = node.identifier.value.strip()
+ lower = int(env.EvalExp(node.exp1))
+ upper = int(env.EvalExp(node.exp2))
+ env.PushRange(identifier, lower, upper)
+ elif isinstance(node, ForNode):
+ identifier = node.identifier.value.strip()
+ if node.sep is None:
+ sep = ''
+ else:
+ sep = node.sep.value
+ (lower, upper) = env.GetRange(identifier)
+ for i in range(lower, upper + 1):
+ new_env = env.Clone()
+ new_env.PushVariable(identifier, i)
+ RunCode(new_env, node.code, output)
+ if i != upper:
+ output.Append(sep)
+ elif isinstance(node, RawCodeNode):
+ output.Append(node.raw_code.value)
+ elif isinstance(node, IfNode):
+ cond = env.EvalExp(node.exp)
+ if cond:
+ RunCode(env.Clone(), node.then_branch, output)
+ elif node.else_branch is not None:
+ RunCode(env.Clone(), node.else_branch, output)
+ elif isinstance(node, ExpNode):
+ value = env.EvalExp(node)
+ output.Append('%s' % (value,))
+ elif isinstance(node, LiteralDollarNode):
+ output.Append('$')
+ elif isinstance(node, CodeNode):
+ RunCode(env.Clone(), node, output)
+ else:
+ print 'BAD'
+ print node
+ sys.exit(1)
+
+
+def RunCode(env, code_node, output):
+ for atomic_code in code_node.atomic_code:
+ RunAtomicCode(env, atomic_code, output)
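
Putting the pieces together on a string instead of a file (a sketch; the
expected expansion below was traced by hand and may differ in whitespace):

  src = '$range i 1..3\n$for i  [[Foo($i);\n]]\n'
  out = Output()
  RunCode(Env(), ParseCodeNode(list(Tokenize(src))), out)
  print(out.string)   # expected: Foo(1); Foo(2); Foo(3);
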
+
+
+def IsComment(cur_line):
+ return '//' in cur_line
+
+
+def IsInPreprocessorDirective(prev_lines, cur_line):
+ if cur_line.lstrip().startswith('#'):
+ return True
+ return prev_lines != [] and prev_lines[-1].endswith('\\')
+
+
+def WrapComment(line, output):
+ loc = line.find('//')
+ before_comment = line[:loc].rstrip()
+ if before_comment == '':
+ indent = loc
+ else:
+ output.append(before_comment)
+ indent = len(before_comment) - len(before_comment.lstrip())
+ prefix = indent*' ' + '// '
+ max_len = 80 - len(prefix)
+ comment = line[loc + 2:].strip()
+ segs = [seg for seg in re.split(r'(\w+\W*)', comment) if seg != '']
+ cur_line = ''
+ for seg in segs:
+ if len((cur_line + seg).rstrip()) < max_len:
+ cur_line += seg
+ else:
+ if cur_line.strip() != '':
+ output.append(prefix + cur_line.rstrip())
+ cur_line = seg.lstrip()
+ if cur_line.strip() != '':
+ output.append(prefix + cur_line.strip())
+
+
+def WrapCode(line, line_concat, output):
+ indent = len(line) - len(line.lstrip())
+ prefix = indent*' ' # Prefix of the current line
+ max_len = 80 - indent - len(line_concat) # Maximum length of the current line
+ new_prefix = prefix + 4*' ' # Prefix of a continuation line
+ new_max_len = max_len - 4 # Maximum length of a continuation line
+ # Prefers to wrap a line after a ',' or ';'.
+ segs = [seg for seg in re.split(r'([^,;]+[,;]?)', line.strip()) if seg != '']
+ cur_line = '' # The current line without leading spaces.
+ for seg in segs:
+ # If the line is still too long, wrap at a space.
+ while cur_line == '' and len(seg.strip()) > max_len:
+ seg = seg.lstrip()
+ split_at = seg.rfind(' ', 0, max_len)
+ output.append(prefix + seg[:split_at].strip() + line_concat)
+ seg = seg[split_at + 1:]
+ prefix = new_prefix
+ max_len = new_max_len
+
+ if len((cur_line + seg).rstrip()) < max_len:
+ cur_line = (cur_line + seg).lstrip()
+ else:
+ output.append(prefix + cur_line.rstrip() + line_concat)
+ prefix = new_prefix
+ max_len = new_max_len
+ cur_line = seg.lstrip()
+ if cur_line.strip() != '':
+ output.append(prefix + cur_line.strip())
+
+
+def WrapPreprocessorDirective(line, output):
+ WrapCode(line, ' \\', output)
+
+
+def WrapPlainCode(line, output):
+ WrapCode(line, '', output)
+
+
+def IsHeaderGuardOrInclude(line):
+ return (re.match(r'^#(ifndef|define|endif\s*//)\s*[\w_]+\s*$', line) or
+ re.match(r'^#include\s', line))
+
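+# A few example lines the check above is meant to recognize (illustrative
+# values only):
+#   #ifndef GTEST_INCLUDE_GTEST_GTEST_H_
+#   #endif  // GTEST_INCLUDE_GTEST_GTEST_H_
+#   #include <gtest/internal/gtest-port.h>
+# WrapLongLine() below leaves such lines unwrapped even when they exceed 80
+# columns.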
+
+def WrapLongLine(line, output):
+ line = line.rstrip()
+ if len(line) <= 80:
+ output.append(line)
+ elif IsComment(line):
+ if IsHeaderGuardOrInclude(line):
+ # The style guide made an exception to allow long header guard lines
+ # and includes.
+ output.append(line)
+ else:
+ WrapComment(line, output)
+  elif IsInPreprocessorDirective(output, line):
+ if IsHeaderGuardOrInclude(line):
+ # The style guide made an exception to allow long header guard lines
+ # and includes.
+ output.append(line)
+ else:
+      WrapPreprocessorDirective(line, output)
+ else:
+ WrapPlainCode(line, output)
+
+
+def BeautifyCode(string):
+ lines = string.splitlines()
+ output = []
+ for line in lines:
+ WrapLongLine(line, output)
+ output2 = [line.rstrip() for line in output]
+ return '\n'.join(output2) + '\n'
+
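+# A minimal usage sketch (illustrative input only): BeautifyCode() re-wraps
+# any generated line longer than 80 columns, so for example
+#   BeautifyCode('// ' + 'lorem ipsum ' * 20)
+# yields several lines, each starting with '// ' and fitting within 80
+# columns, while short lines pass through unchanged.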
+
+def main(argv):
+ if len(argv) == 1:
+ print __doc__
+ sys.exit(1)
+
+ file_path = argv[-1]
+ ast = Convert(file_path)
+ output = Output()
+ RunCode(Env(), ast, output)
+ output_str = BeautifyCode(output.string)
+ if file_path.endswith('.pump'):
+ output_file_path = file_path[:-5]
+ else:
+ output_file_path = '-'
+ if output_file_path == '-':
+ print output_str,
+ else:
+ output_file = file(output_file_path, 'w')
+ output_file.write('// This file was GENERATED by command:\n')
+ output_file.write('// %s %s\n' %
+ (os.path.basename(__file__), os.path.basename(file_path)))
+ output_file.write('// DO NOT EDIT BY HAND!!!\n\n')
+ output_file.write(output_str)
+ output_file.close()
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/Source/ThirdParty/gtest/scripts/upload.py b/Source/ThirdParty/gtest/scripts/upload.py
new file mode 100755
index 000000000..6e6f9a147
--- /dev/null
+++ b/Source/ThirdParty/gtest/scripts/upload.py
@@ -0,0 +1,1387 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tool for uploading diffs from a version control system to the codereview app.
+
+Usage summary: upload.py [options] [-- diff_options]
+
+Diff options are passed to the diff command of the underlying system.
+
+Supported version control systems:
+ Git
+ Mercurial
+ Subversion
+
+It is important for Git/Mercurial users to specify a tree/node/branch to diff
+against by using the '--rev' option.
+"""
+# This code is derived from appcfg.py in the App Engine SDK (open source),
+# and from ASPN recipe #146306.
+
+import cookielib
+import getpass
+import logging
+import md5
+import mimetypes
+import optparse
+import os
+import re
+import socket
+import subprocess
+import sys
+import urllib
+import urllib2
+import urlparse
+
+try:
+ import readline
+except ImportError:
+ pass
+
+# The logging verbosity:
+# 0: Errors only.
+# 1: Status messages.
+# 2: Info logs.
+# 3: Debug logs.
+verbosity = 1
+
+# Max size of patch or base file.
+MAX_UPLOAD_SIZE = 900 * 1024
+
+
+def GetEmail(prompt):
+ """Prompts the user for their email address and returns it.
+
+ The last used email address is saved to a file and offered up as a suggestion
+  to the user. If the user presses enter without typing in anything, the last
+  used email address is used. If the user enters a new address, it is saved
+  for the next time we prompt.
+
+ """
+ last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
+ last_email = ""
+ if os.path.exists(last_email_file_name):
+ try:
+ last_email_file = open(last_email_file_name, "r")
+ last_email = last_email_file.readline().strip("\n")
+ last_email_file.close()
+ prompt += " [%s]" % last_email
+ except IOError, e:
+ pass
+ email = raw_input(prompt + ": ").strip()
+ if email:
+ try:
+ last_email_file = open(last_email_file_name, "w")
+ last_email_file.write(email)
+ last_email_file.close()
+ except IOError, e:
+ pass
+ else:
+ email = last_email
+ return email
+
+
+def StatusUpdate(msg):
+ """Print a status message to stdout.
+
+ If 'verbosity' is greater than 0, print the message.
+
+ Args:
+ msg: The string to print.
+ """
+ if verbosity > 0:
+ print msg
+
+
+def ErrorExit(msg):
+ """Print an error message to stderr and exit."""
+ print >>sys.stderr, msg
+ sys.exit(1)
+
+
+class ClientLoginError(urllib2.HTTPError):
+ """Raised to indicate there was an error authenticating with ClientLogin."""
+
+ def __init__(self, url, code, msg, headers, args):
+ urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
+ self.args = args
+ self.reason = args["Error"]
+
+
+class AbstractRpcServer(object):
+ """Provides a common interface for a simple RPC server."""
+
+ def __init__(self, host, auth_function, host_override=None, extra_headers={},
+ save_cookies=False):
+ """Creates a new HttpRpcServer.
+
+ Args:
+ host: The host to send requests to.
+ auth_function: A function that takes no arguments and returns an
+ (email, password) tuple when called. Will be called if authentication
+ is required.
+ host_override: The host header to send to the server (defaults to host).
+ extra_headers: A dict of extra headers to append to every request.
+ save_cookies: If True, save the authentication cookies to local disk.
+ If False, use an in-memory cookiejar instead. Subclasses must
+ implement this functionality. Defaults to False.
+ """
+ self.host = host
+ self.host_override = host_override
+ self.auth_function = auth_function
+ self.authenticated = False
+ self.extra_headers = extra_headers
+ self.save_cookies = save_cookies
+ self.opener = self._GetOpener()
+ if self.host_override:
+ logging.info("Server: %s; Host: %s", self.host, self.host_override)
+ else:
+ logging.info("Server: %s", self.host)
+
+ def _GetOpener(self):
+ """Returns an OpenerDirector for making HTTP requests.
+
+ Returns:
+ A urllib2.OpenerDirector object.
+ """
+ raise NotImplementedError()
+
+ def _CreateRequest(self, url, data=None):
+ """Creates a new urllib request."""
+ logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
+ req = urllib2.Request(url, data=data)
+ if self.host_override:
+ req.add_header("Host", self.host_override)
+ for key, value in self.extra_headers.iteritems():
+ req.add_header(key, value)
+ return req
+
+ def _GetAuthToken(self, email, password):
+ """Uses ClientLogin to authenticate the user, returning an auth token.
+
+ Args:
+ email: The user's email address
+ password: The user's password
+
+ Raises:
+ ClientLoginError: If there was an error authenticating with ClientLogin.
+ HTTPError: If there was some other form of HTTP error.
+
+ Returns:
+ The authentication token returned by ClientLogin.
+ """
+ account_type = "GOOGLE"
+ if self.host.endswith(".google.com"):
+ # Needed for use inside Google.
+ account_type = "HOSTED"
+ req = self._CreateRequest(
+ url="https://www.google.com/accounts/ClientLogin",
+ data=urllib.urlencode({
+ "Email": email,
+ "Passwd": password,
+ "service": "ah",
+ "source": "rietveld-codereview-upload",
+ "accountType": account_type,
+ }),
+ )
+ try:
+ response = self.opener.open(req)
+ response_body = response.read()
+ response_dict = dict(x.split("=")
+ for x in response_body.split("\n") if x)
+ return response_dict["Auth"]
+ except urllib2.HTTPError, e:
+ if e.code == 403:
+ body = e.read()
+ response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
+ raise ClientLoginError(req.get_full_url(), e.code, e.msg,
+ e.headers, response_dict)
+ else:
+ raise
+
+ def _GetAuthCookie(self, auth_token):
+ """Fetches authentication cookies for an authentication token.
+
+ Args:
+ auth_token: The authentication token returned by ClientLogin.
+
+ Raises:
+ HTTPError: If there was an error fetching the authentication cookies.
+ """
+ # This is a dummy value to allow us to identify when we're successful.
+ continue_location = "http://localhost/"
+ args = {"continue": continue_location, "auth": auth_token}
+ req = self._CreateRequest("http://%s/_ah/login?%s" %
+ (self.host, urllib.urlencode(args)))
+ try:
+ response = self.opener.open(req)
+ except urllib2.HTTPError, e:
+ response = e
+ if (response.code != 302 or
+ response.info()["location"] != continue_location):
+ raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
+ response.headers, response.fp)
+ self.authenticated = True
+
+ def _Authenticate(self):
+ """Authenticates the user.
+
+ The authentication process works as follows:
+ 1) We get a username and password from the user
+ 2) We use ClientLogin to obtain an AUTH token for the user
+ (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
+ 3) We pass the auth token to /_ah/login on the server to obtain an
+ authentication cookie. If login was successful, it tries to redirect
+ us to the URL we provided.
+
+ If we attempt to access the upload API without first obtaining an
+ authentication cookie, it returns a 401 response and directs us to
+ authenticate ourselves with ClientLogin.
+ """
+ for i in range(3):
+ credentials = self.auth_function()
+ try:
+ auth_token = self._GetAuthToken(credentials[0], credentials[1])
+ except ClientLoginError, e:
+ if e.reason == "BadAuthentication":
+ print >>sys.stderr, "Invalid username or password."
+ continue
+ if e.reason == "CaptchaRequired":
+ print >>sys.stderr, (
+ "Please go to\n"
+ "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
+ "and verify you are a human. Then try again.")
+ break
+ if e.reason == "NotVerified":
+ print >>sys.stderr, "Account not verified."
+ break
+ if e.reason == "TermsNotAgreed":
+ print >>sys.stderr, "User has not agreed to TOS."
+ break
+ if e.reason == "AccountDeleted":
+ print >>sys.stderr, "The user account has been deleted."
+ break
+ if e.reason == "AccountDisabled":
+ print >>sys.stderr, "The user account has been disabled."
+ break
+ if e.reason == "ServiceDisabled":
+ print >>sys.stderr, ("The user's access to the service has been "
+ "disabled.")
+ break
+ if e.reason == "ServiceUnavailable":
+ print >>sys.stderr, "The service is not available; try again later."
+ break
+ raise
+ self._GetAuthCookie(auth_token)
+ return
+
+ def Send(self, request_path, payload=None,
+ content_type="application/octet-stream",
+ timeout=None,
+ **kwargs):
+ """Sends an RPC and returns the response.
+
+ Args:
+      request_path: The path to send the request to, e.g. /api/appversion/create.
+ payload: The body of the request, or None to send an empty request.
+ content_type: The Content-Type header to use.
+ timeout: timeout in seconds; default None i.e. no timeout.
+ (Note: for large requests on OS X, the timeout doesn't work right.)
+ kwargs: Any keyword arguments are converted into query string parameters.
+
+ Returns:
+ The response body, as a string.
+ """
+ # TODO: Don't require authentication. Let the server say
+ # whether it is necessary.
+ if not self.authenticated:
+ self._Authenticate()
+
+ old_timeout = socket.getdefaulttimeout()
+ socket.setdefaulttimeout(timeout)
+ try:
+ tries = 0
+ while True:
+ tries += 1
+ args = dict(kwargs)
+ url = "http://%s%s" % (self.host, request_path)
+ if args:
+ url += "?" + urllib.urlencode(args)
+ req = self._CreateRequest(url=url, data=payload)
+ req.add_header("Content-Type", content_type)
+ try:
+ f = self.opener.open(req)
+ response = f.read()
+ f.close()
+ return response
+ except urllib2.HTTPError, e:
+ if tries > 3:
+ raise
+ elif e.code == 401:
+ self._Authenticate()
+## elif e.code >= 500 and e.code < 600:
+## # Server Error - try again.
+## continue
+ else:
+ raise
+ finally:
+ socket.setdefaulttimeout(old_timeout)
+
+
+class HttpRpcServer(AbstractRpcServer):
+ """Provides a simplified RPC-style interface for HTTP requests."""
+
+ def _Authenticate(self):
+ """Save the cookie jar after authentication."""
+ super(HttpRpcServer, self)._Authenticate()
+ if self.save_cookies:
+ StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
+ self.cookie_jar.save()
+
+ def _GetOpener(self):
+ """Returns an OpenerDirector that supports cookies and ignores redirects.
+
+ Returns:
+ A urllib2.OpenerDirector object.
+ """
+ opener = urllib2.OpenerDirector()
+ opener.add_handler(urllib2.ProxyHandler())
+ opener.add_handler(urllib2.UnknownHandler())
+ opener.add_handler(urllib2.HTTPHandler())
+ opener.add_handler(urllib2.HTTPDefaultErrorHandler())
+ opener.add_handler(urllib2.HTTPSHandler())
+ opener.add_handler(urllib2.HTTPErrorProcessor())
+ if self.save_cookies:
+ self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
+ self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
+ if os.path.exists(self.cookie_file):
+ try:
+ self.cookie_jar.load()
+ self.authenticated = True
+ StatusUpdate("Loaded authentication cookies from %s" %
+ self.cookie_file)
+ except (cookielib.LoadError, IOError):
+ # Failed to load cookies - just ignore them.
+ pass
+ else:
+ # Create an empty cookie file with mode 600
+ fd = os.open(self.cookie_file, os.O_CREAT, 0600)
+ os.close(fd)
+ # Always chmod the cookie file
+ os.chmod(self.cookie_file, 0600)
+ else:
+      # Don't save cookies across runs of upload.py.
+ self.cookie_jar = cookielib.CookieJar()
+ opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
+ return opener
+
+
+parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
+parser.add_option("-y", "--assume_yes", action="store_true",
+ dest="assume_yes", default=False,
+ help="Assume that the answer to yes/no questions is 'yes'.")
+# Logging
+group = parser.add_option_group("Logging options")
+group.add_option("-q", "--quiet", action="store_const", const=0,
+ dest="verbose", help="Print errors only.")
+group.add_option("-v", "--verbose", action="store_const", const=2,
+ dest="verbose", default=1,
+ help="Print info level logs (default).")
+group.add_option("--noisy", action="store_const", const=3,
+ dest="verbose", help="Print all logs.")
+# Review server
+group = parser.add_option_group("Review server options")
+group.add_option("-s", "--server", action="store", dest="server",
+ default="codereview.appspot.com",
+ metavar="SERVER",
+ help=("The server to upload to. The format is host[:port]. "
+ "Defaults to 'codereview.appspot.com'."))
+group.add_option("-e", "--email", action="store", dest="email",
+ metavar="EMAIL", default=None,
+ help="The username to use. Will prompt if omitted.")
+group.add_option("-H", "--host", action="store", dest="host",
+ metavar="HOST", default=None,
+ help="Overrides the Host header sent with all RPCs.")
+group.add_option("--no_cookies", action="store_false",
+ dest="save_cookies", default=True,
+ help="Do not save authentication cookies to local disk.")
+# Issue
+group = parser.add_option_group("Issue options")
+group.add_option("-d", "--description", action="store", dest="description",
+ metavar="DESCRIPTION", default=None,
+ help="Optional description when creating an issue.")
+group.add_option("-f", "--description_file", action="store",
+ dest="description_file", metavar="DESCRIPTION_FILE",
+ default=None,
+ help="Optional path of a file that contains "
+ "the description when creating an issue.")
+group.add_option("-r", "--reviewers", action="store", dest="reviewers",
+ metavar="REVIEWERS", default=None,
+ help="Add reviewers (comma separated email addresses).")
+group.add_option("--cc", action="store", dest="cc",
+ metavar="CC", default=None,
+ help="Add CC (comma separated email addresses).")
+# Upload options
+group = parser.add_option_group("Patch options")
+group.add_option("-m", "--message", action="store", dest="message",
+ metavar="MESSAGE", default=None,
+ help="A message to identify the patch. "
+ "Will prompt if omitted.")
+group.add_option("-i", "--issue", type="int", action="store",
+ metavar="ISSUE", default=None,
+ help="Issue number to which to add. Defaults to new issue.")
+group.add_option("--download_base", action="store_true",
+ dest="download_base", default=False,
+ help="Base files will be downloaded by the server "
+ "(side-by-side diffs may not work on files with CRs).")
+group.add_option("--rev", action="store", dest="revision",
+ metavar="REV", default=None,
+ help="Branch/tree/revision to diff against (used by DVCS).")
+group.add_option("--send_mail", action="store_true",
+ dest="send_mail", default=False,
+ help="Send notification email to reviewers.")
+
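+# An illustrative invocation (hypothetical values): upload a change for
+# review, diffing against origin/master and adding one reviewer:
+#
+#   upload.py --rev origin/master -r reviewer@example.com -m "Fix crash"
+#
+# Anything after a bare "--" is forwarded verbatim to the underlying diff
+# command, as described in the module docstring.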
+
+def GetRpcServer(options):
+ """Returns an instance of an AbstractRpcServer.
+
+ Returns:
+ A new AbstractRpcServer, on which RPC calls can be made.
+ """
+
+ rpc_server_class = HttpRpcServer
+
+ def GetUserCredentials():
+ """Prompts the user for a username and password."""
+ email = options.email
+ if email is None:
+ email = GetEmail("Email (login for uploading to %s)" % options.server)
+ password = getpass.getpass("Password for %s: " % email)
+ return (email, password)
+
+ # If this is the dev_appserver, use fake authentication.
+ host = (options.host or options.server).lower()
+ if host == "localhost" or host.startswith("localhost:"):
+ email = options.email
+ if email is None:
+ email = "test@example.com"
+ logging.info("Using debug user %s. Override with --email" % email)
+ server = rpc_server_class(
+ options.server,
+ lambda: (email, "password"),
+ host_override=options.host,
+ extra_headers={"Cookie":
+ 'dev_appserver_login="%s:False"' % email},
+ save_cookies=options.save_cookies)
+ # Don't try to talk to ClientLogin.
+ server.authenticated = True
+ return server
+
+ return rpc_server_class(options.server, GetUserCredentials,
+ host_override=options.host,
+ save_cookies=options.save_cookies)
+
+
+def EncodeMultipartFormData(fields, files):
+ """Encode form fields for multipart/form-data.
+
+ Args:
+ fields: A sequence of (name, value) elements for regular form fields.
+ files: A sequence of (name, filename, value) elements for data to be
+ uploaded as files.
+ Returns:
+ (content_type, body) ready for httplib.HTTP instance.
+
+ Source:
+ http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
+ """
+ BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
+ CRLF = '\r\n'
+ lines = []
+ for (key, value) in fields:
+ lines.append('--' + BOUNDARY)
+ lines.append('Content-Disposition: form-data; name="%s"' % key)
+ lines.append('')
+ lines.append(value)
+ for (key, filename, value) in files:
+ lines.append('--' + BOUNDARY)
+ lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
+ (key, filename))
+ lines.append('Content-Type: %s' % GetContentType(filename))
+ lines.append('')
+ lines.append(value)
+ lines.append('--' + BOUNDARY + '--')
+ lines.append('')
+ body = CRLF.join(lines)
+ content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
+ return content_type, body
+
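+# A minimal usage sketch (illustrative values only):
+#
+#   ctype, body = EncodeMultipartFormData(
+#       [("subject", "Fix flaky test")],
+#       [("data", "data.diff", "Index: foo.cc\n...")])
+#
+# The resulting ctype is sent as the Content-Type header and body as the
+# payload of rpc_server.Send(), as done in UploadBaseFiles() and RealMain()
+# below.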
+
+def GetContentType(filename):
+ """Helper to guess the content-type from the filename."""
+ return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
+# Use a shell for subcommands on Windows to get a PATH search.
+use_shell = sys.platform.startswith("win")
+
+def RunShellWithReturnCode(command, print_output=False,
+ universal_newlines=True):
+ """Executes a command and returns the output from stdout and the return code.
+
+ Args:
+ command: Command to execute.
+ print_output: If True, the output is printed to stdout.
+ If False, both stdout and stderr are ignored.
+ universal_newlines: Use universal_newlines flag (default: True).
+
+ Returns:
+ Tuple (output, return code)
+ """
+ logging.info("Running %s", command)
+ p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ shell=use_shell, universal_newlines=universal_newlines)
+ if print_output:
+ output_array = []
+ while True:
+ line = p.stdout.readline()
+ if not line:
+ break
+ print line.strip("\n")
+ output_array.append(line)
+ output = "".join(output_array)
+ else:
+ output = p.stdout.read()
+ p.wait()
+ errout = p.stderr.read()
+ if print_output and errout:
+ print >>sys.stderr, errout
+ p.stdout.close()
+ p.stderr.close()
+ return output, p.returncode
+
+
+def RunShell(command, silent_ok=False, universal_newlines=True,
+ print_output=False):
+ data, retcode = RunShellWithReturnCode(command, print_output,
+ universal_newlines)
+ if retcode:
+ ErrorExit("Got error status from %s:\n%s" % (command, data))
+ if not silent_ok and not data:
+ ErrorExit("No output from %s" % command)
+ return data
+
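+# A minimal usage sketch (illustrative command only): RunShell() returns the
+# command's stdout and calls ErrorExit() on a non-zero status, e.g.
+#
+#   info = RunShell(["svn", "info"])
+#
+# whereas RunShellWithReturnCode() leaves error handling to the caller by
+# returning an (output, return code) tuple.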
+
+class VersionControlSystem(object):
+ """Abstract base class providing an interface to the VCS."""
+
+ def __init__(self, options):
+ """Constructor.
+
+ Args:
+ options: Command line options.
+ """
+ self.options = options
+
+ def GenerateDiff(self, args):
+ """Return the current diff as a string.
+
+ Args:
+ args: Extra arguments to pass to the diff command.
+ """
+ raise NotImplementedError(
+ "abstract method -- subclass %s must override" % self.__class__)
+
+ def GetUnknownFiles(self):
+ """Return a list of files unknown to the VCS."""
+ raise NotImplementedError(
+ "abstract method -- subclass %s must override" % self.__class__)
+
+ def CheckForUnknownFiles(self):
+ """Show an "are you sure?" prompt if there are unknown files."""
+ unknown_files = self.GetUnknownFiles()
+ if unknown_files:
+ print "The following files are not added to version control:"
+ for line in unknown_files:
+ print line
+      prompt = "Are you sure you want to continue? (y/N) "
+ answer = raw_input(prompt).strip()
+ if answer != "y":
+ ErrorExit("User aborted")
+
+ def GetBaseFile(self, filename):
+ """Get the content of the upstream version of a file.
+
+ Returns:
+ A tuple (base_content, new_content, is_binary, status)
+ base_content: The contents of the base file.
+ new_content: For text files, this is empty. For binary files, this is
+ the contents of the new file, since the diff output won't contain
+ information to reconstruct the current file.
+ is_binary: True iff the file is binary.
+ status: The status of the file.
+ """
+
+ raise NotImplementedError(
+ "abstract method -- subclass %s must override" % self.__class__)
+
+
+ def GetBaseFiles(self, diff):
+ """Helper that calls GetBase file for each file in the patch.
+
+ Returns:
+ A dictionary that maps from filename to GetBaseFile's tuple. Filenames
+ are retrieved based on lines that start with "Index:" or
+ "Property changes on:".
+ """
+ files = {}
+ for line in diff.splitlines(True):
+ if line.startswith('Index:') or line.startswith('Property changes on:'):
+ unused, filename = line.split(':', 1)
+ # On Windows if a file has property changes its filename uses '\'
+ # instead of '/'.
+ filename = filename.strip().replace('\\', '/')
+ files[filename] = self.GetBaseFile(filename)
+ return files
+
+
+ def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
+ files):
+ """Uploads the base files (and if necessary, the current ones as well)."""
+
+ def UploadFile(filename, file_id, content, is_binary, status, is_base):
+ """Uploads a file to the server."""
+ file_too_large = False
+ if is_base:
+ type = "base"
+ else:
+ type = "current"
+ if len(content) > MAX_UPLOAD_SIZE:
+ print ("Not uploading the %s file for %s because it's too large." %
+ (type, filename))
+ file_too_large = True
+ content = ""
+ checksum = md5.new(content).hexdigest()
+ if options.verbose > 0 and not file_too_large:
+ print "Uploading %s file for %s" % (type, filename)
+ url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
+ form_fields = [("filename", filename),
+ ("status", status),
+ ("checksum", checksum),
+ ("is_binary", str(is_binary)),
+ ("is_current", str(not is_base)),
+ ]
+ if file_too_large:
+ form_fields.append(("file_too_large", "1"))
+ if options.email:
+ form_fields.append(("user", options.email))
+ ctype, body = EncodeMultipartFormData(form_fields,
+ [("data", filename, content)])
+ response_body = rpc_server.Send(url, body,
+ content_type=ctype)
+ if not response_body.startswith("OK"):
+ StatusUpdate(" --> %s" % response_body)
+ sys.exit(1)
+
+ patches = dict()
+ [patches.setdefault(v, k) for k, v in patch_list]
+ for filename in patches.keys():
+ base_content, new_content, is_binary, status = files[filename]
+ file_id_str = patches.get(filename)
+ if file_id_str.find("nobase") != -1:
+ base_content = None
+ file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
+ file_id = int(file_id_str)
+ if base_content != None:
+ UploadFile(filename, file_id, base_content, is_binary, status, True)
+ if new_content != None:
+ UploadFile(filename, file_id, new_content, is_binary, status, False)
+
+ def IsImage(self, filename):
+ """Returns true if the filename has an image extension."""
+ mimetype = mimetypes.guess_type(filename)[0]
+ if not mimetype:
+ return False
+ return mimetype.startswith("image/")
+
+
+class SubversionVCS(VersionControlSystem):
+ """Implementation of the VersionControlSystem interface for Subversion."""
+
+ def __init__(self, options):
+ super(SubversionVCS, self).__init__(options)
+ if self.options.revision:
+ match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
+ if not match:
+ ErrorExit("Invalid Subversion revision %s." % self.options.revision)
+ self.rev_start = match.group(1)
+ self.rev_end = match.group(3)
+ else:
+ self.rev_start = self.rev_end = None
+ # Cache output from "svn list -r REVNO dirname".
+    # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
+ self.svnls_cache = {}
+ # SVN base URL is required to fetch files deleted in an older revision.
+ # Result is cached to not guess it over and over again in GetBaseFile().
+ required = self.options.download_base or self.options.revision is not None
+ self.svn_base = self._GuessBase(required)
+
+ def GuessBase(self, required):
+ """Wrapper for _GuessBase."""
+ return self.svn_base
+
+ def _GuessBase(self, required):
+ """Returns the SVN base URL.
+
+ Args:
+ required: If true, exits if the url can't be guessed, otherwise None is
+ returned.
+ """
+ info = RunShell(["svn", "info"])
+ for line in info.splitlines():
+ words = line.split()
+ if len(words) == 2 and words[0] == "URL:":
+ url = words[1]
+ scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
+ username, netloc = urllib.splituser(netloc)
+ if username:
+ logging.info("Removed username from base URL")
+ if netloc.endswith("svn.python.org"):
+ if netloc == "svn.python.org":
+ if path.startswith("/projects/"):
+ path = path[9:]
+ elif netloc != "pythondev@svn.python.org":
+ ErrorExit("Unrecognized Python URL: %s" % url)
+ base = "http://svn.python.org/view/*checkout*%s/" % path
+ logging.info("Guessed Python base = %s", base)
+ elif netloc.endswith("svn.collab.net"):
+ if path.startswith("/repos/"):
+ path = path[6:]
+ base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
+ logging.info("Guessed CollabNet base = %s", base)
+ elif netloc.endswith(".googlecode.com"):
+ path = path + "/"
+ base = urlparse.urlunparse(("http", netloc, path, params,
+ query, fragment))
+ logging.info("Guessed Google Code base = %s", base)
+ else:
+ path = path + "/"
+ base = urlparse.urlunparse((scheme, netloc, path, params,
+ query, fragment))
+ logging.info("Guessed base = %s", base)
+ return base
+ if required:
+ ErrorExit("Can't find URL in output from svn info")
+ return None
+
+ def GenerateDiff(self, args):
+ cmd = ["svn", "diff"]
+ if self.options.revision:
+ cmd += ["-r", self.options.revision]
+ cmd.extend(args)
+ data = RunShell(cmd)
+ count = 0
+ for line in data.splitlines():
+ if line.startswith("Index:") or line.startswith("Property changes on:"):
+ count += 1
+ logging.info(line)
+ if not count:
+ ErrorExit("No valid patches found in output from svn diff")
+ return data
+
+ def _CollapseKeywords(self, content, keyword_str):
+ """Collapses SVN keywords."""
+ # svn cat translates keywords but svn diff doesn't. As a result of this
+ # behavior patching.PatchChunks() fails with a chunk mismatch error.
+ # This part was originally written by the Review Board development team
+ # who had the same problem (http://reviews.review-board.org/r/276/).
+ # Mapping of keywords to known aliases
+ svn_keywords = {
+ # Standard keywords
+ 'Date': ['Date', 'LastChangedDate'],
+ 'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
+ 'Author': ['Author', 'LastChangedBy'],
+ 'HeadURL': ['HeadURL', 'URL'],
+ 'Id': ['Id'],
+
+ # Aliases
+ 'LastChangedDate': ['LastChangedDate', 'Date'],
+ 'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
+ 'LastChangedBy': ['LastChangedBy', 'Author'],
+ 'URL': ['URL', 'HeadURL'],
+ }
+
+ def repl(m):
+ if m.group(2):
+ return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
+ return "$%s$" % m.group(1)
+ keywords = [keyword
+ for name in keyword_str.split(" ")
+ for keyword in svn_keywords.get(name, [])]
+ return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
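+
+  # For illustration (hypothetical revision number): with keyword_str set to
+  # "Revision", the substitution above turns a line such as
+  #   // $Revision: 12345 $
+  # into
+  #   // $Revision$
+  # so the keyword text expanded by "svn cat" no longer causes the chunk
+  # mismatch described in the comment at the top of this method.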
+
+ def GetUnknownFiles(self):
+ status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
+ unknown_files = []
+ for line in status.split("\n"):
+ if line and line[0] == "?":
+ unknown_files.append(line)
+ return unknown_files
+
+ def ReadFile(self, filename):
+ """Returns the contents of a file."""
+ file = open(filename, 'rb')
+ result = ""
+ try:
+ result = file.read()
+ finally:
+ file.close()
+ return result
+
+ def GetStatus(self, filename):
+ """Returns the status of a file."""
+ if not self.options.revision:
+ status = RunShell(["svn", "status", "--ignore-externals", filename])
+ if not status:
+ ErrorExit("svn status returned no output for %s" % filename)
+ status_lines = status.splitlines()
+ # If file is in a cl, the output will begin with
+ # "\n--- Changelist 'cl_name':\n". See
+ # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
+ if (len(status_lines) == 3 and
+ not status_lines[0] and
+ status_lines[1].startswith("--- Changelist")):
+ status = status_lines[2]
+ else:
+ status = status_lines[0]
+ # If we have a revision to diff against we need to run "svn list"
+ # for the old and the new revision and compare the results to get
+ # the correct status for a file.
+ else:
+ dirname, relfilename = os.path.split(filename)
+ if dirname not in self.svnls_cache:
+ cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
+ out, returncode = RunShellWithReturnCode(cmd)
+ if returncode:
+ ErrorExit("Failed to get status for %s." % filename)
+ old_files = out.splitlines()
+ args = ["svn", "list"]
+ if self.rev_end:
+ args += ["-r", self.rev_end]
+ cmd = args + [dirname or "."]
+ out, returncode = RunShellWithReturnCode(cmd)
+ if returncode:
+ ErrorExit("Failed to run command %s" % cmd)
+ self.svnls_cache[dirname] = (old_files, out.splitlines())
+ old_files, new_files = self.svnls_cache[dirname]
+ if relfilename in old_files and relfilename not in new_files:
+ status = "D "
+ elif relfilename in old_files and relfilename in new_files:
+ status = "M "
+ else:
+ status = "A "
+ return status
+
+ def GetBaseFile(self, filename):
+ status = self.GetStatus(filename)
+ base_content = None
+ new_content = None
+
+ # If a file is copied its status will be "A +", which signifies
+ # "addition-with-history". See "svn st" for more information. We need to
+ # upload the original file or else diff parsing will fail if the file was
+ # edited.
+ if status[0] == "A" and status[3] != "+":
+ # We'll need to upload the new content if we're adding a binary file
+ # since diff's output won't contain it.
+ mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
+ silent_ok=True)
+ base_content = ""
+ is_binary = mimetype and not mimetype.startswith("text/")
+ if is_binary and self.IsImage(filename):
+ new_content = self.ReadFile(filename)
+ elif (status[0] in ("M", "D", "R") or
+ (status[0] == "A" and status[3] == "+") or # Copied file.
+ (status[0] == " " and status[1] == "M")): # Property change.
+ args = []
+ if self.options.revision:
+ url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
+ else:
+ # Don't change filename, it's needed later.
+ url = filename
+ args += ["-r", "BASE"]
+ cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
+ mimetype, returncode = RunShellWithReturnCode(cmd)
+ if returncode:
+ # File does not exist in the requested revision.
+ # Reset mimetype, it contains an error message.
+ mimetype = ""
+ get_base = False
+ is_binary = mimetype and not mimetype.startswith("text/")
+ if status[0] == " ":
+ # Empty base content just to force an upload.
+ base_content = ""
+ elif is_binary:
+ if self.IsImage(filename):
+ get_base = True
+ if status[0] == "M":
+ if not self.rev_end:
+ new_content = self.ReadFile(filename)
+ else:
+ url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
+ new_content = RunShell(["svn", "cat", url],
+ universal_newlines=True, silent_ok=True)
+ else:
+ base_content = ""
+ else:
+ get_base = True
+
+ if get_base:
+ if is_binary:
+ universal_newlines = False
+ else:
+ universal_newlines = True
+ if self.rev_start:
+ # "svn cat -r REV delete_file.txt" doesn't work. cat requires
+ # the full URL with "@REV" appended instead of using "-r" option.
+ url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
+ base_content = RunShell(["svn", "cat", url],
+ universal_newlines=universal_newlines,
+ silent_ok=True)
+ else:
+ base_content = RunShell(["svn", "cat", filename],
+ universal_newlines=universal_newlines,
+ silent_ok=True)
+ if not is_binary:
+ args = []
+ if self.rev_start:
+ url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
+ else:
+ url = filename
+ args += ["-r", "BASE"]
+ cmd = ["svn"] + args + ["propget", "svn:keywords", url]
+ keywords, returncode = RunShellWithReturnCode(cmd)
+ if keywords and not returncode:
+ base_content = self._CollapseKeywords(base_content, keywords)
+ else:
+ StatusUpdate("svn status returned unexpected output: %s" % status)
+ sys.exit(1)
+ return base_content, new_content, is_binary, status[0:5]
+
+
+class GitVCS(VersionControlSystem):
+ """Implementation of the VersionControlSystem interface for Git."""
+
+ def __init__(self, options):
+ super(GitVCS, self).__init__(options)
+ # Map of filename -> hash of base file.
+ self.base_hashes = {}
+
+ def GenerateDiff(self, extra_args):
+ # This is more complicated than svn's GenerateDiff because we must convert
+ # the diff output to include an svn-style "Index:" line as well as record
+ # the hashes of the base files, so we can upload them along with our diff.
+ if self.options.revision:
+ extra_args = [self.options.revision] + extra_args
+ gitdiff = RunShell(["git", "diff", "--full-index"] + extra_args)
+ svndiff = []
+ filecount = 0
+ filename = None
+ for line in gitdiff.splitlines():
+ match = re.match(r"diff --git a/(.*) b/.*$", line)
+ if match:
+ filecount += 1
+ filename = match.group(1)
+ svndiff.append("Index: %s\n" % filename)
+ else:
+ # The "index" line in a git diff looks like this (long hashes elided):
+ # index 82c0d44..b2cee3f 100755
+ # We want to save the left hash, as that identifies the base file.
+ match = re.match(r"index (\w+)\.\.", line)
+ if match:
+ self.base_hashes[filename] = match.group(1)
+ svndiff.append(line + "\n")
+ if not filecount:
+ ErrorExit("No valid patches found in output from git diff")
+ return "".join(svndiff)
+
+ def GetUnknownFiles(self):
+ status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
+ silent_ok=True)
+ return status.splitlines()
+
+ def GetBaseFile(self, filename):
+ hash = self.base_hashes[filename]
+ base_content = None
+ new_content = None
+ is_binary = False
+ if hash == "0" * 40: # All-zero hash indicates no base file.
+ status = "A"
+ base_content = ""
+ else:
+ status = "M"
+ base_content, returncode = RunShellWithReturnCode(["git", "show", hash])
+ if returncode:
+ ErrorExit("Got error status from 'git show %s'" % hash)
+ return (base_content, new_content, is_binary, status)
+
+
+class MercurialVCS(VersionControlSystem):
+ """Implementation of the VersionControlSystem interface for Mercurial."""
+
+ def __init__(self, options, repo_dir):
+ super(MercurialVCS, self).__init__(options)
+ # Absolute path to repository (we can be in a subdir)
+ self.repo_dir = os.path.normpath(repo_dir)
+ # Compute the subdir
+ cwd = os.path.normpath(os.getcwd())
+ assert cwd.startswith(self.repo_dir)
+ self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
+ if self.options.revision:
+ self.base_rev = self.options.revision
+ else:
+ self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()
+
+ def _GetRelPath(self, filename):
+ """Get relative path of a file according to the current directory,
+ given its logical path in the repo."""
+ assert filename.startswith(self.subdir), filename
+ return filename[len(self.subdir):].lstrip(r"\/")
+
+ def GenerateDiff(self, extra_args):
+ # If no file specified, restrict to the current subdir
+ extra_args = extra_args or ["."]
+ cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
+ data = RunShell(cmd, silent_ok=True)
+ svndiff = []
+ filecount = 0
+ for line in data.splitlines():
+ m = re.match("diff --git a/(\S+) b/(\S+)", line)
+ if m:
+        # Modify the line to make it look as if it comes from svn diff.
+ # With this modification no changes on the server side are required
+ # to make upload.py work with Mercurial repos.
+ # NOTE: for proper handling of moved/copied files, we have to use
+ # the second filename.
+ filename = m.group(2)
+ svndiff.append("Index: %s" % filename)
+ svndiff.append("=" * 67)
+ filecount += 1
+ logging.info(line)
+ else:
+ svndiff.append(line)
+ if not filecount:
+ ErrorExit("No valid patches found in output from hg diff")
+ return "\n".join(svndiff) + "\n"
+
+ def GetUnknownFiles(self):
+ """Return a list of files unknown to the VCS."""
+ args = []
+ status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
+ silent_ok=True)
+ unknown_files = []
+ for line in status.splitlines():
+ st, fn = line.split(" ", 1)
+ if st == "?":
+ unknown_files.append(fn)
+ return unknown_files
+
+ def GetBaseFile(self, filename):
+ # "hg status" and "hg cat" both take a path relative to the current subdir
+ # rather than to the repo root, but "hg diff" has given us the full path
+ # to the repo root.
+ base_content = ""
+ new_content = None
+ is_binary = False
+ oldrelpath = relpath = self._GetRelPath(filename)
+ # "hg status -C" returns two lines for moved/copied files, one otherwise
+ out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
+ out = out.splitlines()
+ # HACK: strip error message about missing file/directory if it isn't in
+ # the working copy
+ if out[0].startswith('%s: ' % relpath):
+ out = out[1:]
+ if len(out) > 1:
+ # Moved/copied => considered as modified, use old filename to
+ # retrieve base contents
+ oldrelpath = out[1].strip()
+ status = "M"
+ else:
+ status, _ = out[0].split(' ', 1)
+ if status != "A":
+ base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
+ silent_ok=True)
+ is_binary = "\0" in base_content # Mercurial's heuristic
+ if status != "R":
+ new_content = open(relpath, "rb").read()
+ is_binary = is_binary or "\0" in new_content
+ if is_binary and base_content:
+ # Fetch again without converting newlines
+ base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
+ silent_ok=True, universal_newlines=False)
+ if not is_binary or not self.IsImage(relpath):
+ new_content = None
+ return base_content, new_content, is_binary, status
+
+
+# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
+def SplitPatch(data):
+ """Splits a patch into separate pieces for each file.
+
+ Args:
+ data: A string containing the output of svn diff.
+
+ Returns:
+ A list of 2-tuple (filename, text) where text is the svn diff output
+ pertaining to filename.
+ """
+ patches = []
+ filename = None
+ diff = []
+ for line in data.splitlines(True):
+ new_filename = None
+ if line.startswith('Index:'):
+ unused, new_filename = line.split(':', 1)
+ new_filename = new_filename.strip()
+ elif line.startswith('Property changes on:'):
+ unused, temp_filename = line.split(':', 1)
+      # When a file is modified, paths use '/' between directories; however,
+      # when a property is modified, '\' is used on Windows. Make them the
+      # same, otherwise the file shows up twice.
+ temp_filename = temp_filename.strip().replace('\\', '/')
+ if temp_filename != filename:
+ # File has property changes but no modifications, create a new diff.
+ new_filename = temp_filename
+ if new_filename:
+ if filename and diff:
+ patches.append((filename, ''.join(diff)))
+ filename = new_filename
+ diff = [line]
+ continue
+ if diff is not None:
+ diff.append(line)
+ if filename and diff:
+ patches.append((filename, ''.join(diff)))
+ return patches
+
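+# A minimal usage sketch (illustrative input only): given a diff with two
+# "Index:" sections,
+#
+#   SplitPatch("Index: a.cc\n--- a.cc\n+++ a.cc\nIndex: b.cc\n--- b.cc\n")
+#
+# returns [('a.cc', <diff text for a.cc>), ('b.cc', <diff text for b.cc>)],
+# which UploadSeparatePatches() below then uploads one file at a time.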
+
+def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
+ """Uploads a separate patch for each file in the diff output.
+
+ Returns a list of [patch_key, filename] for each file.
+ """
+ patches = SplitPatch(data)
+ rv = []
+ for patch in patches:
+ if len(patch[1]) > MAX_UPLOAD_SIZE:
+ print ("Not uploading the patch for " + patch[0] +
+ " because the file is too large.")
+ continue
+ form_fields = [("filename", patch[0])]
+ if not options.download_base:
+ form_fields.append(("content_upload", "1"))
+ files = [("data", "data.diff", patch[1])]
+ ctype, body = EncodeMultipartFormData(form_fields, files)
+ url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
+ print "Uploading patch for " + patch[0]
+ response_body = rpc_server.Send(url, body, content_type=ctype)
+ lines = response_body.splitlines()
+ if not lines or lines[0] != "OK":
+ StatusUpdate(" --> %s" % response_body)
+ sys.exit(1)
+ rv.append([lines[1], patch[0]])
+ return rv
+
+
+def GuessVCS(options):
+ """Helper to guess the version control system.
+
+ This examines the current directory, guesses which VersionControlSystem
+ we're using, and returns an instance of the appropriate class. Exit with an
+ error if we can't figure it out.
+
+ Returns:
+ A VersionControlSystem instance. Exits if the VCS can't be guessed.
+ """
+ # Mercurial has a command to get the base directory of a repository
+ # Try running it, but don't die if we don't have hg installed.
+ # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
+ try:
+ out, returncode = RunShellWithReturnCode(["hg", "root"])
+ if returncode == 0:
+ return MercurialVCS(options, out.strip())
+ except OSError, (errno, message):
+ if errno != 2: # ENOENT -- they don't have hg installed.
+ raise
+
+ # Subversion has a .svn in all working directories.
+ if os.path.isdir('.svn'):
+ logging.info("Guessed VCS = Subversion")
+ return SubversionVCS(options)
+
+ # Git has a command to test if you're in a git tree.
+ # Try running it, but don't die if we don't have git installed.
+ try:
+ out, returncode = RunShellWithReturnCode(["git", "rev-parse",
+ "--is-inside-work-tree"])
+ if returncode == 0:
+ return GitVCS(options)
+ except OSError, (errno, message):
+ if errno != 2: # ENOENT -- they don't have git installed.
+ raise
+
+ ErrorExit(("Could not guess version control system. "
+ "Are you in a working copy directory?"))
+
+
+def RealMain(argv, data=None):
+ """The real main function.
+
+ Args:
+ argv: Command line arguments.
+ data: Diff contents. If None (default) the diff is generated by
+ the VersionControlSystem implementation returned by GuessVCS().
+
+ Returns:
+ A 2-tuple (issue id, patchset id).
+ The patchset id is None if the base files are not uploaded by this
+ script (applies only to SVN checkouts).
+ """
+ logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
+ "%(lineno)s %(message)s "))
+ os.environ['LC_ALL'] = 'C'
+ options, args = parser.parse_args(argv[1:])
+ global verbosity
+ verbosity = options.verbose
+ if verbosity >= 3:
+ logging.getLogger().setLevel(logging.DEBUG)
+ elif verbosity >= 2:
+ logging.getLogger().setLevel(logging.INFO)
+ vcs = GuessVCS(options)
+ if isinstance(vcs, SubversionVCS):
+ # base field is only allowed for Subversion.
+ # Note: Fetching base files may become deprecated in future releases.
+ base = vcs.GuessBase(options.download_base)
+ else:
+ base = None
+ if not base and options.download_base:
+ options.download_base = True
+ logging.info("Enabled upload of base file")
+ if not options.assume_yes:
+ vcs.CheckForUnknownFiles()
+ if data is None:
+ data = vcs.GenerateDiff(args)
+ files = vcs.GetBaseFiles(data)
+ if verbosity >= 1:
+ print "Upload server:", options.server, "(change with -s/--server)"
+ if options.issue:
+ prompt = "Message describing this patch set: "
+ else:
+ prompt = "New issue subject: "
+ message = options.message or raw_input(prompt).strip()
+ if not message:
+ ErrorExit("A non-empty message is required")
+ rpc_server = GetRpcServer(options)
+ form_fields = [("subject", message)]
+ if base:
+ form_fields.append(("base", base))
+ if options.issue:
+ form_fields.append(("issue", str(options.issue)))
+ if options.email:
+ form_fields.append(("user", options.email))
+ if options.reviewers:
+ for reviewer in options.reviewers.split(','):
+ if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
+ ErrorExit("Invalid email address: %s" % reviewer)
+ form_fields.append(("reviewers", options.reviewers))
+ if options.cc:
+ for cc in options.cc.split(','):
+ if "@" in cc and not cc.split("@")[1].count(".") == 1:
+ ErrorExit("Invalid email address: %s" % cc)
+ form_fields.append(("cc", options.cc))
+ description = options.description
+ if options.description_file:
+ if options.description:
+ ErrorExit("Can't specify description and description_file")
+ file = open(options.description_file, 'r')
+ description = file.read()
+ file.close()
+ if description:
+ form_fields.append(("description", description))
+  # Send a hash of all the base files so the server can determine if a copy
+ # already exists in an earlier patchset.
+ base_hashes = ""
+ for file, info in files.iteritems():
+ if not info[0] is None:
+ checksum = md5.new(info[0]).hexdigest()
+ if base_hashes:
+ base_hashes += "|"
+ base_hashes += checksum + ":" + file
+ form_fields.append(("base_hashes", base_hashes))
+ # If we're uploading base files, don't send the email before the uploads, so
+ # that it contains the file status.
+ if options.send_mail and options.download_base:
+ form_fields.append(("send_mail", "1"))
+ if not options.download_base:
+ form_fields.append(("content_upload", "1"))
+ if len(data) > MAX_UPLOAD_SIZE:
+ print "Patch is large, so uploading file patches separately."
+ uploaded_diff_file = []
+ form_fields.append(("separate_patches", "1"))
+ else:
+ uploaded_diff_file = [("data", "data.diff", data)]
+ ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
+ response_body = rpc_server.Send("/upload", body, content_type=ctype)
+ patchset = None
+ if not options.download_base or not uploaded_diff_file:
+ lines = response_body.splitlines()
+ if len(lines) >= 2:
+ msg = lines[0]
+ patchset = lines[1].strip()
+ patches = [x.split(" ", 1) for x in lines[2:]]
+ else:
+ msg = response_body
+ else:
+ msg = response_body
+ StatusUpdate(msg)
+ if not response_body.startswith("Issue created.") and \
+ not response_body.startswith("Issue updated."):
+ sys.exit(0)
+ issue = msg[msg.rfind("/")+1:]
+
+ if not uploaded_diff_file:
+ result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
+ if not options.download_base:
+ patches = result
+
+ if not options.download_base:
+ vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
+ if options.send_mail:
+ rpc_server.Send("/" + issue + "/mail", payload="")
+ return issue, patchset
+
+
+def main():
+ try:
+ RealMain(sys.argv)
+ except KeyboardInterrupt:
+ print
+ StatusUpdate("Interrupted.")
+ sys.exit(1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/Source/ThirdParty/gtest/scripts/upload_gtest.py b/Source/ThirdParty/gtest/scripts/upload_gtest.py
new file mode 100755
index 000000000..be19ae809
--- /dev/null
+++ b/Source/ThirdParty/gtest/scripts/upload_gtest.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""upload_gtest.py v0.1.0 -- uploads a Google Test patch for review.
+
+This simple wrapper passes all command line flags and
+--cc=googletestframework@googlegroups.com to upload.py.
+
+USAGE: upload_gtest.py [options for upload.py]
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import sys
+
+CC_FLAG = '--cc='
+GTEST_GROUP = 'googletestframework@googlegroups.com'
+
+
+def main():
+ # Finds the path to upload.py, assuming it is in the same directory
+ # as this file.
+ my_dir = os.path.dirname(os.path.abspath(__file__))
+ upload_py_path = os.path.join(my_dir, 'upload.py')
+
+ # Adds Google Test discussion group to the cc line if it's not there
+ # already.
+ upload_py_argv = [upload_py_path]
+ found_cc_flag = False
+ for arg in sys.argv[1:]:
+ if arg.startswith(CC_FLAG):
+ found_cc_flag = True
+ cc_line = arg[len(CC_FLAG):]
+ cc_list = [addr for addr in cc_line.split(',') if addr]
+ if GTEST_GROUP not in cc_list:
+ cc_list.append(GTEST_GROUP)
+ upload_py_argv.append(CC_FLAG + ','.join(cc_list))
+ else:
+ upload_py_argv.append(arg)
+
+ if not found_cc_flag:
+ upload_py_argv.append(CC_FLAG + GTEST_GROUP)
+
+ # Invokes upload.py with the modified command line flags.
+ os.execv(upload_py_path, upload_py_argv)
+
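+# For illustration (hypothetical address): running
+#   upload_gtest.py --cc=someone@example.com -m "Tweak docs"
+# ends up invoking upload.py as
+#   upload.py --cc=someone@example.com,googletestframework@googlegroups.com \
+#       -m "Tweak docs"
+# since the Google Test group is appended whenever it is missing from --cc.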
+
+if __name__ == '__main__':
+ main()
diff --git a/Source/ThirdParty/gtest/src/gtest-death-test.cc b/Source/ThirdParty/gtest/src/gtest-death-test.cc
new file mode 100644
index 000000000..0cf7825bc
--- /dev/null
+++ b/Source/ThirdParty/gtest/src/gtest-death-test.cc
@@ -0,0 +1,1161 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan), vladl@google.com (Vlad Losev)
+//
+// This file implements death tests.
+
+#include <gtest/gtest-death-test.h>
+#include <gtest/internal/gtest-port.h>
+
+#if GTEST_HAS_DEATH_TEST
+
+#if GTEST_OS_MAC
+#include <crt_externs.h>
+#endif // GTEST_OS_MAC
+
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>
+#include <stdarg.h>
+
+#if GTEST_OS_WINDOWS
+#include <windows.h>
+#else
+#include <sys/mman.h>
+#include <sys/wait.h>
+#endif // GTEST_OS_WINDOWS
+
+#endif // GTEST_HAS_DEATH_TEST
+
+#include <gtest/gtest-message.h>
+#include <gtest/internal/gtest-string.h>
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+namespace testing {
+
+// Constants.
+
+// The default death test style.
+static const char kDefaultDeathTestStyle[] = "fast";
+
+GTEST_DEFINE_string_(
+ death_test_style,
+ internal::StringFromGTestEnv("death_test_style", kDefaultDeathTestStyle),
+ "Indicates how to run a death test in a forked child process: "
+ "\"threadsafe\" (child process re-executes the test binary "
+ "from the beginning, running only the specific death test) or "
+ "\"fast\" (child process runs the death test immediately "
+ "after forking).");
+
+GTEST_DEFINE_bool_(
+ death_test_use_fork,
+ internal::BoolFromGTestEnv("death_test_use_fork", false),
+ "Instructs to use fork()/_exit() instead of clone() in death tests. "
+ "Ignored and always uses fork() on POSIX systems where clone() is not "
+ "implemented. Useful when running under valgrind or similar tools if "
+ "those do not support clone(). Valgrind 3.3.1 will just fail if "
+ "it sees an unsupported combination of clone() flags. "
+ "It is not recommended to use this flag w/o valgrind though it will "
+ "work in 99% of the cases. Once valgrind is fixed, this flag will "
+ "most likely be removed.");
+
+namespace internal {
+GTEST_DEFINE_string_(
+ internal_run_death_test, "",
+ "Indicates the file, line number, temporal index of "
+ "the single death test to run, and a file descriptor to "
+ "which a success code may be sent, all separated by "
+ "colons. This flag is specified if and only if the current "
+ "process is a sub-process launched for running a thread-safe "
+ "death test. FOR INTERNAL USE ONLY.");
+} // namespace internal
+
+#if GTEST_HAS_DEATH_TEST
+
+// ExitedWithCode constructor.
+ExitedWithCode::ExitedWithCode(int exit_code) : exit_code_(exit_code) {
+}
+
+// ExitedWithCode function-call operator.
+bool ExitedWithCode::operator()(int exit_status) const {
+#if GTEST_OS_WINDOWS
+ return exit_status == exit_code_;
+#else
+ return WIFEXITED(exit_status) && WEXITSTATUS(exit_status) == exit_code_;
+#endif // GTEST_OS_WINDOWS
+}
+
+#if !GTEST_OS_WINDOWS
+// KilledBySignal constructor.
+KilledBySignal::KilledBySignal(int signum) : signum_(signum) {
+}
+
+// KilledBySignal function-call operator.
+bool KilledBySignal::operator()(int exit_status) const {
+ return WIFSIGNALED(exit_status) && WTERMSIG(exit_status) == signum_;
+}
+#endif // !GTEST_OS_WINDOWS
+
+namespace internal {
+
+// Utilities needed for death tests.
+
+// Generates a textual description of a given exit code, in the format
+// specified by wait(2).
+static String ExitSummary(int exit_code) {
+ Message m;
+#if GTEST_OS_WINDOWS
+ m << "Exited with exit status " << exit_code;
+#else
+ if (WIFEXITED(exit_code)) {
+ m << "Exited with exit status " << WEXITSTATUS(exit_code);
+ } else if (WIFSIGNALED(exit_code)) {
+ m << "Terminated by signal " << WTERMSIG(exit_code);
+ }
+#ifdef WCOREDUMP
+ if (WCOREDUMP(exit_code)) {
+ m << " (core dumped)";
+ }
+#endif
+#endif // GTEST_OS_WINDOWS
+ return m.GetString();
+}
+
+// Returns true if exit_status describes a process that was terminated
+// by a signal, or exited normally with a nonzero exit code.
+bool ExitedUnsuccessfully(int exit_status) {
+ return !ExitedWithCode(0)(exit_status);
+}
+
+#if !GTEST_OS_WINDOWS
+// Generates a textual failure message when a death test finds more than
+// one thread running, or cannot determine the number of threads, prior
+// to executing the given statement. It is the responsibility of the
+// caller not to pass a thread_count of 1.
+static String DeathTestThreadWarning(size_t thread_count) {
+ Message msg;
+ msg << "Death tests use fork(), which is unsafe particularly"
+ << " in a threaded context. For this test, " << GTEST_NAME_ << " ";
+ if (thread_count == 0)
+ msg << "couldn't detect the number of threads.";
+ else
+ msg << "detected " << thread_count << " threads.";
+ return msg.GetString();
+}
+#endif // !GTEST_OS_WINDOWS
+
+// Flag characters for reporting a death test that did not die.
+static const char kDeathTestLived = 'L';
+static const char kDeathTestReturned = 'R';
+static const char kDeathTestInternalError = 'I';
+
+// An enumeration describing all of the possible ways that a death test
+// can conclude. DIED means that the process died while executing the
+// test code; LIVED means that the process lived beyond the end of the test
+// code; and RETURNED means that the test statement attempted a "return,"
+// which is not allowed. IN_PROGRESS means the test has not yet
+// concluded.
+enum DeathTestOutcome { IN_PROGRESS, DIED, LIVED, RETURNED };
+
+// Routine for aborting the program which is safe to call from an
+// exec-style death test child process, in which case the error
+// message is propagated back to the parent process. Otherwise, the
+// message is simply printed to stderr. In either case, the program
+// then exits with status 1.
+void DeathTestAbort(const String& message) {
+ // On a POSIX system, this function may be called from a threadsafe-style
+ // death test child process, which operates on a very small stack. Use
+ // the heap for any additional non-minuscule memory requirements.
+ const InternalRunDeathTestFlag* const flag =
+ GetUnitTestImpl()->internal_run_death_test_flag();
+ if (flag != NULL) {
+ FILE* parent = posix::FDOpen(flag->write_fd(), "w");
+ fputc(kDeathTestInternalError, parent);
+ fprintf(parent, "%s", message.c_str());
+ fflush(parent);
+ _exit(1);
+ } else {
+ fprintf(stderr, "%s", message.c_str());
+ fflush(stderr);
+ abort();
+ }
+}
+
+// A replacement for CHECK that calls DeathTestAbort if the assertion
+// fails.
+#define GTEST_DEATH_TEST_CHECK_(expression) \
+ do { \
+ if (!::testing::internal::IsTrue(expression)) { \
+ DeathTestAbort(::testing::internal::String::Format( \
+ "CHECK failed: File %s, line %d: %s", \
+ __FILE__, __LINE__, #expression)); \
+ } \
+ } while (::testing::internal::AlwaysFalse())
+
+// This macro is similar to GTEST_DEATH_TEST_CHECK_, but it is meant for
+// evaluating any system call that fulfills two conditions: it must return
+// -1 on failure, and set errno to EINTR when it is interrupted and
+// should be tried again. The macro expands to a loop that repeatedly
+// evaluates the expression as long as it evaluates to -1 and sets
+// errno to EINTR. If the expression evaluates to -1 but errno is
+// something other than EINTR, DeathTestAbort is called.
+#define GTEST_DEATH_TEST_CHECK_SYSCALL_(expression) \
+ do { \
+ int gtest_retval; \
+ do { \
+ gtest_retval = (expression); \
+ } while (gtest_retval == -1 && errno == EINTR); \
+ if (gtest_retval == -1) { \
+ DeathTestAbort(::testing::internal::String::Format( \
+ "CHECK failed: File %s, line %d: %s != -1", \
+ __FILE__, __LINE__, #expression)); \
+ } \
+ } while (::testing::internal::AlwaysFalse())
+
+// Returns the message describing the last system error in errno.
+String GetLastErrnoDescription() {
+ return String(errno == 0 ? "" : posix::StrError(errno));
+}
+
+// This is called from a death test parent process to read a failure
+// message from the death test child process and log it with the FATAL
+// severity. On Windows, the message is read from a pipe handle. On other
+// platforms, it is read from a file descriptor.
+static void FailFromInternalError(int fd) {
+ Message error;
+ char buffer[256];
+ int num_read;
+
+ do {
+ while ((num_read = posix::Read(fd, buffer, 255)) > 0) {
+ buffer[num_read] = '\0';
+ error << buffer;
+ }
+ } while (num_read == -1 && errno == EINTR);
+
+ if (num_read == 0) {
+ GTEST_LOG_(FATAL) << error.GetString();
+ } else {
+ const int last_error = errno;
+ GTEST_LOG_(FATAL) << "Error while reading death test internal: "
+ << GetLastErrnoDescription() << " [" << last_error << "]";
+ }
+}
+
+// Death test constructor. Increments the running death test count
+// for the current test.
+DeathTest::DeathTest() {
+ TestInfo* const info = GetUnitTestImpl()->current_test_info();
+ if (info == NULL) {
+ DeathTestAbort("Cannot run a death test outside of a TEST or "
+ "TEST_F construct");
+ }
+}
+
+// Creates and returns a death test by dispatching to the current
+// death test factory.
+bool DeathTest::Create(const char* statement, const RE* regex,
+ const char* file, int line, DeathTest** test) {
+ return GetUnitTestImpl()->death_test_factory()->Create(
+ statement, regex, file, line, test);
+}
+
+const char* DeathTest::LastMessage() {
+ return last_death_test_message_.c_str();
+}
+
+void DeathTest::set_last_death_test_message(const String& message) {
+ last_death_test_message_ = message;
+}
+
+String DeathTest::last_death_test_message_;
+
+// Provides a cross-platform implementation of some death test functionality.
+class DeathTestImpl : public DeathTest {
+ protected:
+ DeathTestImpl(const char* a_statement, const RE* a_regex)
+ : statement_(a_statement),
+ regex_(a_regex),
+ spawned_(false),
+ status_(-1),
+ outcome_(IN_PROGRESS),
+ read_fd_(-1),
+ write_fd_(-1) {}
+
+ // read_fd_ is expected to be closed and cleared by a derived class.
+ ~DeathTestImpl() { GTEST_DEATH_TEST_CHECK_(read_fd_ == -1); }
+
+ void Abort(AbortReason reason);
+ virtual bool Passed(bool status_ok);
+
+ const char* statement() const { return statement_; }
+ const RE* regex() const { return regex_; }
+ bool spawned() const { return spawned_; }
+ void set_spawned(bool is_spawned) { spawned_ = is_spawned; }
+ int status() const { return status_; }
+ void set_status(int a_status) { status_ = a_status; }
+ DeathTestOutcome outcome() const { return outcome_; }
+ void set_outcome(DeathTestOutcome an_outcome) { outcome_ = an_outcome; }
+ int read_fd() const { return read_fd_; }
+ void set_read_fd(int fd) { read_fd_ = fd; }
+ int write_fd() const { return write_fd_; }
+ void set_write_fd(int fd) { write_fd_ = fd; }
+
+ // Called in the parent process only. Reads the result code of the death
+ // test child process via a pipe, interprets it to set the outcome_
+ // member, and closes read_fd_. Outputs diagnostics and terminates in
+ // case of unexpected codes.
+ void ReadAndInterpretStatusByte();
+
+ private:
+ // The textual content of the code this object is testing. This class
+ // doesn't own this string and should not attempt to delete it.
+ const char* const statement_;
+ // The regular expression which test output must match. DeathTestImpl
+ // doesn't own this object and should not attempt to delete it.
+ const RE* const regex_;
+ // True if the death test child process has been successfully spawned.
+ bool spawned_;
+ // The exit status of the child process.
+ int status_;
+ // How the death test concluded.
+ DeathTestOutcome outcome_;
+ // Descriptor to the read end of the pipe to the child process. It is
+ // always -1 in the child process. The child keeps its write end of the
+ // pipe in write_fd_.
+ int read_fd_;
+ // Descriptor to the child's write end of the pipe to the parent process.
+ // It is always -1 in the parent process. The parent keeps its end of the
+ // pipe in read_fd_.
+ int write_fd_;
+};
+
+// Called in the parent process only. Reads the result code of the death
+// test child process via a pipe, interprets it to set the outcome_
+// member, and closes read_fd_. Outputs diagnostics and terminates in
+// case of unexpected codes.
+void DeathTestImpl::ReadAndInterpretStatusByte() {
+ char flag;
+ int bytes_read;
+
+ // The read() here blocks until data is available (signifying the
+ // failure of the death test) or until the pipe is closed (signifying
+ // its success), so it's okay to call this in the parent before
+ // the child process has exited.
+ do {
+ bytes_read = posix::Read(read_fd(), &flag, 1);
+ } while (bytes_read == -1 && errno == EINTR);
+
+ if (bytes_read == 0) {
+ set_outcome(DIED);
+ } else if (bytes_read == 1) {
+ switch (flag) {
+ case kDeathTestReturned:
+ set_outcome(RETURNED);
+ break;
+ case kDeathTestLived:
+ set_outcome(LIVED);
+ break;
+ case kDeathTestInternalError:
+ FailFromInternalError(read_fd()); // Does not return.
+ break;
+ default:
+ GTEST_LOG_(FATAL) << "Death test child process reported "
+ << "unexpected status byte ("
+ << static_cast<unsigned int>(flag) << ")";
+ }
+ } else {
+ GTEST_LOG_(FATAL) << "Read from death test child process failed: "
+ << GetLastErrnoDescription();
+ }
+ GTEST_DEATH_TEST_CHECK_SYSCALL_(posix::Close(read_fd()));
+ set_read_fd(-1);
+}
+
+// Signals that the death test code which should have exited, didn't.
+// Should be called only in a death test child process.
+// Writes a status byte to the child's status file descriptor, then
+// calls _exit(1).
+void DeathTestImpl::Abort(AbortReason reason) {
+ // The parent process considers the death test to be a failure if
+ // it finds any data in our pipe. So, here we write a single flag byte
+ // to the pipe, then exit.
+ const char status_ch =
+ reason == TEST_DID_NOT_DIE ? kDeathTestLived : kDeathTestReturned;
+ GTEST_DEATH_TEST_CHECK_SYSCALL_(posix::Write(write_fd(), &status_ch, 1));
+ GTEST_DEATH_TEST_CHECK_SYSCALL_(posix::Close(write_fd()));
+ _exit(1); // Exits w/o any normal exit hooks (we were supposed to crash)
+}
+
+// Assesses the success or failure of a death test, using both private
+// members which have previously been set, and one argument:
+//
+// Private data members:
+// outcome: An enumeration describing how the death test
+// concluded: DIED, LIVED, or RETURNED. The death test fails
+// in the latter two cases.
+// status: The exit status of the child process. On *nix, it is in the
+// format specified by wait(2). On Windows, this is the
+// value supplied to the ExitProcess() API or a numeric code
+// of the exception that terminated the program.
+// regex: A regular expression object to be applied to
+// the test's captured standard error output; the death test
+// fails if it does not match.
+//
+// Argument:
+// status_ok: true if exit_status is acceptable in the context of
+// this particular death test, which fails if it is false
+//
+// Returns true iff all of the above conditions are met. Otherwise, the
+// first failing condition, in the order given above, is the one that is
+// reported. Also sets the last death test message string.
+bool DeathTestImpl::Passed(bool status_ok) {
+ if (!spawned())
+ return false;
+
+ const String error_message = GetCapturedStderr();
+
+ bool success = false;
+ Message buffer;
+
+ buffer << "Death test: " << statement() << "\n";
+ switch (outcome()) {
+ case LIVED:
+ buffer << " Result: failed to die.\n"
+ << " Error msg: " << error_message;
+ break;
+ case RETURNED:
+ buffer << " Result: illegal return in test statement.\n"
+ << " Error msg: " << error_message;
+ break;
+ case DIED:
+ if (status_ok) {
+ const bool matched = RE::PartialMatch(error_message.c_str(), *regex());
+ if (matched) {
+ success = true;
+ } else {
+ buffer << " Result: died but not with expected error.\n"
+ << " Expected: " << regex()->pattern() << "\n"
+ << "Actual msg: " << error_message;
+ }
+ } else {
+ buffer << " Result: died but not with expected exit code:\n"
+ << " " << ExitSummary(status()) << "\n";
+ }
+ break;
+ case IN_PROGRESS:
+ default:
+ GTEST_LOG_(FATAL)
+ << "DeathTest::Passed somehow called before conclusion of test";
+ }
+
+ DeathTest::set_last_death_test_message(buffer.GetString());
+ return success;
+}
+
+#if GTEST_OS_WINDOWS
+// WindowsDeathTest implements death tests on Windows. Due to the
+// specifics of starting new processes on Windows, death tests there are
+// always threadsafe, and Google Test considers the
+// --gtest_death_test_style=fast setting to be equivalent to
+// --gtest_death_test_style=threadsafe there.
+//
+// A few implementation notes: Like the Linux version, the Windows
+// implementation uses pipes for child-to-parent communication. But due to
+// the specifics of pipes on Windows, some extra steps are required:
+//
+// 1. The parent creates a communication pipe and stores handles to both
+// ends of it.
+// 2. The parent starts the child and provides it with the information
+// necessary to acquire the handle to the write end of the pipe.
+// 3. The child acquires the write end of the pipe and signals the parent
+// using a Windows event.
+// 4. Now the parent can release the write end of the pipe on its side. If
+// this is done before step 3, the object's reference count goes down to
+// 0 and it is destroyed, preventing the child from acquiring it. The
+// parent now has to release it, or read operations on the read end of
+// the pipe will not return when the child terminates.
+// 5. The parent reads the child's output (outcome code and any possible
+//    error messages) from the pipe and from the child's stderr, and then
+//    determines whether to fail the test.
+//
+// Note: to distinguish Win32 API calls from the local method and function
+// calls, the former are explicitly resolved in the global namespace.
+//
+class WindowsDeathTest : public DeathTestImpl {
+ public:
+ WindowsDeathTest(const char* statement,
+ const RE* regex,
+ const char* file,
+ int line)
+ : DeathTestImpl(statement, regex), file_(file), line_(line) {}
+
+ // All of these virtual functions are inherited from DeathTest.
+ virtual int Wait();
+ virtual TestRole AssumeRole();
+
+ private:
+ // The name of the file in which the death test is located.
+ const char* const file_;
+ // The line number on which the death test is located.
+ const int line_;
+ // Handle to the write end of the pipe to the child process.
+ AutoHandle write_handle_;
+ // Child process handle.
+ AutoHandle child_handle_;
+ // Event the child process uses to signal the parent that it has
+ // acquired the handle to the write end of the pipe. After seeing this
+ // event the parent can release its own handles to make sure its
+ // ReadFile() calls return when the child terminates.
+ AutoHandle event_handle_;
+};
+
+// Waits for the child in a death test to exit, returning its exit
+// status, or 0 if no child process exists. As a side effect, sets the
+// outcome data member.
+int WindowsDeathTest::Wait() {
+ if (!spawned())
+ return 0;
+
+ // Wait until the child either signals that it has acquired the write end
+ // of the pipe or it dies.
+ const HANDLE wait_handles[2] = { child_handle_.Get(), event_handle_.Get() };
+ switch (::WaitForMultipleObjects(2,
+ wait_handles,
+ FALSE, // Waits for any of the handles.
+ INFINITE)) {
+ case WAIT_OBJECT_0:
+ case WAIT_OBJECT_0 + 1:
+ break;
+ default:
+ GTEST_DEATH_TEST_CHECK_(false); // Should not get here.
+ }
+
+ // The child has acquired the write end of the pipe or exited.
+ // We release the handle on our side and continue.
+ write_handle_.Reset();
+ event_handle_.Reset();
+
+ ReadAndInterpretStatusByte();
+
+  // Waits for the child process to exit if it hasn't already. This
+ // returns immediately if the child has already exited, regardless of
+ // whether previous calls to WaitForMultipleObjects synchronized on this
+ // handle or not.
+ GTEST_DEATH_TEST_CHECK_(
+ WAIT_OBJECT_0 == ::WaitForSingleObject(child_handle_.Get(),
+ INFINITE));
+ DWORD status;
+ GTEST_DEATH_TEST_CHECK_(::GetExitCodeProcess(child_handle_.Get(), &status)
+ != FALSE);
+ child_handle_.Reset();
+ set_status(static_cast<int>(status));
+ return this->status();
+}
+
+// The AssumeRole process for a Windows death test. It creates a child
+// process with the same executable as the current process to run the
+// death test. The child process is given the --gtest_filter and
+// --gtest_internal_run_death_test flags such that it knows to run the
+// current death test only.
+DeathTest::TestRole WindowsDeathTest::AssumeRole() {
+ const UnitTestImpl* const impl = GetUnitTestImpl();
+ const InternalRunDeathTestFlag* const flag =
+ impl->internal_run_death_test_flag();
+ const TestInfo* const info = impl->current_test_info();
+ const int death_test_index = info->result()->death_test_count();
+
+ if (flag != NULL) {
+ // ParseInternalRunDeathTestFlag() has performed all the necessary
+ // processing.
+ set_write_fd(flag->write_fd());
+ return EXECUTE_TEST;
+ }
+
+ // WindowsDeathTest uses an anonymous pipe to communicate results of
+ // a death test.
+ SECURITY_ATTRIBUTES handles_are_inheritable = {
+ sizeof(SECURITY_ATTRIBUTES), NULL, TRUE };
+ HANDLE read_handle, write_handle;
+ GTEST_DEATH_TEST_CHECK_(
+ ::CreatePipe(&read_handle, &write_handle, &handles_are_inheritable,
+ 0) // Default buffer size.
+ != FALSE);
+ set_read_fd(::_open_osfhandle(reinterpret_cast<intptr_t>(read_handle),
+ O_RDONLY));
+ write_handle_.Reset(write_handle);
+ event_handle_.Reset(::CreateEvent(
+ &handles_are_inheritable,
+      TRUE,    // Manual-reset event: it stays signaled until explicitly reset.
+      FALSE,   // The initial state is non-signaled.
+      NULL));  // The event is unnamed.
+ GTEST_DEATH_TEST_CHECK_(event_handle_.Get() != NULL);
+ const String filter_flag = String::Format("--%s%s=%s.%s",
+ GTEST_FLAG_PREFIX_, kFilterFlag,
+ info->test_case_name(),
+ info->name());
+ const String internal_flag = String::Format(
+ "--%s%s=%s|%d|%d|%u|%Iu|%Iu",
+ GTEST_FLAG_PREFIX_,
+ kInternalRunDeathTestFlag,
+ file_, line_,
+ death_test_index,
+ static_cast<unsigned int>(::GetCurrentProcessId()),
+      // size_t has the same width as pointers on both 32-bit and 64-bit
+ // Windows platforms.
+ // See http://msdn.microsoft.com/en-us/library/tcxf1dw6.aspx.
+ reinterpret_cast<size_t>(write_handle),
+ reinterpret_cast<size_t>(event_handle_.Get()));
+
+ char executable_path[_MAX_PATH + 1]; // NOLINT
+ GTEST_DEATH_TEST_CHECK_(
+ _MAX_PATH + 1 != ::GetModuleFileNameA(NULL,
+ executable_path,
+ _MAX_PATH));
+
+ String command_line = String::Format("%s %s \"%s\"",
+ ::GetCommandLineA(),
+ filter_flag.c_str(),
+ internal_flag.c_str());
+
+ DeathTest::set_last_death_test_message("");
+
+ CaptureStderr();
+ // Flush the log buffers since the log streams are shared with the child.
+ FlushInfoLog();
+
+ // The child process will share the standard handles with the parent.
+ STARTUPINFOA startup_info;
+ memset(&startup_info, 0, sizeof(STARTUPINFO));
+ startup_info.dwFlags = STARTF_USESTDHANDLES;
+ startup_info.hStdInput = ::GetStdHandle(STD_INPUT_HANDLE);
+ startup_info.hStdOutput = ::GetStdHandle(STD_OUTPUT_HANDLE);
+ startup_info.hStdError = ::GetStdHandle(STD_ERROR_HANDLE);
+
+ PROCESS_INFORMATION process_info;
+ GTEST_DEATH_TEST_CHECK_(::CreateProcessA(
+ executable_path,
+ const_cast<char*>(command_line.c_str()),
+      NULL,   // Returned process handle is not inheritable.
+      NULL,   // Returned thread handle is not inheritable.
+ TRUE, // Child inherits all inheritable handles (for write_handle_).
+ 0x0, // Default creation flags.
+ NULL, // Inherit the parent's environment.
+ UnitTest::GetInstance()->original_working_dir(),
+ &startup_info,
+ &process_info) != FALSE);
+ child_handle_.Reset(process_info.hProcess);
+ ::CloseHandle(process_info.hThread);
+ set_spawned(true);
+ return OVERSEE_TEST;
+}
+#else // We are not on Windows.
+
+// ForkingDeathTest provides implementations for most of the abstract
+// methods of the DeathTest interface. Only the AssumeRole method is
+// left undefined.
+class ForkingDeathTest : public DeathTestImpl {
+ public:
+ ForkingDeathTest(const char* statement, const RE* regex);
+
+ // All of these virtual functions are inherited from DeathTest.
+ virtual int Wait();
+
+ protected:
+ void set_child_pid(pid_t child_pid) { child_pid_ = child_pid; }
+
+ private:
+ // PID of child process during death test; 0 in the child process itself.
+ pid_t child_pid_;
+};
+
+// Constructs a ForkingDeathTest.
+ForkingDeathTest::ForkingDeathTest(const char* a_statement, const RE* a_regex)
+ : DeathTestImpl(a_statement, a_regex),
+ child_pid_(-1) {}
+
+// Waits for the child in a death test to exit, returning its exit
+// status, or 0 if no child process exists. As a side effect, sets the
+// outcome data member.
+int ForkingDeathTest::Wait() {
+ if (!spawned())
+ return 0;
+
+ ReadAndInterpretStatusByte();
+
+ int status_value;
+ GTEST_DEATH_TEST_CHECK_SYSCALL_(waitpid(child_pid_, &status_value, 0));
+ set_status(status_value);
+ return status_value;
+}
+
+// A concrete death test class that forks, then immediately runs the test
+// in the child process.
+class NoExecDeathTest : public ForkingDeathTest {
+ public:
+ NoExecDeathTest(const char* a_statement, const RE* a_regex) :
+ ForkingDeathTest(a_statement, a_regex) { }
+ virtual TestRole AssumeRole();
+};
+
+// The AssumeRole process for a fork-and-run death test. It implements a
+// straightforward fork, with a simple pipe to transmit the status byte.
+DeathTest::TestRole NoExecDeathTest::AssumeRole() {
+ const size_t thread_count = GetThreadCount();
+ if (thread_count != 1) {
+ GTEST_LOG_(WARNING) << DeathTestThreadWarning(thread_count);
+ }
+
+ int pipe_fd[2];
+ GTEST_DEATH_TEST_CHECK_(pipe(pipe_fd) != -1);
+
+ DeathTest::set_last_death_test_message("");
+ CaptureStderr();
+ // When we fork the process below, the log file buffers are copied, but the
+ // file descriptors are shared. We flush all log files here so that closing
+ // the file descriptors in the child process doesn't throw off the
+ // synchronization between descriptors and buffers in the parent process.
+ // This is as close to the fork as possible to avoid a race condition in case
+ // there are multiple threads running before the death test, and another
+ // thread writes to the log file.
+ FlushInfoLog();
+
+ const pid_t child_pid = fork();
+ GTEST_DEATH_TEST_CHECK_(child_pid != -1);
+ set_child_pid(child_pid);
+ if (child_pid == 0) {
+ GTEST_DEATH_TEST_CHECK_SYSCALL_(close(pipe_fd[0]));
+ set_write_fd(pipe_fd[1]);
+ // Redirects all logging to stderr in the child process to prevent
+ // concurrent writes to the log files. We capture stderr in the parent
+ // process and append the child process' output to a log.
+ LogToStderr();
+    // Event forwarding to the listeners of the event listener API must be
+    // shut down in death test subprocesses.
+ GetUnitTestImpl()->listeners()->SuppressEventForwarding();
+ return EXECUTE_TEST;
+ } else {
+ GTEST_DEATH_TEST_CHECK_SYSCALL_(close(pipe_fd[1]));
+ set_read_fd(pipe_fd[0]);
+ set_spawned(true);
+ return OVERSEE_TEST;
+ }
+}
+
+// A concrete death test class that forks and re-executes the main
+// program from the beginning, with command-line flags set that cause
+// only this specific death test to be run.
+class ExecDeathTest : public ForkingDeathTest {
+ public:
+ ExecDeathTest(const char* a_statement, const RE* a_regex,
+ const char* file, int line) :
+ ForkingDeathTest(a_statement, a_regex), file_(file), line_(line) { }
+ virtual TestRole AssumeRole();
+ private:
+ // The name of the file in which the death test is located.
+ const char* const file_;
+ // The line number on which the death test is located.
+ const int line_;
+};
+
+// Utility class for accumulating command-line arguments.
+class Arguments {
+ public:
+ Arguments() {
+ args_.push_back(NULL);
+ }
+
+ ~Arguments() {
+ for (std::vector<char*>::iterator i = args_.begin(); i != args_.end();
+ ++i) {
+ free(*i);
+ }
+ }
+ void AddArgument(const char* argument) {
+ args_.insert(args_.end() - 1, posix::StrDup(argument));
+ }
+
+ template <typename Str>
+ void AddArguments(const ::std::vector<Str>& arguments) {
+ for (typename ::std::vector<Str>::const_iterator i = arguments.begin();
+ i != arguments.end();
+ ++i) {
+ args_.insert(args_.end() - 1, posix::StrDup(i->c_str()));
+ }
+ }
+ char* const* Argv() {
+ return &args_[0];
+ }
+ private:
+ std::vector<char*> args_;
+};
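+
+// For reference only, an illustrative sketch of how this class is used (the
+// real usage is in ExecDeathTest::AssumeRole and ExecDeathTestChildMain
+// below; the extra flag here is hypothetical):
+//   Arguments args;
+//   args.AddArguments(GetArgvs());            // copy the original argv
+//   args.AddArgument("--some_flag=value");    // appended before the NULL slot
+//   execv(args.Argv()[0], args.Argv());       // Argv() is NULL-terminated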
+
+// A struct that encompasses the arguments to the child process of a
+// threadsafe-style death test process.
+struct ExecDeathTestArgs {
+ char* const* argv; // Command-line arguments for the child's call to exec
+ int close_fd; // File descriptor to close; the read end of a pipe
+};
+
+// The main function for a threadsafe-style death test child process.
+// This function is called in a clone()-ed process and thus must avoid
+// any potentially unsafe operations like malloc or libc functions.
+static int ExecDeathTestChildMain(void* child_arg) {
+ ExecDeathTestArgs* const args = static_cast<ExecDeathTestArgs*>(child_arg);
+ GTEST_DEATH_TEST_CHECK_SYSCALL_(close(args->close_fd));
+
+ // We need to execute the test program in the same environment where
+ // it was originally invoked. Therefore we change to the original
+ // working directory first.
+ const char* const original_dir =
+ UnitTest::GetInstance()->original_working_dir();
+ // We can safely call chdir() as it's a direct system call.
+ if (chdir(original_dir) != 0) {
+ DeathTestAbort(String::Format("chdir(\"%s\") failed: %s",
+ original_dir,
+ GetLastErrnoDescription().c_str()));
+ return EXIT_FAILURE;
+ }
+
+ // We can safely call execve() as it's a direct system call. We
+ // cannot use execvp() as it's a libc function and thus potentially
+ // unsafe. Since execve() doesn't search the PATH, the user must
+ // invoke the test program via a valid path that contains at least
+ // one path separator.
+ // We have replaced execve() with execv() for WebKit to avoid using
+ // environ. It should be safe because execv() is just a simple wrapper
+ // of execve().
+ execv(args->argv[0], args->argv);
+ DeathTestAbort(String::Format("execv(%s, ...) in %s failed: %s",
+ args->argv[0],
+ original_dir,
+ GetLastErrnoDescription().c_str()));
+ return EXIT_FAILURE;
+}
+
+// Two utility routines that together determine the direction the stack
+// grows.
+// This could be accomplished more elegantly by a single recursive
+// function, but we want to guard against the unlikely possibility of
+// a smart compiler optimizing the recursion away.
+bool StackLowerThanAddress(const void* ptr) {
+ int dummy;
+ return &dummy < ptr;
+}
+
+bool StackGrowsDown() {
+ int dummy;
+ return StackLowerThanAddress(&dummy);
+}
+
+// A threadsafe implementation of fork(2) for threadsafe-style death tests
+// that uses clone(2). It dies with an error message if anything goes
+// wrong.
+static pid_t ExecDeathTestFork(char* const* argv, int close_fd) {
+ ExecDeathTestArgs args = { argv, close_fd };
+ pid_t child_pid = -1;
+
+#if GTEST_HAS_CLONE
+ const bool use_fork = GTEST_FLAG(death_test_use_fork);
+
+ if (!use_fork) {
+ static const bool stack_grows_down = StackGrowsDown();
+ const size_t stack_size = getpagesize();
+    // MAP_ANONYMOUS is not defined on Mac, so we use MAP_ANON instead.
+ void* const stack = mmap(NULL, stack_size, PROT_READ | PROT_WRITE,
+ MAP_ANON | MAP_PRIVATE, -1, 0);
+ GTEST_DEATH_TEST_CHECK_(stack != MAP_FAILED);
+ void* const stack_top =
+ static_cast<char*>(stack) + (stack_grows_down ? stack_size : 0);
+
+ child_pid = clone(&ExecDeathTestChildMain, stack_top, SIGCHLD, &args);
+
+ GTEST_DEATH_TEST_CHECK_(munmap(stack, stack_size) != -1);
+ }
+#else
+ const bool use_fork = true;
+#endif // GTEST_HAS_CLONE
+
+ if (use_fork && (child_pid = fork()) == 0) {
+ ExecDeathTestChildMain(&args);
+ _exit(0);
+ }
+
+ GTEST_DEATH_TEST_CHECK_(child_pid != -1);
+ return child_pid;
+}
+
+// The AssumeRole process for a fork-and-exec death test. It re-executes the
+// main program from the beginning, setting the --gtest_filter
+// and --gtest_internal_run_death_test flags to cause only the current
+// death test to be re-run.
+DeathTest::TestRole ExecDeathTest::AssumeRole() {
+ const UnitTestImpl* const impl = GetUnitTestImpl();
+ const InternalRunDeathTestFlag* const flag =
+ impl->internal_run_death_test_flag();
+ const TestInfo* const info = impl->current_test_info();
+ const int death_test_index = info->result()->death_test_count();
+
+ if (flag != NULL) {
+ set_write_fd(flag->write_fd());
+ return EXECUTE_TEST;
+ }
+
+ int pipe_fd[2];
+ GTEST_DEATH_TEST_CHECK_(pipe(pipe_fd) != -1);
+ // Clear the close-on-exec flag on the write end of the pipe, lest
+ // it be closed when the child process does an exec:
+ GTEST_DEATH_TEST_CHECK_(fcntl(pipe_fd[1], F_SETFD, 0) != -1);
+
+ const String filter_flag =
+ String::Format("--%s%s=%s.%s",
+ GTEST_FLAG_PREFIX_, kFilterFlag,
+ info->test_case_name(), info->name());
+ const String internal_flag =
+ String::Format("--%s%s=%s|%d|%d|%d",
+ GTEST_FLAG_PREFIX_, kInternalRunDeathTestFlag,
+ file_, line_, death_test_index, pipe_fd[1]);
+ Arguments args;
+ args.AddArguments(GetArgvs());
+ args.AddArgument(filter_flag.c_str());
+ args.AddArgument(internal_flag.c_str());
+
+ DeathTest::set_last_death_test_message("");
+
+ CaptureStderr();
+ // See the comment in NoExecDeathTest::AssumeRole for why the next line
+ // is necessary.
+ FlushInfoLog();
+
+ const pid_t child_pid = ExecDeathTestFork(args.Argv(), pipe_fd[0]);
+ GTEST_DEATH_TEST_CHECK_SYSCALL_(close(pipe_fd[1]));
+ set_child_pid(child_pid);
+ set_read_fd(pipe_fd[0]);
+ set_spawned(true);
+ return OVERSEE_TEST;
+}
+
+#endif // !GTEST_OS_WINDOWS
+
+// Creates a concrete DeathTest-derived class that depends on the
+// --gtest_death_test_style flag, and sets the pointer pointed to
+// by the "test" argument to its address. If the test should be
+// skipped, sets that pointer to NULL. Returns true, unless the
+// flag is set to an invalid value.
+bool DefaultDeathTestFactory::Create(const char* statement, const RE* regex,
+ const char* file, int line,
+ DeathTest** test) {
+ UnitTestImpl* const impl = GetUnitTestImpl();
+ const InternalRunDeathTestFlag* const flag =
+ impl->internal_run_death_test_flag();
+ const int death_test_index = impl->current_test_info()
+ ->increment_death_test_count();
+
+ if (flag != NULL) {
+ if (death_test_index > flag->index()) {
+ DeathTest::set_last_death_test_message(String::Format(
+ "Death test count (%d) somehow exceeded expected maximum (%d)",
+ death_test_index, flag->index()));
+ return false;
+ }
+
+ if (!(flag->file() == file && flag->line() == line &&
+ flag->index() == death_test_index)) {
+ *test = NULL;
+ return true;
+ }
+ }
+
+#if GTEST_OS_WINDOWS
+ if (GTEST_FLAG(death_test_style) == "threadsafe" ||
+ GTEST_FLAG(death_test_style) == "fast") {
+ *test = new WindowsDeathTest(statement, regex, file, line);
+ }
+#else
+ if (GTEST_FLAG(death_test_style) == "threadsafe") {
+ *test = new ExecDeathTest(statement, regex, file, line);
+ } else if (GTEST_FLAG(death_test_style) == "fast") {
+ *test = new NoExecDeathTest(statement, regex);
+ }
+#endif // GTEST_OS_WINDOWS
+ else { // NOLINT - this is more readable than unbalanced brackets inside #if.
+ DeathTest::set_last_death_test_message(String::Format(
+ "Unknown death test style \"%s\" encountered",
+ GTEST_FLAG(death_test_style).c_str()));
+ return false;
+ }
+
+ return true;
+}
+
+// Splits a given string on a given delimiter, populating a given
+// vector with the fields. GTEST_HAS_DEATH_TEST implies that we have
+// ::std::string, so we can use it here.
+static void SplitString(const ::std::string& str, char delimiter,
+ ::std::vector< ::std::string>* dest) {
+ ::std::vector< ::std::string> parsed;
+ ::std::string::size_type pos = 0;
+ while (::testing::internal::AlwaysTrue()) {
+ const ::std::string::size_type colon = str.find(delimiter, pos);
+ if (colon == ::std::string::npos) {
+ parsed.push_back(str.substr(pos));
+ break;
+ } else {
+ parsed.push_back(str.substr(pos, colon - pos));
+ pos = colon + 1;
+ }
+ }
+ dest->swap(parsed);
+}
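+
+// For example, SplitString("foo.cc|42|0|5", '|', &fields) leaves fields
+// holding {"foo.cc", "42", "0", "5"}, and SplitString("", '|', &fields)
+// leaves it holding a single empty string.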
+
+#if GTEST_OS_WINDOWS
+// Recreates the pipe and event handles from the provided parameters,
+// signals the event, and returns a file descriptor wrapped around the pipe
+// handle. This function is called in the child process only.
+int GetStatusFileDescriptor(unsigned int parent_process_id,
+ size_t write_handle_as_size_t,
+ size_t event_handle_as_size_t) {
+ AutoHandle parent_process_handle(::OpenProcess(PROCESS_DUP_HANDLE,
+ FALSE, // Non-inheritable.
+ parent_process_id));
+ if (parent_process_handle.Get() == INVALID_HANDLE_VALUE) {
+ DeathTestAbort(String::Format("Unable to open parent process %u",
+ parent_process_id));
+ }
+
+ // TODO(vladl@google.com): Replace the following check with a
+ // compile-time assertion when available.
+ GTEST_CHECK_(sizeof(HANDLE) <= sizeof(size_t));
+
+ const HANDLE write_handle =
+ reinterpret_cast<HANDLE>(write_handle_as_size_t);
+ HANDLE dup_write_handle;
+
+  // The newly initialized handle is accessible only in the parent
+ // process. To obtain one accessible within the child, we need to use
+ // DuplicateHandle.
+ if (!::DuplicateHandle(parent_process_handle.Get(), write_handle,
+ ::GetCurrentProcess(), &dup_write_handle,
+ 0x0, // Requested privileges ignored since
+ // DUPLICATE_SAME_ACCESS is used.
+                         FALSE, // Request a non-inheritable handle.
+ DUPLICATE_SAME_ACCESS)) {
+ DeathTestAbort(String::Format(
+ "Unable to duplicate the pipe handle %Iu from the parent process %u",
+ write_handle_as_size_t, parent_process_id));
+ }
+
+ const HANDLE event_handle = reinterpret_cast<HANDLE>(event_handle_as_size_t);
+ HANDLE dup_event_handle;
+
+ if (!::DuplicateHandle(parent_process_handle.Get(), event_handle,
+ ::GetCurrentProcess(), &dup_event_handle,
+ 0x0,
+ FALSE,
+ DUPLICATE_SAME_ACCESS)) {
+ DeathTestAbort(String::Format(
+ "Unable to duplicate the event handle %Iu from the parent process %u",
+ event_handle_as_size_t, parent_process_id));
+ }
+
+ const int write_fd =
+ ::_open_osfhandle(reinterpret_cast<intptr_t>(dup_write_handle), O_APPEND);
+ if (write_fd == -1) {
+ DeathTestAbort(String::Format(
+ "Unable to convert pipe handle %Iu to a file descriptor",
+ write_handle_as_size_t));
+ }
+
+ // Signals the parent that the write end of the pipe has been acquired
+ // so the parent can release its own write end.
+ ::SetEvent(dup_event_handle);
+
+ return write_fd;
+}
+#endif // GTEST_OS_WINDOWS
+
+// Returns a newly created InternalRunDeathTestFlag object with fields
+// initialized from the GTEST_FLAG(internal_run_death_test) flag if
+// the flag is specified; otherwise returns NULL.
+InternalRunDeathTestFlag* ParseInternalRunDeathTestFlag() {
+ if (GTEST_FLAG(internal_run_death_test) == "") return NULL;
+
+ // GTEST_HAS_DEATH_TEST implies that we have ::std::string, so we
+ // can use it here.
+ int line = -1;
+ int index = -1;
+ ::std::vector< ::std::string> fields;
+ SplitString(GTEST_FLAG(internal_run_death_test).c_str(), '|', &fields);
+ int write_fd = -1;
+
+#if GTEST_OS_WINDOWS
+ unsigned int parent_process_id = 0;
+ size_t write_handle_as_size_t = 0;
+ size_t event_handle_as_size_t = 0;
+
+ if (fields.size() != 6
+ || !ParseNaturalNumber(fields[1], &line)
+ || !ParseNaturalNumber(fields[2], &index)
+ || !ParseNaturalNumber(fields[3], &parent_process_id)
+ || !ParseNaturalNumber(fields[4], &write_handle_as_size_t)
+ || !ParseNaturalNumber(fields[5], &event_handle_as_size_t)) {
+ DeathTestAbort(String::Format(
+ "Bad --gtest_internal_run_death_test flag: %s",
+ GTEST_FLAG(internal_run_death_test).c_str()));
+ }
+ write_fd = GetStatusFileDescriptor(parent_process_id,
+ write_handle_as_size_t,
+ event_handle_as_size_t);
+#else
+ if (fields.size() != 4
+ || !ParseNaturalNumber(fields[1], &line)
+ || !ParseNaturalNumber(fields[2], &index)
+ || !ParseNaturalNumber(fields[3], &write_fd)) {
+ DeathTestAbort(String::Format(
+ "Bad --gtest_internal_run_death_test flag: %s",
+ GTEST_FLAG(internal_run_death_test).c_str()));
+ }
+#endif // GTEST_OS_WINDOWS
+ return new InternalRunDeathTestFlag(fields[0], line, index, write_fd);
+}
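+
+// For reference (field values below are illustrative only): on POSIX systems
+// the flag parsed above has the form "file|line|index|write_fd", e.g.
+//   --gtest_internal_run_death_test=foo_test.cc|120|0|5
+// while on Windows the write_fd field is replaced by the parent process id
+// and the write/event handle values, giving six '|'-separated fields.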
+
+} // namespace internal
+
+#endif // GTEST_HAS_DEATH_TEST
+
+} // namespace testing
diff --git a/Source/ThirdParty/gtest/src/gtest-filepath.cc b/Source/ThirdParty/gtest/src/gtest-filepath.cc
new file mode 100644
index 000000000..c1ef9188a
--- /dev/null
+++ b/Source/ThirdParty/gtest/src/gtest-filepath.cc
@@ -0,0 +1,380 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: keith.ray@gmail.com (Keith Ray)
+
+#include <gtest/internal/gtest-filepath.h>
+#include <gtest/internal/gtest-port.h>
+
+#include <stdlib.h>
+
+#if GTEST_OS_WINDOWS_MOBILE
+#include <windows.h>
+#elif GTEST_OS_WINDOWS
+#include <direct.h>
+#include <io.h>
+#elif GTEST_OS_SYMBIAN
+// Symbian OpenC has PATH_MAX in sys/syslimits.h
+#include <sys/syslimits.h>
+#else
+#include <limits.h>
+#include <climits> // Some Linux distributions define PATH_MAX here.
+#endif // GTEST_OS_WINDOWS_MOBILE
+
+#if GTEST_OS_WINDOWS
+#define GTEST_PATH_MAX_ _MAX_PATH
+#elif defined(PATH_MAX)
+#define GTEST_PATH_MAX_ PATH_MAX
+#elif defined(_XOPEN_PATH_MAX)
+#define GTEST_PATH_MAX_ _XOPEN_PATH_MAX
+#else
+#define GTEST_PATH_MAX_ _POSIX_PATH_MAX
+#endif // GTEST_OS_WINDOWS
+
+#include <gtest/internal/gtest-string.h>
+
+namespace testing {
+namespace internal {
+
+#if GTEST_OS_WINDOWS
+// On Windows, '\\' is the standard path separator, but many tools and the
+// Windows API also accept '/' as an alternate path separator. Unless otherwise
+// noted, a file path can contain either kind of path separators, or a mixture
+// of them.
+const char kPathSeparator = '\\';
+const char kAlternatePathSeparator = '/';
+const char kPathSeparatorString[] = "\\";
+const char kAlternatePathSeparatorString[] = "/";
+#if GTEST_OS_WINDOWS_MOBILE
+// Windows CE doesn't have a current directory. You should not use
+// the current directory in tests on Windows CE, but this at least
+// provides a reasonable fallback.
+const char kCurrentDirectoryString[] = "\\";
+// Windows CE doesn't define INVALID_FILE_ATTRIBUTES
+const DWORD kInvalidFileAttributes = 0xffffffff;
+#else
+const char kCurrentDirectoryString[] = ".\\";
+#endif // GTEST_OS_WINDOWS_MOBILE
+#else
+const char kPathSeparator = '/';
+const char kPathSeparatorString[] = "/";
+const char kCurrentDirectoryString[] = "./";
+#endif // GTEST_OS_WINDOWS
+
+// Returns whether the given character is a valid path separator.
+static bool IsPathSeparator(char c) {
+#if GTEST_HAS_ALT_PATH_SEP_
+ return (c == kPathSeparator) || (c == kAlternatePathSeparator);
+#else
+ return c == kPathSeparator;
+#endif
+}
+
+// Returns the current working directory, or "" if unsuccessful.
+FilePath FilePath::GetCurrentDir() {
+#if GTEST_OS_WINDOWS_MOBILE
+ // Windows CE doesn't have a current directory, so we just return
+ // something reasonable.
+ return FilePath(kCurrentDirectoryString);
+#elif GTEST_OS_WINDOWS
+ char cwd[GTEST_PATH_MAX_ + 1] = { '\0' };
+ return FilePath(_getcwd(cwd, sizeof(cwd)) == NULL ? "" : cwd);
+#else
+ char cwd[GTEST_PATH_MAX_ + 1] = { '\0' };
+ return FilePath(getcwd(cwd, sizeof(cwd)) == NULL ? "" : cwd);
+#endif // GTEST_OS_WINDOWS_MOBILE
+}
+
+// Returns a copy of the FilePath with the case-insensitive extension removed.
+// Example: FilePath("dir/file.exe").RemoveExtension("EXE") returns
+// FilePath("dir/file"). If a case-insensitive extension is not
+// found, returns a copy of the original FilePath.
+FilePath FilePath::RemoveExtension(const char* extension) const {
+ String dot_extension(String::Format(".%s", extension));
+ if (pathname_.EndsWithCaseInsensitive(dot_extension.c_str())) {
+    return FilePath(String(pathname_.c_str(),
+                           pathname_.length() - dot_extension.length()));
+ }
+ return *this;
+}
+
+// Returns a pointer to the last occurrence of a valid path separator in
+// the FilePath. On Windows, for example, both '/' and '\' are valid path
+// separators. Returns NULL if no path separator was found.
+const char* FilePath::FindLastPathSeparator() const {
+ const char* const last_sep = strrchr(c_str(), kPathSeparator);
+#if GTEST_HAS_ALT_PATH_SEP_
+ const char* const last_alt_sep = strrchr(c_str(), kAlternatePathSeparator);
+ // Comparing two pointers of which only one is NULL is undefined.
+ if (last_alt_sep != NULL &&
+ (last_sep == NULL || last_alt_sep > last_sep)) {
+ return last_alt_sep;
+ }
+#endif
+ return last_sep;
+}
+
+// Returns a copy of the FilePath with the directory part removed.
+// Example: FilePath("path/to/file").RemoveDirectoryName() returns
+// FilePath("file"). If there is no directory part ("just_a_file"), it returns
+// the FilePath unmodified. If there is no file part ("just_a_dir/") it
+// returns an empty FilePath ("").
+// On Windows platform, '\' is the path separator, otherwise it is '/'.
+FilePath FilePath::RemoveDirectoryName() const {
+ const char* const last_sep = FindLastPathSeparator();
+ return last_sep ? FilePath(String(last_sep + 1)) : *this;
+}
+
+// RemoveFileName returns the directory path with the filename removed.
+// Example: FilePath("path/to/file").RemoveFileName() returns "path/to/".
+// If the FilePath is "a_file" or "/a_file", RemoveFileName returns
+// FilePath("./") or, on Windows, FilePath(".\\"). If the filepath does
+// not have a file, like "just/a/dir/", it returns the FilePath unmodified.
+// On Windows platform, '\' is the path separator, otherwise it is '/'.
+FilePath FilePath::RemoveFileName() const {
+ const char* const last_sep = FindLastPathSeparator();
+ String dir;
+ if (last_sep) {
+ dir = String(c_str(), last_sep + 1 - c_str());
+ } else {
+ dir = kCurrentDirectoryString;
+ }
+ return FilePath(dir);
+}
+
+// Helper functions for naming files in a directory for xml output.
+
+// Given directory = "dir", base_name = "test", number = 0,
+// extension = "xml", returns "dir/test.xml". If number is greater
+// than zero (e.g., 12), returns "dir/test_12.xml".
+// On Windows platform, uses \ as the separator rather than /.
+FilePath FilePath::MakeFileName(const FilePath& directory,
+ const FilePath& base_name,
+ int number,
+ const char* extension) {
+ String file;
+ if (number == 0) {
+ file = String::Format("%s.%s", base_name.c_str(), extension);
+ } else {
+ file = String::Format("%s_%d.%s", base_name.c_str(), number, extension);
+ }
+ return ConcatPaths(directory, FilePath(file));
+}
+
+// Given directory = "dir", relative_path = "test.xml", returns "dir/test.xml".
+// On Windows, uses \ as the separator rather than /.
+FilePath FilePath::ConcatPaths(const FilePath& directory,
+ const FilePath& relative_path) {
+ if (directory.IsEmpty())
+ return relative_path;
+ const FilePath dir(directory.RemoveTrailingPathSeparator());
+ return FilePath(String::Format("%s%c%s", dir.c_str(), kPathSeparator,
+ relative_path.c_str()));
+}
+
+// Returns true if pathname describes something findable in the file-system,
+// either a file, directory, or whatever.
+bool FilePath::FileOrDirectoryExists() const {
+#if GTEST_OS_WINDOWS_MOBILE
+ LPCWSTR unicode = String::AnsiToUtf16(pathname_.c_str());
+ const DWORD attributes = GetFileAttributes(unicode);
+ delete [] unicode;
+ return attributes != kInvalidFileAttributes;
+#else
+ posix::StatStruct file_stat;
+ return posix::Stat(pathname_.c_str(), &file_stat) == 0;
+#endif // GTEST_OS_WINDOWS_MOBILE
+}
+
+// Returns true if pathname describes a directory in the file-system
+// that exists.
+bool FilePath::DirectoryExists() const {
+ bool result = false;
+#if GTEST_OS_WINDOWS
+ // Don't strip off trailing separator if path is a root directory on
+ // Windows (like "C:\\").
+ const FilePath& path(IsRootDirectory() ? *this :
+ RemoveTrailingPathSeparator());
+#else
+ const FilePath& path(*this);
+#endif
+
+#if GTEST_OS_WINDOWS_MOBILE
+ LPCWSTR unicode = String::AnsiToUtf16(path.c_str());
+ const DWORD attributes = GetFileAttributes(unicode);
+ delete [] unicode;
+ if ((attributes != kInvalidFileAttributes) &&
+ (attributes & FILE_ATTRIBUTE_DIRECTORY)) {
+ result = true;
+ }
+#else
+ posix::StatStruct file_stat;
+ result = posix::Stat(path.c_str(), &file_stat) == 0 &&
+ posix::IsDir(file_stat);
+#endif // GTEST_OS_WINDOWS_MOBILE
+
+ return result;
+}
+
+// Returns true if pathname describes a root directory. (Windows has one
+// root directory per disk drive.)
+bool FilePath::IsRootDirectory() const {
+#if GTEST_OS_WINDOWS
+ // TODO(wan@google.com): on Windows a network share like
+ // \\server\share can be a root directory, although it cannot be the
+ // current directory. Handle this properly.
+ return pathname_.length() == 3 && IsAbsolutePath();
+#else
+ return pathname_.length() == 1 && IsPathSeparator(pathname_.c_str()[0]);
+#endif
+}
+
+// Returns true if pathname describes an absolute path.
+bool FilePath::IsAbsolutePath() const {
+ const char* const name = pathname_.c_str();
+#if GTEST_OS_WINDOWS
+ return pathname_.length() >= 3 &&
+ ((name[0] >= 'a' && name[0] <= 'z') ||
+ (name[0] >= 'A' && name[0] <= 'Z')) &&
+ name[1] == ':' &&
+ IsPathSeparator(name[2]);
+#else
+ return IsPathSeparator(name[0]);
+#endif
+}
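+
+// For example, "/usr/local" is absolute on POSIX systems, "C:\temp" is
+// absolute on Windows (drive letter, ':', then a separator), and "foo/bar"
+// is absolute on neither.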
+
+// Returns a pathname for a file that does not currently exist. The pathname
+// will be directory/base_name.extension or
+// directory/base_name_<number>.extension if directory/base_name.extension
+// already exists. The number will be incremented until a pathname is found
+// that does not already exist.
+// Examples: 'dir/foo_test.xml' or 'dir/foo_test_1.xml'.
+// There could be a race condition if two or more processes are calling this
+// function at the same time -- they could both pick the same filename.
+FilePath FilePath::GenerateUniqueFileName(const FilePath& directory,
+ const FilePath& base_name,
+ const char* extension) {
+ FilePath full_pathname;
+ int number = 0;
+ do {
+ full_pathname.Set(MakeFileName(directory, base_name, number++, extension));
+ } while (full_pathname.FileOrDirectoryExists());
+ return full_pathname;
+}
+
+// Returns true if FilePath ends with a path separator, which indicates that
+// it is intended to represent a directory. Returns false otherwise.
+// This does NOT check that a directory (or file) actually exists.
+bool FilePath::IsDirectory() const {
+ return !pathname_.empty() &&
+ IsPathSeparator(pathname_.c_str()[pathname_.length() - 1]);
+}
+
+// Create directories so that path exists. Returns true if successful or if
+// the directories already exist; returns false if unable to create directories
+// for any reason.
+bool FilePath::CreateDirectoriesRecursively() const {
+ if (!this->IsDirectory()) {
+ return false;
+ }
+
+ if (pathname_.length() == 0 || this->DirectoryExists()) {
+ return true;
+ }
+
+ const FilePath parent(this->RemoveTrailingPathSeparator().RemoveFileName());
+ return parent.CreateDirectoriesRecursively() && this->CreateFolder();
+}
+
+// Create the directory so that path exists. Returns true if successful or
+// if the directory already exists; returns false if unable to create the
+// directory for any reason, including if the parent directory does not
+// exist. Not named "CreateDirectory" because that's a macro on Windows.
+bool FilePath::CreateFolder() const {
+#if GTEST_OS_WINDOWS_MOBILE
+ FilePath removed_sep(this->RemoveTrailingPathSeparator());
+ LPCWSTR unicode = String::AnsiToUtf16(removed_sep.c_str());
+ int result = CreateDirectory(unicode, NULL) ? 0 : -1;
+ delete [] unicode;
+#elif GTEST_OS_WINDOWS
+ int result = _mkdir(pathname_.c_str());
+#else
+ int result = mkdir(pathname_.c_str(), 0777);
+#endif // GTEST_OS_WINDOWS_MOBILE
+
+ if (result == -1) {
+ return this->DirectoryExists(); // An error is OK if the directory exists.
+ }
+ return true; // No error.
+}
+
+// If input name has a trailing separator character, remove it and return the
+// name, otherwise return the name string unmodified.
+// On Windows platform, uses \ as the separator, other platforms use /.
+FilePath FilePath::RemoveTrailingPathSeparator() const {
+ return IsDirectory()
+ ? FilePath(String(pathname_.c_str(), pathname_.length() - 1))
+ : *this;
+}
+
+// Removes any redundant separators that might be in the pathname.
+// For example, "bar///foo" becomes "bar/foo". Does not eliminate other
+// redundancies that might be in a pathname involving "." or "..".
+// TODO(wan@google.com): handle Windows network shares (e.g. \\server\share).
+void FilePath::Normalize() {
+ if (pathname_.c_str() == NULL) {
+ pathname_ = "";
+ return;
+ }
+ const char* src = pathname_.c_str();
+ char* const dest = new char[pathname_.length() + 1];
+ char* dest_ptr = dest;
+ memset(dest_ptr, 0, pathname_.length() + 1);
+
+ while (*src != '\0') {
+ *dest_ptr = *src;
+ if (!IsPathSeparator(*src)) {
+ src++;
+ } else {
+#if GTEST_HAS_ALT_PATH_SEP_
+ if (*dest_ptr == kAlternatePathSeparator) {
+ *dest_ptr = kPathSeparator;
+ }
+#endif
+ while (IsPathSeparator(*src))
+ src++;
+ }
+ dest_ptr++;
+ }
+ *dest_ptr = '\0';
+ pathname_ = dest;
+ delete[] dest;
+}
+
+} // namespace internal
+} // namespace testing
diff --git a/Source/ThirdParty/gtest/src/gtest-internal-inl.h b/Source/ThirdParty/gtest/src/gtest-internal-inl.h
new file mode 100644
index 000000000..01415f7d9
--- /dev/null
+++ b/Source/ThirdParty/gtest/src/gtest-internal-inl.h
@@ -0,0 +1,1073 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Utility functions and classes used by the Google C++ testing framework.
+//
+// Author: wan@google.com (Zhanyong Wan)
+//
+// This file contains purely Google Test's internal implementation. Please
+// DO NOT #INCLUDE IT IN A USER PROGRAM.
+
+#ifndef GTEST_SRC_GTEST_INTERNAL_INL_H_
+#define GTEST_SRC_GTEST_INTERNAL_INL_H_
+
+// GTEST_IMPLEMENTATION_ is defined to 1 iff the current translation unit is
+// part of Google Test's implementation; otherwise it's undefined.
+#if !GTEST_IMPLEMENTATION_
+// A user is trying to include this from his code - just say no.
+#error "gtest-internal-inl.h is part of Google Test's internal implementation."
+#error "It must not be included except by Google Test itself."
+#endif // GTEST_IMPLEMENTATION_
+
+#ifndef _WIN32_WCE
+#include <errno.h>
+#endif // !_WIN32_WCE
+#include <stddef.h>
+#include <stdlib.h> // For strtoll/_strtoul64/malloc/free.
+#include <string.h> // For memmove.
+
+#include <algorithm>
+#include <string>
+#include <vector>
+
+#include <gtest/internal/gtest-port.h>
+
+#if GTEST_OS_WINDOWS
+#include <windows.h> // For DWORD.
+#endif // GTEST_OS_WINDOWS
+
+#include <gtest/gtest.h> // NOLINT
+#include <gtest/gtest-spi.h>
+
+namespace testing {
+
+// Declares the flags.
+//
+// We don't want the users to modify this flag in the code, but want
+// Google Test's own unit tests to be able to access it. Therefore we
+// declare it here as opposed to in gtest.h.
+GTEST_DECLARE_bool_(death_test_use_fork);
+
+namespace internal {
+
+// The value of GetTestTypeId() as seen from within the Google Test
+// library. This is solely for testing GetTestTypeId().
+GTEST_API_ extern const TypeId kTestTypeIdInGoogleTest;
+
+// Names of the flags (needed for parsing Google Test flags).
+const char kAlsoRunDisabledTestsFlag[] = "also_run_disabled_tests";
+const char kBreakOnFailureFlag[] = "break_on_failure";
+const char kCatchExceptionsFlag[] = "catch_exceptions";
+const char kColorFlag[] = "color";
+const char kFilterFlag[] = "filter";
+const char kListTestsFlag[] = "list_tests";
+const char kOutputFlag[] = "output";
+const char kPrintTimeFlag[] = "print_time";
+const char kRandomSeedFlag[] = "random_seed";
+const char kRepeatFlag[] = "repeat";
+const char kShuffleFlag[] = "shuffle";
+const char kStackTraceDepthFlag[] = "stack_trace_depth";
+const char kThrowOnFailureFlag[] = "throw_on_failure";
+
+// A valid random seed must be in [1, kMaxRandomSeed].
+const int kMaxRandomSeed = 99999;
+
+// g_help_flag is true iff the --help flag or an equivalent form is
+// specified on the command line.
+GTEST_API_ extern bool g_help_flag;
+
+// Returns the current time in milliseconds.
+GTEST_API_ TimeInMillis GetTimeInMillis();
+
+// Returns true iff Google Test should use colors in the output.
+GTEST_API_ bool ShouldUseColor(bool stdout_is_tty);
+
+// Formats the given time in milliseconds as seconds.
+GTEST_API_ std::string FormatTimeInMillisAsSeconds(TimeInMillis ms);
+
+// Parses a string for an Int32 flag, in the form of "--flag=value".
+//
+// On success, stores the value of the flag in *value, and returns
+// true. On failure, returns false without changing *value.
+GTEST_API_ bool ParseInt32Flag(
+ const char* str, const char* flag, Int32* value);
+
+// Returns a random seed in range [1, kMaxRandomSeed] based on the
+// given --gtest_random_seed flag value.
+inline int GetRandomSeedFromFlag(Int32 random_seed_flag) {
+ const unsigned int raw_seed = (random_seed_flag == 0) ?
+ static_cast<unsigned int>(GetTimeInMillis()) :
+ static_cast<unsigned int>(random_seed_flag);
+
+ // Normalizes the actual seed to range [1, kMaxRandomSeed] such that
+ // it's easy to type.
+ const int normalized_seed =
+ static_cast<int>((raw_seed - 1U) %
+ static_cast<unsigned int>(kMaxRandomSeed)) + 1;
+ return normalized_seed;
+}
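+
+// For example, a --gtest_random_seed value of 12345 yields seed 12345, a
+// value of 0 selects a time-based seed, and kMaxRandomSeed + 1 (100000)
+// wraps around to 1.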
+
+// Returns the first valid random seed after 'seed'. The behavior is
+// undefined if 'seed' is invalid. The seed after kMaxRandomSeed is
+// considered to be 1.
+inline int GetNextRandomSeed(int seed) {
+ GTEST_CHECK_(1 <= seed && seed <= kMaxRandomSeed)
+ << "Invalid random seed " << seed << " - must be in [1, "
+ << kMaxRandomSeed << "].";
+ const int next_seed = seed + 1;
+ return (next_seed > kMaxRandomSeed) ? 1 : next_seed;
+}
+
+// This class saves the values of all Google Test flags in its c'tor, and
+// restores them in its d'tor.
+class GTestFlagSaver {
+ public:
+ // The c'tor.
+ GTestFlagSaver() {
+ also_run_disabled_tests_ = GTEST_FLAG(also_run_disabled_tests);
+ break_on_failure_ = GTEST_FLAG(break_on_failure);
+ catch_exceptions_ = GTEST_FLAG(catch_exceptions);
+ color_ = GTEST_FLAG(color);
+ death_test_style_ = GTEST_FLAG(death_test_style);
+ death_test_use_fork_ = GTEST_FLAG(death_test_use_fork);
+ filter_ = GTEST_FLAG(filter);
+ internal_run_death_test_ = GTEST_FLAG(internal_run_death_test);
+ list_tests_ = GTEST_FLAG(list_tests);
+ output_ = GTEST_FLAG(output);
+ print_time_ = GTEST_FLAG(print_time);
+ random_seed_ = GTEST_FLAG(random_seed);
+ repeat_ = GTEST_FLAG(repeat);
+ shuffle_ = GTEST_FLAG(shuffle);
+ stack_trace_depth_ = GTEST_FLAG(stack_trace_depth);
+ throw_on_failure_ = GTEST_FLAG(throw_on_failure);
+ }
+
+ // The d'tor is not virtual. DO NOT INHERIT FROM THIS CLASS.
+ ~GTestFlagSaver() {
+ GTEST_FLAG(also_run_disabled_tests) = also_run_disabled_tests_;
+ GTEST_FLAG(break_on_failure) = break_on_failure_;
+ GTEST_FLAG(catch_exceptions) = catch_exceptions_;
+ GTEST_FLAG(color) = color_;
+ GTEST_FLAG(death_test_style) = death_test_style_;
+ GTEST_FLAG(death_test_use_fork) = death_test_use_fork_;
+ GTEST_FLAG(filter) = filter_;
+ GTEST_FLAG(internal_run_death_test) = internal_run_death_test_;
+ GTEST_FLAG(list_tests) = list_tests_;
+ GTEST_FLAG(output) = output_;
+ GTEST_FLAG(print_time) = print_time_;
+ GTEST_FLAG(random_seed) = random_seed_;
+ GTEST_FLAG(repeat) = repeat_;
+ GTEST_FLAG(shuffle) = shuffle_;
+ GTEST_FLAG(stack_trace_depth) = stack_trace_depth_;
+ GTEST_FLAG(throw_on_failure) = throw_on_failure_;
+ }
+ private:
+ // Fields for saving the original values of flags.
+ bool also_run_disabled_tests_;
+ bool break_on_failure_;
+ bool catch_exceptions_;
+ String color_;
+ String death_test_style_;
+ bool death_test_use_fork_;
+ String filter_;
+ String internal_run_death_test_;
+ bool list_tests_;
+ String output_;
+ bool print_time_;
+ internal::Int32 random_seed_;
+ internal::Int32 repeat_;
+ bool shuffle_;
+ internal::Int32 stack_trace_depth_;
+ bool throw_on_failure_;
+} GTEST_ATTRIBUTE_UNUSED_;
+
+// Converts a Unicode code point to a narrow string in UTF-8 encoding.
+// code_point parameter is of type UInt32 because wchar_t may not be
+// wide enough to contain a code point.
+// The output buffer str must contain at least 32 characters.
+// The function returns the address of the output buffer.
+// If the code_point is not a valid Unicode code point
+// (i.e. outside of Unicode range U+0 to U+10FFFF) it will be output
+// as '(Invalid Unicode 0xXXXXXXXX)'.
+GTEST_API_ char* CodePointToUtf8(UInt32 code_point, char* str);
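+
+// For illustration only (an editorial sketch, not upstream documentation):
+// converting U+00E9 yields the two-byte UTF-8 sequence 0xC3 0xA9.
+//
+//   char buffer[32];
+//   CodePointToUtf8(0x00E9, buffer);  // buffer now holds "\xC3\xA9"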
+
+// Converts a wide string to a narrow string in UTF-8 encoding.
+// The wide string is assumed to have the following encoding:
+// UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin, Symbian OS)
+// UTF-32 if sizeof(wchar_t) == 4 (on Linux)
+// Parameter str points to a null-terminated wide string.
+// Parameter num_chars may additionally limit the number
+// of wchar_t characters processed. -1 is used when the entire string
+// should be processed.
+// If the string contains code points that are not valid Unicode code points
+// (i.e. outside of Unicode range U+0 to U+10FFFF) they will be output
+// as '(Invalid Unicode 0xXXXXXXXX)'. If the string is in UTF-16 encoding
+// and contains invalid UTF-16 surrogate pairs, values in those pairs
+// will be encoded as individual Unicode characters from the Basic
+// Multilingual Plane.
+GTEST_API_ String WideStringToUtf8(const wchar_t* str, int num_chars);
+
+// Reads the GTEST_SHARD_STATUS_FILE environment variable, and creates the file
+// if the variable is present. If a file already exists at this location, this
+// function will write over it. If the variable is present, but the file cannot
+// be created, prints an error and exits.
+void WriteToShardStatusFileIfNeeded();
+
+// Checks whether sharding is enabled by examining the relevant
+// environment variable values. If the variables are present,
+// but inconsistent (e.g., shard_index >= total_shards), prints
+// an error and exits. If in_subprocess_for_death_test, sharding is
+// disabled because it must only be applied to the original test
+// process. Otherwise, we could filter out death tests we intended to execute.
+GTEST_API_ bool ShouldShard(const char* total_shards_str,
+ const char* shard_index_str,
+ bool in_subprocess_for_death_test);
+
+// Parses the environment variable var as an Int32. If it is unset,
+// returns default_val. If it is not an Int32, prints an error and
+// aborts.
+GTEST_API_ Int32 Int32FromEnvOrDie(const char* env_var, Int32 default_val);
+
+// Given the total number of shards, the shard index, and the test id,
+// returns true iff the test should be run on this shard. The test id is
+// some arbitrary but unique non-negative integer assigned to each test
+// method. Assumes that 0 <= shard_index < total_shards.
+GTEST_API_ bool ShouldRunTestOnShard(
+ int total_shards, int shard_index, int test_id);
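+
+// Usage sketch (editorial, not part of the upstream sources): each test id
+// is assigned to exactly one shard, so a runner can split the work across
+// processes via the GTEST_TOTAL_SHARDS and GTEST_SHARD_INDEX variables.
+//
+//   // In a child process launched with GTEST_TOTAL_SHARDS=3, GTEST_SHARD_INDEX=1:
+//   if (ShouldRunTestOnShard(3, 1, test_id)) {
+//     // ... run this test ...
+//   }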
+
+// STL container utilities.
+
+// Returns the number of elements in the given container that satisfy
+// the given predicate.
+template <class Container, typename Predicate>
+inline int CountIf(const Container& c, Predicate predicate) {
+ return static_cast<int>(std::count_if(c.begin(), c.end(), predicate));
+}
+
+// Applies a function/functor to each element in the container.
+template <class Container, typename Functor>
+void ForEach(const Container& c, Functor functor) {
+ std::for_each(c.begin(), c.end(), functor);
+}
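+
+// Example (editorial sketch): counting and visiting elements with the
+// helpers above.  IsEven and PrintValue are hypothetical callables, not
+// part of gtest.
+//
+//   std::vector<int> v;
+//   const int evens = CountIf(v, IsEven);  // number of elements satisfying IsEven
+//   ForEach(v, PrintValue);                // applies PrintValue to every element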
+
+// Returns the i-th element of the vector, or default_value if i is not
+// in range [0, v.size()).
+template <typename E>
+inline E GetElementOr(const std::vector<E>& v, int i, E default_value) {
+ return (i < 0 || i >= static_cast<int>(v.size())) ? default_value : v[i];
+}
+
+// Performs an in-place shuffle of a range of the vector's elements.
+// 'begin' and 'end' are element indices as an STL-style range;
+// i.e. [begin, end) are shuffled, where 'end' == size() means to
+// shuffle to the end of the vector.
+template <typename E>
+void ShuffleRange(internal::Random* random, int begin, int end,
+ std::vector<E>* v) {
+ const int size = static_cast<int>(v->size());
+ GTEST_CHECK_(0 <= begin && begin <= size)
+ << "Invalid shuffle range start " << begin << ": must be in range [0, "
+ << size << "].";
+ GTEST_CHECK_(begin <= end && end <= size)
+ << "Invalid shuffle range finish " << end << ": must be in range ["
+ << begin << ", " << size << "].";
+
+ // Fisher-Yates shuffle, from
+ // http://en.wikipedia.org/wiki/Fisher-Yates_shuffle
+ for (int range_width = end - begin; range_width >= 2; range_width--) {
+ const int last_in_range = begin + range_width - 1;
+ const int selected = begin + random->Generate(range_width);
+ std::swap((*v)[selected], (*v)[last_in_range]);
+ }
+}
+
+// Performs an in-place shuffle of the vector's elements.
+template <typename E>
+inline void Shuffle(internal::Random* random, std::vector<E>* v) {
+ ShuffleRange(random, 0, static_cast<int>(v->size()), v);
+}
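+
+// Editorial sketch of the shuffle helpers (not upstream documentation):
+//
+//   internal::Random random(42);
+//   std::vector<int> v(10);
+//   ShuffleRange(&random, 3, 7, &v);  // only indices [3, 7) are permuted
+//   Shuffle(&random, &v);             // permutes the whole vector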
+
+// A function for deleting an object. Handy for use as a
+// functor.
+template <typename T>
+static void Delete(T* x) {
+ delete x;
+}
+
+// A predicate that checks the key of a TestProperty against a known key.
+//
+// TestPropertyKeyIs is copyable.
+class TestPropertyKeyIs {
+ public:
+ // Constructor.
+ //
+ // TestPropertyKeyIs has NO default constructor.
+ explicit TestPropertyKeyIs(const char* key)
+ : key_(key) {}
+
+  // Returns true iff the key of the given test property matches key_.
+ bool operator()(const TestProperty& test_property) const {
+ return String(test_property.key()).Compare(key_) == 0;
+ }
+
+ private:
+ String key_;
+};
+
+class TestInfoImpl {
+ public:
+ TestInfoImpl(TestInfo* parent, const char* test_case_name,
+ const char* name, const char* test_case_comment,
+ const char* comment, TypeId fixture_class_id,
+ internal::TestFactoryBase* factory);
+ ~TestInfoImpl();
+
+ // Returns true if this test should run.
+ bool should_run() const { return should_run_; }
+
+ // Sets the should_run member.
+ void set_should_run(bool should) { should_run_ = should; }
+
+ // Returns true if this test is disabled. Disabled tests are not run.
+ bool is_disabled() const { return is_disabled_; }
+
+ // Sets the is_disabled member.
+ void set_is_disabled(bool is) { is_disabled_ = is; }
+
+ // Returns true if this test matches the filter specified by the user.
+ bool matches_filter() const { return matches_filter_; }
+
+ // Sets the matches_filter member.
+ void set_matches_filter(bool matches) { matches_filter_ = matches; }
+
+ // Returns the test case name.
+ const char* test_case_name() const { return test_case_name_.c_str(); }
+
+ // Returns the test name.
+ const char* name() const { return name_.c_str(); }
+
+ // Returns the test case comment.
+ const char* test_case_comment() const { return test_case_comment_.c_str(); }
+
+ // Returns the test comment.
+ const char* comment() const { return comment_.c_str(); }
+
+ // Returns the ID of the test fixture class.
+ TypeId fixture_class_id() const { return fixture_class_id_; }
+
+ // Returns the test result.
+ TestResult* result() { return &result_; }
+ const TestResult* result() const { return &result_; }
+
+ // Creates the test object, runs it, records its result, and then
+ // deletes it.
+ void Run();
+
+ // Clears the test result.
+ void ClearResult() { result_.Clear(); }
+
+ // Clears the test result in the given TestInfo object.
+ static void ClearTestResult(TestInfo * test_info) {
+ test_info->impl()->ClearResult();
+ }
+
+ private:
+ // These fields are immutable properties of the test.
+ TestInfo* const parent_; // The owner of this object
+ const String test_case_name_; // Test case name
+ const String name_; // Test name
+ const String test_case_comment_; // Test case comment
+ const String comment_; // Test comment
+ const TypeId fixture_class_id_; // ID of the test fixture class
+ bool should_run_; // True iff this test should run
+ bool is_disabled_; // True iff this test is disabled
+ bool matches_filter_; // True if this test matches the
+ // user-specified filter.
+ internal::TestFactoryBase* const factory_; // The factory that creates
+ // the test object
+
+ // This field is mutable and needs to be reset before running the
+ // test for the second time.
+ TestResult result_;
+
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(TestInfoImpl);
+};
+
+// Class UnitTestOptions.
+//
+// This class contains functions for processing options the user
+// specifies when running the tests. It has only static members.
+//
+// In most cases, the user can specify an option using either an
+// environment variable or a command line flag. E.g. you can set the
+// test filter using either GTEST_FILTER or --gtest_filter. If both
+// the variable and the flag are present, the latter overrides the
+// former.
+class GTEST_API_ UnitTestOptions {
+ public:
+ // Functions for processing the gtest_output flag.
+
+ // Returns the output format, or "" for normal printed output.
+ static String GetOutputFormat();
+
+ // Returns the absolute path of the requested output file, or the
+ // default (test_detail.xml in the original working directory) if
+ // none was explicitly specified.
+ static String GetAbsolutePathToOutputFile();
+
+ // Functions for processing the gtest_filter flag.
+
+ // Returns true iff the wildcard pattern matches the string. The
+ // first ':' or '\0' character in pattern marks the end of it.
+ //
+ // This recursive algorithm isn't very efficient, but is clear and
+ // works well enough for matching test names, which are short.
+ static bool PatternMatchesString(const char *pattern, const char *str);
+
+ // Returns true iff the user-specified filter matches the test case
+ // name and the test name.
+ static bool FilterMatchesTest(const String &test_case_name,
+ const String &test_name);
+
+#if GTEST_OS_WINDOWS
+  // Function for supporting the gtest_catch_exceptions flag.
+
+ // Returns EXCEPTION_EXECUTE_HANDLER if Google Test should handle the
+ // given SEH exception, or EXCEPTION_CONTINUE_SEARCH otherwise.
+ // This function is useful as an __except condition.
+ static int GTestShouldProcessSEH(DWORD exception_code);
+#endif // GTEST_OS_WINDOWS
+
+ // Returns true if "name" matches the ':' separated list of glob-style
+ // filters in "filter".
+ static bool MatchesFilter(const String& name, const char* filter);
+};
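+
+// Filter-matching sketch (editorial, not upstream documentation). A filter
+// is a ':'-separated list of glob patterns; '*' matches any substring and
+// '?' matches any single character.
+//
+//   UnitTestOptions::MatchesFilter(String("FooTest.Bar"), "FooTest.*");    // true
+//   UnitTestOptions::MatchesFilter(String("FooTest.Bar"), "*.Baz:*.Bar");  // true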
+
+// Returns the current application's name, removing directory path if that
+// is present. Used by UnitTestOptions::GetAbsolutePathToOutputFile.
+GTEST_API_ FilePath GetCurrentExecutableName();
+
+// The role interface for getting the OS stack trace as a string.
+class OsStackTraceGetterInterface {
+ public:
+ OsStackTraceGetterInterface() {}
+ virtual ~OsStackTraceGetterInterface() {}
+
+ // Returns the current OS stack trace as a String. Parameters:
+ //
+ // max_depth - the maximum number of stack frames to be included
+ // in the trace.
+ // skip_count - the number of top frames to be skipped; doesn't count
+ // against max_depth.
+ virtual String CurrentStackTrace(int max_depth, int skip_count) = 0;
+
+ // UponLeavingGTest() should be called immediately before Google Test calls
+ // user code. It saves some information about the current stack that
+ // CurrentStackTrace() will use to find and hide Google Test stack frames.
+ virtual void UponLeavingGTest() = 0;
+
+ private:
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(OsStackTraceGetterInterface);
+};
+
+// A working implementation of the OsStackTraceGetterInterface interface.
+class OsStackTraceGetter : public OsStackTraceGetterInterface {
+ public:
+ OsStackTraceGetter() : caller_frame_(NULL) {}
+ virtual String CurrentStackTrace(int max_depth, int skip_count);
+ virtual void UponLeavingGTest();
+
+ // This string is inserted in place of stack frames that are part of
+ // Google Test's implementation.
+ static const char* const kElidedFramesMarker;
+
+ private:
+ Mutex mutex_; // protects all internal state
+
+ // We save the stack frame below the frame that calls user code.
+ // We do this because the address of the frame immediately below
+ // the user code changes between the call to UponLeavingGTest()
+ // and any calls to CurrentStackTrace() from within the user code.
+ void* caller_frame_;
+
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(OsStackTraceGetter);
+};
+
+// Information about a Google Test trace point.
+struct TraceInfo {
+ const char* file;
+ int line;
+ String message;
+};
+
+// This is the default global test part result reporter used in UnitTestImpl.
+// This class should only be used by UnitTestImpl.
+class DefaultGlobalTestPartResultReporter
+ : public TestPartResultReporterInterface {
+ public:
+ explicit DefaultGlobalTestPartResultReporter(UnitTestImpl* unit_test);
+ // Implements the TestPartResultReporterInterface. Reports the test part
+ // result in the current test.
+ virtual void ReportTestPartResult(const TestPartResult& result);
+
+ private:
+ UnitTestImpl* const unit_test_;
+
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultGlobalTestPartResultReporter);
+};
+
+// This is the default per thread test part result reporter used in
+// UnitTestImpl. This class should only be used by UnitTestImpl.
+class DefaultPerThreadTestPartResultReporter
+ : public TestPartResultReporterInterface {
+ public:
+ explicit DefaultPerThreadTestPartResultReporter(UnitTestImpl* unit_test);
+ // Implements the TestPartResultReporterInterface. The implementation just
+ // delegates to the current global test part result reporter of *unit_test_.
+ virtual void ReportTestPartResult(const TestPartResult& result);
+
+ private:
+ UnitTestImpl* const unit_test_;
+
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultPerThreadTestPartResultReporter);
+};
+
+// The private implementation of the UnitTest class. We don't protect
+// the methods under a mutex, as this class is not accessible by a
+// user and the UnitTest class that delegates work to this class does
+// proper locking.
+class GTEST_API_ UnitTestImpl {
+ public:
+ explicit UnitTestImpl(UnitTest* parent);
+ virtual ~UnitTestImpl();
+
+ // There are two different ways to register your own TestPartResultReporter.
+  // You can register your own reporter to listen either only for test results
+  // from the current thread or for results from all threads.
+  // By default, each per-thread test result reporter just passes a new
+ // TestPartResult to the global test result reporter, which registers the
+ // test part result for the currently running test.
+
+ // Returns the global test part result reporter.
+ TestPartResultReporterInterface* GetGlobalTestPartResultReporter();
+
+ // Sets the global test part result reporter.
+ void SetGlobalTestPartResultReporter(
+ TestPartResultReporterInterface* reporter);
+
+ // Returns the test part result reporter for the current thread.
+ TestPartResultReporterInterface* GetTestPartResultReporterForCurrentThread();
+
+ // Sets the test part result reporter for the current thread.
+ void SetTestPartResultReporterForCurrentThread(
+ TestPartResultReporterInterface* reporter);
+
+ // Gets the number of successful test cases.
+ int successful_test_case_count() const;
+
+ // Gets the number of failed test cases.
+ int failed_test_case_count() const;
+
+ // Gets the number of all test cases.
+ int total_test_case_count() const;
+
+ // Gets the number of all test cases that contain at least one test
+ // that should run.
+ int test_case_to_run_count() const;
+
+ // Gets the number of successful tests.
+ int successful_test_count() const;
+
+ // Gets the number of failed tests.
+ int failed_test_count() const;
+
+ // Gets the number of disabled tests.
+ int disabled_test_count() const;
+
+ // Gets the number of all tests.
+ int total_test_count() const;
+
+ // Gets the number of tests that should run.
+ int test_to_run_count() const;
+
+ // Gets the elapsed time, in milliseconds.
+ TimeInMillis elapsed_time() const { return elapsed_time_; }
+
+ // Returns true iff the unit test passed (i.e. all test cases passed).
+ bool Passed() const { return !Failed(); }
+
+ // Returns true iff the unit test failed (i.e. some test case failed
+ // or something outside of all tests failed).
+ bool Failed() const {
+ return failed_test_case_count() > 0 || ad_hoc_test_result()->Failed();
+ }
+
+ // Gets the i-th test case among all the test cases. i can range from 0 to
+ // total_test_case_count() - 1. If i is not in that range, returns NULL.
+ const TestCase* GetTestCase(int i) const {
+ const int index = GetElementOr(test_case_indices_, i, -1);
+    return index < 0 ? NULL : test_cases_[index];
+ }
+
+ // Gets the i-th test case among all the test cases. i can range from 0 to
+ // total_test_case_count() - 1. If i is not in that range, returns NULL.
+ TestCase* GetMutableTestCase(int i) {
+ const int index = GetElementOr(test_case_indices_, i, -1);
+ return index < 0 ? NULL : test_cases_[index];
+ }
+
+ // Provides access to the event listener list.
+ TestEventListeners* listeners() { return &listeners_; }
+
+ // Returns the TestResult for the test that's currently running, or
+ // the TestResult for the ad hoc test if no test is running.
+ TestResult* current_test_result();
+
+ // Returns the TestResult for the ad hoc test.
+ const TestResult* ad_hoc_test_result() const { return &ad_hoc_test_result_; }
+
+ // Sets the OS stack trace getter.
+ //
+ // Does nothing if the input and the current OS stack trace getter
+ // are the same; otherwise, deletes the old getter and makes the
+ // input the current getter.
+ void set_os_stack_trace_getter(OsStackTraceGetterInterface* getter);
+
+ // Returns the current OS stack trace getter if it is not NULL;
+ // otherwise, creates an OsStackTraceGetter, makes it the current
+ // getter, and returns it.
+ OsStackTraceGetterInterface* os_stack_trace_getter();
+
+ // Returns the current OS stack trace as a String.
+ //
+ // The maximum number of stack frames to be included is specified by
+ // the gtest_stack_trace_depth flag. The skip_count parameter
+ // specifies the number of top frames to be skipped, which doesn't
+ // count against the number of frames to be included.
+ //
+ // For example, if Foo() calls Bar(), which in turn calls
+ // CurrentOsStackTraceExceptTop(1), Foo() will be included in the
+ // trace but Bar() and CurrentOsStackTraceExceptTop() won't.
+ String CurrentOsStackTraceExceptTop(int skip_count);
+
+ // Finds and returns a TestCase with the given name. If one doesn't
+ // exist, creates one and returns it.
+ //
+ // Arguments:
+ //
+ // test_case_name: name of the test case
+ // set_up_tc: pointer to the function that sets up the test case
+ // tear_down_tc: pointer to the function that tears down the test case
+ TestCase* GetTestCase(const char* test_case_name,
+ const char* comment,
+ Test::SetUpTestCaseFunc set_up_tc,
+ Test::TearDownTestCaseFunc tear_down_tc);
+
+ // Adds a TestInfo to the unit test.
+ //
+ // Arguments:
+ //
+ // set_up_tc: pointer to the function that sets up the test case
+ // tear_down_tc: pointer to the function that tears down the test case
+ // test_info: the TestInfo object
+ void AddTestInfo(Test::SetUpTestCaseFunc set_up_tc,
+ Test::TearDownTestCaseFunc tear_down_tc,
+ TestInfo * test_info) {
+ // In order to support thread-safe death tests, we need to
+ // remember the original working directory when the test program
+ // was first invoked. We cannot do this in RUN_ALL_TESTS(), as
+ // the user may have changed the current directory before calling
+ // RUN_ALL_TESTS(). Therefore we capture the current directory in
+ // AddTestInfo(), which is called to register a TEST or TEST_F
+ // before main() is reached.
+ if (original_working_dir_.IsEmpty()) {
+ original_working_dir_.Set(FilePath::GetCurrentDir());
+ GTEST_CHECK_(!original_working_dir_.IsEmpty())
+ << "Failed to get the current working directory.";
+ }
+
+ GetTestCase(test_info->test_case_name(),
+ test_info->test_case_comment(),
+ set_up_tc,
+ tear_down_tc)->AddTestInfo(test_info);
+ }
+
+#if GTEST_HAS_PARAM_TEST
+ // Returns ParameterizedTestCaseRegistry object used to keep track of
+ // value-parameterized tests and instantiate and register them.
+ internal::ParameterizedTestCaseRegistry& parameterized_test_registry() {
+ return parameterized_test_registry_;
+ }
+#endif // GTEST_HAS_PARAM_TEST
+
+ // Sets the TestCase object for the test that's currently running.
+ void set_current_test_case(TestCase* a_current_test_case) {
+ current_test_case_ = a_current_test_case;
+ }
+
+ // Sets the TestInfo object for the test that's currently running. If
+ // current_test_info is NULL, the assertion results will be stored in
+ // ad_hoc_test_result_.
+ void set_current_test_info(TestInfo* a_current_test_info) {
+ current_test_info_ = a_current_test_info;
+ }
+
+ // Registers all parameterized tests defined using TEST_P and
+  // INSTANTIATE_TEST_CASE_P, creating regular tests for each test/parameter
+  // combination. This method can be called more than once; it has
+  // guards protecting against registering the tests more than once.
+ // If value-parameterized tests are disabled, RegisterParameterizedTests
+ // is present but does nothing.
+ void RegisterParameterizedTests();
+
+ // Runs all tests in this UnitTest object, prints the result, and
+ // returns 0 if all tests are successful, or 1 otherwise. If any
+  // exception is thrown during a test on Windows, that test is
+  // considered to have failed, but the rest of the tests will still be
+ // run. (We disable exceptions on Linux and Mac OS X, so the issue
+ // doesn't apply there.)
+ int RunAllTests();
+
+ // Clears the results of all tests, including the ad hoc test.
+ void ClearResult() {
+ ForEach(test_cases_, TestCase::ClearTestCaseResult);
+ ad_hoc_test_result_.Clear();
+ }
+
+ enum ReactionToSharding {
+ HONOR_SHARDING_PROTOCOL,
+ IGNORE_SHARDING_PROTOCOL
+ };
+
+ // Matches the full name of each test against the user-specified
+ // filter to decide whether the test should run, then records the
+ // result in each TestCase and TestInfo object.
+ // If shard_tests == HONOR_SHARDING_PROTOCOL, further filters tests
+ // based on sharding variables in the environment.
+ // Returns the number of tests that should run.
+ int FilterTests(ReactionToSharding shard_tests);
+
+ // Prints the names of the tests matching the user-specified filter flag.
+ void ListTestsMatchingFilter();
+
+ const TestCase* current_test_case() const { return current_test_case_; }
+ TestInfo* current_test_info() { return current_test_info_; }
+ const TestInfo* current_test_info() const { return current_test_info_; }
+
+  // Returns the vector of environments that need to be set up/torn down
+ // before/after the tests are run.
+ std::vector<Environment*>& environments() { return environments_; }
+
+ // Getters for the per-thread Google Test trace stack.
+ std::vector<TraceInfo>& gtest_trace_stack() {
+ return *(gtest_trace_stack_.pointer());
+ }
+ const std::vector<TraceInfo>& gtest_trace_stack() const {
+ return gtest_trace_stack_.get();
+ }
+
+#if GTEST_HAS_DEATH_TEST
+ void InitDeathTestSubprocessControlInfo() {
+ internal_run_death_test_flag_.reset(ParseInternalRunDeathTestFlag());
+ }
+ // Returns a pointer to the parsed --gtest_internal_run_death_test
+ // flag, or NULL if that flag was not specified.
+ // This information is useful only in a death test child process.
+ // Must not be called before a call to InitGoogleTest.
+ const InternalRunDeathTestFlag* internal_run_death_test_flag() const {
+ return internal_run_death_test_flag_.get();
+ }
+
+ // Returns a pointer to the current death test factory.
+ internal::DeathTestFactory* death_test_factory() {
+ return death_test_factory_.get();
+ }
+
+ void SuppressTestEventsIfInSubprocess();
+
+ friend class ReplaceDeathTestFactory;
+#endif // GTEST_HAS_DEATH_TEST
+
+ // Initializes the event listener performing XML output as specified by
+ // UnitTestOptions. Must not be called before InitGoogleTest.
+ void ConfigureXmlOutput();
+
+ // Performs initialization dependent upon flag values obtained in
+ // ParseGoogleTestFlagsOnly. Is called from InitGoogleTest after the call to
+ // ParseGoogleTestFlagsOnly. In case a user neglects to call InitGoogleTest
+ // this function is also called from RunAllTests. Since this function can be
+ // called more than once, it has to be idempotent.
+ void PostFlagParsingInit();
+
+ // Gets the random seed used at the start of the current test iteration.
+ int random_seed() const { return random_seed_; }
+
+ // Gets the random number generator.
+ internal::Random* random() { return &random_; }
+
+ // Shuffles all test cases, and the tests within each test case,
+ // making sure that death tests are still run first.
+ void ShuffleTests();
+
+ // Restores the test cases and tests to their order before the first shuffle.
+ void UnshuffleTests();
+
+ private:
+ friend class ::testing::UnitTest;
+
+ // The UnitTest object that owns this implementation object.
+ UnitTest* const parent_;
+
+ // The working directory when the first TEST() or TEST_F() was
+ // executed.
+ internal::FilePath original_working_dir_;
+
+ // The default test part result reporters.
+ DefaultGlobalTestPartResultReporter default_global_test_part_result_reporter_;
+ DefaultPerThreadTestPartResultReporter
+ default_per_thread_test_part_result_reporter_;
+
+ // Points to (but doesn't own) the global test part result reporter.
+ TestPartResultReporterInterface* global_test_part_result_repoter_;
+
+ // Protects read and write access to global_test_part_result_reporter_.
+ internal::Mutex global_test_part_result_reporter_mutex_;
+
+ // Points to (but doesn't own) the per-thread test part result reporter.
+ internal::ThreadLocal<TestPartResultReporterInterface*>
+ per_thread_test_part_result_reporter_;
+
+  // The vector of environments that need to be set up/torn down
+ // before/after the tests are run.
+ std::vector<Environment*> environments_;
+
+ // The vector of TestCases in their original order. It owns the
+ // elements in the vector.
+ std::vector<TestCase*> test_cases_;
+
+ // Provides a level of indirection for the test case list to allow
+ // easy shuffling and restoring the test case order. The i-th
+ // element of this vector is the index of the i-th test case in the
+ // shuffled order.
+ std::vector<int> test_case_indices_;
+
+#if GTEST_HAS_PARAM_TEST
+ // ParameterizedTestRegistry object used to register value-parameterized
+ // tests.
+ internal::ParameterizedTestCaseRegistry parameterized_test_registry_;
+
+ // Indicates whether RegisterParameterizedTests() has been called already.
+ bool parameterized_tests_registered_;
+#endif // GTEST_HAS_PARAM_TEST
+
+ // Index of the last death test case registered. Initially -1.
+ int last_death_test_case_;
+
+ // This points to the TestCase for the currently running test. It
+ // changes as Google Test goes through one test case after another.
+ // When no test is running, this is set to NULL and Google Test
+ // stores assertion results in ad_hoc_test_result_. Initially NULL.
+ TestCase* current_test_case_;
+
+ // This points to the TestInfo for the currently running test. It
+ // changes as Google Test goes through one test after another. When
+ // no test is running, this is set to NULL and Google Test stores
+ // assertion results in ad_hoc_test_result_. Initially NULL.
+ TestInfo* current_test_info_;
+
+ // Normally, a user only writes assertions inside a TEST or TEST_F,
+ // or inside a function called by a TEST or TEST_F. Since Google
+  // Test keeps track of which test is currently running, it can
+ // associate such an assertion with the test it belongs to.
+ //
+ // If an assertion is encountered when no TEST or TEST_F is running,
+ // Google Test attributes the assertion result to an imaginary "ad hoc"
+ // test, and records the result in ad_hoc_test_result_.
+ TestResult ad_hoc_test_result_;
+
+ // The list of event listeners that can be used to track events inside
+ // Google Test.
+ TestEventListeners listeners_;
+
+ // The OS stack trace getter. Will be deleted when the UnitTest
+ // object is destructed. By default, an OsStackTraceGetter is used,
+ // but the user can set this field to use a custom getter if that is
+ // desired.
+ OsStackTraceGetterInterface* os_stack_trace_getter_;
+
+ // True iff PostFlagParsingInit() has been called.
+ bool post_flag_parse_init_performed_;
+
+ // The random number seed used at the beginning of the test run.
+ int random_seed_;
+
+ // Our random number generator.
+ internal::Random random_;
+
+ // How long the test took to run, in milliseconds.
+ TimeInMillis elapsed_time_;
+
+#if GTEST_HAS_DEATH_TEST
+ // The decomposed components of the gtest_internal_run_death_test flag,
+ // parsed when RUN_ALL_TESTS is called.
+ internal::scoped_ptr<InternalRunDeathTestFlag> internal_run_death_test_flag_;
+ internal::scoped_ptr<internal::DeathTestFactory> death_test_factory_;
+#endif // GTEST_HAS_DEATH_TEST
+
+ // A per-thread stack of traces created by the SCOPED_TRACE() macro.
+ internal::ThreadLocal<std::vector<TraceInfo> > gtest_trace_stack_;
+
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(UnitTestImpl);
+}; // class UnitTestImpl
+
+// Convenience function for accessing the global UnitTest
+// implementation object.
+inline UnitTestImpl* GetUnitTestImpl() {
+ return UnitTest::GetInstance()->impl();
+}
+
+// Internal helper functions for implementing the simple regular
+// expression matcher.
+GTEST_API_ bool IsInSet(char ch, const char* str);
+GTEST_API_ bool IsDigit(char ch);
+GTEST_API_ bool IsPunct(char ch);
+GTEST_API_ bool IsRepeat(char ch);
+GTEST_API_ bool IsWhiteSpace(char ch);
+GTEST_API_ bool IsWordChar(char ch);
+GTEST_API_ bool IsValidEscape(char ch);
+GTEST_API_ bool AtomMatchesChar(bool escaped, char pattern, char ch);
+GTEST_API_ bool ValidateRegex(const char* regex);
+GTEST_API_ bool MatchRegexAtHead(const char* regex, const char* str);
+GTEST_API_ bool MatchRepetitionAndRegexAtHead(
+ bool escaped, char ch, char repeat, const char* regex, const char* str);
+GTEST_API_ bool MatchRegexAnywhere(const char* regex, const char* str);
+
+// Parses the command line for Google Test flags, without initializing
+// other parts of Google Test.
+GTEST_API_ void ParseGoogleTestFlagsOnly(int* argc, char** argv);
+GTEST_API_ void ParseGoogleTestFlagsOnly(int* argc, wchar_t** argv);
+
+#if GTEST_HAS_DEATH_TEST
+
+// Returns the message describing the last system error, regardless of the
+// platform.
+String GetLastErrnoDescription();
+
+#if GTEST_OS_WINDOWS
+// Provides leak-safe Windows kernel handle ownership.
+class AutoHandle {
+ public:
+ AutoHandle() : handle_(INVALID_HANDLE_VALUE) {}
+ explicit AutoHandle(HANDLE handle) : handle_(handle) {}
+
+ ~AutoHandle() { Reset(); }
+
+ HANDLE Get() const { return handle_; }
+ void Reset() { Reset(INVALID_HANDLE_VALUE); }
+ void Reset(HANDLE handle) {
+ if (handle != handle_) {
+ if (handle_ != INVALID_HANDLE_VALUE)
+ ::CloseHandle(handle_);
+ handle_ = handle;
+ }
+ }
+
+ private:
+ HANDLE handle_;
+
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(AutoHandle);
+};
+#endif // GTEST_OS_WINDOWS
+
+// Attempts to parse a string into a positive integer pointed to by the
+// number parameter. Returns true if that is possible.
+// GTEST_HAS_DEATH_TEST implies that we have ::std::string, so we can use
+// it here.
+template <typename Integer>
+bool ParseNaturalNumber(const ::std::string& str, Integer* number) {
+ // Fail fast if the given string does not begin with a digit;
+ // this bypasses strtoXXX's "optional leading whitespace and plus
+ // or minus sign" semantics, which are undesirable here.
+ if (str.empty() || !isdigit(str[0])) {
+ return false;
+ }
+ errno = 0;
+
+ char* end;
+ // BiggestConvertible is the largest integer type that system-provided
+ // string-to-number conversion routines can return.
+#if GTEST_OS_WINDOWS && !defined(__GNUC__)
+ // MSVC and C++ Builder define __int64 instead of the standard long long.
+ typedef unsigned __int64 BiggestConvertible;
+ const BiggestConvertible parsed = _strtoui64(str.c_str(), &end, 10);
+#else
+ typedef unsigned long long BiggestConvertible; // NOLINT
+ const BiggestConvertible parsed = strtoull(str.c_str(), &end, 10);
+#endif // GTEST_OS_WINDOWS && !defined(__GNUC__)
+ const bool parse_success = *end == '\0' && errno == 0;
+
+ // TODO(vladl@google.com): Convert this to compile time assertion when it is
+ // available.
+ GTEST_CHECK_(sizeof(Integer) <= sizeof(parsed));
+
+ const Integer result = static_cast<Integer>(parsed);
+ if (parse_success && static_cast<BiggestConvertible>(result) == parsed) {
+ *number = result;
+ return true;
+ }
+ return false;
+}
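+
+// Usage sketch (editorial): ParseNaturalNumber() rejects signs, leading
+// whitespace, trailing junk, and values that overflow the target type.
+//
+//   int port = 0;
+//   bool ok = ParseNaturalNumber(::std::string("8080"), &port);  // ok == true, port == 8080
+//   ok = ParseNaturalNumber(::std::string("-1"), &port);         // ok == false, port unchanged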
+#endif // GTEST_HAS_DEATH_TEST
+
+// TestResult contains some private methods that should be hidden from
+// the Google Test user but are required for testing. This class allows our tests
+// to access them.
+//
+// This class is supplied only for the purpose of testing Google Test's own
+// constructs. Do not use it in user tests, either directly or indirectly.
+class TestResultAccessor {
+ public:
+ static void RecordProperty(TestResult* test_result,
+ const TestProperty& property) {
+ test_result->RecordProperty(property);
+ }
+
+ static void ClearTestPartResults(TestResult* test_result) {
+ test_result->ClearTestPartResults();
+ }
+
+ static const std::vector<testing::TestPartResult>& test_part_results(
+ const TestResult& test_result) {
+ return test_result.test_part_results();
+ }
+};
+
+} // namespace internal
+} // namespace testing
+
+#endif // GTEST_SRC_GTEST_INTERNAL_INL_H_
diff --git a/Source/ThirdParty/gtest/src/gtest-port.cc b/Source/ThirdParty/gtest/src/gtest-port.cc
new file mode 100644
index 000000000..b9504f56a
--- /dev/null
+++ b/Source/ThirdParty/gtest/src/gtest-port.cc
@@ -0,0 +1,711 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include <gtest/internal/gtest-port.h>
+
+#include <limits.h>
+#include <stdlib.h>
+#include <stdio.h>
+
+#if GTEST_OS_WINDOWS_MOBILE
+#include <windows.h> // For TerminateProcess()
+#elif GTEST_OS_WINDOWS
+#include <io.h>
+#include <sys/stat.h>
+#else
+#include <unistd.h>
+#endif // GTEST_OS_WINDOWS_MOBILE
+
+#if GTEST_OS_MAC
+#include <mach/mach_init.h>
+#include <mach/task.h>
+#include <mach/vm_map.h>
+#endif // GTEST_OS_MAC
+
+#include <gtest/gtest-spi.h>
+#include <gtest/gtest-message.h>
+#include <gtest/internal/gtest-string.h>
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+namespace testing {
+namespace internal {
+
+#if defined(_MSC_VER) || defined(__BORLANDC__)
+// MSVC and C++Builder do not provide a definition of STDERR_FILENO.
+const int kStdOutFileno = 1;
+const int kStdErrFileno = 2;
+#else
+const int kStdOutFileno = STDOUT_FILENO;
+const int kStdErrFileno = STDERR_FILENO;
+#endif // _MSC_VER || __BORLANDC__
+
+#if GTEST_OS_MAC
+
+// Returns the number of threads running in the process, or 0 to indicate that
+// we cannot detect it.
+size_t GetThreadCount() {
+ const task_t task = mach_task_self();
+ mach_msg_type_number_t thread_count;
+ thread_act_array_t thread_list;
+ const kern_return_t status = task_threads(task, &thread_list, &thread_count);
+ if (status == KERN_SUCCESS) {
+ // task_threads allocates resources in thread_list and we need to free them
+ // to avoid leaks.
+ vm_deallocate(task,
+ reinterpret_cast<vm_address_t>(thread_list),
+ sizeof(thread_t) * thread_count);
+ return static_cast<size_t>(thread_count);
+ } else {
+ return 0;
+ }
+}
+
+#else
+
+size_t GetThreadCount() {
+ // There's no portable way to detect the number of threads, so we just
+ // return 0 to indicate that we cannot detect it.
+ return 0;
+}
+
+#endif // GTEST_OS_MAC
+
+#if GTEST_USES_POSIX_RE
+
+// Implements RE. Currently only needed for death tests.
+
+RE::~RE() {
+ if (is_valid_) {
+ // regfree'ing an invalid regex might crash because the content
+ // of the regex is undefined. Since the regex's are essentially
+ // the same, one cannot be valid (or invalid) without the other
+ // being so too.
+ regfree(&partial_regex_);
+ regfree(&full_regex_);
+ }
+ free(const_cast<char*>(pattern_));
+}
+
+// Returns true iff regular expression re matches the entire str.
+bool RE::FullMatch(const char* str, const RE& re) {
+ if (!re.is_valid_) return false;
+
+ regmatch_t match;
+ return regexec(&re.full_regex_, str, 1, &match, 0) == 0;
+}
+
+// Returns true iff regular expression re matches a substring of str
+// (including str itself).
+bool RE::PartialMatch(const char* str, const RE& re) {
+ if (!re.is_valid_) return false;
+
+ regmatch_t match;
+ return regexec(&re.partial_regex_, str, 1, &match, 0) == 0;
+}
+
+// Initializes an RE from its string representation.
+void RE::Init(const char* regex) {
+ pattern_ = posix::StrDup(regex);
+
+ // Reserves enough bytes to hold the regular expression used for a
+ // full match.
+ const size_t full_regex_len = strlen(regex) + 10;
+ char* const full_pattern = new char[full_regex_len];
+
+ snprintf(full_pattern, full_regex_len, "^(%s)$", regex);
+ is_valid_ = regcomp(&full_regex_, full_pattern, REG_EXTENDED) == 0;
+ // We want to call regcomp(&partial_regex_, ...) even if the
+ // previous expression returns false. Otherwise partial_regex_ may
+  // not be properly initialized and may cause trouble when it's
+ // freed.
+ //
+  // Some implementations of POSIX regex (e.g. on at least some
+  // versions of Cygwin) don't accept the empty string as a valid
+ // regex. We change it to an equivalent form "()" to be safe.
+ if (is_valid_) {
+ const char* const partial_regex = (*regex == '\0') ? "()" : regex;
+ is_valid_ = regcomp(&partial_regex_, partial_regex, REG_EXTENDED) == 0;
+ }
+ EXPECT_TRUE(is_valid_)
+ << "Regular expression \"" << regex
+ << "\" is not a valid POSIX Extended regular expression.";
+
+ delete[] full_pattern;
+}
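+
+// Editorial sketch of the resulting behavior (not upstream documentation):
+//
+//   const RE re("a.*z");
+//   RE::FullMatch("abcz", re);      // true  - the whole string matches
+//   RE::FullMatch("xabczx", re);    // false - only a substring matches
+//   RE::PartialMatch("xabczx", re); // true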
+
+#elif GTEST_USES_SIMPLE_RE
+
+// Returns true iff ch appears anywhere in str (excluding the
+// terminating '\0' character).
+bool IsInSet(char ch, const char* str) {
+ return ch != '\0' && strchr(str, ch) != NULL;
+}
+
+// Returns true iff ch belongs to the given classification. Unlike
+// similar functions in <ctype.h>, these aren't affected by the
+// current locale.
+bool IsDigit(char ch) { return '0' <= ch && ch <= '9'; }
+bool IsPunct(char ch) {
+ return IsInSet(ch, "^-!\"#$%&'()*+,./:;<=>?@[\\]_`{|}~");
+}
+bool IsRepeat(char ch) { return IsInSet(ch, "?*+"); }
+bool IsWhiteSpace(char ch) { return IsInSet(ch, " \f\n\r\t\v"); }
+bool IsWordChar(char ch) {
+ return ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') ||
+ ('0' <= ch && ch <= '9') || ch == '_';
+}
+
+// Returns true iff "\\c" is a supported escape sequence.
+bool IsValidEscape(char c) {
+ return (IsPunct(c) || IsInSet(c, "dDfnrsStvwW"));
+}
+
+// Returns true iff the given atom (specified by escaped and pattern)
+// matches ch. The result is undefined if the atom is invalid.
+bool AtomMatchesChar(bool escaped, char pattern_char, char ch) {
+ if (escaped) { // "\\p" where p is pattern_char.
+ switch (pattern_char) {
+ case 'd': return IsDigit(ch);
+ case 'D': return !IsDigit(ch);
+ case 'f': return ch == '\f';
+ case 'n': return ch == '\n';
+ case 'r': return ch == '\r';
+ case 's': return IsWhiteSpace(ch);
+ case 'S': return !IsWhiteSpace(ch);
+ case 't': return ch == '\t';
+ case 'v': return ch == '\v';
+ case 'w': return IsWordChar(ch);
+ case 'W': return !IsWordChar(ch);
+ }
+ return IsPunct(pattern_char) && pattern_char == ch;
+ }
+
+ return (pattern_char == '.' && ch != '\n') || pattern_char == ch;
+}
+
+// Helper function used by ValidateRegex() to format error messages.
+String FormatRegexSyntaxError(const char* regex, int index) {
+ return (Message() << "Syntax error at index " << index
+ << " in simple regular expression \"" << regex << "\": ").GetString();
+}
+
+// Generates non-fatal failures and returns false if regex is invalid;
+// otherwise returns true.
+bool ValidateRegex(const char* regex) {
+ if (regex == NULL) {
+ // TODO(wan@google.com): fix the source file location in the
+ // assertion failures to match where the regex is used in user
+ // code.
+ ADD_FAILURE() << "NULL is not a valid simple regular expression.";
+ return false;
+ }
+
+ bool is_valid = true;
+
+ // True iff ?, *, or + can follow the previous atom.
+ bool prev_repeatable = false;
+ for (int i = 0; regex[i]; i++) {
+ if (regex[i] == '\\') { // An escape sequence
+ i++;
+ if (regex[i] == '\0') {
+ ADD_FAILURE() << FormatRegexSyntaxError(regex, i - 1)
+ << "'\\' cannot appear at the end.";
+ return false;
+ }
+
+ if (!IsValidEscape(regex[i])) {
+ ADD_FAILURE() << FormatRegexSyntaxError(regex, i - 1)
+ << "invalid escape sequence \"\\" << regex[i] << "\".";
+ is_valid = false;
+ }
+ prev_repeatable = true;
+ } else { // Not an escape sequence.
+ const char ch = regex[i];
+
+ if (ch == '^' && i > 0) {
+ ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
+ << "'^' can only appear at the beginning.";
+ is_valid = false;
+ } else if (ch == '$' && regex[i + 1] != '\0') {
+ ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
+ << "'$' can only appear at the end.";
+ is_valid = false;
+ } else if (IsInSet(ch, "()[]{}|")) {
+ ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
+ << "'" << ch << "' is unsupported.";
+ is_valid = false;
+ } else if (IsRepeat(ch) && !prev_repeatable) {
+ ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
+ << "'" << ch << "' can only follow a repeatable token.";
+ is_valid = false;
+ }
+
+ prev_repeatable = !IsInSet(ch, "^$?*+");
+ }
+ }
+
+ return is_valid;
+}
+
+// Matches a repeated regex atom followed by a valid simple regular
+// expression. The regex atom is defined as c if escaped is false,
+// or \c otherwise. repeat is the repetition meta character (?, *,
+// or +). The behavior is undefined if str contains too many
+// characters to be indexable by size_t, in which case the test will
+// probably time out anyway. We are fine with this limitation as
+// std::string has it too.
+bool MatchRepetitionAndRegexAtHead(
+ bool escaped, char c, char repeat, const char* regex,
+ const char* str) {
+ const size_t min_count = (repeat == '+') ? 1 : 0;
+ const size_t max_count = (repeat == '?') ? 1 :
+ static_cast<size_t>(-1) - 1;
+ // We cannot call numeric_limits::max() as it conflicts with the
+ // max() macro on Windows.
+
+ for (size_t i = 0; i <= max_count; ++i) {
+ // We know that the atom matches each of the first i characters in str.
+ if (i >= min_count && MatchRegexAtHead(regex, str + i)) {
+ // We have enough matches at the head, and the tail matches too.
+ // Since we only care about *whether* the pattern matches str
+ // (as opposed to *how* it matches), there is no need to find a
+ // greedy match.
+ return true;
+ }
+ if (str[i] == '\0' || !AtomMatchesChar(escaped, c, str[i]))
+ return false;
+ }
+ return false;
+}
+
+// Returns true iff regex matches a prefix of str. regex must be a
+// valid simple regular expression and not start with "^", or the
+// result is undefined.
+bool MatchRegexAtHead(const char* regex, const char* str) {
+ if (*regex == '\0') // An empty regex matches a prefix of anything.
+ return true;
+
+ // "$" only matches the end of a string. Note that regex being
+ // valid guarantees that there's nothing after "$" in it.
+ if (*regex == '$')
+ return *str == '\0';
+
+ // Is the first thing in regex an escape sequence?
+ const bool escaped = *regex == '\\';
+ if (escaped)
+ ++regex;
+ if (IsRepeat(regex[1])) {
+ // MatchRepetitionAndRegexAtHead() calls MatchRegexAtHead(), so
+ // here's an indirect recursion. It terminates as the regex gets
+ // shorter in each recursion.
+ return MatchRepetitionAndRegexAtHead(
+ escaped, regex[0], regex[1], regex + 2, str);
+ } else {
+ // regex isn't empty, isn't "$", and doesn't start with a
+ // repetition. We match the first atom of regex with the first
+ // character of str and recurse.
+ return (*str != '\0') && AtomMatchesChar(escaped, *regex, *str) &&
+ MatchRegexAtHead(regex + 1, str + 1);
+ }
+}
+
+// Returns true iff regex matches any substring of str. regex must be
+// a valid simple regular expression, or the result is undefined.
+//
+// The algorithm is recursive, but the recursion depth doesn't exceed
+// the regex length, so we won't need to worry about running out of
+// stack space normally. In rare cases the time complexity can be
+// exponential with respect to the regex length + the string length,
+// but usually it's much faster (often close to linear).
+bool MatchRegexAnywhere(const char* regex, const char* str) {
+ if (regex == NULL || str == NULL)
+ return false;
+
+ if (*regex == '^')
+ return MatchRegexAtHead(regex + 1, str);
+
+ // A successful match can be anywhere in str.
+ do {
+ if (MatchRegexAtHead(regex, str))
+ return true;
+ } while (*str++ != '\0');
+ return false;
+}
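+
+// Editorial sketch: the simple matcher above supports '.', '^', '$',
+// character escapes such as \d and \w, and the repeats '?', '*', '+'.
+//
+//   MatchRegexAnywhere("\\d+", "abc123");    // true  - digits appear somewhere
+//   MatchRegexAnywhere("^\\d+$", "abc123");  // false - anchored to the whole string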
+
+// Implements the RE class.
+
+RE::~RE() {
+ free(const_cast<char*>(pattern_));
+ free(const_cast<char*>(full_pattern_));
+}
+
+// Returns true iff regular expression re matches the entire str.
+bool RE::FullMatch(const char* str, const RE& re) {
+ return re.is_valid_ && MatchRegexAnywhere(re.full_pattern_, str);
+}
+
+// Returns true iff regular expression re matches a substring of str
+// (including str itself).
+bool RE::PartialMatch(const char* str, const RE& re) {
+ return re.is_valid_ && MatchRegexAnywhere(re.pattern_, str);
+}
+
+// Initializes an RE from its string representation.
+void RE::Init(const char* regex) {
+ pattern_ = full_pattern_ = NULL;
+ if (regex != NULL) {
+ pattern_ = posix::StrDup(regex);
+ }
+
+ is_valid_ = ValidateRegex(regex);
+ if (!is_valid_) {
+ // No need to calculate the full pattern when the regex is invalid.
+ return;
+ }
+
+ const size_t len = strlen(regex);
+ // Reserves enough bytes to hold the regular expression used for a
+ // full match: we need space to prepend a '^', append a '$', and
+ // terminate the string with '\0'.
+ char* buffer = static_cast<char*>(malloc(len + 3));
+ full_pattern_ = buffer;
+
+ if (*regex != '^')
+ *buffer++ = '^'; // Makes sure full_pattern_ starts with '^'.
+
+ // We don't use snprintf or strncpy, as they trigger a warning when
+ // compiled with VC++ 8.0.
+ memcpy(buffer, regex, len);
+ buffer += len;
+
+ if (len == 0 || regex[len - 1] != '$')
+ *buffer++ = '$'; // Makes sure full_pattern_ ends with '$'.
+
+ *buffer = '\0';
+}
+
+#endif // GTEST_USES_POSIX_RE
+
+
+GTestLog::GTestLog(GTestLogSeverity severity, const char* file, int line)
+ : severity_(severity) {
+ const char* const marker =
+ severity == GTEST_INFO ? "[ INFO ]" :
+ severity == GTEST_WARNING ? "[WARNING]" :
+ severity == GTEST_ERROR ? "[ ERROR ]" : "[ FATAL ]";
+ GetStream() << ::std::endl << marker << " "
+ << FormatFileLocation(file, line).c_str() << ": ";
+}
+
+// Flushes the buffers and, if severity is GTEST_FATAL, aborts the program.
+GTestLog::~GTestLog() {
+ GetStream() << ::std::endl;
+ if (severity_ == GTEST_FATAL) {
+ fflush(stderr);
+ posix::Abort();
+ }
+}
+// Disable Microsoft deprecation warnings for POSIX functions called from
+// this class (creat, dup, dup2, and close)
+#ifdef _MSC_VER
+#pragma warning(push)
+#pragma warning(disable: 4996)
+#endif // _MSC_VER
+
+#if GTEST_HAS_STREAM_REDIRECTION_
+
+// Object that captures an output stream (stdout/stderr).
+class CapturedStream {
+ public:
+ // The ctor redirects the stream to a temporary file.
+  explicit CapturedStream(int fd) : fd_(fd), uncaptured_fd_(dup(fd)) {
+#if GTEST_OS_WINDOWS
+ char temp_dir_path[MAX_PATH + 1] = { '\0' }; // NOLINT
+ char temp_file_path[MAX_PATH + 1] = { '\0' }; // NOLINT
+
+ ::GetTempPathA(sizeof(temp_dir_path), temp_dir_path);
+ const UINT success = ::GetTempFileNameA(temp_dir_path,
+ "gtest_redir",
+ 0, // Generate unique file name.
+ temp_file_path);
+ GTEST_CHECK_(success != 0)
+ << "Unable to create a temporary file in " << temp_dir_path;
+ const int captured_fd = creat(temp_file_path, _S_IREAD | _S_IWRITE);
+ GTEST_CHECK_(captured_fd != -1) << "Unable to open temporary file "
+ << temp_file_path;
+ filename_ = temp_file_path;
+#else
+ // There's no guarantee that a test has write access to the
+ // current directory, so we create the temporary file in the /tmp
+ // directory instead.
+ char name_template[] = "/tmp/captured_stream.XXXXXX";
+ const int captured_fd = mkstemp(name_template);
+ filename_ = name_template;
+#endif // GTEST_OS_WINDOWS
+ fflush(NULL);
+ dup2(captured_fd, fd_);
+ close(captured_fd);
+ }
+
+ ~CapturedStream() {
+ remove(filename_.c_str());
+ }
+
+ String GetCapturedString() {
+ if (uncaptured_fd_ != -1) {
+ // Restores the original stream.
+ fflush(NULL);
+ dup2(uncaptured_fd_, fd_);
+ close(uncaptured_fd_);
+ uncaptured_fd_ = -1;
+ }
+
+ FILE* const file = posix::FOpen(filename_.c_str(), "r");
+ const String content = ReadEntireFile(file);
+ posix::FClose(file);
+ return content;
+ }
+
+ private:
+ // Reads the entire content of a file as a String.
+ static String ReadEntireFile(FILE* file);
+
+ // Returns the size (in bytes) of a file.
+ static size_t GetFileSize(FILE* file);
+
+ const int fd_; // A stream to capture.
+ int uncaptured_fd_;
+  // Name of the temporary file holding the captured output.
+ ::std::string filename_;
+
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(CapturedStream);
+};
+
+// Returns the size (in bytes) of a file.
+size_t CapturedStream::GetFileSize(FILE* file) {
+ fseek(file, 0, SEEK_END);
+ return static_cast<size_t>(ftell(file));
+}
+
+// Reads the entire content of a file as a string.
+String CapturedStream::ReadEntireFile(FILE* file) {
+ const size_t file_size = GetFileSize(file);
+ char* const buffer = new char[file_size];
+
+ size_t bytes_last_read = 0; // # of bytes read in the last fread()
+ size_t bytes_read = 0; // # of bytes read so far
+
+ fseek(file, 0, SEEK_SET);
+
+ // Keeps reading the file until we cannot read further or the
+ // pre-determined file size is reached.
+ do {
+ bytes_last_read = fread(buffer+bytes_read, 1, file_size-bytes_read, file);
+ bytes_read += bytes_last_read;
+ } while (bytes_last_read > 0 && bytes_read < file_size);
+
+ const String content(buffer, bytes_read);
+ delete[] buffer;
+
+ return content;
+}
+
+#ifdef _MSC_VER
+#pragma warning(pop)
+#endif // _MSC_VER
+
+static CapturedStream* g_captured_stderr = NULL;
+static CapturedStream* g_captured_stdout = NULL;
+
+// Starts capturing an output stream (stdout/stderr).
+void CaptureStream(int fd, const char* stream_name, CapturedStream** stream) {
+ if (*stream != NULL) {
+ GTEST_LOG_(FATAL) << "Only one " << stream_name
+ << " capturer can exist at a time.";
+ }
+ *stream = new CapturedStream(fd);
+}
+
+// Stops capturing the output stream and returns the captured string.
+String GetCapturedStream(CapturedStream** captured_stream) {
+ const String content = (*captured_stream)->GetCapturedString();
+
+ delete *captured_stream;
+ *captured_stream = NULL;
+
+ return content;
+}
+
+// Starts capturing stdout.
+void CaptureStdout() {
+ CaptureStream(kStdOutFileno, "stdout", &g_captured_stdout);
+}
+
+// Starts capturing stderr.
+void CaptureStderr() {
+ CaptureStream(kStdErrFileno, "stderr", &g_captured_stderr);
+}
+
+// Stops capturing stdout and returns the captured string.
+String GetCapturedStdout() { return GetCapturedStream(&g_captured_stdout); }
+
+// Stops capturing stderr and returns the captured string.
+String GetCapturedStderr() { return GetCapturedStream(&g_captured_stderr); }
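+
+// Typical use of the capture helpers (editorial sketch, not upstream
+// documentation):
+//
+//   CaptureStdout();
+//   printf("hello\n");
+//   const String output = GetCapturedStdout();  // output contains "hello\n"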
+
+#endif // GTEST_HAS_STREAM_REDIRECTION_
+
+#if GTEST_HAS_DEATH_TEST
+
+// A copy of all command line arguments. Set by InitGoogleTest().
+::std::vector<String> g_argvs;
+
+// Returns the command line as a vector of strings.
+const ::std::vector<String>& GetArgvs() { return g_argvs; }
+
+#endif // GTEST_HAS_DEATH_TEST
+
+#if GTEST_OS_WINDOWS_MOBILE
+namespace posix {
+void Abort() {
+ DebugBreak();
+ TerminateProcess(GetCurrentProcess(), 1);
+}
+} // namespace posix
+#endif // GTEST_OS_WINDOWS_MOBILE
+
+// Returns the name of the environment variable corresponding to the
+// given flag. For example, FlagToEnvVar("foo") will return
+// "GTEST_FOO" in the open-source version.
+static String FlagToEnvVar(const char* flag) {
+ const String full_flag =
+ (Message() << GTEST_FLAG_PREFIX_ << flag).GetString();
+
+ Message env_var;
+ for (size_t i = 0; i != full_flag.length(); i++) {
+ env_var << static_cast<char>(toupper(full_flag.c_str()[i]));
+ }
+
+ return env_var.GetString();
+}
+
+// Parses 'str' for a 32-bit signed integer. If successful, writes
+// the result to *value and returns true; otherwise leaves *value
+// unchanged and returns false.
+bool ParseInt32(const Message& src_text, const char* str, Int32* value) {
+ // Parses the environment variable as a decimal integer.
+ char* end = NULL;
+ const long long_value = strtol(str, &end, 10); // NOLINT
+
+ // Has strtol() consumed all characters in the string?
+ if (*end != '\0') {
+ // No - an invalid character was encountered.
+ Message msg;
+ msg << "WARNING: " << src_text
+ << " is expected to be a 32-bit integer, but actually"
+ << " has value \"" << str << "\".\n";
+ printf("%s", msg.GetString().c_str());
+ fflush(stdout);
+ return false;
+ }
+
+ // Is the parsed value in the range of an Int32?
+ const Int32 result = static_cast<Int32>(long_value);
+ if (long_value == LONG_MAX || long_value == LONG_MIN ||
+ // The parsed value overflows as a long. (strtol() returns
+ // LONG_MAX or LONG_MIN when the input overflows.)
+ result != long_value
+ // The parsed value overflows as an Int32.
+ ) {
+ Message msg;
+ msg << "WARNING: " << src_text
+ << " is expected to be a 32-bit integer, but actually"
+ << " has value " << str << ", which overflows.\n";
+ printf("%s", msg.GetString().c_str());
+ fflush(stdout);
+ return false;
+ }
+
+ *value = result;
+ return true;
+}
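+
+// For illustration (inputs are hypothetical), the parse behaves like this:
+//
+//   Int32 value = 0;
+//   ParseInt32(Message() << "flag", "123", &value);         // true, value == 123
+//   ParseInt32(Message() << "flag", "12x", &value);         // false, value unchanged
+//   ParseInt32(Message() << "flag", "9999999999", &value);  // false (overflows Int32)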
+
+// Reads and returns the Boolean environment variable corresponding to
+// the given flag; if it's not set, returns default_value.
+//
+// The value is considered true iff it's not "0".
+bool BoolFromGTestEnv(const char* flag, bool default_value) {
+ const String env_var = FlagToEnvVar(flag);
+ const char* const string_value = posix::GetEnv(env_var.c_str());
+ return string_value == NULL ?
+ default_value : strcmp(string_value, "0") != 0;
+}
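+
+// As a sketch: with GTEST_BREAK_ON_FAILURE=1 in the environment,
+// BoolFromGTestEnv("break_on_failure", false) returns true; with the
+// variable set to "0" it returns false; when the variable is unset,
+// the supplied default is returned.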
+
+// Reads and returns a 32-bit integer stored in the environment
+// variable corresponding to the given flag; if it isn't set or
+// doesn't represent a valid 32-bit integer, returns default_value.
+Int32 Int32FromGTestEnv(const char* flag, Int32 default_value) {
+ const String env_var = FlagToEnvVar(flag);
+ const char* const string_value = posix::GetEnv(env_var.c_str());
+ if (string_value == NULL) {
+ // The environment variable is not set.
+ return default_value;
+ }
+
+ Int32 result = default_value;
+ if (!ParseInt32(Message() << "Environment variable " << env_var,
+ string_value, &result)) {
+ printf("The default value %s is used.\n",
+ (Message() << default_value).GetString().c_str());
+ fflush(stdout);
+ return default_value;
+ }
+
+ return result;
+}
+
+// Reads and returns the string environment variable corresponding to
+// the given flag; if it's not set, returns default_value.
+const char* StringFromGTestEnv(const char* flag, const char* default_value) {
+ const String env_var = FlagToEnvVar(flag);
+ const char* const value = posix::GetEnv(env_var.c_str());
+ return value == NULL ? default_value : value;
+}
+
+} // namespace internal
+} // namespace testing
diff --git a/Source/ThirdParty/gtest/src/gtest-test-part.cc b/Source/ThirdParty/gtest/src/gtest-test-part.cc
new file mode 100644
index 000000000..5d183a446
--- /dev/null
+++ b/Source/ThirdParty/gtest/src/gtest-test-part.cc
@@ -0,0 +1,110 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: mheule@google.com (Markus Heule)
+//
+// The Google C++ Testing Framework (Google Test)
+
+#include <gtest/gtest-test-part.h>
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+namespace testing {
+
+using internal::GetUnitTestImpl;
+
+// Gets the summary of the failure message by omitting the stack trace
+// in it.
+internal::String TestPartResult::ExtractSummary(const char* message) {
+ const char* const stack_trace = strstr(message, internal::kStackTraceMarker);
+ return stack_trace == NULL ? internal::String(message) :
+ internal::String(message, stack_trace - message);
+}
+
+// Prints a TestPartResult object.
+std::ostream& operator<<(std::ostream& os, const TestPartResult& result) {
+ return os
+ << result.file_name() << ":" << result.line_number() << ": "
+ << (result.type() == TestPartResult::kSuccess ? "Success" :
+ result.type() == TestPartResult::kFatalFailure ? "Fatal failure" :
+ "Non-fatal failure") << ":\n"
+ << result.message() << std::endl;
+}
+
+// Appends a TestPartResult to the array.
+void TestPartResultArray::Append(const TestPartResult& result) {
+ array_.push_back(result);
+}
+
+// Returns the TestPartResult at the given index (0-based).
+const TestPartResult& TestPartResultArray::GetTestPartResult(int index) const {
+ if (index < 0 || index >= size()) {
+ printf("\nInvalid index (%d) into TestPartResultArray.\n", index);
+ internal::posix::Abort();
+ }
+
+ return array_[index];
+}
+
+// Returns the number of TestPartResult objects in the array.
+int TestPartResultArray::size() const {
+ return static_cast<int>(array_.size());
+}
+
+namespace internal {
+
+HasNewFatalFailureHelper::HasNewFatalFailureHelper()
+ : has_new_fatal_failure_(false),
+ original_reporter_(GetUnitTestImpl()->
+ GetTestPartResultReporterForCurrentThread()) {
+ GetUnitTestImpl()->SetTestPartResultReporterForCurrentThread(this);
+}
+
+HasNewFatalFailureHelper::~HasNewFatalFailureHelper() {
+ GetUnitTestImpl()->SetTestPartResultReporterForCurrentThread(
+ original_reporter_);
+}
+
+void HasNewFatalFailureHelper::ReportTestPartResult(
+ const TestPartResult& result) {
+ if (result.fatally_failed())
+ has_new_fatal_failure_ = true;
+ original_reporter_->ReportTestPartResult(result);
+}
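+
+// A rough usage sketch, mirroring how the *_NO_FATAL_FAILURE macros use
+// this helper (the guarded statement is hypothetical):
+//
+//   HasNewFatalFailureHelper checker;
+//   DoSomethingThatMayFailFatally();
+//   if (checker.has_new_fatal_failure()) {
+//     // A new fatal failure occurred while the helper was installed.
+//   }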
+
+} // namespace internal
+
+} // namespace testing
diff --git a/Source/ThirdParty/gtest/src/gtest-typed-test.cc b/Source/ThirdParty/gtest/src/gtest-typed-test.cc
new file mode 100644
index 000000000..3cc4b5de2
--- /dev/null
+++ b/Source/ThirdParty/gtest/src/gtest-typed-test.cc
@@ -0,0 +1,110 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include <gtest/gtest-typed-test.h>
+#include <gtest/gtest.h>
+
+namespace testing {
+namespace internal {
+
+#if GTEST_HAS_TYPED_TEST_P
+
+// Skips to the first non-space char in str. Returns an empty string if str
+// contains only whitespace characters.
+static const char* SkipSpaces(const char* str) {
+ while (isspace(*str))
+ str++;
+ return str;
+}
+
+// Verifies that registered_tests match the test names in
+// defined_test_names_; returns registered_tests if successful, or
+// aborts the program otherwise.
+const char* TypedTestCasePState::VerifyRegisteredTestNames(
+ const char* file, int line, const char* registered_tests) {
+ typedef ::std::set<const char*>::const_iterator DefinedTestIter;
+ registered_ = true;
+
+ // Skip initial whitespace in registered_tests since some
+  // preprocessors prefix stringized literals with whitespace.
+ registered_tests = SkipSpaces(registered_tests);
+
+ Message errors;
+ ::std::set<String> tests;
+ for (const char* names = registered_tests; names != NULL;
+ names = SkipComma(names)) {
+ const String name = GetPrefixUntilComma(names);
+ if (tests.count(name) != 0) {
+ errors << "Test " << name << " is listed more than once.\n";
+ continue;
+ }
+
+ bool found = false;
+ for (DefinedTestIter it = defined_test_names_.begin();
+ it != defined_test_names_.end();
+ ++it) {
+ if (name == *it) {
+ found = true;
+ break;
+ }
+ }
+
+ if (found) {
+ tests.insert(name);
+ } else {
+ errors << "No test named " << name
+ << " can be found in this test case.\n";
+ }
+ }
+
+ for (DefinedTestIter it = defined_test_names_.begin();
+ it != defined_test_names_.end();
+ ++it) {
+ if (tests.count(*it) == 0) {
+ errors << "You forgot to list test " << *it << ".\n";
+ }
+ }
+
+ const String& errors_str = errors.GetString();
+ if (errors_str != "") {
+ fprintf(stderr, "%s %s", FormatFileLocation(file, line).c_str(),
+ errors_str.c_str());
+ fflush(stderr);
+ posix::Abort();
+ }
+
+ return registered_tests;
+}
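+
+// For illustration, this is the check behind a registration such as
+// (names here are hypothetical):
+//
+//   REGISTER_TYPED_TEST_CASE_P(MyFixture, TestA, TestB);
+//
+// Listing a test twice, or omitting one defined with TYPED_TEST_P,
+// produces the errors accumulated above and aborts the program.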
+
+#endif // GTEST_HAS_TYPED_TEST_P
+
+} // namespace internal
+} // namespace testing
diff --git a/Source/ThirdParty/gtest/src/gtest.cc b/Source/ThirdParty/gtest/src/gtest.cc
new file mode 100644
index 000000000..5b871c0ad
--- /dev/null
+++ b/Source/ThirdParty/gtest/src/gtest.cc
@@ -0,0 +1,4704 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+//
+// The Google C++ Testing Framework (Google Test)
+
+#include <gtest/gtest.h>
+#include <gtest/gtest-spi.h>
+
+#include <ctype.h>
+#include <math.h>
+#include <stdarg.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <wchar.h>
+#include <wctype.h>
+
+#include <algorithm>
+#include <ostream>
+#include <sstream>
+#include <vector>
+
+#if GTEST_OS_LINUX
+
+// TODO(kenton@google.com): Use autoconf to detect availability of
+// gettimeofday().
+#define GTEST_HAS_GETTIMEOFDAY_ 1
+
+#include <fcntl.h>
+#include <limits.h>
+#include <sched.h>
+// Declares vsnprintf(). This header is not available on Windows.
+#include <strings.h>
+#include <sys/mman.h>
+#include <sys/time.h>
+#include <unistd.h>
+#include <string>
+#include <vector>
+
+#elif GTEST_OS_SYMBIAN
+#define GTEST_HAS_GETTIMEOFDAY_ 1
+#include <sys/time.h> // NOLINT
+
+#elif GTEST_OS_ZOS
+#define GTEST_HAS_GETTIMEOFDAY_ 1
+#include <sys/time.h> // NOLINT
+
+// On z/OS we additionally need strings.h for strcasecmp.
+#include <strings.h> // NOLINT
+
+#elif GTEST_OS_WINDOWS_MOBILE // We are on Windows CE.
+
+#include <windows.h> // NOLINT
+
+#elif GTEST_OS_WINDOWS // We are on Windows proper.
+
+#include <io.h> // NOLINT
+#include <sys/timeb.h> // NOLINT
+#include <sys/types.h> // NOLINT
+#include <sys/stat.h> // NOLINT
+
+#if GTEST_OS_WINDOWS_MINGW
+// MinGW has gettimeofday() but not _ftime64().
+// TODO(kenton@google.com): Use autoconf to detect availability of
+// gettimeofday().
+// TODO(kenton@google.com): There are other ways to get the time on
+// Windows, like GetTickCount() or GetSystemTimeAsFileTime(). MinGW
+// supports these; consider using them instead.
+#define GTEST_HAS_GETTIMEOFDAY_ 1
+#include <sys/time.h> // NOLINT
+#endif // GTEST_OS_WINDOWS_MINGW
+
+// cpplint thinks that the header is already included, so we want to
+// silence it.
+#include <windows.h> // NOLINT
+
+#else
+
+// Assume other platforms have gettimeofday().
+// TODO(kenton@google.com): Use autoconf to detect availability of
+// gettimeofday().
+#define GTEST_HAS_GETTIMEOFDAY_ 1
+
+// cpplint thinks that the header is already included, so we want to
+// silence it.
+#include <sys/time.h> // NOLINT
+#include <unistd.h> // NOLINT
+
+#endif // GTEST_OS_LINUX
+
+#if GTEST_HAS_EXCEPTIONS
+#include <stdexcept>
+#endif
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+#if GTEST_OS_WINDOWS
+#define vsnprintf _vsnprintf
+#endif // GTEST_OS_WINDOWS
+
+namespace testing {
+
+using internal::CountIf;
+using internal::ForEach;
+using internal::GetElementOr;
+using internal::Shuffle;
+
+// Constants.
+
+// A test whose test case name or test name matches this filter is
+// disabled and not run.
+static const char kDisableTestFilter[] = "DISABLED_*:*/DISABLED_*";
+
+// A test case whose name matches this filter is considered a death
+// test case and will be run before test cases whose name doesn't
+// match this filter.
+static const char kDeathTestCaseFilter[] = "*DeathTest:*DeathTest/*";
+
+// A test filter that matches everything.
+static const char kUniversalFilter[] = "*";
+
+// The default output file for XML output.
+static const char kDefaultOutputFile[] = "test_detail.xml";
+
+// The environment variable name for the test shard index.
+static const char kTestShardIndex[] = "GTEST_SHARD_INDEX";
+// The environment variable name for the total number of test shards.
+static const char kTestTotalShards[] = "GTEST_TOTAL_SHARDS";
+// The environment variable name for the test shard status file.
+static const char kTestShardStatusFile[] = "GTEST_SHARD_STATUS_FILE";
+
+namespace internal {
+
+// The text used in failure messages to indicate the start of the
+// stack trace.
+const char kStackTraceMarker[] = "\nStack trace:\n";
+
+// g_help_flag is true iff the --help flag or an equivalent form is
+// specified on the command line.
+bool g_help_flag = false;
+
+} // namespace internal
+
+GTEST_DEFINE_bool_(
+ also_run_disabled_tests,
+ internal::BoolFromGTestEnv("also_run_disabled_tests", false),
+ "Run disabled tests too, in addition to the tests normally being run.");
+
+GTEST_DEFINE_bool_(
+ break_on_failure,
+ internal::BoolFromGTestEnv("break_on_failure", false),
+ "True iff a failed assertion should be a debugger break-point.");
+
+GTEST_DEFINE_bool_(
+ catch_exceptions,
+ internal::BoolFromGTestEnv("catch_exceptions", false),
+ "True iff " GTEST_NAME_
+ " should catch exceptions and treat them as test failures.");
+
+GTEST_DEFINE_string_(
+ color,
+ internal::StringFromGTestEnv("color", "auto"),
+ "Whether to use colors in the output. Valid values: yes, no, "
+ "and auto. 'auto' means to use colors if the output is "
+ "being sent to a terminal and the TERM environment variable "
+ "is set to xterm, xterm-color, xterm-256color, linux or cygwin.");
+
+GTEST_DEFINE_string_(
+ filter,
+ internal::StringFromGTestEnv("filter", kUniversalFilter),
+ "A colon-separated list of glob (not regex) patterns "
+ "for filtering the tests to run, optionally followed by a "
+ "'-' and a : separated list of negative patterns (tests to "
+ "exclude). A test is run if it matches one of the positive "
+ "patterns and does not match any of the negative patterns.");
+
+GTEST_DEFINE_bool_(list_tests, false,
+ "List all tests without running them.");
+
+GTEST_DEFINE_string_(
+ output,
+ internal::StringFromGTestEnv("output", ""),
+ "A format (currently must be \"xml\"), optionally followed "
+ "by a colon and an output file name or directory. A directory "
+ "is indicated by a trailing pathname separator. "
+ "Examples: \"xml:filename.xml\", \"xml::directoryname/\". "
+ "If a directory is specified, output files will be created "
+ "within that directory, with file-names based on the test "
+ "executable's name and, if necessary, made unique by adding "
+ "digits.");
+
+GTEST_DEFINE_bool_(
+ print_time,
+ internal::BoolFromGTestEnv("print_time", true),
+ "True iff " GTEST_NAME_
+ " should display elapsed time in text output.");
+
+GTEST_DEFINE_int32_(
+ random_seed,
+ internal::Int32FromGTestEnv("random_seed", 0),
+ "Random number seed to use when shuffling test orders. Must be in range "
+ "[1, 99999], or 0 to use a seed based on the current time.");
+
+GTEST_DEFINE_int32_(
+ repeat,
+ internal::Int32FromGTestEnv("repeat", 1),
+ "How many times to repeat each test. Specify a negative number "
+ "for repeating forever. Useful for shaking out flaky tests.");
+
+GTEST_DEFINE_bool_(
+ show_internal_stack_frames, false,
+ "True iff " GTEST_NAME_ " should include internal stack frames when "
+ "printing test failure stack traces.");
+
+GTEST_DEFINE_bool_(
+ shuffle,
+ internal::BoolFromGTestEnv("shuffle", false),
+ "True iff " GTEST_NAME_
+ " should randomize tests' order on every run.");
+
+GTEST_DEFINE_int32_(
+ stack_trace_depth,
+ internal::Int32FromGTestEnv("stack_trace_depth", kMaxStackTraceDepth),
+ "The maximum number of stack frames to print when an "
+ "assertion fails. The valid range is 0 through 100, inclusive.");
+
+GTEST_DEFINE_bool_(
+ throw_on_failure,
+ internal::BoolFromGTestEnv("throw_on_failure", false),
+ "When this flag is specified, a failed assertion will throw an exception "
+ "if exceptions are enabled or exit the program with a non-zero code "
+ "otherwise.");
+
+namespace internal {
+
+// Generates a random number from [0, range), using a Linear
+// Congruential Generator (LCG). Crashes if 'range' is 0 or greater
+// than kMaxRange.
+UInt32 Random::Generate(UInt32 range) {
+ // These constants are the same as are used in glibc's rand(3).
+ state_ = (1103515245U*state_ + 12345U) % kMaxRange;
+
+ GTEST_CHECK_(range > 0)
+ << "Cannot generate a number in the range [0, 0).";
+ GTEST_CHECK_(range <= kMaxRange)
+ << "Generation of a number in [0, " << range << ") was requested, "
+ << "but this can only generate numbers in [0, " << kMaxRange << ").";
+
+ // Converting via modulus introduces a bit of downward bias, but
+ // it's simple, and a linear congruential generator isn't too good
+ // to begin with.
+ return state_ % range;
+}
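+
+// A minimal usage sketch (the seed value is arbitrary):
+//
+//   Random rng(42);
+//   const UInt32 roll = rng.Generate(6);  // some value in [0, 6)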
+
+// GTestIsInitialized() returns true iff the user has initialized
+// Google Test. Useful for catching the user mistake of not initializing
+// Google Test before calling RUN_ALL_TESTS().
+//
+// A user must call testing::InitGoogleTest() to initialize Google
+// Test. g_init_gtest_count is set to the number of times
+// InitGoogleTest() has been called. We don't protect this variable
+// under a mutex as it is only accessed in the main thread.
+int g_init_gtest_count = 0;
+static bool GTestIsInitialized() { return g_init_gtest_count != 0; }
+
+// Iterates over a vector of TestCases, keeping a running sum of the
+// results of calling a given int-returning method on each.
+// Returns the sum.
+static int SumOverTestCaseList(const std::vector<TestCase*>& case_list,
+ int (TestCase::*method)() const) {
+ int sum = 0;
+ for (size_t i = 0; i < case_list.size(); i++) {
+ sum += (case_list[i]->*method)();
+ }
+ return sum;
+}
+
+// Returns true iff the test case passed.
+static bool TestCasePassed(const TestCase* test_case) {
+ return test_case->should_run() && test_case->Passed();
+}
+
+// Returns true iff the test case failed.
+static bool TestCaseFailed(const TestCase* test_case) {
+ return test_case->should_run() && test_case->Failed();
+}
+
+// Returns true iff test_case contains at least one test that should
+// run.
+static bool ShouldRunTestCase(const TestCase* test_case) {
+ return test_case->should_run();
+}
+
+// AssertHelper constructor.
+AssertHelper::AssertHelper(TestPartResult::Type type,
+ const char* file,
+ int line,
+ const char* message)
+ : data_(new AssertHelperData(type, file, line, message)) {
+}
+
+AssertHelper::~AssertHelper() {
+ delete data_;
+}
+
+// Message assignment, for assertion streaming support.
+void AssertHelper::operator=(const Message& message) const {
+ UnitTest::GetInstance()->
+ AddTestPartResult(data_->type, data_->file, data_->line,
+ AppendUserMessage(data_->message, message),
+ UnitTest::GetInstance()->impl()
+ ->CurrentOsStackTraceExceptTop(1)
+ // Skips the stack frame for this function itself.
+ ); // NOLINT
+}
+
+// Mutex for linked pointers.
+GTEST_DEFINE_STATIC_MUTEX_(g_linked_ptr_mutex);
+
+// Application pathname obtained in InitGoogleTest.
+String g_executable_path;
+
+// Returns the current application's name, removing the directory path if
+// present.
+FilePath GetCurrentExecutableName() {
+ FilePath result;
+
+#if GTEST_OS_WINDOWS
+ result.Set(FilePath(g_executable_path).RemoveExtension("exe"));
+#else
+ result.Set(FilePath(g_executable_path));
+#endif // GTEST_OS_WINDOWS
+
+ return result.RemoveDirectoryName();
+}
+
+// Functions for processing the gtest_output flag.
+
+// Returns the output format, or "" for normal printed output.
+String UnitTestOptions::GetOutputFormat() {
+ const char* const gtest_output_flag = GTEST_FLAG(output).c_str();
+ if (gtest_output_flag == NULL) return String("");
+
+ const char* const colon = strchr(gtest_output_flag, ':');
+ return (colon == NULL) ?
+ String(gtest_output_flag) :
+ String(gtest_output_flag, colon - gtest_output_flag);
+}
+
+// Returns the name of the requested output file, or the default if none
+// was explicitly specified.
+String UnitTestOptions::GetAbsolutePathToOutputFile() {
+ const char* const gtest_output_flag = GTEST_FLAG(output).c_str();
+ if (gtest_output_flag == NULL)
+ return String("");
+
+ const char* const colon = strchr(gtest_output_flag, ':');
+ if (colon == NULL)
+ return String(internal::FilePath::ConcatPaths(
+ internal::FilePath(
+ UnitTest::GetInstance()->original_working_dir()),
+ internal::FilePath(kDefaultOutputFile)).ToString() );
+
+ internal::FilePath output_name(colon + 1);
+ if (!output_name.IsAbsolutePath())
+ // TODO(wan@google.com): on Windows \some\path is not an absolute
+ // path (as its meaning depends on the current drive), yet the
+ // following logic for turning it into an absolute path is wrong.
+ // Fix it.
+ output_name = internal::FilePath::ConcatPaths(
+ internal::FilePath(UnitTest::GetInstance()->original_working_dir()),
+ internal::FilePath(colon + 1));
+
+ if (!output_name.IsDirectory())
+ return output_name.ToString();
+
+ internal::FilePath result(internal::FilePath::GenerateUniqueFileName(
+ output_name, internal::GetCurrentExecutableName(),
+ GetOutputFormat().c_str()));
+ return result.ToString();
+}
+
+// Returns true iff the wildcard pattern matches the string. The
+// first ':' or '\0' character in pattern marks the end of it.
+//
+// This recursive algorithm isn't very efficient, but is clear and
+// works well enough for matching test names, which are short.
+bool UnitTestOptions::PatternMatchesString(const char *pattern,
+ const char *str) {
+ switch (*pattern) {
+ case '\0':
+ case ':': // Either ':' or '\0' marks the end of the pattern.
+ return *str == '\0';
+ case '?': // Matches any single character.
+ return *str != '\0' && PatternMatchesString(pattern + 1, str + 1);
+ case '*': // Matches any string (possibly empty) of characters.
+ return (*str != '\0' && PatternMatchesString(pattern, str + 1)) ||
+ PatternMatchesString(pattern + 1, str);
+ default: // Non-special character. Matches itself.
+ return *pattern == *str &&
+ PatternMatchesString(pattern + 1, str + 1);
+ }
+}
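+
+// A few illustrative matches (the test names are hypothetical):
+//
+//   PatternMatchesString("Foo*", "FooTest.Bar")  -> true
+//   PatternMatchesString("*Bar", "FooTest.Bar")  -> true
+//   PatternMatchesString("Foo?", "Foot")         -> true
+//   PatternMatchesString("Foo?", "FooBar")       -> false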
+
+bool UnitTestOptions::MatchesFilter(const String& name, const char* filter) {
+ const char *cur_pattern = filter;
+ for (;;) {
+ if (PatternMatchesString(cur_pattern, name.c_str())) {
+ return true;
+ }
+
+ // Finds the next pattern in the filter.
+ cur_pattern = strchr(cur_pattern, ':');
+
+ // Returns if no more pattern can be found.
+ if (cur_pattern == NULL) {
+ return false;
+ }
+
+    // Skips the pattern separator (the ':' character).
+ cur_pattern++;
+ }
+}
+
+// TODO(keithray): move String function implementations to gtest-string.cc.
+
+// Returns true iff the user-specified filter matches the test case
+// name and the test name.
+bool UnitTestOptions::FilterMatchesTest(const String &test_case_name,
+ const String &test_name) {
+ const String& full_name = String::Format("%s.%s",
+ test_case_name.c_str(),
+ test_name.c_str());
+
+ // Split --gtest_filter at '-', if there is one, to separate into
+ // positive filter and negative filter portions
+ const char* const p = GTEST_FLAG(filter).c_str();
+ const char* const dash = strchr(p, '-');
+ String positive;
+ String negative;
+ if (dash == NULL) {
+ positive = GTEST_FLAG(filter).c_str(); // Whole string is a positive filter
+ negative = String("");
+ } else {
+ positive = String(p, dash - p); // Everything up to the dash
+ negative = String(dash+1); // Everything after the dash
+ if (positive.empty()) {
+ // Treat '-test1' as the same as '*-test1'
+ positive = kUniversalFilter;
+ }
+ }
+
+ // A filter is a colon-separated list of patterns. It matches a
+ // test if any pattern in it matches the test.
+ return (MatchesFilter(full_name, positive.c_str()) &&
+ !MatchesFilter(full_name, negative.c_str()));
+}
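+
+// For example (test names are hypothetical), with
+// --gtest_filter=FooTest.*-FooTest.Bar the positive filter is "FooTest.*"
+// and the negative filter is "FooTest.Bar", so FooTest.Baz runs while
+// FooTest.Bar and BarTest.Foo do not.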
+
+#if GTEST_OS_WINDOWS
+// Returns EXCEPTION_EXECUTE_HANDLER if Google Test should handle the
+// given SEH exception, or EXCEPTION_CONTINUE_SEARCH otherwise.
+// This function is useful as an __except condition.
+int UnitTestOptions::GTestShouldProcessSEH(DWORD exception_code) {
+ // Google Test should handle an exception if:
+ // 1. the user wants it to, AND
+ // 2. this is not a breakpoint exception.
+ return (GTEST_FLAG(catch_exceptions) &&
+ exception_code != EXCEPTION_BREAKPOINT) ?
+ EXCEPTION_EXECUTE_HANDLER :
+ EXCEPTION_CONTINUE_SEARCH;
+}
+#endif // GTEST_OS_WINDOWS
+
+} // namespace internal
+
+// The c'tor sets this object as the test part result reporter used by
+// Google Test. The 'result' parameter specifies where to report the
+// results. Intercepts only failures from the current thread.
+ScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter(
+ TestPartResultArray* result)
+ : intercept_mode_(INTERCEPT_ONLY_CURRENT_THREAD),
+ result_(result) {
+ Init();
+}
+
+// The c'tor sets this object as the test part result reporter used by
+// Google Test. The 'result' parameter specifies where to report the
+// results.
+ScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter(
+ InterceptMode intercept_mode, TestPartResultArray* result)
+ : intercept_mode_(intercept_mode),
+ result_(result) {
+ Init();
+}
+
+void ScopedFakeTestPartResultReporter::Init() {
+ internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+ if (intercept_mode_ == INTERCEPT_ALL_THREADS) {
+ old_reporter_ = impl->GetGlobalTestPartResultReporter();
+ impl->SetGlobalTestPartResultReporter(this);
+ } else {
+ old_reporter_ = impl->GetTestPartResultReporterForCurrentThread();
+ impl->SetTestPartResultReporterForCurrentThread(this);
+ }
+}
+
+// The d'tor restores the test part result reporter used by Google Test
+// before.
+ScopedFakeTestPartResultReporter::~ScopedFakeTestPartResultReporter() {
+ internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+ if (intercept_mode_ == INTERCEPT_ALL_THREADS) {
+ impl->SetGlobalTestPartResultReporter(old_reporter_);
+ } else {
+ impl->SetTestPartResultReporterForCurrentThread(old_reporter_);
+ }
+}
+
+// Increments the test part result count and remembers the result.
+// This method is from the TestPartResultReporterInterface interface.
+void ScopedFakeTestPartResultReporter::ReportTestPartResult(
+ const TestPartResult& result) {
+ result_->Append(result);
+}
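+
+// A usage sketch, essentially what the EXPECT_NONFATAL_FAILURE macro in
+// gtest-spi.h sets up (the failing statement is hypothetical):
+//
+//   TestPartResultArray results;
+//   {
+//     ScopedFakeTestPartResultReporter reporter(
+//         ScopedFakeTestPartResultReporter::INTERCEPT_ONLY_CURRENT_THREAD,
+//         &results);
+//     ADD_FAILURE() << "expected failure";  // captured, not reported
+//   }
+//   // results now holds exactly one non-fatal failure.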
+
+namespace internal {
+
+// Returns the type ID of ::testing::Test. We should always call this
+// instead of GetTypeId< ::testing::Test>() to get the type ID of
+// testing::Test. This is to work around a suspected linker bug when
+// using Google Test as a framework on Mac OS X. The bug causes
+// GetTypeId< ::testing::Test>() to return different values depending
+// on whether the call is from the Google Test framework itself or
+// from user test code. GetTestTypeId() is guaranteed to always
+// return the same value, as it always calls GetTypeId<>() from the
+// gtest.cc, which is within the Google Test framework.
+TypeId GetTestTypeId() {
+ return GetTypeId<Test>();
+}
+
+// The value of GetTestTypeId() as seen from within the Google Test
+// library. This is solely for testing GetTestTypeId().
+extern const TypeId kTestTypeIdInGoogleTest = GetTestTypeId();
+
+// This predicate-formatter checks that 'results' contains a test part
+// failure of the given type and that the failure message contains the
+// given substring.
+AssertionResult HasOneFailure(const char* /* results_expr */,
+ const char* /* type_expr */,
+ const char* /* substr_expr */,
+ const TestPartResultArray& results,
+ TestPartResult::Type type,
+ const char* substr) {
+ const String expected(type == TestPartResult::kFatalFailure ?
+ "1 fatal failure" :
+ "1 non-fatal failure");
+ Message msg;
+ if (results.size() != 1) {
+ msg << "Expected: " << expected << "\n"
+ << " Actual: " << results.size() << " failures";
+ for (int i = 0; i < results.size(); i++) {
+ msg << "\n" << results.GetTestPartResult(i);
+ }
+ return AssertionFailure(msg);
+ }
+
+ const TestPartResult& r = results.GetTestPartResult(0);
+ if (r.type() != type) {
+ msg << "Expected: " << expected << "\n"
+ << " Actual:\n"
+ << r;
+ return AssertionFailure(msg);
+ }
+
+ if (strstr(r.message(), substr) == NULL) {
+ msg << "Expected: " << expected << " containing \""
+ << substr << "\"\n"
+ << " Actual:\n"
+ << r;
+ return AssertionFailure(msg);
+ }
+
+ return AssertionSuccess();
+}
+
+// The constructor of SingleFailureChecker remembers where to look up
+// test part results, what type of failure we expect, and what
+// substring the failure message should contain.
+SingleFailureChecker::SingleFailureChecker(
+ const TestPartResultArray* results,
+ TestPartResult::Type type,
+ const char* substr)
+ : results_(results),
+ type_(type),
+ substr_(substr) {}
+
+// The destructor of SingleFailureChecker verifies that the given
+// TestPartResultArray contains exactly one failure that has the given
+// type and contains the given substring. If that's not the case, a
+// non-fatal failure will be generated.
+SingleFailureChecker::~SingleFailureChecker() {
+ EXPECT_PRED_FORMAT3(HasOneFailure, *results_, type_, substr_.c_str());
+}
+
+DefaultGlobalTestPartResultReporter::DefaultGlobalTestPartResultReporter(
+ UnitTestImpl* unit_test) : unit_test_(unit_test) {}
+
+void DefaultGlobalTestPartResultReporter::ReportTestPartResult(
+ const TestPartResult& result) {
+ unit_test_->current_test_result()->AddTestPartResult(result);
+ unit_test_->listeners()->repeater()->OnTestPartResult(result);
+}
+
+DefaultPerThreadTestPartResultReporter::DefaultPerThreadTestPartResultReporter(
+ UnitTestImpl* unit_test) : unit_test_(unit_test) {}
+
+void DefaultPerThreadTestPartResultReporter::ReportTestPartResult(
+ const TestPartResult& result) {
+ unit_test_->GetGlobalTestPartResultReporter()->ReportTestPartResult(result);
+}
+
+// Returns the global test part result reporter.
+TestPartResultReporterInterface*
+UnitTestImpl::GetGlobalTestPartResultReporter() {
+ internal::MutexLock lock(&global_test_part_result_reporter_mutex_);
+ return global_test_part_result_repoter_;
+}
+
+// Sets the global test part result reporter.
+void UnitTestImpl::SetGlobalTestPartResultReporter(
+ TestPartResultReporterInterface* reporter) {
+ internal::MutexLock lock(&global_test_part_result_reporter_mutex_);
+ global_test_part_result_repoter_ = reporter;
+}
+
+// Returns the test part result reporter for the current thread.
+TestPartResultReporterInterface*
+UnitTestImpl::GetTestPartResultReporterForCurrentThread() {
+ return per_thread_test_part_result_reporter_.get();
+}
+
+// Sets the test part result reporter for the current thread.
+void UnitTestImpl::SetTestPartResultReporterForCurrentThread(
+ TestPartResultReporterInterface* reporter) {
+ per_thread_test_part_result_reporter_.set(reporter);
+}
+
+// Gets the number of successful test cases.
+int UnitTestImpl::successful_test_case_count() const {
+ return CountIf(test_cases_, TestCasePassed);
+}
+
+// Gets the number of failed test cases.
+int UnitTestImpl::failed_test_case_count() const {
+ return CountIf(test_cases_, TestCaseFailed);
+}
+
+// Gets the number of all test cases.
+int UnitTestImpl::total_test_case_count() const {
+ return static_cast<int>(test_cases_.size());
+}
+
+// Gets the number of all test cases that contain at least one test
+// that should run.
+int UnitTestImpl::test_case_to_run_count() const {
+ return CountIf(test_cases_, ShouldRunTestCase);
+}
+
+// Gets the number of successful tests.
+int UnitTestImpl::successful_test_count() const {
+ return SumOverTestCaseList(test_cases_, &TestCase::successful_test_count);
+}
+
+// Gets the number of failed tests.
+int UnitTestImpl::failed_test_count() const {
+ return SumOverTestCaseList(test_cases_, &TestCase::failed_test_count);
+}
+
+// Gets the number of disabled tests.
+int UnitTestImpl::disabled_test_count() const {
+ return SumOverTestCaseList(test_cases_, &TestCase::disabled_test_count);
+}
+
+// Gets the number of all tests.
+int UnitTestImpl::total_test_count() const {
+ return SumOverTestCaseList(test_cases_, &TestCase::total_test_count);
+}
+
+// Gets the number of tests that should run.
+int UnitTestImpl::test_to_run_count() const {
+ return SumOverTestCaseList(test_cases_, &TestCase::test_to_run_count);
+}
+
+// Returns the current OS stack trace as a String.
+//
+// The maximum number of stack frames to be included is specified by
+// the gtest_stack_trace_depth flag. The skip_count parameter
+// specifies the number of top frames to be skipped, which doesn't
+// count against the number of frames to be included.
+//
+// For example, if Foo() calls Bar(), which in turn calls
+// CurrentOsStackTraceExceptTop(1), Foo() will be included in the
+// trace but Bar() and CurrentOsStackTraceExceptTop() won't.
+String UnitTestImpl::CurrentOsStackTraceExceptTop(int skip_count) {
+ (void)skip_count;
+ return String("");
+}
+
+// Returns the current time in milliseconds.
+TimeInMillis GetTimeInMillis() {
+#if GTEST_OS_WINDOWS_MOBILE || defined(__BORLANDC__)
+ // Difference between 1970-01-01 and 1601-01-01 in milliseconds.
+ // http://analogous.blogspot.com/2005/04/epoch.html
+ const TimeInMillis kJavaEpochToWinFileTimeDelta =
+ static_cast<TimeInMillis>(116444736UL) * 100000UL;
+ const DWORD kTenthMicrosInMilliSecond = 10000;
+
+ SYSTEMTIME now_systime;
+ FILETIME now_filetime;
+ ULARGE_INTEGER now_int64;
+ // TODO(kenton@google.com): Shouldn't this just use
+ // GetSystemTimeAsFileTime()?
+ GetSystemTime(&now_systime);
+ if (SystemTimeToFileTime(&now_systime, &now_filetime)) {
+ now_int64.LowPart = now_filetime.dwLowDateTime;
+ now_int64.HighPart = now_filetime.dwHighDateTime;
+ now_int64.QuadPart = (now_int64.QuadPart / kTenthMicrosInMilliSecond) -
+ kJavaEpochToWinFileTimeDelta;
+ return now_int64.QuadPart;
+ }
+ return 0;
+#elif GTEST_OS_WINDOWS && !GTEST_HAS_GETTIMEOFDAY_
+ __timeb64 now;
+#ifdef _MSC_VER
+ // MSVC 8 deprecates _ftime64(), so we want to suppress warning 4996
+ // (deprecated function) there.
+ // TODO(kenton@google.com): Use GetTickCount()? Or use
+ // SystemTimeToFileTime()
+#pragma warning(push) // Saves the current warning state.
+#pragma warning(disable:4996) // Temporarily disables warning 4996.
+ _ftime64(&now);
+#pragma warning(pop) // Restores the warning state.
+#else
+ _ftime64(&now);
+#endif // _MSC_VER
+ return static_cast<TimeInMillis>(now.time) * 1000 + now.millitm;
+#elif GTEST_HAS_GETTIMEOFDAY_
+ struct timeval now;
+ gettimeofday(&now, NULL);
+ return static_cast<TimeInMillis>(now.tv_sec) * 1000 + now.tv_usec / 1000;
+#else
+#error "Don't know how to get the current time on your system."
+#endif
+}
+
+// Utilities
+
+// class String
+
+// Returns the input enclosed in double quotes if it's not NULL;
+// otherwise returns "(null)". For example, "\"Hello\"" is returned
+// for input "Hello".
+//
+// This is useful for printing a C string in the syntax of a literal.
+//
+// Known issue: escape sequences are not handled yet.
+String String::ShowCStringQuoted(const char* c_str) {
+ return c_str ? String::Format("\"%s\"", c_str) : String("(null)");
+}
+
+// Copies at most length characters from str into a newly-allocated
+// piece of memory of size length+1. The memory is allocated with new[].
+// A terminating null byte is written to the memory, and a pointer to it
+// is returned. If str is NULL, NULL is returned.
+static char* CloneString(const char* str, size_t length) {
+ if (str == NULL) {
+ return NULL;
+ } else {
+ char* const clone = new char[length + 1];
+ posix::StrNCpy(clone, str, length);
+ clone[length] = '\0';
+ return clone;
+ }
+}
+
+// Clones a 0-terminated C string, allocating memory using new. The
+// caller is responsible for deleting the return value using delete[].
+// Returns the cloned string, or NULL if the input is NULL.
+const char * String::CloneCString(const char* c_str) {
+ return (c_str == NULL) ?
+ NULL : CloneString(c_str, strlen(c_str));
+}
+
+#if GTEST_OS_WINDOWS_MOBILE
+// Creates a UTF-16 wide string from the given ANSI string, allocating
+// memory using new. The caller is responsible for deleting the return
+// value using delete[]. Returns the wide string, or NULL if the
+// input is NULL.
+LPCWSTR String::AnsiToUtf16(const char* ansi) {
+ if (!ansi) return NULL;
+ const int length = strlen(ansi);
+ const int unicode_length =
+ MultiByteToWideChar(CP_ACP, 0, ansi, length,
+ NULL, 0);
+ WCHAR* unicode = new WCHAR[unicode_length + 1];
+ MultiByteToWideChar(CP_ACP, 0, ansi, length,
+ unicode, unicode_length);
+ unicode[unicode_length] = 0;
+ return unicode;
+}
+
+// Creates an ANSI string from the given wide string, allocating
+// memory using new. The caller is responsible for deleting the return
+// value using delete[]. Returns the ANSI string, or NULL if the
+// input is NULL.
+const char* String::Utf16ToAnsi(LPCWSTR utf16_str) {
+ if (!utf16_str) return NULL;
+ const int ansi_length =
+ WideCharToMultiByte(CP_ACP, 0, utf16_str, -1,
+ NULL, 0, NULL, NULL);
+ char* ansi = new char[ansi_length + 1];
+ WideCharToMultiByte(CP_ACP, 0, utf16_str, -1,
+ ansi, ansi_length, NULL, NULL);
+ ansi[ansi_length] = 0;
+ return ansi;
+}
+
+#endif // GTEST_OS_WINDOWS_MOBILE
+
+// Compares two C strings. Returns true iff they have the same content.
+//
+// Unlike strcmp(), this function can handle NULL argument(s). A NULL
+// C string is considered different to any non-NULL C string,
+// including the empty string.
+bool String::CStringEquals(const char * lhs, const char * rhs) {
+ if ( lhs == NULL ) return rhs == NULL;
+
+ if ( rhs == NULL ) return false;
+
+ return strcmp(lhs, rhs) == 0;
+}
+
+#if GTEST_HAS_STD_WSTRING || GTEST_HAS_GLOBAL_WSTRING
+
+// Converts an array of wide chars to a narrow string using the UTF-8
+// encoding, and streams the result to the given Message object.
+static void StreamWideCharsToMessage(const wchar_t* wstr, size_t length,
+ Message* msg) {
+ // TODO(wan): consider allowing a testing::String object to
+ // contain '\0'. This will make it behave more like std::string,
+ // and will allow ToUtf8String() to return the correct encoding
+ // for '\0' s.t. we can get rid of the conditional here (and in
+ // several other places).
+ for (size_t i = 0; i != length; ) { // NOLINT
+ if (wstr[i] != L'\0') {
+ *msg << WideStringToUtf8(wstr + i, static_cast<int>(length - i));
+ while (i != length && wstr[i] != L'\0')
+ i++;
+ } else {
+ *msg << '\0';
+ i++;
+ }
+ }
+}
+
+#endif // GTEST_HAS_STD_WSTRING || GTEST_HAS_GLOBAL_WSTRING
+
+} // namespace internal
+
+#if GTEST_HAS_STD_WSTRING
+// Converts the given wide string to a narrow string using the UTF-8
+// encoding, and streams the result to this Message object.
+Message& Message::operator <<(const ::std::wstring& wstr) {
+ internal::StreamWideCharsToMessage(wstr.c_str(), wstr.length(), this);
+ return *this;
+}
+#endif // GTEST_HAS_STD_WSTRING
+
+#if GTEST_HAS_GLOBAL_WSTRING
+// Converts the given wide string to a narrow string using the UTF-8
+// encoding, and streams the result to this Message object.
+Message& Message::operator <<(const ::wstring& wstr) {
+ internal::StreamWideCharsToMessage(wstr.c_str(), wstr.length(), this);
+ return *this;
+}
+#endif // GTEST_HAS_GLOBAL_WSTRING
+
+namespace internal {
+
+// Formats a value to be used in a failure message.
+
+// For a char value, we print it as a C++ char literal and as an
+// unsigned integer (both in decimal and in hexadecimal).
+String FormatForFailureMessage(char ch) {
+ const unsigned int ch_as_uint = ch;
+ // A String object cannot contain '\0', so we print "\\0" when ch is
+ // '\0'.
+ return String::Format("'%s' (%u, 0x%X)",
+ ch ? String::Format("%c", ch).c_str() : "\\0",
+ ch_as_uint, ch_as_uint);
+}
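+
+// For example, FormatForFailureMessage('a') yields "'a' (97, 0x61)",
+// and FormatForFailureMessage('\0') yields "'\0' (0, 0x0)".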
+
+// For a wchar_t value, we print it as a C++ wchar_t literal and as an
+// unsigned integer (both in decimal and in hexadecimal).
+String FormatForFailureMessage(wchar_t wchar) {
+ // The C++ standard doesn't specify the exact size of the wchar_t
+ // type. It just says that it shall have the same size as another
+ // integral type, called its underlying type.
+ //
+ // Therefore, in order to print a wchar_t value in the numeric form,
+ // we first convert it to the largest integral type (UInt64) and
+ // then print the converted value.
+ //
+ // We use streaming to print the value as "%llu" doesn't work
+ // correctly with MSVC 7.1.
+ const UInt64 wchar_as_uint64 = wchar;
+ Message msg;
+ // A String object cannot contain '\0', so we print "\\0" when wchar is
+ // L'\0'.
+ char buffer[32]; // CodePointToUtf8 requires a buffer that big.
+ msg << "L'"
+ << (wchar ? CodePointToUtf8(static_cast<UInt32>(wchar), buffer) : "\\0")
+ << "' (" << wchar_as_uint64 << ", 0x" << ::std::setbase(16)
+ << wchar_as_uint64 << ")";
+ return msg.GetString();
+}
+
+} // namespace internal
+
+// AssertionResult constructors.
+// Used in EXPECT_TRUE/FALSE(assertion_result).
+AssertionResult::AssertionResult(const AssertionResult& other)
+ : success_(other.success_),
+ message_(other.message_.get() != NULL ?
+ new internal::String(*other.message_) :
+ static_cast<internal::String*>(NULL)) {
+}
+
+// Returns the assertion's negation. Used with EXPECT/ASSERT_FALSE.
+AssertionResult AssertionResult::operator!() const {
+ AssertionResult negation(!success_);
+ if (message_.get() != NULL)
+ negation << *message_;
+ return negation;
+}
+
+// Makes a successful assertion result.
+AssertionResult AssertionSuccess() {
+ return AssertionResult(true);
+}
+
+// Makes a failed assertion result.
+AssertionResult AssertionFailure() {
+ return AssertionResult(false);
+}
+
+// Makes a failed assertion result with the given failure message.
+// Deprecated; use AssertionFailure() << message.
+AssertionResult AssertionFailure(const Message& message) {
+ return AssertionFailure() << message;
+}
+
+namespace internal {
+
+// Constructs and returns the message for an equality assertion
+// (e.g. ASSERT_EQ, EXPECT_STREQ, etc) failure.
+//
+// The first four parameters are the expressions used in the assertion
+// and their values, as strings. For example, for ASSERT_EQ(foo, bar)
+// where foo is 5 and bar is 6, we have:
+//
+// expected_expression: "foo"
+// actual_expression: "bar"
+// expected_value: "5"
+// actual_value: "6"
+//
+// The ignoring_case parameter is true iff the assertion is a
+// *_STRCASEEQ*. When it's true, the string " (ignoring case)" will
+// be inserted into the message.
+AssertionResult EqFailure(const char* expected_expression,
+ const char* actual_expression,
+ const String& expected_value,
+ const String& actual_value,
+ bool ignoring_case) {
+ Message msg;
+ msg << "Value of: " << actual_expression;
+ if (actual_value != actual_expression) {
+ msg << "\n Actual: " << actual_value;
+ }
+
+ msg << "\nExpected: " << expected_expression;
+ if (ignoring_case) {
+ msg << " (ignoring case)";
+ }
+ if (expected_value != expected_expression) {
+ msg << "\nWhich is: " << expected_value;
+ }
+
+ return AssertionFailure(msg);
+}
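+
+// Continuing the ASSERT_EQ(foo, bar) example above (foo is 5, bar is 6),
+// the resulting failure message reads:
+//
+//   Value of: bar
+//     Actual: 6
+//   Expected: foo
+//   Which is: 5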
+
+// Constructs a failure message for Boolean assertions such as EXPECT_TRUE.
+String GetBoolAssertionFailureMessage(const AssertionResult& assertion_result,
+ const char* expression_text,
+ const char* actual_predicate_value,
+ const char* expected_predicate_value) {
+ const char* actual_message = assertion_result.message();
+ Message msg;
+ msg << "Value of: " << expression_text
+ << "\n Actual: " << actual_predicate_value;
+ if (actual_message[0] != '\0')
+ msg << " (" << actual_message << ")";
+ msg << "\nExpected: " << expected_predicate_value;
+ return msg.GetString();
+}
+
+// Helper function for implementing ASSERT_NEAR.
+AssertionResult DoubleNearPredFormat(const char* expr1,
+ const char* expr2,
+ const char* abs_error_expr,
+ double val1,
+ double val2,
+ double abs_error) {
+ const double diff = fabs(val1 - val2);
+ if (diff <= abs_error) return AssertionSuccess();
+
+ // TODO(wan): do not print the value of an expression if it's
+ // already a literal.
+ Message msg;
+ msg << "The difference between " << expr1 << " and " << expr2
+ << " is " << diff << ", which exceeds " << abs_error_expr << ", where\n"
+ << expr1 << " evaluates to " << val1 << ",\n"
+ << expr2 << " evaluates to " << val2 << ", and\n"
+ << abs_error_expr << " evaluates to " << abs_error << ".";
+ return AssertionFailure(msg);
+}
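+
+// This formatter backs {ASSERT,EXPECT}_NEAR. A minimal sketch:
+//
+//   EXPECT_NEAR(2.0, 2.1, 0.5);   // passes: |2.0 - 2.1| <= 0.5
+//   EXPECT_NEAR(2.0, 2.1, 0.05);  // fails with the message built above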
+
+
+// Helper template for implementing FloatLE() and DoubleLE().
+template <typename RawType>
+AssertionResult FloatingPointLE(const char* expr1,
+ const char* expr2,
+ RawType val1,
+ RawType val2) {
+ // Returns success if val1 is less than val2,
+ if (val1 < val2) {
+ return AssertionSuccess();
+ }
+
+ // or if val1 is almost equal to val2.
+ const FloatingPoint<RawType> lhs(val1), rhs(val2);
+ if (lhs.AlmostEquals(rhs)) {
+ return AssertionSuccess();
+ }
+
+ // Note that the above two checks will both fail if either val1 or
+ // val2 is NaN, as the IEEE floating-point standard requires that
+ // any predicate involving a NaN must return false.
+
+ StrStream val1_ss;
+ val1_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2)
+ << val1;
+
+ StrStream val2_ss;
+ val2_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2)
+ << val2;
+
+ Message msg;
+ msg << "Expected: (" << expr1 << ") <= (" << expr2 << ")\n"
+ << " Actual: " << StrStreamToString(&val1_ss) << " vs "
+ << StrStreamToString(&val2_ss);
+
+ return AssertionFailure(msg);
+}
+
+} // namespace internal
+
+// Asserts that val1 is less than, or almost equal to, val2. Fails
+// otherwise. In particular, it fails if either val1 or val2 is NaN.
+AssertionResult FloatLE(const char* expr1, const char* expr2,
+ float val1, float val2) {
+ return internal::FloatingPointLE<float>(expr1, expr2, val1, val2);
+}
+
+// Asserts that val1 is less than, or almost equal to, val2. Fails
+// otherwise. In particular, it fails if either val1 or val2 is NaN.
+AssertionResult DoubleLE(const char* expr1, const char* expr2,
+ double val1, double val2) {
+ return internal::FloatingPointLE<double>(expr1, expr2, val1, val2);
+}
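+
+// These are intended for use with the predicate-format macros, e.g.
+// (values are hypothetical):
+//
+//   EXPECT_PRED_FORMAT2(::testing::DoubleLE, 1.0, 2.0);  // passes
+//   EXPECT_PRED_FORMAT2(::testing::DoubleLE, 2.0, 1.0);  // fails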
+
+namespace internal {
+
+// The helper function for {ASSERT|EXPECT}_EQ with int or enum
+// arguments.
+AssertionResult CmpHelperEQ(const char* expected_expression,
+ const char* actual_expression,
+ BiggestInt expected,
+ BiggestInt actual) {
+ if (expected == actual) {
+ return AssertionSuccess();
+ }
+
+ return EqFailure(expected_expression,
+ actual_expression,
+ FormatForComparisonFailureMessage(expected, actual),
+ FormatForComparisonFailureMessage(actual, expected),
+ false);
+}
+
+// A macro for implementing the helper functions needed to implement
+// ASSERT_?? and EXPECT_?? with integer or enum arguments. It is here
+// just to avoid copy-and-paste of similar code.
+#define GTEST_IMPL_CMP_HELPER_(op_name, op)\
+AssertionResult CmpHelper##op_name(const char* expr1, const char* expr2, \
+ BiggestInt val1, BiggestInt val2) {\
+ if (val1 op val2) {\
+ return AssertionSuccess();\
+ } else {\
+ Message msg;\
+ msg << "Expected: (" << expr1 << ") " #op " (" << expr2\
+ << "), actual: " << FormatForComparisonFailureMessage(val1, val2)\
+ << " vs " << FormatForComparisonFailureMessage(val2, val1);\
+ return AssertionFailure(msg);\
+ }\
+}
+
+// Implements the helper function for {ASSERT|EXPECT}_NE with int or
+// enum arguments.
+GTEST_IMPL_CMP_HELPER_(NE, !=)
+// Implements the helper function for {ASSERT|EXPECT}_LE with int or
+// enum arguments.
+GTEST_IMPL_CMP_HELPER_(LE, <=)
+// Implements the helper function for {ASSERT|EXPECT}_LT with int or
+// enum arguments.
+GTEST_IMPL_CMP_HELPER_(LT, < )
+// Implements the helper function for {ASSERT|EXPECT}_GE with int or
+// enum arguments.
+GTEST_IMPL_CMP_HELPER_(GE, >=)
+// Implements the helper function for {ASSERT|EXPECT}_GT with int or
+// enum arguments.
+GTEST_IMPL_CMP_HELPER_(GT, > )
+
+#undef GTEST_IMPL_CMP_HELPER_
+
+// The helper function for {ASSERT|EXPECT}_STREQ.
+AssertionResult CmpHelperSTREQ(const char* expected_expression,
+ const char* actual_expression,
+ const char* expected,
+ const char* actual) {
+ if (String::CStringEquals(expected, actual)) {
+ return AssertionSuccess();
+ }
+
+ return EqFailure(expected_expression,
+ actual_expression,
+ String::ShowCStringQuoted(expected),
+ String::ShowCStringQuoted(actual),
+ false);
+}
+
+// The helper function for {ASSERT|EXPECT}_STRCASEEQ.
+AssertionResult CmpHelperSTRCASEEQ(const char* expected_expression,
+ const char* actual_expression,
+ const char* expected,
+ const char* actual) {
+ if (String::CaseInsensitiveCStringEquals(expected, actual)) {
+ return AssertionSuccess();
+ }
+
+ return EqFailure(expected_expression,
+ actual_expression,
+ String::ShowCStringQuoted(expected),
+ String::ShowCStringQuoted(actual),
+ true);
+}
+
+// The helper function for {ASSERT|EXPECT}_STRNE.
+AssertionResult CmpHelperSTRNE(const char* s1_expression,
+ const char* s2_expression,
+ const char* s1,
+ const char* s2) {
+ if (!String::CStringEquals(s1, s2)) {
+ return AssertionSuccess();
+ } else {
+ Message msg;
+ msg << "Expected: (" << s1_expression << ") != ("
+ << s2_expression << "), actual: \""
+ << s1 << "\" vs \"" << s2 << "\"";
+ return AssertionFailure(msg);
+ }
+}
+
+// The helper function for {ASSERT|EXPECT}_STRCASENE.
+AssertionResult CmpHelperSTRCASENE(const char* s1_expression,
+ const char* s2_expression,
+ const char* s1,
+ const char* s2) {
+ if (!String::CaseInsensitiveCStringEquals(s1, s2)) {
+ return AssertionSuccess();
+ } else {
+ Message msg;
+ msg << "Expected: (" << s1_expression << ") != ("
+ << s2_expression << ") (ignoring case), actual: \""
+ << s1 << "\" vs \"" << s2 << "\"";
+ return AssertionFailure(msg);
+ }
+}
+
+} // namespace internal
+
+namespace {
+
+// Helper functions for implementing IsSubstring() and IsNotSubstring().
+
+// This group of overloaded functions returns true iff needle is a
+// substring of haystack. NULL is considered a substring of itself
+// only.
+
+bool IsSubstringPred(const char* needle, const char* haystack) {
+ if (needle == NULL || haystack == NULL)
+ return needle == haystack;
+
+ return strstr(haystack, needle) != NULL;
+}
+
+bool IsSubstringPred(const wchar_t* needle, const wchar_t* haystack) {
+ if (needle == NULL || haystack == NULL)
+ return needle == haystack;
+
+ return wcsstr(haystack, needle) != NULL;
+}
+
+// StringType here can be either ::std::string or ::std::wstring.
+template <typename StringType>
+bool IsSubstringPred(const StringType& needle,
+ const StringType& haystack) {
+ return haystack.find(needle) != StringType::npos;
+}
+
+// This function implements either IsSubstring() or IsNotSubstring(),
+// depending on the value of the expected_to_be_substring parameter.
+// StringType here can be const char*, const wchar_t*, ::std::string,
+// or ::std::wstring.
+template <typename StringType>
+AssertionResult IsSubstringImpl(
+ bool expected_to_be_substring,
+ const char* needle_expr, const char* haystack_expr,
+ const StringType& needle, const StringType& haystack) {
+ if (IsSubstringPred(needle, haystack) == expected_to_be_substring)
+ return AssertionSuccess();
+
+ const bool is_wide_string = sizeof(needle[0]) > 1;
+ const char* const begin_string_quote = is_wide_string ? "L\"" : "\"";
+ return AssertionFailure(
+ Message()
+ << "Value of: " << needle_expr << "\n"
+ << " Actual: " << begin_string_quote << needle << "\"\n"
+ << "Expected: " << (expected_to_be_substring ? "" : "not ")
+ << "a substring of " << haystack_expr << "\n"
+ << "Which is: " << begin_string_quote << haystack << "\"");
+}
+
+} // namespace
+
+// IsSubstring() and IsNotSubstring() check whether needle is a
+// substring of haystack (NULL is considered a substring of itself
+// only), and return an appropriate error message when they fail.
+
+AssertionResult IsSubstring(
+ const char* needle_expr, const char* haystack_expr,
+ const char* needle, const char* haystack) {
+ return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsSubstring(
+ const char* needle_expr, const char* haystack_expr,
+ const wchar_t* needle, const wchar_t* haystack) {
+ return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsNotSubstring(
+ const char* needle_expr, const char* haystack_expr,
+ const char* needle, const char* haystack) {
+ return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsNotSubstring(
+ const char* needle_expr, const char* haystack_expr,
+ const wchar_t* needle, const wchar_t* haystack) {
+ return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsSubstring(
+ const char* needle_expr, const char* haystack_expr,
+ const ::std::string& needle, const ::std::string& haystack) {
+ return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsNotSubstring(
+ const char* needle_expr, const char* haystack_expr,
+ const ::std::string& needle, const ::std::string& haystack) {
+ return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
+}
+
+#if GTEST_HAS_STD_WSTRING
+AssertionResult IsSubstring(
+ const char* needle_expr, const char* haystack_expr,
+ const ::std::wstring& needle, const ::std::wstring& haystack) {
+ return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsNotSubstring(
+ const char* needle_expr, const char* haystack_expr,
+ const ::std::wstring& needle, const ::std::wstring& haystack) {
+ return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
+}
+#endif // GTEST_HAS_STD_WSTRING
+
+namespace internal {
+
+#if GTEST_OS_WINDOWS
+
+namespace {
+
+// Helper function for the IsHRESULT{Success,Failure} predicates.
+AssertionResult HRESULTFailureHelper(const char* expr,
+ const char* expected,
+ long hr) { // NOLINT
+#if GTEST_OS_WINDOWS_MOBILE
+ // Windows CE doesn't support FormatMessage.
+ const char error_text[] = "";
+#else
+  // Looks up the human-readable system message for the HRESULT code.
+  // Since we're not passing any params to FormatMessage, we don't
+  // want inserts expanded.
+ const DWORD kFlags = FORMAT_MESSAGE_FROM_SYSTEM |
+ FORMAT_MESSAGE_IGNORE_INSERTS;
+ const DWORD kBufSize = 4096; // String::Format can't exceed this length.
+ // Gets the system's human readable message string for this HRESULT.
+ char error_text[kBufSize] = { '\0' };
+ DWORD message_length = ::FormatMessageA(kFlags,
+ 0, // no source, we're asking system
+ hr, // the error
+ 0, // no line width restrictions
+ error_text, // output buffer
+ kBufSize, // buf size
+ NULL); // no arguments for inserts
+  // Trims trailing whitespace (FormatMessage leaves a trailing CR-LF).
+ for (; message_length && isspace(error_text[message_length - 1]);
+ --message_length) {
+ error_text[message_length - 1] = '\0';
+ }
+#endif // GTEST_OS_WINDOWS_MOBILE
+
+ const String error_hex(String::Format("0x%08X ", hr));
+ Message msg;
+ msg << "Expected: " << expr << " " << expected << ".\n"
+ << " Actual: " << error_hex << error_text << "\n";
+
+ return ::testing::AssertionFailure(msg);
+}
+
+} // namespace
+
+AssertionResult IsHRESULTSuccess(const char* expr, long hr) { // NOLINT
+ if (SUCCEEDED(hr)) {
+ return AssertionSuccess();
+ }
+ return HRESULTFailureHelper(expr, "succeeds", hr);
+}
+
+AssertionResult IsHRESULTFailure(const char* expr, long hr) { // NOLINT
+ if (FAILED(hr)) {
+ return AssertionSuccess();
+ }
+ return HRESULTFailureHelper(expr, "fails", hr);
+}
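+
+// These predicate-formatters back the {EXPECT,ASSERT}_HRESULT_SUCCEEDED and
+// {EXPECT,ASSERT}_HRESULT_FAILED macros.  Illustrative usage (with a
+// hypothetical COM call):
+//
+//   EXPECT_HRESULT_SUCCEEDED(some_com_object->SomeMethod());
+//   EXPECT_HRESULT_FAILED(E_FAIL);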
+
+#endif // GTEST_OS_WINDOWS
+
+// Utility functions for encoding Unicode text (wide strings) in
+// UTF-8.
+
+// A Unicode code-point can have up to 21 bits, and is encoded in UTF-8
+// like this:
+//
+// Code-point length Encoding
+// 0 - 7 bits 0xxxxxxx
+// 8 - 11 bits 110xxxxx 10xxxxxx
+// 12 - 16 bits 1110xxxx 10xxxxxx 10xxxxxx
+// 17 - 21 bits 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
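+//
+// For example, the code point U+00E9 needs 8 significant bits (1110 1001),
+// so it takes the two-byte form: the high 5 bits (00011) fill 110xxxxx and
+// the low 6 bits (101001) fill 10xxxxxx, giving the bytes 0xC3 0xA9.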
+
+// The maximum code-point a one-byte UTF-8 sequence can represent.
+const UInt32 kMaxCodePoint1 = (static_cast<UInt32>(1) << 7) - 1;
+
+// The maximum code-point a two-byte UTF-8 sequence can represent.
+const UInt32 kMaxCodePoint2 = (static_cast<UInt32>(1) << (5 + 6)) - 1;
+
+// The maximum code-point a three-byte UTF-8 sequence can represent.
+const UInt32 kMaxCodePoint3 = (static_cast<UInt32>(1) << (4 + 2*6)) - 1;
+
+// The maximum code-point a four-byte UTF-8 sequence can represent.
+const UInt32 kMaxCodePoint4 = (static_cast<UInt32>(1) << (3 + 3*6)) - 1;
+
+// Chops off the n lowest bits from a bit pattern. Returns the n
+// lowest bits. As a side effect, the original bit pattern will be
+// shifted to the right by n bits.
+inline UInt32 ChopLowBits(UInt32* bits, int n) {
+ const UInt32 low_bits = *bits & ((static_cast<UInt32>(1) << n) - 1);
+ *bits >>= n;
+ return low_bits;
+}
+
+// Converts a Unicode code point to a narrow string in UTF-8 encoding.
+// code_point parameter is of type UInt32 because wchar_t may not be
+// wide enough to contain a code point.
+// The output buffer str must contain at least 32 characters.
+// The function returns the address of the output buffer.
+// If the code_point is not a valid Unicode code point
+// (i.e. outside of Unicode range U+0 to U+10FFFF) it will be output
+// as '(Invalid Unicode 0xXXXXXXXX)'.
+char* CodePointToUtf8(UInt32 code_point, char* str) {
+ if (code_point <= kMaxCodePoint1) {
+ str[1] = '\0';
+ str[0] = static_cast<char>(code_point); // 0xxxxxxx
+ } else if (code_point <= kMaxCodePoint2) {
+ str[2] = '\0';
+ str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx
+ str[0] = static_cast<char>(0xC0 | code_point); // 110xxxxx
+ } else if (code_point <= kMaxCodePoint3) {
+ str[3] = '\0';
+ str[2] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx
+ str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx
+ str[0] = static_cast<char>(0xE0 | code_point); // 1110xxxx
+ } else if (code_point <= kMaxCodePoint4) {
+ str[4] = '\0';
+ str[3] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx
+ str[2] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx
+ str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx
+ str[0] = static_cast<char>(0xF0 | code_point); // 11110xxx
+ } else {
+    // The longest string String::Format can produce when invoked
+    // with these parameters is 28 characters long (not including
+    // the terminating nul character). We are asking for a 32-character
+    // buffer just in case. This is also enough for strncpy to
+    // null-terminate the destination string.
+ posix::StrNCpy(
+ str, String::Format("(Invalid Unicode 0x%X)", code_point).c_str(), 32);
+ str[31] = '\0'; // Makes sure no change in the format to strncpy leaves
+ // the result unterminated.
+ }
+ return str;
+}
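+
+// Illustrative calls (the code points are arbitrary examples; the buffer
+// size follows the contract above):
+//
+//   char buffer[32];
+//   CodePointToUtf8(0x41, buffer);     // buffer now holds "A"
+//   CodePointToUtf8(0xE9, buffer);     // "\xC3\xA9" (U+00E9, two bytes)
+//   CodePointToUtf8(0x1F600, buffer);  // "\xF0\x9F\x98\x80" (four bytes)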
+
+// The following two functions only make sense if the system
+// uses UTF-16 for wide string encoding. All supported systems
+// with 16-bit wchar_t (Windows, Cygwin, Symbian OS) do use UTF-16.
+
+// Determines if the arguments constitute a UTF-16 surrogate pair
+// and thus should be combined into a single Unicode code point
+// using CreateCodePointFromUtf16SurrogatePair.
+inline bool IsUtf16SurrogatePair(wchar_t first, wchar_t second) {
+ return sizeof(wchar_t) == 2 &&
+ (first & 0xFC00) == 0xD800 && (second & 0xFC00) == 0xDC00;
+}
+
+// Creates a Unicode code point from a UTF-16 surrogate pair.
+inline UInt32 CreateCodePointFromUtf16SurrogatePair(wchar_t first,
+ wchar_t second) {
+ const UInt32 mask = (1 << 10) - 1;
+ return (sizeof(wchar_t) == 2) ?
+ (((first & mask) << 10) | (second & mask)) + 0x10000 :
+ // This function should not be called when the condition is
+ // false, but we provide a sensible default in case it is.
+ static_cast<UInt32>(first);
+}
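+
+// For example, U+1F600 is stored in UTF-16 as the surrogate pair
+// 0xD83D 0xDE00.  The low 10 bits of the two units are 0x3D and 0x200,
+// and ((0x3D << 10) | 0x200) + 0x10000 == 0x1F600 recovers the code point.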
+
+// Converts a wide string to a narrow string in UTF-8 encoding.
+// The wide string is assumed to have the following encoding:
+// UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin, Symbian OS)
+// UTF-32 if sizeof(wchar_t) == 4 (on Linux)
+// Parameter str points to a null-terminated wide string.
+// Parameter num_chars may additionally limit the number
+// of wchar_t characters processed. -1 is used when the entire string
+// should be processed.
+// If the string contains code points that are not valid Unicode code points
+// (i.e. outside of Unicode range U+0 to U+10FFFF) they will be output
+// as '(Invalid Unicode 0xXXXXXXXX)'. If the string is in UTF-16 encoding
+// and contains invalid UTF-16 surrogate pairs, the values in those pairs
+// will be encoded as individual Unicode characters from the Basic
+// Multilingual Plane.
+String WideStringToUtf8(const wchar_t* str, int num_chars) {
+ if (num_chars == -1)
+ num_chars = static_cast<int>(wcslen(str));
+
+ StrStream stream;
+ for (int i = 0; i < num_chars; ++i) {
+ UInt32 unicode_code_point;
+
+ if (str[i] == L'\0') {
+ break;
+ } else if (i + 1 < num_chars && IsUtf16SurrogatePair(str[i], str[i + 1])) {
+ unicode_code_point = CreateCodePointFromUtf16SurrogatePair(str[i],
+ str[i + 1]);
+ i++;
+ } else {
+ unicode_code_point = static_cast<UInt32>(str[i]);
+ }
+
+ char buffer[32]; // CodePointToUtf8 requires a buffer this big.
+ stream << CodePointToUtf8(unicode_code_point, buffer);
+ }
+ return StrStreamToString(&stream);
+}
+
+// Converts a wide C string to a String using the UTF-8 encoding.
+// NULL will be converted to "(null)".
+String String::ShowWideCString(const wchar_t * wide_c_str) {
+ if (wide_c_str == NULL) return String("(null)");
+
+ return String(internal::WideStringToUtf8(wide_c_str, -1).c_str());
+}
+
+// Similar to ShowWideCString(), except that this function encloses
+// the converted string in double quotes.
+String String::ShowWideCStringQuoted(const wchar_t* wide_c_str) {
+ if (wide_c_str == NULL) return String("(null)");
+
+ return String::Format("L\"%s\"",
+ String::ShowWideCString(wide_c_str).c_str());
+}
+
+// Compares two wide C strings. Returns true iff they have the same
+// content.
+//
+// Unlike wcscmp(), this function can handle NULL argument(s). A NULL
+// C string is considered different to any non-NULL C string,
+// including the empty string.
+bool String::WideCStringEquals(const wchar_t * lhs, const wchar_t * rhs) {
+ if (lhs == NULL) return rhs == NULL;
+
+ if (rhs == NULL) return false;
+
+ return wcscmp(lhs, rhs) == 0;
+}
+
+// Helper function for *_STREQ on wide strings.
+AssertionResult CmpHelperSTREQ(const char* expected_expression,
+ const char* actual_expression,
+ const wchar_t* expected,
+ const wchar_t* actual) {
+ if (String::WideCStringEquals(expected, actual)) {
+ return AssertionSuccess();
+ }
+
+ return EqFailure(expected_expression,
+ actual_expression,
+ String::ShowWideCStringQuoted(expected),
+ String::ShowWideCStringQuoted(actual),
+ false);
+}
+
+// Helper function for *_STRNE on wide strings.
+AssertionResult CmpHelperSTRNE(const char* s1_expression,
+ const char* s2_expression,
+ const wchar_t* s1,
+ const wchar_t* s2) {
+ if (!String::WideCStringEquals(s1, s2)) {
+ return AssertionSuccess();
+ }
+
+ Message msg;
+ msg << "Expected: (" << s1_expression << ") != ("
+ << s2_expression << "), actual: "
+ << String::ShowWideCStringQuoted(s1)
+ << " vs " << String::ShowWideCStringQuoted(s2);
+ return AssertionFailure(msg);
+}
+
+// Compares two C strings, ignoring case. Returns true iff they have
+// the same content.
+//
+// Unlike strcasecmp(), this function can handle NULL argument(s). A
+// NULL C string is considered different to any non-NULL C string,
+// including the empty string.
+bool String::CaseInsensitiveCStringEquals(const char * lhs, const char * rhs) {
+ if (lhs == NULL)
+ return rhs == NULL;
+ if (rhs == NULL)
+ return false;
+ return posix::StrCaseCmp(lhs, rhs) == 0;
+}
+
+// Compares two wide C strings, ignoring case. Returns true iff they
+// have the same content.
+//
+// Unlike wcscasecmp(), this function can handle NULL argument(s).
+// A NULL C string is considered different to any non-NULL wide C string,
+// including the empty string.
+// NB: The implementations on different platforms differ slightly.
+// On Windows, this method uses _wcsicmp, which compares according to the
+// LC_CTYPE environment variable. On GNU platforms, it uses wcscasecmp,
+// which compares according to the LC_CTYPE category of the current
+// locale. On Mac OS X, it uses towlower, which also uses the LC_CTYPE
+// category of the current locale.
+bool String::CaseInsensitiveWideCStringEquals(const wchar_t* lhs,
+ const wchar_t* rhs) {
+ if ( lhs == NULL ) return rhs == NULL;
+
+ if ( rhs == NULL ) return false;
+
+#if GTEST_OS_WINDOWS
+ return _wcsicmp(lhs, rhs) == 0;
+#elif GTEST_OS_LINUX
+ return wcscasecmp(lhs, rhs) == 0;
+#else
+ // Mac OS X and Cygwin don't define wcscasecmp. Other unknown OSes
+ // may not define it either.
+ wint_t left, right;
+ do {
+ left = towlower(*lhs++);
+ right = towlower(*rhs++);
+ } while (left && left == right);
+ return left == right;
+#endif // OS selector
+}
+
+// Compares this with another String.
+// Returns < 0 if this is less than rhs, 0 if this is equal to rhs, or > 0
+// if this is greater than rhs.
+int String::Compare(const String & rhs) const {
+ const char* const lhs_c_str = c_str();
+ const char* const rhs_c_str = rhs.c_str();
+
+ if (lhs_c_str == NULL) {
+ return rhs_c_str == NULL ? 0 : -1; // NULL < anything except NULL
+ } else if (rhs_c_str == NULL) {
+ return 1;
+ }
+
+ const size_t shorter_str_len =
+ length() <= rhs.length() ? length() : rhs.length();
+ for (size_t i = 0; i != shorter_str_len; i++) {
+ if (lhs_c_str[i] < rhs_c_str[i]) {
+ return -1;
+ } else if (lhs_c_str[i] > rhs_c_str[i]) {
+ return 1;
+ }
+ }
+ return (length() < rhs.length()) ? -1 :
+ (length() > rhs.length()) ? 1 : 0;
+}
+
+// Returns true iff this String ends with the given suffix. *Any*
+// String is considered to end with a NULL or empty suffix.
+bool String::EndsWith(const char* suffix) const {
+ if (suffix == NULL || CStringEquals(suffix, "")) return true;
+
+ if (c_str() == NULL) return false;
+
+ const size_t this_len = strlen(c_str());
+ const size_t suffix_len = strlen(suffix);
+ return (this_len >= suffix_len) &&
+ CStringEquals(c_str() + this_len - suffix_len, suffix);
+}
+
+// Returns true iff this String ends with the given suffix, ignoring case.
+// Any String is considered to end with a NULL or empty suffix.
+bool String::EndsWithCaseInsensitive(const char* suffix) const {
+ if (suffix == NULL || CStringEquals(suffix, "")) return true;
+
+ if (c_str() == NULL) return false;
+
+ const size_t this_len = strlen(c_str());
+ const size_t suffix_len = strlen(suffix);
+ return (this_len >= suffix_len) &&
+ CaseInsensitiveCStringEquals(c_str() + this_len - suffix_len, suffix);
+}
+
+// Formats a list of arguments to a String, using the same format
+// spec string as for printf.
+//
+// We do not use the StringPrintf class as it is not universally
+// available.
+//
+// The result is limited to 4096 characters (including the trailing 0).
+// If 4096 characters are not enough to format the input, or if
+// there's an error, "<formatting error or buffer exceeded>" is
+// returned.
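+//
+// For example (illustrative): String::Format("%d %s", 3, "items") yields
+// "3 items", while a call whose result would not fit in the buffer yields
+// "<formatting error or buffer exceeded>".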
+String String::Format(const char * format, ...) {
+ va_list args;
+ va_start(args, format);
+
+ char buffer[4096];
+ const int kBufferSize = sizeof(buffer)/sizeof(buffer[0]);
+
+ // MSVC 8 deprecates vsnprintf(), so we want to suppress warning
+ // 4996 (deprecated function) there.
+#ifdef _MSC_VER // We are using MSVC.
+#pragma warning(push) // Saves the current warning state.
+#pragma warning(disable:4996) // Temporarily disables warning 4996.
+ const int size = vsnprintf(buffer, kBufferSize, format, args);
+#pragma warning(pop) // Restores the warning state.
+#else // We are not using MSVC.
+ const int size = vsnprintf(buffer, kBufferSize, format, args);
+#endif // _MSC_VER
+ va_end(args);
+
+ // vsnprintf()'s behavior is not portable. When the buffer is not
+ // big enough, it returns a negative value in MSVC, and returns the
+ // needed buffer size on Linux. When there is an output error, it
+ // always returns a negative value. For simplicity, we lump the two
+ // error cases together.
+ if (size < 0 || size >= kBufferSize) {
+ return String("<formatting error or buffer exceeded>");
+ } else {
+ return String(buffer, size);
+ }
+}
+
+// Converts the buffer in a StrStream to a String, converting NUL
+// bytes to "\\0" along the way.
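+//
+// For example (illustrative), a stream holding the three characters
+// 'a', '\0', 'b' is converted to the four-character String "a\\0b".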
+String StrStreamToString(StrStream* ss) {
+ const ::std::string& str = ss->str();
+ const char* const start = str.c_str();
+ const char* const end = start + str.length();
+
+ // We need to use a helper StrStream to do this transformation
+ // because String doesn't support push_back().
+ StrStream helper;
+ for (const char* ch = start; ch != end; ++ch) {
+ if (*ch == '\0') {
+ helper << "\\0"; // Replaces NUL with "\\0";
+ } else {
+ helper.put(*ch);
+ }
+ }
+
+ return String(helper.str().c_str());
+}
+
+// Appends the user-supplied message to the Google-Test-generated message.
+String AppendUserMessage(const String& gtest_msg,
+ const Message& user_msg) {
+ // Appends the user message if it's non-empty.
+ const String user_msg_string = user_msg.GetString();
+ if (user_msg_string.empty()) {
+ return gtest_msg;
+ }
+
+ Message msg;
+ msg << gtest_msg << "\n" << user_msg_string;
+
+ return msg.GetString();
+}
+
+} // namespace internal
+
+// class TestResult
+
+// Creates an empty TestResult.
+TestResult::TestResult()
+ : death_test_count_(0),
+ elapsed_time_(0) {
+}
+
+// D'tor.
+TestResult::~TestResult() {
+}
+
+// Returns the i-th test part result among all the results. i can
+// range from 0 to total_part_count() - 1. If i is not in that range,
+// aborts the program.
+const TestPartResult& TestResult::GetTestPartResult(int i) const {
+ if (i < 0 || i >= total_part_count())
+ internal::posix::Abort();
+ return test_part_results_.at(i);
+}
+
+// Returns the i-th test property. i can range from 0 to
+// test_property_count() - 1. If i is not in that range, aborts the
+// program.
+const TestProperty& TestResult::GetTestProperty(int i) const {
+ if (i < 0 || i >= test_property_count())
+ internal::posix::Abort();
+ return test_properties_.at(i);
+}
+
+// Clears the test part results.
+void TestResult::ClearTestPartResults() {
+ test_part_results_.clear();
+}
+
+// Adds a test part result to the list.
+void TestResult::AddTestPartResult(const TestPartResult& test_part_result) {
+ test_part_results_.push_back(test_part_result);
+}
+
+// Adds a test property to the list. If a property with the same key as the
+// supplied property is already represented, the value of this test_property
+// replaces the old value for that key.
+void TestResult::RecordProperty(const TestProperty& test_property) {
+ if (!ValidateTestProperty(test_property)) {
+ return;
+ }
+ internal::MutexLock lock(&test_properites_mutex_);
+ const std::vector<TestProperty>::iterator property_with_matching_key =
+ std::find_if(test_properties_.begin(), test_properties_.end(),
+ internal::TestPropertyKeyIs(test_property.key()));
+ if (property_with_matching_key == test_properties_.end()) {
+ test_properties_.push_back(test_property);
+ return;
+ }
+ property_with_matching_key->SetValue(test_property.value());
+}
+
+// Adds a failure if the key is a reserved attribute of Google Test
+// testcase tags. Returns true if the property is valid.
+bool TestResult::ValidateTestProperty(const TestProperty& test_property) {
+ internal::String key(test_property.key());
+ if (key == "name" || key == "status" || key == "time" || key == "classname") {
+ ADD_FAILURE()
+ << "Reserved key used in RecordProperty(): "
+ << key
+ << " ('name', 'status', 'time', and 'classname' are reserved by "
+ << GTEST_NAME_ << ")";
+ return false;
+ }
+ return true;
+}
+
+// Clears the object.
+void TestResult::Clear() {
+ test_part_results_.clear();
+ test_properties_.clear();
+ death_test_count_ = 0;
+ elapsed_time_ = 0;
+}
+
+// Returns true iff the test failed.
+bool TestResult::Failed() const {
+ for (int i = 0; i < total_part_count(); ++i) {
+ if (GetTestPartResult(i).failed())
+ return true;
+ }
+ return false;
+}
+
+// Returns true iff the test part fatally failed.
+static bool TestPartFatallyFailed(const TestPartResult& result) {
+ return result.fatally_failed();
+}
+
+// Returns true iff the test fatally failed.
+bool TestResult::HasFatalFailure() const {
+ return CountIf(test_part_results_, TestPartFatallyFailed) > 0;
+}
+
+// Returns true iff the test part non-fatally failed.
+static bool TestPartNonfatallyFailed(const TestPartResult& result) {
+ return result.nonfatally_failed();
+}
+
+// Returns true iff the test has a non-fatal failure.
+bool TestResult::HasNonfatalFailure() const {
+ return CountIf(test_part_results_, TestPartNonfatallyFailed) > 0;
+}
+
+// Gets the number of all test parts. This is the sum of the number
+// of successful test parts and the number of failed test parts.
+int TestResult::total_part_count() const {
+ return static_cast<int>(test_part_results_.size());
+}
+
+// Returns the number of the test properties.
+int TestResult::test_property_count() const {
+ return static_cast<int>(test_properties_.size());
+}
+
+// class Test
+
+// Creates a Test object.
+
+// The c'tor saves the values of all Google Test flags.
+Test::Test()
+ : gtest_flag_saver_(new internal::GTestFlagSaver) {
+}
+
+// The d'tor restores the values of all Google Test flags.
+Test::~Test() {
+ delete gtest_flag_saver_;
+}
+
+// Sets up the test fixture.
+//
+// A sub-class may override this.
+void Test::SetUp() {
+}
+
+// Tears down the test fixture.
+//
+// A sub-class may override this.
+void Test::TearDown() {
+}
+
+// Allows user supplied key value pairs to be recorded for later output.
+void Test::RecordProperty(const char* key, const char* value) {
+ UnitTest::GetInstance()->RecordPropertyForCurrentTest(key, value);
+}
+
+// Allows user supplied key value pairs to be recorded for later output.
+void Test::RecordProperty(const char* key, int value) {
+ Message value_message;
+ value_message << value;
+ RecordProperty(key, value_message.GetString().c_str());
+}
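+
+// An illustrative (hypothetical) use inside a test body; the recorded
+// key/value pairs show up as attributes in the XML test report:
+//
+//   TEST(WidgetTest, Capacity) {
+//     RecordProperty("MaximumWidgets", 12);
+//   }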
+
+namespace internal {
+
+void ReportFailureInUnknownLocation(TestPartResult::Type result_type,
+ const String& message) {
+ // This function is a friend of UnitTest and as such has access to
+ // AddTestPartResult.
+ UnitTest::GetInstance()->AddTestPartResult(
+ result_type,
+ NULL, // No info about the source file where the exception occurred.
+ -1, // We have no info on which line caused the exception.
+ message,
+ String()); // No stack trace, either.
+}
+
+} // namespace internal
+
+#if GTEST_OS_WINDOWS
+// We are on Windows.
+
+// Adds an "exception thrown" fatal failure to the current test.
+static void AddExceptionThrownFailure(DWORD exception_code,
+ const char* location) {
+ Message message;
+ message << "Exception thrown with code 0x" << std::setbase(16) <<
+ exception_code << std::setbase(10) << " in " << location << ".";
+
+ internal::ReportFailureInUnknownLocation(TestPartResult::kFatalFailure,
+ message.GetString());
+}
+
+#endif // GTEST_OS_WINDOWS
+
+// Google Test requires all tests in the same test case to use the same test
+// fixture class. This function checks if the current test has the
+// same fixture class as the first test in the current test case. If
+// yes, it returns true; otherwise it generates a Google Test failure and
+// returns false.
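+//
+// For example (illustrative), the following combination within one test
+// case is rejected, because FooTest names both a fixture-based and a
+// plain test case:
+//
+//   TEST_F(FooTest, DoesX) { ... }
+//   TEST(FooTest, DoesY) { ... }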
+bool Test::HasSameFixtureClass() {
+ internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+ const TestCase* const test_case = impl->current_test_case();
+
+ // Info about the first test in the current test case.
+ const internal::TestInfoImpl* const first_test_info =
+ test_case->test_info_list()[0]->impl();
+ const internal::TypeId first_fixture_id = first_test_info->fixture_class_id();
+ const char* const first_test_name = first_test_info->name();
+
+ // Info about the current test.
+ const internal::TestInfoImpl* const this_test_info =
+ impl->current_test_info()->impl();
+ const internal::TypeId this_fixture_id = this_test_info->fixture_class_id();
+ const char* const this_test_name = this_test_info->name();
+
+ if (this_fixture_id != first_fixture_id) {
+ // Is the first test defined using TEST?
+ const bool first_is_TEST = first_fixture_id == internal::GetTestTypeId();
+ // Is this test defined using TEST?
+ const bool this_is_TEST = this_fixture_id == internal::GetTestTypeId();
+
+ if (first_is_TEST || this_is_TEST) {
+ // The user mixed TEST and TEST_F in this test case - we'll tell
+ // him/her how to fix it.
+
+ // Gets the name of the TEST and the name of the TEST_F. Note
+ // that first_is_TEST and this_is_TEST cannot both be true, as
+ // the fixture IDs are different for the two tests.
+ const char* const TEST_name =
+ first_is_TEST ? first_test_name : this_test_name;
+ const char* const TEST_F_name =
+ first_is_TEST ? this_test_name : first_test_name;
+
+ ADD_FAILURE()
+ << "All tests in the same test case must use the same test fixture\n"
+ << "class, so mixing TEST_F and TEST in the same test case is\n"
+ << "illegal. In test case " << this_test_info->test_case_name()
+ << ",\n"
+ << "test " << TEST_F_name << " is defined using TEST_F but\n"
+ << "test " << TEST_name << " is defined using TEST. You probably\n"
+ << "want to change the TEST to TEST_F or move it to another test\n"
+ << "case.";
+ } else {
+ // The user defined two fixture classes with the same name in
+ // two namespaces - we'll tell him/her how to fix it.
+ ADD_FAILURE()
+ << "All tests in the same test case must use the same test fixture\n"
+ << "class. However, in test case "
+ << this_test_info->test_case_name() << ",\n"
+ << "you defined test " << first_test_name
+ << " and test " << this_test_name << "\n"
+ << "using two different test fixture classes. This can happen if\n"
+ << "the two classes are from different namespaces or translation\n"
+ << "units and have the same name. You should probably rename one\n"
+ << "of the classes to put the tests into different test cases.";
+ }
+ return false;
+ }
+
+ return true;
+}
+
+// Runs the test and updates the test result.
+void Test::Run() {
+ if (!HasSameFixtureClass()) return;
+
+ internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+#if GTEST_HAS_SEH
+ // Catch SEH-style exceptions.
+ impl->os_stack_trace_getter()->UponLeavingGTest();
+ __try {
+ SetUp();
+ } __except(internal::UnitTestOptions::GTestShouldProcessSEH(
+ GetExceptionCode())) {
+ AddExceptionThrownFailure(GetExceptionCode(), "SetUp()");
+ }
+
+ // We will run the test only if SetUp() had no fatal failure.
+ if (!HasFatalFailure()) {
+ impl->os_stack_trace_getter()->UponLeavingGTest();
+ __try {
+ TestBody();
+ } __except(internal::UnitTestOptions::GTestShouldProcessSEH(
+ GetExceptionCode())) {
+ AddExceptionThrownFailure(GetExceptionCode(), "the test body");
+ }
+ }
+
+ // However, we want to clean up as much as possible. Hence we will
+ // always call TearDown(), even if SetUp() or the test body has
+ // failed.
+ impl->os_stack_trace_getter()->UponLeavingGTest();
+ __try {
+ TearDown();
+ } __except(internal::UnitTestOptions::GTestShouldProcessSEH(
+ GetExceptionCode())) {
+ AddExceptionThrownFailure(GetExceptionCode(), "TearDown()");
+ }
+
+#else // We are on a compiler or platform that doesn't support SEH.
+ impl->os_stack_trace_getter()->UponLeavingGTest();
+ SetUp();
+
+ // We will run the test only if SetUp() was successful.
+ if (!HasFatalFailure()) {
+ impl->os_stack_trace_getter()->UponLeavingGTest();
+ TestBody();
+ }
+
+ // However, we want to clean up as much as possible. Hence we will
+ // always call TearDown(), even if SetUp() or the test body has
+ // failed.
+ impl->os_stack_trace_getter()->UponLeavingGTest();
+ TearDown();
+#endif // GTEST_HAS_SEH
+}
+
+
+// Returns true iff the current test has a fatal failure.
+bool Test::HasFatalFailure() {
+ return internal::GetUnitTestImpl()->current_test_result()->HasFatalFailure();
+}
+
+// Returns true iff the current test has a non-fatal failure.
+bool Test::HasNonfatalFailure() {
+ return internal::GetUnitTestImpl()->current_test_result()->
+ HasNonfatalFailure();
+}
+
+// class TestInfo
+
+// Constructs a TestInfo object. It assumes ownership of the test factory
+// object via impl_.
+TestInfo::TestInfo(const char* a_test_case_name,
+ const char* a_name,
+ const char* a_test_case_comment,
+ const char* a_comment,
+ internal::TypeId fixture_class_id,
+ internal::TestFactoryBase* factory) {
+ impl_ = new internal::TestInfoImpl(this, a_test_case_name, a_name,
+ a_test_case_comment, a_comment,
+ fixture_class_id, factory);
+}
+
+// Destructs a TestInfo object.
+TestInfo::~TestInfo() {
+ delete impl_;
+}
+
+namespace internal {
+
+// Creates a new TestInfo object and registers it with Google Test;
+// returns the created object.
+//
+// Arguments:
+//
+// test_case_name: name of the test case
+// name: name of the test
+// test_case_comment: a comment on the test case that will be included in
+// the test output
+// comment: a comment on the test that will be included in the
+// test output
+// fixture_class_id: ID of the test fixture class
+// set_up_tc: pointer to the function that sets up the test case
+// tear_down_tc: pointer to the function that tears down the test case
+// factory: pointer to the factory that creates a test object.
+// The newly created TestInfo instance will assume
+// ownership of the factory object.
+TestInfo* MakeAndRegisterTestInfo(
+ const char* test_case_name, const char* name,
+ const char* test_case_comment, const char* comment,
+ TypeId fixture_class_id,
+ SetUpTestCaseFunc set_up_tc,
+ TearDownTestCaseFunc tear_down_tc,
+ TestFactoryBase* factory) {
+ TestInfo* const test_info =
+ new TestInfo(test_case_name, name, test_case_comment, comment,
+ fixture_class_id, factory);
+ GetUnitTestImpl()->AddTestInfo(set_up_tc, tear_down_tc, test_info);
+ return test_info;
+}
+
+#if GTEST_HAS_PARAM_TEST
+void ReportInvalidTestCaseType(const char* test_case_name,
+ const char* file, int line) {
+ Message errors;
+ errors
+ << "Attempted redefinition of test case " << test_case_name << ".\n"
+ << "All tests in the same test case must use the same test fixture\n"
+ << "class. However, in test case " << test_case_name << ", you tried\n"
+ << "to define a test using a fixture class different from the one\n"
+ << "used earlier. This can happen if the two fixture classes are\n"
+ << "from different namespaces and have the same name. You should\n"
+ << "probably rename one of the classes to put the tests into different\n"
+ << "test cases.";
+
+ fprintf(stderr, "%s %s", FormatFileLocation(file, line).c_str(),
+ errors.GetString().c_str());
+}
+#endif // GTEST_HAS_PARAM_TEST
+
+} // namespace internal
+
+// Returns the test case name.
+const char* TestInfo::test_case_name() const {
+ return impl_->test_case_name();
+}
+
+// Returns the test name.
+const char* TestInfo::name() const {
+ return impl_->name();
+}
+
+// Returns the test case comment.
+const char* TestInfo::test_case_comment() const {
+ return impl_->test_case_comment();
+}
+
+// Returns the test comment.
+const char* TestInfo::comment() const {
+ return impl_->comment();
+}
+
+// Returns true if this test should run.
+bool TestInfo::should_run() const { return impl_->should_run(); }
+
+// Returns true if this test matches the user-specified filter.
+bool TestInfo::matches_filter() const { return impl_->matches_filter(); }
+
+// Returns the result of the test.
+const TestResult* TestInfo::result() const { return impl_->result(); }
+
+// Increments the number of death tests encountered in this test so
+// far.
+int TestInfo::increment_death_test_count() {
+ return impl_->result()->increment_death_test_count();
+}
+
+namespace {
+
+// A predicate that checks the test name of a TestInfo against a known
+// value.
+//
+// This is used for implementation of the TestCase class only. We put
+// it in the anonymous namespace to prevent polluting the outer
+// namespace.
+//
+// TestNameIs is copyable.
+class TestNameIs {
+ public:
+ // Constructor.
+ //
+ // TestNameIs has NO default constructor.
+ explicit TestNameIs(const char* name)
+ : name_(name) {}
+
+ // Returns true iff the test name of test_info matches name_.
+ bool operator()(const TestInfo * test_info) const {
+ return test_info && internal::String(test_info->name()).Compare(name_) == 0;
+ }
+
+ private:
+ internal::String name_;
+};
+
+} // namespace
+
+namespace internal {
+
+// This method expands all parameterized tests registered with macros TEST_P
+// and INSTANTIATE_TEST_CASE_P into regular tests and registers those.
+// This will be done just once during the program runtime.
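+//
+// The tests being expanded are the ones defined with TEST_P and
+// INSTANTIATE_TEST_CASE_P, e.g. (illustrative, hypothetical names):
+//
+//   class MyParamTest : public ::testing::TestWithParam<int> {};
+//   TEST_P(MyParamTest, IsNonNegative) { EXPECT_GE(GetParam(), 0); }
+//   INSTANTIATE_TEST_CASE_P(SmallValues, MyParamTest,
+//                           ::testing::Values(0, 1, 2));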
+void UnitTestImpl::RegisterParameterizedTests() {
+#if GTEST_HAS_PARAM_TEST
+ if (!parameterized_tests_registered_) {
+ parameterized_test_registry_.RegisterTests();
+ parameterized_tests_registered_ = true;
+ }
+#endif
+}
+
+// Creates the test object, runs it, records its result, and then
+// deletes it.
+void TestInfoImpl::Run() {
+ if (!should_run_) return;
+
+ // Tells UnitTest where to store test result.
+ UnitTestImpl* const impl = internal::GetUnitTestImpl();
+ impl->set_current_test_info(parent_);
+
+ TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();
+
+ // Notifies the unit test event listeners that a test is about to start.
+ repeater->OnTestStart(*parent_);
+
+ const TimeInMillis start = GetTimeInMillis();
+
+ impl->os_stack_trace_getter()->UponLeavingGTest();
+#if GTEST_HAS_SEH
+ // Catch SEH-style exceptions.
+ Test* test = NULL;
+
+ __try {
+ // Creates the test object.
+ test = factory_->CreateTest();
+ } __except(internal::UnitTestOptions::GTestShouldProcessSEH(
+ GetExceptionCode())) {
+ AddExceptionThrownFailure(GetExceptionCode(),
+ "the test fixture's constructor");
+ return;
+ }
+#else // We are on a compiler or platform that doesn't support SEH.
+
+ // TODO(wan): If test->Run() throws, test won't be deleted. This is
+ // not a problem now as we don't use exceptions. If we were to
+ // enable exceptions, we should revise the following to be
+ // exception-safe.
+
+ // Creates the test object.
+ Test* test = factory_->CreateTest();
+#endif // GTEST_HAS_SEH
+
+ // Runs the test only if the constructor of the test fixture didn't
+ // generate a fatal failure.
+ if (!Test::HasFatalFailure()) {
+ test->Run();
+ }
+
+ // Deletes the test object.
+ impl->os_stack_trace_getter()->UponLeavingGTest();
+ delete test;
+ test = NULL;
+
+ result_.set_elapsed_time(GetTimeInMillis() - start);
+
+ // Notifies the unit test event listener that a test has just finished.
+ repeater->OnTestEnd(*parent_);
+
+ // Tells UnitTest to stop associating assertion results to this
+ // test.
+ impl->set_current_test_info(NULL);
+}
+
+} // namespace internal
+
+// class TestCase
+
+// Gets the number of successful tests in this test case.
+int TestCase::successful_test_count() const {
+ return CountIf(test_info_list_, TestPassed);
+}
+
+// Gets the number of failed tests in this test case.
+int TestCase::failed_test_count() const {
+ return CountIf(test_info_list_, TestFailed);
+}
+
+// Gets the number of disabled tests in this test case.
+int TestCase::disabled_test_count() const {
+  return CountIf(test_info_list_, TestDisabled);
+}
+
+// Gets the number of tests in this test case that should run.
+int TestCase::test_to_run_count() const {
+ return CountIf(test_info_list_, ShouldRunTest);
+}
+
+// Gets the number of all tests.
+int TestCase::total_test_count() const {
+ return static_cast<int>(test_info_list_.size());
+}
+
+// Creates a TestCase with the given name.
+//
+// Arguments:
+//
+// name: name of the test case
+// set_up_tc: pointer to the function that sets up the test case
+// tear_down_tc: pointer to the function that tears down the test case
+TestCase::TestCase(const char* a_name, const char* a_comment,
+ Test::SetUpTestCaseFunc set_up_tc,
+ Test::TearDownTestCaseFunc tear_down_tc)
+ : name_(a_name),
+ comment_(a_comment),
+ set_up_tc_(set_up_tc),
+ tear_down_tc_(tear_down_tc),
+ should_run_(false),
+ elapsed_time_(0) {
+}
+
+// Destructor of TestCase.
+TestCase::~TestCase() {
+ // Deletes every Test in the collection.
+ ForEach(test_info_list_, internal::Delete<TestInfo>);
+}
+
+// Returns the i-th test among all the tests. i can range from 0 to
+// total_test_count() - 1. If i is not in that range, returns NULL.
+const TestInfo* TestCase::GetTestInfo(int i) const {
+ const int index = GetElementOr(test_indices_, i, -1);
+ return index < 0 ? NULL : test_info_list_[index];
+}
+
+// Returns the i-th test among all the tests. i can range from 0 to
+// total_test_count() - 1. If i is not in that range, returns NULL.
+TestInfo* TestCase::GetMutableTestInfo(int i) {
+ const int index = GetElementOr(test_indices_, i, -1);
+ return index < 0 ? NULL : test_info_list_[index];
+}
+
+// Adds a test to this test case. Will delete the test upon
+// destruction of the TestCase object.
+void TestCase::AddTestInfo(TestInfo * test_info) {
+ test_info_list_.push_back(test_info);
+ test_indices_.push_back(static_cast<int>(test_indices_.size()));
+}
+
+// Runs every test in this TestCase.
+void TestCase::Run() {
+ if (!should_run_) return;
+
+ internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+ impl->set_current_test_case(this);
+
+ TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();
+
+ repeater->OnTestCaseStart(*this);
+ impl->os_stack_trace_getter()->UponLeavingGTest();
+ set_up_tc_();
+
+ const internal::TimeInMillis start = internal::GetTimeInMillis();
+ for (int i = 0; i < total_test_count(); i++) {
+ GetMutableTestInfo(i)->impl()->Run();
+ }
+ elapsed_time_ = internal::GetTimeInMillis() - start;
+
+ impl->os_stack_trace_getter()->UponLeavingGTest();
+ tear_down_tc_();
+ repeater->OnTestCaseEnd(*this);
+ impl->set_current_test_case(NULL);
+}
+
+// Clears the results of all tests in this test case.
+void TestCase::ClearResult() {
+ ForEach(test_info_list_, internal::TestInfoImpl::ClearTestResult);
+}
+
+// Returns true iff test passed.
+bool TestCase::TestPassed(const TestInfo * test_info) {
+ const internal::TestInfoImpl* const impl = test_info->impl();
+ return impl->should_run() && impl->result()->Passed();
+}
+
+// Returns true iff test failed.
+bool TestCase::TestFailed(const TestInfo * test_info) {
+ const internal::TestInfoImpl* const impl = test_info->impl();
+ return impl->should_run() && impl->result()->Failed();
+}
+
+// Returns true iff test is disabled.
+bool TestCase::TestDisabled(const TestInfo * test_info) {
+ return test_info->impl()->is_disabled();
+}
+
+// Returns true if the given test should run.
+bool TestCase::ShouldRunTest(const TestInfo *test_info) {
+ return test_info->impl()->should_run();
+}
+
+// Shuffles the tests in this test case.
+void TestCase::ShuffleTests(internal::Random* random) {
+ Shuffle(random, &test_indices_);
+}
+
+// Restores the test order to before the first shuffle.
+void TestCase::UnshuffleTests() {
+ for (size_t i = 0; i < test_indices_.size(); i++) {
+ test_indices_[i] = static_cast<int>(i);
+ }
+}
+
+// Formats a countable noun. Depending on its quantity, either the
+// singular form or the plural form is used. e.g.
+//
+// FormatCountableNoun(1, "formula", "formulae") returns "1 formula".
+// FormatCountableNoun(5, "book", "books") returns "5 books".
+static internal::String FormatCountableNoun(int count,
+ const char * singular_form,
+ const char * plural_form) {
+ return internal::String::Format("%d %s", count,
+ count == 1 ? singular_form : plural_form);
+}
+
+// Formats the count of tests.
+static internal::String FormatTestCount(int test_count) {
+ return FormatCountableNoun(test_count, "test", "tests");
+}
+
+// Formats the count of test cases.
+static internal::String FormatTestCaseCount(int test_case_count) {
+ return FormatCountableNoun(test_case_count, "test case", "test cases");
+}
+
+// Converts a TestPartResult::Type enum to human-friendly string
+// representation. Both kNonFatalFailure and kFatalFailure are translated
+// to "Failure", as the user usually doesn't care about the difference
+// between the two when viewing the test result.
+static const char * TestPartResultTypeToString(TestPartResult::Type type) {
+ switch (type) {
+ case TestPartResult::kSuccess:
+ return "Success";
+
+ case TestPartResult::kNonFatalFailure:
+ case TestPartResult::kFatalFailure:
+#ifdef _MSC_VER
+ return "error: ";
+#else
+ return "Failure\n";
+#endif
+ }
+
+ return "Unknown result type";
+}
+
+// Prints a TestPartResult to a String.
+static internal::String PrintTestPartResultToString(
+ const TestPartResult& test_part_result) {
+ return (Message()
+ << internal::FormatFileLocation(test_part_result.file_name(),
+ test_part_result.line_number())
+ << " " << TestPartResultTypeToString(test_part_result.type())
+ << test_part_result.message()).GetString();
+}
+
+// Prints a TestPartResult.
+static void PrintTestPartResult(const TestPartResult& test_part_result) {
+ const internal::String& result =
+ PrintTestPartResultToString(test_part_result);
+ printf("%s\n", result.c_str());
+ fflush(stdout);
+ // If the test program runs in Visual Studio or a debugger, the
+ // following statements add the test part result message to the Output
+ // window such that the user can double-click on it to jump to the
+ // corresponding source code location; otherwise they do nothing.
+#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
+ // We don't call OutputDebugString*() on Windows Mobile, as printing
+ // to stdout is done by OutputDebugString() there already - we don't
+ // want the same message printed twice.
+ ::OutputDebugStringA(result.c_str());
+ ::OutputDebugStringA("\n");
+#endif
+}
+
+// class PrettyUnitTestResultPrinter
+
+namespace internal {
+
+enum GTestColor {
+ COLOR_DEFAULT,
+ COLOR_RED,
+ COLOR_GREEN,
+ COLOR_YELLOW
+};
+
+#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
+
+// Returns the character attribute for the given color.
+WORD GetColorAttribute(GTestColor color) {
+ switch (color) {
+ case COLOR_RED: return FOREGROUND_RED;
+ case COLOR_GREEN: return FOREGROUND_GREEN;
+ case COLOR_YELLOW: return FOREGROUND_RED | FOREGROUND_GREEN;
+ default: return 0;
+ }
+}
+
+#else
+
+// Returns the ANSI color code for the given color. COLOR_DEFAULT is
+// an invalid input.
+const char* GetAnsiColorCode(GTestColor color) {
+ switch (color) {
+ case COLOR_RED: return "1";
+ case COLOR_GREEN: return "2";
+ case COLOR_YELLOW: return "3";
+ default: return NULL;
+ };
+}
+
+#endif // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
+
+// Returns true iff Google Test should use colors in the output.
+bool ShouldUseColor(bool stdout_is_tty) {
+ const char* const gtest_color = GTEST_FLAG(color).c_str();
+
+ if (String::CaseInsensitiveCStringEquals(gtest_color, "auto")) {
+#if GTEST_OS_WINDOWS
+ // On Windows the TERM variable is usually not set, but the
+ // console there does support colors.
+ return stdout_is_tty;
+#else
+ // On non-Windows platforms, we rely on the TERM variable.
+ const char* const term = posix::GetEnv("TERM");
+ const bool term_supports_color =
+ String::CStringEquals(term, "xterm") ||
+ String::CStringEquals(term, "xterm-color") ||
+ String::CStringEquals(term, "xterm-256color") ||
+ String::CStringEquals(term, "linux") ||
+ String::CStringEquals(term, "cygwin");
+ return stdout_is_tty && term_supports_color;
+#endif // GTEST_OS_WINDOWS
+ }
+
+ return String::CaseInsensitiveCStringEquals(gtest_color, "yes") ||
+ String::CaseInsensitiveCStringEquals(gtest_color, "true") ||
+ String::CaseInsensitiveCStringEquals(gtest_color, "t") ||
+ String::CStringEquals(gtest_color, "1");
+ // We take "yes", "true", "t", and "1" as meaning "yes". If the
+ // value is neither one of these nor "auto", we treat it as "no" to
+ // be conservative.
+}
+
+// Helpers for printing colored strings to stdout. Note that on Windows, we
+// cannot simply emit special characters and have the terminal change colors.
+// This routine must actually emit the characters rather than return a string
+// that would be colored when printed, as can be done on Linux.
+void ColoredPrintf(GTestColor color, const char* fmt, ...) {
+ va_list args;
+ va_start(args, fmt);
+
+#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_SYMBIAN || GTEST_OS_ZOS
+ const bool use_color = false;
+#else
+ static const bool in_color_mode =
+ ShouldUseColor(posix::IsATTY(posix::FileNo(stdout)) != 0);
+ const bool use_color = in_color_mode && (color != COLOR_DEFAULT);
+#endif // GTEST_OS_WINDOWS_MOBILE || GTEST_OS_SYMBIAN || GTEST_OS_ZOS
+ // The '!= 0' comparison is necessary to satisfy MSVC 7.1.
+
+ if (!use_color) {
+ vprintf(fmt, args);
+ va_end(args);
+ return;
+ }
+
+#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
+ const HANDLE stdout_handle = GetStdHandle(STD_OUTPUT_HANDLE);
+
+ // Gets the current text color.
+ CONSOLE_SCREEN_BUFFER_INFO buffer_info;
+ GetConsoleScreenBufferInfo(stdout_handle, &buffer_info);
+ const WORD old_color_attrs = buffer_info.wAttributes;
+
+ // We need to flush the stream buffers into the console before each
+ // SetConsoleTextAttribute call lest it affect the text that is already
+ // printed but has not yet reached the console.
+ fflush(stdout);
+ SetConsoleTextAttribute(stdout_handle,
+ GetColorAttribute(color) | FOREGROUND_INTENSITY);
+ vprintf(fmt, args);
+
+ fflush(stdout);
+ // Restores the text color.
+ SetConsoleTextAttribute(stdout_handle, old_color_attrs);
+#else
+ printf("\033[0;3%sm", GetAnsiColorCode(color));
+ vprintf(fmt, args);
+ printf("\033[m"); // Resets the terminal to default.
+#endif // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
+ va_end(args);
+}
+
+// This class implements the TestEventListener interface.
+//
+// Class PrettyUnitTestResultPrinter is copyable.
+class PrettyUnitTestResultPrinter : public TestEventListener {
+ public:
+ PrettyUnitTestResultPrinter() {}
+ static void PrintTestName(const char * test_case, const char * test) {
+ printf("%s.%s", test_case, test);
+ }
+
+ // The following methods override what's in the TestEventListener class.
+ virtual void OnTestProgramStart(const UnitTest& /*unit_test*/) {}
+ virtual void OnTestIterationStart(const UnitTest& unit_test, int iteration);
+ virtual void OnEnvironmentsSetUpStart(const UnitTest& unit_test);
+ virtual void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) {}
+ virtual void OnTestCaseStart(const TestCase& test_case);
+ virtual void OnTestStart(const TestInfo& test_info);
+ virtual void OnTestPartResult(const TestPartResult& result);
+ virtual void OnTestEnd(const TestInfo& test_info);
+ virtual void OnTestCaseEnd(const TestCase& test_case);
+ virtual void OnEnvironmentsTearDownStart(const UnitTest& unit_test);
+ virtual void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) {}
+ virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration);
+ virtual void OnTestProgramEnd(const UnitTest& /*unit_test*/) {}
+
+ private:
+ static void PrintFailedTests(const UnitTest& unit_test);
+
+ internal::String test_case_name_;
+};
+
+ // Fired before each iteration of tests starts.
+void PrettyUnitTestResultPrinter::OnTestIterationStart(
+ const UnitTest& unit_test, int iteration) {
+ if (GTEST_FLAG(repeat) != 1)
+ printf("\nRepeating all tests (iteration %d) . . .\n\n", iteration + 1);
+
+ const char* const filter = GTEST_FLAG(filter).c_str();
+
+ // Prints the filter if it's not *. This reminds the user that some
+ // tests may be skipped.
+ if (!internal::String::CStringEquals(filter, kUniversalFilter)) {
+ ColoredPrintf(COLOR_YELLOW,
+ "Note: %s filter = %s\n", GTEST_NAME_, filter);
+ }
+
+ if (internal::ShouldShard(kTestTotalShards, kTestShardIndex, false)) {
+ ColoredPrintf(COLOR_YELLOW,
+ "Note: This is test shard %s of %s.\n",
+ internal::posix::GetEnv(kTestShardIndex),
+ internal::posix::GetEnv(kTestTotalShards));
+ }
+
+ if (GTEST_FLAG(shuffle)) {
+ ColoredPrintf(COLOR_YELLOW,
+ "Note: Randomizing tests' orders with a seed of %d .\n",
+ unit_test.random_seed());
+ }
+
+ ColoredPrintf(COLOR_GREEN, "[==========] ");
+ printf("Running %s from %s.\n",
+ FormatTestCount(unit_test.test_to_run_count()).c_str(),
+ FormatTestCaseCount(unit_test.test_case_to_run_count()).c_str());
+ fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnEnvironmentsSetUpStart(
+ const UnitTest& /*unit_test*/) {
+ ColoredPrintf(COLOR_GREEN, "[----------] ");
+ printf("Global test environment set-up.\n");
+ fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnTestCaseStart(const TestCase& test_case) {
+ test_case_name_ = test_case.name();
+ const internal::String counts =
+ FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
+ ColoredPrintf(COLOR_GREEN, "[----------] ");
+ printf("%s from %s", counts.c_str(), test_case_name_.c_str());
+ if (test_case.comment()[0] == '\0') {
+ printf("\n");
+ } else {
+ printf(", where %s\n", test_case.comment());
+ }
+ fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) {
+ ColoredPrintf(COLOR_GREEN, "[ RUN ] ");
+ PrintTestName(test_case_name_.c_str(), test_info.name());
+ if (test_info.comment()[0] == '\0') {
+ printf("\n");
+ } else {
+ printf(", where %s\n", test_info.comment());
+ }
+ fflush(stdout);
+}
+
+// Called after an assertion failure.
+void PrettyUnitTestResultPrinter::OnTestPartResult(
+ const TestPartResult& result) {
+ // If the test part succeeded, we don't need to do anything.
+ if (result.type() == TestPartResult::kSuccess)
+ return;
+
+ // Print failure message from the assertion (e.g. expected this and got that).
+ PrintTestPartResult(result);
+ fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {
+ if (test_info.result()->Passed()) {
+ ColoredPrintf(COLOR_GREEN, "[ OK ] ");
+ } else {
+ ColoredPrintf(COLOR_RED, "[ FAILED ] ");
+ }
+ PrintTestName(test_case_name_.c_str(), test_info.name());
+ if (GTEST_FLAG(print_time)) {
+ printf(" (%s ms)\n", internal::StreamableToString(
+ test_info.result()->elapsed_time()).c_str());
+ } else {
+ printf("\n");
+ }
+ fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnTestCaseEnd(const TestCase& test_case) {
+ if (!GTEST_FLAG(print_time)) return;
+
+ test_case_name_ = test_case.name();
+ const internal::String counts =
+ FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
+ ColoredPrintf(COLOR_GREEN, "[----------] ");
+ printf("%s from %s (%s ms total)\n\n",
+ counts.c_str(), test_case_name_.c_str(),
+ internal::StreamableToString(test_case.elapsed_time()).c_str());
+ fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnEnvironmentsTearDownStart(
+ const UnitTest& /*unit_test*/) {
+ ColoredPrintf(COLOR_GREEN, "[----------] ");
+ printf("Global test environment tear-down\n");
+ fflush(stdout);
+}
+
+// Internal helper for printing the list of failed tests.
+void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
+ const int failed_test_count = unit_test.failed_test_count();
+ if (failed_test_count == 0) {
+ return;
+ }
+
+ for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
+ const TestCase& test_case = *unit_test.GetTestCase(i);
+ if (!test_case.should_run() || (test_case.failed_test_count() == 0)) {
+ continue;
+ }
+ for (int j = 0; j < test_case.total_test_count(); ++j) {
+ const TestInfo& test_info = *test_case.GetTestInfo(j);
+ if (!test_info.should_run() || test_info.result()->Passed()) {
+ continue;
+ }
+ ColoredPrintf(COLOR_RED, "[ FAILED ] ");
+ printf("%s.%s", test_case.name(), test_info.name());
+ if (test_case.comment()[0] != '\0' ||
+ test_info.comment()[0] != '\0') {
+ printf(", where %s", test_case.comment());
+ if (test_case.comment()[0] != '\0' &&
+ test_info.comment()[0] != '\0') {
+ printf(" and ");
+ }
+ }
+ printf("%s\n", test_info.comment());
+ }
+ }
+}
+
+ void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
+ int /*iteration*/) {
+ ColoredPrintf(COLOR_GREEN, "[==========] ");
+ printf("%s from %s ran.",
+ FormatTestCount(unit_test.test_to_run_count()).c_str(),
+ FormatTestCaseCount(unit_test.test_case_to_run_count()).c_str());
+ if (GTEST_FLAG(print_time)) {
+ printf(" (%s ms total)",
+ internal::StreamableToString(unit_test.elapsed_time()).c_str());
+ }
+ printf("\n");
+ ColoredPrintf(COLOR_GREEN, "[ PASSED ] ");
+ printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());
+
+ int num_failures = unit_test.failed_test_count();
+ if (!unit_test.Passed()) {
+ const int failed_test_count = unit_test.failed_test_count();
+ ColoredPrintf(COLOR_RED, "[ FAILED ] ");
+ printf("%s, listed below:\n", FormatTestCount(failed_test_count).c_str());
+ PrintFailedTests(unit_test);
+ printf("\n%2d FAILED %s\n", num_failures,
+ num_failures == 1 ? "TEST" : "TESTS");
+ }
+
+ int num_disabled = unit_test.disabled_test_count();
+ if (num_disabled && !GTEST_FLAG(also_run_disabled_tests)) {
+ if (!num_failures) {
+ printf("\n"); // Add a spacer if no FAILURE banner is displayed.
+ }
+ ColoredPrintf(COLOR_YELLOW,
+ " YOU HAVE %d DISABLED %s\n\n",
+ num_disabled,
+ num_disabled == 1 ? "TEST" : "TESTS");
+ }
+ // Ensure that Google Test output is printed before, e.g., heapchecker output.
+ fflush(stdout);
+}
+
+// End PrettyUnitTestResultPrinter
+
+// class TestEventRepeater
+//
+// This class forwards events to other event listeners.
+class TestEventRepeater : public TestEventListener {
+ public:
+ TestEventRepeater() : forwarding_enabled_(true) {}
+ virtual ~TestEventRepeater();
+ void Append(TestEventListener *listener);
+ TestEventListener* Release(TestEventListener* listener);
+
+ // Controls whether events will be forwarded to listeners_. Set to false
+ // in death test child processes.
+ bool forwarding_enabled() const { return forwarding_enabled_; }
+ void set_forwarding_enabled(bool enable) { forwarding_enabled_ = enable; }
+
+ virtual void OnTestProgramStart(const UnitTest& unit_test);
+ virtual void OnTestIterationStart(const UnitTest& unit_test, int iteration);
+ virtual void OnEnvironmentsSetUpStart(const UnitTest& unit_test);
+ virtual void OnEnvironmentsSetUpEnd(const UnitTest& unit_test);
+ virtual void OnTestCaseStart(const TestCase& test_case);
+ virtual void OnTestStart(const TestInfo& test_info);
+ virtual void OnTestPartResult(const TestPartResult& result);
+ virtual void OnTestEnd(const TestInfo& test_info);
+ virtual void OnTestCaseEnd(const TestCase& test_case);
+ virtual void OnEnvironmentsTearDownStart(const UnitTest& unit_test);
+ virtual void OnEnvironmentsTearDownEnd(const UnitTest& unit_test);
+ virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration);
+ virtual void OnTestProgramEnd(const UnitTest& unit_test);
+
+ private:
+ // Controls whether events will be forwarded to listeners_. Set to false
+ // in death test child processes.
+ bool forwarding_enabled_;
+ // The list of listeners that receive events.
+ std::vector<TestEventListener*> listeners_;
+
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(TestEventRepeater);
+};
+
+TestEventRepeater::~TestEventRepeater() {
+ ForEach(listeners_, Delete<TestEventListener>);
+}
+
+void TestEventRepeater::Append(TestEventListener *listener) {
+ listeners_.push_back(listener);
+}
+
+// TODO(vladl@google.com): Factor the search functionality into Vector::Find.
+TestEventListener* TestEventRepeater::Release(TestEventListener *listener) {
+ for (size_t i = 0; i < listeners_.size(); ++i) {
+ if (listeners_[i] == listener) {
+ listeners_.erase(listeners_.begin() + i);
+ return listener;
+ }
+ }
+
+ return NULL;
+}
+
+// Since most methods are very similar, use macros to reduce boilerplate.
+// This defines a member that forwards the call to all listeners.
+#define GTEST_REPEATER_METHOD_(Name, Type) \
+void TestEventRepeater::Name(const Type& parameter) { \
+ if (forwarding_enabled_) { \
+ for (size_t i = 0; i < listeners_.size(); i++) { \
+ listeners_[i]->Name(parameter); \
+ } \
+ } \
+}
+// This defines a member that forwards the call to all listeners in reverse
+// order.
+#define GTEST_REVERSE_REPEATER_METHOD_(Name, Type) \
+void TestEventRepeater::Name(const Type& parameter) { \
+ if (forwarding_enabled_) { \
+ for (int i = static_cast<int>(listeners_.size()) - 1; i >= 0; i--) { \
+ listeners_[i]->Name(parameter); \
+ } \
+ } \
+}
+
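+// For instance, the invocation GTEST_REPEATER_METHOD_(OnTestStart, TestInfo)
+// below expands (roughly) to:
+//
+//   void TestEventRepeater::OnTestStart(const TestInfo& parameter) {
+//     if (forwarding_enabled_) {
+//       for (size_t i = 0; i < listeners_.size(); i++) {
+//         listeners_[i]->OnTestStart(parameter);
+//       }
+//     }
+//   }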
+GTEST_REPEATER_METHOD_(OnTestProgramStart, UnitTest)
+GTEST_REPEATER_METHOD_(OnEnvironmentsSetUpStart, UnitTest)
+GTEST_REPEATER_METHOD_(OnTestCaseStart, TestCase)
+GTEST_REPEATER_METHOD_(OnTestStart, TestInfo)
+GTEST_REPEATER_METHOD_(OnTestPartResult, TestPartResult)
+GTEST_REPEATER_METHOD_(OnEnvironmentsTearDownStart, UnitTest)
+GTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsSetUpEnd, UnitTest)
+GTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsTearDownEnd, UnitTest)
+GTEST_REVERSE_REPEATER_METHOD_(OnTestEnd, TestInfo)
+GTEST_REVERSE_REPEATER_METHOD_(OnTestCaseEnd, TestCase)
+GTEST_REVERSE_REPEATER_METHOD_(OnTestProgramEnd, UnitTest)
+
+#undef GTEST_REPEATER_METHOD_
+#undef GTEST_REVERSE_REPEATER_METHOD_
+
+void TestEventRepeater::OnTestIterationStart(const UnitTest& unit_test,
+ int iteration) {
+ if (forwarding_enabled_) {
+ for (size_t i = 0; i < listeners_.size(); i++) {
+ listeners_[i]->OnTestIterationStart(unit_test, iteration);
+ }
+ }
+}
+
+void TestEventRepeater::OnTestIterationEnd(const UnitTest& unit_test,
+ int iteration) {
+ if (forwarding_enabled_) {
+ for (int i = static_cast<int>(listeners_.size()) - 1; i >= 0; i--) {
+ listeners_[i]->OnTestIterationEnd(unit_test, iteration);
+ }
+ }
+}
+
+// End TestEventRepeater
+
+// This class generates an XML output file.
+class XmlUnitTestResultPrinter : public EmptyTestEventListener {
+ public:
+ explicit XmlUnitTestResultPrinter(const char* output_file);
+
+ virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration);
+
+ private:
+ // Is c a whitespace character that is normalized to a space character
+ // when it appears in an XML attribute value?
+ static bool IsNormalizableWhitespace(char c) {
+ return c == 0x9 || c == 0xA || c == 0xD;
+ }
+
+ // May c appear in a well-formed XML document?
+ static bool IsValidXmlCharacter(char c) {
+ return IsNormalizableWhitespace(c) || c >= 0x20;
+ }
+
+ // Returns an XML-escaped copy of the input string str. If
+ // is_attribute is true, the text is meant to appear as an attribute
+ // value, and normalizable whitespace is preserved by replacing it
+ // with character references.
+ static String EscapeXml(const char* str, bool is_attribute);
+
+ // Returns the given string with all characters invalid in XML removed.
+ static String RemoveInvalidXmlCharacters(const char* str);
+
+ // Convenience wrapper around EscapeXml when str is an attribute value.
+ static String EscapeXmlAttribute(const char* str) {
+ return EscapeXml(str, true);
+ }
+
+ // Convenience wrapper around EscapeXml when str is not an attribute value.
+ static String EscapeXmlText(const char* str) { return EscapeXml(str, false); }
+
+ // Streams an XML CDATA section, escaping invalid CDATA sequences as needed.
+ static void OutputXmlCDataSection(::std::ostream* stream, const char* data);
+
+ // Streams an XML representation of a TestInfo object.
+ static void OutputXmlTestInfo(::std::ostream* stream,
+ const char* test_case_name,
+ const TestInfo& test_info);
+
+ // Prints an XML representation of a TestCase object
+ static void PrintXmlTestCase(FILE* out, const TestCase& test_case);
+
+ // Prints an XML summary of unit_test to output stream out.
+ static void PrintXmlUnitTest(FILE* out, const UnitTest& unit_test);
+
+ // Produces a string representing the test properties in a result as space
+ // delimited XML attributes based on the property key="value" pairs.
+ // When the String is not empty, it includes a space at the beginning,
+ // to delimit this attribute from prior attributes.
+ static String TestPropertiesAsXmlAttributes(const TestResult& result);
+
+ // The output file.
+ const String output_file_;
+
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(XmlUnitTestResultPrinter);
+};
+
+// Creates a new XmlUnitTestResultPrinter.
+XmlUnitTestResultPrinter::XmlUnitTestResultPrinter(const char* output_file)
+ : output_file_(output_file) {
+ if (output_file_.c_str() == NULL || output_file_.empty()) {
+ fprintf(stderr, "XML output file may not be null\n");
+ fflush(stderr);
+ exit(EXIT_FAILURE);
+ }
+}
+
+// Called after the unit test ends.
+void XmlUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
+ int /*iteration*/) {
+ FILE* xmlout = NULL;
+ FilePath output_file(output_file_);
+ FilePath output_dir(output_file.RemoveFileName());
+
+ if (output_dir.CreateDirectoriesRecursively()) {
+ xmlout = posix::FOpen(output_file_.c_str(), "w");
+ }
+ if (xmlout == NULL) {
+ // TODO(wan): report the reason of the failure.
+ //
+ // We don't do it for now as:
+ //
+ // 1. There is no urgent need for it.
+ // 2. It's a bit involved to make the errno variable thread-safe on
+ // all three operating systems (Linux, Windows, and Mac OS).
+ // 3. To interpret the meaning of errno in a thread-safe way,
+ // we need the strerror_r() function, which is not available on
+ // Windows.
+ fprintf(stderr,
+ "Unable to open file \"%s\"\n",
+ output_file_.c_str());
+ fflush(stderr);
+ exit(EXIT_FAILURE);
+ }
+ PrintXmlUnitTest(xmlout, unit_test);
+ fclose(xmlout);
+}
+
+// Returns an XML-escaped copy of the input string str. If is_attribute
+// is true, the text is meant to appear as an attribute value, and
+// normalizable whitespace is preserved by replacing it with character
+// references.
+//
+// Invalid XML characters in str, if any, are stripped from the output.
+// It is expected that most, if not all, of the text processed by this
+// module will consist of ordinary English text.
+// If this module is ever modified to produce version 1.1 XML output,
+// most invalid characters can be retained using character references.
+// TODO(wan): It might be nice to have a minimally invasive, human-readable
+// escaping scheme for invalid characters, rather than dropping them.
+String XmlUnitTestResultPrinter::EscapeXml(const char* str, bool is_attribute) {
+ Message m;
+
+ if (str != NULL) {
+ for (const char* src = str; *src; ++src) {
+ switch (*src) {
+ case '<':
+ m << "&lt;";
+ break;
+ case '>':
+ m << "&gt;";
+ break;
+ case '&':
+ m << "&amp;";
+ break;
+ case '\'':
+ if (is_attribute)
+ m << "&apos;";
+ else
+ m << '\'';
+ break;
+ case '"':
+ if (is_attribute)
+ m << "&quot;";
+ else
+ m << '"';
+ break;
+ default:
+ if (IsValidXmlCharacter(*src)) {
+ if (is_attribute && IsNormalizableWhitespace(*src))
+ m << String::Format("&#x%02X;", unsigned(*src));
+ else
+ m << *src;
+ }
+ break;
+ }
+ }
+ }
+
+ return m.GetString();
+}
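+
+// For example, EscapeXml("say \"hi\" & <bye>", true) returns
+// "say &quot;hi&quot; &amp; &lt;bye&gt;", and a tab character in an
+// attribute value is emitted as the character reference "&#x09;".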
+
+// Returns the given string with all characters invalid in XML removed.
+// Currently invalid characters are dropped from the string. An
+// alternative is to replace them with certain characters such as . or ?.
+String XmlUnitTestResultPrinter::RemoveInvalidXmlCharacters(const char* str) {
+ char* const output = new char[strlen(str) + 1];
+ char* appender = output;
+ for (char ch = *str; ch != '\0'; ch = *++str)
+ if (IsValidXmlCharacter(ch))
+ *appender++ = ch;
+ *appender = '\0';
+
+ String ret_value(output);
+ delete[] output;
+ return ret_value;
+}
+
+// The following routines generate an XML representation of a UnitTest
+// object.
+//
+// This is how Google Test concepts map to the DTD:
+//
+// <testsuites name="AllTests"> <-- corresponds to a UnitTest object
+// <testsuite name="testcase-name"> <-- corresponds to a TestCase object
+// <testcase name="test-name"> <-- corresponds to a TestInfo object
+// <failure message="...">...</failure>
+// <failure message="...">...</failure>
+// <failure message="...">...</failure>
+// <-- individual assertion failures
+// </testcase>
+// </testsuite>
+// </testsuites>
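+//
+// For instance, a run with one failing test might produce a report along
+// these lines (illustrative only; the exact attributes are emitted by the
+// Print/Output functions below):
+//
+//   <testsuites tests="1" failures="1" disabled="0" errors="0" time="0.005"
+//               name="AllTests">
+//     <testsuite name="FooTest" tests="1" failures="1" disabled="0"
+//                errors="0" time="0.005">
+//       <testcase name="Bar" status="run" time="0.005" classname="FooTest">
+//         <failure message="..." type=""><![CDATA[...]]></failure>
+//       </testcase>
+//     </testsuite>
+//   </testsuites>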
+
+// Formats the given time in milliseconds as seconds.
+std::string FormatTimeInMillisAsSeconds(TimeInMillis ms) {
+ ::std::stringstream ss;
+ ss << ms/1000.0;
+ return ss.str();
+}
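+
+// For example, FormatTimeInMillisAsSeconds(2005) returns "2.005" and
+// FormatTimeInMillisAsSeconds(3000) returns "3".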
+
+// Streams an XML CDATA section, escaping invalid CDATA sequences as needed.
+void XmlUnitTestResultPrinter::OutputXmlCDataSection(::std::ostream* stream,
+ const char* data) {
+ const char* segment = data;
+ *stream << "<![CDATA[";
+ for (;;) {
+ const char* const next_segment = strstr(segment, "]]>");
+ if (next_segment != NULL) {
+ stream->write(
+ segment, static_cast<std::streamsize>(next_segment - segment));
+ *stream << "]]>]]&gt;<![CDATA[";
+ segment = next_segment + strlen("]]>");
+ } else {
+ *stream << segment;
+ break;
+ }
+ }
+ *stream << "]]>";
+}
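+
+// For example, OutputXmlCDataSection() turns the data "ab]]>cd" into
+//
+//   <![CDATA[ab]]>]]&gt;<![CDATA[cd]]>
+//
+// so that the embedded "]]>" cannot terminate the CDATA section early.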
+
+// Prints an XML representation of a TestInfo object.
+// TODO(wan): There is also value in printing properties with the plain printer.
+void XmlUnitTestResultPrinter::OutputXmlTestInfo(::std::ostream* stream,
+ const char* test_case_name,
+ const TestInfo& test_info) {
+ const TestResult& result = *test_info.result();
+ *stream << " <testcase name=\""
+ << EscapeXmlAttribute(test_info.name()).c_str()
+ << "\" status=\""
+ << (test_info.should_run() ? "run" : "notrun")
+ << "\" time=\""
+ << FormatTimeInMillisAsSeconds(result.elapsed_time())
+ << "\" classname=\"" << EscapeXmlAttribute(test_case_name).c_str()
+ << "\"" << TestPropertiesAsXmlAttributes(result).c_str();
+
+ int failures = 0;
+ for (int i = 0; i < result.total_part_count(); ++i) {
+ const TestPartResult& part = result.GetTestPartResult(i);
+ if (part.failed()) {
+ if (++failures == 1)
+ *stream << ">\n";
+ *stream << " <failure message=\""
+ << EscapeXmlAttribute(part.summary()).c_str()
+ << "\" type=\"\">";
+ const String message = RemoveInvalidXmlCharacters(String::Format(
+ "%s:%d\n%s",
+ part.file_name(), part.line_number(),
+ part.message()).c_str());
+ OutputXmlCDataSection(stream, message.c_str());
+ *stream << "</failure>\n";
+ }
+ }
+
+ if (failures == 0)
+ *stream << " />\n";
+ else
+ *stream << " </testcase>\n";
+}
+
+// Prints an XML representation of a TestCase object
+void XmlUnitTestResultPrinter::PrintXmlTestCase(FILE* out,
+ const TestCase& test_case) {
+ fprintf(out,
+ " <testsuite name=\"%s\" tests=\"%d\" failures=\"%d\" "
+ "disabled=\"%d\" ",
+ EscapeXmlAttribute(test_case.name()).c_str(),
+ test_case.total_test_count(),
+ test_case.failed_test_count(),
+ test_case.disabled_test_count());
+ fprintf(out,
+ "errors=\"0\" time=\"%s\">\n",
+ FormatTimeInMillisAsSeconds(test_case.elapsed_time()).c_str());
+ for (int i = 0; i < test_case.total_test_count(); ++i) {
+ StrStream stream;
+ OutputXmlTestInfo(&stream, test_case.name(), *test_case.GetTestInfo(i));
+ fprintf(out, "%s", StrStreamToString(&stream).c_str());
+ }
+ fprintf(out, " </testsuite>\n");
+}
+
+// Prints an XML summary of unit_test to output stream out.
+void XmlUnitTestResultPrinter::PrintXmlUnitTest(FILE* out,
+ const UnitTest& unit_test) {
+ fprintf(out, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
+ fprintf(out,
+ "<testsuites tests=\"%d\" failures=\"%d\" disabled=\"%d\" "
+ "errors=\"0\" time=\"%s\" ",
+ unit_test.total_test_count(),
+ unit_test.failed_test_count(),
+ unit_test.disabled_test_count(),
+ FormatTimeInMillisAsSeconds(unit_test.elapsed_time()).c_str());
+ if (GTEST_FLAG(shuffle)) {
+ fprintf(out, "random_seed=\"%d\" ", unit_test.random_seed());
+ }
+ fprintf(out, "name=\"AllTests\">\n");
+ for (int i = 0; i < unit_test.total_test_case_count(); ++i)
+ PrintXmlTestCase(out, *unit_test.GetTestCase(i));
+ fprintf(out, "</testsuites>\n");
+}
+
+// Produces a string representing the test properties in a result as space
+// delimited XML attributes based on the property key="value" pairs.
+String XmlUnitTestResultPrinter::TestPropertiesAsXmlAttributes(
+ const TestResult& result) {
+ Message attributes;
+ for (int i = 0; i < result.test_property_count(); ++i) {
+ const TestProperty& property = result.GetTestProperty(i);
+ attributes << " " << property.key() << "="
+ << "\"" << EscapeXmlAttribute(property.value()) << "\"";
+ }
+ return attributes.GetString();
+}
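+
+// For example, a result that recorded the property ("bug", "1234") via
+// RecordProperty() yields the text ' bug="1234"' (note the leading space),
+// ready to be appended to the <testcase> attribute list.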
+
+// End XmlUnitTestResultPrinter
+
+// Class ScopedTrace
+
+// Pushes the given source file location and message onto a per-thread
+// trace stack maintained by Google Test.
+// L < UnitTest::mutex_
+ScopedTrace::ScopedTrace(const char* file, int line, const Message& message) {
+ TraceInfo trace;
+ trace.file = file;
+ trace.line = line;
+ trace.message = message.GetString();
+
+ UnitTest::GetInstance()->PushGTestTrace(trace);
+}
+
+// Pops the info pushed by the c'tor.
+// L < UnitTest::mutex_
+ScopedTrace::~ScopedTrace() {
+ UnitTest::GetInstance()->PopGTestTrace();
+}
+
+
+// class OsStackTraceGetter
+
+// Returns the current OS stack trace as a String. Parameters:
+//
+// max_depth - the maximum number of stack frames to be included
+// in the trace.
+// skip_count - the number of top frames to be skipped; doesn't count
+// against max_depth.
+//
+// L < mutex_
+// We use "L < mutex_" to denote that the function may acquire mutex_.
+String OsStackTraceGetter::CurrentStackTrace(int, int) {
+ return String("");
+}
+
+// L < mutex_
+void OsStackTraceGetter::UponLeavingGTest() {
+}
+
+const char* const
+OsStackTraceGetter::kElidedFramesMarker =
+ "... " GTEST_NAME_ " internal frames ...";
+
+} // namespace internal
+
+// class TestEventListeners
+
+TestEventListeners::TestEventListeners()
+ : repeater_(new internal::TestEventRepeater()),
+ default_result_printer_(NULL),
+ default_xml_generator_(NULL) {
+}
+
+TestEventListeners::~TestEventListeners() { delete repeater_; }
+
+// Adds a listener to the end of the list. Google Test takes ownership of
+// the listener (i.e. it will delete the listener when the test program
+// finishes).
+void TestEventListeners::Append(TestEventListener* listener) {
+ repeater_->Append(listener);
+}
+
+// Removes the given event listener from the list and returns it. It then
+// becomes the caller's responsibility to delete the listener. Returns
+// NULL if the listener is not found in the list.
+TestEventListener* TestEventListeners::Release(TestEventListener* listener) {
+ if (listener == default_result_printer_)
+ default_result_printer_ = NULL;
+ else if (listener == default_xml_generator_)
+ default_xml_generator_ = NULL;
+ return repeater_->Release(listener);
+}
+
+// Returns repeater that broadcasts the TestEventListener events to all
+// subscribers.
+TestEventListener* TestEventListeners::repeater() { return repeater_; }
+
+// Sets the default_result_printer attribute to the provided listener.
+// The listener is also added to the listener list and previous
+// default_result_printer is removed from it and deleted. The listener can
+// also be NULL in which case it will not be added to the list. Does
+// nothing if the previous and the current listener objects are the same.
+void TestEventListeners::SetDefaultResultPrinter(TestEventListener* listener) {
+ if (default_result_printer_ != listener) {
+ // It is an error to pass this method a listener that is already in the
+ // list.
+ delete Release(default_result_printer_);
+ default_result_printer_ = listener;
+ if (listener != NULL)
+ Append(listener);
+ }
+}
+
+// Sets the default_xml_generator attribute to the provided listener. The
+// listener is also added to the listener list and previous
+// default_xml_generator is removed from it and deleted. The listener can
+// also be NULL in which case it will not be added to the list. Does
+// nothing if the previous and the current listener objects are the same.
+void TestEventListeners::SetDefaultXmlGenerator(TestEventListener* listener) {
+ if (default_xml_generator_ != listener) {
+ // It is an error to pass this method a listener that is already in the
+ // list.
+ delete Release(default_xml_generator_);
+ default_xml_generator_ = listener;
+ if (listener != NULL)
+ Append(listener);
+ }
+}
+
+// Returns true iff events are forwarded by the repeater to the listeners
+// in the list.
+bool TestEventListeners::EventForwardingEnabled() const {
+ return repeater_->forwarding_enabled();
+}
+
+void TestEventListeners::SuppressEventForwarding() {
+ repeater_->set_forwarding_enabled(false);
+}
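+
+// As a usage sketch (MyListener being a hypothetical class derived from
+// EmptyTestEventListener), client code can customize the output like so:
+//
+//   testing::TestEventListeners& listeners =
+//       testing::UnitTest::GetInstance()->listeners();
+//   // Removes (and deletes) the default console printer.
+//   delete listeners.Release(listeners.default_result_printer());
+//   // Appends a custom listener; Google Test takes ownership of it.
+//   listeners.Append(new MyListener);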
+
+// class UnitTest
+
+// Gets the singleton UnitTest object. The first time this method is
+// called, a UnitTest object is constructed and returned. Consecutive
+// calls will return the same object.
+//
+// We don't protect this under mutex_ as a user is not supposed to
+// call this before main() starts, from which point on the return
+// value will never change.
+UnitTest * UnitTest::GetInstance() {
+ // When compiled with MSVC 7.1 in optimized mode, destroying the
+ // UnitTest object upon exiting the program messes up the exit code,
+ // causing successful tests to appear failed. We have to use a
+ // different implementation in this case to bypass the compiler bug.
+ // This implementation makes the compiler happy, at the cost of
+ // leaking the UnitTest object.
+
+ // CodeGear C++Builder insists on a public destructor for the
+ // default implementation. Use this implementation to keep good OO
+ // design with private destructor.
+
+#if (_MSC_VER == 1310 && !defined(_DEBUG)) || defined(__BORLANDC__)
+ static UnitTest* const instance = new UnitTest;
+ return instance;
+#else
+ static UnitTest instance;
+ return &instance;
+#endif // (_MSC_VER == 1310 && !defined(_DEBUG)) || defined(__BORLANDC__)
+}
+
+// Gets the number of successful test cases.
+int UnitTest::successful_test_case_count() const {
+ return impl()->successful_test_case_count();
+}
+
+// Gets the number of failed test cases.
+int UnitTest::failed_test_case_count() const {
+ return impl()->failed_test_case_count();
+}
+
+// Gets the number of all test cases.
+int UnitTest::total_test_case_count() const {
+ return impl()->total_test_case_count();
+}
+
+// Gets the number of all test cases that contain at least one test
+// that should run.
+int UnitTest::test_case_to_run_count() const {
+ return impl()->test_case_to_run_count();
+}
+
+// Gets the number of successful tests.
+int UnitTest::successful_test_count() const {
+ return impl()->successful_test_count();
+}
+
+// Gets the number of failed tests.
+int UnitTest::failed_test_count() const { return impl()->failed_test_count(); }
+
+// Gets the number of disabled tests.
+int UnitTest::disabled_test_count() const {
+ return impl()->disabled_test_count();
+}
+
+// Gets the number of all tests.
+int UnitTest::total_test_count() const { return impl()->total_test_count(); }
+
+// Gets the number of tests that should run.
+int UnitTest::test_to_run_count() const { return impl()->test_to_run_count(); }
+
+// Gets the elapsed time, in milliseconds.
+internal::TimeInMillis UnitTest::elapsed_time() const {
+ return impl()->elapsed_time();
+}
+
+// Returns true iff the unit test passed (i.e. all test cases passed).
+bool UnitTest::Passed() const { return impl()->Passed(); }
+
+// Returns true iff the unit test failed (i.e. some test case failed
+// or something outside of all tests failed).
+bool UnitTest::Failed() const { return impl()->Failed(); }
+
+// Gets the i-th test case among all the test cases. i can range from 0 to
+// total_test_case_count() - 1. If i is not in that range, returns NULL.
+const TestCase* UnitTest::GetTestCase(int i) const {
+ return impl()->GetTestCase(i);
+}
+
+// Gets the i-th test case among all the test cases. i can range from 0 to
+// total_test_case_count() - 1. If i is not in that range, returns NULL.
+TestCase* UnitTest::GetMutableTestCase(int i) {
+ return impl()->GetMutableTestCase(i);
+}
+
+// Returns the list of event listeners that can be used to track events
+// inside Google Test.
+TestEventListeners& UnitTest::listeners() {
+ return *impl()->listeners();
+}
+
+// Registers and returns a global test environment. When a test
+// program is run, all global test environments will be set-up in the
+// order they were registered. After all tests in the program have
+// finished, all global test environments will be torn-down in the
+// *reverse* order they were registered.
+//
+// The UnitTest object takes ownership of the given environment.
+//
+// We don't protect this under mutex_, as we only support calling it
+// from the main thread.
+Environment* UnitTest::AddEnvironment(Environment* env) {
+ if (env == NULL) {
+ return NULL;
+ }
+
+ impl_->environments().push_back(env);
+ return env;
+}
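+
+// Client code normally reaches this through the public helper
+// ::testing::AddGlobalTestEnvironment(), called before RUN_ALL_TESTS().
+// A sketch, with MyEnvironment being a hypothetical subclass of
+// ::testing::Environment:
+//
+//   testing::AddGlobalTestEnvironment(new MyEnvironment);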
+
+#if GTEST_HAS_EXCEPTIONS
+// A failed Google Test assertion will throw an exception of this type
+// when exceptions are enabled. We derive it from std::runtime_error,
+// which is for errors presumably detectable only at run time. Since
+// std::runtime_error inherits from std::exception, many testing
+// frameworks know how to extract and print the message inside it.
+class GoogleTestFailureException : public ::std::runtime_error {
+ public:
+ explicit GoogleTestFailureException(const TestPartResult& failure)
+ : ::std::runtime_error(PrintTestPartResultToString(failure).c_str()) {}
+};
+#endif
+
+// Adds a TestPartResult to the current TestResult object. All Google Test
+// assertion macros (e.g. ASSERT_TRUE, EXPECT_EQ, etc) eventually call
+// this to report their results. The user code should use the
+// assertion macros instead of calling this directly.
+// L < mutex_
+void UnitTest::AddTestPartResult(TestPartResult::Type result_type,
+ const char* file_name,
+ int line_number,
+ const internal::String& message,
+ const internal::String& os_stack_trace) {
+ Message msg;
+ msg << message;
+
+ internal::MutexLock lock(&mutex_);
+ if (impl_->gtest_trace_stack().size() > 0) {
+ msg << "\n" << GTEST_NAME_ << " trace:";
+
+ for (int i = static_cast<int>(impl_->gtest_trace_stack().size());
+ i > 0; --i) {
+ const internal::TraceInfo& trace = impl_->gtest_trace_stack()[i - 1];
+ msg << "\n" << internal::FormatFileLocation(trace.file, trace.line)
+ << " " << trace.message;
+ }
+ }
+
+ if (os_stack_trace.c_str() != NULL && !os_stack_trace.empty()) {
+ msg << internal::kStackTraceMarker << os_stack_trace;
+ }
+
+ const TestPartResult result =
+ TestPartResult(result_type, file_name, line_number,
+ msg.GetString().c_str());
+ impl_->GetTestPartResultReporterForCurrentThread()->
+ ReportTestPartResult(result);
+
+ if (result_type != TestPartResult::kSuccess) {
+ // gtest_break_on_failure takes precedence over
+ // gtest_throw_on_failure. This allows a user to set the latter
+ // in the code (perhaps in order to use Google Test assertions
+ // with another testing framework) and specify the former on the
+ // command line for debugging.
+ if (GTEST_FLAG(break_on_failure)) {
+#if GTEST_OS_WINDOWS
+ // Using DebugBreak on Windows allows gtest to still break into a debugger
+ // when a failure happens and both the --gtest_break_on_failure and
+ // the --gtest_catch_exceptions flags are specified.
+ DebugBreak();
+#else
+ *static_cast<volatile int*>(NULL) = 1;
+#endif // GTEST_OS_WINDOWS
+ } else if (GTEST_FLAG(throw_on_failure)) {
+#if GTEST_HAS_EXCEPTIONS
+ throw GoogleTestFailureException(result);
+#else
+ // We cannot call abort() as it generates a pop-up in debug mode
+ // that cannot be suppressed in VC 7.1 or below.
+ exit(1);
+#endif
+ }
+ }
+}
+
+// Creates and adds a property to the current TestResult. If a property with
+// the same key already exists, updates its value instead.
+void UnitTest::RecordPropertyForCurrentTest(const char* key,
+ const char* value) {
+ const TestProperty test_property(key, value);
+ impl_->current_test_result()->RecordProperty(test_property);
+}
+
+// Runs all tests in this UnitTest object and prints the result.
+// Returns 0 if successful, or 1 otherwise.
+//
+// We don't protect this under mutex_, as we only support calling it
+// from the main thread.
+int UnitTest::Run() {
+#if GTEST_HAS_SEH
+ // Catch SEH-style exceptions.
+
+ const bool in_death_test_child_process =
+ internal::GTEST_FLAG(internal_run_death_test).length() > 0;
+
+ // Either the user wants Google Test to catch exceptions thrown by the
+ // tests or this is executing in the context of a death test child
+ // process. In either case the user does not want to see pop-up dialogs
+ // about crashes - they are expected.
+ if (GTEST_FLAG(catch_exceptions) || in_death_test_child_process) {
+#if !GTEST_OS_WINDOWS_MOBILE
+ // SetErrorMode doesn't exist on CE.
+ SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOALIGNMENTFAULTEXCEPT |
+ SEM_NOGPFAULTERRORBOX | SEM_NOOPENFILEERRORBOX);
+#endif // !GTEST_OS_WINDOWS_MOBILE
+
+#if (defined(_MSC_VER) || GTEST_OS_WINDOWS_MINGW) && !GTEST_OS_WINDOWS_MOBILE
+ // Death test children can be terminated with _abort(). On Windows,
+ // _abort() can show a dialog with a warning message. This forces the
+ // abort message to go to stderr instead.
+ _set_error_mode(_OUT_TO_STDERR);
+#endif
+
+#if _MSC_VER >= 1400 && !GTEST_OS_WINDOWS_MOBILE
+ // In the debug version, Visual Studio pops up a separate dialog
+ // offering a choice to debug the aborted program. We need to suppress
+ // this dialog or it will pop up for every EXPECT/ASSERT_DEATH statement
+ // executed. Google Test will notify the user of any unexpected
+ // failure via stderr.
+ //
+ // VC++ doesn't define _set_abort_behavior() prior to the version 8.0.
+ // Users of prior VC versions shall suffer the agony and pain of
+ // clicking through the countless debug dialogs.
+ // TODO(vladl@google.com): find a way to suppress the abort dialog() in the
+ // debug mode when compiled with VC 7.1 or lower.
+ if (!GTEST_FLAG(break_on_failure))
+ _set_abort_behavior(
+ 0x0, // Clear the following flags:
+ _WRITE_ABORT_MSG | _CALL_REPORTFAULT); // pop-up window, core dump.
+#endif
+ }
+
+ __try {
+ return impl_->RunAllTests();
+ } __except(internal::UnitTestOptions::GTestShouldProcessSEH(
+ GetExceptionCode())) {
+ printf("Exception thrown with code 0x%x.\nFAIL\n", GetExceptionCode());
+ fflush(stdout);
+ return 1;
+ }
+
+#else // We are on a compiler or platform that doesn't support SEH.
+
+ return impl_->RunAllTests();
+#endif // GTEST_HAS_SEH
+}
+
+// Returns the working directory when the first TEST() or TEST_F() was
+// executed.
+const char* UnitTest::original_working_dir() const {
+ return impl_->original_working_dir_.c_str();
+}
+
+// Returns the TestCase object for the test that's currently running,
+// or NULL if no test is running.
+// L < mutex_
+const TestCase* UnitTest::current_test_case() const {
+ internal::MutexLock lock(&mutex_);
+ return impl_->current_test_case();
+}
+
+// Returns the TestInfo object for the test that's currently running,
+// or NULL if no test is running.
+// L < mutex_
+const TestInfo* UnitTest::current_test_info() const {
+ internal::MutexLock lock(&mutex_);
+ return impl_->current_test_info();
+}
+
+// Returns the random seed used at the start of the current test run.
+int UnitTest::random_seed() const { return impl_->random_seed(); }
+
+#if GTEST_HAS_PARAM_TEST
+// Returns ParameterizedTestCaseRegistry object used to keep track of
+// value-parameterized tests and instantiate and register them.
+// L < mutex_
+internal::ParameterizedTestCaseRegistry&
+ UnitTest::parameterized_test_registry() {
+ return impl_->parameterized_test_registry();
+}
+#endif // GTEST_HAS_PARAM_TEST
+
+// Creates an empty UnitTest.
+UnitTest::UnitTest() {
+ impl_ = new internal::UnitTestImpl(this);
+}
+
+// Destructor of UnitTest.
+UnitTest::~UnitTest() {
+ delete impl_;
+}
+
+// Pushes a trace defined by SCOPED_TRACE() on to the per-thread
+// Google Test trace stack.
+// L < mutex_
+void UnitTest::PushGTestTrace(const internal::TraceInfo& trace) {
+ internal::MutexLock lock(&mutex_);
+ impl_->gtest_trace_stack().push_back(trace);
+}
+
+// Pops a trace from the per-thread Google Test trace stack.
+// L < mutex_
+void UnitTest::PopGTestTrace() {
+ internal::MutexLock lock(&mutex_);
+ impl_->gtest_trace_stack().pop_back();
+}
+
+namespace internal {
+
+UnitTestImpl::UnitTestImpl(UnitTest* parent)
+ : parent_(parent),
+#ifdef _MSC_VER
+#pragma warning(push) // Saves the current warning state.
+#pragma warning(disable:4355) // Temporarily disables warning 4355
+ // (using this in initializer).
+ default_global_test_part_result_reporter_(this),
+ default_per_thread_test_part_result_reporter_(this),
+#pragma warning(pop) // Restores the warning state again.
+#else
+ default_global_test_part_result_reporter_(this),
+ default_per_thread_test_part_result_reporter_(this),
+#endif // _MSC_VER
+ global_test_part_result_repoter_(
+ &default_global_test_part_result_reporter_),
+ per_thread_test_part_result_reporter_(
+ &default_per_thread_test_part_result_reporter_),
+#if GTEST_HAS_PARAM_TEST
+ parameterized_test_registry_(),
+ parameterized_tests_registered_(false),
+#endif // GTEST_HAS_PARAM_TEST
+ last_death_test_case_(-1),
+ current_test_case_(NULL),
+ current_test_info_(NULL),
+ ad_hoc_test_result_(),
+ os_stack_trace_getter_(NULL),
+ post_flag_parse_init_performed_(false),
+ random_seed_(0), // Will be overridden by the flag before first use.
+ random_(0), // Will be reseeded before first use.
+#if GTEST_HAS_DEATH_TEST
+ elapsed_time_(0),
+ internal_run_death_test_flag_(NULL),
+ death_test_factory_(new DefaultDeathTestFactory) {
+#else
+ elapsed_time_(0) {
+#endif // GTEST_HAS_DEATH_TEST
+ listeners()->SetDefaultResultPrinter(new PrettyUnitTestResultPrinter);
+}
+
+UnitTestImpl::~UnitTestImpl() {
+ // Deletes every TestCase.
+ ForEach(test_cases_, internal::Delete<TestCase>);
+
+ // Deletes every Environment.
+ ForEach(environments_, internal::Delete<Environment>);
+
+ delete os_stack_trace_getter_;
+}
+
+#if GTEST_HAS_DEATH_TEST
+// Disables event forwarding if the control is currently in a death test
+// subprocess. Must not be called before InitGoogleTest.
+void UnitTestImpl::SuppressTestEventsIfInSubprocess() {
+ if (internal_run_death_test_flag_.get() != NULL)
+ listeners()->SuppressEventForwarding();
+}
+#endif // GTEST_HAS_DEATH_TEST
+
+// Initializes event listeners performing XML output as specified by
+// UnitTestOptions. Must not be called before InitGoogleTest.
+void UnitTestImpl::ConfigureXmlOutput() {
+ const String& output_format = UnitTestOptions::GetOutputFormat();
+ if (output_format == "xml") {
+ listeners()->SetDefaultXmlGenerator(new XmlUnitTestResultPrinter(
+ UnitTestOptions::GetAbsolutePathToOutputFile().c_str()));
+ } else if (output_format != "") {
+ printf("WARNING: unrecognized output format \"%s\" ignored.\n",
+ output_format.c_str());
+ fflush(stdout);
+ }
+}
+
+// Performs initialization dependent upon flag values obtained in
+// ParseGoogleTestFlagsOnly. Is called from InitGoogleTest after the call to
+// ParseGoogleTestFlagsOnly. In case a user neglects to call InitGoogleTest
+// this function is also called from RunAllTests. Since this function can be
+// called more than once, it has to be idempotent.
+void UnitTestImpl::PostFlagParsingInit() {
+ // Ensures that this function does not execute more than once.
+ if (!post_flag_parse_init_performed_) {
+ post_flag_parse_init_performed_ = true;
+
+#if GTEST_HAS_DEATH_TEST
+ InitDeathTestSubprocessControlInfo();
+ SuppressTestEventsIfInSubprocess();
+#endif // GTEST_HAS_DEATH_TEST
+
+ // Registers parameterized tests. This makes parameterized tests
+ // available to the UnitTest reflection API without running
+ // RUN_ALL_TESTS.
+ RegisterParameterizedTests();
+
+ // Configures listeners for XML output. This makes it possible for users
+ // to shut down the default XML output before invoking RUN_ALL_TESTS.
+ ConfigureXmlOutput();
+ }
+}
+
+// A predicate that checks the name of a TestCase against a known
+// value.
+//
+// This is used for implementation of the UnitTest class only. We put
+// it in the anonymous namespace to prevent polluting the outer
+// namespace.
+//
+// TestCaseNameIs is copyable.
+class TestCaseNameIs {
+ public:
+ // Constructor.
+ explicit TestCaseNameIs(const String& name)
+ : name_(name) {}
+
+ // Returns true iff the name of test_case matches name_.
+ bool operator()(const TestCase* test_case) const {
+ return test_case != NULL && strcmp(test_case->name(), name_.c_str()) == 0;
+ }
+
+ private:
+ String name_;
+};
+
+// Finds and returns a TestCase with the given name. If one doesn't
+// exist, creates one and returns it. It's the CALLER'S
+// RESPONSIBILITY to ensure that this function is only called WHEN THE
+// TESTS ARE NOT SHUFFLED.
+//
+// Arguments:
+//
+// test_case_name: name of the test case
+// set_up_tc: pointer to the function that sets up the test case
+// tear_down_tc: pointer to the function that tears down the test case
+TestCase* UnitTestImpl::GetTestCase(const char* test_case_name,
+ const char* comment,
+ Test::SetUpTestCaseFunc set_up_tc,
+ Test::TearDownTestCaseFunc tear_down_tc) {
+ // Can we find a TestCase with the given name?
+ const std::vector<TestCase*>::const_iterator test_case =
+ std::find_if(test_cases_.begin(), test_cases_.end(),
+ TestCaseNameIs(test_case_name));
+
+ if (test_case != test_cases_.end())
+ return *test_case;
+
+ // No. Let's create one.
+ TestCase* const new_test_case =
+ new TestCase(test_case_name, comment, set_up_tc, tear_down_tc);
+
+ // Is this a death test case?
+ if (internal::UnitTestOptions::MatchesFilter(String(test_case_name),
+ kDeathTestCaseFilter)) {
+ // Yes. Inserts the test case after the last death test case
+ // defined so far. This only works when the test cases haven't
+ // been shuffled. Otherwise we may end up running a death test
+ // after a non-death test.
+ ++last_death_test_case_;
+ test_cases_.insert(test_cases_.begin() + last_death_test_case_,
+ new_test_case);
+ } else {
+ // No. Appends to the end of the list.
+ test_cases_.push_back(new_test_case);
+ }
+
+ test_case_indices_.push_back(static_cast<int>(test_case_indices_.size()));
+ return new_test_case;
+}
+
+// Helpers for setting up / tearing down the given environment. They
+// are for use in the ForEach() function.
+static void SetUpEnvironment(Environment* env) { env->SetUp(); }
+static void TearDownEnvironment(Environment* env) { env->TearDown(); }
+
+// Runs all tests in this UnitTest object, prints the result, and
+// returns 0 if all tests are successful, or 1 otherwise. If any
+// exception is thrown during a test on Windows, this test is
+// considered to be failed, but the rest of the tests will still be
+// run. (We disable exceptions on Linux and Mac OS X, so the issue
+// doesn't apply there.)
+// When parameterized tests are enabled, it expands and registers
+// parameterized tests first in RegisterParameterizedTests().
+// All other functions called from RunAllTests() may safely assume that
+// parameterized tests are ready to be counted and run.
+int UnitTestImpl::RunAllTests() {
+ // Makes sure InitGoogleTest() was called.
+ if (!GTestIsInitialized()) {
+ printf("%s",
+ "\nThis test program did NOT call ::testing::InitGoogleTest "
+ "before calling RUN_ALL_TESTS(). Please fix it.\n");
+ return 1;
+ }
+
+ // Do not run any test if the --help flag was specified.
+ if (g_help_flag)
+ return 0;
+
+ // Repeats the call to the post-flag parsing initialization in case the
+ // user didn't call InitGoogleTest.
+ PostFlagParsingInit();
+
+ // Even if sharding is not on, test runners may want to use the
+ // GTEST_SHARD_STATUS_FILE to query whether the test supports the sharding
+ // protocol.
+ internal::WriteToShardStatusFileIfNeeded();
+
+ // True iff we are in a subprocess for running a thread-safe-style
+ // death test.
+ bool in_subprocess_for_death_test = false;
+
+#if GTEST_HAS_DEATH_TEST
+ in_subprocess_for_death_test = (internal_run_death_test_flag_.get() != NULL);
+#endif // GTEST_HAS_DEATH_TEST
+
+ const bool should_shard = ShouldShard(kTestTotalShards, kTestShardIndex,
+ in_subprocess_for_death_test);
+
+ // Compares the full test names with the filter to decide which
+ // tests to run.
+ const bool has_tests_to_run = FilterTests(should_shard
+ ? HONOR_SHARDING_PROTOCOL
+ : IGNORE_SHARDING_PROTOCOL) > 0;
+
+ // Lists the tests and exits if the --gtest_list_tests flag was specified.
+ if (GTEST_FLAG(list_tests)) {
+ // This must be called *after* FilterTests() has been called.
+ ListTestsMatchingFilter();
+ return 0;
+ }
+
+ random_seed_ = GTEST_FLAG(shuffle) ?
+ GetRandomSeedFromFlag(GTEST_FLAG(random_seed)) : 0;
+
+ // True iff at least one test has failed.
+ bool failed = false;
+
+ TestEventListener* repeater = listeners()->repeater();
+
+ repeater->OnTestProgramStart(*parent_);
+
+ // How many times to repeat the tests? We don't want to repeat them
+ // when we are inside the subprocess of a death test.
+ const int repeat = in_subprocess_for_death_test ? 1 : GTEST_FLAG(repeat);
+ // Repeats forever if the repeat count is negative.
+ const bool forever = repeat < 0;
+ for (int i = 0; forever || i != repeat; i++) {
+ ClearResult();
+
+ const TimeInMillis start = GetTimeInMillis();
+
+ // Shuffles test cases and tests if requested.
+ if (has_tests_to_run && GTEST_FLAG(shuffle)) {
+ random()->Reseed(random_seed_);
+ // This should be done before calling OnTestIterationStart(),
+ // such that a test event listener can see the actual test order
+ // in the event.
+ ShuffleTests();
+ }
+
+ // Tells the unit test event listeners that the tests are about to start.
+ repeater->OnTestIterationStart(*parent_, i);
+
+ // Runs each test case if there is at least one test to run.
+ if (has_tests_to_run) {
+ // Sets up all environments beforehand.
+ repeater->OnEnvironmentsSetUpStart(*parent_);
+ ForEach(environments_, SetUpEnvironment);
+ repeater->OnEnvironmentsSetUpEnd(*parent_);
+
+ // Runs the tests only if there was no fatal failure during global
+ // set-up.
+ if (!Test::HasFatalFailure()) {
+ for (int test_index = 0; test_index < total_test_case_count();
+ test_index++) {
+ GetMutableTestCase(test_index)->Run();
+ }
+ }
+
+ // Tears down all environments in reverse order afterwards.
+ repeater->OnEnvironmentsTearDownStart(*parent_);
+ std::for_each(environments_.rbegin(), environments_.rend(),
+ TearDownEnvironment);
+ repeater->OnEnvironmentsTearDownEnd(*parent_);
+ }
+
+ elapsed_time_ = GetTimeInMillis() - start;
+
+ // Tells the unit test event listener that the tests have just finished.
+ repeater->OnTestIterationEnd(*parent_, i);
+
+ // Remembers whether this iteration failed.
+ if (!Passed()) {
+ failed = true;
+ }
+
+ // Restores the original test order after the iteration. This
+ // allows the user to quickly repro a failure that happens in the
+ // N-th iteration without repeating the first (N - 1) iterations.
+ // This is not enclosed in "if (GTEST_FLAG(shuffle)) { ... }", in
+ // case the user somehow changes the value of the flag somewhere
+ // (it's always safe to unshuffle the tests).
+ UnshuffleTests();
+
+ if (GTEST_FLAG(shuffle)) {
+ // Picks a new random seed for each iteration.
+ random_seed_ = GetNextRandomSeed(random_seed_);
+ }
+ }
+
+ repeater->OnTestProgramEnd(*parent_);
+
+ // Returns 0 if all tests passed, or 1 otherwise.
+ return failed ? 1 : 0;
+}
+
+// Reads the GTEST_SHARD_STATUS_FILE environment variable, and creates the file
+// if the variable is present. If a file already exists at this location, this
+// function will write over it. If the variable is present, but the file cannot
+// be created, prints an error and exits.
+void WriteToShardStatusFileIfNeeded() {
+ const char* const test_shard_file = posix::GetEnv(kTestShardStatusFile);
+ if (test_shard_file != NULL) {
+ FILE* const file = posix::FOpen(test_shard_file, "w");
+ if (file == NULL) {
+ ColoredPrintf(COLOR_RED,
+ "Could not write to the test shard status file \"%s\" "
+ "specified by the %s environment variable.\n",
+ test_shard_file, kTestShardStatusFile);
+ fflush(stdout);
+ exit(EXIT_FAILURE);
+ }
+ fclose(file);
+ }
+}
+
+// Checks whether sharding is enabled by examining the relevant
+// environment variable values. If the variables are present,
+// but inconsistent (i.e., shard_index >= total_shards), prints
+// an error and exits. If in_subprocess_for_death_test, sharding is
+// disabled because it must only be applied to the original test
+// process. Otherwise, we could filter out death tests we intended to execute.
+bool ShouldShard(const char* total_shards_env,
+ const char* shard_index_env,
+ bool in_subprocess_for_death_test) {
+ if (in_subprocess_for_death_test) {
+ return false;
+ }
+
+ const Int32 total_shards = Int32FromEnvOrDie(total_shards_env, -1);
+ const Int32 shard_index = Int32FromEnvOrDie(shard_index_env, -1);
+
+ if (total_shards == -1 && shard_index == -1) {
+ return false;
+ } else if (total_shards == -1 && shard_index != -1) {
+ const Message msg = Message()
+ << "Invalid environment variables: you have "
+ << kTestShardIndex << " = " << shard_index
+ << ", but have left " << kTestTotalShards << " unset.\n";
+ ColoredPrintf(COLOR_RED, msg.GetString().c_str());
+ fflush(stdout);
+ exit(EXIT_FAILURE);
+ } else if (total_shards != -1 && shard_index == -1) {
+ const Message msg = Message()
+ << "Invalid environment variables: you have "
+ << kTestTotalShards << " = " << total_shards
+ << ", but have left " << kTestShardIndex << " unset.\n";
+ ColoredPrintf(COLOR_RED, msg.GetString().c_str());
+ fflush(stdout);
+ exit(EXIT_FAILURE);
+ } else if (shard_index < 0 || shard_index >= total_shards) {
+ const Message msg = Message()
+ << "Invalid environment variables: we require 0 <= "
+ << kTestShardIndex << " < " << kTestTotalShards
+ << ", but you have " << kTestShardIndex << "=" << shard_index
+ << ", " << kTestTotalShards << "=" << total_shards << ".\n";
+ ColoredPrintf(COLOR_RED, msg.GetString().c_str());
+ fflush(stdout);
+ exit(EXIT_FAILURE);
+ }
+
+ return total_shards > 1;
+}
+
+// Parses the environment variable var as an Int32. If it is unset,
+// returns default_val. If it is not an Int32, prints an error
+// and aborts.
+Int32 Int32FromEnvOrDie(const char* const var, Int32 default_val) {
+ const char* str_val = posix::GetEnv(var);
+ if (str_val == NULL) {
+ return default_val;
+ }
+
+ Int32 result;
+ if (!ParseInt32(Message() << "The value of environment variable " << var,
+ str_val, &result)) {
+ exit(EXIT_FAILURE);
+ }
+ return result;
+}
+
+// Given the total number of shards, the shard index, and the test id,
+// returns true iff the test should be run on this shard. The test id is
+// some arbitrary but unique non-negative integer assigned to each test
+// method. Assumes that 0 <= shard_index < total_shards.
+bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) {
+ return (test_id % total_shards) == shard_index;
+}
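+
+// For example, with total_shards == 3 and shard_index == 1, the tests with
+// ids 1, 4, 7, ... are run on this shard, since 1 % 3 == 4 % 3 == 1.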
+
+// Compares the name of each test with the user-specified filter to
+// decide whether the test should be run, then records the result in
+// each TestCase and TestInfo object.
+// If shard_tests == true, further filters tests based on sharding
+// variables in the environment - see
+// http://code.google.com/p/googletest/wiki/GoogleTestAdvancedGuide.
+// Returns the number of tests that should run.
+int UnitTestImpl::FilterTests(ReactionToSharding shard_tests) {
+ const Int32 total_shards = shard_tests == HONOR_SHARDING_PROTOCOL ?
+ Int32FromEnvOrDie(kTestTotalShards, -1) : -1;
+ const Int32 shard_index = shard_tests == HONOR_SHARDING_PROTOCOL ?
+ Int32FromEnvOrDie(kTestShardIndex, -1) : -1;
+
+ // num_runnable_tests is the number of tests that will run across all
+ // shards (i.e., those that match the filter and are not disabled).
+ // num_selected_tests is the number of tests to be run on this shard.
+ int num_runnable_tests = 0;
+ int num_selected_tests = 0;
+ for (size_t i = 0; i < test_cases_.size(); i++) {
+ TestCase* const test_case = test_cases_[i];
+ const String &test_case_name = test_case->name();
+ test_case->set_should_run(false);
+
+ for (size_t j = 0; j < test_case->test_info_list().size(); j++) {
+ TestInfo* const test_info = test_case->test_info_list()[j];
+ const String test_name(test_info->name());
+ // A test is disabled if test case name or test name matches
+ // kDisableTestFilter.
+ const bool is_disabled =
+ internal::UnitTestOptions::MatchesFilter(test_case_name,
+ kDisableTestFilter) ||
+ internal::UnitTestOptions::MatchesFilter(test_name,
+ kDisableTestFilter);
+ test_info->impl()->set_is_disabled(is_disabled);
+
+ const bool matches_filter =
+ internal::UnitTestOptions::FilterMatchesTest(test_case_name,
+ test_name);
+ test_info->impl()->set_matches_filter(matches_filter);
+
+ const bool is_runnable =
+ (GTEST_FLAG(also_run_disabled_tests) || !is_disabled) &&
+ matches_filter;
+
+ const bool is_selected = is_runnable &&
+ (shard_tests == IGNORE_SHARDING_PROTOCOL ||
+ ShouldRunTestOnShard(total_shards, shard_index,
+ num_runnable_tests));
+
+ num_runnable_tests += is_runnable;
+ num_selected_tests += is_selected;
+
+ test_info->impl()->set_should_run(is_selected);
+ test_case->set_should_run(test_case->should_run() || is_selected);
+ }
+ }
+ return num_selected_tests;
+}
+
+// Prints the names of the tests matching the user-specified filter flag.
+void UnitTestImpl::ListTestsMatchingFilter() {
+ for (size_t i = 0; i < test_cases_.size(); i++) {
+ const TestCase* const test_case = test_cases_[i];
+ bool printed_test_case_name = false;
+
+ for (size_t j = 0; j < test_case->test_info_list().size(); j++) {
+ const TestInfo* const test_info =
+ test_case->test_info_list()[j];
+ if (test_info->matches_filter()) {
+ if (!printed_test_case_name) {
+ printed_test_case_name = true;
+ printf("%s.\n", test_case->name());
+ }
+ printf(" %s\n", test_info->name());
+ }
+ }
+ }
+ fflush(stdout);
+}
+
+// Sets the OS stack trace getter.
+//
+// Does nothing if the input and the current OS stack trace getter are
+// the same; otherwise, deletes the old getter and makes the input the
+// current getter.
+void UnitTestImpl::set_os_stack_trace_getter(
+ OsStackTraceGetterInterface* getter) {
+ if (os_stack_trace_getter_ != getter) {
+ delete os_stack_trace_getter_;
+ os_stack_trace_getter_ = getter;
+ }
+}
+
+// Returns the current OS stack trace getter if it is not NULL;
+// otherwise, creates an OsStackTraceGetter, makes it the current
+// getter, and returns it.
+OsStackTraceGetterInterface* UnitTestImpl::os_stack_trace_getter() {
+ if (os_stack_trace_getter_ == NULL) {
+ os_stack_trace_getter_ = new OsStackTraceGetter;
+ }
+
+ return os_stack_trace_getter_;
+}
+
+// Returns the TestResult for the test that's currently running, or
+// the TestResult for the ad hoc test if no test is running.
+TestResult* UnitTestImpl::current_test_result() {
+ return current_test_info_ ?
+ current_test_info_->impl()->result() : &ad_hoc_test_result_;
+}
+
+// Shuffles all test cases, and the tests within each test case,
+// making sure that death tests are still run first.
+void UnitTestImpl::ShuffleTests() {
+ // Shuffles the death test cases.
+ ShuffleRange(random(), 0, last_death_test_case_ + 1, &test_case_indices_);
+
+ // Shuffles the non-death test cases.
+ ShuffleRange(random(), last_death_test_case_ + 1,
+ static_cast<int>(test_cases_.size()), &test_case_indices_);
+
+ // Shuffles the tests inside each test case.
+ for (size_t i = 0; i < test_cases_.size(); i++) {
+ test_cases_[i]->ShuffleTests(random());
+ }
+}
+
+// Restores the test cases and tests to their order before the first shuffle.
+void UnitTestImpl::UnshuffleTests() {
+ for (size_t i = 0; i < test_cases_.size(); i++) {
+ // Unshuffles the tests in each test case.
+ test_cases_[i]->UnshuffleTests();
+ // Resets the index of each test case.
+ test_case_indices_[i] = static_cast<int>(i);
+ }
+}
+
+// TestInfoImpl constructor. The new instance assumes ownership of the test
+// factory object.
+TestInfoImpl::TestInfoImpl(TestInfo* parent,
+ const char* a_test_case_name,
+ const char* a_name,
+ const char* a_test_case_comment,
+ const char* a_comment,
+ TypeId a_fixture_class_id,
+ internal::TestFactoryBase* factory) :
+ parent_(parent),
+ test_case_name_(String(a_test_case_name)),
+ name_(String(a_name)),
+ test_case_comment_(String(a_test_case_comment)),
+ comment_(String(a_comment)),
+ fixture_class_id_(a_fixture_class_id),
+ should_run_(false),
+ is_disabled_(false),
+ matches_filter_(false),
+ factory_(factory) {
+}
+
+// TestInfoImpl destructor.
+TestInfoImpl::~TestInfoImpl() {
+ delete factory_;
+}
+
+// Returns the current OS stack trace as a String.
+//
+// The maximum number of stack frames to be included is specified by
+// the gtest_stack_trace_depth flag. The skip_count parameter
+// specifies the number of top frames to be skipped, which doesn't
+// count against the number of frames to be included.
+//
+// For example, if Foo() calls Bar(), which in turn calls
+// GetCurrentOsStackTraceExceptTop(..., 1), Foo() will be included in
+// the trace but Bar() and GetCurrentOsStackTraceExceptTop() won't.
+String GetCurrentOsStackTraceExceptTop(UnitTest* /*unit_test*/,
+ int skip_count) {
+ // We pass skip_count + 1 to skip this wrapper function in addition
+ // to what the user really wants to skip.
+ return GetUnitTestImpl()->CurrentOsStackTraceExceptTop(skip_count + 1);
+}
+
+// Used by the GTEST_HIDE_UNREACHABLE_CODE_ macro to suppress unreachable
+// code warnings.
+namespace {
+class ClassUniqueToAlwaysTrue {};
+}
+
+bool IsTrue(bool condition) { return condition; }
+
+bool AlwaysTrue() {
+#if GTEST_HAS_EXCEPTIONS
+ // This condition is always false so AlwaysTrue() never actually throws,
+ // but it makes the compiler think that it may throw.
+ if (IsTrue(false))
+ throw ClassUniqueToAlwaysTrue();
+#endif // GTEST_HAS_EXCEPTIONS
+ return true;
+}
+
+// If *pstr starts with the given prefix, modifies *pstr to be right
+// past the prefix and returns true; otherwise leaves *pstr unchanged
+// and returns false. None of pstr, *pstr, and prefix can be NULL.
+bool SkipPrefix(const char* prefix, const char** pstr) {
+ const size_t prefix_len = strlen(prefix);
+ if (strncmp(*pstr, prefix, prefix_len) == 0) {
+ *pstr += prefix_len;
+ return true;
+ }
+ return false;
+}
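+
+// For example, given const char* p = "--gtest_color=no", SkipPrefix("--", &p)
+// returns true and leaves p pointing at "gtest_color=no".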
+
+// Parses a string as a command line flag. The string should have
+// the format "--flag=value". When def_optional is true, the "=value"
+// part can be omitted.
+//
+// Returns the value of the flag, or NULL if the parsing failed.
+const char* ParseFlagValue(const char* str,
+ const char* flag,
+ bool def_optional) {
+ // str and flag must not be NULL.
+ if (str == NULL || flag == NULL) return NULL;
+
+ // The flag must start with "--" followed by GTEST_FLAG_PREFIX_.
+ const String flag_str = String::Format("--%s%s", GTEST_FLAG_PREFIX_, flag);
+ const size_t flag_len = flag_str.length();
+ if (strncmp(str, flag_str.c_str(), flag_len) != 0) return NULL;
+
+ // Skips the flag name.
+ const char* flag_end = str + flag_len;
+
+ // When def_optional is true, it's OK to not have a "=value" part.
+ if (def_optional && (flag_end[0] == '\0')) {
+ return flag_end;
+ }
+
+ // If def_optional is true and there are more characters after the
+ // flag name, or if def_optional is false, there must be a '=' after
+ // the flag name.
+ if (flag_end[0] != '=') return NULL;
+
+ // Returns the string after "=".
+ return flag_end + 1;
+}
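+
+// For example (GTEST_FLAG_PREFIX_ is "gtest_"):
+//
+//   ParseFlagValue("--gtest_filter=Foo.*", "filter", false) returns "Foo.*";
+//   ParseFlagValue("--gtest_list_tests", "list_tests", true) returns "";
+//   ParseFlagValue("--gtest_filter", "filter", false) returns NULL.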
+
+// Parses a string for a bool flag, in the form of either
+// "--flag=value" or "--flag".
+//
+// In the former case, the value is taken as true as long as it does
+// not start with '0', 'f', or 'F'.
+//
+// In the latter case, the value is taken as true.
+//
+// On success, stores the value of the flag in *value, and returns
+// true. On failure, returns false without changing *value.
+bool ParseBoolFlag(const char* str, const char* flag, bool* value) {
+ // Gets the value of the flag as a string.
+ const char* const value_str = ParseFlagValue(str, flag, true);
+
+ // Aborts if the parsing failed.
+ if (value_str == NULL) return false;
+
+ // Converts the string value to a bool.
+ *value = !(*value_str == '0' || *value_str == 'f' || *value_str == 'F');
+ return true;
+}
+
+// Parses a string for an Int32 flag, in the form of
+// "--flag=value".
+//
+// On success, stores the value of the flag in *value, and returns
+// true. On failure, returns false without changing *value.
+bool ParseInt32Flag(const char* str, const char* flag, Int32* value) {
+ // Gets the value of the flag as a string.
+ const char* const value_str = ParseFlagValue(str, flag, false);
+
+ // Aborts if the parsing failed.
+ if (value_str == NULL) return false;
+
+ // Sets *value to the value of the flag.
+ return ParseInt32(Message() << "The value of flag --" << flag,
+ value_str, value);
+}
+
+// Parses a string for a string flag, in the form of
+// "--flag=value".
+//
+// On success, stores the value of the flag in *value, and returns
+// true. On failure, returns false without changing *value.
+bool ParseStringFlag(const char* str, const char* flag, String* value) {
+ // Gets the value of the flag as a string.
+ const char* const value_str = ParseFlagValue(str, flag, false);
+
+ // Aborts if the parsing failed.
+ if (value_str == NULL) return false;
+
+ // Sets *value to the value of the flag.
+ *value = value_str;
+ return true;
+}
+
+// Determines whether a string has a prefix that Google Test uses for its
+// flags, i.e., starts with GTEST_FLAG_PREFIX_ or GTEST_FLAG_PREFIX_DASH_.
+// If Google Test detects that a command line flag has its prefix but is not
+// recognized, it will print its help message. Flags starting with
+// GTEST_INTERNAL_PREFIX_ followed by "internal_" are considered Google Test
+// internal flags and do not trigger the help message.
+static bool HasGoogleTestFlagPrefix(const char* str) {
+ return (SkipPrefix("--", &str) ||
+ SkipPrefix("-", &str) ||
+ SkipPrefix("/", &str)) &&
+ !SkipPrefix(GTEST_FLAG_PREFIX_ "internal_", &str) &&
+ (SkipPrefix(GTEST_FLAG_PREFIX_, &str) ||
+ SkipPrefix(GTEST_FLAG_PREFIX_DASH_, &str));
+}
+
+// Prints a string containing code-encoded text. The following escape
+// sequences can be used in the string to control the text color:
+//
+// @@ prints a single '@' character.
+// @R changes the color to red.
+// @G changes the color to green.
+// @Y changes the color to yellow.
+// @D changes to the default terminal text color.
+//
+// TODO(wan@google.com): Write tests for this once we add stdout
+// capturing to Google Test.
+static void PrintColorEncoded(const char* str) {
+ GTestColor color = COLOR_DEFAULT; // The current color.
+
+ // Conceptually, we split the string into segments divided by escape
+ // sequences. Then we print one segment at a time. At the end of
+ // each iteration, the str pointer advances to the beginning of the
+ // next segment.
+ for (;;) {
+ const char* p = strchr(str, '@');
+ if (p == NULL) {
+ ColoredPrintf(color, "%s", str);
+ return;
+ }
+
+ ColoredPrintf(color, "%s", String(str, p - str).c_str());
+
+ const char ch = p[1];
+ str = p + 2;
+ if (ch == '@') {
+ ColoredPrintf(color, "@");
+ } else if (ch == 'D') {
+ color = COLOR_DEFAULT;
+ } else if (ch == 'R') {
+ color = COLOR_RED;
+ } else if (ch == 'G') {
+ color = COLOR_GREEN;
+ } else if (ch == 'Y') {
+ color = COLOR_YELLOW;
+ } else {
+ --str;
+ }
+ }
+}
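+
+// For example, PrintColorEncoded("@RFAILED@D  ok\n") prints "FAILED" in red
+// followed by "  ok" in the default terminal color.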
+
+static const char kColorEncodedHelpMessage[] =
+"This program contains tests written using " GTEST_NAME_ ". You can use the\n"
+"following command line flags to control its behavior:\n"
+"\n"
+"Test Selection:\n"
+" @G--" GTEST_FLAG_PREFIX_ "list_tests@D\n"
+" List the names of all tests instead of running them. The name of\n"
+" TEST(Foo, Bar) is \"Foo.Bar\".\n"
+" @G--" GTEST_FLAG_PREFIX_ "filter=@YPOSTIVE_PATTERNS"
+ "[@G-@YNEGATIVE_PATTERNS]@D\n"
+" Run only the tests whose name matches one of the positive patterns but\n"
+" none of the negative patterns. '?' matches any single character; '*'\n"
+" matches any substring; ':' separates two patterns.\n"
+" @G--" GTEST_FLAG_PREFIX_ "also_run_disabled_tests@D\n"
+" Run all disabled tests too.\n"
+"\n"
+"Test Execution:\n"
+" @G--" GTEST_FLAG_PREFIX_ "repeat=@Y[COUNT]@D\n"
+" Run the tests repeatedly; use a negative count to repeat forever.\n"
+" @G--" GTEST_FLAG_PREFIX_ "shuffle@D\n"
+" Randomize tests' orders on every iteration.\n"
+" @G--" GTEST_FLAG_PREFIX_ "random_seed=@Y[NUMBER]@D\n"
+" Random number seed to use for shuffling test orders (between 1 and\n"
+" 99999, or 0 to use a seed based on the current time).\n"
+"\n"
+"Test Output:\n"
+" @G--" GTEST_FLAG_PREFIX_ "color=@Y(@Gyes@Y|@Gno@Y|@Gauto@Y)@D\n"
+" Enable/disable colored output. The default is @Gauto@D.\n"
+" -@G-" GTEST_FLAG_PREFIX_ "print_time=0@D\n"
+" Don't print the elapsed time of each test.\n"
+" @G--" GTEST_FLAG_PREFIX_ "output=xml@Y[@G:@YDIRECTORY_PATH@G"
+ GTEST_PATH_SEP_ "@Y|@G:@YFILE_PATH]@D\n"
+" Generate an XML report in the given directory or with the given file\n"
+" name. @YFILE_PATH@D defaults to @Gtest_details.xml@D.\n"
+"\n"
+"Assertion Behavior:\n"
+#if GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS
+" @G--" GTEST_FLAG_PREFIX_ "death_test_style=@Y(@Gfast@Y|@Gthreadsafe@Y)@D\n"
+" Set the default death test style.\n"
+#endif // GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS
+" @G--" GTEST_FLAG_PREFIX_ "break_on_failure@D\n"
+" Turn assertion failures into debugger break-points.\n"
+" @G--" GTEST_FLAG_PREFIX_ "throw_on_failure@D\n"
+" Turn assertion failures into C++ exceptions.\n"
+#if GTEST_OS_WINDOWS
+" @G--" GTEST_FLAG_PREFIX_ "catch_exceptions@D\n"
+" Suppress pop-ups caused by exceptions.\n"
+#endif // GTEST_OS_WINDOWS
+"\n"
+"Except for @G--" GTEST_FLAG_PREFIX_ "list_tests@D, you can alternatively set "
+ "the corresponding\n"
+"environment variable of a flag (all letters in upper-case). For example, to\n"
+"disable colored text output, you can either specify @G--" GTEST_FLAG_PREFIX_
+ "color=no@D or set\n"
+"the @G" GTEST_FLAG_PREFIX_UPPER_ "COLOR@D environment variable to @Gno@D.\n"
+"\n"
+"For more information, please read the " GTEST_NAME_ " documentation at\n"
+"@G" GTEST_PROJECT_URL_ "@D. If you find a bug in " GTEST_NAME_ "\n"
+"(not one in your own code or tests), please report it to\n"
+"@G<" GTEST_DEV_EMAIL_ ">@D.\n";
+
+// Parses the command line for Google Test flags, without initializing
+// other parts of Google Test. The type parameter CharType can be
+// instantiated to either char or wchar_t.
+template <typename CharType>
+void ParseGoogleTestFlagsOnlyImpl(int* argc, CharType** argv) {
+ for (int i = 1; i < *argc; i++) {
+ const String arg_string = StreamableToString(argv[i]);
+ const char* const arg = arg_string.c_str();
+
+ using internal::ParseBoolFlag;
+ using internal::ParseInt32Flag;
+ using internal::ParseStringFlag;
+
+ // Do we see a Google Test flag?
+ if (ParseBoolFlag(arg, kAlsoRunDisabledTestsFlag,
+ &GTEST_FLAG(also_run_disabled_tests)) ||
+ ParseBoolFlag(arg, kBreakOnFailureFlag,
+ &GTEST_FLAG(break_on_failure)) ||
+ ParseBoolFlag(arg, kCatchExceptionsFlag,
+ &GTEST_FLAG(catch_exceptions)) ||
+ ParseStringFlag(arg, kColorFlag, &GTEST_FLAG(color)) ||
+ ParseStringFlag(arg, kDeathTestStyleFlag,
+ &GTEST_FLAG(death_test_style)) ||
+ ParseBoolFlag(arg, kDeathTestUseFork,
+ &GTEST_FLAG(death_test_use_fork)) ||
+ ParseStringFlag(arg, kFilterFlag, &GTEST_FLAG(filter)) ||
+ ParseStringFlag(arg, kInternalRunDeathTestFlag,
+ &GTEST_FLAG(internal_run_death_test)) ||
+ ParseBoolFlag(arg, kListTestsFlag, &GTEST_FLAG(list_tests)) ||
+ ParseStringFlag(arg, kOutputFlag, &GTEST_FLAG(output)) ||
+ ParseBoolFlag(arg, kPrintTimeFlag, &GTEST_FLAG(print_time)) ||
+ ParseInt32Flag(arg, kRandomSeedFlag, &GTEST_FLAG(random_seed)) ||
+ ParseInt32Flag(arg, kRepeatFlag, &GTEST_FLAG(repeat)) ||
+ ParseBoolFlag(arg, kShuffleFlag, &GTEST_FLAG(shuffle)) ||
+ ParseInt32Flag(arg, kStackTraceDepthFlag,
+ &GTEST_FLAG(stack_trace_depth)) ||
+ ParseBoolFlag(arg, kThrowOnFailureFlag, &GTEST_FLAG(throw_on_failure))
+ ) {
+ // Yes. Shift the remainder of the argv list left by one. Note
+ // that argv has (*argc + 1) elements, the last one always being
+ // NULL. The following loop moves the trailing NULL element as
+ // well.
+ for (int j = i; j != *argc; j++) {
+ argv[j] = argv[j + 1];
+ }
+
+ // Decrements the argument count.
+ (*argc)--;
+
+ // We also need to decrement the iterator as we just removed
+ // an element.
+ i--;
+ } else if (arg_string == "--help" || arg_string == "-h" ||
+ arg_string == "-?" || arg_string == "/?" ||
+ HasGoogleTestFlagPrefix(arg)) {
+ // Both help flag and unrecognized Google Test flags (excluding
+ // internal ones) trigger help display.
+ g_help_flag = true;
+ }
+ }
+
+ if (g_help_flag) {
+ // We print the help here instead of in RUN_ALL_TESTS(), as the
+ // latter may not be called at all if the user is using Google
+ // Test with another testing framework.
+ PrintColorEncoded(kColorEncodedHelpMessage);
+ }
+}
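+
+// For example (illustrative only): given *argc == 3 and
+// argv == { "prog", "--gtest_shuffle", "--foo", NULL }, the recognized
+// --gtest_shuffle element is shifted out above, leaving *argc == 2 and
+// argv == { "prog", "--foo", NULL }; unrecognized flags such as --foo are
+// left in place for the client program to handle.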
+
+// Parses the command line for Google Test flags, without initializing
+// other parts of Google Test.
+void ParseGoogleTestFlagsOnly(int* argc, char** argv) {
+ ParseGoogleTestFlagsOnlyImpl(argc, argv);
+}
+void ParseGoogleTestFlagsOnly(int* argc, wchar_t** argv) {
+ ParseGoogleTestFlagsOnlyImpl(argc, argv);
+}
+
+// The internal implementation of InitGoogleTest().
+//
+// The type parameter CharType can be instantiated to either char or
+// wchar_t.
+template <typename CharType>
+void InitGoogleTestImpl(int* argc, CharType** argv) {
+ g_init_gtest_count++;
+
+ // We don't want to run the initialization code twice.
+ if (g_init_gtest_count != 1) return;
+
+ if (*argc <= 0) return;
+
+ internal::g_executable_path = internal::StreamableToString(argv[0]);
+
+#if GTEST_HAS_DEATH_TEST
+ g_argvs.clear();
+ for (int i = 0; i != *argc; i++) {
+ g_argvs.push_back(StreamableToString(argv[i]));
+ }
+#endif // GTEST_HAS_DEATH_TEST
+
+ ParseGoogleTestFlagsOnly(argc, argv);
+ GetUnitTestImpl()->PostFlagParsingInit();
+}
+
+} // namespace internal
+
+// Initializes Google Test. This must be called before calling
+// RUN_ALL_TESTS(). In particular, it parses a command line for the
+// flags that Google Test recognizes. Whenever a Google Test flag is
+// seen, it is removed from argv, and *argc is decremented.
+//
+// No value is returned. Instead, the Google Test flag variables are
+// updated.
+//
+// Calling the function for the second time has no user-visible effect.
+void InitGoogleTest(int* argc, char** argv) {
+ internal::InitGoogleTestImpl(argc, argv);
+}
+
+// This overloaded version can be used in Windows programs compiled in
+// UNICODE mode.
+void InitGoogleTest(int* argc, wchar_t** argv) {
+ internal::InitGoogleTestImpl(argc, argv);
+}
+
+} // namespace testing
diff --git a/Source/ThirdParty/gtest/src/gtest_main.cc b/Source/ThirdParty/gtest/src/gtest_main.cc
new file mode 100644
index 000000000..d20c02fdf
--- /dev/null
+++ b/Source/ThirdParty/gtest/src/gtest_main.cc
@@ -0,0 +1,39 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include <iostream>
+
+#include <gtest/gtest.h>
+
+int main(int argc, char **argv) {
+ std::cout << "Running main() from gtest_main.cc\n";
+
+ testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest-death-test_test.cc b/Source/ThirdParty/gtest/test/gtest-death-test_test.cc
new file mode 100644
index 000000000..ed5b53b71
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-death-test_test.cc
@@ -0,0 +1,1230 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+//
+// Tests for death tests.
+
+#include <gtest/gtest-death-test.h>
+#include <gtest/gtest.h>
+#include <gtest/internal/gtest-filepath.h>
+
+using testing::internal::AlwaysFalse;
+using testing::internal::AlwaysTrue;
+
+#if GTEST_HAS_DEATH_TEST
+
+#if GTEST_OS_WINDOWS
+#include <direct.h> // For chdir().
+#else
+#include <unistd.h>
+#include <sys/wait.h> // For waitpid.
+#include <limits> // For std::numeric_limits.
+#endif // GTEST_OS_WINDOWS
+
+#include <limits.h>
+#include <signal.h>
+#include <stdio.h>
+
+#include <gtest/gtest-spi.h>
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+namespace posix = ::testing::internal::posix;
+
+using testing::Message;
+using testing::internal::DeathTest;
+using testing::internal::DeathTestFactory;
+using testing::internal::FilePath;
+using testing::internal::GetLastErrnoDescription;
+using testing::internal::GetUnitTestImpl;
+using testing::internal::ParseNaturalNumber;
+using testing::internal::String;
+
+namespace testing {
+namespace internal {
+
+// A helper class whose objects replace the death test factory for a
+// single UnitTest object during their lifetimes.
+class ReplaceDeathTestFactory {
+ public:
+ explicit ReplaceDeathTestFactory(DeathTestFactory* new_factory)
+ : unit_test_impl_(GetUnitTestImpl()) {
+ old_factory_ = unit_test_impl_->death_test_factory_.release();
+ unit_test_impl_->death_test_factory_.reset(new_factory);
+ }
+
+ ~ReplaceDeathTestFactory() {
+ unit_test_impl_->death_test_factory_.release();
+ unit_test_impl_->death_test_factory_.reset(old_factory_);
+ }
+ private:
+ // Prevents copying ReplaceDeathTestFactory objects.
+ ReplaceDeathTestFactory(const ReplaceDeathTestFactory&);
+ void operator=(const ReplaceDeathTestFactory&);
+
+ UnitTestImpl* unit_test_impl_;
+ DeathTestFactory* old_factory_;
+};
+
+} // namespace internal
+} // namespace testing
+
+void DieInside(const char* function) {
+ fprintf(stderr, "death inside %s().", function);
+ fflush(stderr);
+ // We call _exit() instead of exit(), as the former is a direct
+ // system call and thus safer in the presence of threads. exit()
+ // will invoke user-defined exit-hooks, which may do dangerous
+ // things that conflict with death tests.
+ //
+ // Some compilers can recognize that _exit() never returns and issue the
+ // 'unreachable code' warning for code following this function, unless
+ // fooled by a fake condition.
+ if (AlwaysTrue())
+ _exit(1);
+}
+
+// Tests that death tests work.
+
+class TestForDeathTest : public testing::Test {
+ protected:
+ TestForDeathTest() : original_dir_(FilePath::GetCurrentDir()) {}
+
+ virtual ~TestForDeathTest() {
+ posix::ChDir(original_dir_.c_str());
+ }
+
+ // A static member function that's expected to die.
+ static void StaticMemberFunction() { DieInside("StaticMemberFunction"); }
+
+ // A method of the test fixture that may die.
+ void MemberFunction() {
+ if (should_die_)
+ DieInside("MemberFunction");
+ }
+
+ // True iff MemberFunction() should die.
+ bool should_die_;
+ const FilePath original_dir_;
+};
+
+// A class with a member function that may die.
+class MayDie {
+ public:
+ explicit MayDie(bool should_die) : should_die_(should_die) {}
+
+ // A member function that may die.
+ void MemberFunction() const {
+ if (should_die_)
+ DieInside("MayDie::MemberFunction");
+ }
+
+ private:
+ // True iff MemberFunction() should die.
+ bool should_die_;
+};
+
+// A global function that's expected to die.
+void GlobalFunction() { DieInside("GlobalFunction"); }
+
+// A non-void function that's expected to die.
+int NonVoidFunction() {
+ DieInside("NonVoidFunction");
+ return 1;
+}
+
+// A unary function that may die.
+void DieIf(bool should_die) {
+ if (should_die)
+ DieInside("DieIf");
+}
+
+// A binary function that may die.
+bool DieIfLessThan(int x, int y) {
+ if (x < y) {
+ DieInside("DieIfLessThan");
+ }
+ return true;
+}
+
+// Tests that ASSERT_DEATH can be used outside a TEST, TEST_F, or test fixture.
+void DeathTestSubroutine() {
+ EXPECT_DEATH(GlobalFunction(), "death.*GlobalFunction");
+ ASSERT_DEATH(GlobalFunction(), "death.*GlobalFunction");
+}
+
+// Death in dbg, not opt.
+int DieInDebugElse12(int* sideeffect) {
+ if (sideeffect) *sideeffect = 12;
+#ifndef NDEBUG
+ DieInside("DieInDebugElse12");
+#endif // NDEBUG
+ return 12;
+}
+
+#if GTEST_OS_WINDOWS
+
+// Tests the ExitedWithCode predicate.
+TEST(ExitStatusPredicateTest, ExitedWithCode) {
+ // On Windows, the process's exit code is the same as its exit status,
+  // so the predicate just compares its input with its parameter.
+ EXPECT_TRUE(testing::ExitedWithCode(0)(0));
+ EXPECT_TRUE(testing::ExitedWithCode(1)(1));
+ EXPECT_TRUE(testing::ExitedWithCode(42)(42));
+ EXPECT_FALSE(testing::ExitedWithCode(0)(1));
+ EXPECT_FALSE(testing::ExitedWithCode(1)(0));
+}
+
+#else
+
+// Returns the exit status of a process that calls _exit(2) with a
+// given exit code. This is a helper function for the
+// ExitStatusPredicateTest test suite.
+static int NormalExitStatus(int exit_code) {
+ pid_t child_pid = fork();
+ if (child_pid == 0) {
+ _exit(exit_code);
+ }
+ int status;
+ waitpid(child_pid, &status, 0);
+ return status;
+}
+
+// Returns the exit status of a process that raises a given signal.
+// If the signal does not cause the process to die, then it returns
+// instead the exit status of a process that exits normally with exit
+// code 1. This is a helper function for the ExitStatusPredicateTest
+// test suite.
+static int KilledExitStatus(int signum) {
+ pid_t child_pid = fork();
+ if (child_pid == 0) {
+ raise(signum);
+ _exit(1);
+ }
+ int status;
+ waitpid(child_pid, &status, 0);
+ return status;
+}
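+
+// Note: NormalExitStatus() and KilledExitStatus() return the raw status
+// word from waitpid(); on POSIX the ExitedWithCode and KilledBySignal
+// predicates used below are expected to decode it (e.g. via the standard
+// WIFEXITED/WEXITSTATUS and WIFSIGNALED/WTERMSIG macros).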
+
+// Tests the ExitedWithCode predicate.
+TEST(ExitStatusPredicateTest, ExitedWithCode) {
+ const int status0 = NormalExitStatus(0);
+ const int status1 = NormalExitStatus(1);
+ const int status42 = NormalExitStatus(42);
+ const testing::ExitedWithCode pred0(0);
+ const testing::ExitedWithCode pred1(1);
+ const testing::ExitedWithCode pred42(42);
+ EXPECT_PRED1(pred0, status0);
+ EXPECT_PRED1(pred1, status1);
+ EXPECT_PRED1(pred42, status42);
+ EXPECT_FALSE(pred0(status1));
+ EXPECT_FALSE(pred42(status0));
+ EXPECT_FALSE(pred1(status42));
+}
+
+// Tests the KilledBySignal predicate.
+TEST(ExitStatusPredicateTest, KilledBySignal) {
+ const int status_segv = KilledExitStatus(SIGSEGV);
+ const int status_kill = KilledExitStatus(SIGKILL);
+ const testing::KilledBySignal pred_segv(SIGSEGV);
+ const testing::KilledBySignal pred_kill(SIGKILL);
+ EXPECT_PRED1(pred_segv, status_segv);
+ EXPECT_PRED1(pred_kill, status_kill);
+ EXPECT_FALSE(pred_segv(status_kill));
+ EXPECT_FALSE(pred_kill(status_segv));
+}
+
+#endif // GTEST_OS_WINDOWS
+
+// Tests that the death test macros expand to code which may or may not
+// be followed by operator<<, and that in either case the complete text
+// comprises only a single C++ statement.
+TEST_F(TestForDeathTest, SingleStatement) {
+ if (AlwaysFalse())
+ // This would fail if executed; this is a compilation test only
+ ASSERT_DEATH(return, "");
+
+ if (AlwaysTrue())
+ EXPECT_DEATH(_exit(1), "");
+ else
+ // This empty "else" branch is meant to ensure that EXPECT_DEATH
+ // doesn't expand into an "if" statement without an "else"
+ ;
+
+ if (AlwaysFalse())
+ ASSERT_DEATH(return, "") << "did not die";
+
+ if (AlwaysFalse())
+ ;
+ else
+ EXPECT_DEATH(_exit(1), "") << 1 << 2 << 3;
+}
+
+void DieWithEmbeddedNul() {
+ fprintf(stderr, "Hello%cmy null world.\n", '\0');
+ fflush(stderr);
+ _exit(1);
+}
+
+#if GTEST_USES_PCRE
+// Tests that EXPECT_DEATH and ASSERT_DEATH work when the error
+// message has a NUL character in it.
+TEST_F(TestForDeathTest, EmbeddedNulInMessage) {
+ // TODO(wan@google.com): <regex.h> doesn't support matching strings
+  // with embedded NUL characters - find a way to work around it.
+ EXPECT_DEATH(DieWithEmbeddedNul(), "my null world");
+ ASSERT_DEATH(DieWithEmbeddedNul(), "my null world");
+}
+#endif // GTEST_USES_PCRE
+
+// Tests that death test macros expand to code which interacts well with switch
+// statements.
+TEST_F(TestForDeathTest, SwitchStatement) {
+// Microsoft compiler usually complains about switch statements without
+// case labels. We suppress that warning for this test.
+#ifdef _MSC_VER
+#pragma warning(push)
+#pragma warning(disable: 4065)
+#endif // _MSC_VER
+
+ switch (0)
+ default:
+ ASSERT_DEATH(_exit(1), "") << "exit in default switch handler";
+
+ switch (0)
+ case 0:
+ EXPECT_DEATH(_exit(1), "") << "exit in switch case";
+
+#ifdef _MSC_VER
+#pragma warning(pop)
+#endif // _MSC_VER
+}
+
+// Tests that a static member function can be used in a "fast" style
+// death test.
+TEST_F(TestForDeathTest, StaticMemberFunctionFastStyle) {
+ testing::GTEST_FLAG(death_test_style) = "fast";
+ ASSERT_DEATH(StaticMemberFunction(), "death.*StaticMember");
+}
+
+// Tests that a method of the test fixture can be used in a "fast"
+// style death test.
+TEST_F(TestForDeathTest, MemberFunctionFastStyle) {
+ testing::GTEST_FLAG(death_test_style) = "fast";
+ should_die_ = true;
+ EXPECT_DEATH(MemberFunction(), "inside.*MemberFunction");
+}
+
+void ChangeToRootDir() { posix::ChDir(GTEST_PATH_SEP_); }
+
+// Tests that death tests work even if the current directory has been
+// changed.
+TEST_F(TestForDeathTest, FastDeathTestInChangedDir) {
+ testing::GTEST_FLAG(death_test_style) = "fast";
+
+ ChangeToRootDir();
+ EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), "");
+
+ ChangeToRootDir();
+ ASSERT_DEATH(_exit(1), "");
+}
+
+// Repeats a representative sample of death tests in the "threadsafe" style:
+
+TEST_F(TestForDeathTest, StaticMemberFunctionThreadsafeStyle) {
+ testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ ASSERT_DEATH(StaticMemberFunction(), "death.*StaticMember");
+}
+
+TEST_F(TestForDeathTest, MemberFunctionThreadsafeStyle) {
+ testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ should_die_ = true;
+ EXPECT_DEATH(MemberFunction(), "inside.*MemberFunction");
+}
+
+TEST_F(TestForDeathTest, ThreadsafeDeathTestInLoop) {
+ testing::GTEST_FLAG(death_test_style) = "threadsafe";
+
+ for (int i = 0; i < 3; ++i)
+ EXPECT_EXIT(_exit(i), testing::ExitedWithCode(i), "") << ": i = " << i;
+}
+
+TEST_F(TestForDeathTest, ThreadsafeDeathTestInChangedDir) {
+ testing::GTEST_FLAG(death_test_style) = "threadsafe";
+
+ ChangeToRootDir();
+ EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), "");
+
+ ChangeToRootDir();
+ ASSERT_DEATH(_exit(1), "");
+}
+
+TEST_F(TestForDeathTest, MixedStyles) {
+ testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ EXPECT_DEATH(_exit(1), "");
+ testing::GTEST_FLAG(death_test_style) = "fast";
+ EXPECT_DEATH(_exit(1), "");
+}
+
+namespace {
+
+bool pthread_flag;
+
+void SetPthreadFlag() {
+ pthread_flag = true;
+}
+
+} // namespace
+
+#if GTEST_HAS_CLONE && GTEST_HAS_PTHREAD
+
+TEST_F(TestForDeathTest, DoesNotExecuteAtforkHooks) {
+ if (!testing::GTEST_FLAG(death_test_use_fork)) {
+ testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ pthread_flag = false;
+ ASSERT_EQ(0, pthread_atfork(&SetPthreadFlag, NULL, NULL));
+ ASSERT_DEATH(_exit(1), "");
+ ASSERT_FALSE(pthread_flag);
+ }
+}
+
+#endif // GTEST_HAS_CLONE && GTEST_HAS_PTHREAD
+
+// Tests that a method of another class can be used in a death test.
+TEST_F(TestForDeathTest, MethodOfAnotherClass) {
+ const MayDie x(true);
+ ASSERT_DEATH(x.MemberFunction(), "MayDie\\:\\:MemberFunction");
+}
+
+// Tests that a global function can be used in a death test.
+TEST_F(TestForDeathTest, GlobalFunction) {
+ EXPECT_DEATH(GlobalFunction(), "GlobalFunction");
+}
+
+// Tests that any value convertible to an RE works as a second
+// argument to EXPECT_DEATH.
+TEST_F(TestForDeathTest, AcceptsAnythingConvertibleToRE) {
+ static const char regex_c_str[] = "GlobalFunction";
+ EXPECT_DEATH(GlobalFunction(), regex_c_str);
+
+ const testing::internal::RE regex(regex_c_str);
+ EXPECT_DEATH(GlobalFunction(), regex);
+
+#if GTEST_HAS_GLOBAL_STRING
+ const string regex_str(regex_c_str);
+ EXPECT_DEATH(GlobalFunction(), regex_str);
+#endif // GTEST_HAS_GLOBAL_STRING
+
+ const ::std::string regex_std_str(regex_c_str);
+ EXPECT_DEATH(GlobalFunction(), regex_std_str);
+}
+
+// Tests that a non-void function can be used in a death test.
+TEST_F(TestForDeathTest, NonVoidFunction) {
+ ASSERT_DEATH(NonVoidFunction(), "NonVoidFunction");
+}
+
+// Tests that functions that take parameter(s) can be used in a death test.
+TEST_F(TestForDeathTest, FunctionWithParameter) {
+ EXPECT_DEATH(DieIf(true), "DieIf\\(\\)");
+ EXPECT_DEATH(DieIfLessThan(2, 3), "DieIfLessThan");
+}
+
+// Tests that ASSERT_DEATH can be used outside a TEST, TEST_F, or test fixture.
+TEST_F(TestForDeathTest, OutsideFixture) {
+ DeathTestSubroutine();
+}
+
+// Tests that death tests can be done inside a loop.
+TEST_F(TestForDeathTest, InsideLoop) {
+ for (int i = 0; i < 5; i++) {
+ EXPECT_DEATH(DieIfLessThan(-1, i), "DieIfLessThan") << "where i == " << i;
+ }
+}
+
+// Tests that a compound statement can be used in a death test.
+TEST_F(TestForDeathTest, CompoundStatement) {
+ EXPECT_DEATH({ // NOLINT
+ const int x = 2;
+ const int y = x + 1;
+ DieIfLessThan(x, y);
+ },
+ "DieIfLessThan");
+}
+
+// Tests that code that doesn't die causes a death test to fail.
+TEST_F(TestForDeathTest, DoesNotDie) {
+ EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(DieIf(false), "DieIf"),
+ "failed to die");
+}
+
+// Tests that a death test fails when the error message isn't expected.
+TEST_F(TestForDeathTest, ErrorMessageMismatch) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_DEATH(DieIf(true), "DieIfLessThan") << "End of death test message.";
+ }, "died but not with expected error");
+}
+
+// On exit, *aborted will be true iff the EXPECT_DEATH() statement
+// aborted the function.
+void ExpectDeathTestHelper(bool* aborted) {
+ *aborted = true;
+ EXPECT_DEATH(DieIf(false), "DieIf"); // This assertion should fail.
+ *aborted = false;
+}
+
+// Tests that EXPECT_DEATH doesn't abort the test on failure.
+TEST_F(TestForDeathTest, EXPECT_DEATH) {
+ bool aborted = true;
+ EXPECT_NONFATAL_FAILURE(ExpectDeathTestHelper(&aborted),
+ "failed to die");
+ EXPECT_FALSE(aborted);
+}
+
+// Tests that ASSERT_DEATH does abort the test on failure.
+TEST_F(TestForDeathTest, ASSERT_DEATH) {
+ static bool aborted;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ aborted = true;
+ ASSERT_DEATH(DieIf(false), "DieIf"); // This assertion should fail.
+ aborted = false;
+ }, "failed to die");
+ EXPECT_TRUE(aborted);
+}
+
+// Tests that EXPECT_DEATH evaluates the arguments exactly once.
+TEST_F(TestForDeathTest, SingleEvaluation) {
+ int x = 3;
+ EXPECT_DEATH(DieIf((++x) == 4), "DieIf");
+
+ const char* regex = "DieIf";
+ const char* regex_save = regex;
+ EXPECT_DEATH(DieIfLessThan(3, 4), regex++);
+ EXPECT_EQ(regex_save + 1, regex);
+}
+
+// Tests that run-away death tests are reported as failures.
+TEST_F(TestForDeathTest, Runaway) {
+ EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(static_cast<void>(0), "Foo"),
+ "failed to die.");
+
+ EXPECT_FATAL_FAILURE(ASSERT_DEATH(return, "Bar"),
+ "illegal return in test statement.");
+}
+
+
+// Tests that EXPECT_DEBUG_DEATH works as expected,
+// that is, in debug mode, it:
+// 1. Asserts on death.
+// 2. Has no side effect.
+//
+// And in opt mode, it:
+// 1. Has side effects but does not assert.
+TEST_F(TestForDeathTest, TestExpectDebugDeath) {
+ int sideeffect = 0;
+
+ EXPECT_DEBUG_DEATH(DieInDebugElse12(&sideeffect),
+ "death.*DieInDebugElse12");
+
+#ifdef NDEBUG
+ // Checks that the assignment occurs in opt mode (sideeffect).
+ EXPECT_EQ(12, sideeffect);
+#else
+ // Checks that the assignment does not occur in dbg mode (no sideeffect).
+ EXPECT_EQ(0, sideeffect);
+#endif
+}
+
+// Tests that ASSERT_DEBUG_DEATH works as expected.
+// In debug mode:
+// 1. Asserts on debug death.
+// 2. Has no side effect.
+//
+// In opt mode:
+// 1. Has side effects and returns the expected value (12).
+TEST_F(TestForDeathTest, TestAssertDebugDeath) {
+ int sideeffect = 0;
+
+ ASSERT_DEBUG_DEATH({ // NOLINT
+ // Tests that the return value is 12 in opt mode.
+ EXPECT_EQ(12, DieInDebugElse12(&sideeffect));
+ // Tests that the side effect occurred in opt mode.
+ EXPECT_EQ(12, sideeffect);
+ }, "death.*DieInDebugElse12");
+
+#ifdef NDEBUG
+ // Checks that the assignment occurs in opt mode (sideeffect).
+ EXPECT_EQ(12, sideeffect);
+#else
+ // Checks that the assignment does not occur in dbg mode (no sideeffect).
+ EXPECT_EQ(0, sideeffect);
+#endif
+}
+
+#ifndef NDEBUG
+
+void ExpectDebugDeathHelper(bool* aborted) {
+ *aborted = true;
+ EXPECT_DEBUG_DEATH(return, "") << "This is expected to fail.";
+ *aborted = false;
+}
+
+#if GTEST_OS_WINDOWS
+TEST(PopUpDeathTest, DoesNotShowPopUpOnAbort) {
+ printf("This test should be considered failing if it shows "
+ "any pop-up dialogs.\n");
+ fflush(stdout);
+
+ EXPECT_DEATH({
+ testing::GTEST_FLAG(catch_exceptions) = false;
+ abort();
+ }, "");
+}
+
+TEST(PopUpDeathTest, DoesNotShowPopUpOnThrow) {
+ printf("This test should be considered failing if it shows "
+ "any pop-up dialogs.\n");
+ fflush(stdout);
+
+ EXPECT_DEATH({
+ testing::GTEST_FLAG(catch_exceptions) = false;
+ throw 1;
+ }, "");
+}
+#endif // GTEST_OS_WINDOWS
+
+// Tests that EXPECT_DEBUG_DEATH in debug mode does not abort
+// the function.
+TEST_F(TestForDeathTest, ExpectDebugDeathDoesNotAbort) {
+ bool aborted = true;
+ EXPECT_NONFATAL_FAILURE(ExpectDebugDeathHelper(&aborted), "");
+ EXPECT_FALSE(aborted);
+}
+
+void AssertDebugDeathHelper(bool* aborted) {
+ *aborted = true;
+ ASSERT_DEBUG_DEATH(return, "") << "This is expected to fail.";
+ *aborted = false;
+}
+
+// Tests that ASSERT_DEBUG_DEATH in debug mode aborts the function on
+// failure.
+TEST_F(TestForDeathTest, AssertDebugDeathAborts) {
+ static bool aborted;
+ aborted = false;
+ EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), "");
+ EXPECT_TRUE(aborted);
+}
+
+#endif // !NDEBUG
+
+// Tests the *_EXIT family of macros, using a variety of predicates.
+static void TestExitMacros() {
+ EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), "");
+ ASSERT_EXIT(_exit(42), testing::ExitedWithCode(42), "");
+
+#if GTEST_OS_WINDOWS
+  // Of all signals' effects on the process exit code, only those of SIGABRT
+ // are documented on Windows.
+ // See http://msdn.microsoft.com/en-us/library/dwwzkt4c(VS.71).aspx.
+ EXPECT_EXIT(raise(SIGABRT), testing::ExitedWithCode(3), "");
+#else
+ EXPECT_EXIT(raise(SIGKILL), testing::KilledBySignal(SIGKILL), "") << "foo";
+ ASSERT_EXIT(raise(SIGUSR2), testing::KilledBySignal(SIGUSR2), "") << "bar";
+
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_EXIT(_exit(0), testing::KilledBySignal(SIGSEGV), "")
+ << "This failure is expected, too.";
+ }, "This failure is expected, too.");
+#endif // GTEST_OS_WINDOWS
+
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_EXIT(raise(SIGSEGV), testing::ExitedWithCode(0), "")
+ << "This failure is expected.";
+ }, "This failure is expected.");
+}
+
+TEST_F(TestForDeathTest, ExitMacros) {
+ TestExitMacros();
+}
+
+TEST_F(TestForDeathTest, ExitMacrosUsingFork) {
+ testing::GTEST_FLAG(death_test_use_fork) = true;
+ TestExitMacros();
+}
+
+TEST_F(TestForDeathTest, InvalidStyle) {
+ testing::GTEST_FLAG(death_test_style) = "rococo";
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_DEATH(_exit(0), "") << "This failure is expected.";
+ }, "This failure is expected.");
+}
+
+// A DeathTestFactory that returns MockDeathTests.
+class MockDeathTestFactory : public DeathTestFactory {
+ public:
+ MockDeathTestFactory();
+ virtual bool Create(const char* statement,
+ const ::testing::internal::RE* regex,
+ const char* file, int line, DeathTest** test);
+
+ // Sets the parameters for subsequent calls to Create.
+ void SetParameters(bool create, DeathTest::TestRole role,
+ int status, bool passed);
+
+ // Accessors.
+ int AssumeRoleCalls() const { return assume_role_calls_; }
+ int WaitCalls() const { return wait_calls_; }
+ int PassedCalls() const { return passed_args_.size(); }
+ bool PassedArgument(int n) const { return passed_args_[n]; }
+ int AbortCalls() const { return abort_args_.size(); }
+ DeathTest::AbortReason AbortArgument(int n) const {
+ return abort_args_[n];
+ }
+ bool TestDeleted() const { return test_deleted_; }
+
+ private:
+ friend class MockDeathTest;
+ // If true, Create will return a MockDeathTest; otherwise it returns
+ // NULL.
+ bool create_;
+ // The value a MockDeathTest will return from its AssumeRole method.
+ DeathTest::TestRole role_;
+ // The value a MockDeathTest will return from its Wait method.
+ int status_;
+ // The value a MockDeathTest will return from its Passed method.
+ bool passed_;
+
+ // Number of times AssumeRole was called.
+ int assume_role_calls_;
+ // Number of times Wait was called.
+ int wait_calls_;
+ // The arguments to the calls to Passed since the last call to
+ // SetParameters.
+ std::vector<bool> passed_args_;
+ // The arguments to the calls to Abort since the last call to
+ // SetParameters.
+ std::vector<DeathTest::AbortReason> abort_args_;
+ // True if the last MockDeathTest returned by Create has been
+ // deleted.
+ bool test_deleted_;
+};
+
+
+// A DeathTest implementation useful in testing. It returns values set
+// at its creation from its various inherited DeathTest methods, and
+// reports calls to those methods to its parent MockDeathTestFactory
+// object.
+class MockDeathTest : public DeathTest {
+ public:
+ MockDeathTest(MockDeathTestFactory *parent,
+ TestRole role, int status, bool passed) :
+ parent_(parent), role_(role), status_(status), passed_(passed) {
+ }
+ virtual ~MockDeathTest() {
+ parent_->test_deleted_ = true;
+ }
+ virtual TestRole AssumeRole() {
+ ++parent_->assume_role_calls_;
+ return role_;
+ }
+ virtual int Wait() {
+ ++parent_->wait_calls_;
+ return status_;
+ }
+ virtual bool Passed(bool exit_status_ok) {
+ parent_->passed_args_.push_back(exit_status_ok);
+ return passed_;
+ }
+ virtual void Abort(AbortReason reason) {
+ parent_->abort_args_.push_back(reason);
+ }
+ private:
+ MockDeathTestFactory* const parent_;
+ const TestRole role_;
+ const int status_;
+ const bool passed_;
+};
+
+
+// MockDeathTestFactory constructor.
+MockDeathTestFactory::MockDeathTestFactory()
+ : create_(true),
+ role_(DeathTest::OVERSEE_TEST),
+ status_(0),
+ passed_(true),
+ assume_role_calls_(0),
+ wait_calls_(0),
+ passed_args_(),
+ abort_args_() {
+}
+
+
+// Sets the parameters for subsequent calls to Create.
+void MockDeathTestFactory::SetParameters(bool create,
+ DeathTest::TestRole role,
+ int status, bool passed) {
+ create_ = create;
+ role_ = role;
+ status_ = status;
+ passed_ = passed;
+
+ assume_role_calls_ = 0;
+ wait_calls_ = 0;
+ passed_args_.clear();
+ abort_args_.clear();
+}
+
+
+// Sets test to NULL (if create_ is false) or to the address of a new
+// MockDeathTest object with parameters taken from the last call
+// to SetParameters (if create_ is true). Always returns true.
+bool MockDeathTestFactory::Create(const char* /*statement*/,
+ const ::testing::internal::RE* /*regex*/,
+ const char* /*file*/,
+ int /*line*/,
+ DeathTest** test) {
+ test_deleted_ = false;
+ if (create_) {
+ *test = new MockDeathTest(this, role_, status_, passed_);
+ } else {
+ *test = NULL;
+ }
+ return true;
+}
+
+// A test fixture for testing the logic of the GTEST_DEATH_TEST_ macro.
+// It installs a MockDeathTestFactory that is used for the duration
+// of the test case.
+class MacroLogicDeathTest : public testing::Test {
+ protected:
+ static testing::internal::ReplaceDeathTestFactory* replacer_;
+ static MockDeathTestFactory* factory_;
+
+ static void SetUpTestCase() {
+ factory_ = new MockDeathTestFactory;
+ replacer_ = new testing::internal::ReplaceDeathTestFactory(factory_);
+ }
+
+ static void TearDownTestCase() {
+ delete replacer_;
+ replacer_ = NULL;
+ delete factory_;
+ factory_ = NULL;
+ }
+
+ // Runs a death test that breaks the rules by returning. Such a death
+ // test cannot be run directly from a test routine that uses a
+ // MockDeathTest, or the remainder of the routine will not be executed.
+ static void RunReturningDeathTest(bool* flag) {
+ ASSERT_DEATH({ // NOLINT
+ *flag = true;
+ return;
+ }, "");
+ }
+};
+
+testing::internal::ReplaceDeathTestFactory* MacroLogicDeathTest::replacer_
+ = NULL;
+MockDeathTestFactory* MacroLogicDeathTest::factory_ = NULL;
+
+
+// Test that nothing happens when the factory doesn't return a DeathTest:
+TEST_F(MacroLogicDeathTest, NothingHappens) {
+ bool flag = false;
+ factory_->SetParameters(false, DeathTest::OVERSEE_TEST, 0, true);
+ EXPECT_DEATH(flag = true, "");
+ EXPECT_FALSE(flag);
+ EXPECT_EQ(0, factory_->AssumeRoleCalls());
+ EXPECT_EQ(0, factory_->WaitCalls());
+ EXPECT_EQ(0, factory_->PassedCalls());
+ EXPECT_EQ(0, factory_->AbortCalls());
+ EXPECT_FALSE(factory_->TestDeleted());
+}
+
+// Test that the parent process doesn't run the death test code,
+// and that the Passed method returns false when the (simulated)
+// child process exits with status 0:
+TEST_F(MacroLogicDeathTest, ChildExitsSuccessfully) {
+ bool flag = false;
+ factory_->SetParameters(true, DeathTest::OVERSEE_TEST, 0, true);
+ EXPECT_DEATH(flag = true, "");
+ EXPECT_FALSE(flag);
+ EXPECT_EQ(1, factory_->AssumeRoleCalls());
+ EXPECT_EQ(1, factory_->WaitCalls());
+ ASSERT_EQ(1, factory_->PassedCalls());
+ EXPECT_FALSE(factory_->PassedArgument(0));
+ EXPECT_EQ(0, factory_->AbortCalls());
+ EXPECT_TRUE(factory_->TestDeleted());
+}
+
+// Tests that the Passed method was given the argument "true" when
+// the (simulated) child process exits with status 1:
+TEST_F(MacroLogicDeathTest, ChildExitsUnsuccessfully) {
+ bool flag = false;
+ factory_->SetParameters(true, DeathTest::OVERSEE_TEST, 1, true);
+ EXPECT_DEATH(flag = true, "");
+ EXPECT_FALSE(flag);
+ EXPECT_EQ(1, factory_->AssumeRoleCalls());
+ EXPECT_EQ(1, factory_->WaitCalls());
+ ASSERT_EQ(1, factory_->PassedCalls());
+ EXPECT_TRUE(factory_->PassedArgument(0));
+ EXPECT_EQ(0, factory_->AbortCalls());
+ EXPECT_TRUE(factory_->TestDeleted());
+}
+
+// Tests that the (simulated) child process executes the death test
+// code, and is aborted with the correct AbortReason if it
+// executes a return statement.
+TEST_F(MacroLogicDeathTest, ChildPerformsReturn) {
+ bool flag = false;
+ factory_->SetParameters(true, DeathTest::EXECUTE_TEST, 0, true);
+ RunReturningDeathTest(&flag);
+ EXPECT_TRUE(flag);
+ EXPECT_EQ(1, factory_->AssumeRoleCalls());
+ EXPECT_EQ(0, factory_->WaitCalls());
+ EXPECT_EQ(0, factory_->PassedCalls());
+ EXPECT_EQ(1, factory_->AbortCalls());
+ EXPECT_EQ(DeathTest::TEST_ENCOUNTERED_RETURN_STATEMENT,
+ factory_->AbortArgument(0));
+ EXPECT_TRUE(factory_->TestDeleted());
+}
+
+// Tests that the (simulated) child process is aborted with the
+// correct AbortReason if it does not die.
+TEST_F(MacroLogicDeathTest, ChildDoesNotDie) {
+ bool flag = false;
+ factory_->SetParameters(true, DeathTest::EXECUTE_TEST, 0, true);
+ EXPECT_DEATH(flag = true, "");
+ EXPECT_TRUE(flag);
+ EXPECT_EQ(1, factory_->AssumeRoleCalls());
+ EXPECT_EQ(0, factory_->WaitCalls());
+ EXPECT_EQ(0, factory_->PassedCalls());
+ // This time there are two calls to Abort: one since the test didn't
+ // die, and another from the ReturnSentinel when it's destroyed. The
+ // sentinel normally isn't destroyed if a test doesn't die, since
+ // _exit(2) is called in that case by ForkingDeathTest, but not by
+ // our MockDeathTest.
+ ASSERT_EQ(2, factory_->AbortCalls());
+ EXPECT_EQ(DeathTest::TEST_DID_NOT_DIE,
+ factory_->AbortArgument(0));
+ EXPECT_EQ(DeathTest::TEST_ENCOUNTERED_RETURN_STATEMENT,
+ factory_->AbortArgument(1));
+ EXPECT_TRUE(factory_->TestDeleted());
+}
+
+// Tests that a successful death test does not register a successful
+// test part.
+TEST(SuccessRegistrationDeathTest, NoSuccessPart) {
+ EXPECT_DEATH(_exit(1), "");
+ EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
+}
+
+TEST(StreamingAssertionsDeathTest, DeathTest) {
+ EXPECT_DEATH(_exit(1), "") << "unexpected failure";
+ ASSERT_DEATH(_exit(1), "") << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_DEATH(_exit(0), "") << "expected failure";
+ }, "expected failure");
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_DEATH(_exit(0), "") << "expected failure";
+ }, "expected failure");
+}
+
+// Tests that GetLastErrnoDescription returns an empty string when the
+// last error is 0, and a non-empty string when it is non-zero.
+TEST(GetLastErrnoDescription, GetLastErrnoDescriptionWorks) {
+ errno = ENOENT;
+ EXPECT_STRNE("", GetLastErrnoDescription().c_str());
+ errno = 0;
+ EXPECT_STREQ("", GetLastErrnoDescription().c_str());
+}
+
+#if GTEST_OS_WINDOWS
+TEST(AutoHandleTest, AutoHandleWorks) {
+ HANDLE handle = ::CreateEvent(NULL, FALSE, FALSE, NULL);
+ ASSERT_NE(INVALID_HANDLE_VALUE, handle);
+
+ // Tests that the AutoHandle is correctly initialized with a handle.
+ testing::internal::AutoHandle auto_handle(handle);
+ EXPECT_EQ(handle, auto_handle.Get());
+
+ // Tests that Reset assigns INVALID_HANDLE_VALUE.
+ // Note that this cannot verify whether the original handle is closed.
+ auto_handle.Reset();
+ EXPECT_EQ(INVALID_HANDLE_VALUE, auto_handle.Get());
+
+ // Tests that Reset assigns the new handle.
+ // Note that this cannot verify whether the original handle is closed.
+ handle = ::CreateEvent(NULL, FALSE, FALSE, NULL);
+ ASSERT_NE(INVALID_HANDLE_VALUE, handle);
+ auto_handle.Reset(handle);
+ EXPECT_EQ(handle, auto_handle.Get());
+
+ // Tests that AutoHandle contains INVALID_HANDLE_VALUE by default.
+ testing::internal::AutoHandle auto_handle2;
+ EXPECT_EQ(INVALID_HANDLE_VALUE, auto_handle2.Get());
+}
+#endif // GTEST_OS_WINDOWS
+
+#if GTEST_OS_WINDOWS
+typedef unsigned __int64 BiggestParsable;
+typedef signed __int64 BiggestSignedParsable;
+const BiggestParsable kBiggestParsableMax = ULLONG_MAX;
+const BiggestParsable kBiggestSignedParsableMax = LLONG_MAX;
+#else
+typedef unsigned long long BiggestParsable;
+typedef signed long long BiggestSignedParsable;
+const BiggestParsable kBiggestParsableMax =
+ ::std::numeric_limits<BiggestParsable>::max();
+const BiggestSignedParsable kBiggestSignedParsableMax =
+ ::std::numeric_limits<BiggestSignedParsable>::max();
+#endif // GTEST_OS_WINDOWS
+
+TEST(ParseNaturalNumberTest, RejectsInvalidFormat) {
+ BiggestParsable result = 0;
+
+ // Rejects non-numbers.
+ EXPECT_FALSE(ParseNaturalNumber(String("non-number string"), &result));
+
+ // Rejects numbers with whitespace prefix.
+ EXPECT_FALSE(ParseNaturalNumber(String(" 123"), &result));
+
+ // Rejects negative numbers.
+ EXPECT_FALSE(ParseNaturalNumber(String("-123"), &result));
+
+ // Rejects numbers starting with a plus sign.
+ EXPECT_FALSE(ParseNaturalNumber(String("+123"), &result));
+ errno = 0;
+}
+
+TEST(ParseNaturalNumberTest, RejectsOverflownNumbers) {
+ BiggestParsable result = 0;
+
+ EXPECT_FALSE(ParseNaturalNumber(String("99999999999999999999999"), &result));
+
+ signed char char_result = 0;
+ EXPECT_FALSE(ParseNaturalNumber(String("200"), &char_result));
+ errno = 0;
+}
+
+TEST(ParseNaturalNumberTest, AcceptsValidNumbers) {
+ BiggestParsable result = 0;
+
+ result = 0;
+ ASSERT_TRUE(ParseNaturalNumber(String("123"), &result));
+ EXPECT_EQ(123U, result);
+
+ // Check 0 as an edge case.
+ result = 1;
+ ASSERT_TRUE(ParseNaturalNumber(String("0"), &result));
+ EXPECT_EQ(0U, result);
+
+ result = 1;
+ ASSERT_TRUE(ParseNaturalNumber(String("00000"), &result));
+ EXPECT_EQ(0U, result);
+}
+
+TEST(ParseNaturalNumberTest, AcceptsTypeLimits) {
+ Message msg;
+ msg << kBiggestParsableMax;
+
+ BiggestParsable result = 0;
+ EXPECT_TRUE(ParseNaturalNumber(msg.GetString(), &result));
+ EXPECT_EQ(kBiggestParsableMax, result);
+
+ Message msg2;
+ msg2 << kBiggestSignedParsableMax;
+
+ BiggestSignedParsable signed_result = 0;
+ EXPECT_TRUE(ParseNaturalNumber(msg2.GetString(), &signed_result));
+ EXPECT_EQ(kBiggestSignedParsableMax, signed_result);
+
+ Message msg3;
+ msg3 << INT_MAX;
+
+ int int_result = 0;
+ EXPECT_TRUE(ParseNaturalNumber(msg3.GetString(), &int_result));
+ EXPECT_EQ(INT_MAX, int_result);
+
+ Message msg4;
+ msg4 << UINT_MAX;
+
+ unsigned int uint_result = 0;
+ EXPECT_TRUE(ParseNaturalNumber(msg4.GetString(), &uint_result));
+ EXPECT_EQ(UINT_MAX, uint_result);
+}
+
+TEST(ParseNaturalNumberTest, WorksForShorterIntegers) {
+ short short_result = 0;
+ ASSERT_TRUE(ParseNaturalNumber(String("123"), &short_result));
+ EXPECT_EQ(123, short_result);
+
+ signed char char_result = 0;
+ ASSERT_TRUE(ParseNaturalNumber(String("123"), &char_result));
+ EXPECT_EQ(123, char_result);
+}
+
+#if GTEST_OS_WINDOWS
+TEST(EnvironmentTest, HandleFitsIntoSizeT) {
+ // TODO(vladl@google.com): Remove this test after this condition is verified
+ // in a static assertion in gtest-death-test.cc in the function
+ // GetStatusFileDescriptor.
+ ASSERT_TRUE(sizeof(HANDLE) <= sizeof(size_t));
+}
+#endif // GTEST_OS_WINDOWS
+
+// Tests that EXPECT_DEATH_IF_SUPPORTED/ASSERT_DEATH_IF_SUPPORTED trigger
+// failures when death tests are available on the system.
+TEST(ConditionalDeathMacrosDeathTest, ExpectsDeathWhenDeathTestsAvailable) {
+ EXPECT_DEATH_IF_SUPPORTED(DieInside("CondDeathTestExpectMacro"),
+ "death inside CondDeathTestExpectMacro");
+ ASSERT_DEATH_IF_SUPPORTED(DieInside("CondDeathTestAssertMacro"),
+ "death inside CondDeathTestAssertMacro");
+
+  // An empty statement will not crash, so each death assertion below must
+  // report a failure.
+ EXPECT_NONFATAL_FAILURE(EXPECT_DEATH_IF_SUPPORTED(;, ""), "");
+ EXPECT_FATAL_FAILURE(ASSERT_DEATH_IF_SUPPORTED(;, ""), "");
+}
+
+#else
+
+using testing::internal::CaptureStderr;
+using testing::internal::GetCapturedStderr;
+using testing::internal::String;
+
+// Tests that EXPECT_DEATH_IF_SUPPORTED/ASSERT_DEATH_IF_SUPPORTED are still
+// defined but do not trigger failures when death tests are not available on
+// the system.
+TEST(ConditionalDeathMacrosTest, WarnsWhenDeathTestsNotAvailable) {
+ // Empty statement will not crash, but that should not trigger a failure
+ // when death tests are not supported.
+ CaptureStderr();
+ EXPECT_DEATH_IF_SUPPORTED(;, "");
+ String output = GetCapturedStderr();
+ ASSERT_TRUE(NULL != strstr(output.c_str(),
+ "Death tests are not supported on this platform"));
+ ASSERT_TRUE(NULL != strstr(output.c_str(), ";"));
+
+ // The streamed message should not be printed as there is no test failure.
+ CaptureStderr();
+ EXPECT_DEATH_IF_SUPPORTED(;, "") << "streamed message";
+ output = GetCapturedStderr();
+ ASSERT_TRUE(NULL == strstr(output.c_str(), "streamed message"));
+
+ CaptureStderr();
+ ASSERT_DEATH_IF_SUPPORTED(;, ""); // NOLINT
+ output = GetCapturedStderr();
+ ASSERT_TRUE(NULL != strstr(output.c_str(),
+ "Death tests are not supported on this platform"));
+ ASSERT_TRUE(NULL != strstr(output.c_str(), ";"));
+
+ CaptureStderr();
+ ASSERT_DEATH_IF_SUPPORTED(;, "") << "streamed message"; // NOLINT
+ output = GetCapturedStderr();
+ ASSERT_TRUE(NULL == strstr(output.c_str(), "streamed message"));
+}
+
+void FuncWithAssert(int* n) {
+ ASSERT_DEATH_IF_SUPPORTED(return;, "");
+ (*n)++;
+}
+
+// Tests that ASSERT_DEATH_IF_SUPPORTED does not return from the current
+// function (as ASSERT_DEATH does) if death tests are not supported.
+TEST(ConditionalDeathMacrosTest, AssertDeathDoesNotReturnIfUnsupported) {
+ int n = 0;
+ FuncWithAssert(&n);
+ EXPECT_EQ(1, n);
+}
+#endif // GTEST_HAS_DEATH_TEST
+
+// Tests that the death test macros expand to code which may or may not
+// be followed by operator<<, and that in either case the complete text
+// comprises only a single C++ statement.
+//
+// The syntax should work whether death tests are available or not.
+TEST(ConditionalDeathMacrosSyntaxDeathTest, SingleStatement) {
+ if (AlwaysFalse())
+ // This would fail if executed; this is a compilation test only
+ ASSERT_DEATH_IF_SUPPORTED(return, "");
+
+ if (AlwaysTrue())
+ EXPECT_DEATH_IF_SUPPORTED(_exit(1), "");
+ else
+ // This empty "else" branch is meant to ensure that EXPECT_DEATH
+ // doesn't expand into an "if" statement without an "else"
+ ; // NOLINT
+
+ if (AlwaysFalse())
+ ASSERT_DEATH_IF_SUPPORTED(return, "") << "did not die";
+
+ if (AlwaysFalse())
+ ; // NOLINT
+ else
+ EXPECT_DEATH_IF_SUPPORTED(_exit(1), "") << 1 << 2 << 3;
+}
+
+// Tests that conditional death test macros expand to code which interacts
+// well with switch statements.
+TEST(ConditionalDeathMacrosSyntaxDeathTest, SwitchStatement) {
+// Microsoft compiler usually complains about switch statements without
+// case labels. We suppress that warning for this test.
+#ifdef _MSC_VER
+#pragma warning(push)
+#pragma warning(disable: 4065)
+#endif // _MSC_VER
+
+ switch (0)
+ default:
+ ASSERT_DEATH_IF_SUPPORTED(_exit(1), "")
+ << "exit in default switch handler";
+
+ switch (0)
+ case 0:
+ EXPECT_DEATH_IF_SUPPORTED(_exit(1), "") << "exit in switch case";
+
+#ifdef _MSC_VER
+#pragma warning(pop)
+#endif // _MSC_VER
+}
+
+// Tests that a test case whose name ends with "DeathTest" works fine
+// on Windows.
+TEST(NotADeathTest, Test) {
+ SUCCEED();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest-filepath_test.cc b/Source/ThirdParty/gtest/test/gtest-filepath_test.cc
new file mode 100644
index 000000000..625028275
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-filepath_test.cc
@@ -0,0 +1,690 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: keith.ray@gmail.com (Keith Ray)
+//
+// Google Test filepath utilities
+//
+// This file tests classes and functions used internally by
+// Google Test. They are subject to change without notice.
+//
+// This file is #included from gtest_unittest.cc, to avoid changing
+// build or make-files for some existing Google Test clients. Do not
+// #include this file anywhere else!
+
+#include <gtest/internal/gtest-filepath.h>
+#include <gtest/gtest.h>
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+#if GTEST_OS_WINDOWS_MOBILE
+#include <windows.h> // NOLINT
+#elif GTEST_OS_WINDOWS
+#include <direct.h> // NOLINT
+#endif // GTEST_OS_WINDOWS_MOBILE
+
+namespace testing {
+namespace internal {
+namespace {
+
+#if GTEST_OS_WINDOWS_MOBILE
+// TODO(wan@google.com): Move these to the POSIX adapter section in
+// gtest-port.h.
+
+// Windows CE doesn't have the remove C function.
+int remove(const char* path) {
+ LPCWSTR wpath = String::AnsiToUtf16(path);
+ int ret = DeleteFile(wpath) ? 0 : -1;
+ delete [] wpath;
+ return ret;
+}
+// Windows CE doesn't have the _rmdir C function.
+int _rmdir(const char* path) {
+ FilePath filepath(path);
+ LPCWSTR wpath = String::AnsiToUtf16(
+ filepath.RemoveTrailingPathSeparator().c_str());
+ int ret = RemoveDirectory(wpath) ? 0 : -1;
+ delete [] wpath;
+ return ret;
+}
+
+#else
+
+TEST(GetCurrentDirTest, ReturnsCurrentDir) {
+ const FilePath original_dir = FilePath::GetCurrentDir();
+ EXPECT_FALSE(original_dir.IsEmpty());
+
+ posix::ChDir(GTEST_PATH_SEP_);
+ const FilePath cwd = FilePath::GetCurrentDir();
+ posix::ChDir(original_dir.c_str());
+
+#if GTEST_OS_WINDOWS
+ // Skips the ":".
+ const char* const cwd_without_drive = strchr(cwd.c_str(), ':');
+ ASSERT_TRUE(cwd_without_drive != NULL);
+ EXPECT_STREQ(GTEST_PATH_SEP_, cwd_without_drive + 1);
+#else
+ EXPECT_STREQ(GTEST_PATH_SEP_, cwd.c_str());
+#endif
+}
+
+#endif // GTEST_OS_WINDOWS_MOBILE
+
+TEST(IsEmptyTest, ReturnsTrueForEmptyPath) {
+ EXPECT_TRUE(FilePath("").IsEmpty());
+ EXPECT_TRUE(FilePath(NULL).IsEmpty());
+}
+
+TEST(IsEmptyTest, ReturnsFalseForNonEmptyPath) {
+ EXPECT_FALSE(FilePath("a").IsEmpty());
+ EXPECT_FALSE(FilePath(".").IsEmpty());
+ EXPECT_FALSE(FilePath("a/b").IsEmpty());
+ EXPECT_FALSE(FilePath("a\\b\\").IsEmpty());
+}
+
+// RemoveDirectoryName "" -> ""
+TEST(RemoveDirectoryNameTest, WhenEmptyName) {
+ EXPECT_STREQ("", FilePath("").RemoveDirectoryName().c_str());
+}
+
+// RemoveDirectoryName "afile" -> "afile"
+TEST(RemoveDirectoryNameTest, ButNoDirectory) {
+ EXPECT_STREQ("afile",
+ FilePath("afile").RemoveDirectoryName().c_str());
+}
+
+// RemoveDirectoryName "/afile" -> "afile"
+TEST(RemoveDirectoryNameTest, RootFileShouldGiveFileName) {
+ EXPECT_STREQ("afile",
+ FilePath(GTEST_PATH_SEP_ "afile").RemoveDirectoryName().c_str());
+}
+
+// RemoveDirectoryName "adir/" -> ""
+TEST(RemoveDirectoryNameTest, WhereThereIsNoFileName) {
+ EXPECT_STREQ("",
+ FilePath("adir" GTEST_PATH_SEP_).RemoveDirectoryName().c_str());
+}
+
+// RemoveDirectoryName "adir/afile" -> "afile"
+TEST(RemoveDirectoryNameTest, ShouldGiveFileName) {
+ EXPECT_STREQ("afile",
+ FilePath("adir" GTEST_PATH_SEP_ "afile").RemoveDirectoryName().c_str());
+}
+
+// RemoveDirectoryName "adir/subdir/afile" -> "afile"
+TEST(RemoveDirectoryNameTest, ShouldAlsoGiveFileName) {
+ EXPECT_STREQ("afile",
+ FilePath("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_ "afile")
+ .RemoveDirectoryName().c_str());
+}
+
+#if GTEST_HAS_ALT_PATH_SEP_
+
+// Tests that RemoveDirectoryName() works with the alternate separator
+// on Windows.
+
+// RemoveDirectoryName("/afile") -> "afile"
+TEST(RemoveDirectoryNameTest, RootFileShouldGiveFileNameForAlternateSeparator) {
+ EXPECT_STREQ("afile",
+ FilePath("/afile").RemoveDirectoryName().c_str());
+}
+
+// RemoveDirectoryName("adir/") -> ""
+TEST(RemoveDirectoryNameTest, WhereThereIsNoFileNameForAlternateSeparator) {
+ EXPECT_STREQ("",
+ FilePath("adir/").RemoveDirectoryName().c_str());
+}
+
+// RemoveDirectoryName("adir/afile") -> "afile"
+TEST(RemoveDirectoryNameTest, ShouldGiveFileNameForAlternateSeparator) {
+ EXPECT_STREQ("afile",
+ FilePath("adir/afile").RemoveDirectoryName().c_str());
+}
+
+// RemoveDirectoryName("adir/subdir/afile") -> "afile"
+TEST(RemoveDirectoryNameTest, ShouldAlsoGiveFileNameForAlternateSeparator) {
+ EXPECT_STREQ("afile",
+ FilePath("adir/subdir/afile").RemoveDirectoryName().c_str());
+}
+
+#endif
+
+// RemoveFileName "" -> "./"
+TEST(RemoveFileNameTest, EmptyName) {
+#if GTEST_OS_WINDOWS_MOBILE
+ // On Windows CE, we use the root as the current directory.
+ EXPECT_STREQ(GTEST_PATH_SEP_,
+ FilePath("").RemoveFileName().c_str());
+#else
+ EXPECT_STREQ("." GTEST_PATH_SEP_,
+ FilePath("").RemoveFileName().c_str());
+#endif
+}
+
+// RemoveFileName "adir/" -> "adir/"
+TEST(RemoveFileNameTest, ButNoFile) {
+ EXPECT_STREQ("adir" GTEST_PATH_SEP_,
+ FilePath("adir" GTEST_PATH_SEP_).RemoveFileName().c_str());
+}
+
+// RemoveFileName "adir/afile" -> "adir/"
+TEST(RemoveFileNameTest, GivesDirName) {
+ EXPECT_STREQ("adir" GTEST_PATH_SEP_,
+ FilePath("adir" GTEST_PATH_SEP_ "afile")
+ .RemoveFileName().c_str());
+}
+
+// RemoveFileName "adir/subdir/afile" -> "adir/subdir/"
+TEST(RemoveFileNameTest, GivesDirAndSubDirName) {
+ EXPECT_STREQ("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_,
+ FilePath("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_ "afile")
+ .RemoveFileName().c_str());
+}
+
+// RemoveFileName "/afile" -> "/"
+TEST(RemoveFileNameTest, GivesRootDir) {
+ EXPECT_STREQ(GTEST_PATH_SEP_,
+ FilePath(GTEST_PATH_SEP_ "afile").RemoveFileName().c_str());
+}
+
+#if GTEST_HAS_ALT_PATH_SEP_
+
+// Tests that RemoveFileName() works with the alternate separator on
+// Windows.
+
+// RemoveFileName("adir/") -> "adir/"
+TEST(RemoveFileNameTest, ButNoFileForAlternateSeparator) {
+ EXPECT_STREQ("adir" GTEST_PATH_SEP_,
+ FilePath("adir/").RemoveFileName().c_str());
+}
+
+// RemoveFileName("adir/afile") -> "adir/"
+TEST(RemoveFileNameTest, GivesDirNameForAlternateSeparator) {
+ EXPECT_STREQ("adir" GTEST_PATH_SEP_,
+ FilePath("adir/afile").RemoveFileName().c_str());
+}
+
+// RemoveFileName("adir/subdir/afile") -> "adir/subdir/"
+TEST(RemoveFileNameTest, GivesDirAndSubDirNameForAlternateSeparator) {
+ EXPECT_STREQ("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_,
+ FilePath("adir/subdir/afile").RemoveFileName().c_str());
+}
+
+// RemoveFileName("/afile") -> "\"
+TEST(RemoveFileNameTest, GivesRootDirForAlternateSeparator) {
+ EXPECT_STREQ(GTEST_PATH_SEP_,
+ FilePath("/afile").RemoveFileName().c_str());
+}
+
+#endif
+
+TEST(MakeFileNameTest, GenerateWhenNumberIsZero) {
+ FilePath actual = FilePath::MakeFileName(FilePath("foo"), FilePath("bar"),
+ 0, "xml");
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar.xml", actual.c_str());
+}
+
+TEST(MakeFileNameTest, GenerateFileNameNumberGtZero) {
+ FilePath actual = FilePath::MakeFileName(FilePath("foo"), FilePath("bar"),
+ 12, "xml");
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar_12.xml", actual.c_str());
+}
+
+TEST(MakeFileNameTest, GenerateFileNameWithSlashNumberIsZero) {
+ FilePath actual = FilePath::MakeFileName(FilePath("foo" GTEST_PATH_SEP_),
+ FilePath("bar"), 0, "xml");
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar.xml", actual.c_str());
+}
+
+TEST(MakeFileNameTest, GenerateFileNameWithSlashNumberGtZero) {
+ FilePath actual = FilePath::MakeFileName(FilePath("foo" GTEST_PATH_SEP_),
+ FilePath("bar"), 12, "xml");
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar_12.xml", actual.c_str());
+}
+
+TEST(MakeFileNameTest, GenerateWhenNumberIsZeroAndDirIsEmpty) {
+ FilePath actual = FilePath::MakeFileName(FilePath(""), FilePath("bar"),
+ 0, "xml");
+ EXPECT_STREQ("bar.xml", actual.c_str());
+}
+
+TEST(MakeFileNameTest, GenerateWhenNumberIsNotZeroAndDirIsEmpty) {
+ FilePath actual = FilePath::MakeFileName(FilePath(""), FilePath("bar"),
+ 14, "xml");
+ EXPECT_STREQ("bar_14.xml", actual.c_str());
+}
+
+TEST(ConcatPathsTest, WorksWhenDirDoesNotEndWithPathSep) {
+ FilePath actual = FilePath::ConcatPaths(FilePath("foo"),
+ FilePath("bar.xml"));
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar.xml", actual.c_str());
+}
+
+TEST(ConcatPathsTest, WorksWhenPath1EndsWithPathSep) {
+ FilePath actual = FilePath::ConcatPaths(FilePath("foo" GTEST_PATH_SEP_),
+ FilePath("bar.xml"));
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar.xml", actual.c_str());
+}
+
+TEST(ConcatPathsTest, Path1BeingEmpty) {
+ FilePath actual = FilePath::ConcatPaths(FilePath(""),
+ FilePath("bar.xml"));
+ EXPECT_STREQ("bar.xml", actual.c_str());
+}
+
+TEST(ConcatPathsTest, Path2BeingEmpty) {
+ FilePath actual = FilePath::ConcatPaths(FilePath("foo"),
+ FilePath(""));
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_, actual.c_str());
+}
+
+TEST(ConcatPathsTest, BothPathBeingEmpty) {
+ FilePath actual = FilePath::ConcatPaths(FilePath(""),
+ FilePath(""));
+ EXPECT_STREQ("", actual.c_str());
+}
+
+TEST(ConcatPathsTest, Path1ContainsPathSep) {
+ FilePath actual = FilePath::ConcatPaths(FilePath("foo" GTEST_PATH_SEP_ "bar"),
+ FilePath("foobar.xml"));
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar" GTEST_PATH_SEP_ "foobar.xml",
+ actual.c_str());
+}
+
+TEST(ConcatPathsTest, Path2ContainsPathSep) {
+ FilePath actual = FilePath::ConcatPaths(
+ FilePath("foo" GTEST_PATH_SEP_),
+ FilePath("bar" GTEST_PATH_SEP_ "bar.xml"));
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar" GTEST_PATH_SEP_ "bar.xml",
+ actual.c_str());
+}
+
+TEST(ConcatPathsTest, Path2EndsWithPathSep) {
+ FilePath actual = FilePath::ConcatPaths(FilePath("foo"),
+ FilePath("bar" GTEST_PATH_SEP_));
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar" GTEST_PATH_SEP_, actual.c_str());
+}
+
+// RemoveTrailingPathSeparator "" -> ""
+TEST(RemoveTrailingPathSeparatorTest, EmptyString) {
+ EXPECT_STREQ("",
+ FilePath("").RemoveTrailingPathSeparator().c_str());
+}
+
+// RemoveTrailingPathSeparator "foo" -> "foo"
+TEST(RemoveTrailingPathSeparatorTest, FileNoSlashString) {
+ EXPECT_STREQ("foo",
+ FilePath("foo").RemoveTrailingPathSeparator().c_str());
+}
+
+// RemoveTrailingPathSeparator "foo/" -> "foo"
+TEST(RemoveTrailingPathSeparatorTest, ShouldRemoveTrailingSeparator) {
+ EXPECT_STREQ(
+ "foo",
+ FilePath("foo" GTEST_PATH_SEP_).RemoveTrailingPathSeparator().c_str());
+#if GTEST_HAS_ALT_PATH_SEP_
+ EXPECT_STREQ("foo",
+ FilePath("foo/").RemoveTrailingPathSeparator().c_str());
+#endif
+}
+
+// RemoveTrailingPathSeparator "foo/bar/" -> "foo/bar/"
+TEST(RemoveTrailingPathSeparatorTest, ShouldRemoveLastSeparator) {
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar",
+ FilePath("foo" GTEST_PATH_SEP_ "bar" GTEST_PATH_SEP_)
+ .RemoveTrailingPathSeparator().c_str());
+}
+
+// RemoveTrailingPathSeparator "foo/bar" -> "foo/bar"
+TEST(RemoveTrailingPathSeparatorTest, ShouldReturnUnmodified) {
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar",
+ FilePath("foo" GTEST_PATH_SEP_ "bar")
+ .RemoveTrailingPathSeparator().c_str());
+}
+
+TEST(DirectoryTest, RootDirectoryExists) {
+#if GTEST_OS_WINDOWS // We are on Windows.
+ char current_drive[_MAX_PATH]; // NOLINT
+ current_drive[0] = static_cast<char>(_getdrive() + 'A' - 1);
+ current_drive[1] = ':';
+ current_drive[2] = '\\';
+ current_drive[3] = '\0';
+ EXPECT_TRUE(FilePath(current_drive).DirectoryExists());
+#else
+ EXPECT_TRUE(FilePath("/").DirectoryExists());
+#endif // GTEST_OS_WINDOWS
+}
+
+#if GTEST_OS_WINDOWS
+TEST(DirectoryTest, RootOfWrongDriveDoesNotExists) {
+ const int saved_drive_ = _getdrive();
+ // Find a drive that doesn't exist. Start with 'Z' to avoid common ones.
+ for (char drive = 'Z'; drive >= 'A'; drive--)
+ if (_chdrive(drive - 'A' + 1) == -1) {
+ char non_drive[_MAX_PATH]; // NOLINT
+ non_drive[0] = drive;
+ non_drive[1] = ':';
+ non_drive[2] = '\\';
+ non_drive[3] = '\0';
+ EXPECT_FALSE(FilePath(non_drive).DirectoryExists());
+ break;
+ }
+ _chdrive(saved_drive_);
+}
+#endif // GTEST_OS_WINDOWS
+
+#if !GTEST_OS_WINDOWS_MOBILE
+// Windows CE _does_ consider the empty path to name an existing directory.
+TEST(DirectoryTest, EmptyPathDirectoryDoesNotExist) {
+ EXPECT_FALSE(FilePath("").DirectoryExists());
+}
+#endif // !GTEST_OS_WINDOWS_MOBILE
+
+TEST(DirectoryTest, CurrentDirectoryExists) {
+#if GTEST_OS_WINDOWS // We are on Windows.
+#ifndef _WIN32_CE // Windows CE doesn't have a current directory.
+ EXPECT_TRUE(FilePath(".").DirectoryExists());
+ EXPECT_TRUE(FilePath(".\\").DirectoryExists());
+#endif // _WIN32_CE
+#else
+ EXPECT_TRUE(FilePath(".").DirectoryExists());
+ EXPECT_TRUE(FilePath("./").DirectoryExists());
+#endif // GTEST_OS_WINDOWS
+}
+
+TEST(NormalizeTest, NullStringsEqualEmptyDirectory) {
+ EXPECT_STREQ("", FilePath(NULL).c_str());
+ EXPECT_STREQ("", FilePath(String(NULL)).c_str());
+}
+
+// "foo/bar" == foo//bar" == "foo///bar"
+TEST(NormalizeTest, MultipleConsecutiveSepaparatorsInMidstring) {
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar",
+ FilePath("foo" GTEST_PATH_SEP_ "bar").c_str());
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar",
+ FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").c_str());
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_ "bar",
+ FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_
+ GTEST_PATH_SEP_ "bar").c_str());
+}
+
+// "/bar" == //bar" == "///bar"
+TEST(NormalizeTest, MultipleConsecutiveSepaparatorsAtStringStart) {
+ EXPECT_STREQ(GTEST_PATH_SEP_ "bar",
+ FilePath(GTEST_PATH_SEP_ "bar").c_str());
+ EXPECT_STREQ(GTEST_PATH_SEP_ "bar",
+ FilePath(GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").c_str());
+ EXPECT_STREQ(GTEST_PATH_SEP_ "bar",
+ FilePath(GTEST_PATH_SEP_ GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").c_str());
+}
+
+// "foo/" == foo//" == "foo///"
+TEST(NormalizeTest, MultipleConsecutiveSepaparatorsAtStringEnd) {
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_,
+ FilePath("foo" GTEST_PATH_SEP_).c_str());
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_,
+ FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_).c_str());
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_,
+ FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_ GTEST_PATH_SEP_).c_str());
+}
+
+#if GTEST_HAS_ALT_PATH_SEP_
+
+// Tests that separators at the end of the string are normalized
+// regardless of their combination (e.g. "foo\" == "foo/\" ==
+// "foo\\/").
+TEST(NormalizeTest, MixAlternateSeparatorAtStringEnd) {
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_,
+ FilePath("foo/").c_str());
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_,
+ FilePath("foo" GTEST_PATH_SEP_ "/").c_str());
+ EXPECT_STREQ("foo" GTEST_PATH_SEP_,
+ FilePath("foo//" GTEST_PATH_SEP_).c_str());
+}
+
+#endif
+
+TEST(AssignmentOperatorTest, DefaultAssignedToNonDefault) {
+ FilePath default_path;
+ FilePath non_default_path("path");
+ non_default_path = default_path;
+ EXPECT_STREQ("", non_default_path.c_str());
+ EXPECT_STREQ("", default_path.c_str()); // RHS var is unchanged.
+}
+
+TEST(AssignmentOperatorTest, NonDefaultAssignedToDefault) {
+ FilePath non_default_path("path");
+ FilePath default_path;
+ default_path = non_default_path;
+ EXPECT_STREQ("path", default_path.c_str());
+ EXPECT_STREQ("path", non_default_path.c_str()); // RHS var is unchanged.
+}
+
+TEST(AssignmentOperatorTest, ConstAssignedToNonConst) {
+ const FilePath const_default_path("const_path");
+ FilePath non_default_path("path");
+ non_default_path = const_default_path;
+ EXPECT_STREQ("const_path", non_default_path.c_str());
+}
+
+class DirectoryCreationTest : public Test {
+ protected:
+ virtual void SetUp() {
+ testdata_path_.Set(FilePath(String::Format("%s%s%s",
+ TempDir().c_str(), GetCurrentExecutableName().c_str(),
+ "_directory_creation" GTEST_PATH_SEP_ "test" GTEST_PATH_SEP_)));
+ testdata_file_.Set(testdata_path_.RemoveTrailingPathSeparator());
+
+ unique_file0_.Set(FilePath::MakeFileName(testdata_path_, FilePath("unique"),
+ 0, "txt"));
+ unique_file1_.Set(FilePath::MakeFileName(testdata_path_, FilePath("unique"),
+ 1, "txt"));
+
+ remove(testdata_file_.c_str());
+ remove(unique_file0_.c_str());
+ remove(unique_file1_.c_str());
+ posix::RmDir(testdata_path_.c_str());
+ }
+
+ virtual void TearDown() {
+ remove(testdata_file_.c_str());
+ remove(unique_file0_.c_str());
+ remove(unique_file1_.c_str());
+ posix::RmDir(testdata_path_.c_str());
+ }
+
+ String TempDir() const {
+#if GTEST_OS_WINDOWS_MOBILE
+ return String("\\temp\\");
+#elif GTEST_OS_WINDOWS
+ const char* temp_dir = posix::GetEnv("TEMP");
+ if (temp_dir == NULL || temp_dir[0] == '\0')
+ return String("\\temp\\");
+ else if (String(temp_dir).EndsWith("\\"))
+ return String(temp_dir);
+ else
+ return String::Format("%s\\", temp_dir);
+#else
+ return String("/tmp/");
+#endif // GTEST_OS_WINDOWS_MOBILE
+ }
+
+ void CreateTextFile(const char* filename) {
+ FILE* f = posix::FOpen(filename, "w");
+ fprintf(f, "text\n");
+ fclose(f);
+ }
+
+ // Strings representing a directory and a file, with identical paths
+ // except for the trailing separator character that distinguishes
+ // a directory named 'test' from a file named 'test'. Example names:
+ FilePath testdata_path_; // "/tmp/directory_creation/test/"
+ FilePath testdata_file_; // "/tmp/directory_creation/test"
+ FilePath unique_file0_; // "/tmp/directory_creation/test/unique.txt"
+ FilePath unique_file1_; // "/tmp/directory_creation/test/unique_1.txt"
+};
+
+TEST_F(DirectoryCreationTest, CreateDirectoriesRecursively) {
+ EXPECT_FALSE(testdata_path_.DirectoryExists()) << testdata_path_.c_str();
+ EXPECT_TRUE(testdata_path_.CreateDirectoriesRecursively());
+ EXPECT_TRUE(testdata_path_.DirectoryExists());
+}
+
+TEST_F(DirectoryCreationTest, CreateDirectoriesForAlreadyExistingPath) {
+ EXPECT_FALSE(testdata_path_.DirectoryExists()) << testdata_path_.c_str();
+ EXPECT_TRUE(testdata_path_.CreateDirectoriesRecursively());
+ // Call 'create' again... should still succeed.
+ EXPECT_TRUE(testdata_path_.CreateDirectoriesRecursively());
+}
+
+TEST_F(DirectoryCreationTest, CreateDirectoriesAndUniqueFilename) {
+ FilePath file_path(FilePath::GenerateUniqueFileName(testdata_path_,
+ FilePath("unique"), "txt"));
+ EXPECT_STREQ(unique_file0_.c_str(), file_path.c_str());
+ EXPECT_FALSE(file_path.FileOrDirectoryExists()); // file not there
+
+ testdata_path_.CreateDirectoriesRecursively();
+ EXPECT_FALSE(file_path.FileOrDirectoryExists()); // file still not there
+ CreateTextFile(file_path.c_str());
+ EXPECT_TRUE(file_path.FileOrDirectoryExists());
+
+ FilePath file_path2(FilePath::GenerateUniqueFileName(testdata_path_,
+ FilePath("unique"), "txt"));
+ EXPECT_STREQ(unique_file1_.c_str(), file_path2.c_str());
+ EXPECT_FALSE(file_path2.FileOrDirectoryExists()); // file not there
+ CreateTextFile(file_path2.c_str());
+ EXPECT_TRUE(file_path2.FileOrDirectoryExists());
+}
+
+TEST_F(DirectoryCreationTest, CreateDirectoriesFail) {
+ // Force a failure by putting a file where we will try to create a directory.
+ CreateTextFile(testdata_file_.c_str());
+ EXPECT_TRUE(testdata_file_.FileOrDirectoryExists());
+ EXPECT_FALSE(testdata_file_.DirectoryExists());
+ EXPECT_FALSE(testdata_file_.CreateDirectoriesRecursively());
+}
+
+TEST(NoDirectoryCreationTest, CreateNoDirectoriesForDefaultXmlFile) {
+ const FilePath test_detail_xml("test_detail.xml");
+ EXPECT_FALSE(test_detail_xml.CreateDirectoriesRecursively());
+}
+
+TEST(FilePathTest, DefaultConstructor) {
+ FilePath fp;
+ EXPECT_STREQ("", fp.c_str());
+}
+
+TEST(FilePathTest, CharAndCopyConstructors) {
+ const FilePath fp("spicy");
+ EXPECT_STREQ("spicy", fp.c_str());
+
+ const FilePath fp_copy(fp);
+ EXPECT_STREQ("spicy", fp_copy.c_str());
+}
+
+TEST(FilePathTest, StringConstructor) {
+ const FilePath fp(String("cider"));
+ EXPECT_STREQ("cider", fp.c_str());
+}
+
+TEST(FilePathTest, Set) {
+ const FilePath apple("apple");
+ FilePath mac("mac");
+ mac.Set(apple); // Implement Set() since overloading operator= is forbidden.
+ EXPECT_STREQ("apple", mac.c_str());
+ EXPECT_STREQ("apple", apple.c_str());
+}
+
+TEST(FilePathTest, ToString) {
+ const FilePath file("drink");
+ String str(file.ToString());
+ EXPECT_STREQ("drink", str.c_str());
+}
+
+TEST(FilePathTest, RemoveExtension) {
+ EXPECT_STREQ("app", FilePath("app.exe").RemoveExtension("exe").c_str());
+ EXPECT_STREQ("APP", FilePath("APP.EXE").RemoveExtension("exe").c_str());
+}
+
+TEST(FilePathTest, RemoveExtensionWhenThereIsNoExtension) {
+ EXPECT_STREQ("app", FilePath("app").RemoveExtension("exe").c_str());
+}
+
+TEST(FilePathTest, IsDirectory) {
+ EXPECT_FALSE(FilePath("cola").IsDirectory());
+ EXPECT_TRUE(FilePath("koala" GTEST_PATH_SEP_).IsDirectory());
+#if GTEST_HAS_ALT_PATH_SEP_
+ EXPECT_TRUE(FilePath("koala/").IsDirectory());
+#endif
+}
+
+TEST(FilePathTest, IsAbsolutePath) {
+ EXPECT_FALSE(FilePath("is" GTEST_PATH_SEP_ "relative").IsAbsolutePath());
+ EXPECT_FALSE(FilePath("").IsAbsolutePath());
+#if GTEST_OS_WINDOWS
+ EXPECT_TRUE(FilePath("c:\\" GTEST_PATH_SEP_ "is_not"
+ GTEST_PATH_SEP_ "relative").IsAbsolutePath());
+ EXPECT_FALSE(FilePath("c:foo" GTEST_PATH_SEP_ "bar").IsAbsolutePath());
+ EXPECT_TRUE(FilePath("c:/" GTEST_PATH_SEP_ "is_not"
+ GTEST_PATH_SEP_ "relative").IsAbsolutePath());
+#else
+ EXPECT_TRUE(FilePath(GTEST_PATH_SEP_ "is_not" GTEST_PATH_SEP_ "relative")
+ .IsAbsolutePath());
+#endif // GTEST_OS_WINDOWS
+}
+
+TEST(FilePathTest, IsRootDirectory) {
+#if GTEST_OS_WINDOWS
+ EXPECT_TRUE(FilePath("a:\\").IsRootDirectory());
+ EXPECT_TRUE(FilePath("Z:/").IsRootDirectory());
+ EXPECT_TRUE(FilePath("e://").IsRootDirectory());
+ EXPECT_FALSE(FilePath("").IsRootDirectory());
+ EXPECT_FALSE(FilePath("b:").IsRootDirectory());
+ EXPECT_FALSE(FilePath("b:a").IsRootDirectory());
+ EXPECT_FALSE(FilePath("8:/").IsRootDirectory());
+ EXPECT_FALSE(FilePath("c|/").IsRootDirectory());
+#else
+ EXPECT_TRUE(FilePath("/").IsRootDirectory());
+ EXPECT_TRUE(FilePath("//").IsRootDirectory());
+ EXPECT_FALSE(FilePath("").IsRootDirectory());
+ EXPECT_FALSE(FilePath("\\").IsRootDirectory());
+ EXPECT_FALSE(FilePath("/x").IsRootDirectory());
+#endif
+}
+
+} // namespace
+} // namespace internal
+} // namespace testing
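
A minimal sketch of the FilePath helpers exercised above, as they would be used from ordinary code (not part of the patch; it assumes gtest's internal gtest-filepath.h header is reachable on the include path, and the separator printed depends on the platform's GTEST_PATH_SEP_):

#include <cstdio>
#include <gtest/internal/gtest-filepath.h>

using testing::internal::FilePath;

int main() {
  // MakeFileName(dir, base, number, ext): a number of 0 omits the "_N" suffix.
  const FilePath plain = FilePath::MakeFileName(FilePath("foo"), FilePath("bar"), 0, "xml");
  const FilePath numbered = FilePath::MakeFileName(FilePath("foo"), FilePath("bar"), 12, "xml");
  std::printf("%s\n%s\n", plain.c_str(), numbered.c_str());  // foo/bar.xml, foo/bar_12.xml

  // ConcatPaths() joins its two pieces with exactly one separator.
  const FilePath joined = FilePath::ConcatPaths(FilePath("foo"), FilePath("bar.xml"));
  std::printf("%s\n", joined.c_str());                       // foo/bar.xml

  // IsDirectory() keys off the trailing separator, as the tests above verify.
  std::printf("%d %d\n",
              FilePath("koala").IsDirectory(),                   // 0
              FilePath("koala" GTEST_PATH_SEP_).IsDirectory());  // 1
  return 0;
}
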
diff --git a/Source/ThirdParty/gtest/test/gtest-linked_ptr_test.cc b/Source/ThirdParty/gtest/test/gtest-linked_ptr_test.cc
new file mode 100644
index 000000000..eae82296c
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-linked_ptr_test.cc
@@ -0,0 +1,154 @@
+// Copyright 2003, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: Dan Egnor (egnor@google.com)
+// Ported to Windows: Vadim Berman (vadimb@google.com)
+
+#include <gtest/internal/gtest-linked_ptr.h>
+
+#include <stdlib.h>
+#include <gtest/gtest.h>
+
+namespace {
+
+using testing::Message;
+using testing::internal::linked_ptr;
+
+int num;
+Message* history = NULL;
+
+// Class which tracks allocation/deallocation
+class A {
+ public:
+ A(): mynum(num++) { *history << "A" << mynum << " ctor\n"; }
+ virtual ~A() { *history << "A" << mynum << " dtor\n"; }
+ virtual void Use() { *history << "A" << mynum << " use\n"; }
+ protected:
+ int mynum;
+};
+
+// Subclass
+class B : public A {
+ public:
+ B() { *history << "B" << mynum << " ctor\n"; }
+ ~B() { *history << "B" << mynum << " dtor\n"; }
+ virtual void Use() { *history << "B" << mynum << " use\n"; }
+};
+
+class LinkedPtrTest : public testing::Test {
+ public:
+ LinkedPtrTest() {
+ num = 0;
+ history = new Message;
+ }
+
+ virtual ~LinkedPtrTest() {
+ delete history;
+ history = NULL;
+ }
+};
+
+TEST_F(LinkedPtrTest, GeneralTest) {
+ {
+ linked_ptr<A> a0, a1, a2;
+ a0 = a0;
+ a1 = a2;
+ ASSERT_EQ(a0.get(), static_cast<A*>(NULL));
+ ASSERT_EQ(a1.get(), static_cast<A*>(NULL));
+ ASSERT_EQ(a2.get(), static_cast<A*>(NULL));
+ ASSERT_TRUE(a0 == NULL);
+ ASSERT_TRUE(a1 == NULL);
+ ASSERT_TRUE(a2 == NULL);
+
+ {
+ linked_ptr<A> a3(new A);
+ a0 = a3;
+ ASSERT_TRUE(a0 == a3);
+ ASSERT_TRUE(a0 != NULL);
+ ASSERT_TRUE(a0.get() == a3);
+ ASSERT_TRUE(a0 == a3.get());
+ linked_ptr<A> a4(a0);
+ a1 = a4;
+ linked_ptr<A> a5(new A);
+ ASSERT_TRUE(a5.get() != a3);
+ ASSERT_TRUE(a5 != a3.get());
+ a2 = a5;
+ linked_ptr<B> b0(new B);
+ linked_ptr<A> a6(b0);
+ ASSERT_TRUE(b0 == a6);
+ ASSERT_TRUE(a6 == b0);
+ ASSERT_TRUE(b0 != NULL);
+ a5 = b0;
+ a5 = b0;
+ a3->Use();
+ a4->Use();
+ a5->Use();
+ a6->Use();
+ b0->Use();
+ (*b0).Use();
+ b0.get()->Use();
+ }
+
+ a0->Use();
+ a1->Use();
+ a2->Use();
+
+ a1 = a2;
+ a2.reset(new A);
+ a0.reset();
+
+ linked_ptr<A> a7;
+ }
+
+ ASSERT_STREQ(
+ "A0 ctor\n"
+ "A1 ctor\n"
+ "A2 ctor\n"
+ "B2 ctor\n"
+ "A0 use\n"
+ "A0 use\n"
+ "B2 use\n"
+ "B2 use\n"
+ "B2 use\n"
+ "B2 use\n"
+ "B2 use\n"
+ "B2 dtor\n"
+ "A2 dtor\n"
+ "A0 use\n"
+ "A0 use\n"
+ "A1 use\n"
+ "A3 ctor\n"
+ "A0 dtor\n"
+ "A3 dtor\n"
+ "A1 dtor\n",
+ history->GetString().c_str()
+ );
+}
+
+} // Unnamed namespace
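
The scenario above drives one large ownership graph and checks the exact construction and destruction order. For readers new to linked_ptr, here is a smaller self-contained sketch of the shared ownership it provides (not part of the patch; the Widget type is made up for illustration, and only operations the test itself exercises are used: copy, assignment, operator->, and reset()):

#include <cstdio>
#include <gtest/internal/gtest-linked_ptr.h>

using testing::internal::linked_ptr;

struct Widget {
  int value;
  explicit Widget(int v) : value(v) {}
};

int main() {
  linked_ptr<Widget> a(new Widget(1));
  linked_ptr<Widget> b = a;        // a and b now co-own the same Widget.
  b->value = 2;
  std::printf("%d\n", a->value);   // 2: both point at the same object.
  a.reset();                       // b keeps the Widget alive...
  std::printf("%d\n", b->value);   // ...so this access is still valid.
  return 0;                        // The Widget is destroyed when b goes out of scope.
}
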
diff --git a/Source/ThirdParty/gtest/test/gtest-listener_test.cc b/Source/ThirdParty/gtest/test/gtest-listener_test.cc
new file mode 100644
index 000000000..c9be39a87
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-listener_test.cc
@@ -0,0 +1,313 @@
+// Copyright 2009 Google Inc. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: vladl@google.com (Vlad Losev)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This file verifies Google Test event listeners receive events at the
+// right times.
+
+#include <gtest/gtest.h>
+#include <vector>
+
+using ::testing::AddGlobalTestEnvironment;
+using ::testing::Environment;
+using ::testing::InitGoogleTest;
+using ::testing::Test;
+using ::testing::TestCase;
+using ::testing::TestEventListener;
+using ::testing::TestInfo;
+using ::testing::TestPartResult;
+using ::testing::UnitTest;
+using ::testing::internal::String;
+
+// Used by tests to register their events.
+std::vector<String>* g_events = NULL;
+
+namespace testing {
+namespace internal {
+
+class EventRecordingListener : public TestEventListener {
+ public:
+ EventRecordingListener(const char* name) : name_(name) {}
+
+ protected:
+ virtual void OnTestProgramStart(const UnitTest& /*unit_test*/) {
+ g_events->push_back(GetFullMethodName("OnTestProgramStart"));
+ }
+
+ virtual void OnTestIterationStart(const UnitTest& /*unit_test*/,
+ int iteration) {
+ Message message;
+ message << GetFullMethodName("OnTestIterationStart")
+ << "(" << iteration << ")";
+ g_events->push_back(message.GetString());
+ }
+
+ virtual void OnEnvironmentsSetUpStart(const UnitTest& /*unit_test*/) {
+ g_events->push_back(GetFullMethodName("OnEnvironmentsSetUpStart"));
+ }
+
+ virtual void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) {
+ g_events->push_back(GetFullMethodName("OnEnvironmentsSetUpEnd"));
+ }
+
+ virtual void OnTestCaseStart(const TestCase& /*test_case*/) {
+ g_events->push_back(GetFullMethodName("OnTestCaseStart"));
+ }
+
+ virtual void OnTestStart(const TestInfo& /*test_info*/) {
+ g_events->push_back(GetFullMethodName("OnTestStart"));
+ }
+
+ virtual void OnTestPartResult(const TestPartResult& /*test_part_result*/) {
+ g_events->push_back(GetFullMethodName("OnTestPartResult"));
+ }
+
+ virtual void OnTestEnd(const TestInfo& /*test_info*/) {
+ g_events->push_back(GetFullMethodName("OnTestEnd"));
+ }
+
+ virtual void OnTestCaseEnd(const TestCase& /*test_case*/) {
+ g_events->push_back(GetFullMethodName("OnTestCaseEnd"));
+ }
+
+ virtual void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) {
+ g_events->push_back(GetFullMethodName("OnEnvironmentsTearDownStart"));
+ }
+
+ virtual void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) {
+ g_events->push_back(GetFullMethodName("OnEnvironmentsTearDownEnd"));
+ }
+
+ virtual void OnTestIterationEnd(const UnitTest& /*unit_test*/,
+ int iteration) {
+ Message message;
+ message << GetFullMethodName("OnTestIterationEnd")
+ << "(" << iteration << ")";
+ g_events->push_back(message.GetString());
+ }
+
+ virtual void OnTestProgramEnd(const UnitTest& /*unit_test*/) {
+ g_events->push_back(GetFullMethodName("OnTestProgramEnd"));
+ }
+
+ private:
+ String GetFullMethodName(const char* name) {
+ Message message;
+ message << name_ << "." << name;
+ return message.GetString();
+ }
+
+ String name_;
+};
+
+class EnvironmentInvocationCatcher : public Environment {
+ protected:
+ virtual void SetUp() {
+ g_events->push_back(String("Environment::SetUp"));
+ }
+
+ virtual void TearDown() {
+ g_events->push_back(String("Environment::TearDown"));
+ }
+};
+
+class ListenerTest : public Test {
+ protected:
+ static void SetUpTestCase() {
+ g_events->push_back(String("ListenerTest::SetUpTestCase"));
+ }
+
+ static void TearDownTestCase() {
+ g_events->push_back(String("ListenerTest::TearDownTestCase"));
+ }
+
+ virtual void SetUp() {
+ g_events->push_back(String("ListenerTest::SetUp"));
+ }
+
+ virtual void TearDown() {
+ g_events->push_back(String("ListenerTest::TearDown"));
+ }
+};
+
+TEST_F(ListenerTest, DoesFoo) {
+ // Test execution order within a test case is not guaranteed, so we are not
+ // recording the test name.
+ g_events->push_back(String("ListenerTest::* Test Body"));
+ SUCCEED(); // Triggers OnTestPartResult.
+}
+
+TEST_F(ListenerTest, DoesBar) {
+ g_events->push_back(String("ListenerTest::* Test Body"));
+ SUCCEED(); // Triggers OnTestPartResult.
+}
+
+} // namespace internal
+
+} // namespace testing
+
+using ::testing::internal::EnvironmentInvocationCatcher;
+using ::testing::internal::EventRecordingListener;
+
+void VerifyResults(const std::vector<String>& data,
+ const char* const* expected_data,
+ int expected_data_size) {
+ const int actual_size = data.size();
+ // If the following assertion fails, a new entry will be appended to
+ // data. Hence we save data.size() first.
+ EXPECT_EQ(expected_data_size, actual_size);
+
+ // Compares the common prefix.
+ const int shorter_size = expected_data_size <= actual_size ?
+ expected_data_size : actual_size;
+ int i = 0;
+ for (; i < shorter_size; ++i) {
+ ASSERT_STREQ(expected_data[i], data[i].c_str())
+ << "at position " << i;
+ }
+
+ // Prints extra elements in the actual data.
+ for (; i < actual_size; ++i) {
+ printf(" Actual event #%d: %s\n", i, data[i].c_str());
+ }
+}
+
+int main(int argc, char **argv) {
+ std::vector<String> events;
+ g_events = &events;
+ InitGoogleTest(&argc, argv);
+
+ UnitTest::GetInstance()->listeners().Append(
+ new EventRecordingListener("1st"));
+ UnitTest::GetInstance()->listeners().Append(
+ new EventRecordingListener("2nd"));
+
+ AddGlobalTestEnvironment(new EnvironmentInvocationCatcher);
+
+ GTEST_CHECK_(events.size() == 0)
+ << "AddGlobalTestEnvironment should not generate any events itself.";
+
+ ::testing::GTEST_FLAG(repeat) = 2;
+ int ret_val = RUN_ALL_TESTS();
+
+ const char* const expected_events[] = {
+ "1st.OnTestProgramStart",
+ "2nd.OnTestProgramStart",
+ "1st.OnTestIterationStart(0)",
+ "2nd.OnTestIterationStart(0)",
+ "1st.OnEnvironmentsSetUpStart",
+ "2nd.OnEnvironmentsSetUpStart",
+ "Environment::SetUp",
+ "2nd.OnEnvironmentsSetUpEnd",
+ "1st.OnEnvironmentsSetUpEnd",
+ "1st.OnTestCaseStart",
+ "2nd.OnTestCaseStart",
+ "ListenerTest::SetUpTestCase",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "ListenerTest::TearDownTestCase",
+ "2nd.OnTestCaseEnd",
+ "1st.OnTestCaseEnd",
+ "1st.OnEnvironmentsTearDownStart",
+ "2nd.OnEnvironmentsTearDownStart",
+ "Environment::TearDown",
+ "2nd.OnEnvironmentsTearDownEnd",
+ "1st.OnEnvironmentsTearDownEnd",
+ "2nd.OnTestIterationEnd(0)",
+ "1st.OnTestIterationEnd(0)",
+ "1st.OnTestIterationStart(1)",
+ "2nd.OnTestIterationStart(1)",
+ "1st.OnEnvironmentsSetUpStart",
+ "2nd.OnEnvironmentsSetUpStart",
+ "Environment::SetUp",
+ "2nd.OnEnvironmentsSetUpEnd",
+ "1st.OnEnvironmentsSetUpEnd",
+ "1st.OnTestCaseStart",
+ "2nd.OnTestCaseStart",
+ "ListenerTest::SetUpTestCase",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "ListenerTest::TearDownTestCase",
+ "2nd.OnTestCaseEnd",
+ "1st.OnTestCaseEnd",
+ "1st.OnEnvironmentsTearDownStart",
+ "2nd.OnEnvironmentsTearDownStart",
+ "Environment::TearDown",
+ "2nd.OnEnvironmentsTearDownEnd",
+ "1st.OnEnvironmentsTearDownEnd",
+ "2nd.OnTestIterationEnd(1)",
+ "1st.OnTestIterationEnd(1)",
+ "2nd.OnTestProgramEnd",
+ "1st.OnTestProgramEnd"
+ };
+ VerifyResults(events,
+ expected_events,
+ sizeof(expected_events)/sizeof(expected_events[0]));
+
+ // We need to check manually for ad hoc test failures that happen after
+ // RUN_ALL_TESTS finishes.
+ if (UnitTest::GetInstance()->Failed())
+ ret_val = 1;
+
+ return ret_val;
+}
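
The two recording listeners above implement every TestEventListener hook so the full event order can be checked. A typical user-facing listener is much smaller; the sketch below (not part of the patch; ProgressListener is an illustrative name) assumes the EmptyTestEventListener convenience base class shipped with this gtest snapshot, so only the hooks of interest are overridden:

#include <cstdio>
#include <gtest/gtest.h>

// Prints a line as each test starts and reports its outcome when it ends.
class ProgressListener : public ::testing::EmptyTestEventListener {
  virtual void OnTestStart(const ::testing::TestInfo& test_info) {
    std::printf("== starting %s.%s\n",
                test_info.test_case_name(), test_info.name());
  }
  virtual void OnTestEnd(const ::testing::TestInfo& test_info) {
    std::printf("== %s %s.%s\n",
                test_info.result()->Passed() ? "passed" : "FAILED",
                test_info.test_case_name(), test_info.name());
  }
};

int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  // The listener list takes ownership of appended listeners.
  ::testing::UnitTest::GetInstance()->listeners().Append(new ProgressListener);
  return RUN_ALL_TESTS();
}
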
diff --git a/Source/ThirdParty/gtest/test/gtest-message_test.cc b/Source/ThirdParty/gtest/test/gtest-message_test.cc
new file mode 100644
index 000000000..e42b03447
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-message_test.cc
@@ -0,0 +1,167 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+//
+// Tests for the Message class.
+
+#include <gtest/gtest-message.h>
+
+#include <gtest/gtest.h>
+
+namespace {
+
+using ::testing::Message;
+using ::testing::internal::StrStream;
+
+// A helper function that turns a Message into a C string.
+const char* ToCString(const Message& msg) {
+ static testing::internal::String result;
+ result = msg.GetString();
+ return result.c_str();
+}
+
+// Tests the testing::Message class
+
+// Tests the default constructor.
+TEST(MessageTest, DefaultConstructor) {
+ const Message msg;
+ EXPECT_STREQ("", ToCString(msg));
+}
+
+// Tests the copy constructor.
+TEST(MessageTest, CopyConstructor) {
+ const Message msg1("Hello");
+ const Message msg2(msg1);
+ EXPECT_STREQ("Hello", ToCString(msg2));
+}
+
+// Tests constructing a Message from a C-string.
+TEST(MessageTest, ConstructsFromCString) {
+ Message msg("Hello");
+ EXPECT_STREQ("Hello", ToCString(msg));
+}
+
+// Tests streaming a float.
+TEST(MessageTest, StreamsFloat) {
+ const char* const s = ToCString(Message() << 1.23456F << " " << 2.34567F);
+ // Both numbers should be printed with enough precision.
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "1.234560", s);
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, " 2.345669", s);
+}
+
+// Tests streaming a double.
+TEST(MessageTest, StreamsDouble) {
+ const char* const s = ToCString(Message() << 1260570880.4555497 << " "
+ << 1260572265.1954534);
+ // Both numbers should be printed with enough precision.
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "1260570880.45", s);
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, " 1260572265.19", s);
+}
+
+// Tests streaming a non-char pointer.
+TEST(MessageTest, StreamsPointer) {
+ int n = 0;
+ int* p = &n;
+ EXPECT_STRNE("(null)", ToCString(Message() << p));
+}
+
+// Tests streaming a NULL non-char pointer.
+TEST(MessageTest, StreamsNullPointer) {
+ int* p = NULL;
+ EXPECT_STREQ("(null)", ToCString(Message() << p));
+}
+
+// Tests streaming a C string.
+TEST(MessageTest, StreamsCString) {
+ EXPECT_STREQ("Foo", ToCString(Message() << "Foo"));
+}
+
+// Tests streaming a NULL C string.
+TEST(MessageTest, StreamsNullCString) {
+ char* p = NULL;
+ EXPECT_STREQ("(null)", ToCString(Message() << p));
+}
+
+// Tests streaming std::string.
+TEST(MessageTest, StreamsString) {
+ const ::std::string str("Hello");
+ EXPECT_STREQ("Hello", ToCString(Message() << str));
+}
+
+// Tests that we can output strings containing embedded NULs.
+TEST(MessageTest, StreamsStringWithEmbeddedNUL) {
+ const char char_array_with_nul[] =
+ "Here's a NUL\0 and some more string";
+ const ::std::string string_with_nul(char_array_with_nul,
+ sizeof(char_array_with_nul) - 1);
+ EXPECT_STREQ("Here's a NUL\\0 and some more string",
+ ToCString(Message() << string_with_nul));
+}
+
+// Tests streaming a NUL char.
+TEST(MessageTest, StreamsNULChar) {
+ EXPECT_STREQ("\\0", ToCString(Message() << '\0'));
+}
+
+// Tests streaming int.
+TEST(MessageTest, StreamsInt) {
+ EXPECT_STREQ("123", ToCString(Message() << 123));
+}
+
+// Tests that basic IO manipulators (endl, ends, and flush) can be
+// streamed to Message.
+TEST(MessageTest, StreamsBasicIoManip) {
+ EXPECT_STREQ("Line 1.\nA NUL char \\0 in line 2.",
+ ToCString(Message() << "Line 1." << std::endl
+ << "A NUL char " << std::ends << std::flush
+ << " in line 2."));
+}
+
+// Tests Message::GetString()
+TEST(MessageTest, GetString) {
+ Message msg;
+ msg << 1 << " lamb";
+ EXPECT_STREQ("1 lamb", msg.GetString().c_str());
+}
+
+// Tests streaming a Message object to an ostream.
+TEST(MessageTest, StreamsToOStream) {
+ Message msg("Hello");
+ StrStream ss;
+ ss << msg;
+ EXPECT_STREQ("Hello", testing::internal::StrStreamToString(&ss).c_str());
+}
+
+// Tests that a Message object doesn't take up too much stack space.
+TEST(MessageTest, DoesNotTakeUpMuchStackSpace) {
+ EXPECT_LE(sizeof(Message), 16U);
+}
+
+} // namespace
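
In day-to-day use, Message mostly appears as the text streamed after an assertion; the overloads tested above are what make arbitrary values printable there. A small usage sketch, not part of the patch (ParseDigit is a toy function invented for the example):

#include <gtest/gtest.h>

namespace {

int ParseDigit(const char* input) { return input[0] - '0'; }  // Toy function under test.

TEST(MessageUsageDemo, StreamsIntoAssertions) {
  const char* input = "7x";
  // Anything Message can format may also be streamed after an assertion;
  // the text is shown only if the assertion fails.
  EXPECT_EQ(7, ParseDigit(input)) << "while parsing input \"" << input << "\"";

  // A Message can also be built separately and read back with GetString().
  ::testing::Message msg;
  msg << "parsed " << 7 << " from " << input;
  EXPECT_STREQ("parsed 7 from 7x", msg.GetString().c_str());
}

}  // namespace
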
diff --git a/Source/ThirdParty/gtest/test/gtest-options_test.cc b/Source/ThirdParty/gtest/test/gtest-options_test.cc
new file mode 100644
index 000000000..2e2cbc92e
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-options_test.cc
@@ -0,0 +1,212 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: keith.ray@gmail.com (Keith Ray)
+//
+// Google Test UnitTestOptions tests
+//
+// This file tests classes and functions used internally by
+// Google Test. They are subject to change without notice.
+//
+// This file is #included from gtest.cc, to avoid changing build or
+// make-files on Windows and other platforms. Do not #include this file
+// anywhere else!
+
+#include <gtest/gtest.h>
+
+#if GTEST_OS_WINDOWS_MOBILE
+#include <windows.h>
+#elif GTEST_OS_WINDOWS
+#include <direct.h>
+#endif // GTEST_OS_WINDOWS_MOBILE
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+namespace testing {
+namespace internal {
+namespace {
+
+// Turns the given relative path into an absolute path.
+FilePath GetAbsolutePathOf(const FilePath& relative_path) {
+ return FilePath::ConcatPaths(FilePath::GetCurrentDir(), relative_path);
+}
+
+// Testing UnitTestOptions::GetOutputFormat/GetOutputFile.
+
+TEST(XmlOutputTest, GetOutputFormatDefault) {
+ GTEST_FLAG(output) = "";
+ EXPECT_STREQ("", UnitTestOptions::GetOutputFormat().c_str());
+}
+
+TEST(XmlOutputTest, GetOutputFormat) {
+ GTEST_FLAG(output) = "xml:filename";
+ EXPECT_STREQ("xml", UnitTestOptions::GetOutputFormat().c_str());
+}
+
+TEST(XmlOutputTest, GetOutputFileDefault) {
+ GTEST_FLAG(output) = "";
+ EXPECT_STREQ(GetAbsolutePathOf(FilePath("test_detail.xml")).c_str(),
+ UnitTestOptions::GetAbsolutePathToOutputFile().c_str());
+}
+
+TEST(XmlOutputTest, GetOutputFileSingleFile) {
+ GTEST_FLAG(output) = "xml:filename.abc";
+ EXPECT_STREQ(GetAbsolutePathOf(FilePath("filename.abc")).c_str(),
+ UnitTestOptions::GetAbsolutePathToOutputFile().c_str());
+}
+
+TEST(XmlOutputTest, GetOutputFileFromDirectoryPath) {
+ GTEST_FLAG(output) = "xml:path" GTEST_PATH_SEP_;
+ const std::string expected_output_file =
+ GetAbsolutePathOf(
+ FilePath(std::string("path") + GTEST_PATH_SEP_ +
+ GetCurrentExecutableName().c_str() + ".xml")).c_str();
+ const String& output_file = UnitTestOptions::GetAbsolutePathToOutputFile();
+#if GTEST_OS_WINDOWS
+ EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());
+#else
+ EXPECT_EQ(expected_output_file, output_file.c_str());
+#endif
+}
+
+TEST(OutputFileHelpersTest, GetCurrentExecutableName) {
+ const std::string exe_str = GetCurrentExecutableName().c_str();
+#if GTEST_OS_WINDOWS
+ const bool success =
+ _strcmpi("gtest-options_test", exe_str.c_str()) == 0 ||
+ _strcmpi("gtest-options-ex_test", exe_str.c_str()) == 0 ||
+ _strcmpi("gtest_all_test", exe_str.c_str()) == 0 ||
+ _strcmpi("gtest_dll_test", exe_str.c_str()) == 0;
+#else
+ // TODO(wan@google.com): remove the hard-coded "lt-" prefix when
+ // Chandler Carruth's libtool replacement is ready.
+ const bool success =
+ exe_str == "gtest-options_test" ||
+ exe_str == "gtest_all_test" ||
+ exe_str == "lt-gtest_all_test" ||
+ exe_str == "gtest_dll_test";
+#endif // GTEST_OS_WINDOWS
+ if (!success)
+ FAIL() << "GetCurrentExecutableName() returns " << exe_str;
+}
+
+class XmlOutputChangeDirTest : public Test {
+ protected:
+ virtual void SetUp() {
+ original_working_dir_ = FilePath::GetCurrentDir();
+ posix::ChDir("..");
+ // This will make the test fail if run from the root directory.
+ EXPECT_STRNE(original_working_dir_.c_str(),
+ FilePath::GetCurrentDir().c_str());
+ }
+
+ virtual void TearDown() {
+ posix::ChDir(original_working_dir_.c_str());
+ }
+
+ FilePath original_working_dir_;
+};
+
+TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithDefault) {
+ GTEST_FLAG(output) = "";
+ EXPECT_STREQ(FilePath::ConcatPaths(original_working_dir_,
+ FilePath("test_detail.xml")).c_str(),
+ UnitTestOptions::GetAbsolutePathToOutputFile().c_str());
+}
+
+TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithDefaultXML) {
+ GTEST_FLAG(output) = "xml";
+ EXPECT_STREQ(FilePath::ConcatPaths(original_working_dir_,
+ FilePath("test_detail.xml")).c_str(),
+ UnitTestOptions::GetAbsolutePathToOutputFile().c_str());
+}
+
+TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithRelativeFile) {
+ GTEST_FLAG(output) = "xml:filename.abc";
+ EXPECT_STREQ(FilePath::ConcatPaths(original_working_dir_,
+ FilePath("filename.abc")).c_str(),
+ UnitTestOptions::GetAbsolutePathToOutputFile().c_str());
+}
+
+TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithRelativePath) {
+ GTEST_FLAG(output) = "xml:path" GTEST_PATH_SEP_;
+ const std::string expected_output_file =
+ FilePath::ConcatPaths(
+ original_working_dir_,
+ FilePath(std::string("path") + GTEST_PATH_SEP_ +
+ GetCurrentExecutableName().c_str() + ".xml")).c_str();
+ const String& output_file = UnitTestOptions::GetAbsolutePathToOutputFile();
+#if GTEST_OS_WINDOWS
+ EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());
+#else
+ EXPECT_EQ(expected_output_file, output_file.c_str());
+#endif
+}
+
+TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithAbsoluteFile) {
+#if GTEST_OS_WINDOWS
+ GTEST_FLAG(output) = "xml:c:\\tmp\\filename.abc";
+ EXPECT_STREQ(FilePath("c:\\tmp\\filename.abc").c_str(),
+ UnitTestOptions::GetAbsolutePathToOutputFile().c_str());
+#else
+ GTEST_FLAG(output) ="xml:/tmp/filename.abc";
+ EXPECT_STREQ(FilePath("/tmp/filename.abc").c_str(),
+ UnitTestOptions::GetAbsolutePathToOutputFile().c_str());
+#endif
+}
+
+TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithAbsolutePath) {
+#if GTEST_OS_WINDOWS
+ const std::string path = "c:\\tmp\\";
+#else
+ const std::string path = "/tmp/";
+#endif
+
+ GTEST_FLAG(output) = "xml:" + path;
+ const std::string expected_output_file =
+ path + GetCurrentExecutableName().c_str() + ".xml";
+ const String& output_file = UnitTestOptions::GetAbsolutePathToOutputFile();
+
+#if GTEST_OS_WINDOWS
+ EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());
+#else
+ EXPECT_EQ(expected_output_file, output_file.c_str());
+#endif
+}
+
+} // namespace
+} // namespace internal
+} // namespace testing
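
These tests drive GTEST_FLAG(output) directly; in normal use the same option arrives via --gtest_output=xml:PATH on the command line or by assigning the flag before the tests run. A minimal sketch of the programmatic form (not part of the patch; the file name is arbitrary), for a binary that supplies its own main() instead of linking gtest_main:

#include <gtest/gtest.h>

int main(int argc, char** argv) {
  // A value assigned before InitGoogleTest() acts as a default that an
  // explicit --gtest_output argument on the command line still overrides.
  // A trailing path separator would instead make Google Test name the file
  // after the executable, as GetOutputFileFromDirectoryPath checks above.
  ::testing::GTEST_FLAG(output) = "xml:test_results.xml";
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
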
diff --git a/Source/ThirdParty/gtest/test/gtest-param-test2_test.cc b/Source/ThirdParty/gtest/test/gtest-param-test2_test.cc
new file mode 100644
index 000000000..ccb6cfac9
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-param-test2_test.cc
@@ -0,0 +1,65 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: vladl@google.com (Vlad Losev)
+//
+// Tests for Google Test itself. This verifies that the basic constructs of
+// Google Test work.
+
+#include <gtest/gtest.h>
+
+#include "test/gtest-param-test_test.h"
+
+#if GTEST_HAS_PARAM_TEST
+
+using ::testing::Values;
+using ::testing::internal::ParamGenerator;
+
+// Tests that generators defined in a different translation unit
+// are functional. The test using extern_gen is defined
+// in gtest-param-test_test.cc.
+ParamGenerator<int> extern_gen = Values(33);
+
+// Tests that a parameterized test case can be defined in one translation unit
+// and instantiated in another. The test is defined in gtest-param-test_test.cc
+// and ExternalInstantiationTest fixture class is defined in
+// gtest-param-test_test.h.
+INSTANTIATE_TEST_CASE_P(MultiplesOf33,
+ ExternalInstantiationTest,
+ Values(33, 66));
+
+// Tests that a parameterized test case can be instantiated
+// in multiple translation units. Another instantiation is defined
+// in gtest-param-test_test.cc and InstantiationInMultipleTranslaionUnitsTest
+// fixture is defined in gtest-param-test_test.h
+INSTANTIATE_TEST_CASE_P(Sequence2,
+ InstantiationInMultipleTranslaionUnitsTest,
+ Values(42*3, 42*4, 42*5));
+
+#endif // GTEST_HAS_PARAM_TEST
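
For orientation (not part of the patch): the machinery exercised in this file and in the gtest-param-test_test.cc that follows, namely TestWithParam<T>, generators such as Values(), and INSTANTIATE_TEST_CASE_P, composes into a value-parameterized test like the sketch below. The fixture and instantiation names are made up for the example; the guard mirrors the one used above.

#include <gtest/gtest.h>

#if GTEST_HAS_PARAM_TEST

// The fixture fixes the parameter type; GetParam() returns the value the
// current instantiation was created with.
class DivisibilityTest : public ::testing::TestWithParam<int> {};

TEST_P(DivisibilityTest, IsDivisibleByThree) {
  EXPECT_EQ(0, GetParam() % 3);
}

// Each INSTANTIATE_TEST_CASE_P pairs the fixture with one generator; several
// instantiations may live in different translation units, which is exactly
// what gtest-param-test2_test.cc above demonstrates.
INSTANTIATE_TEST_CASE_P(SmallMultiples,
                        DivisibilityTest,
                        ::testing::Values(3, 6, 9));

#endif  // GTEST_HAS_PARAM_TEST
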
diff --git a/Source/ThirdParty/gtest/test/gtest-param-test_test.cc b/Source/ThirdParty/gtest/test/gtest-param-test_test.cc
new file mode 100644
index 000000000..d0a0e735c
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-param-test_test.cc
@@ -0,0 +1,835 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: vladl@google.com (Vlad Losev)
+//
+// Tests for Google Test itself. This file verifies that the parameter
+// generators objects produce correct parameter sequences and that
+// Google Test runtime instantiates correct tests from those sequences.
+
+#include <gtest/gtest.h>
+
+#if GTEST_HAS_PARAM_TEST
+
+#include <algorithm>
+#include <iostream>
+#include <list>
+#include <sstream>
+#include <string>
+#include <vector>
+
+// To include gtest-internal-inl.h.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h" // for UnitTestOptions
+#undef GTEST_IMPLEMENTATION_
+
+#include "test/gtest-param-test_test.h"
+
+using ::std::vector;
+using ::std::sort;
+
+using ::testing::AddGlobalTestEnvironment;
+using ::testing::Bool;
+using ::testing::Message;
+using ::testing::Range;
+using ::testing::TestWithParam;
+using ::testing::Values;
+using ::testing::ValuesIn;
+
+#if GTEST_HAS_COMBINE
+using ::testing::Combine;
+using ::std::tr1::get;
+using ::std::tr1::make_tuple;
+using ::std::tr1::tuple;
+#endif // GTEST_HAS_COMBINE
+
+using ::testing::internal::ParamGenerator;
+using ::testing::internal::UnitTestOptions;
+
+// Prints a value to a string.
+//
+// TODO(wan@google.com): remove PrintValue() when we move matchers and
+// EXPECT_THAT() from Google Mock to Google Test. At that time, we
+// can write EXPECT_THAT(x, Eq(y)) to compare two tuples x and y, as
+// EXPECT_THAT() and the matchers know how to print tuples.
+template <typename T>
+::std::string PrintValue(const T& value) {
+ ::std::stringstream stream;
+ stream << value;
+ return stream.str();
+}
+
+#if GTEST_HAS_COMBINE
+
+// These overloads allow printing tuples in our tests. We cannot
+// define an operator<< for tuples, as that definition needs to be in
+// the std namespace in order to be picked up by Google Test via
+// Argument-Dependent Lookup, yet defining anything in the std
+// namespace in non-STL code is undefined behavior.
+
+template <typename T1, typename T2>
+::std::string PrintValue(const tuple<T1, T2>& value) {
+ ::std::stringstream stream;
+ stream << "(" << get<0>(value) << ", " << get<1>(value) << ")";
+ return stream.str();
+}
+
+template <typename T1, typename T2, typename T3>
+::std::string PrintValue(const tuple<T1, T2, T3>& value) {
+ ::std::stringstream stream;
+ stream << "(" << get<0>(value) << ", " << get<1>(value)
+ << ", "<< get<2>(value) << ")";
+ return stream.str();
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+ typename T6, typename T7, typename T8, typename T9, typename T10>
+::std::string PrintValue(
+ const tuple<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>& value) {
+ ::std::stringstream stream;
+ stream << "(" << get<0>(value) << ", " << get<1>(value)
+ << ", "<< get<2>(value) << ", " << get<3>(value)
+ << ", "<< get<4>(value) << ", " << get<5>(value)
+ << ", "<< get<6>(value) << ", " << get<7>(value)
+ << ", "<< get<8>(value) << ", " << get<9>(value) << ")";
+ return stream.str();
+}
+
+#endif // GTEST_HAS_COMBINE
+
+// Verifies that a sequence generated by the generator and accessed
+// via the iterator object matches the expected one using Google Test
+// assertions.
+template <typename T, size_t N>
+void VerifyGenerator(const ParamGenerator<T>& generator,
+ const T (&expected_values)[N]) {
+ typename ParamGenerator<T>::iterator it = generator.begin();
+ for (size_t i = 0; i < N; ++i) {
+ ASSERT_FALSE(it == generator.end())
+ << "At element " << i << " when accessing via an iterator "
+ << "created with the copy constructor.\n";
+ // We cannot use EXPECT_EQ() here as the values may be tuples,
+ // which don't support <<.
+ EXPECT_TRUE(expected_values[i] == *it)
+ << "where i is " << i
+ << ", expected_values[i] is " << PrintValue(expected_values[i])
+ << ", *it is " << PrintValue(*it)
+ << ", and 'it' is an iterator created with the copy constructor.\n";
+ it++;
+ }
+ EXPECT_TRUE(it == generator.end())
+ << "At the presumed end of sequence when accessing via an iterator "
+ << "created with the copy constructor.\n";
+
+ // Test the iterator assignment. The following lines verify that
+ // the sequence accessed via an iterator initialized via the
+ // assignment operator (as opposed to a copy constructor) matches
+ // just the same.
+ it = generator.begin();
+ for (size_t i = 0; i < N; ++i) {
+ ASSERT_FALSE(it == generator.end())
+ << "At element " << i << " when accessing via an iterator "
+ << "created with the assignment operator.\n";
+ EXPECT_TRUE(expected_values[i] == *it)
+ << "where i is " << i
+ << ", expected_values[i] is " << PrintValue(expected_values[i])
+ << ", *it is " << PrintValue(*it)
+ << ", and 'it' is an iterator created with the copy constructor.\n";
+ it++;
+ }
+ EXPECT_TRUE(it == generator.end())
+ << "At the presumed end of sequence when accessing via an iterator "
+ << "created with the assignment operator.\n";
+}
+
+template <typename T>
+void VerifyGeneratorIsEmpty(const ParamGenerator<T>& generator) {
+ typename ParamGenerator<T>::iterator it = generator.begin();
+ EXPECT_TRUE(it == generator.end());
+
+ it = generator.begin();
+ EXPECT_TRUE(it == generator.end());
+}
+
+// Generator tests. They test that each of the provided generator functions
+// generates an expected sequence of values. The general test pattern
+// instantiates a generator using one of the generator functions,
+// checks the sequence produced by the generator using its iterator API,
+// and then resets the iterator back to the beginning of the sequence
+// and checks the sequence again.
+
+// Tests that iterators produced by generator functions conform to the
+// ForwardIterator concept.
+TEST(IteratorTest, ParamIteratorConformsToForwardIteratorConcept) {
+ const ParamGenerator<int> gen = Range(0, 10);
+ ParamGenerator<int>::iterator it = gen.begin();
+
+ // Verifies that iterator initialization works as expected.
+ ParamGenerator<int>::iterator it2 = it;
+ EXPECT_TRUE(*it == *it2) << "Initialized iterators must point to the "
+ << "element same as its source points to";
+
+ // Verifies that iterator assignment works as expected.
+ it++;
+ EXPECT_FALSE(*it == *it2);
+ it2 = it;
+ EXPECT_TRUE(*it == *it2) << "Assigned iterators must point to the "
+ << "element same as its source points to";
+
+ // Verifies that prefix operator++() returns *this.
+ EXPECT_EQ(&it, &(++it)) << "Result of the prefix operator++ must "
+ << "refer to the original object";
+
+ // Verifies that the result of the postfix operator++ points to the value
+ // pointed to by the original iterator.
+ int original_value = *it; // Have to compute it outside of macro call to be
+ // unaffected by the parameter evaluation order.
+ EXPECT_EQ(original_value, *(it++));
+
+ // Verifies that prefix and postfix operator++() advance an iterator
+ // all the same.
+ it2 = it;
+ it++;
+ ++it2;
+ EXPECT_TRUE(*it == *it2);
+}
+
+// Tests that Range() generates the expected sequence.
+TEST(RangeTest, IntRangeWithDefaultStep) {
+ const ParamGenerator<int> gen = Range(0, 3);
+ const int expected_values[] = {0, 1, 2};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Edge case. Tests that Range() generates the single element sequence
+// as expected when provided with range limits that are equal.
+TEST(RangeTest, IntRangeSingleValue) {
+ const ParamGenerator<int> gen = Range(0, 1);
+ const int expected_values[] = {0};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Edge case. Tests that Range() generates an empty sequence when
+// supplied with an empty range.
+TEST(RangeTest, IntRangeEmpty) {
+ const ParamGenerator<int> gen = Range(0, 0);
+ VerifyGeneratorIsEmpty(gen);
+}
+
+// Tests that Range() with a custom step (greater than one) generates
+// the expected sequence.
+TEST(RangeTest, IntRangeWithCustomStep) {
+ const ParamGenerator<int> gen = Range(0, 9, 3);
+ const int expected_values[] = {0, 3, 6};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Tests that Range() with a custom step (greater than one) generates
+// the expected sequence when the last element does not fall on the
+// upper range limit. Sequences generated by Range() must not have
+// elements beyond the range limits.
+TEST(RangeTest, IntRangeWithCustomStepOverUpperBound) {
+ const ParamGenerator<int> gen = Range(0, 4, 3);
+ const int expected_values[] = {0, 3};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Verifies that Range works with user-defined types that define
+// copy constructor, operator=(), operator+(), and operator<().
+class DogAdder {
+ public:
+ explicit DogAdder(const char* a_value) : value_(a_value) {}
+ DogAdder(const DogAdder& other) : value_(other.value_.c_str()) {}
+
+ DogAdder operator=(const DogAdder& other) {
+ if (this != &other)
+ value_ = other.value_;
+ return *this;
+ }
+ DogAdder operator+(const DogAdder& other) const {
+ Message msg;
+ msg << value_.c_str() << other.value_.c_str();
+ return DogAdder(msg.GetString().c_str());
+ }
+ bool operator<(const DogAdder& other) const {
+ return value_ < other.value_;
+ }
+ const ::testing::internal::String& value() const { return value_; }
+
+ private:
+ ::testing::internal::String value_;
+};
+
+TEST(RangeTest, WorksWithACustomType) {
+ const ParamGenerator<DogAdder> gen =
+ Range(DogAdder("cat"), DogAdder("catdogdog"), DogAdder("dog"));
+ ParamGenerator<DogAdder>::iterator it = gen.begin();
+
+ ASSERT_FALSE(it == gen.end());
+ EXPECT_STREQ("cat", it->value().c_str());
+
+ ASSERT_FALSE(++it == gen.end());
+ EXPECT_STREQ("catdog", it->value().c_str());
+
+ EXPECT_TRUE(++it == gen.end());
+}
+
+class IntWrapper {
+ public:
+ explicit IntWrapper(int a_value) : value_(a_value) {}
+ IntWrapper(const IntWrapper& other) : value_(other.value_) {}
+
+ IntWrapper operator=(const IntWrapper& other) {
+ value_ = other.value_;
+ return *this;
+ }
+ // operator+() adds a different type.
+ IntWrapper operator+(int other) const { return IntWrapper(value_ + other); }
+ bool operator<(const IntWrapper& other) const {
+ return value_ < other.value_;
+ }
+ int value() const { return value_; }
+
+ private:
+ int value_;
+};
+
+TEST(RangeTest, WorksWithACustomTypeWithDifferentIncrementType) {
+ const ParamGenerator<IntWrapper> gen = Range(IntWrapper(0), IntWrapper(2));
+ ParamGenerator<IntWrapper>::iterator it = gen.begin();
+
+ ASSERT_FALSE(it == gen.end());
+ EXPECT_EQ(0, it->value());
+
+ ASSERT_FALSE(++it == gen.end());
+ EXPECT_EQ(1, it->value());
+
+ EXPECT_TRUE(++it == gen.end());
+}
+
+// Tests that ValuesIn() with an array parameter generates
+// the expected sequence.
+TEST(ValuesInTest, ValuesInArray) {
+ int array[] = {3, 5, 8};
+ const ParamGenerator<int> gen = ValuesIn(array);
+ VerifyGenerator(gen, array);
+}
+
+// Tests that ValuesIn() with a const array parameter generates
+// the expected sequence.
+TEST(ValuesInTest, ValuesInConstArray) {
+ const int array[] = {3, 5, 8};
+ const ParamGenerator<int> gen = ValuesIn(array);
+ VerifyGenerator(gen, array);
+}
+
+// Edge case. Tests that ValuesIn() with an array parameter containing a
+// single element generates the single element sequence.
+TEST(ValuesInTest, ValuesInSingleElementArray) {
+ int array[] = {42};
+ const ParamGenerator<int> gen = ValuesIn(array);
+ VerifyGenerator(gen, array);
+}
+
+// Tests that ValuesIn() generates the expected sequence for an STL
+// container (vector).
+TEST(ValuesInTest, ValuesInVector) {
+ typedef ::std::vector<int> ContainerType;
+ ContainerType values;
+ values.push_back(3);
+ values.push_back(5);
+ values.push_back(8);
+ const ParamGenerator<int> gen = ValuesIn(values);
+
+ const int expected_values[] = {3, 5, 8};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Tests that ValuesIn() generates the expected sequence.
+TEST(ValuesInTest, ValuesInIteratorRange) {
+ typedef ::std::vector<int> ContainerType;
+ ContainerType values;
+ values.push_back(3);
+ values.push_back(5);
+ values.push_back(8);
+ const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end());
+
+ const int expected_values[] = {3, 5, 8};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Edge case. Tests that ValuesIn() provided with an iterator range specifying a
+// single value generates a single-element sequence.
+TEST(ValuesInTest, ValuesInSingleElementIteratorRange) {
+ typedef ::std::vector<int> ContainerType;
+ ContainerType values;
+ values.push_back(42);
+ const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end());
+
+ const int expected_values[] = {42};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Edge case. Tests that ValuesIn() provided with an empty iterator range
+// generates an empty sequence.
+TEST(ValuesInTest, ValuesInEmptyIteratorRange) {
+ typedef ::std::vector<int> ContainerType;
+ ContainerType values;
+ const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end());
+
+ VerifyGeneratorIsEmpty(gen);
+}
+
+// Tests that Values() generates the expected sequence.
+TEST(ValuesTest, ValuesWorks) {
+ const ParamGenerator<int> gen = Values(3, 5, 8);
+
+ const int expected_values[] = {3, 5, 8};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Tests that Values() generates the expected sequences from elements of
+// different types convertible to ParamGenerator's parameter type.
+TEST(ValuesTest, ValuesWorksForValuesOfCompatibleTypes) {
+ const ParamGenerator<double> gen = Values(3, 5.0f, 8.0);
+
+ const double expected_values[] = {3.0, 5.0, 8.0};
+ VerifyGenerator(gen, expected_values);
+}
+
+TEST(ValuesTest, ValuesWorksForMaxLengthList) {
+ const ParamGenerator<int> gen = Values(
+ 10, 20, 30, 40, 50, 60, 70, 80, 90, 100,
+ 110, 120, 130, 140, 150, 160, 170, 180, 190, 200,
+ 210, 220, 230, 240, 250, 260, 270, 280, 290, 300,
+ 310, 320, 330, 340, 350, 360, 370, 380, 390, 400,
+ 410, 420, 430, 440, 450, 460, 470, 480, 490, 500);
+
+ const int expected_values[] = {
+ 10, 20, 30, 40, 50, 60, 70, 80, 90, 100,
+ 110, 120, 130, 140, 150, 160, 170, 180, 190, 200,
+ 210, 220, 230, 240, 250, 260, 270, 280, 290, 300,
+ 310, 320, 330, 340, 350, 360, 370, 380, 390, 400,
+ 410, 420, 430, 440, 450, 460, 470, 480, 490, 500};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Edge case test. Tests that single-parameter Values() generates the sequence
+// with the single value.
+TEST(ValuesTest, ValuesWithSingleParameter) {
+ const ParamGenerator<int> gen = Values(42);
+
+ const int expected_values[] = {42};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Tests that Bool() generates sequence (false, true).
+TEST(BoolTest, BoolWorks) {
+ const ParamGenerator<bool> gen = Bool();
+
+ const bool expected_values[] = {false, true};
+ VerifyGenerator(gen, expected_values);
+}
+
+#if GTEST_HAS_COMBINE
+
+// Tests that Combine() with two parameters generates the expected sequence.
+TEST(CombineTest, CombineWithTwoParameters) {
+ const char* foo = "foo";
+ const char* bar = "bar";
+ const ParamGenerator<tuple<const char*, int> > gen =
+ Combine(Values(foo, bar), Values(3, 4));
+
+ tuple<const char*, int> expected_values[] = {
+ make_tuple(foo, 3), make_tuple(foo, 4),
+ make_tuple(bar, 3), make_tuple(bar, 4)};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Tests that Combine() with three parameters generates the expected sequence.
+TEST(CombineTest, CombineWithThreeParameters) {
+ const ParamGenerator<tuple<int, int, int> > gen = Combine(Values(0, 1),
+ Values(3, 4),
+ Values(5, 6));
+ tuple<int, int, int> expected_values[] = {
+ make_tuple(0, 3, 5), make_tuple(0, 3, 6),
+ make_tuple(0, 4, 5), make_tuple(0, 4, 6),
+ make_tuple(1, 3, 5), make_tuple(1, 3, 6),
+ make_tuple(1, 4, 5), make_tuple(1, 4, 6)};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Tests that when the first parameter of Combine() generates a single-value
+// sequence, the resulting sequence has as many elements as the sequence
+// generated by the second parameter.
+TEST(CombineTest, CombineWithFirstParameterSingleValue) {
+ const ParamGenerator<tuple<int, int> > gen = Combine(Values(42),
+ Values(0, 1));
+
+ tuple<int, int> expected_values[] = {make_tuple(42, 0), make_tuple(42, 1)};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Tests that when the second parameter of Combine() generates a single-value
+// sequence, the resulting sequence has as many elements as the sequence
+// generated by the first parameter.
+TEST(CombineTest, CombineWithSecondParameterSingleValue) {
+ const ParamGenerator<tuple<int, int> > gen = Combine(Values(0, 1),
+ Values(42));
+
+ tuple<int, int> expected_values[] = {make_tuple(0, 42), make_tuple(1, 42)};
+ VerifyGenerator(gen, expected_values);
+}
+
+// Tests that when the first parameter produces an empty sequence,
+// Combine() produces an empty sequence, too.
+TEST(CombineTest, CombineWithFirstParameterEmptyRange) {
+ const ParamGenerator<tuple<int, int> > gen = Combine(Range(0, 0),
+ Values(0, 1));
+ VerifyGeneratorIsEmpty(gen);
+}
+
+// Tests that when the second parameter produces an empty sequence,
+// Combine() produces an empty sequence, too.
+TEST(CombineTest, CombineWithSecondParameterEmptyRange) {
+ const ParamGenerator<tuple<int, int> > gen = Combine(Values(0, 1),
+ Range(1, 1));
+ VerifyGeneratorIsEmpty(gen);
+}
+
+// Edge case. Tests that Combine() works with the maximum number
+// of parameters supported by Google Test (currently 10).
+TEST(CombineTest, CombineWithMaxNumberOfParameters) {
+ const char* foo = "foo";
+ const char* bar = "bar";
+ const ParamGenerator<tuple<const char*, int, int, int, int, int, int, int,
+ int, int> > gen = Combine(Values(foo, bar),
+ Values(1), Values(2),
+ Values(3), Values(4),
+ Values(5), Values(6),
+ Values(7), Values(8),
+ Values(9));
+
+ tuple<const char*, int, int, int, int, int, int, int, int, int>
+ expected_values[] = {make_tuple(foo, 1, 2, 3, 4, 5, 6, 7, 8, 9),
+ make_tuple(bar, 1, 2, 3, 4, 5, 6, 7, 8, 9)};
+ VerifyGenerator(gen, expected_values);
+}
+
+#endif // GTEST_HAS_COMBINE
+
+// Tests that a generator produces the correct sequence after being
+// assigned from another generator.
+TEST(ParamGeneratorTest, AssignmentWorks) {
+ ParamGenerator<int> gen = Values(1, 2);
+ const ParamGenerator<int> gen2 = Values(3, 4);
+ gen = gen2;
+
+ const int expected_values[] = {3, 4};
+ VerifyGenerator(gen, expected_values);
+}
+
+// This test verifies that the tests are expanded and run as specified:
+// one test per element from the sequence produced by the generator
+// specified in INSTANTIATE_TEST_CASE_P. It also verifies that the test's
+// fixture constructor, SetUp(), and TearDown() have run and have been
+// supplied with the correct parameters.
+
+// Using an environment object lets us detect the case where no test case
+// functionality runs at all. In that case, naturally, TestCaseTearDown will
+// not be able to detect missing tests.
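+//
+// For illustration, based on the instantiation further below: with
+// test_generation_params[] = {36, 42, 72} and
+// INSTANTIATE_TEST_CASE_P(TestExpansionModule, TestGenerationTest,
+// ValuesIn(test_generation_params)), Google Test is expected to generate and
+// run three tests, roughly
+//   TestExpansionModule/TestGenerationTest.TestsExpandedAndRun/0  (param 36)
+//   TestExpansionModule/TestGenerationTest.TestsExpandedAndRun/1  (param 42)
+//   TestExpansionModule/TestGenerationTest.TestsExpandedAndRun/2  (param 72)
+// each in its own fixture object, so the environment should observe exactly
+// three constructor/SetUp()/TearDown()/test-body calls.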
+template <int kExpectedCalls>
+class TestGenerationEnvironment : public ::testing::Environment {
+ public:
+ static TestGenerationEnvironment* Instance() {
+ static TestGenerationEnvironment* instance = new TestGenerationEnvironment;
+ return instance;
+ }
+
+ void FixtureConstructorExecuted() { fixture_constructor_count_++; }
+ void SetUpExecuted() { set_up_count_++; }
+ void TearDownExecuted() { tear_down_count_++; }
+ void TestBodyExecuted() { test_body_count_++; }
+
+ virtual void TearDown() {
+ // If all MultipleTestGenerationTest tests have been de-selected
+ // by the filter flag, the following checks make no sense.
+ bool perform_check = false;
+
+ for (int i = 0; i < kExpectedCalls; ++i) {
+ Message msg;
+ msg << "TestsExpandedAndRun/" << i;
+ if (UnitTestOptions::FilterMatchesTest(
+ "TestExpansionModule/MultipleTestGenerationTest",
+ msg.GetString().c_str())) {
+ perform_check = true;
+ }
+ }
+ if (perform_check) {
+ EXPECT_EQ(kExpectedCalls, fixture_constructor_count_)
+ << "Fixture constructor of ParamTestGenerationTest test case "
+ << "has not been run as expected.";
+ EXPECT_EQ(kExpectedCalls, set_up_count_)
+ << "Fixture SetUp method of ParamTestGenerationTest test case "
+ << "has not been run as expected.";
+ EXPECT_EQ(kExpectedCalls, tear_down_count_)
+ << "Fixture TearDown method of ParamTestGenerationTest test case "
+ << "has not been run as expected.";
+ EXPECT_EQ(kExpectedCalls, test_body_count_)
+ << "Test in ParamTestGenerationTest test case "
+ << "has not been run as expected.";
+ }
+ }
+ private:
+ TestGenerationEnvironment() : fixture_constructor_count_(0), set_up_count_(0),
+ tear_down_count_(0), test_body_count_(0) {}
+
+ int fixture_constructor_count_;
+ int set_up_count_;
+ int tear_down_count_;
+ int test_body_count_;
+
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(TestGenerationEnvironment);
+};
+
+const int test_generation_params[] = {36, 42, 72};
+
+class TestGenerationTest : public TestWithParam<int> {
+ public:
+ enum {
+ PARAMETER_COUNT =
+ sizeof(test_generation_params)/sizeof(test_generation_params[0])
+ };
+
+ typedef TestGenerationEnvironment<PARAMETER_COUNT> Environment;
+
+ TestGenerationTest() {
+ Environment::Instance()->FixtureConstructorExecuted();
+ current_parameter_ = GetParam();
+ }
+ virtual void SetUp() {
+ Environment::Instance()->SetUpExecuted();
+ EXPECT_EQ(current_parameter_, GetParam());
+ }
+ virtual void TearDown() {
+ Environment::Instance()->TearDownExecuted();
+ EXPECT_EQ(current_parameter_, GetParam());
+ }
+
+ static void SetUpTestCase() {
+ bool all_tests_in_test_case_selected = true;
+
+ for (int i = 0; i < PARAMETER_COUNT; ++i) {
+ Message test_name;
+ test_name << "TestsExpandedAndRun/" << i;
+ if ( !UnitTestOptions::FilterMatchesTest(
+ "TestExpansionModule/MultipleTestGenerationTest",
+ test_name.GetString())) {
+ all_tests_in_test_case_selected = false;
+ }
+ }
+ EXPECT_TRUE(all_tests_in_test_case_selected)
+ << "When running the TestGenerationTest test case all of its tests\n"
+ << "must be selected by the filter flag for the test case to pass.\n"
+ << "If not all of them are enabled, we can't reliably conclude\n"
+ << "that the correct number of tests have been generated.";
+
+ collected_parameters_.clear();
+ }
+
+ static void TearDownTestCase() {
+ vector<int> expected_values(test_generation_params,
+ test_generation_params + PARAMETER_COUNT);
+ // Test execution order is not guaranteed by Google Test,
+ // so the order of values in collected_parameters_ can be
+ // different and we have to sort to compare.
+ sort(expected_values.begin(), expected_values.end());
+ sort(collected_parameters_.begin(), collected_parameters_.end());
+
+ EXPECT_TRUE(collected_parameters_ == expected_values);
+ }
+ protected:
+ int current_parameter_;
+ static vector<int> collected_parameters_;
+
+ private:
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(TestGenerationTest);
+};
+vector<int> TestGenerationTest::collected_parameters_;
+
+TEST_P(TestGenerationTest, TestsExpandedAndRun) {
+ Environment::Instance()->TestBodyExecuted();
+ EXPECT_EQ(current_parameter_, GetParam());
+ collected_parameters_.push_back(GetParam());
+}
+INSTANTIATE_TEST_CASE_P(TestExpansionModule, TestGenerationTest,
+ ValuesIn(test_generation_params));
+
+// This test verifies that the element sequence (third parameter of
+// INSTANTIATE_TEST_CASE_P) is evaluated in InitGoogleTest() rather than at
+// the call site of INSTANTIATE_TEST_CASE_P or in RUN_ALL_TESTS(). For
+// that, we declare param_value_ to be a static member of
+// GeneratorEvaluationTest and initialize it to 0. We set it to 1 in
+// main(), just before invocation of InitGoogleTest(). After calling
+// InitGoogleTest(), we set the value to 2. If the sequence is evaluated
+// before or after InitGoogleTest, INSTANTIATE_TEST_CASE_P will create a
+// test with parameter other than 1, and the test body will fail the
+// assertion.
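+//
+// Sketching the expected timeline (based on main() at the bottom of this
+// file):
+//   param_value_ == 0   static initialization
+//   param_value_ == 1   set in main() just before InitGoogleTest()
+//     <-- Values(GeneratorEvaluationTest::param_value()) should be evaluated
+//         here, inside InitGoogleTest()
+//   param_value_ == 2   set in main() right after InitGoogleTest()
+// so the single generated test is expected to see GetParam() == 1.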
+class GeneratorEvaluationTest : public TestWithParam<int> {
+ public:
+ static int param_value() { return param_value_; }
+ static void set_param_value(int param_value) { param_value_ = param_value; }
+
+ private:
+ static int param_value_;
+};
+int GeneratorEvaluationTest::param_value_ = 0;
+
+TEST_P(GeneratorEvaluationTest, GeneratorsEvaluatedInMain) {
+ EXPECT_EQ(1, GetParam());
+}
+INSTANTIATE_TEST_CASE_P(GenEvalModule,
+ GeneratorEvaluationTest,
+ Values(GeneratorEvaluationTest::param_value()));
+
+// Tests that generators defined in a different translation unit are
+// functional. Generator extern_gen is defined in gtest-param-test_test2.cc.
+extern ParamGenerator<int> extern_gen;
+class ExternalGeneratorTest : public TestWithParam<int> {};
+TEST_P(ExternalGeneratorTest, ExternalGenerator) {
+ // Sequence produced by extern_gen contains only a single value
+ // which we verify here.
+ EXPECT_EQ(GetParam(), 33);
+}
+INSTANTIATE_TEST_CASE_P(ExternalGeneratorModule,
+ ExternalGeneratorTest,
+ extern_gen);
+
+// Tests that a parameterized test case can be defined in one translation
+// unit and instantiated in another. This test will be instantiated in
+// gtest-param-test_test2.cc. ExternalInstantiationTest fixture class is
+// defined in gtest-param-test_test.h.
+TEST_P(ExternalInstantiationTest, IsMultipleOf33) {
+ EXPECT_EQ(0, GetParam() % 33);
+}
+
+// Tests that a parameterized test case can be instantiated with multiple
+// generators.
+class MultipleInstantiationTest : public TestWithParam<int> {};
+TEST_P(MultipleInstantiationTest, AllowsMultipleInstances) {
+}
+INSTANTIATE_TEST_CASE_P(Sequence1, MultipleInstantiationTest, Values(1, 2));
+INSTANTIATE_TEST_CASE_P(Sequence2, MultipleInstantiationTest, Range(3, 5));
+
+// Tests that a parameterized test case can be instantiated
+// in multiple translation units. This test will be instantiated
+// here and in gtest-param-test_test2.cc.
+// InstantiationInMultipleTranslationUnitsTest fixture class
+// is defined in gtest-param-test_test.h.
+TEST_P(InstantiationInMultipleTranslaionUnitsTest, IsMultipleOf42) {
+ EXPECT_EQ(0, GetParam() % 42);
+}
+INSTANTIATE_TEST_CASE_P(Sequence1,
+ InstantiationInMultipleTranslaionUnitsTest,
+ Values(42, 42*2));
+
+// Tests that each iteration of a parameterized test runs in a separate test
+// object.
+class SeparateInstanceTest : public TestWithParam<int> {
+ public:
+ SeparateInstanceTest() : count_(0) {}
+
+ static void TearDownTestCase() {
+ EXPECT_GE(global_count_, 2)
+ << "If some (but not all) SeparateInstanceTest tests have been "
+ << "filtered out this test will fail. Make sure that all "
+ << "GeneratorEvaluationTest are selected or de-selected together "
+ << "by the test filter.";
+ }
+
+ protected:
+ int count_;
+ static int global_count_;
+};
+int SeparateInstanceTest::global_count_ = 0;
+
+TEST_P(SeparateInstanceTest, TestsRunInSeparateInstances) {
+ EXPECT_EQ(0, count_++);
+ global_count_++;
+}
+INSTANTIATE_TEST_CASE_P(FourElemSequence, SeparateInstanceTest, Range(1, 4));
+
+// Tests that all instantiations of a test are named appropriately. Test
+// defined with TEST_P(TestCaseName, TestName) and instantiated with
+// INSTANTIATE_TEST_CASE_P(SequenceName, TestCaseName, generator) must be named
+// SequenceName/TestCaseName.TestName/i, where i is the 0-based index of the
+// sequence element used to instantiate the test.
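+//
+// For example, with the ZeroToFiveSequence instantiation below, the test
+// generated for parameter index 2 is expected to be named
+//   ZeroToFiveSequence/NamingTest.TestsAreNamedAppropriately/2.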
+class NamingTest : public TestWithParam<int> {};
+
+TEST_P(NamingTest, TestsAreNamedAppropriately) {
+ const ::testing::TestInfo* const test_info =
+ ::testing::UnitTest::GetInstance()->current_test_info();
+
+ EXPECT_STREQ("ZeroToFiveSequence/NamingTest", test_info->test_case_name());
+
+ Message msg;
+ msg << "TestsAreNamedAppropriately/" << GetParam();
+ EXPECT_STREQ(msg.GetString().c_str(), test_info->name());
+}
+
+INSTANTIATE_TEST_CASE_P(ZeroToFiveSequence, NamingTest, Range(0, 5));
+
+#endif // GTEST_HAS_PARAM_TEST
+
+TEST(CompileTest, CombineIsDefinedOnlyWhenGtestHasParamTestIsDefined) {
+#if GTEST_HAS_COMBINE && !GTEST_HAS_PARAM_TEST
+ FAIL() << "GTEST_HAS_COMBINE is defined while GTEST_HAS_PARAM_TEST is not\n"
+#endif
+}
+
+int main(int argc, char **argv) {
+#if GTEST_HAS_PARAM_TEST
+ // Used in TestGenerationTest test case.
+ AddGlobalTestEnvironment(TestGenerationTest::Environment::Instance());
+ // Used in GeneratorEvaluationTest test case. Tests that the updated value
+ // will be picked up for instantiating tests in GeneratorEvaluationTest.
+ GeneratorEvaluationTest::set_param_value(1);
+#endif // GTEST_HAS_PARAM_TEST
+
+ ::testing::InitGoogleTest(&argc, argv);
+
+#if GTEST_HAS_PARAM_TEST
+ // Used in GeneratorEvaluationTest test case. Tests that value updated
+ // here will NOT be used for instantiating tests in
+ // GeneratorEvaluationTest.
+ GeneratorEvaluationTest::set_param_value(2);
+#endif // GTEST_HAS_PARAM_TEST
+
+ return RUN_ALL_TESTS();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest-param-test_test.h b/Source/ThirdParty/gtest/test/gtest-param-test_test.h
new file mode 100644
index 000000000..b7f949361
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-param-test_test.h
@@ -0,0 +1,55 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: vladl@google.com (Vlad Losev)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This header file provides classes and functions used internally
+// for testing Google Test itself.
+
+#ifndef GTEST_TEST_GTEST_PARAM_TEST_TEST_H_
+#define GTEST_TEST_GTEST_PARAM_TEST_TEST_H_
+
+#include <gtest/gtest.h>
+
+#if GTEST_HAS_PARAM_TEST
+
+// Test fixture for testing definition and instantiation of a test
+// in separate translation units.
+class ExternalInstantiationTest : public ::testing::TestWithParam<int> {};
+
+// Test fixture for testing instantiation of a test in multiple
+// translation units.
+class InstantiationInMultipleTranslaionUnitsTest
+ : public ::testing::TestWithParam<int> {};
+
+#endif // GTEST_HAS_PARAM_TEST
+
+#endif // GTEST_TEST_GTEST_PARAM_TEST_TEST_H_
diff --git a/Source/ThirdParty/gtest/test/gtest-port_test.cc b/Source/ThirdParty/gtest/test/gtest-port_test.cc
new file mode 100644
index 000000000..37258602f
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-port_test.cc
@@ -0,0 +1,1018 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: vladl@google.com (Vlad Losev), wan@google.com (Zhanyong Wan)
+//
+// This file tests the internal cross-platform support utilities.
+
+#include <gtest/internal/gtest-port.h>
+
+#include <stdio.h>
+
+#if GTEST_OS_MAC
+#include <time.h>
+#endif // GTEST_OS_MAC
+
+#include <utility> // For std::pair and std::make_pair.
+
+#include <gtest/gtest.h>
+#include <gtest/gtest-spi.h>
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// their code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+using std::make_pair;
+using std::pair;
+
+namespace testing {
+namespace internal {
+
+// Tests that the element_type typedef is available in scoped_ptr and refers
+// to the parameter type.
+TEST(ScopedPtrTest, DefinesElementType) {
+ StaticAssertTypeEq<int, ::testing::internal::scoped_ptr<int>::element_type>();
+}
+
+// TODO(vladl@google.com): Implement THE REST of scoped_ptr tests.
+
+TEST(GtestCheckSyntaxTest, BehavesLikeASingleStatement) {
+ if (AlwaysFalse())
+ GTEST_CHECK_(false) << "This should never be executed; "
+ "It's a compilation test only.";
+
+ if (AlwaysTrue())
+ GTEST_CHECK_(true);
+ else
+ ; // NOLINT
+
+ if (AlwaysFalse())
+ ; // NOLINT
+ else
+ GTEST_CHECK_(true) << "";
+}
+
+TEST(GtestCheckSyntaxTest, WorksWithSwitch) {
+ switch (0) {
+ case 1:
+ break;
+ default:
+ GTEST_CHECK_(true);
+ }
+
+ switch(0)
+ case 0:
+ GTEST_CHECK_(true) << "Check failed in switch case";
+}
+
+#if GTEST_OS_MAC
+void* ThreadFunc(void* data) {
+ pthread_mutex_t* mutex = static_cast<pthread_mutex_t*>(data);
+ pthread_mutex_lock(mutex);
+ pthread_mutex_unlock(mutex);
+ return NULL;
+}
+
+TEST(GetThreadCountTest, ReturnsCorrectValue) {
+ EXPECT_EQ(1U, GetThreadCount());
+ pthread_mutex_t mutex;
+ pthread_attr_t attr;
+ pthread_t thread_id;
+
+ // TODO(vladl@google.com): turn mutex into internal::Mutex for automatic
+ // destruction.
+ pthread_mutex_init(&mutex, NULL);
+ pthread_mutex_lock(&mutex);
+ ASSERT_EQ(0, pthread_attr_init(&attr));
+ ASSERT_EQ(0, pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE));
+
+ const int status = pthread_create(&thread_id, &attr, &ThreadFunc, &mutex);
+ ASSERT_EQ(0, pthread_attr_destroy(&attr));
+ ASSERT_EQ(0, status);
+ EXPECT_EQ(2U, GetThreadCount());
+ pthread_mutex_unlock(&mutex);
+
+ void* dummy;
+ ASSERT_EQ(0, pthread_join(thread_id, &dummy));
+
+  // Mac OS X may not immediately report the updated thread count after
+ // joining a thread, causing flakiness in this test. To counter that, we
+ // wait for up to .5 seconds for the OS to report the correct value.
+ for (int i = 0; i < 5; ++i) {
+ if (GetThreadCount() == 1)
+ break;
+
+ SleepMilliseconds(100);
+ }
+ EXPECT_EQ(1U, GetThreadCount());
+ pthread_mutex_destroy(&mutex);
+}
+#else
+TEST(GetThreadCountTest, ReturnsZeroWhenUnableToCountThreads) {
+ EXPECT_EQ(0U, GetThreadCount());
+}
+#endif // GTEST_OS_MAC
+
+TEST(GtestCheckDeathTest, DiesWithCorrectOutputOnFailure) {
+ const bool a_false_condition = false;
+ const char regex[] =
+#ifdef _MSC_VER
+ "gtest-port_test\\.cc\\(\\d+\\):"
+#else
+ "gtest-port_test\\.cc:[0-9]+"
+#endif // _MSC_VER
+ ".*a_false_condition.*Extra info.*";
+
+ EXPECT_DEATH_IF_SUPPORTED(GTEST_CHECK_(a_false_condition) << "Extra info",
+ regex);
+}
+
+#if GTEST_HAS_DEATH_TEST
+
+TEST(GtestCheckDeathTest, LivesSilentlyOnSuccess) {
+ EXPECT_EXIT({
+ GTEST_CHECK_(true) << "Extra info";
+ ::std::cerr << "Success\n";
+ exit(0); },
+ ::testing::ExitedWithCode(0), "Success");
+}
+
+#endif // GTEST_HAS_DEATH_TEST
+
+#if GTEST_USES_POSIX_RE
+
+#if GTEST_HAS_TYPED_TEST
+
+template <typename Str>
+class RETest : public ::testing::Test {};
+
+// Defines StringTypes as the list of all string types that class RE
+// supports.
+typedef testing::Types<
+ ::std::string,
+#if GTEST_HAS_GLOBAL_STRING
+ ::string,
+#endif // GTEST_HAS_GLOBAL_STRING
+ const char*> StringTypes;
+
+TYPED_TEST_CASE(RETest, StringTypes);
+
+// Tests RE's implicit constructors.
+TYPED_TEST(RETest, ImplicitConstructorWorks) {
+ const RE empty(TypeParam(""));
+ EXPECT_STREQ("", empty.pattern());
+
+ const RE simple(TypeParam("hello"));
+ EXPECT_STREQ("hello", simple.pattern());
+
+ const RE normal(TypeParam(".*(\\w+)"));
+ EXPECT_STREQ(".*(\\w+)", normal.pattern());
+}
+
+// Tests that RE's constructors reject invalid regular expressions.
+TYPED_TEST(RETest, RejectsInvalidRegex) {
+ EXPECT_NONFATAL_FAILURE({
+ const RE invalid(TypeParam("?"));
+ }, "\"?\" is not a valid POSIX Extended regular expression.");
+}
+
+// Tests RE::FullMatch().
+TYPED_TEST(RETest, FullMatchWorks) {
+ const RE empty(TypeParam(""));
+ EXPECT_TRUE(RE::FullMatch(TypeParam(""), empty));
+ EXPECT_FALSE(RE::FullMatch(TypeParam("a"), empty));
+
+ const RE re(TypeParam("a.*z"));
+ EXPECT_TRUE(RE::FullMatch(TypeParam("az"), re));
+ EXPECT_TRUE(RE::FullMatch(TypeParam("axyz"), re));
+ EXPECT_FALSE(RE::FullMatch(TypeParam("baz"), re));
+ EXPECT_FALSE(RE::FullMatch(TypeParam("azy"), re));
+}
+
+// Tests RE::PartialMatch().
+TYPED_TEST(RETest, PartialMatchWorks) {
+ const RE empty(TypeParam(""));
+ EXPECT_TRUE(RE::PartialMatch(TypeParam(""), empty));
+ EXPECT_TRUE(RE::PartialMatch(TypeParam("a"), empty));
+
+ const RE re(TypeParam("a.*z"));
+ EXPECT_TRUE(RE::PartialMatch(TypeParam("az"), re));
+ EXPECT_TRUE(RE::PartialMatch(TypeParam("axyz"), re));
+ EXPECT_TRUE(RE::PartialMatch(TypeParam("baz"), re));
+ EXPECT_TRUE(RE::PartialMatch(TypeParam("azy"), re));
+ EXPECT_FALSE(RE::PartialMatch(TypeParam("zza"), re));
+}
+
+#endif // GTEST_HAS_TYPED_TEST
+
+#elif GTEST_USES_SIMPLE_RE
+
+TEST(IsInSetTest, NulCharIsNotInAnySet) {
+ EXPECT_FALSE(IsInSet('\0', ""));
+ EXPECT_FALSE(IsInSet('\0', "\0"));
+ EXPECT_FALSE(IsInSet('\0', "a"));
+}
+
+TEST(IsInSetTest, WorksForNonNulChars) {
+ EXPECT_FALSE(IsInSet('a', "Ab"));
+ EXPECT_FALSE(IsInSet('c', ""));
+
+ EXPECT_TRUE(IsInSet('b', "bcd"));
+ EXPECT_TRUE(IsInSet('b', "ab"));
+}
+
+TEST(IsDigitTest, IsFalseForNonDigit) {
+ EXPECT_FALSE(IsDigit('\0'));
+ EXPECT_FALSE(IsDigit(' '));
+ EXPECT_FALSE(IsDigit('+'));
+ EXPECT_FALSE(IsDigit('-'));
+ EXPECT_FALSE(IsDigit('.'));
+ EXPECT_FALSE(IsDigit('a'));
+}
+
+TEST(IsDigitTest, IsTrueForDigit) {
+ EXPECT_TRUE(IsDigit('0'));
+ EXPECT_TRUE(IsDigit('1'));
+ EXPECT_TRUE(IsDigit('5'));
+ EXPECT_TRUE(IsDigit('9'));
+}
+
+TEST(IsPunctTest, IsFalseForNonPunct) {
+ EXPECT_FALSE(IsPunct('\0'));
+ EXPECT_FALSE(IsPunct(' '));
+ EXPECT_FALSE(IsPunct('\n'));
+ EXPECT_FALSE(IsPunct('a'));
+ EXPECT_FALSE(IsPunct('0'));
+}
+
+TEST(IsPunctTest, IsTrueForPunct) {
+ for (const char* p = "^-!\"#$%&'()*+,./:;<=>?@[\\]_`{|}~"; *p; p++) {
+ EXPECT_PRED1(IsPunct, *p);
+ }
+}
+
+TEST(IsRepeatTest, IsFalseForNonRepeatChar) {
+ EXPECT_FALSE(IsRepeat('\0'));
+ EXPECT_FALSE(IsRepeat(' '));
+ EXPECT_FALSE(IsRepeat('a'));
+ EXPECT_FALSE(IsRepeat('1'));
+ EXPECT_FALSE(IsRepeat('-'));
+}
+
+TEST(IsRepeatTest, IsTrueForRepeatChar) {
+ EXPECT_TRUE(IsRepeat('?'));
+ EXPECT_TRUE(IsRepeat('*'));
+ EXPECT_TRUE(IsRepeat('+'));
+}
+
+TEST(IsWhiteSpaceTest, IsFalseForNonWhiteSpace) {
+ EXPECT_FALSE(IsWhiteSpace('\0'));
+ EXPECT_FALSE(IsWhiteSpace('a'));
+ EXPECT_FALSE(IsWhiteSpace('1'));
+ EXPECT_FALSE(IsWhiteSpace('+'));
+ EXPECT_FALSE(IsWhiteSpace('_'));
+}
+
+TEST(IsWhiteSpaceTest, IsTrueForWhiteSpace) {
+ EXPECT_TRUE(IsWhiteSpace(' '));
+ EXPECT_TRUE(IsWhiteSpace('\n'));
+ EXPECT_TRUE(IsWhiteSpace('\r'));
+ EXPECT_TRUE(IsWhiteSpace('\t'));
+ EXPECT_TRUE(IsWhiteSpace('\v'));
+ EXPECT_TRUE(IsWhiteSpace('\f'));
+}
+
+TEST(IsWordCharTest, IsFalseForNonWordChar) {
+ EXPECT_FALSE(IsWordChar('\0'));
+ EXPECT_FALSE(IsWordChar('+'));
+ EXPECT_FALSE(IsWordChar('.'));
+ EXPECT_FALSE(IsWordChar(' '));
+ EXPECT_FALSE(IsWordChar('\n'));
+}
+
+TEST(IsWordCharTest, IsTrueForLetter) {
+ EXPECT_TRUE(IsWordChar('a'));
+ EXPECT_TRUE(IsWordChar('b'));
+ EXPECT_TRUE(IsWordChar('A'));
+ EXPECT_TRUE(IsWordChar('Z'));
+}
+
+TEST(IsWordCharTest, IsTrueForDigit) {
+ EXPECT_TRUE(IsWordChar('0'));
+ EXPECT_TRUE(IsWordChar('1'));
+ EXPECT_TRUE(IsWordChar('7'));
+ EXPECT_TRUE(IsWordChar('9'));
+}
+
+TEST(IsWordCharTest, IsTrueForUnderscore) {
+ EXPECT_TRUE(IsWordChar('_'));
+}
+
+TEST(IsValidEscapeTest, IsFalseForNonPrintable) {
+ EXPECT_FALSE(IsValidEscape('\0'));
+ EXPECT_FALSE(IsValidEscape('\007'));
+}
+
+TEST(IsValidEscapeTest, IsFalseForDigit) {
+ EXPECT_FALSE(IsValidEscape('0'));
+ EXPECT_FALSE(IsValidEscape('9'));
+}
+
+TEST(IsValidEscapeTest, IsFalseForWhiteSpace) {
+ EXPECT_FALSE(IsValidEscape(' '));
+ EXPECT_FALSE(IsValidEscape('\n'));
+}
+
+TEST(IsValidEscapeTest, IsFalseForSomeLetter) {
+ EXPECT_FALSE(IsValidEscape('a'));
+ EXPECT_FALSE(IsValidEscape('Z'));
+}
+
+TEST(IsValidEscapeTest, IsTrueForPunct) {
+ EXPECT_TRUE(IsValidEscape('.'));
+ EXPECT_TRUE(IsValidEscape('-'));
+ EXPECT_TRUE(IsValidEscape('^'));
+ EXPECT_TRUE(IsValidEscape('$'));
+ EXPECT_TRUE(IsValidEscape('('));
+ EXPECT_TRUE(IsValidEscape(']'));
+ EXPECT_TRUE(IsValidEscape('{'));
+ EXPECT_TRUE(IsValidEscape('|'));
+}
+
+TEST(IsValidEscapeTest, IsTrueForSomeLetter) {
+ EXPECT_TRUE(IsValidEscape('d'));
+ EXPECT_TRUE(IsValidEscape('D'));
+ EXPECT_TRUE(IsValidEscape('s'));
+ EXPECT_TRUE(IsValidEscape('S'));
+ EXPECT_TRUE(IsValidEscape('w'));
+ EXPECT_TRUE(IsValidEscape('W'));
+}
+
+TEST(AtomMatchesCharTest, EscapedPunct) {
+ EXPECT_FALSE(AtomMatchesChar(true, '\\', '\0'));
+ EXPECT_FALSE(AtomMatchesChar(true, '\\', ' '));
+ EXPECT_FALSE(AtomMatchesChar(true, '_', '.'));
+ EXPECT_FALSE(AtomMatchesChar(true, '.', 'a'));
+
+ EXPECT_TRUE(AtomMatchesChar(true, '\\', '\\'));
+ EXPECT_TRUE(AtomMatchesChar(true, '_', '_'));
+ EXPECT_TRUE(AtomMatchesChar(true, '+', '+'));
+ EXPECT_TRUE(AtomMatchesChar(true, '.', '.'));
+}
+
+TEST(AtomMatchesCharTest, Escaped_d) {
+ EXPECT_FALSE(AtomMatchesChar(true, 'd', '\0'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'd', 'a'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'd', '.'));
+
+ EXPECT_TRUE(AtomMatchesChar(true, 'd', '0'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'd', '9'));
+}
+
+TEST(AtomMatchesCharTest, Escaped_D) {
+ EXPECT_FALSE(AtomMatchesChar(true, 'D', '0'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'D', '9'));
+
+ EXPECT_TRUE(AtomMatchesChar(true, 'D', '\0'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'D', 'a'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'D', '-'));
+}
+
+TEST(AtomMatchesCharTest, Escaped_s) {
+ EXPECT_FALSE(AtomMatchesChar(true, 's', '\0'));
+ EXPECT_FALSE(AtomMatchesChar(true, 's', 'a'));
+ EXPECT_FALSE(AtomMatchesChar(true, 's', '.'));
+ EXPECT_FALSE(AtomMatchesChar(true, 's', '9'));
+
+ EXPECT_TRUE(AtomMatchesChar(true, 's', ' '));
+ EXPECT_TRUE(AtomMatchesChar(true, 's', '\n'));
+ EXPECT_TRUE(AtomMatchesChar(true, 's', '\t'));
+}
+
+TEST(AtomMatchesCharTest, Escaped_S) {
+ EXPECT_FALSE(AtomMatchesChar(true, 'S', ' '));
+ EXPECT_FALSE(AtomMatchesChar(true, 'S', '\r'));
+
+ EXPECT_TRUE(AtomMatchesChar(true, 'S', '\0'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'S', 'a'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'S', '9'));
+}
+
+TEST(AtomMatchesCharTest, Escaped_w) {
+ EXPECT_FALSE(AtomMatchesChar(true, 'w', '\0'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'w', '+'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'w', ' '));
+ EXPECT_FALSE(AtomMatchesChar(true, 'w', '\n'));
+
+ EXPECT_TRUE(AtomMatchesChar(true, 'w', '0'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'w', 'b'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'w', 'C'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'w', '_'));
+}
+
+TEST(AtomMatchesCharTest, Escaped_W) {
+ EXPECT_FALSE(AtomMatchesChar(true, 'W', 'A'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'W', 'b'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'W', '9'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'W', '_'));
+
+ EXPECT_TRUE(AtomMatchesChar(true, 'W', '\0'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'W', '*'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'W', '\n'));
+}
+
+TEST(AtomMatchesCharTest, EscapedWhiteSpace) {
+ EXPECT_FALSE(AtomMatchesChar(true, 'f', '\0'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'f', '\n'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'n', '\0'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'n', '\r'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'r', '\0'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'r', 'a'));
+ EXPECT_FALSE(AtomMatchesChar(true, 't', '\0'));
+ EXPECT_FALSE(AtomMatchesChar(true, 't', 't'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'v', '\0'));
+ EXPECT_FALSE(AtomMatchesChar(true, 'v', '\f'));
+
+ EXPECT_TRUE(AtomMatchesChar(true, 'f', '\f'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'n', '\n'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'r', '\r'));
+ EXPECT_TRUE(AtomMatchesChar(true, 't', '\t'));
+ EXPECT_TRUE(AtomMatchesChar(true, 'v', '\v'));
+}
+
+TEST(AtomMatchesCharTest, UnescapedDot) {
+ EXPECT_FALSE(AtomMatchesChar(false, '.', '\n'));
+
+ EXPECT_TRUE(AtomMatchesChar(false, '.', '\0'));
+ EXPECT_TRUE(AtomMatchesChar(false, '.', '.'));
+ EXPECT_TRUE(AtomMatchesChar(false, '.', 'a'));
+ EXPECT_TRUE(AtomMatchesChar(false, '.', ' '));
+}
+
+TEST(AtomMatchesCharTest, UnescapedChar) {
+ EXPECT_FALSE(AtomMatchesChar(false, 'a', '\0'));
+ EXPECT_FALSE(AtomMatchesChar(false, 'a', 'b'));
+ EXPECT_FALSE(AtomMatchesChar(false, '$', 'a'));
+
+ EXPECT_TRUE(AtomMatchesChar(false, '$', '$'));
+ EXPECT_TRUE(AtomMatchesChar(false, '5', '5'));
+ EXPECT_TRUE(AtomMatchesChar(false, 'Z', 'Z'));
+}
+
+TEST(ValidateRegexTest, GeneratesFailureAndReturnsFalseForInvalid) {
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(NULL)),
+ "NULL is not a valid simple regular expression");
+ EXPECT_NONFATAL_FAILURE(
+ ASSERT_FALSE(ValidateRegex("a\\")),
+ "Syntax error at index 1 in simple regular expression \"a\\\": ");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("a\\")),
+ "'\\' cannot appear at the end");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("\\n\\")),
+ "'\\' cannot appear at the end");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("\\s\\hb")),
+ "invalid escape sequence \"\\h\"");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("^^")),
+ "'^' can only appear at the beginning");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(".*^b")),
+ "'^' can only appear at the beginning");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("$$")),
+ "'$' can only appear at the end");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("^$a")),
+ "'$' can only appear at the end");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("a(b")),
+ "'(' is unsupported");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("ab)")),
+ "')' is unsupported");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("[ab")),
+ "'[' is unsupported");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("a{2")),
+ "'{' is unsupported");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("?")),
+ "'?' can only follow a repeatable token");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("^*")),
+ "'*' can only follow a repeatable token");
+ EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("5*+")),
+ "'+' can only follow a repeatable token");
+}
+
+TEST(ValidateRegexTest, ReturnsTrueForValid) {
+ EXPECT_TRUE(ValidateRegex(""));
+ EXPECT_TRUE(ValidateRegex("a"));
+ EXPECT_TRUE(ValidateRegex(".*"));
+ EXPECT_TRUE(ValidateRegex("^a_+"));
+ EXPECT_TRUE(ValidateRegex("^a\\t\\&?"));
+ EXPECT_TRUE(ValidateRegex("09*$"));
+ EXPECT_TRUE(ValidateRegex("^Z$"));
+ EXPECT_TRUE(ValidateRegex("a\\^Z\\$\\(\\)\\|\\[\\]\\{\\}"));
+}
+
+TEST(MatchRepetitionAndRegexAtHeadTest, WorksForZeroOrOne) {
+ EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "a", "ba"));
+ // Repeating more than once.
+ EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "b", "aab"));
+
+ // Repeating zero times.
+ EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "b", "ba"));
+ // Repeating once.
+ EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "b", "ab"));
+ EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '#', '?', ".", "##"));
+}
+
+TEST(MatchRepetitionAndRegexAtHeadTest, WorksForZeroOrMany) {
+ EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '*', "a$", "baab"));
+
+ // Repeating zero times.
+ EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '*', "b", "bc"));
+ // Repeating once.
+ EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '*', "b", "abc"));
+ // Repeating more than once.
+ EXPECT_TRUE(MatchRepetitionAndRegexAtHead(true, 'w', '*', "-", "ab_1-g"));
+}
+
+TEST(MatchRepetitionAndRegexAtHeadTest, WorksForOneOrMany) {
+ EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '+', "a$", "baab"));
+ // Repeating zero times.
+ EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '+', "b", "bc"));
+
+ // Repeating once.
+ EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '+', "b", "abc"));
+ // Repeating more than once.
+ EXPECT_TRUE(MatchRepetitionAndRegexAtHead(true, 'w', '+', "-", "ab_1-g"));
+}
+
+TEST(MatchRegexAtHeadTest, ReturnsTrueForEmptyRegex) {
+ EXPECT_TRUE(MatchRegexAtHead("", ""));
+ EXPECT_TRUE(MatchRegexAtHead("", "ab"));
+}
+
+TEST(MatchRegexAtHeadTest, WorksWhenDollarIsInRegex) {
+ EXPECT_FALSE(MatchRegexAtHead("$", "a"));
+
+ EXPECT_TRUE(MatchRegexAtHead("$", ""));
+ EXPECT_TRUE(MatchRegexAtHead("a$", "a"));
+}
+
+TEST(MatchRegexAtHeadTest, WorksWhenRegexStartsWithEscapeSequence) {
+ EXPECT_FALSE(MatchRegexAtHead("\\w", "+"));
+ EXPECT_FALSE(MatchRegexAtHead("\\W", "ab"));
+
+ EXPECT_TRUE(MatchRegexAtHead("\\sa", "\nab"));
+ EXPECT_TRUE(MatchRegexAtHead("\\d", "1a"));
+}
+
+TEST(MatchRegexAtHeadTest, WorksWhenRegexStartsWithRepetition) {
+ EXPECT_FALSE(MatchRegexAtHead(".+a", "abc"));
+ EXPECT_FALSE(MatchRegexAtHead("a?b", "aab"));
+
+ EXPECT_TRUE(MatchRegexAtHead(".*a", "bc12-ab"));
+ EXPECT_TRUE(MatchRegexAtHead("a?b", "b"));
+ EXPECT_TRUE(MatchRegexAtHead("a?b", "ab"));
+}
+
+TEST(MatchRegexAtHeadTest,
+     WorksWhenRegexStartsWithRepetitionOfEscapeSequence) {
+ EXPECT_FALSE(MatchRegexAtHead("\\.+a", "abc"));
+ EXPECT_FALSE(MatchRegexAtHead("\\s?b", " b"));
+
+ EXPECT_TRUE(MatchRegexAtHead("\\(*a", "((((ab"));
+ EXPECT_TRUE(MatchRegexAtHead("\\^?b", "^b"));
+ EXPECT_TRUE(MatchRegexAtHead("\\\\?b", "b"));
+ EXPECT_TRUE(MatchRegexAtHead("\\\\?b", "\\b"));
+}
+
+TEST(MatchRegexAtHeadTest, MatchesSequentially) {
+ EXPECT_FALSE(MatchRegexAtHead("ab.*c", "acabc"));
+
+ EXPECT_TRUE(MatchRegexAtHead("ab.*c", "ab-fsc"));
+}
+
+TEST(MatchRegexAnywhereTest, ReturnsFalseWhenStringIsNull) {
+ EXPECT_FALSE(MatchRegexAnywhere("", NULL));
+}
+
+TEST(MatchRegexAnywhereTest, WorksWhenRegexStartsWithCaret) {
+ EXPECT_FALSE(MatchRegexAnywhere("^a", "ba"));
+ EXPECT_FALSE(MatchRegexAnywhere("^$", "a"));
+
+ EXPECT_TRUE(MatchRegexAnywhere("^a", "ab"));
+ EXPECT_TRUE(MatchRegexAnywhere("^", "ab"));
+ EXPECT_TRUE(MatchRegexAnywhere("^$", ""));
+}
+
+TEST(MatchRegexAnywhereTest, ReturnsFalseWhenNoMatch) {
+ EXPECT_FALSE(MatchRegexAnywhere("a", "bcde123"));
+ EXPECT_FALSE(MatchRegexAnywhere("a.+a", "--aa88888888"));
+}
+
+TEST(MatchRegexAnywhereTest, ReturnsTrueWhenMatchingPrefix) {
+ EXPECT_TRUE(MatchRegexAnywhere("\\w+", "ab1_ - 5"));
+ EXPECT_TRUE(MatchRegexAnywhere(".*=", "="));
+ EXPECT_TRUE(MatchRegexAnywhere("x.*ab?.*bc", "xaaabc"));
+}
+
+TEST(MatchRegexAnywhereTest, ReturnsTrueWhenMatchingNonPrefix) {
+ EXPECT_TRUE(MatchRegexAnywhere("\\w+", "$$$ ab1_ - 5"));
+ EXPECT_TRUE(MatchRegexAnywhere("\\.+=", "= ...="));
+}
+
+// Tests RE's implicit constructors.
+TEST(RETest, ImplicitConstructorWorks) {
+ const RE empty("");
+ EXPECT_STREQ("", empty.pattern());
+
+ const RE simple("hello");
+ EXPECT_STREQ("hello", simple.pattern());
+}
+
+// Tests that RE's constructors reject invalid regular expressions.
+TEST(RETest, RejectsInvalidRegex) {
+ EXPECT_NONFATAL_FAILURE({
+ const RE normal(NULL);
+ }, "NULL is not a valid simple regular expression");
+
+ EXPECT_NONFATAL_FAILURE({
+ const RE normal(".*(\\w+");
+ }, "'(' is unsupported");
+
+ EXPECT_NONFATAL_FAILURE({
+ const RE invalid("^?");
+ }, "'?' can only follow a repeatable token");
+}
+
+// Tests RE::FullMatch().
+TEST(RETest, FullMatchWorks) {
+ const RE empty("");
+ EXPECT_TRUE(RE::FullMatch("", empty));
+ EXPECT_FALSE(RE::FullMatch("a", empty));
+
+ const RE re1("a");
+ EXPECT_TRUE(RE::FullMatch("a", re1));
+
+ const RE re("a.*z");
+ EXPECT_TRUE(RE::FullMatch("az", re));
+ EXPECT_TRUE(RE::FullMatch("axyz", re));
+ EXPECT_FALSE(RE::FullMatch("baz", re));
+ EXPECT_FALSE(RE::FullMatch("azy", re));
+}
+
+// Tests RE::PartialMatch().
+TEST(RETest, PartialMatchWorks) {
+ const RE empty("");
+ EXPECT_TRUE(RE::PartialMatch("", empty));
+ EXPECT_TRUE(RE::PartialMatch("a", empty));
+
+ const RE re("a.*z");
+ EXPECT_TRUE(RE::PartialMatch("az", re));
+ EXPECT_TRUE(RE::PartialMatch("axyz", re));
+ EXPECT_TRUE(RE::PartialMatch("baz", re));
+ EXPECT_TRUE(RE::PartialMatch("azy", re));
+ EXPECT_FALSE(RE::PartialMatch("zza", re));
+}
+
+#endif // GTEST_USES_POSIX_RE
+
+#if !GTEST_OS_WINDOWS_MOBILE
+
+TEST(CaptureTest, CapturesStdout) {
+ CaptureStdout();
+ fprintf(stdout, "abc");
+ EXPECT_STREQ("abc", GetCapturedStdout().c_str());
+
+ CaptureStdout();
+ fprintf(stdout, "def%cghi", '\0');
+ EXPECT_EQ(::std::string("def\0ghi", 7), ::std::string(GetCapturedStdout()));
+}
+
+TEST(CaptureTest, CapturesStderr) {
+ CaptureStderr();
+ fprintf(stderr, "jkl");
+ EXPECT_STREQ("jkl", GetCapturedStderr().c_str());
+
+ CaptureStderr();
+ fprintf(stderr, "jkl%cmno", '\0');
+ EXPECT_EQ(::std::string("jkl\0mno", 7), ::std::string(GetCapturedStderr()));
+}
+
+// Tests that stdout and stderr capture don't interfere with each other.
+TEST(CaptureTest, CapturesStdoutAndStderr) {
+ CaptureStdout();
+ CaptureStderr();
+ fprintf(stdout, "pqr");
+ fprintf(stderr, "stu");
+ EXPECT_STREQ("pqr", GetCapturedStdout().c_str());
+ EXPECT_STREQ("stu", GetCapturedStderr().c_str());
+}
+
+TEST(CaptureDeathTest, CannotReenterStdoutCapture) {
+ CaptureStdout();
+ EXPECT_DEATH_IF_SUPPORTED(CaptureStdout();,
+ "Only one stdout capturer can exist at a time");
+ GetCapturedStdout();
+
+ // We cannot test stderr capturing using death tests as they use it
+ // themselves.
+}
+
+#endif // !GTEST_OS_WINDOWS_MOBILE
+
+TEST(ThreadLocalTest, DefaultConstructorInitializesToDefaultValues) {
+ ThreadLocal<int> t1;
+ EXPECT_EQ(0, t1.get());
+
+ ThreadLocal<void*> t2;
+ EXPECT_TRUE(t2.get() == NULL);
+}
+
+TEST(ThreadLocalTest, SingleParamConstructorInitializesToParam) {
+ ThreadLocal<int> t1(123);
+ EXPECT_EQ(123, t1.get());
+
+ int i = 0;
+ ThreadLocal<int*> t2(&i);
+ EXPECT_EQ(&i, t2.get());
+}
+
+class NoDefaultConstructor {
+ public:
+  explicit NoDefaultConstructor(const char*) {}
+  NoDefaultConstructor(const NoDefaultConstructor&) {}
+};
+
+TEST(ThreadLocalTest, ValueDefaultConstructorIsNotRequiredForParamVersion) {
+  ThreadLocal<NoDefaultConstructor> bar(NoDefaultConstructor("foo"));
+  bar.pointer();
+}
+
+TEST(ThreadLocalTest, GetAndPointerReturnSameValue) {
+ ThreadLocal<String> thread_local;
+
+ EXPECT_EQ(thread_local.pointer(), &(thread_local.get()));
+
+ // Verifies the condition still holds after calling set.
+ thread_local.set("foo");
+ EXPECT_EQ(thread_local.pointer(), &(thread_local.get()));
+}
+
+TEST(ThreadLocalTest, PointerAndConstPointerReturnSameValue) {
+ ThreadLocal<String> thread_local;
+ const ThreadLocal<String>& const_thread_local = thread_local;
+
+ EXPECT_EQ(thread_local.pointer(), const_thread_local.pointer());
+
+ thread_local.set("foo");
+ EXPECT_EQ(thread_local.pointer(), const_thread_local.pointer());
+}
+
+#if GTEST_IS_THREADSAFE
+
+void AddTwo(int* param) { *param += 2; }
+
+TEST(ThreadWithParamTest, ConstructorExecutesThreadFunc) {
+ int i = 40;
+ ThreadWithParam<int*> thread(&AddTwo, &i, NULL);
+ thread.Join();
+ EXPECT_EQ(42, i);
+}
+
+TEST(MutexDeathTest, AssertHeldShouldAssertWhenNotLocked) {
+ // AssertHeld() is flaky only in the presence of multiple threads accessing
+ // the lock. In this case, the test is robust.
+ EXPECT_DEATH_IF_SUPPORTED({
+ Mutex m;
+ { MutexLock lock(&m); }
+ m.AssertHeld();
+ },
+ "thread .*hold");
+}
+
+TEST(MutexTest, AssertHeldShouldNotAssertWhenLocked) {
+ Mutex m;
+ MutexLock lock(&m);
+ m.AssertHeld();
+}
+
+class AtomicCounterWithMutex {
+ public:
+ explicit AtomicCounterWithMutex(Mutex* mutex) :
+ value_(0), mutex_(mutex), random_(42) {}
+
+ void Increment() {
+ MutexLock lock(mutex_);
+ int temp = value_;
+ {
+      // Locking a mutex puts up a memory barrier, preventing reads and
+      // writes to value_ from being reordered as observed from other threads.
+ //
+ // We cannot use Mutex and MutexLock here or rely on their memory
+ // barrier functionality as we are testing them here.
+ pthread_mutex_t memory_barrier_mutex;
+ GTEST_CHECK_POSIX_SUCCESS_(
+ pthread_mutex_init(&memory_barrier_mutex, NULL));
+ GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_lock(&memory_barrier_mutex));
+
+ SleepMilliseconds(random_.Generate(30));
+
+ GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_unlock(&memory_barrier_mutex));
+ }
+ value_ = temp + 1;
+ }
+ int value() const { return value_; }
+
+ private:
+ volatile int value_;
+ Mutex* const mutex_; // Protects value_.
+ Random random_;
+};
+
+void CountingThreadFunc(pair<AtomicCounterWithMutex*, int> param) {
+ for (int i = 0; i < param.second; ++i)
+ param.first->Increment();
+}
+
+// Tests that the mutex lets only one thread at a time lock it.
+TEST(MutexTest, OnlyOneThreadCanLockAtATime) {
+ Mutex mutex;
+ AtomicCounterWithMutex locked_counter(&mutex);
+
+ typedef ThreadWithParam<pair<AtomicCounterWithMutex*, int> > ThreadType;
+ const int kCycleCount = 20;
+ const int kThreadCount = 7;
+ scoped_ptr<ThreadType> counting_threads[kThreadCount];
+ Notification threads_can_start;
+ // Creates and runs kThreadCount threads that increment locked_counter
+ // kCycleCount times each.
+ for (int i = 0; i < kThreadCount; ++i) {
+ counting_threads[i].reset(new ThreadType(&CountingThreadFunc,
+ make_pair(&locked_counter,
+ kCycleCount),
+ &threads_can_start));
+ }
+ threads_can_start.Notify();
+ for (int i = 0; i < kThreadCount; ++i)
+ counting_threads[i]->Join();
+
+  // If the mutex lets more than one thread increment the counter at a
+  // time, they are likely to encounter a race condition and have some
+  // increments overwritten, resulting in a lower-than-expected counter
+  // value.
+ EXPECT_EQ(kCycleCount * kThreadCount, locked_counter.value());
+}
+
+template <typename T>
+void RunFromThread(void (func)(T), T param) {
+ ThreadWithParam<T> thread(func, param, NULL);
+ thread.Join();
+}
+
+void RetrieveThreadLocalValue(pair<ThreadLocal<String>*, String*> param) {
+ *param.second = param.first->get();
+}
+
+TEST(ThreadLocalTest, ParameterizedConstructorSetsDefault) {
+ ThreadLocal<String> thread_local("foo");
+ EXPECT_STREQ("foo", thread_local.get().c_str());
+
+ thread_local.set("bar");
+ EXPECT_STREQ("bar", thread_local.get().c_str());
+
+ String result;
+ RunFromThread(&RetrieveThreadLocalValue, make_pair(&thread_local, &result));
+ EXPECT_STREQ("foo", result.c_str());
+}
+
+// DestructorTracker keeps track of whether its instances have been
+// destroyed.
+static std::vector<bool> g_destroyed;
+
+class DestructorTracker {
+ public:
+ DestructorTracker() : index_(GetNewIndex()) {}
+ DestructorTracker(const DestructorTracker& /* rhs */)
+ : index_(GetNewIndex()) {}
+ ~DestructorTracker() {
+ // We never access g_destroyed concurrently, so we don't need to
+ // protect the write operation under a mutex.
+ g_destroyed[index_] = true;
+ }
+
+ private:
+ static int GetNewIndex() {
+ g_destroyed.push_back(false);
+ return g_destroyed.size() - 1;
+ }
+ const int index_;
+};
+
+typedef ThreadLocal<DestructorTracker>* ThreadParam;
+
+void CallThreadLocalGet(ThreadParam thread_local) {
+ thread_local->get();
+}
+
+// Tests that when a ThreadLocal object dies in a thread, it destroys
+// the managed object for that thread.
+TEST(ThreadLocalTest, DestroysManagedObjectForOwnThreadWhenDying) {
+ g_destroyed.clear();
+
+ {
+ // The next line default constructs a DestructorTracker object as
+ // the default value of objects managed by thread_local.
+ ThreadLocal<DestructorTracker> thread_local;
+ ASSERT_EQ(1U, g_destroyed.size());
+ ASSERT_FALSE(g_destroyed[0]);
+
+ // This creates another DestructorTracker object for the main thread.
+ thread_local.get();
+ ASSERT_EQ(2U, g_destroyed.size());
+ ASSERT_FALSE(g_destroyed[0]);
+ ASSERT_FALSE(g_destroyed[1]);
+ }
+
+ // Now thread_local has died. It should have destroyed both the
+ // default value shared by all threads and the value for the main
+ // thread.
+ ASSERT_EQ(2U, g_destroyed.size());
+ EXPECT_TRUE(g_destroyed[0]);
+ EXPECT_TRUE(g_destroyed[1]);
+
+ g_destroyed.clear();
+}
+
+// Tests that when a thread exits, the thread-local object for that
+// thread is destroyed.
+TEST(ThreadLocalTest, DestroysManagedObjectAtThreadExit) {
+ g_destroyed.clear();
+
+ {
+ // The next line default constructs a DestructorTracker object as
+ // the default value of objects managed by thread_local.
+ ThreadLocal<DestructorTracker> thread_local;
+ ASSERT_EQ(1U, g_destroyed.size());
+ ASSERT_FALSE(g_destroyed[0]);
+
+ // This creates another DestructorTracker object in the new thread.
+ ThreadWithParam<ThreadParam> thread(
+ &CallThreadLocalGet, &thread_local, NULL);
+ thread.Join();
+
+ // Now the new thread has exited. The per-thread object for it
+ // should have been destroyed.
+ ASSERT_EQ(2U, g_destroyed.size());
+ ASSERT_FALSE(g_destroyed[0]);
+ ASSERT_TRUE(g_destroyed[1]);
+ }
+
+ // Now thread_local has died. The default value should have been
+ // destroyed too.
+ ASSERT_EQ(2U, g_destroyed.size());
+ EXPECT_TRUE(g_destroyed[0]);
+ EXPECT_TRUE(g_destroyed[1]);
+
+ g_destroyed.clear();
+}
+
+TEST(ThreadLocalTest, ThreadLocalMutationsAffectOnlyCurrentThread) {
+ ThreadLocal<String> thread_local;
+ thread_local.set("Foo");
+ EXPECT_STREQ("Foo", thread_local.get().c_str());
+
+ String result;
+ RunFromThread(&RetrieveThreadLocalValue, make_pair(&thread_local, &result));
+ EXPECT_TRUE(result.c_str() == NULL);
+}
+
+#endif // GTEST_IS_THREADSAFE
+
+} // namespace internal
+} // namespace testing
diff --git a/Source/ThirdParty/gtest/test/gtest-test-part_test.cc b/Source/ThirdParty/gtest/test/gtest-test-part_test.cc
new file mode 100644
index 000000000..5a3e9196a
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-test-part_test.cc
@@ -0,0 +1,208 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: mheule@google.com (Markus Heule)
+//
+
+#include <gtest/gtest-test-part.h>
+
+#include <gtest/gtest.h>
+
+using testing::Message;
+using testing::Test;
+using testing::TestPartResult;
+using testing::TestPartResultArray;
+
+namespace {
+
+// Tests the TestPartResult class.
+
+// The test fixture for testing TestPartResult.
+class TestPartResultTest : public Test {
+ protected:
+ TestPartResultTest()
+ : r1_(TestPartResult::kSuccess, "foo/bar.cc", 10, "Success!"),
+ r2_(TestPartResult::kNonFatalFailure, "foo/bar.cc", -1, "Failure!"),
+ r3_(TestPartResult::kFatalFailure, NULL, -1, "Failure!") {}
+
+ TestPartResult r1_, r2_, r3_;
+};
+
+
+TEST_F(TestPartResultTest, ConstructorWorks) {
+ Message message;
+ message << "something is terribly wrong";
+ message << static_cast<const char*>(testing::internal::kStackTraceMarker);
+ message << "some unimportant stack trace";
+
+ const TestPartResult result(TestPartResult::kNonFatalFailure,
+ "some_file.cc",
+ 42,
+ message.GetString().c_str());
+
+ EXPECT_EQ(TestPartResult::kNonFatalFailure, result.type());
+ EXPECT_STREQ("some_file.cc", result.file_name());
+ EXPECT_EQ(42, result.line_number());
+ EXPECT_STREQ(message.GetString().c_str(), result.message());
+ EXPECT_STREQ("something is terribly wrong", result.summary());
+}
+
+TEST_F(TestPartResultTest, ResultAccessorsWork) {
+ const TestPartResult success(TestPartResult::kSuccess,
+ "file.cc",
+ 42,
+ "message");
+ EXPECT_TRUE(success.passed());
+ EXPECT_FALSE(success.failed());
+ EXPECT_FALSE(success.nonfatally_failed());
+ EXPECT_FALSE(success.fatally_failed());
+
+ const TestPartResult nonfatal_failure(TestPartResult::kNonFatalFailure,
+ "file.cc",
+ 42,
+ "message");
+ EXPECT_FALSE(nonfatal_failure.passed());
+ EXPECT_TRUE(nonfatal_failure.failed());
+ EXPECT_TRUE(nonfatal_failure.nonfatally_failed());
+ EXPECT_FALSE(nonfatal_failure.fatally_failed());
+
+ const TestPartResult fatal_failure(TestPartResult::kFatalFailure,
+ "file.cc",
+ 42,
+ "message");
+ EXPECT_FALSE(fatal_failure.passed());
+ EXPECT_TRUE(fatal_failure.failed());
+ EXPECT_FALSE(fatal_failure.nonfatally_failed());
+ EXPECT_TRUE(fatal_failure.fatally_failed());
+}
+
+// Tests TestPartResult::type().
+TEST_F(TestPartResultTest, type) {
+ EXPECT_EQ(TestPartResult::kSuccess, r1_.type());
+ EXPECT_EQ(TestPartResult::kNonFatalFailure, r2_.type());
+ EXPECT_EQ(TestPartResult::kFatalFailure, r3_.type());
+}
+
+// Tests TestPartResult::file_name().
+TEST_F(TestPartResultTest, file_name) {
+ EXPECT_STREQ("foo/bar.cc", r1_.file_name());
+ EXPECT_STREQ(NULL, r3_.file_name());
+}
+
+// Tests TestPartResult::line_number().
+TEST_F(TestPartResultTest, line_number) {
+ EXPECT_EQ(10, r1_.line_number());
+ EXPECT_EQ(-1, r2_.line_number());
+}
+
+// Tests TestPartResult::message().
+TEST_F(TestPartResultTest, message) {
+ EXPECT_STREQ("Success!", r1_.message());
+}
+
+// Tests TestPartResult::passed().
+TEST_F(TestPartResultTest, Passed) {
+ EXPECT_TRUE(r1_.passed());
+ EXPECT_FALSE(r2_.passed());
+ EXPECT_FALSE(r3_.passed());
+}
+
+// Tests TestPartResult::failed().
+TEST_F(TestPartResultTest, Failed) {
+ EXPECT_FALSE(r1_.failed());
+ EXPECT_TRUE(r2_.failed());
+ EXPECT_TRUE(r3_.failed());
+}
+
+// Tests TestPartResult::fatally_failed().
+TEST_F(TestPartResultTest, FatallyFailed) {
+ EXPECT_FALSE(r1_.fatally_failed());
+ EXPECT_FALSE(r2_.fatally_failed());
+ EXPECT_TRUE(r3_.fatally_failed());
+}
+
+// Tests TestPartResult::nonfatally_failed().
+TEST_F(TestPartResultTest, NonfatallyFailed) {
+ EXPECT_FALSE(r1_.nonfatally_failed());
+ EXPECT_TRUE(r2_.nonfatally_failed());
+ EXPECT_FALSE(r3_.nonfatally_failed());
+}
+
+// Tests the TestPartResultArray class.
+
+class TestPartResultArrayTest : public Test {
+ protected:
+ TestPartResultArrayTest()
+ : r1_(TestPartResult::kNonFatalFailure, "foo/bar.cc", -1, "Failure 1"),
+ r2_(TestPartResult::kFatalFailure, "foo/bar.cc", -1, "Failure 2") {}
+
+ const TestPartResult r1_, r2_;
+};
+
+// Tests that TestPartResultArray initially has size 0.
+TEST_F(TestPartResultArrayTest, InitialSizeIsZero) {
+ TestPartResultArray results;
+ EXPECT_EQ(0, results.size());
+}
+
+// Tests that TestPartResultArray contains the given TestPartResult
+// after one Append() operation.
+TEST_F(TestPartResultArrayTest, ContainsGivenResultAfterAppend) {
+ TestPartResultArray results;
+ results.Append(r1_);
+ EXPECT_EQ(1, results.size());
+ EXPECT_STREQ("Failure 1", results.GetTestPartResult(0).message());
+}
+
+// Tests that TestPartResultArray contains the given TestPartResults
+// after two Append() operations.
+TEST_F(TestPartResultArrayTest, ContainsGivenResultsAfterTwoAppends) {
+ TestPartResultArray results;
+ results.Append(r1_);
+ results.Append(r2_);
+ EXPECT_EQ(2, results.size());
+ EXPECT_STREQ("Failure 1", results.GetTestPartResult(0).message());
+ EXPECT_STREQ("Failure 2", results.GetTestPartResult(1).message());
+}
+
+typedef TestPartResultArrayTest TestPartResultArrayDeathTest;
+
+// Tests that the program dies when GetTestPartResult() is called with
+// an invalid index.
+TEST_F(TestPartResultArrayDeathTest, DiesWhenIndexIsOutOfBound) {
+ TestPartResultArray results;
+ results.Append(r1_);
+
+ EXPECT_DEATH_IF_SUPPORTED(results.GetTestPartResult(-1), "");
+ EXPECT_DEATH_IF_SUPPORTED(results.GetTestPartResult(1), "");
+}
+
+// TODO(mheule@google.com): Add a test for the class HasNewFatalFailureHelper.
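+
+// A minimal sketch of such a test (illustrative only, not part of the
+// original patch), assuming the HasNewFatalFailureHelper interface declared
+// in gtest-test-part.h: the constructor registers the helper as the current
+// test part result reporter, and has_new_fatal_failure() says whether a
+// fatal failure has been reported since then.
+TEST(HasNewFatalFailureHelperTest, StartsWithNoFatalFailure) {
+  ::testing::internal::HasNewFatalFailureHelper helper;
+  EXPECT_FALSE(helper.has_new_fatal_failure());
+}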
+
+} // namespace
diff --git a/Source/ThirdParty/gtest/test/gtest-tuple_test.cc b/Source/ThirdParty/gtest/test/gtest-tuple_test.cc
new file mode 100644
index 000000000..532f70b3d
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-tuple_test.cc
@@ -0,0 +1,320 @@
+// Copyright 2007, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include <gtest/internal/gtest-tuple.h>
+#include <utility>
+#include <gtest/gtest.h>
+
+namespace {
+
+using ::std::tr1::get;
+using ::std::tr1::make_tuple;
+using ::std::tr1::tuple;
+using ::std::tr1::tuple_element;
+using ::std::tr1::tuple_size;
+using ::testing::StaticAssertTypeEq;
+
+// Tests that tuple_element<K, tuple<T0, T1, ..., TN> >::type returns TK.
+TEST(tuple_element_Test, ReturnsElementType) {
+ StaticAssertTypeEq<int, tuple_element<0, tuple<int, char> >::type>();
+ StaticAssertTypeEq<int&, tuple_element<1, tuple<double, int&> >::type>();
+ StaticAssertTypeEq<bool, tuple_element<2, tuple<double, int, bool> >::type>();
+}
+
+// Tests that tuple_size<T>::value gives the number of fields in tuple
+// type T.
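+// (The unary + below forces tuple_size<...>::value to be read as an rvalue,
+// so EXPECT_EQ does not bind a reference to the static constant and thereby
+// require an out-of-class definition of it.)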
+TEST(tuple_size_Test, ReturnsNumberOfFields) {
+ EXPECT_EQ(0, +tuple_size<tuple<> >::value);
+ EXPECT_EQ(1, +tuple_size<tuple<void*> >::value);
+ EXPECT_EQ(1, +tuple_size<tuple<char> >::value);
+ EXPECT_EQ(1, +(tuple_size<tuple<tuple<int, double> > >::value));
+ EXPECT_EQ(2, +(tuple_size<tuple<int&, const char> >::value));
+ EXPECT_EQ(3, +(tuple_size<tuple<char*, void, const bool&> >::value));
+}
+
+// Tests comparing a tuple with itself.
+TEST(ComparisonTest, ComparesWithSelf) {
+ const tuple<int, char, bool> a(5, 'a', false);
+
+ EXPECT_TRUE(a == a);
+ EXPECT_FALSE(a != a);
+}
+
+// Tests comparing two tuples with the same value.
+TEST(ComparisonTest, ComparesEqualTuples) {
+ const tuple<int, bool> a(5, true), b(5, true);
+
+ EXPECT_TRUE(a == b);
+ EXPECT_FALSE(a != b);
+}
+
+// Tests comparing two different tuples that have no reference fields.
+TEST(ComparisonTest, ComparesUnequalTuplesWithoutReferenceFields) {
+ typedef tuple<const int, char> FooTuple;
+
+ const FooTuple a(0, 'x');
+ const FooTuple b(1, 'a');
+
+ EXPECT_TRUE(a != b);
+ EXPECT_FALSE(a == b);
+
+ const FooTuple c(1, 'b');
+
+ EXPECT_TRUE(b != c);
+ EXPECT_FALSE(b == c);
+}
+
+// Tests comparing two different tuples that have reference fields.
+TEST(ComparisonTest, ComparesUnequalTuplesWithReferenceFields) {
+ typedef tuple<int&, const char&> FooTuple;
+
+ int i = 5;
+ const char ch = 'a';
+ const FooTuple a(i, ch);
+
+ int j = 6;
+ const FooTuple b(j, ch);
+
+ EXPECT_TRUE(a != b);
+ EXPECT_FALSE(a == b);
+
+ j = 5;
+ const char ch2 = 'b';
+ const FooTuple c(j, ch2);
+
+ EXPECT_TRUE(b != c);
+ EXPECT_FALSE(b == c);
+}
+
+// Tests that a tuple field with a reference type is an alias of the
+// variable it's supposed to reference.
+TEST(ReferenceFieldTest, IsAliasOfReferencedVariable) {
+ int n = 0;
+ tuple<bool, int&> t(true, n);
+
+ n = 1;
+ EXPECT_EQ(n, get<1>(t))
+ << "Changing a underlying variable should update the reference field.";
+
+ // Makes sure that the implementation doesn't do anything funny with
+ // the & operator for the return type of get<>().
+ EXPECT_EQ(&n, &(get<1>(t)))
+ << "The address of a reference field should equal the address of "
+ << "the underlying variable.";
+
+ get<1>(t) = 2;
+ EXPECT_EQ(2, n)
+ << "Changing a reference field should update the underlying variable.";
+}
+
+// Tests that tuple's default constructor default initializes each field.
+// This test needs to compile without generating warnings.
+TEST(TupleConstructorTest, DefaultConstructorDefaultInitializesEachField) {
+ // The TR1 report requires that tuple's default constructor default
+ // initializes each field, even if it's a primitive type. If the
+ // implementation forgets to do this, this test will catch it by
+ // generating warnings about using uninitialized variables (assuming
+ // a decent compiler).
+
+ tuple<> empty;
+
+ tuple<int> a1, b1;
+ b1 = a1;
+ EXPECT_EQ(0, get<0>(b1));
+
+ tuple<int, double> a2, b2;
+ b2 = a2;
+ EXPECT_EQ(0, get<0>(b2));
+ EXPECT_EQ(0.0, get<1>(b2));
+
+ tuple<double, char, bool*> a3, b3;
+ b3 = a3;
+ EXPECT_EQ(0.0, get<0>(b3));
+ EXPECT_EQ('\0', get<1>(b3));
+ EXPECT_TRUE(get<2>(b3) == NULL);
+
+ tuple<int, int, int, int, int, int, int, int, int, int> a10, b10;
+ b10 = a10;
+ EXPECT_EQ(0, get<0>(b10));
+ EXPECT_EQ(0, get<1>(b10));
+ EXPECT_EQ(0, get<2>(b10));
+ EXPECT_EQ(0, get<3>(b10));
+ EXPECT_EQ(0, get<4>(b10));
+ EXPECT_EQ(0, get<5>(b10));
+ EXPECT_EQ(0, get<6>(b10));
+ EXPECT_EQ(0, get<7>(b10));
+ EXPECT_EQ(0, get<8>(b10));
+ EXPECT_EQ(0, get<9>(b10));
+}
+
+// Tests constructing a tuple from its fields.
+TEST(TupleConstructorTest, ConstructsFromFields) {
+ int n = 1;
+ // Reference field.
+ tuple<int&> a(n);
+ EXPECT_EQ(&n, &(get<0>(a)));
+
+ // Non-reference fields.
+ tuple<int, char> b(5, 'a');
+ EXPECT_EQ(5, get<0>(b));
+ EXPECT_EQ('a', get<1>(b));
+
+ // Const reference field.
+ const int m = 2;
+ tuple<bool, const int&> c(true, m);
+ EXPECT_TRUE(get<0>(c));
+ EXPECT_EQ(&m, &(get<1>(c)));
+}
+
+// Tests tuple's copy constructor.
+TEST(TupleConstructorTest, CopyConstructor) {
+ tuple<double, bool> a(0.0, true);
+ tuple<double, bool> b(a);
+
+ EXPECT_DOUBLE_EQ(0.0, get<0>(b));
+ EXPECT_TRUE(get<1>(b));
+}
+
+// Tests constructing a tuple from another tuple that has a compatible
+// but different type.
+TEST(TupleConstructorTest, ConstructsFromDifferentTupleType) {
+ tuple<int, int, char> a(0, 1, 'a');
+ tuple<double, long, int> b(a);
+
+ EXPECT_DOUBLE_EQ(0.0, get<0>(b));
+ EXPECT_EQ(1, get<1>(b));
+ EXPECT_EQ('a', get<2>(b));
+}
+
+// Tests constructing a 2-tuple from an std::pair.
+TEST(TupleConstructorTest, ConstructsFromPair) {
+ ::std::pair<int, char> a(1, 'a');
+ tuple<int, char> b(a);
+ tuple<int, const char&> c(a);
+}
+
+// Tests assigning a tuple to another tuple with the same type.
+TEST(TupleAssignmentTest, AssignsToSameTupleType) {
+ const tuple<int, long> a(5, 7L);
+ tuple<int, long> b;
+ b = a;
+ EXPECT_EQ(5, get<0>(b));
+ EXPECT_EQ(7L, get<1>(b));
+}
+
+// Tests assigning a tuple to another tuple with a different but
+// compatible type.
+TEST(TupleAssignmentTest, AssignsToDifferentTupleType) {
+ const tuple<int, long, bool> a(1, 7L, true);
+ tuple<long, int, bool> b;
+ b = a;
+ EXPECT_EQ(1L, get<0>(b));
+ EXPECT_EQ(7, get<1>(b));
+ EXPECT_TRUE(get<2>(b));
+}
+
+// Tests assigning an std::pair to a 2-tuple.
+TEST(TupleAssignmentTest, AssignsFromPair) {
+ const ::std::pair<int, bool> a(5, true);
+ tuple<int, bool> b;
+ b = a;
+ EXPECT_EQ(5, get<0>(b));
+ EXPECT_TRUE(get<1>(b));
+
+ tuple<long, bool> c;
+ c = a;
+ EXPECT_EQ(5L, get<0>(c));
+ EXPECT_TRUE(get<1>(c));
+}
+
+// A fixture for testing big tuples.
+class BigTupleTest : public testing::Test {
+ protected:
+ typedef tuple<int, int, int, int, int, int, int, int, int, int> BigTuple;
+
+ BigTupleTest() :
+ a_(1, 0, 0, 0, 0, 0, 0, 0, 0, 2),
+ b_(1, 0, 0, 0, 0, 0, 0, 0, 0, 3) {}
+
+ BigTuple a_, b_;
+};
+
+// Tests constructing big tuples.
+TEST_F(BigTupleTest, Construction) {
+ BigTuple a;
+ BigTuple b(b_);
+}
+
+// Tests that get<N>(t) returns the N-th (0-based) field of tuple t.
+TEST_F(BigTupleTest, get) {
+ EXPECT_EQ(1, get<0>(a_));
+ EXPECT_EQ(2, get<9>(a_));
+
+ // Tests that get() works on a const tuple too.
+ const BigTuple a(a_);
+ EXPECT_EQ(1, get<0>(a));
+ EXPECT_EQ(2, get<9>(a));
+}
+
+// Tests comparing big tuples.
+TEST_F(BigTupleTest, Comparisons) {
+ EXPECT_TRUE(a_ == a_);
+ EXPECT_FALSE(a_ != a_);
+
+ EXPECT_TRUE(a_ != b_);
+ EXPECT_FALSE(a_ == b_);
+}
+
+TEST(MakeTupleTest, WorksForScalarTypes) {
+ tuple<bool, int> a;
+ a = make_tuple(true, 5);
+ EXPECT_TRUE(get<0>(a));
+ EXPECT_EQ(5, get<1>(a));
+
+ tuple<char, int, long> b;
+ b = make_tuple('a', 'b', 5);
+ EXPECT_EQ('a', get<0>(b));
+ EXPECT_EQ('b', get<1>(b));
+ EXPECT_EQ(5, get<2>(b));
+}
+
+TEST(MakeTupleTest, WorksForPointers) {
+ int a[] = { 1, 2, 3, 4 };
+ const char* const str = "hi";
+ int* const p = a;
+
+ tuple<const char*, int*> t;
+ t = make_tuple(str, p);
+ EXPECT_EQ(str, get<0>(t));
+ EXPECT_EQ(p, get<1>(t));
+}
+
+} // namespace
diff --git a/Source/ThirdParty/gtest/test/gtest-typed-test2_test.cc b/Source/ThirdParty/gtest/test/gtest-typed-test2_test.cc
new file mode 100644
index 000000000..79a8a87dc
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-typed-test2_test.cc
@@ -0,0 +1,45 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include <vector>
+
+#include "test/gtest-typed-test_test.h"
+#include <gtest/gtest.h>
+
+#if GTEST_HAS_TYPED_TEST_P
+
+// Tests that the same type-parameterized test case can be
+// instantiated in different translation units linked together.
+// (ContainerTest is also instantiated in gtest-typed-test_test.cc.)
+INSTANTIATE_TYPED_TEST_CASE_P(Vector, ContainerTest,
+ testing::Types<std::vector<int> >);
+
+#endif // GTEST_HAS_TYPED_TEST_P
diff --git a/Source/ThirdParty/gtest/test/gtest-typed-test_test.cc b/Source/ThirdParty/gtest/test/gtest-typed-test_test.cc
new file mode 100644
index 000000000..f2c397231
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-typed-test_test.cc
@@ -0,0 +1,360 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include <set>
+#include <vector>
+
+#include "test/gtest-typed-test_test.h"
+#include <gtest/gtest.h>
+
+using testing::Test;
+
+// Used for testing that SetUpTestCase()/TearDownTestCase(), fixture
+// ctor/dtor, and SetUp()/TearDown() work correctly in typed tests and
+// type-parameterized test.
+template <typename T>
+class CommonTest : public Test {
+ // For some technical reason, SetUpTestCase() and TearDownTestCase()
+ // must be public.
+ public:
+ static void SetUpTestCase() {
+ shared_ = new T(5);
+ }
+
+ static void TearDownTestCase() {
+ delete shared_;
+ shared_ = NULL;
+ }
+
+ // This 'protected:' is optional. There's no harm in making all
+ // members of this fixture class template public.
+ protected:
+ // We used to use std::list here, but switched to std::vector since
+ // MSVC's <list> doesn't compile cleanly with /W4.
+ typedef std::vector<T> Vector;
+ typedef std::set<int> IntSet;
+
+ CommonTest() : value_(1) {}
+
+ virtual ~CommonTest() { EXPECT_EQ(3, value_); }
+
+ virtual void SetUp() {
+ EXPECT_EQ(1, value_);
+ value_++;
+ }
+
+ virtual void TearDown() {
+ EXPECT_EQ(2, value_);
+ value_++;
+ }
+
+ T value_;
+ static T* shared_;
+};
+
+template <typename T>
+T* CommonTest<T>::shared_ = NULL;
+
+// This #ifdef block tests typed tests.
+#if GTEST_HAS_TYPED_TEST
+
+using testing::Types;
+
+// Tests that SetUpTestCase()/TearDownTestCase(), fixture ctor/dtor,
+// and SetUp()/TearDown() work correctly in typed tests
+
+typedef Types<char, int> TwoTypes;
+TYPED_TEST_CASE(CommonTest, TwoTypes);
+
+TYPED_TEST(CommonTest, ValuesAreCorrect) {
+ // Static members of the fixture class template can be visited via
+ // the TestFixture:: prefix.
+ EXPECT_EQ(5, *TestFixture::shared_);
+
+ // Typedefs in the fixture class template can be visited via the
+ // "typename TestFixture::" prefix.
+ typename TestFixture::Vector empty;
+ EXPECT_EQ(0U, empty.size());
+
+ typename TestFixture::IntSet empty2;
+ EXPECT_EQ(0U, empty2.size());
+
+ // Non-static members of the fixture class must be visited via
+ // 'this', as required by C++ for class templates.
+ EXPECT_EQ(2, this->value_);
+}
+
+// The second test makes sure shared_ is not deleted after the first
+// test.
+TYPED_TEST(CommonTest, ValuesAreStillCorrect) {
+ // Static members of the fixture class template can also be visited
+ // via 'this'.
+ ASSERT_TRUE(this->shared_ != NULL);
+ EXPECT_EQ(5, *this->shared_);
+
+ // TypeParam can be used to refer to the type parameter.
+ EXPECT_EQ(static_cast<TypeParam>(2), this->value_);
+}
+
+// Tests that multiple TYPED_TEST_CASE's can be defined in the same
+// translation unit.
+
+template <typename T>
+class TypedTest1 : public Test {
+};
+
+// Verifies that the second argument of TYPED_TEST_CASE can be a
+// single type.
+TYPED_TEST_CASE(TypedTest1, int);
+TYPED_TEST(TypedTest1, A) {}
+
+template <typename T>
+class TypedTest2 : public Test {
+};
+
+// Verifies that the second argument of TYPED_TEST_CASE can be a
+// Types<...> type list.
+TYPED_TEST_CASE(TypedTest2, Types<int>);
+
+// This also verifies that tests from different typed test cases can
+// share the same name.
+TYPED_TEST(TypedTest2, A) {}
+
+// Tests that a typed test case can be defined in a namespace.
+
+namespace library1 {
+
+template <typename T>
+class NumericTest : public Test {
+};
+
+typedef Types<int, long> NumericTypes;
+TYPED_TEST_CASE(NumericTest, NumericTypes);
+
+TYPED_TEST(NumericTest, DefaultIsZero) {
+ EXPECT_EQ(0, TypeParam());
+}
+
+} // namespace library1
+
+#endif // GTEST_HAS_TYPED_TEST
+
+// This #ifdef block tests type-parameterized tests.
+#if GTEST_HAS_TYPED_TEST_P
+
+using testing::Types;
+using testing::internal::TypedTestCasePState;
+
+// Tests TypedTestCasePState.
+
+class TypedTestCasePStateTest : public Test {
+ protected:
+ virtual void SetUp() {
+ state_.AddTestName("foo.cc", 0, "FooTest", "A");
+ state_.AddTestName("foo.cc", 0, "FooTest", "B");
+ state_.AddTestName("foo.cc", 0, "FooTest", "C");
+ }
+
+ TypedTestCasePState state_;
+};
+
+TEST_F(TypedTestCasePStateTest, SucceedsForMatchingList) {
+ const char* tests = "A, B, C";
+ EXPECT_EQ(tests,
+ state_.VerifyRegisteredTestNames("foo.cc", 1, tests));
+}
+
+// Makes sure that the order of the tests and spaces around the names
+// don't matter.
+TEST_F(TypedTestCasePStateTest, IgnoresOrderAndSpaces) {
+ const char* tests = "A,C, B";
+ EXPECT_EQ(tests,
+ state_.VerifyRegisteredTestNames("foo.cc", 1, tests));
+}
+
+typedef TypedTestCasePStateTest TypedTestCasePStateDeathTest;
+
+TEST_F(TypedTestCasePStateDeathTest, DetectsDuplicates) {
+ EXPECT_DEATH_IF_SUPPORTED(
+ state_.VerifyRegisteredTestNames("foo.cc", 1, "A, B, A, C"),
+ "foo\\.cc.1.?: Test A is listed more than once\\.");
+}
+
+TEST_F(TypedTestCasePStateDeathTest, DetectsExtraTest) {
+ EXPECT_DEATH_IF_SUPPORTED(
+ state_.VerifyRegisteredTestNames("foo.cc", 1, "A, B, C, D"),
+ "foo\\.cc.1.?: No test named D can be found in this test case\\.");
+}
+
+TEST_F(TypedTestCasePStateDeathTest, DetectsMissedTest) {
+ EXPECT_DEATH_IF_SUPPORTED(
+ state_.VerifyRegisteredTestNames("foo.cc", 1, "A, C"),
+ "foo\\.cc.1.?: You forgot to list test B\\.");
+}
+
+// Tests that defining a test for a parameterized test case generates
+// a run-time error if the test case has been registered.
+TEST_F(TypedTestCasePStateDeathTest, DetectsTestAfterRegistration) {
+ state_.VerifyRegisteredTestNames("foo.cc", 1, "A, B, C");
+ EXPECT_DEATH_IF_SUPPORTED(
+ state_.AddTestName("foo.cc", 2, "FooTest", "D"),
+ "foo\\.cc.2.?: Test D must be defined before REGISTER_TYPED_TEST_CASE_P"
+ "\\(FooTest, \\.\\.\\.\\)\\.");
+}
+
+// Tests that SetUpTestCase()/TearDownTestCase(), fixture ctor/dtor,
+// and SetUp()/TearDown() work correctly in type-parameterized tests.
+
+template <typename T>
+class DerivedTest : public CommonTest<T> {
+};
+
+TYPED_TEST_CASE_P(DerivedTest);
+
+TYPED_TEST_P(DerivedTest, ValuesAreCorrect) {
+ // Static members of the fixture class template can be visited via
+ // the TestFixture:: prefix.
+ EXPECT_EQ(5, *TestFixture::shared_);
+
+ // Non-static members of the fixture class must be visited via
+ // 'this', as required by C++ for class templates.
+ EXPECT_EQ(2, this->value_);
+}
+
+// The second test makes sure shared_ is not deleted after the first
+// test.
+TYPED_TEST_P(DerivedTest, ValuesAreStillCorrect) {
+ // Static members of the fixture class template can also be visited
+ // via 'this'.
+ ASSERT_TRUE(this->shared_ != NULL);
+ EXPECT_EQ(5, *this->shared_);
+ EXPECT_EQ(2, this->value_);
+}
+
+REGISTER_TYPED_TEST_CASE_P(DerivedTest,
+ ValuesAreCorrect, ValuesAreStillCorrect);
+
+typedef Types<short, long> MyTwoTypes;
+INSTANTIATE_TYPED_TEST_CASE_P(My, DerivedTest, MyTwoTypes);
+
+// Tests that multiple TYPED_TEST_CASE_P's can be defined in the same
+// translation unit.
+
+template <typename T>
+class TypedTestP1 : public Test {
+};
+
+TYPED_TEST_CASE_P(TypedTestP1);
+
+// For testing that the code between TYPED_TEST_CASE_P() and
+// TYPED_TEST_P() is not enclosed in a namespace.
+typedef int IntAfterTypedTestCaseP;
+
+TYPED_TEST_P(TypedTestP1, A) {}
+TYPED_TEST_P(TypedTestP1, B) {}
+
+// For testing that the code between TYPED_TEST_P() and
+// REGISTER_TYPED_TEST_CASE_P() is not enclosed in a namespace.
+typedef int IntBeforeRegisterTypedTestCaseP;
+
+REGISTER_TYPED_TEST_CASE_P(TypedTestP1, A, B);
+
+template <typename T>
+class TypedTestP2 : public Test {
+};
+
+TYPED_TEST_CASE_P(TypedTestP2);
+
+// This also verifies that tests from different type-parameterized
+// test cases can share the same name.
+TYPED_TEST_P(TypedTestP2, A) {}
+
+REGISTER_TYPED_TEST_CASE_P(TypedTestP2, A);
+
+// Verifies that the code between TYPED_TEST_CASE_P() and
+// REGISTER_TYPED_TEST_CASE_P() is not enclosed in a namespace.
+IntAfterTypedTestCaseP after = 0;
+IntBeforeRegisterTypedTestCaseP before = 0;
+
+// Verifies that the last argument of INSTANTIATE_TYPED_TEST_CASE_P()
+// can be either a single type or a Types<...> type list.
+INSTANTIATE_TYPED_TEST_CASE_P(Int, TypedTestP1, int);
+INSTANTIATE_TYPED_TEST_CASE_P(Int, TypedTestP2, Types<int>);
+
+// Tests that the same type-parameterized test case can be
+// instantiated more than once in the same translation unit.
+INSTANTIATE_TYPED_TEST_CASE_P(Double, TypedTestP2, Types<double>);
+
+// Tests that the same type-parameterized test case can be
+// instantiated in different translation units linked together.
+// (ContainerTest is also instantiated in gtest-typed-test2_test.cc.)
+typedef Types<std::vector<double>, std::set<char> > MyContainers;
+INSTANTIATE_TYPED_TEST_CASE_P(My, ContainerTest, MyContainers);
+
+// Tests that a type-parameterized test case can be defined and
+// instantiated in a namespace.
+
+namespace library2 {
+
+template <typename T>
+class NumericTest : public Test {
+};
+
+TYPED_TEST_CASE_P(NumericTest);
+
+TYPED_TEST_P(NumericTest, DefaultIsZero) {
+ EXPECT_EQ(0, TypeParam());
+}
+
+TYPED_TEST_P(NumericTest, ZeroIsLessThanOne) {
+ EXPECT_LT(TypeParam(0), TypeParam(1));
+}
+
+REGISTER_TYPED_TEST_CASE_P(NumericTest,
+ DefaultIsZero, ZeroIsLessThanOne);
+typedef Types<int, double> NumericTypes;
+INSTANTIATE_TYPED_TEST_CASE_P(My, NumericTest, NumericTypes);
+
+} // namespace library2
+
+#endif // GTEST_HAS_TYPED_TEST_P
+
+#if !defined(GTEST_HAS_TYPED_TEST) && !defined(GTEST_HAS_TYPED_TEST_P)
+
+// Google Test may not support type-parameterized tests with some
+// compilers. If we use conditional compilation to compile out all
+// code referring to the gtest_main library, MSVC linker will not link
+// that library at all and consequently complain about missing entry
+// point defined in that library (fatal error LNK1561: entry point
+// must be defined). This dummy test keeps gtest_main linked in.
+TEST(DummyTest, TypedTestsAreNotSupportedOnThisPlatform) {}
+
+#endif // #if !defined(GTEST_HAS_TYPED_TEST) && !defined(GTEST_HAS_TYPED_TEST_P)
diff --git a/Source/ThirdParty/gtest/test/gtest-typed-test_test.h b/Source/ThirdParty/gtest/test/gtest-typed-test_test.h
new file mode 100644
index 000000000..40dfeac6e
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-typed-test_test.h
@@ -0,0 +1,66 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#ifndef GTEST_TEST_GTEST_TYPED_TEST_TEST_H_
+#define GTEST_TEST_GTEST_TYPED_TEST_TEST_H_
+
+#include <gtest/gtest.h>
+
+#if GTEST_HAS_TYPED_TEST_P
+
+using testing::Test;
+
+// For testing that the same type-parameterized test case can be
+// instantiated in different translation units linked together.
+// ContainerTest will be instantiated in both gtest-typed-test_test.cc
+// and gtest-typed-test2_test.cc.
+
+template <typename T>
+class ContainerTest : public Test {
+};
+
+TYPED_TEST_CASE_P(ContainerTest);
+
+TYPED_TEST_P(ContainerTest, CanBeDefaultConstructed) {
+ TypeParam container;
+}
+
+TYPED_TEST_P(ContainerTest, InitialSizeIsZero) {
+ TypeParam container;
+ EXPECT_EQ(0U, container.size());
+}
+
+REGISTER_TYPED_TEST_CASE_P(ContainerTest,
+ CanBeDefaultConstructed, InitialSizeIsZero);
+
+#endif // GTEST_HAS_TYPED_TEST_P
+
+#endif // GTEST_TEST_GTEST_TYPED_TEST_TEST_H_
diff --git a/Source/ThirdParty/gtest/test/gtest-unittest-api_test.cc b/Source/ThirdParty/gtest/test/gtest-unittest-api_test.cc
new file mode 100644
index 000000000..7e0f8f804
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest-unittest-api_test.cc
@@ -0,0 +1,343 @@
+// Copyright 2009 Google Inc. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: vladl@google.com (Vlad Losev)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This file contains tests verifying correctness of data provided via
+// UnitTest's public methods.
+
+#include <gtest/gtest.h>
+
+#include <string.h> // For strcmp.
+#include <algorithm>
+
+using ::testing::InitGoogleTest;
+
+namespace testing {
+namespace internal {
+
+template <typename T>
+struct LessByName {
+ bool operator()(const T* a, const T* b) {
+ return strcmp(a->name(), b->name()) < 0;
+ }
+};
+
+class UnitTestHelper {
+ public:
+ // Returns the array of pointers to all test cases sorted by the test case
+ // name. The caller is responsible for deleting the array.
+ static TestCase const** const GetSortedTestCases() {
+ UnitTest& unit_test = *UnitTest::GetInstance();
+ TestCase const** const test_cases =
+ new const TestCase*[unit_test.total_test_case_count()];
+
+ for (int i = 0; i < unit_test.total_test_case_count(); ++i)
+ test_cases[i] = unit_test.GetTestCase(i);
+
+ std::sort(test_cases,
+ test_cases + unit_test.total_test_case_count(),
+ LessByName<TestCase>());
+ return test_cases;
+ }
+
+ // Returns the test case by its name. The caller doesn't own the returned
+ // pointer.
+ static const TestCase* FindTestCase(const char* name) {
+ UnitTest& unit_test = *UnitTest::GetInstance();
+ for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
+ const TestCase* test_case = unit_test.GetTestCase(i);
+ if (0 == strcmp(test_case->name(), name))
+ return test_case;
+ }
+ return NULL;
+ }
+
+ // Returns the array of pointers to all tests in a particular test case
+ // sorted by the test name. The caller is responsible for deleting the
+ // array.
+ static TestInfo const** const GetSortedTests(const TestCase* test_case) {
+ TestInfo const** const tests =
+ new const TestInfo*[test_case->total_test_count()];
+
+ for (int i = 0; i < test_case->total_test_count(); ++i)
+ tests[i] = test_case->GetTestInfo(i);
+
+ std::sort(tests, tests + test_case->total_test_count(),
+ LessByName<TestInfo>());
+ return tests;
+ }
+};
+
+#if GTEST_HAS_TYPED_TEST
+template <typename T> class TestCaseWithCommentTest : public Test {};
+TYPED_TEST_CASE(TestCaseWithCommentTest, Types<int>);
+TYPED_TEST(TestCaseWithCommentTest, Dummy) {}
+
+const int kTypedTestCases = 1;
+const int kTypedTests = 1;
+
+String GetExpectedTestCaseComment() {
+ Message comment;
+ comment << "TypeParam = " << GetTypeName<int>().c_str();
+ return comment.GetString();
+}
+#else
+const int kTypedTestCases = 0;
+const int kTypedTests = 0;
+#endif // GTEST_HAS_TYPED_TEST
+
+// We can only test the accessors that do not change value while tests run.
+// Since tests can be run in any order, the values of the accessors that
+// track test execution (such as failed_test_count) cannot be predicted.
+TEST(ApiTest, UnitTestImmutableAccessorsWork) {
+ UnitTest* unit_test = UnitTest::GetInstance();
+
+ ASSERT_EQ(2 + kTypedTestCases, unit_test->total_test_case_count());
+ EXPECT_EQ(1 + kTypedTestCases, unit_test->test_case_to_run_count());
+ EXPECT_EQ(2, unit_test->disabled_test_count());
+ EXPECT_EQ(5 + kTypedTests, unit_test->total_test_count());
+ EXPECT_EQ(3 + kTypedTests, unit_test->test_to_run_count());
+
+ const TestCase** const test_cases = UnitTestHelper::GetSortedTestCases();
+
+ EXPECT_STREQ("ApiTest", test_cases[0]->name());
+ EXPECT_STREQ("DISABLED_Test", test_cases[1]->name());
+#if GTEST_HAS_TYPED_TEST
+ EXPECT_STREQ("TestCaseWithCommentTest/0", test_cases[2]->name());
+#endif // GTEST_HAS_TYPED_TEST
+
+ delete[] test_cases;
+
+ // The following lines initiate actions to verify certain methods in
+ // FinalSuccessChecker::TearDown.
+
+ // Records a test property to verify TestResult::GetTestProperty().
+ RecordProperty("key", "value");
+}
+
+TEST(ApiTest, TestCaseImmutableAccessorsWork) {
+ const TestCase* test_case = UnitTestHelper::FindTestCase("ApiTest");
+ ASSERT_TRUE(test_case != NULL);
+
+ EXPECT_STREQ("ApiTest", test_case->name());
+ EXPECT_STREQ("", test_case->comment());
+ EXPECT_TRUE(test_case->should_run());
+ EXPECT_EQ(1, test_case->disabled_test_count());
+ EXPECT_EQ(3, test_case->test_to_run_count());
+ ASSERT_EQ(4, test_case->total_test_count());
+
+ const TestInfo** tests = UnitTestHelper::GetSortedTests(test_case);
+
+ EXPECT_STREQ("DISABLED_Dummy1", tests[0]->name());
+ EXPECT_STREQ("ApiTest", tests[0]->test_case_name());
+ EXPECT_STREQ("", tests[0]->comment());
+ EXPECT_STREQ("", tests[0]->test_case_comment());
+ EXPECT_FALSE(tests[0]->should_run());
+
+ EXPECT_STREQ("TestCaseDisabledAccessorsWork", tests[1]->name());
+ EXPECT_STREQ("ApiTest", tests[1]->test_case_name());
+ EXPECT_STREQ("", tests[1]->comment());
+ EXPECT_STREQ("", tests[1]->test_case_comment());
+ EXPECT_TRUE(tests[1]->should_run());
+
+ EXPECT_STREQ("TestCaseImmutableAccessorsWork", tests[2]->name());
+ EXPECT_STREQ("ApiTest", tests[2]->test_case_name());
+ EXPECT_STREQ("", tests[2]->comment());
+ EXPECT_STREQ("", tests[2]->test_case_comment());
+ EXPECT_TRUE(tests[2]->should_run());
+
+ EXPECT_STREQ("UnitTestImmutableAccessorsWork", tests[3]->name());
+ EXPECT_STREQ("ApiTest", tests[3]->test_case_name());
+ EXPECT_STREQ("", tests[3]->comment());
+ EXPECT_STREQ("", tests[3]->test_case_comment());
+ EXPECT_TRUE(tests[3]->should_run());
+
+ delete[] tests;
+ tests = NULL;
+
+#if GTEST_HAS_TYPED_TEST
+ test_case = UnitTestHelper::FindTestCase("TestCaseWithCommentTest/0");
+ ASSERT_TRUE(test_case != NULL);
+
+ EXPECT_STREQ("TestCaseWithCommentTest/0", test_case->name());
+ EXPECT_STREQ(GetExpectedTestCaseComment().c_str(), test_case->comment());
+ EXPECT_TRUE(test_case->should_run());
+ EXPECT_EQ(0, test_case->disabled_test_count());
+ EXPECT_EQ(1, test_case->test_to_run_count());
+ ASSERT_EQ(1, test_case->total_test_count());
+
+ tests = UnitTestHelper::GetSortedTests(test_case);
+
+ EXPECT_STREQ("Dummy", tests[0]->name());
+ EXPECT_STREQ("TestCaseWithCommentTest/0", tests[0]->test_case_name());
+ EXPECT_STREQ("", tests[0]->comment());
+ EXPECT_STREQ(GetExpectedTestCaseComment().c_str(),
+ tests[0]->test_case_comment());
+ EXPECT_TRUE(tests[0]->should_run());
+
+ delete[] tests;
+#endif // GTEST_HAS_TYPED_TEST
+}
+
+TEST(ApiTest, TestCaseDisabledAccessorsWork) {
+ const TestCase* test_case = UnitTestHelper::FindTestCase("DISABLED_Test");
+ ASSERT_TRUE(test_case != NULL);
+
+ EXPECT_STREQ("DISABLED_Test", test_case->name());
+ EXPECT_STREQ("", test_case->comment());
+ EXPECT_FALSE(test_case->should_run());
+ EXPECT_EQ(1, test_case->disabled_test_count());
+ EXPECT_EQ(0, test_case->test_to_run_count());
+ ASSERT_EQ(1, test_case->total_test_count());
+
+ const TestInfo* const test_info = test_case->GetTestInfo(0);
+ EXPECT_STREQ("Dummy2", test_info->name());
+ EXPECT_STREQ("DISABLED_Test", test_info->test_case_name());
+ EXPECT_STREQ("", test_info->comment());
+ EXPECT_STREQ("", test_info->test_case_comment());
+ EXPECT_FALSE(test_info->should_run());
+}
+
+// These two tests are here to provide support for testing
+// test_case_to_run_count, disabled_test_count, and test_to_run_count.
+TEST(ApiTest, DISABLED_Dummy1) {}
+TEST(DISABLED_Test, Dummy2) {}
+
+class FinalSuccessChecker : public Environment {
+ protected:
+ virtual void TearDown() {
+ UnitTest* unit_test = UnitTest::GetInstance();
+
+ EXPECT_EQ(1 + kTypedTestCases, unit_test->successful_test_case_count());
+ EXPECT_EQ(3 + kTypedTests, unit_test->successful_test_count());
+ EXPECT_EQ(0, unit_test->failed_test_case_count());
+ EXPECT_EQ(0, unit_test->failed_test_count());
+ EXPECT_TRUE(unit_test->Passed());
+ EXPECT_FALSE(unit_test->Failed());
+ ASSERT_EQ(2 + kTypedTestCases, unit_test->total_test_case_count());
+
+ const TestCase** const test_cases = UnitTestHelper::GetSortedTestCases();
+
+ EXPECT_STREQ("ApiTest", test_cases[0]->name());
+ EXPECT_STREQ("", test_cases[0]->comment());
+ EXPECT_TRUE(test_cases[0]->should_run());
+ EXPECT_EQ(1, test_cases[0]->disabled_test_count());
+ ASSERT_EQ(4, test_cases[0]->total_test_count());
+ EXPECT_EQ(3, test_cases[0]->successful_test_count());
+ EXPECT_EQ(0, test_cases[0]->failed_test_count());
+ EXPECT_TRUE(test_cases[0]->Passed());
+ EXPECT_FALSE(test_cases[0]->Failed());
+
+ EXPECT_STREQ("DISABLED_Test", test_cases[1]->name());
+ EXPECT_STREQ("", test_cases[1]->comment());
+ EXPECT_FALSE(test_cases[1]->should_run());
+ EXPECT_EQ(1, test_cases[1]->disabled_test_count());
+ ASSERT_EQ(1, test_cases[1]->total_test_count());
+ EXPECT_EQ(0, test_cases[1]->successful_test_count());
+ EXPECT_EQ(0, test_cases[1]->failed_test_count());
+
+#if GTEST_HAS_TYPED_TEST
+ EXPECT_STREQ("TestCaseWithCommentTest/0", test_cases[2]->name());
+ EXPECT_STREQ(GetExpectedTestCaseComment().c_str(),
+ test_cases[2]->comment());
+ EXPECT_TRUE(test_cases[2]->should_run());
+ EXPECT_EQ(0, test_cases[2]->disabled_test_count());
+ ASSERT_EQ(1, test_cases[2]->total_test_count());
+ EXPECT_EQ(1, test_cases[2]->successful_test_count());
+ EXPECT_EQ(0, test_cases[2]->failed_test_count());
+ EXPECT_TRUE(test_cases[2]->Passed());
+ EXPECT_FALSE(test_cases[2]->Failed());
+#endif // GTEST_HAS_TYPED_TEST
+
+ const TestCase* test_case = UnitTestHelper::FindTestCase("ApiTest");
+ const TestInfo** tests = UnitTestHelper::GetSortedTests(test_case);
+ EXPECT_STREQ("DISABLED_Dummy1", tests[0]->name());
+ EXPECT_STREQ("ApiTest", tests[0]->test_case_name());
+ EXPECT_FALSE(tests[0]->should_run());
+
+ EXPECT_STREQ("TestCaseDisabledAccessorsWork", tests[1]->name());
+ EXPECT_STREQ("ApiTest", tests[1]->test_case_name());
+ EXPECT_STREQ("", tests[1]->comment());
+ EXPECT_STREQ("", tests[1]->test_case_comment());
+ EXPECT_TRUE(tests[1]->should_run());
+ EXPECT_TRUE(tests[1]->result()->Passed());
+ EXPECT_EQ(0, tests[1]->result()->test_property_count());
+
+ EXPECT_STREQ("TestCaseImmutableAccessorsWork", tests[2]->name());
+ EXPECT_STREQ("ApiTest", tests[2]->test_case_name());
+ EXPECT_STREQ("", tests[2]->comment());
+ EXPECT_STREQ("", tests[2]->test_case_comment());
+ EXPECT_TRUE(tests[2]->should_run());
+ EXPECT_TRUE(tests[2]->result()->Passed());
+ EXPECT_EQ(0, tests[2]->result()->test_property_count());
+
+ EXPECT_STREQ("UnitTestImmutableAccessorsWork", tests[3]->name());
+ EXPECT_STREQ("ApiTest", tests[3]->test_case_name());
+ EXPECT_STREQ("", tests[3]->comment());
+ EXPECT_STREQ("", tests[3]->test_case_comment());
+ EXPECT_TRUE(tests[3]->should_run());
+ EXPECT_TRUE(tests[3]->result()->Passed());
+ EXPECT_EQ(1, tests[3]->result()->test_property_count());
+ const TestProperty& property = tests[3]->result()->GetTestProperty(0);
+ EXPECT_STREQ("key", property.key());
+ EXPECT_STREQ("value", property.value());
+
+ delete[] tests;
+
+#if GTEST_HAS_TYPED_TEST
+ test_case = UnitTestHelper::FindTestCase("TestCaseWithCommentTest/0");
+ tests = UnitTestHelper::GetSortedTests(test_case);
+
+ EXPECT_STREQ("Dummy", tests[0]->name());
+ EXPECT_STREQ("TestCaseWithCommentTest/0", tests[0]->test_case_name());
+ EXPECT_STREQ("", tests[0]->comment());
+ EXPECT_STREQ(GetExpectedTestCaseComment().c_str(),
+ tests[0]->test_case_comment());
+ EXPECT_TRUE(tests[0]->should_run());
+ EXPECT_TRUE(tests[0]->result()->Passed());
+ EXPECT_EQ(0, tests[0]->result()->test_property_count());
+
+ delete[] tests;
+#endif // GTEST_HAS_TYPED_TEST
+ delete[] test_cases;
+ }
+};
+
+} // namespace internal
+} // namespace testing
+
+int main(int argc, char **argv) {
+ InitGoogleTest(&argc, argv);
+
+ AddGlobalTestEnvironment(new testing::internal::FinalSuccessChecker());
+
+ return RUN_ALL_TESTS();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_all_test.cc b/Source/ThirdParty/gtest/test/gtest_all_test.cc
new file mode 100644
index 000000000..e1edb08e3
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_all_test.cc
@@ -0,0 +1,48 @@
+// Copyright 2009, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+//
+// Tests for Google C++ Testing Framework (Google Test)
+//
+// Sometimes it's desirable to build most of Google Test's own tests
+// by compiling a single file. This file serves this purpose.
+#include "test/gtest-filepath_test.cc"
+#include "test/gtest-linked_ptr_test.cc"
+#include "test/gtest-message_test.cc"
+#include "test/gtest-options_test.cc"
+#include "test/gtest-port_test.cc"
+#include "test/gtest_pred_impl_unittest.cc"
+#include "test/gtest_prod_test.cc"
+#include "test/gtest-test-part_test.cc"
+#include "test/gtest-typed-test_test.cc"
+#include "test/gtest-typed-test2_test.cc"
+#include "test/gtest_unittest.cc"
+#include "test/production.cc"
+#include "src/gtest_main.cc"
diff --git a/Source/ThirdParty/gtest/test/gtest_break_on_failure_unittest.py b/Source/ThirdParty/gtest/test/gtest_break_on_failure_unittest.py
new file mode 100755
index 000000000..c81918331
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_break_on_failure_unittest.py
@@ -0,0 +1,218 @@
+#!/usr/bin/env python
+#
+# Copyright 2006, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Unit test for Google Test's break-on-failure mode.
+
+A user can ask Google Test to seg-fault when an assertion fails, using
+either the GTEST_BREAK_ON_FAILURE environment variable or the
+--gtest_break_on_failure flag. This script tests such functionality
+by invoking gtest_break_on_failure_unittest_ (a program written with
+Google Test) with different environments and command line flags.
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import gtest_test_utils
+import os
+import sys
+
+
+# Constants.
+
+IS_WINDOWS = os.name == 'nt'
+
+# The environment variable for enabling/disabling the break-on-failure mode.
+BREAK_ON_FAILURE_ENV_VAR = 'GTEST_BREAK_ON_FAILURE'
+
+# The command line flag for enabling/disabling the break-on-failure mode.
+BREAK_ON_FAILURE_FLAG = 'gtest_break_on_failure'
+
+# The environment variable for enabling/disabling the throw-on-failure mode.
+THROW_ON_FAILURE_ENV_VAR = 'GTEST_THROW_ON_FAILURE'
+
+# The environment variable for enabling/disabling the catch-exceptions mode.
+CATCH_EXCEPTIONS_ENV_VAR = 'GTEST_CATCH_EXCEPTIONS'
+
+# Path to the gtest_break_on_failure_unittest_ program.
+EXE_PATH = gtest_test_utils.GetTestExecutablePath(
+ 'gtest_break_on_failure_unittest_')
+
+
+# Utilities.
+
+
+environ = os.environ.copy()
+
+
+def SetEnvVar(env_var, value):
+ """Sets an environment variable to a given value; unsets it when the
+ given value is None.
+ """
+
+ if value is not None:
+ environ[env_var] = value
+ elif env_var in environ:
+ del environ[env_var]
+
+
+def Run(command):
+ """Runs a command; returns 1 if it was killed by a signal, or 0 otherwise."""
+
+ p = gtest_test_utils.Subprocess(command, env=environ)
+ if p.terminated_by_signal:
+ return 1
+ else:
+ return 0
+
+
+# The tests.
+
+
+class GTestBreakOnFailureUnitTest(gtest_test_utils.TestCase):
+ """Tests using the GTEST_BREAK_ON_FAILURE environment variable or
+ the --gtest_break_on_failure flag to turn assertion failures into
+ segmentation faults.
+ """
+
+ def RunAndVerify(self, env_var_value, flag_value, expect_seg_fault):
+ """Runs gtest_break_on_failure_unittest_ and verifies that it does
+ (or does not) have a seg-fault.
+
+ Args:
+ env_var_value: value of the GTEST_BREAK_ON_FAILURE environment
+ variable; None if the variable should be unset.
+ flag_value: value of the --gtest_break_on_failure flag;
+ None if the flag should not be present.
+ expect_seg_fault: 1 if the program is expected to generate a seg-fault;
+ 0 otherwise.
+ """
+
+ SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, env_var_value)
+
+ if env_var_value is None:
+ env_var_value_msg = ' is not set'
+ else:
+ env_var_value_msg = '=' + env_var_value
+
+ if flag_value is None:
+ flag = ''
+ elif flag_value == '0':
+ flag = '--%s=0' % BREAK_ON_FAILURE_FLAG
+ else:
+ flag = '--%s' % BREAK_ON_FAILURE_FLAG
+
+ command = [EXE_PATH]
+ if flag:
+ command.append(flag)
+
+ if expect_seg_fault:
+ should_or_not = 'should'
+ else:
+ should_or_not = 'should not'
+
+ has_seg_fault = Run(command)
+
+ SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, None)
+
+ msg = ('when %s%s, an assertion failure in "%s" %s cause a seg-fault.' %
+ (BREAK_ON_FAILURE_ENV_VAR, env_var_value_msg, ' '.join(command),
+ should_or_not))
+ self.assert_(has_seg_fault == expect_seg_fault, msg)
+
+ def testDefaultBehavior(self):
+ """Tests the behavior of the default mode."""
+
+ self.RunAndVerify(env_var_value=None,
+ flag_value=None,
+ expect_seg_fault=0)
+
+ def testEnvVar(self):
+ """Tests using the GTEST_BREAK_ON_FAILURE environment variable."""
+
+ self.RunAndVerify(env_var_value='0',
+ flag_value=None,
+ expect_seg_fault=0)
+ self.RunAndVerify(env_var_value='1',
+ flag_value=None,
+ expect_seg_fault=1)
+
+ def testFlag(self):
+ """Tests using the --gtest_break_on_failure flag."""
+
+ self.RunAndVerify(env_var_value=None,
+ flag_value='0',
+ expect_seg_fault=0)
+ self.RunAndVerify(env_var_value=None,
+ flag_value='1',
+ expect_seg_fault=1)
+
+ def testFlagOverridesEnvVar(self):
+ """Tests that the flag overrides the environment variable."""
+
+ self.RunAndVerify(env_var_value='0',
+ flag_value='0',
+ expect_seg_fault=0)
+ self.RunAndVerify(env_var_value='0',
+ flag_value='1',
+ expect_seg_fault=1)
+ self.RunAndVerify(env_var_value='1',
+ flag_value='0',
+ expect_seg_fault=0)
+ self.RunAndVerify(env_var_value='1',
+ flag_value='1',
+ expect_seg_fault=1)
+
+ def testBreakOnFailureOverridesThrowOnFailure(self):
+ """Tests that gtest_break_on_failure overrides gtest_throw_on_failure."""
+
+ SetEnvVar(THROW_ON_FAILURE_ENV_VAR, '1')
+ try:
+ self.RunAndVerify(env_var_value=None,
+ flag_value='1',
+ expect_seg_fault=1)
+ finally:
+ SetEnvVar(THROW_ON_FAILURE_ENV_VAR, None)
+
+ if IS_WINDOWS:
+ def testCatchExceptionsDoesNotInterfere(self):
+ """Tests that gtest_catch_exceptions doesn't interfere."""
+
+ SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, '1')
+ try:
+ self.RunAndVerify(env_var_value='1',
+ flag_value='1',
+ expect_seg_fault=1)
+ finally:
+ SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, None)
+
+
+if __name__ == '__main__':
+ gtest_test_utils.Main()
diff --git a/Source/ThirdParty/gtest/test/gtest_break_on_failure_unittest_.cc b/Source/ThirdParty/gtest/test/gtest_break_on_failure_unittest_.cc
new file mode 100644
index 000000000..d28d1d3da
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_break_on_failure_unittest_.cc
@@ -0,0 +1,86 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// Unit test for Google Test's break-on-failure mode.
+//
+// A user can ask Google Test to seg-fault when an assertion fails, using
+// either the GTEST_BREAK_ON_FAILURE environment variable or the
+// --gtest_break_on_failure flag. This file is used for testing such
+// functionality.
+//
+// This program will be invoked from a Python unit test. It is
+// expected to fail. Don't run it directly.
+
+#include <gtest/gtest.h>
+
+#if GTEST_OS_WINDOWS
+#include <windows.h>
+#include <stdlib.h>
+#endif
+
+namespace {
+
+// A test that's expected to fail.
+TEST(Foo, Bar) {
+ EXPECT_EQ(2, 3);
+}
+
+#if GTEST_HAS_SEH && !GTEST_OS_WINDOWS_MOBILE
+// On Windows Mobile global exception handlers are not supported.
+LONG WINAPI ExitWithExceptionCode(
+ struct _EXCEPTION_POINTERS* exception_pointers) {
+ exit(exception_pointers->ExceptionRecord->ExceptionCode);
+}
+#endif
+
+} // namespace
+
+int main(int argc, char **argv) {
+#if GTEST_OS_WINDOWS
+ // Suppresses display of the Windows error dialog upon encountering
+ // a general protection fault (segment violation).
+ SetErrorMode(SEM_NOGPFAULTERRORBOX | SEM_FAILCRITICALERRORS);
+
+#if !GTEST_OS_WINDOWS_MOBILE
+ // The default unhandled exception filter does not always exit
+ // with the exception code as exit code - for example it exits with
+ // 0 for EXCEPTION_ACCESS_VIOLATION and 1 for EXCEPTION_BREAKPOINT
+ // if the application is compiled in debug mode. Thus we use our own
+ // filter which always exits with the exception code for unhandled
+ // exceptions.
+ SetUnhandledExceptionFilter(ExitWithExceptionCode);
+#endif
+#endif
+
+ testing::InitGoogleTest(&argc, argv);
+
+ return RUN_ALL_TESTS();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_color_test.py b/Source/ThirdParty/gtest/test/gtest_color_test.py
new file mode 100755
index 000000000..d02a53ed8
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_color_test.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+#
+# Copyright 2008, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Verifies that Google Test correctly determines whether to use colors."""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import gtest_test_utils
+
+
+IS_WINDOWS = os.name == 'nt'
+
+COLOR_ENV_VAR = 'GTEST_COLOR'
+COLOR_FLAG = 'gtest_color'
+COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_color_test_')
+
+
+def SetEnvVar(env_var, value):
+ """Sets the env variable to 'value'; unsets it when 'value' is None."""
+
+ if value is not None:
+ os.environ[env_var] = value
+ elif env_var in os.environ:
+ del os.environ[env_var]
+
+
+def UsesColor(term, color_env_var, color_flag):
+ """Runs gtest_color_test_ and returns its exit code."""
+
+ SetEnvVar('TERM', term)
+ SetEnvVar(COLOR_ENV_VAR, color_env_var)
+
+ if color_flag is None:
+ args = []
+ else:
+ args = ['--%s=%s' % (COLOR_FLAG, color_flag)]
+ p = gtest_test_utils.Subprocess([COMMAND] + args)
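+  # gtest_color_test_ prints "YES" and exits with 1 when Google Test decides
+  # to use colors, and prints "NO" and exits with 0 otherwise, so the exit
+  # code doubles as the return value here.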
+ return not p.exited or p.exit_code
+
+
+class GTestColorTest(gtest_test_utils.TestCase):
+ def testNoEnvVarNoFlag(self):
+ """Tests the case when there's neither GTEST_COLOR nor --gtest_color."""
+
+ if not IS_WINDOWS:
+ self.assert_(not UsesColor('dumb', None, None))
+ self.assert_(not UsesColor('emacs', None, None))
+ self.assert_(not UsesColor('xterm-mono', None, None))
+ self.assert_(not UsesColor('unknown', None, None))
+ self.assert_(not UsesColor(None, None, None))
+ self.assert_(UsesColor('linux', None, None))
+ self.assert_(UsesColor('cygwin', None, None))
+ self.assert_(UsesColor('xterm', None, None))
+ self.assert_(UsesColor('xterm-color', None, None))
+ self.assert_(UsesColor('xterm-256color', None, None))
+
+ def testFlagOnly(self):
+ """Tests the case when there's --gtest_color but not GTEST_COLOR."""
+
+ self.assert_(not UsesColor('dumb', None, 'no'))
+ self.assert_(not UsesColor('xterm-color', None, 'no'))
+ if not IS_WINDOWS:
+ self.assert_(not UsesColor('emacs', None, 'auto'))
+ self.assert_(UsesColor('xterm', None, 'auto'))
+ self.assert_(UsesColor('dumb', None, 'yes'))
+ self.assert_(UsesColor('xterm', None, 'yes'))
+
+ def testEnvVarOnly(self):
+ """Tests the case when there's GTEST_COLOR but not --gtest_color."""
+
+ self.assert_(not UsesColor('dumb', 'no', None))
+ self.assert_(not UsesColor('xterm-color', 'no', None))
+ if not IS_WINDOWS:
+ self.assert_(not UsesColor('dumb', 'auto', None))
+ self.assert_(UsesColor('xterm-color', 'auto', None))
+ self.assert_(UsesColor('dumb', 'yes', None))
+ self.assert_(UsesColor('xterm-color', 'yes', None))
+
+ def testEnvVarAndFlag(self):
+ """Tests the case when there are both GTEST_COLOR and --gtest_color."""
+
+ self.assert_(not UsesColor('xterm-color', 'no', 'no'))
+ self.assert_(UsesColor('dumb', 'no', 'yes'))
+ self.assert_(UsesColor('xterm-color', 'no', 'auto'))
+
+ def testAliasesOfYesAndNo(self):
+ """Tests using aliases in specifying --gtest_color."""
+
+ self.assert_(UsesColor('dumb', None, 'true'))
+ self.assert_(UsesColor('dumb', None, 'YES'))
+ self.assert_(UsesColor('dumb', None, 'T'))
+ self.assert_(UsesColor('dumb', None, '1'))
+
+ self.assert_(not UsesColor('xterm', None, 'f'))
+ self.assert_(not UsesColor('xterm', None, 'false'))
+ self.assert_(not UsesColor('xterm', None, '0'))
+ self.assert_(not UsesColor('xterm', None, 'unknown'))
+
+
+if __name__ == '__main__':
+ gtest_test_utils.Main()
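The expectations above pin down the color rules this suite checks: an explicit "yes"/"true"/"t"/"1" (from either GTEST_COLOR or --gtest_color, with the flag taking precedence) forces color, any other explicit value disables it, and "auto" (the default) uses color only when TERM names a color-capable terminal or the platform is Windows. A minimal Python sketch of that decision, written as an approximation of what the tests encode rather than gtest's actual ShouldUseColor():

# Approximation of the decision the tests above encode; not gtest's real
# ShouldUseColor().  'gtest_color' is whichever of --gtest_color and
# GTEST_COLOR wins (the flag overrides the environment variable).
COLOR_TERMS = frozenset(
    ['xterm', 'xterm-color', 'xterm-256color', 'linux', 'cygwin'])

def should_use_color(term, gtest_color, is_windows=False):
    value = (gtest_color or 'auto').lower()
    if value in ('yes', 'true', 't', '1'):
        return True
    if value == 'auto':
        return is_windows or term in COLOR_TERMS
    return False  # 'no', 'false', 'f', '0', and anything unrecognized

assert should_use_color('dumb', 'yes')
assert not should_use_color('xterm-color', 'no')
assert should_use_color('xterm-256color', None)
assert not should_use_color('emacs', 'auto')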
diff --git a/Source/ThirdParty/gtest/test/gtest_color_test_.cc b/Source/ThirdParty/gtest/test/gtest_color_test_.cc
new file mode 100644
index 000000000..58d377c9b
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_color_test_.cc
@@ -0,0 +1,71 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// A helper program for testing how Google Test determines whether to use
+// colors in the output. It prints "YES" and returns 1 if Google Test
+// decides to use colors, and prints "NO" and returns 0 otherwise.
+
+#include <stdio.h>
+
+#include <gtest/gtest.h>
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+using testing::internal::ShouldUseColor;
+
+// The purpose of this is to ensure that the UnitTest singleton is
+// created before main() is entered, and thus that ShouldUseColor()
+// works the same way as in a real Google-Test-based test. We don't actually
+// run the TEST itself.
+TEST(GTestColorTest, Dummy) {
+}
+
+int main(int argc, char** argv) {
+ testing::InitGoogleTest(&argc, argv);
+
+ if (ShouldUseColor(true)) {
+ // Google Test decides to use colors in the output (assuming it
+ // goes to a TTY).
+ printf("YES\n");
+ return 1;
+ } else {
+ // Google Test decides not to use colors in the output.
+ printf("NO\n");
+ return 0;
+ }
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_env_var_test.py b/Source/ThirdParty/gtest/test/gtest_env_var_test.py
new file mode 100755
index 000000000..bcc0bfd55
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_env_var_test.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+#
+# Copyright 2008, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Verifies that Google Test correctly parses environment variables."""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import gtest_test_utils
+
+
+IS_WINDOWS = os.name == 'nt'
+IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
+
+COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
+
+environ = os.environ.copy()
+
+
+def AssertEq(expected, actual):
+ if expected != actual:
+ print 'Expected: %s' % (expected,)
+ print ' Actual: %s' % (actual,)
+ raise AssertionError
+
+
+def SetEnvVar(env_var, value):
+ """Sets the env variable to 'value'; unsets it when 'value' is None."""
+
+ if value is not None:
+ environ[env_var] = value
+ elif env_var in environ:
+ del environ[env_var]
+
+
+def GetFlag(flag):
+ """Runs gtest_env_var_test_ and returns its output."""
+
+ args = [COMMAND]
+ if flag is not None:
+ args += [flag]
+ return gtest_test_utils.Subprocess(args, env=environ).output
+
+
+def TestFlag(flag, test_val, default_val):
+ """Verifies that the given flag is affected by the corresponding env var."""
+
+ env_var = 'GTEST_' + flag.upper()
+ SetEnvVar(env_var, test_val)
+ AssertEq(test_val, GetFlag(flag))
+ SetEnvVar(env_var, None)
+ AssertEq(default_val, GetFlag(flag))
+
+
+class GTestEnvVarTest(gtest_test_utils.TestCase):
+ def testEnvVarAffectsFlag(self):
+ """Tests that environment variable should affect the corresponding flag."""
+
+ TestFlag('break_on_failure', '1', '0')
+ TestFlag('color', 'yes', 'auto')
+ TestFlag('filter', 'FooTest.Bar', '*')
+ TestFlag('output', 'xml:tmp/foo.xml', '')
+ TestFlag('print_time', '0', '1')
+ TestFlag('repeat', '999', '1')
+ TestFlag('throw_on_failure', '1', '0')
+ TestFlag('death_test_style', 'threadsafe', 'fast')
+
+ if IS_WINDOWS:
+ TestFlag('catch_exceptions', '1', '0')
+
+ if IS_LINUX:
+ TestFlag('death_test_use_fork', '1', '0')
+ TestFlag('stack_trace_depth', '0', '100')
+
+
+if __name__ == '__main__':
+ gtest_test_utils.Main()
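The mapping this script exercises is mechanical: every --gtest_foo flag has a GTEST_FOO environment variable, derived by upper-casing the flag name and prefixing GTEST_, and the defaults listed in testEnvVarAffectsFlag apply when the variable is unset. A small sketch of that convention (the helper names below are illustrative, not part of gtest):

import os

def env_var_for_flag(flag):
    # 'break_on_failure' -> 'GTEST_BREAK_ON_FAILURE', matching TestFlag() above.
    return 'GTEST_' + flag.upper()

# Defaults that testEnvVarAffectsFlag expects when the variable is unset.
EXPECTED_DEFAULTS = {
    'break_on_failure': '0',
    'color': 'auto',
    'filter': '*',
    'output': '',
    'print_time': '1',
    'repeat': '1',
    'throw_on_failure': '0',
    'death_test_style': 'fast',
}

# Example: run a gtest binary with --gtest_repeat effectively set to 999.
env = os.environ.copy()
env[env_var_for_flag('repeat')] = '999'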
diff --git a/Source/ThirdParty/gtest/test/gtest_env_var_test_.cc b/Source/ThirdParty/gtest/test/gtest_env_var_test_.cc
new file mode 100644
index 000000000..f7c78fcf3
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_env_var_test_.cc
@@ -0,0 +1,126 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// A helper program for testing that Google Test parses the environment
+// variables correctly.
+
+#include <gtest/gtest.h>
+
+#include <iostream>
+
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+using ::std::cout;
+
+namespace testing {
+
+// The purpose of this is to make the test more realistic by ensuring
+// that the UnitTest singleton is created before main() is entered.
+// We don't actually run the TEST itself.
+TEST(GTestEnvVarTest, Dummy) {
+}
+
+void PrintFlag(const char* flag) {
+ if (strcmp(flag, "break_on_failure") == 0) {
+ cout << GTEST_FLAG(break_on_failure);
+ return;
+ }
+
+ if (strcmp(flag, "catch_exceptions") == 0) {
+ cout << GTEST_FLAG(catch_exceptions);
+ return;
+ }
+
+ if (strcmp(flag, "color") == 0) {
+ cout << GTEST_FLAG(color);
+ return;
+ }
+
+ if (strcmp(flag, "death_test_style") == 0) {
+ cout << GTEST_FLAG(death_test_style);
+ return;
+ }
+
+ if (strcmp(flag, "death_test_use_fork") == 0) {
+ cout << GTEST_FLAG(death_test_use_fork);
+ return;
+ }
+
+ if (strcmp(flag, "filter") == 0) {
+ cout << GTEST_FLAG(filter);
+ return;
+ }
+
+ if (strcmp(flag, "output") == 0) {
+ cout << GTEST_FLAG(output);
+ return;
+ }
+
+ if (strcmp(flag, "print_time") == 0) {
+ cout << GTEST_FLAG(print_time);
+ return;
+ }
+
+ if (strcmp(flag, "repeat") == 0) {
+ cout << GTEST_FLAG(repeat);
+ return;
+ }
+
+ if (strcmp(flag, "stack_trace_depth") == 0) {
+ cout << GTEST_FLAG(stack_trace_depth);
+ return;
+ }
+
+ if (strcmp(flag, "throw_on_failure") == 0) {
+ cout << GTEST_FLAG(throw_on_failure);
+ return;
+ }
+
+ cout << "Invalid flag name " << flag
+ << ". Valid names are break_on_failure, color, filter, etc.\n";
+ exit(1);
+}
+
+} // namespace testing
+
+int main(int argc, char** argv) {
+ testing::InitGoogleTest(&argc, argv);
+
+ if (argc != 2) {
+ cout << "Usage: gtest_env_var_test_ NAME_OF_FLAG\n";
+ return 1;
+ }
+
+ testing::PrintFlag(argv[1]);
+ return 0;
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_environment_test.cc b/Source/ThirdParty/gtest/test/gtest_environment_test.cc
new file mode 100644
index 000000000..c9392614d
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_environment_test.cc
@@ -0,0 +1,186 @@
+// Copyright 2007, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+//
+// Tests using global test environments.
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <gtest/gtest.h>
+
+namespace testing {
+GTEST_DECLARE_string_(filter);
+}
+
+namespace {
+
+enum FailureType {
+ NO_FAILURE, NON_FATAL_FAILURE, FATAL_FAILURE
+};
+
+// For testing using global test environments.
+class MyEnvironment : public testing::Environment {
+ public:
+ MyEnvironment() { Reset(); }
+
+ // Depending on the value of failure_in_set_up_, SetUp() will
+ // generate a non-fatal failure, generate a fatal failure, or
+ // succeed.
+ virtual void SetUp() {
+ set_up_was_run_ = true;
+
+ switch (failure_in_set_up_) {
+ case NON_FATAL_FAILURE:
+ ADD_FAILURE() << "Expected non-fatal failure in global set-up.";
+ break;
+ case FATAL_FAILURE:
+ FAIL() << "Expected fatal failure in global set-up.";
+ break;
+ default:
+ break;
+ }
+ }
+
+ // Generates a non-fatal failure.
+ virtual void TearDown() {
+ tear_down_was_run_ = true;
+ ADD_FAILURE() << "Expected non-fatal failure in global tear-down.";
+ }
+
+ // Resets the state of the environment s.t. it can be reused.
+ void Reset() {
+ failure_in_set_up_ = NO_FAILURE;
+ set_up_was_run_ = false;
+ tear_down_was_run_ = false;
+ }
+
+ // We call this function to set the type of failure SetUp() should
+ // generate.
+ void set_failure_in_set_up(FailureType type) {
+ failure_in_set_up_ = type;
+ }
+
+ // Was SetUp() run?
+ bool set_up_was_run() const { return set_up_was_run_; }
+
+ // Was TearDown() run?
+ bool tear_down_was_run() const { return tear_down_was_run_; }
+ private:
+ FailureType failure_in_set_up_;
+ bool set_up_was_run_;
+ bool tear_down_was_run_;
+};
+
+// Was the TEST run?
+bool test_was_run;
+
+// The sole purpose of this TEST is to enable us to check whether it
+// was run.
+TEST(FooTest, Bar) {
+ test_was_run = true;
+}
+
+// Prints the message and aborts the program if condition is false.
+void Check(bool condition, const char* msg) {
+ if (!condition) {
+ printf("FAILED: %s\n", msg);
+ abort();
+ }
+}
+
+// Runs the tests and returns the value of RUN_ALL_TESTS(), which is
+// non-zero iff a failure occurs.
+// The 'failure' parameter specifies the type of failure that should
+// be generated by the global set-up.
+int RunAllTests(MyEnvironment* env, FailureType failure) {
+ env->Reset();
+ env->set_failure_in_set_up(failure);
+ test_was_run = false;
+ return RUN_ALL_TESTS();
+}
+
+} // namespace
+
+int main(int argc, char **argv) {
+ testing::InitGoogleTest(&argc, argv);
+
+ // Registers a global test environment, and verifies that the
+ // registration function returns its argument.
+ MyEnvironment* const env = new MyEnvironment;
+ Check(testing::AddGlobalTestEnvironment(env) == env,
+ "AddGlobalTestEnvironment() should return its argument.");
+
+ // Verifies that RUN_ALL_TESTS() runs the tests when the global
+ // set-up is successful.
+ Check(RunAllTests(env, NO_FAILURE) != 0,
+ "RUN_ALL_TESTS() should return non-zero, as the global tear-down "
+ "should generate a failure.");
+ Check(test_was_run,
+ "The tests should run, as the global set-up should generate no "
+ "failure");
+ Check(env->tear_down_was_run(),
+ "The global tear-down should run, as the global set-up was run.");
+
+ // Verifies that RUN_ALL_TESTS() runs the tests when the global
+ // set-up generates no fatal failure.
+ Check(RunAllTests(env, NON_FATAL_FAILURE) != 0,
+ "RUN_ALL_TESTS() should return non-zero, as both the global set-up "
+ "and the global tear-down should generate a non-fatal failure.");
+ Check(test_was_run,
+ "The tests should run, as the global set-up should generate no "
+ "fatal failure.");
+ Check(env->tear_down_was_run(),
+ "The global tear-down should run, as the global set-up was run.");
+
+ // Verifies that RUN_ALL_TESTS() runs no test when the global set-up
+ // generates a fatal failure.
+ Check(RunAllTests(env, FATAL_FAILURE) != 0,
+ "RUN_ALL_TESTS() should return non-zero, as the global set-up "
+ "should generate a fatal failure.");
+ Check(!test_was_run,
+ "The tests should not run, as the global set-up should generate "
+ "a fatal failure.");
+ Check(env->tear_down_was_run(),
+ "The global tear-down should run, as the global set-up was run.");
+
+ // Verifies that RUN_ALL_TESTS() doesn't do global set-up or
+ // tear-down when there is no test to run.
+ testing::GTEST_FLAG(filter) = "-*";
+ Check(RunAllTests(env, NO_FAILURE) == 0,
+ "RUN_ALL_TESTS() should return zero, as there is no test to run.");
+ Check(!env->set_up_was_run(),
+ "The global set-up should not run, as there is no test to run.");
+ Check(!env->tear_down_was_run(),
+ "The global tear-down should not run, "
+ "as the global set-up was not run.");
+
+ printf("PASS\n");
+ return 0;
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_filter_unittest.py b/Source/ThirdParty/gtest/test/gtest_filter_unittest.py
new file mode 100755
index 000000000..0d1a77005
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_filter_unittest.py
@@ -0,0 +1,633 @@
+#!/usr/bin/env python
+#
+# Copyright 2005 Google Inc. All Rights Reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Unit test for Google Test test filters.
+
+A user can specify which test(s) in a Google Test program to run via either
+the GTEST_FILTER environment variable or the --gtest_filter flag.
+This script tests such functionality by invoking
+gtest_filter_unittest_ (a program written with Google Test) with different
+environments and command line flags.
+
+Note that test sharding may also influence which tests are filtered. Therefore,
+we test that here also.
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import re
+import sets
+import sys
+
+import gtest_test_utils
+
+# Constants.
+
+# Checks if this platform can pass empty environment variables to child
+# processes. We set an env variable to an empty string and invoke a python
+# script in a subprocess to print whether the variable is STILL in
+# os.environ. We then use 'eval' to parse the child's output so that an
+# exception is thrown if the input is anything other than 'True' or 'False'.
+os.environ['EMPTY_VAR'] = ''
+child = gtest_test_utils.Subprocess(
+ [sys.executable, '-c', 'import os; print \'EMPTY_VAR\' in os.environ'])
+CAN_PASS_EMPTY_ENV = eval(child.output)
+
+
+# Check if this platform can unset environment variables in child processes.
+# We set an env variable to a non-empty string, unset it, and invoke
+# a python script in a subprocess to print whether the variable
+# is NO LONGER in os.environ.
+# We use 'eval' to parse the child's output so that an exception
+# is thrown if the input is neither 'True' nor 'False'.
+os.environ['UNSET_VAR'] = 'X'
+del os.environ['UNSET_VAR']
+child = gtest_test_utils.Subprocess(
+ [sys.executable, '-c', 'import os; print \'UNSET_VAR\' not in os.environ'])
+CAN_UNSET_ENV = eval(child.output)
+
+
+# Checks if we should test with an empty filter. This doesn't
+# make sense on platforms that cannot pass empty env variables (Win32)
+# and on platforms that cannot unset variables (since we cannot tell
+# the difference between "" and NULL -- Borland and Solaris < 5.10)
+CAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV)
+
+
+# The environment variable for specifying the test filters.
+FILTER_ENV_VAR = 'GTEST_FILTER'
+
+# The environment variables for test sharding.
+TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
+SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
+SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'
+
+# The command line flag for specifying the test filters.
+FILTER_FLAG = 'gtest_filter'
+
+# The command line flag for including disabled tests.
+ALSO_RUN_DISABLED_TESTS_FLAG = 'gtest_also_run_disabled_tests'
+
+# Command to run the gtest_filter_unittest_ program.
+COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_filter_unittest_')
+
+# Regex for determining whether parameterized tests are enabled in the binary.
+PARAM_TEST_REGEX = re.compile(r'/ParamTest')
+
+# Regex for parsing test case names from Google Test's output.
+TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)')
+
+# Regex for parsing test names from Google Test's output.
+TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)')
+
+# The command line flag to tell Google Test to output the list of tests it
+# will run.
+LIST_TESTS_FLAG = '--gtest_list_tests'
+
+# Indicates whether Google Test supports death tests.
+SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
+ [COMMAND, LIST_TESTS_FLAG]).output
+
+# Full names of all tests in gtest_filter_unittests_.
+PARAM_TESTS = [
+ 'SeqP/ParamTest.TestX/0',
+ 'SeqP/ParamTest.TestX/1',
+ 'SeqP/ParamTest.TestY/0',
+ 'SeqP/ParamTest.TestY/1',
+ 'SeqQ/ParamTest.TestX/0',
+ 'SeqQ/ParamTest.TestX/1',
+ 'SeqQ/ParamTest.TestY/0',
+ 'SeqQ/ParamTest.TestY/1',
+ ]
+
+DISABLED_TESTS = [
+ 'BarTest.DISABLED_TestFour',
+ 'BarTest.DISABLED_TestFive',
+ 'BazTest.DISABLED_TestC',
+ 'DISABLED_FoobarTest.Test1',
+ 'DISABLED_FoobarTest.DISABLED_Test2',
+ 'DISABLED_FoobarbazTest.TestA',
+ ]
+
+if SUPPORTS_DEATH_TESTS:
+ DEATH_TESTS = [
+ 'HasDeathTest.Test1',
+ 'HasDeathTest.Test2',
+ ]
+else:
+ DEATH_TESTS = []
+
+# All the non-disabled tests.
+ACTIVE_TESTS = [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+
+ 'BarTest.TestOne',
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+
+ 'BazTest.TestOne',
+ 'BazTest.TestA',
+ 'BazTest.TestB',
+ ] + DEATH_TESTS + PARAM_TESTS
+
+param_tests_present = None
+
+# Utilities.
+
+environ = os.environ.copy()
+
+
+def SetEnvVar(env_var, value):
+ """Sets the env variable to 'value'; unsets it when 'value' is None."""
+
+ if value is not None:
+ environ[env_var] = value
+ elif env_var in environ:
+ del environ[env_var]
+
+
+def RunAndReturnOutput(args = None):
+ """Runs the test program and returns its output."""
+
+ return gtest_test_utils.Subprocess([COMMAND] + (args or []),
+ env=environ).output
+
+
+def RunAndExtractTestList(args = None):
+ """Runs the test program and returns its exit code and a list of tests run."""
+
+ p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ)
+ tests_run = []
+ test_case = ''
+ test = ''
+ for line in p.output.split('\n'):
+ match = TEST_CASE_REGEX.match(line)
+ if match is not None:
+ test_case = match.group(1)
+ else:
+ match = TEST_REGEX.match(line)
+ if match is not None:
+ test = match.group(1)
+ tests_run.append(test_case + '.' + test)
+ return (tests_run, p.exit_code)
+
+
+def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
+ """Runs the given function and arguments in a modified environment."""
+ try:
+ original_env = environ.copy()
+ environ.update(extra_env)
+ return function(*args, **kwargs)
+ finally:
+ environ.clear()
+ environ.update(original_env)
+
+
+def RunWithSharding(total_shards, shard_index, command):
+ """Runs a test program shard and returns exit code and a list of tests run."""
+
+ extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index),
+ TOTAL_SHARDS_ENV_VAR: str(total_shards)}
+ return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command)
+
+# The unit test.
+
+
+class GTestFilterUnitTest(gtest_test_utils.TestCase):
+ """Tests the env variable or the command line flag to filter tests."""
+
+ # Utilities.
+
+ def AssertSetEqual(self, lhs, rhs):
+ """Asserts that two sets are equal."""
+
+ for elem in lhs:
+ self.assert_(elem in rhs, '%s in %s' % (elem, rhs))
+
+ for elem in rhs:
+ self.assert_(elem in lhs, '%s in %s' % (elem, lhs))
+
+ def AssertPartitionIsValid(self, set_var, list_of_sets):
+ """Asserts that list_of_sets is a valid partition of set_var."""
+
+ full_partition = []
+ for slice_var in list_of_sets:
+ full_partition.extend(slice_var)
+ self.assertEqual(len(set_var), len(full_partition))
+ self.assertEqual(sets.Set(set_var), sets.Set(full_partition))
+
+ def AdjustForParameterizedTests(self, tests_to_run):
+ """Adjust tests_to_run in case value parameterized tests are disabled."""
+
+ global param_tests_present
+ if not param_tests_present:
+ return list(sets.Set(tests_to_run) - sets.Set(PARAM_TESTS))
+ else:
+ return tests_to_run
+
+ def RunAndVerify(self, gtest_filter, tests_to_run):
+ """Checks that the binary runs correct set of tests for a given filter."""
+
+ tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
+
+ # First, tests using the environment variable.
+
+ # Windows removes empty variables from the environment when passing it
+ # to a new process. This means it is impossible to pass an empty filter
+ # into a process using the environment variable. However, we can still
+ # test the case when the variable is not supplied (i.e., gtest_filter is
+ # None).
+ # pylint: disable-msg=C6403
+ if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
+ SetEnvVar(FILTER_ENV_VAR, gtest_filter)
+ tests_run = RunAndExtractTestList()[0]
+ SetEnvVar(FILTER_ENV_VAR, None)
+ self.AssertSetEqual(tests_run, tests_to_run)
+ # pylint: enable-msg=C6403
+
+ # Next, tests using the command line flag.
+
+ if gtest_filter is None:
+ args = []
+ else:
+ args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]
+
+ tests_run = RunAndExtractTestList(args)[0]
+ self.AssertSetEqual(tests_run, tests_to_run)
+
+ def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
+ args=None, check_exit_0=False):
+ """Checks that binary runs correct tests for the given filter and shard.
+
+ Runs all shards of gtest_filter_unittest_ with the given filter, and
+ verifies that the right set of tests were run. The union of tests run
+ on each shard should be identical to tests_to_run, without duplicates.
+
+ Args:
+ gtest_filter: A filter to apply to the tests.
+ total_shards: A total number of shards to split test run into.
+ tests_to_run: A set of tests expected to run.
+      args: Arguments to pass to the test binary.
+ check_exit_0: When set to a true value, make sure that all shards
+ return 0.
+ """
+
+ tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
+
+ # Windows removes empty variables from the environment when passing it
+ # to a new process. This means it is impossible to pass an empty filter
+ # into a process using the environment variable. However, we can still
+ # test the case when the variable is not supplied (i.e., gtest_filter is
+ # None).
+ # pylint: disable-msg=C6403
+ if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
+ SetEnvVar(FILTER_ENV_VAR, gtest_filter)
+ partition = []
+ for i in range(0, total_shards):
+ (tests_run, exit_code) = RunWithSharding(total_shards, i, args)
+ if check_exit_0:
+ self.assertEqual(0, exit_code)
+ partition.append(tests_run)
+
+ self.AssertPartitionIsValid(tests_to_run, partition)
+ SetEnvVar(FILTER_ENV_VAR, None)
+ # pylint: enable-msg=C6403
+
+ def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):
+ """Checks that the binary runs correct set of tests for the given filter.
+
+ Runs gtest_filter_unittest_ with the given filter, and enables
+ disabled tests. Verifies that the right set of tests were run.
+
+ Args:
+ gtest_filter: A filter to apply to the tests.
+ tests_to_run: A set of tests expected to run.
+ """
+
+ tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
+
+ # Construct the command line.
+    args = ['--%s' % ALSO_RUN_DISABLED_TESTS_FLAG]
+ if gtest_filter is not None:
+ args.append('--%s=%s' % (FILTER_FLAG, gtest_filter))
+
+ tests_run = RunAndExtractTestList(args)[0]
+ self.AssertSetEqual(tests_run, tests_to_run)
+
+ def setUp(self):
+ """Sets up test case.
+
+ Determines whether value-parameterized tests are enabled in the binary and
+ sets the flags accordingly.
+ """
+
+ global param_tests_present
+ if param_tests_present is None:
+ param_tests_present = PARAM_TEST_REGEX.search(
+ RunAndReturnOutput()) is not None
+
+ def testDefaultBehavior(self):
+ """Tests the behavior of not specifying the filter."""
+
+ self.RunAndVerify(None, ACTIVE_TESTS)
+
+ def testDefaultBehaviorWithShards(self):
+ """Tests the behavior without the filter, with sharding enabled."""
+
+ self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS)
+ self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS)
+ self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS)
+ self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS)
+ self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS)
+
+ def testEmptyFilter(self):
+ """Tests an empty filter."""
+
+ self.RunAndVerify('', [])
+ self.RunAndVerifyWithSharding('', 1, [])
+ self.RunAndVerifyWithSharding('', 2, [])
+
+ def testBadFilter(self):
+ """Tests a filter that matches nothing."""
+
+ self.RunAndVerify('BadFilter', [])
+ self.RunAndVerifyAllowingDisabled('BadFilter', [])
+
+ def testFullName(self):
+ """Tests filtering by full name."""
+
+ self.RunAndVerify('FooTest.Xyz', ['FooTest.Xyz'])
+ self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz'])
+ self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz'])
+
+ def testUniversalFilters(self):
+ """Tests filters that match everything."""
+
+ self.RunAndVerify('*', ACTIVE_TESTS)
+ self.RunAndVerify('*.*', ACTIVE_TESTS)
+ self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS)
+ self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS)
+ self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS)
+
+ def testFilterByTestCase(self):
+ """Tests filtering by test case name."""
+
+ self.RunAndVerify('FooTest.*', ['FooTest.Abc', 'FooTest.Xyz'])
+
+ BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']
+ self.RunAndVerify('BazTest.*', BAZ_TESTS)
+ self.RunAndVerifyAllowingDisabled('BazTest.*',
+ BAZ_TESTS + ['BazTest.DISABLED_TestC'])
+
+ def testFilterByTest(self):
+ """Tests filtering by test name."""
+
+ self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne'])
+
+ def testFilterDisabledTests(self):
+ """Select only the disabled tests to run."""
+
+ self.RunAndVerify('DISABLED_FoobarTest.Test1', [])
+ self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',
+ ['DISABLED_FoobarTest.Test1'])
+
+ self.RunAndVerify('*DISABLED_*', [])
+ self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)
+
+ self.RunAndVerify('*.DISABLED_*', [])
+ self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [
+ 'BarTest.DISABLED_TestFour',
+ 'BarTest.DISABLED_TestFive',
+ 'BazTest.DISABLED_TestC',
+ 'DISABLED_FoobarTest.DISABLED_Test2',
+ ])
+
+ self.RunAndVerify('DISABLED_*', [])
+ self.RunAndVerifyAllowingDisabled('DISABLED_*', [
+ 'DISABLED_FoobarTest.Test1',
+ 'DISABLED_FoobarTest.DISABLED_Test2',
+ 'DISABLED_FoobarbazTest.TestA',
+ ])
+
+ def testWildcardInTestCaseName(self):
+ """Tests using wildcard in the test case name."""
+
+ self.RunAndVerify('*a*.*', [
+ 'BarTest.TestOne',
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+
+ 'BazTest.TestOne',
+ 'BazTest.TestA',
+ 'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS)
+
+ def testWildcardInTestName(self):
+ """Tests using wildcard in the test name."""
+
+ self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA'])
+
+ def testFilterWithoutDot(self):
+ """Tests a filter that has no '.' in it."""
+
+ self.RunAndVerify('*z*', [
+ 'FooTest.Xyz',
+
+ 'BazTest.TestOne',
+ 'BazTest.TestA',
+ 'BazTest.TestB',
+ ])
+
+ def testTwoPatterns(self):
+ """Tests filters that consist of two patterns."""
+
+ self.RunAndVerify('Foo*.*:*A*', [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+
+ 'BazTest.TestA',
+ ])
+
+ # An empty pattern + a non-empty one
+ self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])
+
+ def testThreePatterns(self):
+ """Tests filters that consist of three patterns."""
+
+ self.RunAndVerify('*oo*:*A*:*One', [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+
+ 'BarTest.TestOne',
+
+ 'BazTest.TestOne',
+ 'BazTest.TestA',
+ ])
+
+ # The 2nd pattern is empty.
+ self.RunAndVerify('*oo*::*One', [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+
+ 'BarTest.TestOne',
+
+ 'BazTest.TestOne',
+ ])
+
+ # The last 2 patterns are empty.
+ self.RunAndVerify('*oo*::', [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+ ])
+
+ def testNegativeFilters(self):
+ self.RunAndVerify('*-BazTest.TestOne', [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+
+ 'BarTest.TestOne',
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+
+ 'BazTest.TestA',
+ 'BazTest.TestB',
+ ] + DEATH_TESTS + PARAM_TESTS)
+
+ self.RunAndVerify('*-FooTest.Abc:BazTest.*', [
+ 'FooTest.Xyz',
+
+ 'BarTest.TestOne',
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+ ] + DEATH_TESTS + PARAM_TESTS)
+
+ self.RunAndVerify('BarTest.*-BarTest.TestOne', [
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+ ])
+
+ # Tests without leading '*'.
+ self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [
+ 'BarTest.TestOne',
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+ ] + DEATH_TESTS + PARAM_TESTS)
+
+ # Value parameterized tests.
+ self.RunAndVerify('*/*', PARAM_TESTS)
+
+ # Value parameterized tests filtering by the sequence name.
+ self.RunAndVerify('SeqP/*', [
+ 'SeqP/ParamTest.TestX/0',
+ 'SeqP/ParamTest.TestX/1',
+ 'SeqP/ParamTest.TestY/0',
+ 'SeqP/ParamTest.TestY/1',
+ ])
+
+ # Value parameterized tests filtering by the test name.
+ self.RunAndVerify('*/0', [
+ 'SeqP/ParamTest.TestX/0',
+ 'SeqP/ParamTest.TestY/0',
+ 'SeqQ/ParamTest.TestX/0',
+ 'SeqQ/ParamTest.TestY/0',
+ ])
+
+ def testFlagOverridesEnvVar(self):
+ """Tests that the filter flag overrides the filtering env. variable."""
+
+ SetEnvVar(FILTER_ENV_VAR, 'Foo*')
+ args = ['--%s=%s' % (FILTER_FLAG, '*One')]
+ tests_run = RunAndExtractTestList(args)[0]
+ SetEnvVar(FILTER_ENV_VAR, None)
+
+ self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])
+
+ def testShardStatusFileIsCreated(self):
+ """Tests that the shard file is created if specified in the environment."""
+
+ shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
+ 'shard_status_file')
+ self.assert_(not os.path.exists(shard_status_file))
+
+ extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
+ try:
+ InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
+ finally:
+ self.assert_(os.path.exists(shard_status_file))
+ os.remove(shard_status_file)
+
+ def testShardStatusFileIsCreatedWithListTests(self):
+ """Tests that the shard file is created with the "list_tests" flag."""
+
+ shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
+ 'shard_status_file2')
+ self.assert_(not os.path.exists(shard_status_file))
+
+ extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
+ try:
+ output = InvokeWithModifiedEnv(extra_env,
+ RunAndReturnOutput,
+ [LIST_TESTS_FLAG])
+ finally:
+ # This assertion ensures that Google Test enumerated the tests as
+ # opposed to running them.
+ self.assert_('[==========]' not in output,
+ 'Unexpected output during test enumeration.\n'
+ 'Please ensure that LIST_TESTS_FLAG is assigned the\n'
+ 'correct flag value for listing Google Test tests.')
+
+ self.assert_(os.path.exists(shard_status_file))
+ os.remove(shard_status_file)
+
+ if SUPPORTS_DEATH_TESTS:
+ def testShardingWorksWithDeathTests(self):
+ """Tests integration with death tests and sharding."""
+
+ gtest_filter = 'HasDeathTest.*:SeqP/*'
+ expected_tests = [
+ 'HasDeathTest.Test1',
+ 'HasDeathTest.Test2',
+
+ 'SeqP/ParamTest.TestX/0',
+ 'SeqP/ParamTest.TestX/1',
+ 'SeqP/ParamTest.TestY/0',
+ 'SeqP/ParamTest.TestY/1',
+ ]
+
+ for flag in ['--gtest_death_test_style=threadsafe',
+ '--gtest_death_test_style=fast']:
+ self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
+ check_exit_0=True, args=[flag])
+ self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
+ check_exit_0=True, args=[flag])
+
+if __name__ == '__main__':
+ gtest_test_utils.Main()
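The filter grammar these cases exercise is small: a filter is a ':'-separated list of positive patterns, optionally followed by '-' and a ':'-separated list of negative patterns; patterns use '*' and '?' wildcards and are matched against the full 'TestCase.TestName' string, and an entirely empty filter selects nothing. A rough Python equivalent of that matching, written against the expectations above rather than gtest's implementation:

from fnmatch import fnmatchcase

def matches_filter(full_test_name, gtest_filter):
    # Approximates --gtest_filter matching for a 'TestCase.TestName' string.
    if '-' in gtest_filter:
        positive, negative = gtest_filter.split('-', 1)
        positive = positive or '*'  # '-Foo.*' behaves like '*-Foo.*'.
    else:
        positive, negative = gtest_filter, ''
    def any_match(patterns):
        return any(fnmatchcase(full_test_name, p)
                   for p in patterns.split(':') if p)
    return any_match(positive) and not any_match(negative)

assert matches_filter('BazTest.TestA', 'Foo*.*:*A*')
assert not matches_filter('BazTest.TestOne', '*-BazTest.TestOne')
assert not matches_filter('FooTest.Abc', '')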
diff --git a/Source/ThirdParty/gtest/test/gtest_filter_unittest_.cc b/Source/ThirdParty/gtest/test/gtest_filter_unittest_.cc
new file mode 100644
index 000000000..325504fe9
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_filter_unittest_.cc
@@ -0,0 +1,140 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// Unit test for Google Test test filters.
+//
+// A user can specify which test(s) in a Google Test program to run via
+// either the GTEST_FILTER environment variable or the --gtest_filter
+// flag. This is used for testing such functionality.
+//
+// The program will be invoked from a Python unit test. Don't run it
+// directly.
+
+#include <gtest/gtest.h>
+
+namespace {
+
+// Test case FooTest.
+
+class FooTest : public testing::Test {
+};
+
+TEST_F(FooTest, Abc) {
+}
+
+TEST_F(FooTest, Xyz) {
+ FAIL() << "Expected failure.";
+}
+
+// Test case BarTest.
+
+TEST(BarTest, TestOne) {
+}
+
+TEST(BarTest, TestTwo) {
+}
+
+TEST(BarTest, TestThree) {
+}
+
+TEST(BarTest, DISABLED_TestFour) {
+ FAIL() << "Expected failure.";
+}
+
+TEST(BarTest, DISABLED_TestFive) {
+ FAIL() << "Expected failure.";
+}
+
+// Test case BazTest.
+
+TEST(BazTest, TestOne) {
+ FAIL() << "Expected failure.";
+}
+
+TEST(BazTest, TestA) {
+}
+
+TEST(BazTest, TestB) {
+}
+
+TEST(BazTest, DISABLED_TestC) {
+ FAIL() << "Expected failure.";
+}
+
+// Test case HasDeathTest
+
+TEST(HasDeathTest, Test1) {
+ EXPECT_DEATH_IF_SUPPORTED(exit(1), ".*");
+}
+
+// We need at least two death tests to make sure that all the death tests
+// aren't on the first shard.
+TEST(HasDeathTest, Test2) {
+ EXPECT_DEATH_IF_SUPPORTED(exit(1), ".*");
+}
+
+// Test case FoobarTest
+
+TEST(DISABLED_FoobarTest, Test1) {
+ FAIL() << "Expected failure.";
+}
+
+TEST(DISABLED_FoobarTest, DISABLED_Test2) {
+ FAIL() << "Expected failure.";
+}
+
+// Test case FoobarbazTest
+
+TEST(DISABLED_FoobarbazTest, TestA) {
+ FAIL() << "Expected failure.";
+}
+
+#if GTEST_HAS_PARAM_TEST
+class ParamTest : public testing::TestWithParam<int> {
+};
+
+TEST_P(ParamTest, TestX) {
+}
+
+TEST_P(ParamTest, TestY) {
+}
+
+INSTANTIATE_TEST_CASE_P(SeqP, ParamTest, testing::Values(1, 2));
+INSTANTIATE_TEST_CASE_P(SeqQ, ParamTest, testing::Values(5, 6));
+#endif // GTEST_HAS_PARAM_TEST
+
+} // namespace
+
+int main(int argc, char **argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+
+ return RUN_ALL_TESTS();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_help_test.py b/Source/ThirdParty/gtest/test/gtest_help_test.py
new file mode 100755
index 000000000..3cb4c48e0
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_help_test.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Tests the --help flag of Google C++ Testing Framework.
+
+SYNOPSIS
+ gtest_help_test.py --gtest_build_dir=BUILD/DIR
+ # where BUILD/DIR contains the built gtest_help_test_ file.
+ gtest_help_test.py
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import re
+import gtest_test_utils
+
+
+IS_WINDOWS = os.name == 'nt'
+
+PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_')
+FLAG_PREFIX = '--gtest_'
+CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions'
+DEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style'
+UNKNOWN_FLAG = FLAG_PREFIX + 'unknown_flag_for_testing'
+LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
+INCORRECT_FLAG_VARIANTS = [re.sub('^--', '-', LIST_TESTS_FLAG),
+ re.sub('^--', '/', LIST_TESTS_FLAG),
+ re.sub('_', '-', LIST_TESTS_FLAG)]
+INTERNAL_FLAG_FOR_TESTING = FLAG_PREFIX + 'internal_flag_for_testing'
+
+SUPPORTS_DEATH_TESTS = "DeathTest" in gtest_test_utils.Subprocess(
+ [PROGRAM_PATH, LIST_TESTS_FLAG]).output
+
+# The help message must match this regex.
+HELP_REGEX = re.compile(
+ FLAG_PREFIX + r'list_tests.*' +
+ FLAG_PREFIX + r'filter=.*' +
+ FLAG_PREFIX + r'also_run_disabled_tests.*' +
+ FLAG_PREFIX + r'repeat=.*' +
+ FLAG_PREFIX + r'shuffle.*' +
+ FLAG_PREFIX + r'random_seed=.*' +
+ FLAG_PREFIX + r'color=.*' +
+ FLAG_PREFIX + r'print_time.*' +
+ FLAG_PREFIX + r'output=.*' +
+ FLAG_PREFIX + r'break_on_failure.*' +
+ FLAG_PREFIX + r'throw_on_failure.*',
+ re.DOTALL)
+
+
+def RunWithFlag(flag):
+ """Runs gtest_help_test_ with the given flag.
+
+ Returns:
+ the exit code and the text output as a tuple.
+ Args:
+ flag: the command-line flag to pass to gtest_help_test_, or None.
+ """
+
+ if flag is None:
+ command = [PROGRAM_PATH]
+ else:
+ command = [PROGRAM_PATH, flag]
+ child = gtest_test_utils.Subprocess(command)
+ return child.exit_code, child.output
+
+
+class GTestHelpTest(gtest_test_utils.TestCase):
+ """Tests the --help flag and its equivalent forms."""
+
+ def TestHelpFlag(self, flag):
+ """Verifies correct behavior when help flag is specified.
+
+    The right message must be printed and the tests must be
+ skipped when the given flag is specified.
+
+ Args:
+ flag: A flag to pass to the binary or None.
+ """
+
+ exit_code, output = RunWithFlag(flag)
+ self.assertEquals(0, exit_code)
+ self.assert_(HELP_REGEX.search(output), output)
+ if IS_WINDOWS:
+ self.assert_(CATCH_EXCEPTIONS_FLAG in output, output)
+ else:
+ self.assert_(CATCH_EXCEPTIONS_FLAG not in output, output)
+
+ if SUPPORTS_DEATH_TESTS and not IS_WINDOWS:
+ self.assert_(DEATH_TEST_STYLE_FLAG in output, output)
+ else:
+ self.assert_(DEATH_TEST_STYLE_FLAG not in output, output)
+
+ def TestNonHelpFlag(self, flag):
+ """Verifies correct behavior when no help flag is specified.
+
+ Verifies that when no help flag is specified, the tests are run
+ and the help message is not printed.
+
+ Args:
+ flag: A flag to pass to the binary or None.
+ """
+
+ exit_code, output = RunWithFlag(flag)
+ self.assert_(exit_code != 0)
+ self.assert_(not HELP_REGEX.search(output), output)
+
+ def testPrintsHelpWithFullFlag(self):
+ self.TestHelpFlag('--help')
+
+ def testPrintsHelpWithShortFlag(self):
+ self.TestHelpFlag('-h')
+
+ def testPrintsHelpWithQuestionFlag(self):
+ self.TestHelpFlag('-?')
+
+ def testPrintsHelpWithWindowsStyleQuestionFlag(self):
+ self.TestHelpFlag('/?')
+
+ def testPrintsHelpWithUnrecognizedGoogleTestFlag(self):
+ self.TestHelpFlag(UNKNOWN_FLAG)
+
+ def testPrintsHelpWithIncorrectFlagStyle(self):
+ for incorrect_flag in INCORRECT_FLAG_VARIANTS:
+ self.TestHelpFlag(incorrect_flag)
+
+ def testRunsTestsWithoutHelpFlag(self):
+ """Verifies that when no help flag is specified, the tests are run
+ and the help message is not printed."""
+
+ self.TestNonHelpFlag(None)
+
+ def testRunsTestsWithGtestInternalFlag(self):
+ """Verifies that the tests are run and no help message is printed when
+ a flag starting with Google Test prefix and 'internal_' is supplied."""
+
+ self.TestNonHelpFlag(INTERNAL_FLAG_FOR_TESTING)
+
+
+if __name__ == '__main__':
+ gtest_test_utils.Main()
diff --git a/Source/ThirdParty/gtest/test/gtest_help_test_.cc b/Source/ThirdParty/gtest/test/gtest_help_test_.cc
new file mode 100644
index 000000000..aad0d72d7
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_help_test_.cc
@@ -0,0 +1,46 @@
+// Copyright 2009, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// This program is meant to be run by gtest_help_test.py. Do not run
+// it directly.
+
+#include <gtest/gtest.h>
+
+// When a help flag is specified, this program should skip the tests
+// and exit with 0; otherwise the following test will be executed,
+// causing this program to exit with a non-zero code.
+TEST(HelpFlagTest, ShouldNotBeRun) {
+ ASSERT_TRUE(false) << "Tests shouldn't be run when --help is specified.";
+}
+
+#if GTEST_HAS_DEATH_TEST
+TEST(DeathTest, UsedByPythonScriptToDetectSupportForDeathTestsInThisBinary) {}
+#endif
diff --git a/Source/ThirdParty/gtest/test/gtest_list_tests_unittest.py b/Source/ThirdParty/gtest/test/gtest_list_tests_unittest.py
new file mode 100755
index 000000000..ce8c3ef05
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_list_tests_unittest.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python
+#
+# Copyright 2006, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Unit test for Google Test's --gtest_list_tests flag.
+
+A user can ask Google Test to list all tests by specifying the
+--gtest_list_tests flag. This script tests such functionality
+by invoking gtest_list_tests_unittest_ (a program written with
+Google Test) the command line flags.
+"""
+
+__author__ = 'phanna@google.com (Patrick Hanna)'
+
+import gtest_test_utils
+
+
+# Constants.
+
+# The command line flag for enabling/disabling listing all tests.
+LIST_TESTS_FLAG = 'gtest_list_tests'
+
+# Path to the gtest_list_tests_unittest_ program.
+EXE_PATH = gtest_test_utils.GetTestExecutablePath('gtest_list_tests_unittest_')
+
+# The expected output when running gtest_list_tests_unittest_ with
+# --gtest_list_tests
+EXPECTED_OUTPUT_NO_FILTER = """FooDeathTest.
+ Test1
+Foo.
+ Bar1
+ Bar2
+ DISABLED_Bar3
+Abc.
+ Xyz
+ Def
+FooBar.
+ Baz
+FooTest.
+ Test1
+ DISABLED_Test2
+ Test3
+"""
+
+# The expected output when running gtest_list_tests_unittest_ with
+# --gtest_list_tests and --gtest_filter=Foo*.
+EXPECTED_OUTPUT_FILTER_FOO = """FooDeathTest.
+ Test1
+Foo.
+ Bar1
+ Bar2
+ DISABLED_Bar3
+FooBar.
+ Baz
+FooTest.
+ Test1
+ DISABLED_Test2
+ Test3
+"""
+
+# Utilities.
+
+
+def Run(args):
+ """Runs gtest_list_tests_unittest_ and returns the list of tests printed."""
+
+ return gtest_test_utils.Subprocess([EXE_PATH] + args,
+ capture_stderr=False).output
+
+
+# The unit test.
+
+class GTestListTestsUnitTest(gtest_test_utils.TestCase):
+ """Tests using the --gtest_list_tests flag to list all tests."""
+
+ def RunAndVerify(self, flag_value, expected_output, other_flag):
+ """Runs gtest_list_tests_unittest_ and verifies that it prints
+ the correct tests.
+
+ Args:
+ flag_value: value of the --gtest_list_tests flag;
+ None if the flag should not be present.
+
+ expected_output: the expected output after running command;
+
+ other_flag: a different flag to be passed to command
+ along with gtest_list_tests;
+ None if the flag should not be present.
+ """
+
+ if flag_value is None:
+ flag = ''
+ flag_expression = 'not set'
+ elif flag_value == '0':
+ flag = '--%s=0' % LIST_TESTS_FLAG
+ flag_expression = '0'
+ else:
+ flag = '--%s' % LIST_TESTS_FLAG
+ flag_expression = '1'
+
+ args = [flag]
+
+ if other_flag is not None:
+ args += [other_flag]
+
+ output = Run(args)
+
+ msg = ('when %s is %s, the output of "%s" is "%s".' %
+ (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output))
+
+ if expected_output is not None:
+ self.assert_(output == expected_output, msg)
+ else:
+ self.assert_(output != EXPECTED_OUTPUT_NO_FILTER, msg)
+
+ def testDefaultBehavior(self):
+ """Tests the behavior of the default mode."""
+
+ self.RunAndVerify(flag_value=None,
+ expected_output=None,
+ other_flag=None)
+
+ def testFlag(self):
+ """Tests using the --gtest_list_tests flag."""
+
+ self.RunAndVerify(flag_value='0',
+ expected_output=None,
+ other_flag=None)
+ self.RunAndVerify(flag_value='1',
+ expected_output=EXPECTED_OUTPUT_NO_FILTER,
+ other_flag=None)
+
+ def testOverrideNonFilterFlags(self):
+ """Tests that --gtest_list_tests overrides the non-filter flags."""
+
+ self.RunAndVerify(flag_value='1',
+ expected_output=EXPECTED_OUTPUT_NO_FILTER,
+ other_flag='--gtest_break_on_failure')
+
+ def testWithFilterFlags(self):
+ """Tests that --gtest_list_tests takes into account the
+ --gtest_filter flag."""
+
+ self.RunAndVerify(flag_value='1',
+ expected_output=EXPECTED_OUTPUT_FILTER_FOO,
+ other_flag='--gtest_filter=Foo*')
+
+
+if __name__ == '__main__':
+ gtest_test_utils.Main()
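The listing format that the expected-output constants above depend on is simple: each test case name is printed flush left and terminated with '.', and each of its tests follows indented by two spaces. A short sketch of turning that output back into a mapping, assuming exactly that layout (illustrative, not a gtest API):

def parse_list_tests_output(output):
    # Parses --gtest_list_tests output into {'TestCase': [test_name, ...]}.
    listed = {}
    current = None
    for line in output.splitlines():
        if not line.strip():
            continue
        if line.startswith('  '):
            listed[current].append(line.strip())
        else:
            current = line.rstrip('.')
            listed[current] = []
    return listed

sample = 'Foo.\n  Bar1\n  DISABLED_Bar3\nAbc.\n  Xyz\n'
assert parse_list_tests_output(sample) == {
    'Foo': ['Bar1', 'DISABLED_Bar3'], 'Abc': ['Xyz']}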
diff --git a/Source/ThirdParty/gtest/test/gtest_list_tests_unittest_.cc b/Source/ThirdParty/gtest/test/gtest_list_tests_unittest_.cc
new file mode 100644
index 000000000..a0ed0825b
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_list_tests_unittest_.cc
@@ -0,0 +1,85 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: phanna@google.com (Patrick Hanna)
+
+// Unit test for Google Test's --gtest_list_tests flag.
+//
+// A user can ask Google Test to list all tests that will run,
+// so that when using a filter, the user will know which
+// tests to look for.  The tests are listed but not run.
+//
+// This program will be invoked from a Python unit test.
+// Don't run it directly.
+
+#include <gtest/gtest.h>
+
+namespace {
+
+// Several different test cases and tests that will be listed.
+TEST(Foo, Bar1) {
+}
+
+TEST(Foo, Bar2) {
+}
+
+TEST(Foo, DISABLED_Bar3) {
+}
+
+TEST(Abc, Xyz) {
+}
+
+TEST(Abc, Def) {
+}
+
+TEST(FooBar, Baz) {
+}
+
+class FooTest : public testing::Test {
+};
+
+TEST_F(FooTest, Test1) {
+}
+
+TEST_F(FooTest, DISABLED_Test2) {
+}
+
+TEST_F(FooTest, Test3) {
+}
+
+TEST(FooDeathTest, Test1) {
+}
+
+} // namespace
+
+int main(int argc, char **argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+
+ return RUN_ALL_TESTS();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_main_unittest.cc b/Source/ThirdParty/gtest/test/gtest_main_unittest.cc
new file mode 100644
index 000000000..7a3f0adfa
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_main_unittest.cc
@@ -0,0 +1,45 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include <gtest/gtest.h>
+
+// Tests that we don't have to define main() when we link to
+// gtest_main instead of gtest.
+
+namespace {
+
+TEST(GTestMainTest, ShouldSucceed) {
+}
+
+} // namespace
+
+// We are using the main() function defined in src/gtest_main.cc, so
+// we don't define it here.
diff --git a/Source/ThirdParty/gtest/test/gtest_nc.cc b/Source/ThirdParty/gtest/test/gtest_nc.cc
new file mode 100644
index 000000000..73b5db6d4
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_nc.cc
@@ -0,0 +1,234 @@
+// Copyright 2007, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// This file is the input to a negative-compilation test for Google
+// Test. Code here is NOT supposed to compile. Its purpose is to
+// verify that certain incorrect usages of the Google Test API are
+// indeed rejected by the compiler.
+//
+// We still need to write the negative-compilation test itself, which
+// will be tightly coupled with the build environment.
+//
+// TODO(wan@google.com): finish the negative-compilation test.
+
+#ifdef TEST_CANNOT_IGNORE_RUN_ALL_TESTS_RESULT
+// Tests that the result of RUN_ALL_TESTS() cannot be ignored.
+
+#include <gtest/gtest.h>
+
+int main(int argc, char** argv) {
+ testing::InitGoogleTest(&argc, argv);
+ RUN_ALL_TESTS(); // This line shouldn't compile.
+}
+
+#elif defined(TEST_USER_CANNOT_INCLUDE_GTEST_INTERNAL_INL_H)
+// Tests that a user cannot include gtest-internal-inl.h in his code.
+
+#include "src/gtest-internal-inl.h"
+
+#elif defined(TEST_CATCHES_DECLARING_SETUP_IN_TEST_FIXTURE_WITH_TYPO)
+// Tests that the compiler catches the typo when a user declares a
+// Setup() method in a test fixture.
+
+#include <gtest/gtest.h>
+
+class MyTest : public testing::Test {
+ protected:
+ void Setup() {}
+};
+
+#elif defined(TEST_CATCHES_CALLING_SETUP_IN_TEST_WITH_TYPO)
+// Tests that the compiler catches the typo when a user calls Setup()
+// from a test fixture.
+
+#include <gtest/gtest.h>
+
+class MyTest : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ testing::Test::Setup(); // Tries to call SetUp() in the parent class.
+ }
+};
+
+#elif defined(TEST_CATCHES_DECLARING_SETUP_IN_ENVIRONMENT_WITH_TYPO)
+// Tests that the compiler catches the typo when a user declares a
+// Setup() method in a subclass of Environment.
+
+#include <gtest/gtest.h>
+
+class MyEnvironment : public testing::Environment {
+ public:
+ void Setup() {}
+};
+
+#elif defined(TEST_CATCHES_CALLING_SETUP_IN_ENVIRONMENT_WITH_TYPO)
+// Tests that the compiler catches the typo when a user calls Setup()
+// in an Environment.
+
+#include <gtest/gtest.h>
+
+class MyEnvironment : public testing::Environment {
+ protected:
+ virtual void SetUp() {
+ // Tries to call SetUp() in the parent class.
+ testing::Environment::Setup();
+ }
+};
+
+#elif defined(TEST_CATCHES_WRONG_CASE_IN_TYPED_TEST_P)
+// Tests that the compiler catches using the wrong test case name in
+// TYPED_TEST_P.
+
+#include <gtest/gtest.h>
+
+template <typename T>
+class FooTest : public testing::Test {
+};
+
+template <typename T>
+class BarTest : public testing::Test {
+};
+
+TYPED_TEST_CASE_P(FooTest);
+TYPED_TEST_P(BarTest, A) {} // Wrong test case name.
+REGISTER_TYPED_TEST_CASE_P(FooTest, A);
+INSTANTIATE_TYPED_TEST_CASE_P(My, FooTest, testing::Types<int>);
+
+#elif defined(TEST_CATCHES_WRONG_CASE_IN_REGISTER_TYPED_TEST_CASE_P)
+// Tests that the compiler catches using the wrong test case name in
+// REGISTER_TYPED_TEST_CASE_P.
+
+#include <gtest/gtest.h>
+
+template <typename T>
+class FooTest : public testing::Test {
+};
+
+template <typename T>
+class BarTest : public testing::Test {
+};
+
+TYPED_TEST_CASE_P(FooTest);
+TYPED_TEST_P(FooTest, A) {}
+REGISTER_TYPED_TEST_CASE_P(BarTest, A); // Wrong test case name.
+INSTANTIATE_TYPED_TEST_CASE_P(My, FooTest, testing::Types<int>);
+
+#elif defined(TEST_CATCHES_WRONG_CASE_IN_INSTANTIATE_TYPED_TEST_CASE_P)
+// Tests that the compiler catches using the wrong test case name in
+// INSTANTIATE_TYPED_TEST_CASE_P.
+
+#include <gtest/gtest.h>
+
+template <typename T>
+class FooTest : public testing::Test {
+};
+
+template <typename T>
+class BarTest : public testing::Test {
+};
+
+TYPED_TEST_CASE_P(FooTest);
+TYPED_TEST_P(FooTest, A) {}
+REGISTER_TYPED_TEST_CASE_P(FooTest, A);
+
+// Wrong test case name.
+INSTANTIATE_TYPED_TEST_CASE_P(My, BarTest, testing::Types<int>);
+
+#elif defined(TEST_CATCHES_INSTANTIATE_TYPED_TESET_CASE_P_WITH_SAME_NAME_PREFIX)
+// Tests that the compiler catches instantiating TYPED_TEST_CASE_P
+// twice with the same name prefix.
+
+#include <gtest/gtest.h>
+
+template <typename T>
+class FooTest : public testing::Test {
+};
+
+TYPED_TEST_CASE_P(FooTest);
+TYPED_TEST_P(FooTest, A) {}
+REGISTER_TYPED_TEST_CASE_P(FooTest, A);
+
+INSTANTIATE_TYPED_TEST_CASE_P(My, FooTest, testing::Types<int>);
+
+// Wrong name prefix: "My" has been used.
+INSTANTIATE_TYPED_TEST_CASE_P(My, FooTest, testing::Types<double>);
+
+#elif defined(TEST_STATIC_ASSERT_TYPE_EQ_IS_NOT_A_TYPE)
+
+#include <gtest/gtest.h>
+
+// Tests that StaticAssertTypeEq<T1, T2> cannot be used as a type.
+testing::StaticAssertTypeEq<int, int> dummy;
+
+#elif defined(TEST_STATIC_ASSERT_TYPE_EQ_WORKS_IN_NAMESPACE)
+
+#include <gtest/gtest.h>
+
+// Tests that StaticAssertTypeEq<T1, T2> works in a namespace scope.
+static bool dummy = testing::StaticAssertTypeEq<int, const int>();
+
+#elif defined(TEST_STATIC_ASSERT_TYPE_EQ_WORKS_IN_CLASS)
+
+#include <gtest/gtest.h>
+
+template <typename T>
+class Helper {
+ public:
+ // Tests that StaticAssertTypeEq<T1, T2> works in a class.
+ Helper() { testing::StaticAssertTypeEq<int, T>(); }
+
+ void DoSomething() {}
+};
+
+void Test() {
+ Helper<bool> h;
+ h.DoSomething(); // To avoid the "unused variable" warning.
+}
+
+#elif defined(TEST_STATIC_ASSERT_TYPE_EQ_WORKS_IN_FUNCTION)
+
+#include <gtest/gtest.h>
+
+void Test() {
+ // Tests that StaticAssertTypeEq<T1, T2> works inside a function.
+ testing::StaticAssertTypeEq<const int, int>();
+}
+
+#else
+// A sanity test. This should compile.
+
+#include <gtest/gtest.h>
+
+int main() {
+ return RUN_ALL_TESTS();
+}
+
+#endif
diff --git a/Source/ThirdParty/gtest/test/gtest_nc_test.py b/Source/ThirdParty/gtest/test/gtest_nc_test.py
new file mode 100755
index 000000000..06ffb3f80
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_nc_test.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+#
+# Copyright 2007, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Negative compilation test for Google Test."""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import sys
+import unittest
+
+
+IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
+if not IS_LINUX:
+ sys.exit(0) # Negative compilation tests are not supported on Windows & Mac.
+
+
+class GTestNCTest(unittest.TestCase):
+ """Negative compilation test for Google Test."""
+
+ def testCompilerError(self):
+ """Verifies that erroneous code leads to expected compiler
+ messages."""
+
+ # Defines a list of test specs, where each element is a tuple
+ # (test name, list of regexes for matching the compiler errors).
+ test_specs = [
+ ('CANNOT_IGNORE_RUN_ALL_TESTS_RESULT',
+ [r'ignoring return value']),
+
+ ('USER_CANNOT_INCLUDE_GTEST_INTERNAL_INL_H',
+ [r'must not be included except by Google Test itself']),
+
+ ('CATCHES_DECLARING_SETUP_IN_TEST_FIXTURE_WITH_TYPO',
+ [r'Setup_should_be_spelled_SetUp']),
+
+ ('CATCHES_CALLING_SETUP_IN_TEST_WITH_TYPO',
+ [r'Setup_should_be_spelled_SetUp']),
+
+ ('CATCHES_DECLARING_SETUP_IN_ENVIRONMENT_WITH_TYPO',
+ [r'Setup_should_be_spelled_SetUp']),
+
+ ('CATCHES_CALLING_SETUP_IN_ENVIRONMENT_WITH_TYPO',
+ [r'Setup_should_be_spelled_SetUp']),
+
+ ('CATCHES_WRONG_CASE_IN_TYPED_TEST_P',
+ [r'BarTest.*was not declared']),
+
+ ('CATCHES_WRONG_CASE_IN_REGISTER_TYPED_TEST_CASE_P',
+ [r'BarTest.*was not declared']),
+
+ ('CATCHES_WRONG_CASE_IN_INSTANTIATE_TYPED_TEST_CASE_P',
+ [r'BarTest.*not declared']),
+
+ ('CATCHES_INSTANTIATE_TYPED_TESET_CASE_P_WITH_SAME_NAME_PREFIX',
+ [r'redefinition of.*My.*FooTest']),
+
+ ('STATIC_ASSERT_TYPE_EQ_IS_NOT_A_TYPE',
+ [r'StaticAssertTypeEq.* does not name a type']),
+
+ ('STATIC_ASSERT_TYPE_EQ_WORKS_IN_NAMESPACE',
+ [r'StaticAssertTypeEq.*int.*const int']),
+
+ ('STATIC_ASSERT_TYPE_EQ_WORKS_IN_CLASS',
+ [r'StaticAssertTypeEq.*int.*bool']),
+
+ ('STATIC_ASSERT_TYPE_EQ_WORKS_IN_FUNCTION',
+ [r'StaticAssertTypeEq.*const int.*int']),
+
+ ('SANITY',
+ None)
+ ]
+
+ # TODO(wan@google.com): verify that the test specs are satisfied.
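The verification step referenced in the TODO above is not written in this
snapshot. If it were, it might look roughly like the sketch below: compile
gtest_nc.cc with exactly one TEST_* symbol defined and match the compiler's
diagnostics against the spec's regexes. The compiler command line, include
path, and helper name are illustrative assumptions, not part of Google Test.

import re
import subprocess

def VerifySpec(test_name, regexes, source='gtest_nc.cc'):
  # Compiles the negative-compilation source with only this spec enabled.
  cmd = ['g++', '-c', source, '-Iinclude', '-DTEST_%s' % test_name]
  p = subprocess.Popen(cmd, stderr=subprocess.PIPE, universal_newlines=True)
  _, stderr = p.communicate()
  if regexes is None:               # The SANITY spec is expected to compile.
    return p.returncode == 0
  # Every expected diagnostic must appear somewhere in the compiler output.
  return all(re.search(regex, stderr) for regex in regexes)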
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/Source/ThirdParty/gtest/test/gtest_no_test_unittest.cc b/Source/ThirdParty/gtest/test/gtest_no_test_unittest.cc
new file mode 100644
index 000000000..afe2dc0c9
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_no_test_unittest.cc
@@ -0,0 +1,54 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Tests that a Google Test program that has no test defined can run
+// successfully.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include <gtest/gtest.h>
+
+
+int main(int argc, char **argv) {
+ testing::InitGoogleTest(&argc, argv);
+
+ // An ad-hoc assertion outside of all tests.
+ //
+ // This serves two purposes:
+ //
+ // 1. It verifies that an ad-hoc assertion can be executed even if
+ // no test is defined.
+  // 2. We had a bug where the XML output wouldn't be generated if an
+  //    assertion was executed before RUN_ALL_TESTS() was called, even
+  //    though --gtest_output=xml was specified.  This makes sure the
+  //    bug is fixed and doesn't regress.
+ EXPECT_EQ(1, 1);
+
+ return RUN_ALL_TESTS();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_output_test.py b/Source/ThirdParty/gtest/test/gtest_output_test.py
new file mode 100755
index 000000000..192030a20
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_output_test.py
@@ -0,0 +1,327 @@
+#!/usr/bin/env python
+#
+# Copyright 2008, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Tests the text output of Google C++ Testing Framework.
+
+SYNOPSIS
+ gtest_output_test.py --gtest_build_dir=BUILD/DIR --gengolden
+ # where BUILD/DIR contains the built gtest_output_test_ file.
+ gtest_output_test.py --gengolden
+ gtest_output_test.py
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import re
+import sys
+import gtest_test_utils
+
+
+# The flag for generating the golden file
+GENGOLDEN_FLAG = '--gengolden'
+CATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS'
+
+IS_WINDOWS = os.name == 'nt'
+
+if IS_WINDOWS:
+ GOLDEN_NAME = 'gtest_output_test_golden_win.txt'
+else:
+ GOLDEN_NAME = 'gtest_output_test_golden_lin.txt'
+
+PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_output_test_')
+
+# At least one command we exercise must not have the
+# --gtest_internal_skip_environment_and_ad_hoc_tests flag.
+COMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests'])
+COMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes'])
+COMMAND_WITH_TIME = ({}, [PROGRAM_PATH,
+ '--gtest_print_time',
+ '--gtest_internal_skip_environment_and_ad_hoc_tests',
+ '--gtest_filter=FatalFailureTest.*:LoggingTest.*'])
+COMMAND_WITH_DISABLED = (
+ {}, [PROGRAM_PATH,
+ '--gtest_also_run_disabled_tests',
+ '--gtest_internal_skip_environment_and_ad_hoc_tests',
+ '--gtest_filter=*DISABLED_*'])
+COMMAND_WITH_SHARDING = (
+ {'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
+ [PROGRAM_PATH,
+ '--gtest_internal_skip_environment_and_ad_hoc_tests',
+ '--gtest_filter=PassingTest.*'])
+
+GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME)
+
+
+def ToUnixLineEnding(s):
+ """Changes all Windows/Mac line endings in s to UNIX line endings."""
+
+ return s.replace('\r\n', '\n').replace('\r', '\n')
+
+
+def RemoveLocations(test_output):
+ """Removes all file location info from a Google Test program's output.
+
+ Args:
+ test_output: the output of a Google Test program.
+
+ Returns:
+ output with all file location info (in the form of
+    'DIRECTORY/FILE_NAME:LINE_NUMBER: ' or
+ 'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by
+ 'FILE_NAME:#: '.
+ """
+
+ return re.sub(r'.*[/\\](.+)(\:\d+|\(\d+\))\: ', r'\1:#: ', test_output)
+
+
+def RemoveStackTraceDetails(output):
+ """Removes all stack traces from a Google Test program's output."""
+
+ # *? means "find the shortest string that matches".
+ return re.sub(r'Stack trace:(.|\n)*?\n\n',
+ 'Stack trace: (omitted)\n\n', output)
+
+
+def RemoveStackTraces(output):
+ """Removes all traces of stack traces from a Google Test program's output."""
+
+ # *? means "find the shortest string that matches".
+ return re.sub(r'Stack trace:(.|\n)*?\n\n', '', output)
+
+
+def RemoveTime(output):
+ """Removes all time information from a Google Test program's output."""
+
+ return re.sub(r'\(\d+ ms', '(? ms', output)
+
+
+def RemoveTypeInfoDetails(test_output):
+ """Removes compiler-specific type info from Google Test program's output.
+
+ Args:
+ test_output: the output of a Google Test program.
+
+ Returns:
+ output with type information normalized to canonical form.
+ """
+
+ # some compilers output the name of type 'unsigned int' as 'unsigned'
+ return re.sub(r'unsigned int', 'unsigned', test_output)
+
+
+def RemoveTestCounts(output):
+ """Removes test counts from a Google Test program's output."""
+
+ output = re.sub(r'\d+ tests?, listed below',
+ '? tests, listed below', output)
+ output = re.sub(r'\d+ FAILED TESTS',
+ '? FAILED TESTS', output)
+ output = re.sub(r'\d+ tests? from \d+ test cases?',
+ '? tests from ? test cases', output)
+ output = re.sub(r'\d+ tests? from ([a-zA-Z_])',
+ r'? tests from \1', output)
+ return re.sub(r'\d+ tests?\.', '? tests.', output)
+
+
+def RemoveMatchingTests(test_output, pattern):
+ """Removes output of specified tests from a Google Test program's output.
+
+ This function strips not only the beginning and the end of a test but also
+ all output in between.
+
+ Args:
+ test_output: A string containing the test output.
+ pattern: A regex string that matches names of test cases or
+ tests to remove.
+
+ Returns:
+ Contents of test_output with tests whose names match pattern removed.
+ """
+
+ test_output = re.sub(
+ r'.*\[ RUN \] .*%s(.|\n)*?\[( FAILED | OK )\] .*%s.*\n' % (
+ pattern, pattern),
+ '',
+ test_output)
+ return re.sub(r'.*%s.*\n' % pattern, '', test_output)
+
+
+def NormalizeOutput(output):
+ """Normalizes output (the output of gtest_output_test_.exe)."""
+
+ output = ToUnixLineEnding(output)
+ output = RemoveLocations(output)
+ output = RemoveStackTraceDetails(output)
+ output = RemoveTime(output)
+ return output
+
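As a rough illustration of what NormalizeOutput() above does, consider the
hypothetical snippet below; the sample failure text is invented for the
example and is not taken from the golden file.

sample = ('src/gtest_output_test_.cc:123: Failure (3 ms)\n'
          'Stack trace:\n  ...\n\n')
# Locations collapse to 'FILE_NAME:#:', '(N ms' becomes '(? ms', and the
# stack trace body is replaced with a single placeholder line.
assert NormalizeOutput(sample) == ('gtest_output_test_.cc:#: Failure (? ms)\n'
                                   'Stack trace: (omitted)\n\n')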
+
+def GetShellCommandOutput(env_cmd):
+ """Runs a command in a sub-process, and returns its output in a string.
+
+ Args:
+    env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
+             environment variables to set, and element 1 is a list containing
+             the command and any flags.
+
+ Returns:
+ A string with the command's combined standard and diagnostic output.
+ """
+
+  # Spawns cmd in a sub-process with the extra environment variables set,
+  # and captures its combined output.
+ environ = os.environ.copy()
+ environ.update(env_cmd[0])
+ p = gtest_test_utils.Subprocess(env_cmd[1], env=environ)
+
+ return p.output
+
+
+def GetCommandOutput(env_cmd):
+ """Runs a command and returns its output with all file location
+ info stripped off.
+
+ Args:
+ env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
+ environment variables to set, and element 1 is a string with
+ the command and any flags.
+ """
+
+ # Disables exception pop-ups on Windows.
+ environ, cmdline = env_cmd
+ environ = dict(environ) # Ensures we are modifying a copy.
+ environ[CATCH_EXCEPTIONS_ENV_VAR_NAME] = '1'
+ return NormalizeOutput(GetShellCommandOutput((environ, cmdline)))
+
+
+def GetOutputOfAllCommands():
+ """Returns concatenated output from several representative commands."""
+
+ return (GetCommandOutput(COMMAND_WITH_COLOR) +
+ GetCommandOutput(COMMAND_WITH_TIME) +
+ GetCommandOutput(COMMAND_WITH_DISABLED) +
+ GetCommandOutput(COMMAND_WITH_SHARDING))
+
+
+test_list = GetShellCommandOutput(COMMAND_LIST_TESTS)
+SUPPORTS_DEATH_TESTS = 'DeathTest' in test_list
+SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list
+SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list
+SUPPORTS_STACK_TRACES = False
+
+CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and
+ SUPPORTS_TYPED_TESTS and
+ SUPPORTS_THREADS)
+
+
+class GTestOutputTest(gtest_test_utils.TestCase):
+ def RemoveUnsupportedTests(self, test_output):
+ if not SUPPORTS_DEATH_TESTS:
+ test_output = RemoveMatchingTests(test_output, 'DeathTest')
+ if not SUPPORTS_TYPED_TESTS:
+ test_output = RemoveMatchingTests(test_output, 'TypedTest')
+ test_output = RemoveMatchingTests(test_output, 'TypedDeathTest')
+ test_output = RemoveMatchingTests(test_output, 'TypeParamDeathTest')
+ if not SUPPORTS_THREADS:
+ test_output = RemoveMatchingTests(test_output,
+ 'ExpectFailureWithThreadsTest')
+ test_output = RemoveMatchingTests(test_output,
+ 'ScopedFakeTestPartResultReporterTest')
+ test_output = RemoveMatchingTests(test_output,
+ 'WorksConcurrently')
+ if not SUPPORTS_STACK_TRACES:
+ test_output = RemoveStackTraces(test_output)
+
+ return test_output
+
+ def testOutput(self):
+ output = GetOutputOfAllCommands()
+
+ golden_file = open(GOLDEN_PATH, 'rb')
+ # A mis-configured source control system can cause \r appear in EOL
+ # sequences when we read the golden file irrespective of an operating
+ # system used. Therefore, we need to strip those \r's from newlines
+ # unconditionally.
+ golden = ToUnixLineEnding(golden_file.read())
+ golden_file.close()
+
+ # We want the test to pass regardless of certain features being
+ # supported or not.
+
+ # We still have to remove type name specifics in all cases.
+ normalized_actual = RemoveTypeInfoDetails(output)
+ normalized_golden = RemoveTypeInfoDetails(golden)
+
+ if CAN_GENERATE_GOLDEN_FILE:
+ self.assertEqual(normalized_golden, normalized_actual)
+ else:
+ normalized_actual = RemoveTestCounts(normalized_actual)
+ normalized_golden = RemoveTestCounts(self.RemoveUnsupportedTests(
+ normalized_golden))
+
+ # This code is very handy when debugging golden file differences:
+ if os.getenv('DEBUG_GTEST_OUTPUT_TEST'):
+ open(os.path.join(
+ gtest_test_utils.GetSourceDir(),
+ '_gtest_output_test_normalized_actual.txt'), 'wb').write(
+ normalized_actual)
+ open(os.path.join(
+ gtest_test_utils.GetSourceDir(),
+ '_gtest_output_test_normalized_golden.txt'), 'wb').write(
+ normalized_golden)
+
+ self.assertEqual(normalized_golden, normalized_actual)
+
+
+if __name__ == '__main__':
+ if sys.argv[1:] == [GENGOLDEN_FLAG]:
+ if CAN_GENERATE_GOLDEN_FILE:
+ output = GetOutputOfAllCommands()
+ golden_file = open(GOLDEN_PATH, 'wb')
+ golden_file.write(output)
+ golden_file.close()
+ else:
+ message = (
+ """Unable to write a golden file when compiled in an environment
+that does not support all the required features (death tests""")
+ if IS_WINDOWS:
+ message += (
+ """\nand typed tests). Please check that you are using VC++ 8.0 SP1
+or higher as your compiler.""")
+ else:
+ message += """\ntyped tests, and threads). Please generate the
+golden file using a binary built with those features enabled."""
+
+ sys.stderr.write(message)
+ sys.exit(1)
+ else:
+ gtest_test_utils.Main()
diff --git a/Source/ThirdParty/gtest/test/gtest_output_test_.cc b/Source/ThirdParty/gtest/test/gtest_output_test_.cc
new file mode 100644
index 000000000..273e8e93e
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_output_test_.cc
@@ -0,0 +1,1135 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// A unit test for Google Test itself. This verifies that the basic
+// constructs of Google Test work.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include <gtest/gtest-spi.h>
+#include <gtest/gtest.h>
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+#include <stdlib.h>
+
+#if GTEST_IS_THREADSAFE
+using testing::ScopedFakeTestPartResultReporter;
+using testing::TestPartResultArray;
+
+using testing::internal::Notification;
+using testing::internal::ThreadWithParam;
+#endif
+
+namespace posix = ::testing::internal::posix;
+using testing::internal::String;
+using testing::internal::scoped_ptr;
+
+// Tests catching fatal failures.
+
+// A subroutine used by the following test.
+void TestEq1(int x) {
+ ASSERT_EQ(1, x);
+}
+
+// This function calls a test subroutine, catches the fatal failure it
+// generates, and then returns early.
+void TryTestSubroutine() {
+  // Calls a subroutine that yields a fatal failure.
+ TestEq1(2);
+
+ // Catches the fatal failure and aborts the test.
+ //
+ // The testing::Test:: prefix is necessary when calling
+ // HasFatalFailure() outside of a TEST, TEST_F, or test fixture.
+ if (testing::Test::HasFatalFailure()) return;
+
+ // If we get here, something is wrong.
+ FAIL() << "This should never be reached.";
+}
+
+TEST(PassingTest, PassingTest1) {
+}
+
+TEST(PassingTest, PassingTest2) {
+}
+
+// Tests catching a fatal failure in a subroutine.
+TEST(FatalFailureTest, FatalFailureInSubroutine) {
+ printf("(expecting a failure that x should be 1)\n");
+
+ TryTestSubroutine();
+}
+
+// Tests catching a fatal failure in a nested subroutine.
+TEST(FatalFailureTest, FatalFailureInNestedSubroutine) {
+ printf("(expecting a failure that x should be 1)\n");
+
+  // Calls a subroutine that yields a fatal failure.
+ TryTestSubroutine();
+
+ // Catches the fatal failure and aborts the test.
+ //
+ // When calling HasFatalFailure() inside a TEST, TEST_F, or test
+ // fixture, the testing::Test:: prefix is not needed.
+ if (HasFatalFailure()) return;
+
+ // If we get here, something is wrong.
+ FAIL() << "This should never be reached.";
+}
+
+// Tests HasFatalFailure() after a failed EXPECT check.
+TEST(FatalFailureTest, NonfatalFailureInSubroutine) {
+ printf("(expecting a failure on false)\n");
+ EXPECT_TRUE(false); // Generates a nonfatal failure
+ ASSERT_FALSE(HasFatalFailure()); // This should succeed.
+}
+
+// Tests interleaving user logging and Google Test assertions.
+TEST(LoggingTest, InterleavingLoggingAndAssertions) {
+ static const int a[4] = {
+ 3, 9, 2, 6
+ };
+
+ printf("(expecting 2 failures on (3) >= (a[i]))\n");
+ for (int i = 0; i < static_cast<int>(sizeof(a)/sizeof(*a)); i++) {
+ printf("i == %d\n", i);
+ EXPECT_GE(3, a[i]);
+ }
+}
+
+// Tests the SCOPED_TRACE macro.
+
+// A helper function for testing SCOPED_TRACE.
+void SubWithoutTrace(int n) {
+ EXPECT_EQ(1, n);
+ ASSERT_EQ(2, n);
+}
+
+// Another helper function for testing SCOPED_TRACE.
+void SubWithTrace(int n) {
+ SCOPED_TRACE(testing::Message() << "n = " << n);
+
+ SubWithoutTrace(n);
+}
+
+// Tests that SCOPED_TRACE() obeys lexical scopes.
+TEST(SCOPED_TRACETest, ObeysScopes) {
+ printf("(expected to fail)\n");
+
+ // There should be no trace before SCOPED_TRACE() is invoked.
+ ADD_FAILURE() << "This failure is expected, and shouldn't have a trace.";
+
+ {
+ SCOPED_TRACE("Expected trace");
+ // After SCOPED_TRACE(), a failure in the current scope should contain
+ // the trace.
+ ADD_FAILURE() << "This failure is expected, and should have a trace.";
+ }
+
+ // Once the control leaves the scope of the SCOPED_TRACE(), there
+ // should be no trace again.
+ ADD_FAILURE() << "This failure is expected, and shouldn't have a trace.";
+}
+
+// Tests that SCOPED_TRACE works inside a loop.
+TEST(SCOPED_TRACETest, WorksInLoop) {
+ printf("(expected to fail)\n");
+
+ for (int i = 1; i <= 2; i++) {
+ SCOPED_TRACE(testing::Message() << "i = " << i);
+
+ SubWithoutTrace(i);
+ }
+}
+
+// Tests that SCOPED_TRACE works in a subroutine.
+TEST(SCOPED_TRACETest, WorksInSubroutine) {
+ printf("(expected to fail)\n");
+
+ SubWithTrace(1);
+ SubWithTrace(2);
+}
+
+// Tests that SCOPED_TRACE can be nested.
+TEST(SCOPED_TRACETest, CanBeNested) {
+ printf("(expected to fail)\n");
+
+ SCOPED_TRACE(""); // A trace without a message.
+
+ SubWithTrace(2);
+}
+
+// Tests that multiple SCOPED_TRACEs can be used in the same scope.
+TEST(SCOPED_TRACETest, CanBeRepeated) {
+ printf("(expected to fail)\n");
+
+ SCOPED_TRACE("A");
+ ADD_FAILURE()
+ << "This failure is expected, and should contain trace point A.";
+
+ SCOPED_TRACE("B");
+ ADD_FAILURE()
+ << "This failure is expected, and should contain trace point A and B.";
+
+ {
+ SCOPED_TRACE("C");
+ ADD_FAILURE() << "This failure is expected, and should contain "
+ << "trace point A, B, and C.";
+ }
+
+ SCOPED_TRACE("D");
+ ADD_FAILURE() << "This failure is expected, and should contain "
+ << "trace point A, B, and D.";
+}
+
+#if GTEST_IS_THREADSAFE
+// Tests that SCOPED_TRACE()s can be used concurrently from multiple
+// threads. Namely, an assertion should be affected by
+// SCOPED_TRACE()s in its own thread only.
+
+// Here's the sequence of actions that happen in the test:
+//
+// Thread A (main) | Thread B (spawned)
+// ===============================|================================
+// spawns thread B |
+// -------------------------------+--------------------------------
+// waits for n1 | SCOPED_TRACE("Trace B");
+// | generates failure #1
+// | notifies n1
+// -------------------------------+--------------------------------
+// SCOPED_TRACE("Trace A"); | waits for n2
+// generates failure #2 |
+// notifies n2 |
+// -------------------------------|--------------------------------
+// waits for n3 | generates failure #3
+// | trace B dies
+// | generates failure #4
+// | notifies n3
+// -------------------------------|--------------------------------
+// generates failure #5 | finishes
+// trace A dies |
+// generates failure #6 |
+// -------------------------------|--------------------------------
+// waits for thread B to finish |
+
+struct CheckPoints {
+ Notification n1;
+ Notification n2;
+ Notification n3;
+};
+
+static void ThreadWithScopedTrace(CheckPoints* check_points) {
+ {
+ SCOPED_TRACE("Trace B");
+ ADD_FAILURE()
+ << "Expected failure #1 (in thread B, only trace B alive).";
+ check_points->n1.Notify();
+ check_points->n2.WaitForNotification();
+
+ ADD_FAILURE()
+ << "Expected failure #3 (in thread B, trace A & B both alive).";
+ } // Trace B dies here.
+ ADD_FAILURE()
+ << "Expected failure #4 (in thread B, only trace A alive).";
+ check_points->n3.Notify();
+}
+
+TEST(SCOPED_TRACETest, WorksConcurrently) {
+ printf("(expecting 6 failures)\n");
+
+ CheckPoints check_points;
+ ThreadWithParam<CheckPoints*> thread(&ThreadWithScopedTrace,
+ &check_points,
+ NULL);
+ check_points.n1.WaitForNotification();
+
+ {
+ SCOPED_TRACE("Trace A");
+ ADD_FAILURE()
+ << "Expected failure #2 (in thread A, trace A & B both alive).";
+ check_points.n2.Notify();
+ check_points.n3.WaitForNotification();
+
+ ADD_FAILURE()
+ << "Expected failure #5 (in thread A, only trace A alive).";
+ } // Trace A dies here.
+ ADD_FAILURE()
+ << "Expected failure #6 (in thread A, no trace alive).";
+ thread.Join();
+}
+#endif // GTEST_IS_THREADSAFE
+
+TEST(DisabledTestsWarningTest,
+ DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning) {
+ // This test body is intentionally empty. Its sole purpose is for
+ // verifying that the --gtest_also_run_disabled_tests flag
+ // suppresses the "YOU HAVE 12 DISABLED TESTS" warning at the end of
+ // the test output.
+}
+
+// Tests using assertions outside of TEST and TEST_F.
+//
+// This function creates two failures intentionally.
+void AdHocTest() {
+ printf("The non-test part of the code is expected to have 2 failures.\n\n");
+ EXPECT_TRUE(false);
+ EXPECT_EQ(2, 3);
+}
+
+// Runs all TESTs, all TEST_Fs, and the ad hoc test.
+int RunAllTests() {
+ AdHocTest();
+ return RUN_ALL_TESTS();
+}
+
+// Tests non-fatal failures in the fixture constructor.
+class NonFatalFailureInFixtureConstructorTest : public testing::Test {
+ protected:
+ NonFatalFailureInFixtureConstructorTest() {
+ printf("(expecting 5 failures)\n");
+ ADD_FAILURE() << "Expected failure #1, in the test fixture c'tor.";
+ }
+
+ ~NonFatalFailureInFixtureConstructorTest() {
+ ADD_FAILURE() << "Expected failure #5, in the test fixture d'tor.";
+ }
+
+ virtual void SetUp() {
+ ADD_FAILURE() << "Expected failure #2, in SetUp().";
+ }
+
+ virtual void TearDown() {
+ ADD_FAILURE() << "Expected failure #4, in TearDown.";
+ }
+};
+
+TEST_F(NonFatalFailureInFixtureConstructorTest, FailureInConstructor) {
+ ADD_FAILURE() << "Expected failure #3, in the test body.";
+}
+
+// Tests fatal failures in the fixture constructor.
+class FatalFailureInFixtureConstructorTest : public testing::Test {
+ protected:
+ FatalFailureInFixtureConstructorTest() {
+ printf("(expecting 2 failures)\n");
+ Init();
+ }
+
+ ~FatalFailureInFixtureConstructorTest() {
+ ADD_FAILURE() << "Expected failure #2, in the test fixture d'tor.";
+ }
+
+ virtual void SetUp() {
+ ADD_FAILURE() << "UNEXPECTED failure in SetUp(). "
+ << "We should never get here, as the test fixture c'tor "
+ << "had a fatal failure.";
+ }
+
+ virtual void TearDown() {
+ ADD_FAILURE() << "UNEXPECTED failure in TearDown(). "
+ << "We should never get here, as the test fixture c'tor "
+ << "had a fatal failure.";
+ }
+ private:
+ void Init() {
+ FAIL() << "Expected failure #1, in the test fixture c'tor.";
+ }
+};
+
+TEST_F(FatalFailureInFixtureConstructorTest, FailureInConstructor) {
+ ADD_FAILURE() << "UNEXPECTED failure in the test body. "
+ << "We should never get here, as the test fixture c'tor "
+ << "had a fatal failure.";
+}
+
+// Tests non-fatal failures in SetUp().
+class NonFatalFailureInSetUpTest : public testing::Test {
+ protected:
+ virtual ~NonFatalFailureInSetUpTest() {
+ Deinit();
+ }
+
+ virtual void SetUp() {
+ printf("(expecting 4 failures)\n");
+ ADD_FAILURE() << "Expected failure #1, in SetUp().";
+ }
+
+ virtual void TearDown() {
+ FAIL() << "Expected failure #3, in TearDown().";
+ }
+ private:
+ void Deinit() {
+ FAIL() << "Expected failure #4, in the test fixture d'tor.";
+ }
+};
+
+TEST_F(NonFatalFailureInSetUpTest, FailureInSetUp) {
+ FAIL() << "Expected failure #2, in the test function.";
+}
+
+// Tests fatal failures in SetUp().
+class FatalFailureInSetUpTest : public testing::Test {
+ protected:
+ virtual ~FatalFailureInSetUpTest() {
+ Deinit();
+ }
+
+ virtual void SetUp() {
+ printf("(expecting 3 failures)\n");
+ FAIL() << "Expected failure #1, in SetUp().";
+ }
+
+ virtual void TearDown() {
+ FAIL() << "Expected failure #2, in TearDown().";
+ }
+ private:
+ void Deinit() {
+ FAIL() << "Expected failure #3, in the test fixture d'tor.";
+ }
+};
+
+TEST_F(FatalFailureInSetUpTest, FailureInSetUp) {
+ FAIL() << "UNEXPECTED failure in the test function. "
+ << "We should never get here, as SetUp() failed.";
+}
+
+#if GTEST_OS_WINDOWS
+
+// This group of tests verifies that Google Test handles SEH and C++
+// exceptions correctly.
+
+// A function that throws an SEH exception.
+static void ThrowSEH() {
+ int* p = NULL;
+ *p = 0; // Raises an access violation.
+}
+
+// Tests exceptions thrown in the test fixture constructor.
+class ExceptionInFixtureCtorTest : public testing::Test {
+ protected:
+ ExceptionInFixtureCtorTest() {
+ printf("(expecting a failure on thrown exception "
+ "in the test fixture's constructor)\n");
+
+ ThrowSEH();
+ }
+
+ virtual ~ExceptionInFixtureCtorTest() {
+ Deinit();
+ }
+
+ virtual void SetUp() {
+ FAIL() << "UNEXPECTED failure in SetUp(). "
+ << "We should never get here, as the test fixture c'tor threw.";
+ }
+
+ virtual void TearDown() {
+ FAIL() << "UNEXPECTED failure in TearDown(). "
+ << "We should never get here, as the test fixture c'tor threw.";
+ }
+ private:
+ void Deinit() {
+ FAIL() << "UNEXPECTED failure in the d'tor. "
+ << "We should never get here, as the test fixture c'tor threw.";
+ }
+};
+
+TEST_F(ExceptionInFixtureCtorTest, ExceptionInFixtureCtor) {
+ FAIL() << "UNEXPECTED failure in the test function. "
+ << "We should never get here, as the test fixture c'tor threw.";
+}
+
+// Tests exceptions thrown in SetUp().
+class ExceptionInSetUpTest : public testing::Test {
+ protected:
+ virtual ~ExceptionInSetUpTest() {
+ Deinit();
+ }
+
+ virtual void SetUp() {
+ printf("(expecting 3 failures)\n");
+
+ ThrowSEH();
+ }
+
+ virtual void TearDown() {
+ FAIL() << "Expected failure #2, in TearDown().";
+ }
+ private:
+ void Deinit() {
+ FAIL() << "Expected failure #3, in the test fixture d'tor.";
+ }
+};
+
+TEST_F(ExceptionInSetUpTest, ExceptionInSetUp) {
+ FAIL() << "UNEXPECTED failure in the test function. "
+ << "We should never get here, as SetUp() threw.";
+}
+
+// Tests that TearDown() and the test fixture d'tor are always called,
+// even when the test function throws an exception.
+class ExceptionInTestFunctionTest : public testing::Test {
+ protected:
+ virtual ~ExceptionInTestFunctionTest() {
+ Deinit();
+ }
+
+ virtual void TearDown() {
+ FAIL() << "Expected failure #2, in TearDown().";
+ }
+ private:
+ void Deinit() {
+ FAIL() << "Expected failure #3, in the test fixture d'tor.";
+ }
+};
+
+// Tests that the test fixture d'tor is always called, even when the
+// test function throws an SEH exception.
+TEST_F(ExceptionInTestFunctionTest, SEH) {
+ printf("(expecting 3 failures)\n");
+
+ ThrowSEH();
+}
+
+#if GTEST_HAS_EXCEPTIONS
+
+// Tests that the test fixture d'tor is always called, even when the
+// test function throws a C++ exception. We do this only when
+// GTEST_HAS_EXCEPTIONS is non-zero, i.e. C++ exceptions are enabled.
+TEST_F(ExceptionInTestFunctionTest, CppException) {
+ throw 1;
+}
+
+// Tests exceptions thrown in TearDown().
+class ExceptionInTearDownTest : public testing::Test {
+ protected:
+ virtual ~ExceptionInTearDownTest() {
+ Deinit();
+ }
+
+ virtual void TearDown() {
+ throw 1;
+ }
+ private:
+ void Deinit() {
+ FAIL() << "Expected failure #2, in the test fixture d'tor.";
+ }
+};
+
+TEST_F(ExceptionInTearDownTest, ExceptionInTearDown) {
+ printf("(expecting 2 failures)\n");
+}
+
+#endif // GTEST_HAS_EXCEPTIONS
+
+#endif // GTEST_OS_WINDOWS
+
+#if GTEST_IS_THREADSAFE
+
+// A unary function that may die.
+void DieIf(bool should_die) {
+ GTEST_CHECK_(!should_die) << " - death inside DieIf().";
+}
+
+// Tests running death tests in a multi-threaded context.
+
+// Used for coordination between the main and the spawn thread.
+struct SpawnThreadNotifications {
+ SpawnThreadNotifications() {}
+
+ Notification spawn_thread_started;
+ Notification spawn_thread_ok_to_terminate;
+
+ private:
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(SpawnThreadNotifications);
+};
+
+// The function to be executed in the thread spawned by the
+// MultipleThreads test (below).
+static void ThreadRoutine(SpawnThreadNotifications* notifications) {
+ // Signals the main thread that this thread has started.
+ notifications->spawn_thread_started.Notify();
+
+ // Waits for permission to finish from the main thread.
+ notifications->spawn_thread_ok_to_terminate.WaitForNotification();
+}
+
+// This is a death-test test, but it's not named with a DeathTest
+// suffix. It starts threads which might interfere with later
+// death tests, so it must run after all other death tests.
+class DeathTestAndMultiThreadsTest : public testing::Test {
+ protected:
+ // Starts a thread and waits for it to begin.
+ virtual void SetUp() {
+ thread_.reset(new ThreadWithParam<SpawnThreadNotifications*>(
+ &ThreadRoutine, &notifications_, NULL));
+ notifications_.spawn_thread_started.WaitForNotification();
+ }
+ // Tells the thread to finish, and reaps it.
+ // Depending on the version of the thread library in use,
+ // a manager thread might still be left running that will interfere
+ // with later death tests. This is unfortunate, but this class
+ // cleans up after itself as best it can.
+ virtual void TearDown() {
+ notifications_.spawn_thread_ok_to_terminate.Notify();
+ }
+
+ private:
+ SpawnThreadNotifications notifications_;
+ scoped_ptr<ThreadWithParam<SpawnThreadNotifications*> > thread_;
+};
+
+#endif // GTEST_IS_THREADSAFE
+
+// The MixedUpTestCaseTest test case verifies that Google Test will fail a
+// test if it uses a different fixture class than what other tests in
+// the same test case use. It deliberately contains two fixture
+// classes with the same name but defined in different namespaces.
+
+// The MixedUpTestCaseWithSameTestNameTest test case verifies that
+// when the user defines two tests with the same test case name AND
+// same test name (but in different namespaces), the second test will
+// fail.
+
+namespace foo {
+
+class MixedUpTestCaseTest : public testing::Test {
+};
+
+TEST_F(MixedUpTestCaseTest, FirstTestFromNamespaceFoo) {}
+TEST_F(MixedUpTestCaseTest, SecondTestFromNamespaceFoo) {}
+
+class MixedUpTestCaseWithSameTestNameTest : public testing::Test {
+};
+
+TEST_F(MixedUpTestCaseWithSameTestNameTest,
+ TheSecondTestWithThisNameShouldFail) {}
+
+} // namespace foo
+
+namespace bar {
+
+class MixedUpTestCaseTest : public testing::Test {
+};
+
+// The following two tests are expected to fail. We rely on the
+// golden file to check that Google Test generates the right error message.
+TEST_F(MixedUpTestCaseTest, ThisShouldFail) {}
+TEST_F(MixedUpTestCaseTest, ThisShouldFailToo) {}
+
+class MixedUpTestCaseWithSameTestNameTest : public testing::Test {
+};
+
+// Expected to fail. We rely on the golden file to check that Google Test
+// generates the right error message.
+TEST_F(MixedUpTestCaseWithSameTestNameTest,
+ TheSecondTestWithThisNameShouldFail) {}
+
+} // namespace bar
+
+// The following two test cases verify that Google Test catches the user
+// error of mixing TEST and TEST_F in the same test case. The first
+// test case checks the scenario where TEST_F appears before TEST, and
+// the second one checks where TEST appears before TEST_F.
+
+class TEST_F_before_TEST_in_same_test_case : public testing::Test {
+};
+
+TEST_F(TEST_F_before_TEST_in_same_test_case, DefinedUsingTEST_F) {}
+
+// Expected to fail. We rely on the golden file to check that Google Test
+// generates the right error message.
+TEST(TEST_F_before_TEST_in_same_test_case, DefinedUsingTESTAndShouldFail) {}
+
+class TEST_before_TEST_F_in_same_test_case : public testing::Test {
+};
+
+TEST(TEST_before_TEST_F_in_same_test_case, DefinedUsingTEST) {}
+
+// Expected to fail. We rely on the golden file to check that Google Test
+// generates the right error message.
+TEST_F(TEST_before_TEST_F_in_same_test_case, DefinedUsingTEST_FAndShouldFail) {
+}
+
+// Used for testing EXPECT_NONFATAL_FAILURE() and EXPECT_FATAL_FAILURE().
+int global_integer = 0;
+
+// Tests that EXPECT_NONFATAL_FAILURE() can reference global variables.
+TEST(ExpectNonfatalFailureTest, CanReferenceGlobalVariables) {
+ global_integer = 0;
+ EXPECT_NONFATAL_FAILURE({
+ EXPECT_EQ(1, global_integer) << "Expected non-fatal failure.";
+ }, "Expected non-fatal failure.");
+}
+
+// Tests that EXPECT_NONFATAL_FAILURE() can reference local variables
+// (static or not).
+TEST(ExpectNonfatalFailureTest, CanReferenceLocalVariables) {
+ int m = 0;
+ static int n;
+ n = 1;
+ EXPECT_NONFATAL_FAILURE({
+ EXPECT_EQ(m, n) << "Expected non-fatal failure.";
+ }, "Expected non-fatal failure.");
+}
+
+// Tests that EXPECT_NONFATAL_FAILURE() succeeds when there is exactly
+// one non-fatal failure and no fatal failure.
+TEST(ExpectNonfatalFailureTest, SucceedsWhenThereIsOneNonfatalFailure) {
+ EXPECT_NONFATAL_FAILURE({
+ ADD_FAILURE() << "Expected non-fatal failure.";
+ }, "Expected non-fatal failure.");
+}
+
+// Tests that EXPECT_NONFATAL_FAILURE() fails when there is no
+// non-fatal failure.
+TEST(ExpectNonfatalFailureTest, FailsWhenThereIsNoNonfatalFailure) {
+ printf("(expecting a failure)\n");
+ EXPECT_NONFATAL_FAILURE({
+ }, "");
+}
+
+// Tests that EXPECT_NONFATAL_FAILURE() fails when there are two
+// non-fatal failures.
+TEST(ExpectNonfatalFailureTest, FailsWhenThereAreTwoNonfatalFailures) {
+ printf("(expecting a failure)\n");
+ EXPECT_NONFATAL_FAILURE({
+ ADD_FAILURE() << "Expected non-fatal failure 1.";
+ ADD_FAILURE() << "Expected non-fatal failure 2.";
+ }, "");
+}
+
+// Tests that EXPECT_NONFATAL_FAILURE() fails when there is one fatal
+// failure.
+TEST(ExpectNonfatalFailureTest, FailsWhenThereIsOneFatalFailure) {
+ printf("(expecting a failure)\n");
+ EXPECT_NONFATAL_FAILURE({
+ FAIL() << "Expected fatal failure.";
+ }, "");
+}
+
+// Tests that EXPECT_NONFATAL_FAILURE() fails when the statement being
+// tested returns.
+TEST(ExpectNonfatalFailureTest, FailsWhenStatementReturns) {
+ printf("(expecting a failure)\n");
+ EXPECT_NONFATAL_FAILURE({
+ return;
+ }, "");
+}
+
+#if GTEST_HAS_EXCEPTIONS
+
+// Tests that EXPECT_NONFATAL_FAILURE() fails when the statement being
+// tested throws.
+TEST(ExpectNonfatalFailureTest, FailsWhenStatementThrows) {
+ printf("(expecting a failure)\n");
+ try {
+ EXPECT_NONFATAL_FAILURE({
+ throw 0;
+ }, "");
+ } catch(int) { // NOLINT
+ }
+}
+
+#endif // GTEST_HAS_EXCEPTIONS
+
+// Tests that EXPECT_FATAL_FAILURE() can reference global variables.
+TEST(ExpectFatalFailureTest, CanReferenceGlobalVariables) {
+ global_integer = 0;
+ EXPECT_FATAL_FAILURE({
+ ASSERT_EQ(1, global_integer) << "Expected fatal failure.";
+ }, "Expected fatal failure.");
+}
+
+// Tests that EXPECT_FATAL_FAILURE() can reference local static
+// variables.
+TEST(ExpectFatalFailureTest, CanReferenceLocalStaticVariables) {
+ static int n;
+ n = 1;
+ EXPECT_FATAL_FAILURE({
+ ASSERT_EQ(0, n) << "Expected fatal failure.";
+ }, "Expected fatal failure.");
+}
+
+// Tests that EXPECT_FATAL_FAILURE() succeeds when there is exactly
+// one fatal failure and no non-fatal failure.
+TEST(ExpectFatalFailureTest, SucceedsWhenThereIsOneFatalFailure) {
+ EXPECT_FATAL_FAILURE({
+ FAIL() << "Expected fatal failure.";
+ }, "Expected fatal failure.");
+}
+
+// Tests that EXPECT_FATAL_FAILURE() fails when there is no fatal
+// failure.
+TEST(ExpectFatalFailureTest, FailsWhenThereIsNoFatalFailure) {
+ printf("(expecting a failure)\n");
+ EXPECT_FATAL_FAILURE({
+ }, "");
+}
+
+// A helper for generating a fatal failure.
+void FatalFailure() {
+ FAIL() << "Expected fatal failure.";
+}
+
+// Tests that EXPECT_FATAL_FAILURE() fails when there are two
+// fatal failures.
+TEST(ExpectFatalFailureTest, FailsWhenThereAreTwoFatalFailures) {
+ printf("(expecting a failure)\n");
+ EXPECT_FATAL_FAILURE({
+ FatalFailure();
+ FatalFailure();
+ }, "");
+}
+
+// Tests that EXPECT_FATAL_FAILURE() fails when there is one non-fatal
+// failure.
+TEST(ExpectFatalFailureTest, FailsWhenThereIsOneNonfatalFailure) {
+ printf("(expecting a failure)\n");
+ EXPECT_FATAL_FAILURE({
+ ADD_FAILURE() << "Expected non-fatal failure.";
+ }, "");
+}
+
+// Tests that EXPECT_FATAL_FAILURE() fails when the statement being
+// tested returns.
+TEST(ExpectFatalFailureTest, FailsWhenStatementReturns) {
+ printf("(expecting a failure)\n");
+ EXPECT_FATAL_FAILURE({
+ return;
+ }, "");
+}
+
+#if GTEST_HAS_EXCEPTIONS
+
+// Tests that EXPECT_FATAL_FAILURE() fails when the statement being
+// tested throws.
+TEST(ExpectFatalFailureTest, FailsWhenStatementThrows) {
+ printf("(expecting a failure)\n");
+ try {
+ EXPECT_FATAL_FAILURE({
+ throw 0;
+ }, "");
+ } catch(int) { // NOLINT
+ }
+}
+
+#endif // GTEST_HAS_EXCEPTIONS
+
+// This #ifdef block tests the output of typed tests.
+#if GTEST_HAS_TYPED_TEST
+
+template <typename T>
+class TypedTest : public testing::Test {
+};
+
+TYPED_TEST_CASE(TypedTest, testing::Types<int>);
+
+TYPED_TEST(TypedTest, Success) {
+ EXPECT_EQ(0, TypeParam());
+}
+
+TYPED_TEST(TypedTest, Failure) {
+ EXPECT_EQ(1, TypeParam()) << "Expected failure";
+}
+
+#endif // GTEST_HAS_TYPED_TEST
+
+// This #ifdef block tests the output of type-parameterized tests.
+#if GTEST_HAS_TYPED_TEST_P
+
+template <typename T>
+class TypedTestP : public testing::Test {
+};
+
+TYPED_TEST_CASE_P(TypedTestP);
+
+TYPED_TEST_P(TypedTestP, Success) {
+ EXPECT_EQ(0U, TypeParam());
+}
+
+TYPED_TEST_P(TypedTestP, Failure) {
+ EXPECT_EQ(1U, TypeParam()) << "Expected failure";
+}
+
+REGISTER_TYPED_TEST_CASE_P(TypedTestP, Success, Failure);
+
+typedef testing::Types<unsigned char, unsigned int> UnsignedTypes;
+INSTANTIATE_TYPED_TEST_CASE_P(Unsigned, TypedTestP, UnsignedTypes);
+
+#endif // GTEST_HAS_TYPED_TEST_P
+
+#if GTEST_HAS_DEATH_TEST
+
+// We rely on the golden file to verify that tests whose test case
+// name ends with DeathTest are run first.
+
+TEST(ADeathTest, ShouldRunFirst) {
+}
+
+#if GTEST_HAS_TYPED_TEST
+
+// We rely on the golden file to verify that typed tests whose test
+// case name ends with DeathTest are run first.
+
+template <typename T>
+class ATypedDeathTest : public testing::Test {
+};
+
+typedef testing::Types<int, double> NumericTypes;
+TYPED_TEST_CASE(ATypedDeathTest, NumericTypes);
+
+TYPED_TEST(ATypedDeathTest, ShouldRunFirst) {
+}
+
+#endif // GTEST_HAS_TYPED_TEST
+
+#if GTEST_HAS_TYPED_TEST_P
+
+
+// We rely on the golden file to verify that type-parameterized tests
+// whose test case name ends with DeathTest are run first.
+
+template <typename T>
+class ATypeParamDeathTest : public testing::Test {
+};
+
+TYPED_TEST_CASE_P(ATypeParamDeathTest);
+
+TYPED_TEST_P(ATypeParamDeathTest, ShouldRunFirst) {
+}
+
+REGISTER_TYPED_TEST_CASE_P(ATypeParamDeathTest, ShouldRunFirst);
+
+INSTANTIATE_TYPED_TEST_CASE_P(My, ATypeParamDeathTest, NumericTypes);
+
+#endif // GTEST_HAS_TYPED_TEST_P
+
+#endif // GTEST_HAS_DEATH_TEST
+
+// Tests various failure conditions of
+// EXPECT_{,NON}FATAL_FAILURE{,_ON_ALL_THREADS}.
+class ExpectFailureTest : public testing::Test {
+ public: // Must be public and not protected due to a bug in g++ 3.4.2.
+ enum FailureMode {
+ FATAL_FAILURE,
+ NONFATAL_FAILURE
+ };
+ static void AddFailure(FailureMode failure) {
+ if (failure == FATAL_FAILURE) {
+ FAIL() << "Expected fatal failure.";
+ } else {
+ ADD_FAILURE() << "Expected non-fatal failure.";
+ }
+ }
+};
+
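+// In the tests below, the second argument of EXPECT_FATAL_FAILURE() and
+// EXPECT_NONFATAL_FAILURE() is a substring that must occur in the failure
+// message; the "wrong message" cases therefore fail even though a failure
+// of the expected severity was produced.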
+TEST_F(ExpectFailureTest, ExpectFatalFailure) {
+ // Expected fatal failure, but succeeds.
+ printf("(expecting 1 failure)\n");
+ EXPECT_FATAL_FAILURE(SUCCEED(), "Expected fatal failure.");
+ // Expected fatal failure, but got a non-fatal failure.
+ printf("(expecting 1 failure)\n");
+ EXPECT_FATAL_FAILURE(AddFailure(NONFATAL_FAILURE), "Expected non-fatal "
+ "failure.");
+ // Wrong message.
+ printf("(expecting 1 failure)\n");
+ EXPECT_FATAL_FAILURE(AddFailure(FATAL_FAILURE), "Some other fatal failure "
+ "expected.");
+}
+
+TEST_F(ExpectFailureTest, ExpectNonFatalFailure) {
+ // Expected non-fatal failure, but succeeds.
+ printf("(expecting 1 failure)\n");
+ EXPECT_NONFATAL_FAILURE(SUCCEED(), "Expected non-fatal failure.");
+ // Expected non-fatal failure, but got a fatal failure.
+ printf("(expecting 1 failure)\n");
+ EXPECT_NONFATAL_FAILURE(AddFailure(FATAL_FAILURE), "Expected fatal failure.");
+ // Wrong message.
+ printf("(expecting 1 failure)\n");
+ EXPECT_NONFATAL_FAILURE(AddFailure(NONFATAL_FAILURE), "Some other non-fatal "
+ "failure.");
+}
+
+#if GTEST_IS_THREADSAFE
+
+class ExpectFailureWithThreadsTest : public ExpectFailureTest {
+ protected:
+ static void AddFailureInOtherThread(FailureMode failure) {
+ ThreadWithParam<FailureMode> thread(&AddFailure, failure, NULL);
+ thread.Join();
+ }
+};
+
+TEST_F(ExpectFailureWithThreadsTest, ExpectFatalFailure) {
+ // We only intercept the current thread.
+ printf("(expecting 2 failures)\n");
+ EXPECT_FATAL_FAILURE(AddFailureInOtherThread(FATAL_FAILURE),
+ "Expected fatal failure.");
+}
+
+TEST_F(ExpectFailureWithThreadsTest, ExpectNonFatalFailure) {
+ // We only intercept the current thread.
+ printf("(expecting 2 failures)\n");
+ EXPECT_NONFATAL_FAILURE(AddFailureInOtherThread(NONFATAL_FAILURE),
+ "Expected non-fatal failure.");
+}
+
+typedef ExpectFailureWithThreadsTest ScopedFakeTestPartResultReporterTest;
+
+// Tests that the ScopedFakeTestPartResultReporter only catches failures from
+// the current thread if it is instantiated with INTERCEPT_ONLY_CURRENT_THREAD.
+TEST_F(ScopedFakeTestPartResultReporterTest, InterceptOnlyCurrentThread) {
+ printf("(expecting 2 failures)\n");
+ TestPartResultArray results;
+ {
+ ScopedFakeTestPartResultReporter reporter(
+ ScopedFakeTestPartResultReporter::INTERCEPT_ONLY_CURRENT_THREAD,
+ &results);
+ AddFailureInOtherThread(FATAL_FAILURE);
+ AddFailureInOtherThread(NONFATAL_FAILURE);
+ }
+ // The two failures should not have been intercepted.
+ EXPECT_EQ(0, results.size()) << "This shouldn't fail.";
+}
+
+#endif // GTEST_IS_THREADSAFE
+
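+// The _ON_ALL_THREADS variants below differ from EXPECT_{,NON}FATAL_FAILURE()
+// only in that they also intercept failures reported from other threads;
+// with single-threaded statements, as used here, the behavior is the same.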
+TEST_F(ExpectFailureTest, ExpectFatalFailureOnAllThreads) {
+ // Expected fatal failure, but succeeds.
+ printf("(expecting 1 failure)\n");
+ EXPECT_FATAL_FAILURE_ON_ALL_THREADS(SUCCEED(), "Expected fatal failure.");
+ // Expected fatal failure, but got a non-fatal failure.
+ printf("(expecting 1 failure)\n");
+ EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailure(NONFATAL_FAILURE),
+ "Expected non-fatal failure.");
+ // Wrong message.
+ printf("(expecting 1 failure)\n");
+ EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailure(FATAL_FAILURE),
+ "Some other fatal failure expected.");
+}
+
+TEST_F(ExpectFailureTest, ExpectNonFatalFailureOnAllThreads) {
+ // Expected non-fatal failure, but succeeds.
+ printf("(expecting 1 failure)\n");
+ EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(SUCCEED(), "Expected non-fatal "
+ "failure.");
+ // Expected non-fatal failure, but got a fatal failure.
+ printf("(expecting 1 failure)\n");
+ EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddFailure(FATAL_FAILURE),
+ "Expected fatal failure.");
+ // Wrong message.
+ printf("(expecting 1 failure)\n");
+ EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddFailure(NONFATAL_FAILURE),
+ "Some other non-fatal failure.");
+}
+
+
+// Two test environments for testing testing::AddGlobalTestEnvironment().
+
+class FooEnvironment : public testing::Environment {
+ public:
+ virtual void SetUp() {
+ printf("%s", "FooEnvironment::SetUp() called.\n");
+ }
+
+ virtual void TearDown() {
+ printf("%s", "FooEnvironment::TearDown() called.\n");
+ FAIL() << "Expected fatal failure.";
+ }
+};
+
+class BarEnvironment : public testing::Environment {
+ public:
+ virtual void SetUp() {
+ printf("%s", "BarEnvironment::SetUp() called.\n");
+ }
+
+ virtual void TearDown() {
+ printf("%s", "BarEnvironment::TearDown() called.\n");
+ ADD_FAILURE() << "Expected non-fatal failure.";
+ }
+};
+
+GTEST_DEFINE_bool_(internal_skip_environment_and_ad_hoc_tests, false,
+ "This flag causes the program to skip test environment "
+ "tests and ad hoc tests.");
+
+// The main function.
+//
+// The idea is to use Google Test to run all the tests we have defined (some
+// of them are intended to fail), and then compare the test results
+// with the "golden" file.
+int main(int argc, char **argv) {
+ testing::GTEST_FLAG(print_time) = false;
+
+ // We just run the tests, knowing some of them are intended to fail.
+ // We will use a separate Python script to compare the output of
+ // this program with the golden file.
+
+ // It's hard to test InitGoogleTest() directly, as it has many
+ // global side effects. The following line serves as a sanity test
+ // for it.
+ testing::InitGoogleTest(&argc, argv);
+ if (argc >= 2 &&
+ String(argv[1]) == "--gtest_internal_skip_environment_and_ad_hoc_tests")
+ GTEST_FLAG(internal_skip_environment_and_ad_hoc_tests) = true;
+
+#if GTEST_HAS_DEATH_TEST
+ if (testing::internal::GTEST_FLAG(internal_run_death_test) != "") {
+ // Skip the usual output capturing if we're running as the child
+ // process of a threadsafe-style death test.
+#if GTEST_OS_WINDOWS
+ posix::FReopen("nul:", "w", stdout);
+#else
+ posix::FReopen("/dev/null", "w", stdout);
+#endif // GTEST_OS_WINDOWS
+ return RUN_ALL_TESTS();
+ }
+#endif // GTEST_HAS_DEATH_TEST
+
+ if (GTEST_FLAG(internal_skip_environment_and_ad_hoc_tests))
+ return RUN_ALL_TESTS();
+
+ // Registers two global test environments.
+ // The golden file verifies that they are set up in the order they
+ // are registered, and torn down in the reverse order.
+ testing::AddGlobalTestEnvironment(new FooEnvironment);
+ testing::AddGlobalTestEnvironment(new BarEnvironment);
+
+ return RunAllTests();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_output_test_golden_lin.txt b/Source/ThirdParty/gtest/test/gtest_output_test_golden_lin.txt
new file mode 100644
index 000000000..ec60437ac
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_output_test_golden_lin.txt
@@ -0,0 +1,696 @@
+The non-test part of the code is expected to have 2 failures.
+
+gtest_output_test_.cc:#: Failure
+Value of: false
+ Actual: false
+Expected: true
+gtest_output_test_.cc:#: Failure
+Value of: 3
+Expected: 2
+[==========] Running 60 tests from 25 test cases.
+[----------] Global test environment set-up.
+FooEnvironment::SetUp() called.
+BarEnvironment::SetUp() called.
+[----------] 1 test from ADeathTest
+[ RUN ] ADeathTest.ShouldRunFirst
+[ OK ] ADeathTest.ShouldRunFirst
+[----------] 1 test from ATypedDeathTest/0, where TypeParam = int
+[ RUN ] ATypedDeathTest/0.ShouldRunFirst
+[ OK ] ATypedDeathTest/0.ShouldRunFirst
+[----------] 1 test from ATypedDeathTest/1, where TypeParam = double
+[ RUN ] ATypedDeathTest/1.ShouldRunFirst
+[ OK ] ATypedDeathTest/1.ShouldRunFirst
+[----------] 1 test from My/ATypeParamDeathTest/0, where TypeParam = int
+[ RUN ] My/ATypeParamDeathTest/0.ShouldRunFirst
+[ OK ] My/ATypeParamDeathTest/0.ShouldRunFirst
+[----------] 1 test from My/ATypeParamDeathTest/1, where TypeParam = double
+[ RUN ] My/ATypeParamDeathTest/1.ShouldRunFirst
+[ OK ] My/ATypeParamDeathTest/1.ShouldRunFirst
+[----------] 2 tests from PassingTest
+[ RUN ] PassingTest.PassingTest1
+[ OK ] PassingTest.PassingTest1
+[ RUN ] PassingTest.PassingTest2
+[ OK ] PassingTest.PassingTest2
+[----------] 3 tests from FatalFailureTest
+[ RUN ] FatalFailureTest.FatalFailureInSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: Failure
+Value of: x
+ Actual: 2
+Expected: 1
+[ FAILED ] FatalFailureTest.FatalFailureInSubroutine
+[ RUN ] FatalFailureTest.FatalFailureInNestedSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: Failure
+Value of: x
+ Actual: 2
+Expected: 1
+[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine
+[ RUN ] FatalFailureTest.NonfatalFailureInSubroutine
+(expecting a failure on false)
+gtest_output_test_.cc:#: Failure
+Value of: false
+ Actual: false
+Expected: true
+[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine
+[----------] 1 test from LoggingTest
+[ RUN ] LoggingTest.InterleavingLoggingAndAssertions
+(expecting 2 failures on (3) >= (a[i]))
+i == 0
+i == 1
+gtest_output_test_.cc:#: Failure
+Expected: (3) >= (a[i]), actual: 3 vs 9
+i == 2
+i == 3
+gtest_output_test_.cc:#: Failure
+Expected: (3) >= (a[i]), actual: 3 vs 6
+[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions
+[----------] 6 tests from SCOPED_TRACETest
+[ RUN ] SCOPED_TRACETest.ObeysScopes
+(expected to fail)
+gtest_output_test_.cc:#: Failure
+Failed
+This failure is expected, and shouldn't have a trace.
+gtest_output_test_.cc:#: Failure
+Failed
+This failure is expected, and should have a trace.
+Google Test trace:
+gtest_output_test_.cc:#: Expected trace
+gtest_output_test_.cc:#: Failure
+Failed
+This failure is expected, and shouldn't have a trace.
+[ FAILED ] SCOPED_TRACETest.ObeysScopes
+[ RUN ] SCOPED_TRACETest.WorksInLoop
+(expected to fail)
+gtest_output_test_.cc:#: Failure
+Value of: n
+ Actual: 1
+Expected: 2
+Google Test trace:
+gtest_output_test_.cc:#: i = 1
+gtest_output_test_.cc:#: Failure
+Value of: n
+ Actual: 2
+Expected: 1
+Google Test trace:
+gtest_output_test_.cc:#: i = 2
+[ FAILED ] SCOPED_TRACETest.WorksInLoop
+[ RUN ] SCOPED_TRACETest.WorksInSubroutine
+(expected to fail)
+gtest_output_test_.cc:#: Failure
+Value of: n
+ Actual: 1
+Expected: 2
+Google Test trace:
+gtest_output_test_.cc:#: n = 1
+gtest_output_test_.cc:#: Failure
+Value of: n
+ Actual: 2
+Expected: 1
+Google Test trace:
+gtest_output_test_.cc:#: n = 2
+[ FAILED ] SCOPED_TRACETest.WorksInSubroutine
+[ RUN ] SCOPED_TRACETest.CanBeNested
+(expected to fail)
+gtest_output_test_.cc:#: Failure
+Value of: n
+ Actual: 2
+Expected: 1
+Google Test trace:
+gtest_output_test_.cc:#: n = 2
+gtest_output_test_.cc:#:
+[ FAILED ] SCOPED_TRACETest.CanBeNested
+[ RUN ] SCOPED_TRACETest.CanBeRepeated
+(expected to fail)
+gtest_output_test_.cc:#: Failure
+Failed
+This failure is expected, and should contain trace point A.
+Google Test trace:
+gtest_output_test_.cc:#: A
+gtest_output_test_.cc:#: Failure
+Failed
+This failure is expected, and should contain trace point A and B.
+Google Test trace:
+gtest_output_test_.cc:#: B
+gtest_output_test_.cc:#: A
+gtest_output_test_.cc:#: Failure
+Failed
+This failure is expected, and should contain trace point A, B, and C.
+Google Test trace:
+gtest_output_test_.cc:#: C
+gtest_output_test_.cc:#: B
+gtest_output_test_.cc:#: A
+gtest_output_test_.cc:#: Failure
+Failed
+This failure is expected, and should contain trace point A, B, and D.
+Google Test trace:
+gtest_output_test_.cc:#: D
+gtest_output_test_.cc:#: B
+gtest_output_test_.cc:#: A
+[ FAILED ] SCOPED_TRACETest.CanBeRepeated
+[ RUN ] SCOPED_TRACETest.WorksConcurrently
+(expecting 6 failures)
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #1 (in thread B, only trace B alive).
+Google Test trace:
+gtest_output_test_.cc:#: Trace B
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #2 (in thread A, trace A & B both alive).
+Google Test trace:
+gtest_output_test_.cc:#: Trace A
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #3 (in thread B, trace A & B both alive).
+Google Test trace:
+gtest_output_test_.cc:#: Trace B
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #4 (in thread B, only trace A alive).
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #5 (in thread A, only trace A alive).
+Google Test trace:
+gtest_output_test_.cc:#: Trace A
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #6 (in thread A, no trace alive).
+[ FAILED ] SCOPED_TRACETest.WorksConcurrently
+[----------] 1 test from NonFatalFailureInFixtureConstructorTest
+[ RUN ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
+(expecting 5 failures)
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #1, in the test fixture c'tor.
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #2, in SetUp().
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #3, in the test body.
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #4, in TearDown.
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #5, in the test fixture d'tor.
+[ FAILED ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
+[----------] 1 test from FatalFailureInFixtureConstructorTest
+[ RUN ] FatalFailureInFixtureConstructorTest.FailureInConstructor
+(expecting 2 failures)
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #1, in the test fixture c'tor.
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #2, in the test fixture d'tor.
+[ FAILED ] FatalFailureInFixtureConstructorTest.FailureInConstructor
+[----------] 1 test from NonFatalFailureInSetUpTest
+[ RUN ] NonFatalFailureInSetUpTest.FailureInSetUp
+(expecting 4 failures)
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #1, in SetUp().
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #2, in the test function.
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #3, in TearDown().
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #4, in the test fixture d'tor.
+[ FAILED ] NonFatalFailureInSetUpTest.FailureInSetUp
+[----------] 1 test from FatalFailureInSetUpTest
+[ RUN ] FatalFailureInSetUpTest.FailureInSetUp
+(expecting 3 failures)
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #1, in SetUp().
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #2, in TearDown().
+gtest_output_test_.cc:#: Failure
+Failed
+Expected failure #3, in the test fixture d'tor.
+[ FAILED ] FatalFailureInSetUpTest.FailureInSetUp
+[----------] 4 tests from MixedUpTestCaseTest
+[ RUN ] MixedUpTestCaseTest.FirstTestFromNamespaceFoo
+[ OK ] MixedUpTestCaseTest.FirstTestFromNamespaceFoo
+[ RUN ] MixedUpTestCaseTest.SecondTestFromNamespaceFoo
+[ OK ] MixedUpTestCaseTest.SecondTestFromNamespaceFoo
+[ RUN ] MixedUpTestCaseTest.ThisShouldFail
+gtest.cc:#: Failure
+Failed
+All tests in the same test case must use the same test fixture
+class. However, in test case MixedUpTestCaseTest,
+you defined test FirstTestFromNamespaceFoo and test ThisShouldFail
+using two different test fixture classes. This can happen if
+the two classes are from different namespaces or translation
+units and have the same name. You should probably rename one
+of the classes to put the tests into different test cases.
+[ FAILED ] MixedUpTestCaseTest.ThisShouldFail
+[ RUN ] MixedUpTestCaseTest.ThisShouldFailToo
+gtest.cc:#: Failure
+Failed
+All tests in the same test case must use the same test fixture
+class. However, in test case MixedUpTestCaseTest,
+you defined test FirstTestFromNamespaceFoo and test ThisShouldFailToo
+using two different test fixture classes. This can happen if
+the two classes are from different namespaces or translation
+units and have the same name. You should probably rename one
+of the classes to put the tests into different test cases.
+[ FAILED ] MixedUpTestCaseTest.ThisShouldFailToo
+[----------] 2 tests from MixedUpTestCaseWithSameTestNameTest
+[ RUN ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[ OK ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[ RUN ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+gtest.cc:#: Failure
+Failed
+All tests in the same test case must use the same test fixture
+class. However, in test case MixedUpTestCaseWithSameTestNameTest,
+you defined test TheSecondTestWithThisNameShouldFail and test TheSecondTestWithThisNameShouldFail
+using two different test fixture classes. This can happen if
+the two classes are from different namespaces or translation
+units and have the same name. You should probably rename one
+of the classes to put the tests into different test cases.
+[ FAILED ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[----------] 2 tests from TEST_F_before_TEST_in_same_test_case
+[ RUN ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F
+[ OK ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F
+[ RUN ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
+gtest.cc:#: Failure
+Failed
+All tests in the same test case must use the same test fixture
+class, so mixing TEST_F and TEST in the same test case is
+illegal. In test case TEST_F_before_TEST_in_same_test_case,
+test DefinedUsingTEST_F is defined using TEST_F but
+test DefinedUsingTESTAndShouldFail is defined using TEST. You probably
+want to change the TEST to TEST_F or move it to another test
+case.
+[ FAILED ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
+[----------] 2 tests from TEST_before_TEST_F_in_same_test_case
+[ RUN ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST
+[ OK ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST
+[ RUN ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
+gtest.cc:#: Failure
+Failed
+All tests in the same test case must use the same test fixture
+class, so mixing TEST_F and TEST in the same test case is
+illegal. In test case TEST_before_TEST_F_in_same_test_case,
+test DefinedUsingTEST_FAndShouldFail is defined using TEST_F but
+test DefinedUsingTEST is defined using TEST. You probably
+want to change the TEST to TEST_F or move it to another test
+case.
+[ FAILED ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
+[----------] 8 tests from ExpectNonfatalFailureTest
+[ RUN ] ExpectNonfatalFailureTest.CanReferenceGlobalVariables
+[ OK ] ExpectNonfatalFailureTest.CanReferenceGlobalVariables
+[ RUN ] ExpectNonfatalFailureTest.CanReferenceLocalVariables
+[ OK ] ExpectNonfatalFailureTest.CanReferenceLocalVariables
+[ RUN ] ExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure
+[ OK ] ExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure
+[ RUN ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
+(expecting a failure)
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
+[ RUN ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
+(expecting a failure)
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure
+ Actual: 2 failures
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure 1.
+
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure 2.
+
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
+[ RUN ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
+(expecting a failure)
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure
+ Actual:
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
+[ RUN ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
+(expecting a failure)
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
+[ RUN ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
+(expecting a failure)
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
+[----------] 8 tests from ExpectFatalFailureTest
+[ RUN ] ExpectFatalFailureTest.CanReferenceGlobalVariables
+[ OK ] ExpectFatalFailureTest.CanReferenceGlobalVariables
+[ RUN ] ExpectFatalFailureTest.CanReferenceLocalStaticVariables
+[ OK ] ExpectFatalFailureTest.CanReferenceLocalStaticVariables
+[ RUN ] ExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure
+[ OK ] ExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure
+[ RUN ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
+(expecting a failure)
+gtest.cc:#: Failure
+Expected: 1 fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
+[ RUN ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
+(expecting a failure)
+gtest.cc:#: Failure
+Expected: 1 fatal failure
+ Actual: 2 failures
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
+[ RUN ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
+(expecting a failure)
+gtest.cc:#: Failure
+Expected: 1 fatal failure
+ Actual:
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure.
+
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
+[ RUN ] ExpectFatalFailureTest.FailsWhenStatementReturns
+(expecting a failure)
+gtest.cc:#: Failure
+Expected: 1 fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectFatalFailureTest.FailsWhenStatementReturns
+[ RUN ] ExpectFatalFailureTest.FailsWhenStatementThrows
+(expecting a failure)
+gtest.cc:#: Failure
+Expected: 1 fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectFatalFailureTest.FailsWhenStatementThrows
+[----------] 2 tests from TypedTest/0, where TypeParam = int
+[ RUN ] TypedTest/0.Success
+[ OK ] TypedTest/0.Success
+[ RUN ] TypedTest/0.Failure
+gtest_output_test_.cc:#: Failure
+Value of: TypeParam()
+ Actual: 0
+Expected: 1
+Expected failure
+[ FAILED ] TypedTest/0.Failure
+[----------] 2 tests from Unsigned/TypedTestP/0, where TypeParam = unsigned char
+[ RUN ] Unsigned/TypedTestP/0.Success
+[ OK ] Unsigned/TypedTestP/0.Success
+[ RUN ] Unsigned/TypedTestP/0.Failure
+gtest_output_test_.cc:#: Failure
+Value of: TypeParam()
+ Actual: \0
+Expected: 1U
+Which is: 1
+Expected failure
+[ FAILED ] Unsigned/TypedTestP/0.Failure
+[----------] 2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned int
+[ RUN ] Unsigned/TypedTestP/1.Success
+[ OK ] Unsigned/TypedTestP/1.Success
+[ RUN ] Unsigned/TypedTestP/1.Failure
+gtest_output_test_.cc:#: Failure
+Value of: TypeParam()
+ Actual: 0
+Expected: 1U
+Which is: 1
+Expected failure
+[ FAILED ] Unsigned/TypedTestP/1.Failure
+[----------] 4 tests from ExpectFailureTest
+[ RUN ] ExpectFailureTest.ExpectFatalFailure
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 fatal failure
+ Actual:
+gtest_output_test_.cc:#: Success:
+Succeeded
+
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 fatal failure
+ Actual:
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure.
+
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 fatal failure containing "Some other fatal failure expected."
+ Actual:
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+[ FAILED ] ExpectFailureTest.ExpectFatalFailure
+[ RUN ] ExpectFailureTest.ExpectNonFatalFailure
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure
+ Actual:
+gtest_output_test_.cc:#: Success:
+Succeeded
+
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure
+ Actual:
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure containing "Some other non-fatal failure."
+ Actual:
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure.
+
+[ FAILED ] ExpectFailureTest.ExpectNonFatalFailure
+[ RUN ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 fatal failure
+ Actual:
+gtest_output_test_.cc:#: Success:
+Succeeded
+
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 fatal failure
+ Actual:
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure.
+
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 fatal failure containing "Some other fatal failure expected."
+ Actual:
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+[ FAILED ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
+[ RUN ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure
+ Actual:
+gtest_output_test_.cc:#: Success:
+Succeeded
+
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure
+ Actual:
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+(expecting 1 failure)
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure containing "Some other non-fatal failure."
+ Actual:
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure.
+
+[ FAILED ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
+[----------] 2 tests from ExpectFailureWithThreadsTest
+[ RUN ] ExpectFailureWithThreadsTest.ExpectFatalFailure
+(expecting 2 failures)
+gtest_output_test_.cc:#: Failure
+Failed
+Expected fatal failure.
+gtest.cc:#: Failure
+Expected: 1 fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectFailureWithThreadsTest.ExpectFatalFailure
+[ RUN ] ExpectFailureWithThreadsTest.ExpectNonFatalFailure
+(expecting 2 failures)
+gtest_output_test_.cc:#: Failure
+Failed
+Expected non-fatal failure.
+gtest.cc:#: Failure
+Expected: 1 non-fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectFailureWithThreadsTest.ExpectNonFatalFailure
+[----------] 1 test from ScopedFakeTestPartResultReporterTest
+[ RUN ] ScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread
+(expecting 2 failures)
+gtest_output_test_.cc:#: Failure
+Failed
+Expected fatal failure.
+gtest_output_test_.cc:#: Failure
+Failed
+Expected non-fatal failure.
+[ FAILED ] ScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread
+[----------] Global test environment tear-down
+BarEnvironment::TearDown() called.
+gtest_output_test_.cc:#: Failure
+Failed
+Expected non-fatal failure.
+FooEnvironment::TearDown() called.
+gtest_output_test_.cc:#: Failure
+Failed
+Expected fatal failure.
+[==========] 60 tests from 25 test cases ran.
+[ PASSED ] 21 tests.
+[ FAILED ] 39 tests, listed below:
+[ FAILED ] FatalFailureTest.FatalFailureInSubroutine
+[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine
+[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine
+[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions
+[ FAILED ] SCOPED_TRACETest.ObeysScopes
+[ FAILED ] SCOPED_TRACETest.WorksInLoop
+[ FAILED ] SCOPED_TRACETest.WorksInSubroutine
+[ FAILED ] SCOPED_TRACETest.CanBeNested
+[ FAILED ] SCOPED_TRACETest.CanBeRepeated
+[ FAILED ] SCOPED_TRACETest.WorksConcurrently
+[ FAILED ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
+[ FAILED ] FatalFailureInFixtureConstructorTest.FailureInConstructor
+[ FAILED ] NonFatalFailureInSetUpTest.FailureInSetUp
+[ FAILED ] FatalFailureInSetUpTest.FailureInSetUp
+[ FAILED ] MixedUpTestCaseTest.ThisShouldFail
+[ FAILED ] MixedUpTestCaseTest.ThisShouldFailToo
+[ FAILED ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[ FAILED ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
+[ FAILED ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
+[ FAILED ] ExpectFatalFailureTest.FailsWhenStatementReturns
+[ FAILED ] ExpectFatalFailureTest.FailsWhenStatementThrows
+[ FAILED ] TypedTest/0.Failure, where TypeParam = int
+[ FAILED ] Unsigned/TypedTestP/0.Failure, where TypeParam = unsigned char
+[ FAILED ] Unsigned/TypedTestP/1.Failure, where TypeParam = unsigned int
+[ FAILED ] ExpectFailureTest.ExpectFatalFailure
+[ FAILED ] ExpectFailureTest.ExpectNonFatalFailure
+[ FAILED ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
+[ FAILED ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
+[ FAILED ] ExpectFailureWithThreadsTest.ExpectFatalFailure
+[ FAILED ] ExpectFailureWithThreadsTest.ExpectNonFatalFailure
+[ FAILED ] ScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread
+
+39 FAILED TESTS
+ YOU HAVE 1 DISABLED TEST
+
+Note: Google Test filter = FatalFailureTest.*:LoggingTest.*
+[==========] Running 4 tests from 2 test cases.
+[----------] Global test environment set-up.
+[----------] 3 tests from FatalFailureTest
+[ RUN ] FatalFailureTest.FatalFailureInSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: Failure
+Value of: x
+ Actual: 2
+Expected: 1
+[ FAILED ] FatalFailureTest.FatalFailureInSubroutine (? ms)
+[ RUN ] FatalFailureTest.FatalFailureInNestedSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: Failure
+Value of: x
+ Actual: 2
+Expected: 1
+[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine (? ms)
+[ RUN ] FatalFailureTest.NonfatalFailureInSubroutine
+(expecting a failure on false)
+gtest_output_test_.cc:#: Failure
+Value of: false
+ Actual: false
+Expected: true
+[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine (? ms)
+[----------] 3 tests from FatalFailureTest (? ms total)
+
+[----------] 1 test from LoggingTest
+[ RUN ] LoggingTest.InterleavingLoggingAndAssertions
+(expecting 2 failures on (3) >= (a[i]))
+i == 0
+i == 1
+gtest_output_test_.cc:#: Failure
+Expected: (3) >= (a[i]), actual: 3 vs 9
+i == 2
+i == 3
+gtest_output_test_.cc:#: Failure
+Expected: (3) >= (a[i]), actual: 3 vs 6
+[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions (? ms)
+[----------] 1 test from LoggingTest (? ms total)
+
+[----------] Global test environment tear-down
+[==========] 4 tests from 2 test cases ran. (? ms total)
+[ PASSED ] 0 tests.
+[ FAILED ] 4 tests, listed below:
+[ FAILED ] FatalFailureTest.FatalFailureInSubroutine
+[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine
+[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine
+[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions
+
+ 4 FAILED TESTS
+ YOU HAVE 1 DISABLED TEST
+
+Note: Google Test filter = *DISABLED_*
+[==========] Running 1 test from 1 test case.
+[----------] Global test environment set-up.
+[----------] 1 test from DisabledTestsWarningTest
+[ RUN ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning
+[ OK ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning
+[----------] Global test environment tear-down
+[==========] 1 test from 1 test case ran.
+[ PASSED ] 1 test.
+Note: Google Test filter = PassingTest.*
+Note: This is test shard 1 of 2.
+[==========] Running 1 test from 1 test case.
+[----------] Global test environment set-up.
+[----------] 1 test from PassingTest
+[ RUN ] PassingTest.PassingTest2
+[ OK ] PassingTest.PassingTest2
+[----------] Global test environment tear-down
+[==========] 1 test from 1 test case ran.
+[ PASSED ] 1 test.
+
+ YOU HAVE 1 DISABLED TEST
+
diff --git a/Source/ThirdParty/gtest/test/gtest_output_test_golden_win.txt b/Source/ThirdParty/gtest/test/gtest_output_test_golden_win.txt
new file mode 100644
index 000000000..313c3aafc
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_output_test_golden_win.txt
@@ -0,0 +1,605 @@
+The non-test part of the code is expected to have 2 failures.
+
+gtest_output_test_.cc:#: error: Value of: false
+ Actual: false
+Expected: true
+gtest_output_test_.cc:#: error: Value of: 3
+Expected: 2
+[==========] Running 61 tests from 27 test cases.
+[----------] Global test environment set-up.
+FooEnvironment::SetUp() called.
+BarEnvironment::SetUp() called.
+[----------] 1 test from ADeathTest
+[ RUN ] ADeathTest.ShouldRunFirst
+[ OK ] ADeathTest.ShouldRunFirst
+[----------] 1 test from ATypedDeathTest/0, where TypeParam = int
+[ RUN ] ATypedDeathTest/0.ShouldRunFirst
+[ OK ] ATypedDeathTest/0.ShouldRunFirst
+[----------] 1 test from ATypedDeathTest/1, where TypeParam = double
+[ RUN ] ATypedDeathTest/1.ShouldRunFirst
+[ OK ] ATypedDeathTest/1.ShouldRunFirst
+[----------] 1 test from My/ATypeParamDeathTest/0, where TypeParam = int
+[ RUN ] My/ATypeParamDeathTest/0.ShouldRunFirst
+[ OK ] My/ATypeParamDeathTest/0.ShouldRunFirst
+[----------] 1 test from My/ATypeParamDeathTest/1, where TypeParam = double
+[ RUN ] My/ATypeParamDeathTest/1.ShouldRunFirst
+[ OK ] My/ATypeParamDeathTest/1.ShouldRunFirst
+[----------] 2 tests from PassingTest
+[ RUN ] PassingTest.PassingTest1
+[ OK ] PassingTest.PassingTest1
+[ RUN ] PassingTest.PassingTest2
+[ OK ] PassingTest.PassingTest2
+[----------] 3 tests from FatalFailureTest
+[ RUN ] FatalFailureTest.FatalFailureInSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: error: Value of: x
+ Actual: 2
+Expected: 1
+[ FAILED ] FatalFailureTest.FatalFailureInSubroutine
+[ RUN ] FatalFailureTest.FatalFailureInNestedSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: error: Value of: x
+ Actual: 2
+Expected: 1
+[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine
+[ RUN ] FatalFailureTest.NonfatalFailureInSubroutine
+(expecting a failure on false)
+gtest_output_test_.cc:#: error: Value of: false
+ Actual: false
+Expected: true
+[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine
+[----------] 1 test from LoggingTest
+[ RUN ] LoggingTest.InterleavingLoggingAndAssertions
+(expecting 2 failures on (3) >= (a[i]))
+i == 0
+i == 1
+gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 9
+i == 2
+i == 3
+gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 6
+[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions
+[----------] 5 tests from SCOPED_TRACETest
+[ RUN ] SCOPED_TRACETest.ObeysScopes
+(expected to fail)
+gtest_output_test_.cc:#: error: Failed
+This failure is expected, and shouldn't have a trace.
+gtest_output_test_.cc:#: error: Failed
+This failure is expected, and should have a trace.
+Google Test trace:
+gtest_output_test_.cc:#: Expected trace
+gtest_output_test_.cc:#: error: Failed
+This failure is expected, and shouldn't have a trace.
+[ FAILED ] SCOPED_TRACETest.ObeysScopes
+[ RUN ] SCOPED_TRACETest.WorksInLoop
+(expected to fail)
+gtest_output_test_.cc:#: error: Value of: n
+ Actual: 1
+Expected: 2
+Google Test trace:
+gtest_output_test_.cc:#: i = 1
+gtest_output_test_.cc:#: error: Value of: n
+ Actual: 2
+Expected: 1
+Google Test trace:
+gtest_output_test_.cc:#: i = 2
+[ FAILED ] SCOPED_TRACETest.WorksInLoop
+[ RUN ] SCOPED_TRACETest.WorksInSubroutine
+(expected to fail)
+gtest_output_test_.cc:#: error: Value of: n
+ Actual: 1
+Expected: 2
+Google Test trace:
+gtest_output_test_.cc:#: n = 1
+gtest_output_test_.cc:#: error: Value of: n
+ Actual: 2
+Expected: 1
+Google Test trace:
+gtest_output_test_.cc:#: n = 2
+[ FAILED ] SCOPED_TRACETest.WorksInSubroutine
+[ RUN ] SCOPED_TRACETest.CanBeNested
+(expected to fail)
+gtest_output_test_.cc:#: error: Value of: n
+ Actual: 2
+Expected: 1
+Google Test trace:
+gtest_output_test_.cc:#: n = 2
+gtest_output_test_.cc:#:
+[ FAILED ] SCOPED_TRACETest.CanBeNested
+[ RUN ] SCOPED_TRACETest.CanBeRepeated
+(expected to fail)
+gtest_output_test_.cc:#: error: Failed
+This failure is expected, and should contain trace point A.
+Google Test trace:
+gtest_output_test_.cc:#: A
+gtest_output_test_.cc:#: error: Failed
+This failure is expected, and should contain trace point A and B.
+Google Test trace:
+gtest_output_test_.cc:#: B
+gtest_output_test_.cc:#: A
+gtest_output_test_.cc:#: error: Failed
+This failure is expected, and should contain trace point A, B, and C.
+Google Test trace:
+gtest_output_test_.cc:#: C
+gtest_output_test_.cc:#: B
+gtest_output_test_.cc:#: A
+gtest_output_test_.cc:#: error: Failed
+This failure is expected, and should contain trace point A, B, and D.
+Google Test trace:
+gtest_output_test_.cc:#: D
+gtest_output_test_.cc:#: B
+gtest_output_test_.cc:#: A
+[ FAILED ] SCOPED_TRACETest.CanBeRepeated
+[----------] 1 test from NonFatalFailureInFixtureConstructorTest
+[ RUN ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
+(expecting 5 failures)
+gtest_output_test_.cc:#: error: Failed
+Expected failure #1, in the test fixture c'tor.
+gtest_output_test_.cc:#: error: Failed
+Expected failure #2, in SetUp().
+gtest_output_test_.cc:#: error: Failed
+Expected failure #3, in the test body.
+gtest_output_test_.cc:#: error: Failed
+Expected failure #4, in TearDown.
+gtest_output_test_.cc:#: error: Failed
+Expected failure #5, in the test fixture d'tor.
+[ FAILED ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
+[----------] 1 test from FatalFailureInFixtureConstructorTest
+[ RUN ] FatalFailureInFixtureConstructorTest.FailureInConstructor
+(expecting 2 failures)
+gtest_output_test_.cc:#: error: Failed
+Expected failure #1, in the test fixture c'tor.
+gtest_output_test_.cc:#: error: Failed
+Expected failure #2, in the test fixture d'tor.
+[ FAILED ] FatalFailureInFixtureConstructorTest.FailureInConstructor
+[----------] 1 test from NonFatalFailureInSetUpTest
+[ RUN ] NonFatalFailureInSetUpTest.FailureInSetUp
+(expecting 4 failures)
+gtest_output_test_.cc:#: error: Failed
+Expected failure #1, in SetUp().
+gtest_output_test_.cc:#: error: Failed
+Expected failure #2, in the test function.
+gtest_output_test_.cc:#: error: Failed
+Expected failure #3, in TearDown().
+gtest_output_test_.cc:#: error: Failed
+Expected failure #4, in the test fixture d'tor.
+[ FAILED ] NonFatalFailureInSetUpTest.FailureInSetUp
+[----------] 1 test from FatalFailureInSetUpTest
+[ RUN ] FatalFailureInSetUpTest.FailureInSetUp
+(expecting 3 failures)
+gtest_output_test_.cc:#: error: Failed
+Expected failure #1, in SetUp().
+gtest_output_test_.cc:#: error: Failed
+Expected failure #2, in TearDown().
+gtest_output_test_.cc:#: error: Failed
+Expected failure #3, in the test fixture d'tor.
+[ FAILED ] FatalFailureInSetUpTest.FailureInSetUp
+[----------] 1 test from ExceptionInFixtureCtorTest
+[ RUN ] ExceptionInFixtureCtorTest.ExceptionInFixtureCtor
+(expecting a failure on thrown exception in the test fixture's constructor)
+unknown file: error: Exception thrown with code 0xc0000005 in the test fixture's constructor.
+[----------] 1 test from ExceptionInSetUpTest
+[ RUN ] ExceptionInSetUpTest.ExceptionInSetUp
+(expecting 3 failures)
+unknown file: error: Exception thrown with code 0xc0000005 in SetUp().
+gtest_output_test_.cc:#: error: Failed
+Expected failure #2, in TearDown().
+gtest_output_test_.cc:#: error: Failed
+Expected failure #3, in the test fixture d'tor.
+[ FAILED ] ExceptionInSetUpTest.ExceptionInSetUp
+[----------] 2 tests from ExceptionInTestFunctionTest
+[ RUN ] ExceptionInTestFunctionTest.SEH
+(expecting 3 failures)
+unknown file: error: Exception thrown with code 0xc0000005 in the test body.
+gtest_output_test_.cc:#: error: Failed
+Expected failure #2, in TearDown().
+gtest_output_test_.cc:#: error: Failed
+Expected failure #3, in the test fixture d'tor.
+[ FAILED ] ExceptionInTestFunctionTest.SEH
+[ RUN ] ExceptionInTestFunctionTest.CppException
+unknown file: error: Exception thrown with code 0xe06d7363 in the test body.
+gtest_output_test_.cc:#: error: Failed
+Expected failure #2, in TearDown().
+gtest_output_test_.cc:#: error: Failed
+Expected failure #3, in the test fixture d'tor.
+[ FAILED ] ExceptionInTestFunctionTest.CppException
+[----------] 1 test from ExceptionInTearDownTest
+[ RUN ] ExceptionInTearDownTest.ExceptionInTearDown
+(expecting 2 failures)
+unknown file: error: Exception thrown with code 0xe06d7363 in TearDown().
+gtest_output_test_.cc:#: error: Failed
+Expected failure #2, in the test fixture d'tor.
+[ FAILED ] ExceptionInTearDownTest.ExceptionInTearDown
+[----------] 4 tests from MixedUpTestCaseTest
+[ RUN ] MixedUpTestCaseTest.FirstTestFromNamespaceFoo
+[ OK ] MixedUpTestCaseTest.FirstTestFromNamespaceFoo
+[ RUN ] MixedUpTestCaseTest.SecondTestFromNamespaceFoo
+[ OK ] MixedUpTestCaseTest.SecondTestFromNamespaceFoo
+[ RUN ] MixedUpTestCaseTest.ThisShouldFail
+gtest.cc:#: error: Failed
+All tests in the same test case must use the same test fixture
+class. However, in test case MixedUpTestCaseTest,
+you defined test FirstTestFromNamespaceFoo and test ThisShouldFail
+using two different test fixture classes. This can happen if
+the two classes are from different namespaces or translation
+units and have the same name. You should probably rename one
+of the classes to put the tests into different test cases.
+[ FAILED ] MixedUpTestCaseTest.ThisShouldFail
+[ RUN ] MixedUpTestCaseTest.ThisShouldFailToo
+gtest.cc:#: error: Failed
+All tests in the same test case must use the same test fixture
+class. However, in test case MixedUpTestCaseTest,
+you defined test FirstTestFromNamespaceFoo and test ThisShouldFailToo
+using two different test fixture classes. This can happen if
+the two classes are from different namespaces or translation
+units and have the same name. You should probably rename one
+of the classes to put the tests into different test cases.
+[ FAILED ] MixedUpTestCaseTest.ThisShouldFailToo
+[----------] 2 tests from MixedUpTestCaseWithSameTestNameTest
+[ RUN ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[ OK ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[ RUN ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+gtest.cc:#: error: Failed
+All tests in the same test case must use the same test fixture
+class. However, in test case MixedUpTestCaseWithSameTestNameTest,
+you defined test TheSecondTestWithThisNameShouldFail and test TheSecondTestWithThisNameShouldFail
+using two different test fixture classes. This can happen if
+the two classes are from different namespaces or translation
+units and have the same name. You should probably rename one
+of the classes to put the tests into different test cases.
+[ FAILED ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[----------] 2 tests from TEST_F_before_TEST_in_same_test_case
+[ RUN ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F
+[ OK ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F
+[ RUN ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
+gtest.cc:#: error: Failed
+All tests in the same test case must use the same test fixture
+class, so mixing TEST_F and TEST in the same test case is
+illegal. In test case TEST_F_before_TEST_in_same_test_case,
+test DefinedUsingTEST_F is defined using TEST_F but
+test DefinedUsingTESTAndShouldFail is defined using TEST. You probably
+want to change the TEST to TEST_F or move it to another test
+case.
+[ FAILED ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
+[----------] 2 tests from TEST_before_TEST_F_in_same_test_case
+[ RUN ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST
+[ OK ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST
+[ RUN ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
+gtest.cc:#: error: Failed
+All tests in the same test case must use the same test fixture
+class, so mixing TEST_F and TEST in the same test case is
+illegal. In test case TEST_before_TEST_F_in_same_test_case,
+test DefinedUsingTEST_FAndShouldFail is defined using TEST_F but
+test DefinedUsingTEST is defined using TEST. You probably
+want to change the TEST to TEST_F or move it to another test
+case.
+[ FAILED ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
+[----------] 8 tests from ExpectNonfatalFailureTest
+[ RUN ] ExpectNonfatalFailureTest.CanReferenceGlobalVariables
+[ OK ] ExpectNonfatalFailureTest.CanReferenceGlobalVariables
+[ RUN ] ExpectNonfatalFailureTest.CanReferenceLocalVariables
+[ OK ] ExpectNonfatalFailureTest.CanReferenceLocalVariables
+[ RUN ] ExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure
+[ OK ] ExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure
+[ RUN ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
+(expecting a failure)
+gtest.cc:#: error: Expected: 1 non-fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
+[ RUN ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
+(expecting a failure)
+gtest.cc:#: error: Expected: 1 non-fatal failure
+ Actual: 2 failures
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure 1.
+
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure 2.
+
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
+[ RUN ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
+(expecting a failure)
+gtest.cc:#: error: Expected: 1 non-fatal failure
+ Actual:
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
+[ RUN ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
+(expecting a failure)
+gtest.cc:#: error: Expected: 1 non-fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
+[ RUN ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
+(expecting a failure)
+gtest.cc:#: error: Expected: 1 non-fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
+[----------] 8 tests from ExpectFatalFailureTest
+[ RUN ] ExpectFatalFailureTest.CanReferenceGlobalVariables
+[ OK ] ExpectFatalFailureTest.CanReferenceGlobalVariables
+[ RUN ] ExpectFatalFailureTest.CanReferenceLocalStaticVariables
+[ OK ] ExpectFatalFailureTest.CanReferenceLocalStaticVariables
+[ RUN ] ExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure
+[ OK ] ExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure
+[ RUN ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
+(expecting a failure)
+gtest.cc:#: error: Expected: 1 fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
+[ RUN ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
+(expecting a failure)
+gtest.cc:#: error: Expected: 1 fatal failure
+ Actual: 2 failures
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
+[ RUN ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
+(expecting a failure)
+gtest.cc:#: error: Expected: 1 fatal failure
+ Actual:
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure.
+
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
+[ RUN ] ExpectFatalFailureTest.FailsWhenStatementReturns
+(expecting a failure)
+gtest.cc:#: error: Expected: 1 fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectFatalFailureTest.FailsWhenStatementReturns
+[ RUN ] ExpectFatalFailureTest.FailsWhenStatementThrows
+(expecting a failure)
+gtest.cc:#: error: Expected: 1 fatal failure
+ Actual: 0 failures
+[ FAILED ] ExpectFatalFailureTest.FailsWhenStatementThrows
+[----------] 2 tests from TypedTest/0, where TypeParam = int
+[ RUN ] TypedTest/0.Success
+[ OK ] TypedTest/0.Success
+[ RUN ] TypedTest/0.Failure
+gtest_output_test_.cc:#: error: Value of: TypeParam()
+ Actual: 0
+Expected: 1
+Expected failure
+[ FAILED ] TypedTest/0.Failure
+[----------] 2 tests from Unsigned/TypedTestP/0, where TypeParam = unsigned char
+[ RUN ] Unsigned/TypedTestP/0.Success
+[ OK ] Unsigned/TypedTestP/0.Success
+[ RUN ] Unsigned/TypedTestP/0.Failure
+gtest_output_test_.cc:#: error: Value of: TypeParam()
+ Actual: \0
+Expected: 1U
+Which is: 1
+Expected failure
+[ FAILED ] Unsigned/TypedTestP/0.Failure
+[----------] 2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned int
+[ RUN ] Unsigned/TypedTestP/1.Success
+[ OK ] Unsigned/TypedTestP/1.Success
+[ RUN ] Unsigned/TypedTestP/1.Failure
+gtest_output_test_.cc:#: error: Value of: TypeParam()
+ Actual: 0
+Expected: 1U
+Which is: 1
+Expected failure
+[ FAILED ] Unsigned/TypedTestP/1.Failure
+[----------] 4 tests from ExpectFailureTest
+[ RUN ] ExpectFailureTest.ExpectFatalFailure
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 fatal failure
+ Actual:
+gtest_output_test_.cc:#: Success:
+Succeeded
+
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 fatal failure
+ Actual:
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure.
+
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 fatal failure containing "Some other fatal failure expected."
+ Actual:
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+[ FAILED ] ExpectFailureTest.ExpectFatalFailure
+[ RUN ] ExpectFailureTest.ExpectNonFatalFailure
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 non-fatal failure
+ Actual:
+gtest_output_test_.cc:#: Success:
+Succeeded
+
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 non-fatal failure
+ Actual:
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 non-fatal failure containing "Some other non-fatal failure."
+ Actual:
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure.
+
+[ FAILED ] ExpectFailureTest.ExpectNonFatalFailure
+[ RUN ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 fatal failure
+ Actual:
+gtest_output_test_.cc:#: Success:
+Succeeded
+
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 fatal failure
+ Actual:
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure.
+
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 fatal failure containing "Some other fatal failure expected."
+ Actual:
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+[ FAILED ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
+[ RUN ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 non-fatal failure
+ Actual:
+gtest_output_test_.cc:#: Success:
+Succeeded
+
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 non-fatal failure
+ Actual:
+gtest_output_test_.cc:#: Fatal failure:
+Failed
+Expected fatal failure.
+
+(expecting 1 failure)
+gtest.cc:#: error: Expected: 1 non-fatal failure containing "Some other non-fatal failure."
+ Actual:
+gtest_output_test_.cc:#: Non-fatal failure:
+Failed
+Expected non-fatal failure.
+
+[ FAILED ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
+[----------] Global test environment tear-down
+BarEnvironment::TearDown() called.
+gtest_output_test_.cc:#: error: Failed
+Expected non-fatal failure.
+FooEnvironment::TearDown() called.
+gtest_output_test_.cc:#: error: Failed
+Expected fatal failure.
+[==========] 61 tests from 27 test cases ran.
+[ PASSED ] 21 tests.
+[ FAILED ] 40 tests, listed below:
+[ FAILED ] FatalFailureTest.FatalFailureInSubroutine
+[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine
+[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine
+[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions
+[ FAILED ] SCOPED_TRACETest.ObeysScopes
+[ FAILED ] SCOPED_TRACETest.WorksInLoop
+[ FAILED ] SCOPED_TRACETest.WorksInSubroutine
+[ FAILED ] SCOPED_TRACETest.CanBeNested
+[ FAILED ] SCOPED_TRACETest.CanBeRepeated
+[ FAILED ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
+[ FAILED ] FatalFailureInFixtureConstructorTest.FailureInConstructor
+[ FAILED ] NonFatalFailureInSetUpTest.FailureInSetUp
+[ FAILED ] FatalFailureInSetUpTest.FailureInSetUp
+[ FAILED ] ExceptionInFixtureCtorTest.ExceptionInFixtureCtor
+[ FAILED ] ExceptionInSetUpTest.ExceptionInSetUp
+[ FAILED ] ExceptionInTestFunctionTest.SEH
+[ FAILED ] ExceptionInTestFunctionTest.CppException
+[ FAILED ] ExceptionInTearDownTest.ExceptionInTearDown
+[ FAILED ] MixedUpTestCaseTest.ThisShouldFail
+[ FAILED ] MixedUpTestCaseTest.ThisShouldFailToo
+[ FAILED ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[ FAILED ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
+[ FAILED ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
+[ FAILED ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
+[ FAILED ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
+[ FAILED ] ExpectFatalFailureTest.FailsWhenStatementReturns
+[ FAILED ] ExpectFatalFailureTest.FailsWhenStatementThrows
+[ FAILED ] TypedTest/0.Failure, where TypeParam = int
+[ FAILED ] Unsigned/TypedTestP/0.Failure, where TypeParam = unsigned char
+[ FAILED ] Unsigned/TypedTestP/1.Failure, where TypeParam = unsigned int
+[ FAILED ] ExpectFailureTest.ExpectFatalFailure
+[ FAILED ] ExpectFailureTest.ExpectNonFatalFailure
+[ FAILED ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
+[ FAILED ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
+
+40 FAILED TESTS
+ YOU HAVE 1 DISABLED TEST
+
+Note: Google Test filter = FatalFailureTest.*:LoggingTest.*
+[==========] Running 4 tests from 2 test cases.
+[----------] Global test environment set-up.
+[----------] 3 tests from FatalFailureTest
+[ RUN ] FatalFailureTest.FatalFailureInSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: error: Value of: x
+ Actual: 2
+Expected: 1
+[ FAILED ] FatalFailureTest.FatalFailureInSubroutine (? ms)
+[ RUN ] FatalFailureTest.FatalFailureInNestedSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: error: Value of: x
+ Actual: 2
+Expected: 1
+[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine (? ms)
+[ RUN ] FatalFailureTest.NonfatalFailureInSubroutine
+(expecting a failure on false)
+gtest_output_test_.cc:#: error: Value of: false
+ Actual: false
+Expected: true
+[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine (? ms)
+[----------] 3 tests from FatalFailureTest (? ms total)
+
+[----------] 1 test from LoggingTest
+[ RUN ] LoggingTest.InterleavingLoggingAndAssertions
+(expecting 2 failures on (3) >= (a[i]))
+i == 0
+i == 1
+gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 9
+i == 2
+i == 3
+gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 6
+[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions (? ms)
+[----------] 1 test from LoggingTest (? ms total)
+
+[----------] Global test environment tear-down
+[==========] 4 tests from 2 test cases ran. (? ms total)
+[ PASSED ] 0 tests.
+[ FAILED ] 4 tests, listed below:
+[ FAILED ] FatalFailureTest.FatalFailureInSubroutine
+[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine
+[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine
+[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions
+
+ 4 FAILED TESTS
+ YOU HAVE 1 DISABLED TEST
+
+Note: Google Test filter = *DISABLED_*
+[==========] Running 1 test from 1 test case.
+[----------] Global test environment set-up.
+[----------] 1 test from DisabledTestsWarningTest
+[ RUN ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning
+[ OK ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning
+[----------] Global test environment tear-down
+[==========] 1 test from 1 test case ran.
+[ PASSED ] 1 test.
+Note: Google Test filter = PassingTest.*
+Note: This is test shard 1 of 2.
+[==========] Running 1 test from 1 test case.
+[----------] Global test environment set-up.
+[----------] 1 test from PassingTest
+[ RUN ] PassingTest.PassingTest2
+[ OK ] PassingTest.PassingTest2
+[----------] Global test environment tear-down
+[==========] 1 test from 1 test case ran.
+[ PASSED ] 1 test.
+
+ YOU HAVE 1 DISABLED TEST
+
diff --git a/Source/ThirdParty/gtest/test/gtest_pred_impl_unittest.cc b/Source/ThirdParty/gtest/test/gtest_pred_impl_unittest.cc
new file mode 100644
index 000000000..e7ee54b56
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_pred_impl_unittest.cc
@@ -0,0 +1,2432 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This file is AUTOMATICALLY GENERATED on 10/02/2008 by command
+// 'gen_gtest_pred_impl.py 5'. DO NOT EDIT BY HAND!
+
+// Regression test for gtest_pred_impl.h
+//
+// This file is generated by a script and quite long. If you intend to
+// learn how Google Test works by reading its unit tests, read
+// gtest_unittest.cc instead.
+//
+// This is intended as a regression test for the Google Test predicate
+// assertions. We compile it as part of the gtest_unittest target
+// only to keep the implementation tidy and compact, as it is quite
+// involved to set the stage for testing Google Test using Google
+// Test itself.
+//
+// Currently, gtest_unittest takes ~11 seconds to run in the testing
+// daemon. In the future, if it grows too large and needs much more
+// time to finish, we should consider separating this file into a
+// stand-alone regression test.
+
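+// For orientation only (not part of the generated test cases): a minimal
+// sketch of how the predicate assertions exercised below are typically
+// used.  The helper IsPositive and the literal arguments are hypothetical.
+//
+//   bool IsPositive(int n) { return n > 0; }
+//
+//   EXPECT_PRED1(IsPositive, 5);    // succeeds
+//   EXPECT_PRED1(IsPositive, -5);   // nonfatal failure; the test continues
+//   ASSERT_PRED1(IsPositive, -5);   // fatal failure; aborts the current test
+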
+#include <iostream>
+
+#include <gtest/gtest.h>
+#include <gtest/gtest-spi.h>
+
+// A user-defined data type.
+struct Bool {
+ explicit Bool(int val) : value(val != 0) {}
+
+ bool operator>(int n) const { return value > Bool(n).value; }
+
+ Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); }
+
+ bool operator==(const Bool& rhs) const { return value == rhs.value; }
+
+ bool value;
+};
+
+// Enables Bool to be used in assertions.
+std::ostream& operator<<(std::ostream& os, const Bool& x) {
+ return os << (x.value ? "true" : "false");
+}
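+
+// (The streaming operator above is also what lets a failed predicate
+// assertion print a Bool argument readably; e.g. an assertion that fails on
+// Bool(0) would report the argument's value as "false" rather than as raw
+// bytes.  This is illustrative; the exact message wording is not asserted
+// in this file.)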
+
+// Sample functions/functors for testing unary predicate assertions.
+
+// A unary predicate function.
+template <typename T1>
+bool PredFunction1(T1 v1) {
+ return v1 > 0;
+}
+
+// The following two functions are needed to circumvent a bug in
+// gcc 2.95.3, which sometimes has problems with the above template
+// function.
+bool PredFunction1Int(int v1) {
+ return v1 > 0;
+}
+bool PredFunction1Bool(Bool v1) {
+ return v1 > 0;
+}
+
+// A unary predicate functor.
+struct PredFunctor1 {
+ template <typename T1>
+ bool operator()(const T1& v1) {
+ return v1 > 0;
+ }
+};
+
+// A unary predicate-formatter function.
+template <typename T1>
+testing::AssertionResult PredFormatFunction1(const char* e1,
+ const T1& v1) {
+ if (PredFunction1(v1))
+ return testing::AssertionSuccess();
+
+ testing::Message msg;
+ msg << e1
+ << " is expected to be positive, but evaluates to "
+ << v1 << ".";
+ return testing::AssertionFailure(msg);
+}
+
+// A unary predicate-formatter functor.
+struct PredFormatFunctor1 {
+ template <typename T1>
+ testing::AssertionResult operator()(const char* e1,
+ const T1& v1) const {
+ return PredFormatFunction1(e1, v1);
+ }
+};
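+
+// As a concrete (illustrative) example of what the formatter above yields:
+// given a hypothetical variable `int x = -5;`, a failing
+//   EXPECT_PRED_FORMAT1(PredFormatFunction1, x);
+// receives e1 == "x" and v1 == -5, so the resulting nonfatal failure
+// message reads roughly: "x is expected to be positive, but evaluates
+// to -5."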
+
+// Tests for {EXPECT|ASSERT}_PRED_FORMAT1.
+
+class Predicate1Test : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ expected_to_finish_ = true;
+ finished_ = false;
+ n1_ = 0;
+ }
+
+ virtual void TearDown() {
+ // Verifies that each of the predicate's arguments was evaluated
+ // exactly once.
+ EXPECT_EQ(1, n1_) <<
+ "The predicate assertion didn't evaluate argument 2 "
+ "exactly once.";
+
+    // Verifies that the control flow in the test function is as expected.
+    if (expected_to_finish_ && !finished_) {
+      FAIL() << "The predicate assertion unexpectedly aborted the test.";
+ } else if (!expected_to_finish_ && finished_) {
+ FAIL() << "The failed predicate assertion didn't abort the test "
+ "as expected.";
+ }
+ }
+
+  // true iff the test function is expected to run to completion.
+  static bool expected_to_finish_;
+
+  // true iff the test function did run to completion.
+ static bool finished_;
+
+ static int n1_;
+};
+
+bool Predicate1Test::expected_to_finish_;
+bool Predicate1Test::finished_;
+int Predicate1Test::n1_;
+
+typedef Predicate1Test EXPECT_PRED_FORMAT1Test;
+typedef Predicate1Test ASSERT_PRED_FORMAT1Test;
+typedef Predicate1Test EXPECT_PRED1Test;
+typedef Predicate1Test ASSERT_PRED1Test;
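+
+// Note on the pattern shared by all of the tests below: the argument is
+// written as ++n1_ in the success cases (it evaluates to 1, so the
+// predicate holds) and as n1_++ in the failure cases (it evaluates to 0, so
+// the predicate does not hold); either way the counter ends up at 1, which
+// is what TearDown() checks to verify that the argument was evaluated
+// exactly once.  finished_ is set at the end of each test body so that
+// TearDown() can also verify whether a fatal failure aborted the test as
+// expected.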
+
+// Tests a successful EXPECT_PRED1 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED1Test, FunctionOnBuiltInTypeSuccess) {
+ EXPECT_PRED1(PredFunction1Int,
+ ++n1_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED1 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED1Test, FunctionOnUserTypeSuccess) {
+ EXPECT_PRED1(PredFunction1Bool,
+ Bool(++n1_));
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED1 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED1Test, FunctorOnBuiltInTypeSuccess) {
+ EXPECT_PRED1(PredFunctor1(),
+ ++n1_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED1 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED1Test, FunctorOnUserTypeSuccess) {
+ EXPECT_PRED1(PredFunctor1(),
+ Bool(++n1_));
+ finished_ = true;
+}
+
+// Tests a failed EXPECT_PRED1 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED1Test, FunctionOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED1(PredFunction1Int,
+ n1_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED1 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED1Test, FunctionOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED1(PredFunction1Bool,
+ Bool(n1_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED1 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED1Test, FunctorOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED1(PredFunctor1(),
+ n1_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED1 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED1Test, FunctorOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED1(PredFunctor1(),
+ Bool(n1_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful ASSERT_PRED1 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED1Test, FunctionOnBuiltInTypeSuccess) {
+ ASSERT_PRED1(PredFunction1Int,
+ ++n1_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED1 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED1Test, FunctionOnUserTypeSuccess) {
+ ASSERT_PRED1(PredFunction1Bool,
+ Bool(++n1_));
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED1 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED1Test, FunctorOnBuiltInTypeSuccess) {
+ ASSERT_PRED1(PredFunctor1(),
+ ++n1_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED1 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED1Test, FunctorOnUserTypeSuccess) {
+ ASSERT_PRED1(PredFunctor1(),
+ Bool(++n1_));
+ finished_ = true;
+}
+
+// Tests a failed ASSERT_PRED1 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED1Test, FunctionOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED1(PredFunction1Int,
+ n1_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED1 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED1Test, FunctionOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED1(PredFunction1Bool,
+ Bool(n1_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED1 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED1Test, FunctorOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED1(PredFunctor1(),
+ n1_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED1 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED1Test, FunctorOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED1(PredFunctor1(),
+ Bool(n1_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful EXPECT_PRED_FORMAT1 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnBuiltInTypeSuccess) {
+ EXPECT_PRED_FORMAT1(PredFormatFunction1,
+ ++n1_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT1 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnUserTypeSuccess) {
+ EXPECT_PRED_FORMAT1(PredFormatFunction1,
+ Bool(++n1_));
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT1 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnBuiltInTypeSuccess) {
+ EXPECT_PRED_FORMAT1(PredFormatFunctor1(),
+ ++n1_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT1 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnUserTypeSuccess) {
+ EXPECT_PRED_FORMAT1(PredFormatFunctor1(),
+ Bool(++n1_));
+ finished_ = true;
+}
+
+// Tests a failed EXPECT_PRED_FORMAT1 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT1(PredFormatFunction1,
+ n1_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT1 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT1(PredFormatFunction1,
+ Bool(n1_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT1 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT1(PredFormatFunctor1(),
+ n1_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT1 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT1(PredFormatFunctor1(),
+ Bool(n1_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful ASSERT_PRED_FORMAT1 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnBuiltInTypeSuccess) {
+ ASSERT_PRED_FORMAT1(PredFormatFunction1,
+ ++n1_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT1 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnUserTypeSuccess) {
+ ASSERT_PRED_FORMAT1(PredFormatFunction1,
+ Bool(++n1_));
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT1 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnBuiltInTypeSuccess) {
+ ASSERT_PRED_FORMAT1(PredFormatFunctor1(),
+ ++n1_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT1 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnUserTypeSuccess) {
+ ASSERT_PRED_FORMAT1(PredFormatFunctor1(),
+ Bool(++n1_));
+ finished_ = true;
+}
+
+// Tests a failed ASSERT_PRED_FORMAT1 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT1(PredFormatFunction1,
+ n1_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT1 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT1(PredFormatFunction1,
+ Bool(n1_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT1 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT1(PredFormatFunctor1(),
+ n1_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT1 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT1(PredFormatFunctor1(),
+ Bool(n1_++));
+ finished_ = true;
+ }, "");
+}
+
+// Sample functions/functors for testing binary predicate assertions.
+
+// A binary predicate function.
+template <typename T1, typename T2>
+bool PredFunction2(T1 v1, T2 v2) {
+ return v1 + v2 > 0;
+}
+
+// The following two functions are needed to circumvent a bug in
+// gcc 2.95.3, which sometimes has problems with the above template
+// function.
+bool PredFunction2Int(int v1, int v2) {
+ return v1 + v2 > 0;
+}
+bool PredFunction2Bool(Bool v1, Bool v2) {
+ return v1 + v2 > 0;
+}
+
+// A binary predicate functor.
+struct PredFunctor2 {
+ template <typename T1, typename T2>
+ bool operator()(const T1& v1,
+ const T2& v2) {
+ return v1 + v2 > 0;
+ }
+};
+
+// A binary predicate-formatter function.
+template <typename T1, typename T2>
+testing::AssertionResult PredFormatFunction2(const char* e1,
+ const char* e2,
+ const T1& v1,
+ const T2& v2) {
+ if (PredFunction2(v1, v2))
+ return testing::AssertionSuccess();
+
+ testing::Message msg;
+ msg << e1 << " + " << e2
+ << " is expected to be positive, but evaluates to "
+ << v1 + v2 << ".";
+ return testing::AssertionFailure(msg);
+}
+
+// A binary predicate-formatter functor.
+struct PredFormatFunctor2 {
+ template <typename T1, typename T2>
+ testing::AssertionResult operator()(const char* e1,
+ const char* e2,
+ const T1& v1,
+ const T2& v2) const {
+ return PredFormatFunction2(e1, e2, v1, v2);
+ }
+};
+
+// Tests for {EXPECT|ASSERT}_PRED_FORMAT2.
+
+class Predicate2Test : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ expected_to_finish_ = true;
+ finished_ = false;
+ n1_ = n2_ = 0;
+ }
+
+ virtual void TearDown() {
+ // Verifies that each of the predicate's arguments was evaluated
+ // exactly once.
+ EXPECT_EQ(1, n1_) <<
+ "The predicate assertion didn't evaluate argument 2 "
+ "exactly once.";
+ EXPECT_EQ(1, n2_) <<
+ "The predicate assertion didn't evaluate argument 3 "
+ "exactly once.";
+
+    // Verifies that the control flow in the test function is as expected.
+    if (expected_to_finish_ && !finished_) {
+      FAIL() << "The predicate assertion unexpectedly aborted the test.";
+ } else if (!expected_to_finish_ && finished_) {
+ FAIL() << "The failed predicate assertion didn't abort the test "
+ "as expected.";
+ }
+ }
+
+  // true iff the test function is expected to run to completion.
+  static bool expected_to_finish_;
+
+  // true iff the test function did run to completion.
+ static bool finished_;
+
+ static int n1_;
+ static int n2_;
+};
+
+bool Predicate2Test::expected_to_finish_;
+bool Predicate2Test::finished_;
+int Predicate2Test::n1_;
+int Predicate2Test::n2_;
+
+typedef Predicate2Test EXPECT_PRED_FORMAT2Test;
+typedef Predicate2Test ASSERT_PRED_FORMAT2Test;
+typedef Predicate2Test EXPECT_PRED2Test;
+typedef Predicate2Test ASSERT_PRED2Test;
+
+// Tests a successful EXPECT_PRED2 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED2Test, FunctionOnBuiltInTypeSuccess) {
+ EXPECT_PRED2(PredFunction2Int,
+ ++n1_,
+ ++n2_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED2 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED2Test, FunctionOnUserTypeSuccess) {
+ EXPECT_PRED2(PredFunction2Bool,
+ Bool(++n1_),
+ Bool(++n2_));
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED2 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED2Test, FunctorOnBuiltInTypeSuccess) {
+ EXPECT_PRED2(PredFunctor2(),
+ ++n1_,
+ ++n2_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED2 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED2Test, FunctorOnUserTypeSuccess) {
+ EXPECT_PRED2(PredFunctor2(),
+ Bool(++n1_),
+ Bool(++n2_));
+ finished_ = true;
+}
+
+// Tests a failed EXPECT_PRED2 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED2Test, FunctionOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED2(PredFunction2Int,
+ n1_++,
+ n2_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED2 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED2Test, FunctionOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED2(PredFunction2Bool,
+ Bool(n1_++),
+ Bool(n2_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED2 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED2Test, FunctorOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED2(PredFunctor2(),
+ n1_++,
+ n2_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED2 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED2Test, FunctorOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED2(PredFunctor2(),
+ Bool(n1_++),
+ Bool(n2_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful ASSERT_PRED2 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED2Test, FunctionOnBuiltInTypeSuccess) {
+ ASSERT_PRED2(PredFunction2Int,
+ ++n1_,
+ ++n2_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED2 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED2Test, FunctionOnUserTypeSuccess) {
+ ASSERT_PRED2(PredFunction2Bool,
+ Bool(++n1_),
+ Bool(++n2_));
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED2 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED2Test, FunctorOnBuiltInTypeSuccess) {
+ ASSERT_PRED2(PredFunctor2(),
+ ++n1_,
+ ++n2_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED2 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED2Test, FunctorOnUserTypeSuccess) {
+ ASSERT_PRED2(PredFunctor2(),
+ Bool(++n1_),
+ Bool(++n2_));
+ finished_ = true;
+}
+
+// Tests a failed ASSERT_PRED2 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED2Test, FunctionOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED2(PredFunction2Int,
+ n1_++,
+ n2_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED2 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED2Test, FunctionOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED2(PredFunction2Bool,
+ Bool(n1_++),
+ Bool(n2_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED2 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED2Test, FunctorOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED2(PredFunctor2(),
+ n1_++,
+ n2_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED2 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED2Test, FunctorOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED2(PredFunctor2(),
+ Bool(n1_++),
+ Bool(n2_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful EXPECT_PRED_FORMAT2 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnBuiltInTypeSuccess) {
+ EXPECT_PRED_FORMAT2(PredFormatFunction2,
+ ++n1_,
+ ++n2_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT2 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnUserTypeSuccess) {
+ EXPECT_PRED_FORMAT2(PredFormatFunction2,
+ Bool(++n1_),
+ Bool(++n2_));
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT2 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnBuiltInTypeSuccess) {
+ EXPECT_PRED_FORMAT2(PredFormatFunctor2(),
+ ++n1_,
+ ++n2_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT2 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnUserTypeSuccess) {
+ EXPECT_PRED_FORMAT2(PredFormatFunctor2(),
+ Bool(++n1_),
+ Bool(++n2_));
+ finished_ = true;
+}
+
+// Tests a failed EXPECT_PRED_FORMAT2 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT2(PredFormatFunction2,
+ n1_++,
+ n2_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT2 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT2(PredFormatFunction2,
+ Bool(n1_++),
+ Bool(n2_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT2 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT2(PredFormatFunctor2(),
+ n1_++,
+ n2_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT2 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT2(PredFormatFunctor2(),
+ Bool(n1_++),
+ Bool(n2_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful ASSERT_PRED_FORMAT2 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnBuiltInTypeSuccess) {
+ ASSERT_PRED_FORMAT2(PredFormatFunction2,
+ ++n1_,
+ ++n2_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT2 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnUserTypeSuccess) {
+ ASSERT_PRED_FORMAT2(PredFormatFunction2,
+ Bool(++n1_),
+ Bool(++n2_));
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT2 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnBuiltInTypeSuccess) {
+ ASSERT_PRED_FORMAT2(PredFormatFunctor2(),
+ ++n1_,
+ ++n2_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT2 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnUserTypeSuccess) {
+ ASSERT_PRED_FORMAT2(PredFormatFunctor2(),
+ Bool(++n1_),
+ Bool(++n2_));
+ finished_ = true;
+}
+
+// Tests a failed ASSERT_PRED_FORMAT2 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT2(PredFormatFunction2,
+ n1_++,
+ n2_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT2 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT2(PredFormatFunction2,
+ Bool(n1_++),
+ Bool(n2_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT2 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT2(PredFormatFunctor2(),
+ n1_++,
+ n2_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT2 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT2(PredFormatFunctor2(),
+ Bool(n1_++),
+ Bool(n2_++));
+ finished_ = true;
+ }, "");
+}
+
+// Sample functions/functors for testing ternary predicate assertions.
+
+// A ternary predicate function.
+template <typename T1, typename T2, typename T3>
+bool PredFunction3(T1 v1, T2 v2, T3 v3) {
+ return v1 + v2 + v3 > 0;
+}
+
+// The following two functions are needed to circumvent a bug in
+// gcc 2.95.3, which sometimes has problems with the above template
+// function.
+bool PredFunction3Int(int v1, int v2, int v3) {
+ return v1 + v2 + v3 > 0;
+}
+bool PredFunction3Bool(Bool v1, Bool v2, Bool v3) {
+ return v1 + v2 + v3 > 0;
+}
+
+// A ternary predicate functor.
+struct PredFunctor3 {
+ template <typename T1, typename T2, typename T3>
+ bool operator()(const T1& v1,
+ const T2& v2,
+ const T3& v3) {
+ return v1 + v2 + v3 > 0;
+ }
+};
+
+// A ternary predicate-formatter function.
+template <typename T1, typename T2, typename T3>
+testing::AssertionResult PredFormatFunction3(const char* e1,
+ const char* e2,
+ const char* e3,
+ const T1& v1,
+ const T2& v2,
+ const T3& v3) {
+ if (PredFunction3(v1, v2, v3))
+ return testing::AssertionSuccess();
+
+ testing::Message msg;
+ msg << e1 << " + " << e2 << " + " << e3
+ << " is expected to be positive, but evaluates to "
+ << v1 + v2 + v3 << ".";
+ return testing::AssertionFailure(msg);
+}
+
+// A ternary predicate-formatter functor.
+struct PredFormatFunctor3 {
+ template <typename T1, typename T2, typename T3>
+ testing::AssertionResult operator()(const char* e1,
+ const char* e2,
+ const char* e3,
+ const T1& v1,
+ const T2& v2,
+ const T3& v3) const {
+ return PredFormatFunction3(e1, e2, e3, v1, v2, v3);
+ }
+};
+
+// Tests for {EXPECT|ASSERT}_PRED_FORMAT3.
+
+class Predicate3Test : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ expected_to_finish_ = true;
+ finished_ = false;
+ n1_ = n2_ = n3_ = 0;
+ }
+
+ virtual void TearDown() {
+ // Verifies that each of the predicate's arguments was evaluated
+ // exactly once.
+ EXPECT_EQ(1, n1_) <<
+ "The predicate assertion didn't evaluate argument 2 "
+ "exactly once.";
+ EXPECT_EQ(1, n2_) <<
+ "The predicate assertion didn't evaluate argument 3 "
+ "exactly once.";
+ EXPECT_EQ(1, n3_) <<
+ "The predicate assertion didn't evaluate argument 4 "
+ "exactly once.";
+
+    // Verifies that the control flow in the test function is as expected.
+    if (expected_to_finish_ && !finished_) {
+      FAIL() << "The predicate assertion unexpectedly aborted the test.";
+ } else if (!expected_to_finish_ && finished_) {
+ FAIL() << "The failed predicate assertion didn't abort the test "
+ "as expected.";
+ }
+ }
+
+  // true iff the test function is expected to run to completion.
+  static bool expected_to_finish_;
+
+  // true iff the test function did run to completion.
+ static bool finished_;
+
+ static int n1_;
+ static int n2_;
+ static int n3_;
+};
+
+bool Predicate3Test::expected_to_finish_;
+bool Predicate3Test::finished_;
+int Predicate3Test::n1_;
+int Predicate3Test::n2_;
+int Predicate3Test::n3_;
+
+typedef Predicate3Test EXPECT_PRED_FORMAT3Test;
+typedef Predicate3Test ASSERT_PRED_FORMAT3Test;
+typedef Predicate3Test EXPECT_PRED3Test;
+typedef Predicate3Test ASSERT_PRED3Test;
+
+// Tests a successful EXPECT_PRED3 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED3Test, FunctionOnBuiltInTypeSuccess) {
+ EXPECT_PRED3(PredFunction3Int,
+ ++n1_,
+ ++n2_,
+ ++n3_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED3 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED3Test, FunctionOnUserTypeSuccess) {
+ EXPECT_PRED3(PredFunction3Bool,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_));
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED3 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED3Test, FunctorOnBuiltInTypeSuccess) {
+ EXPECT_PRED3(PredFunctor3(),
+ ++n1_,
+ ++n2_,
+ ++n3_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED3 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED3Test, FunctorOnUserTypeSuccess) {
+ EXPECT_PRED3(PredFunctor3(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_));
+ finished_ = true;
+}
+
+// Tests a failed EXPECT_PRED3 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED3Test, FunctionOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED3(PredFunction3Int,
+ n1_++,
+ n2_++,
+ n3_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED3 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED3Test, FunctionOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED3(PredFunction3Bool,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED3 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED3Test, FunctorOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED3(PredFunctor3(),
+ n1_++,
+ n2_++,
+ n3_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED3 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED3Test, FunctorOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED3(PredFunctor3(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful ASSERT_PRED3 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED3Test, FunctionOnBuiltInTypeSuccess) {
+ ASSERT_PRED3(PredFunction3Int,
+ ++n1_,
+ ++n2_,
+ ++n3_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED3 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED3Test, FunctionOnUserTypeSuccess) {
+ ASSERT_PRED3(PredFunction3Bool,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_));
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED3 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED3Test, FunctorOnBuiltInTypeSuccess) {
+ ASSERT_PRED3(PredFunctor3(),
+ ++n1_,
+ ++n2_,
+ ++n3_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED3 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED3Test, FunctorOnUserTypeSuccess) {
+ ASSERT_PRED3(PredFunctor3(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_));
+ finished_ = true;
+}
+
+// Tests a failed ASSERT_PRED3 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED3Test, FunctionOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED3(PredFunction3Int,
+ n1_++,
+ n2_++,
+ n3_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED3 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED3Test, FunctionOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED3(PredFunction3Bool,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED3 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED3Test, FunctorOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED3(PredFunctor3(),
+ n1_++,
+ n2_++,
+ n3_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED3 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED3Test, FunctorOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED3(PredFunctor3(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful EXPECT_PRED_FORMAT3 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnBuiltInTypeSuccess) {
+ EXPECT_PRED_FORMAT3(PredFormatFunction3,
+ ++n1_,
+ ++n2_,
+ ++n3_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT3 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnUserTypeSuccess) {
+ EXPECT_PRED_FORMAT3(PredFormatFunction3,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_));
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT3 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnBuiltInTypeSuccess) {
+ EXPECT_PRED_FORMAT3(PredFormatFunctor3(),
+ ++n1_,
+ ++n2_,
+ ++n3_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT3 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnUserTypeSuccess) {
+ EXPECT_PRED_FORMAT3(PredFormatFunctor3(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_));
+ finished_ = true;
+}
+
+// Tests a failed EXPECT_PRED_FORMAT3 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT3(PredFormatFunction3,
+ n1_++,
+ n2_++,
+ n3_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT3 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT3(PredFormatFunction3,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT3 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT3(PredFormatFunctor3(),
+ n1_++,
+ n2_++,
+ n3_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT3 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT3(PredFormatFunctor3(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful ASSERT_PRED_FORMAT3 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnBuiltInTypeSuccess) {
+ ASSERT_PRED_FORMAT3(PredFormatFunction3,
+ ++n1_,
+ ++n2_,
+ ++n3_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT3 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnUserTypeSuccess) {
+ ASSERT_PRED_FORMAT3(PredFormatFunction3,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_));
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT3 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnBuiltInTypeSuccess) {
+ ASSERT_PRED_FORMAT3(PredFormatFunctor3(),
+ ++n1_,
+ ++n2_,
+ ++n3_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT3 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnUserTypeSuccess) {
+ ASSERT_PRED_FORMAT3(PredFormatFunctor3(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_));
+ finished_ = true;
+}
+
+// Tests a failed ASSERT_PRED_FORMAT3 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT3(PredFormatFunction3,
+ n1_++,
+ n2_++,
+ n3_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT3 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT3(PredFormatFunction3,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT3 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT3(PredFormatFunctor3(),
+ n1_++,
+ n2_++,
+ n3_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT3 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT3(PredFormatFunctor3(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ }, "");
+}
+
+// Sample functions/functors for testing 4-ary predicate assertions.
+
+// A 4-ary predicate function.
+template <typename T1, typename T2, typename T3, typename T4>
+bool PredFunction4(T1 v1, T2 v2, T3 v3, T4 v4) {
+ return v1 + v2 + v3 + v4 > 0;
+}
+
+// The following two functions are needed to circumvent a bug in
+// gcc 2.95.3, which sometimes has problems with the above template
+// function.
+bool PredFunction4Int(int v1, int v2, int v3, int v4) {
+ return v1 + v2 + v3 + v4 > 0;
+}
+bool PredFunction4Bool(Bool v1, Bool v2, Bool v3, Bool v4) {
+ return v1 + v2 + v3 + v4 > 0;
+}
+
+// A 4-ary predicate functor.
+struct PredFunctor4 {
+ template <typename T1, typename T2, typename T3, typename T4>
+ bool operator()(const T1& v1,
+ const T2& v2,
+ const T3& v3,
+ const T4& v4) {
+ return v1 + v2 + v3 + v4 > 0;
+ }
+};
+
+// A 4-ary predicate-formatter function.
+template <typename T1, typename T2, typename T3, typename T4>
+testing::AssertionResult PredFormatFunction4(const char* e1,
+ const char* e2,
+ const char* e3,
+ const char* e4,
+ const T1& v1,
+ const T2& v2,
+ const T3& v3,
+ const T4& v4) {
+ if (PredFunction4(v1, v2, v3, v4))
+ return testing::AssertionSuccess();
+
+ testing::Message msg;
+ msg << e1 << " + " << e2 << " + " << e3 << " + " << e4
+ << " is expected to be positive, but evaluates to "
+ << v1 + v2 + v3 + v4 << ".";
+ return testing::AssertionFailure(msg);
+}
+
+// A 4-ary predicate-formatter functor.
+struct PredFormatFunctor4 {
+ template <typename T1, typename T2, typename T3, typename T4>
+ testing::AssertionResult operator()(const char* e1,
+ const char* e2,
+ const char* e3,
+ const char* e4,
+ const T1& v1,
+ const T2& v2,
+ const T3& v3,
+ const T4& v4) const {
+ return PredFormatFunction4(e1, e2, e3, e4, v1, v2, v3, v4);
+ }
+};
+
+// Tests for {EXPECT|ASSERT}_PRED_FORMAT4.
+
+class Predicate4Test : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ expected_to_finish_ = true;
+ finished_ = false;
+ n1_ = n2_ = n3_ = n4_ = 0;
+ }
+
+ virtual void TearDown() {
+ // Verifies that each of the predicate's arguments was evaluated
+ // exactly once.
+ EXPECT_EQ(1, n1_) <<
+ "The predicate assertion didn't evaluate argument 2 "
+ "exactly once.";
+ EXPECT_EQ(1, n2_) <<
+ "The predicate assertion didn't evaluate argument 3 "
+ "exactly once.";
+ EXPECT_EQ(1, n3_) <<
+ "The predicate assertion didn't evaluate argument 4 "
+ "exactly once.";
+ EXPECT_EQ(1, n4_) <<
+ "The predicate assertion didn't evaluate argument 5 "
+ "exactly once.";
+
+    // Verifies that the control flow in the test function is as expected.
+    if (expected_to_finish_ && !finished_) {
+      FAIL() << "The predicate assertion unexpectedly aborted the test.";
+ } else if (!expected_to_finish_ && finished_) {
+ FAIL() << "The failed predicate assertion didn't abort the test "
+ "as expected.";
+ }
+ }
+
+  // true iff the test function is expected to run to completion.
+  static bool expected_to_finish_;
+
+  // true iff the test function did run to completion.
+ static bool finished_;
+
+ static int n1_;
+ static int n2_;
+ static int n3_;
+ static int n4_;
+};
+
+bool Predicate4Test::expected_to_finish_;
+bool Predicate4Test::finished_;
+int Predicate4Test::n1_;
+int Predicate4Test::n2_;
+int Predicate4Test::n3_;
+int Predicate4Test::n4_;
+
+typedef Predicate4Test EXPECT_PRED_FORMAT4Test;
+typedef Predicate4Test ASSERT_PRED_FORMAT4Test;
+typedef Predicate4Test EXPECT_PRED4Test;
+typedef Predicate4Test ASSERT_PRED4Test;
+
+// Tests a successful EXPECT_PRED4 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED4Test, FunctionOnBuiltInTypeSuccess) {
+ EXPECT_PRED4(PredFunction4Int,
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED4 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED4Test, FunctionOnUserTypeSuccess) {
+ EXPECT_PRED4(PredFunction4Bool,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_));
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED4 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED4Test, FunctorOnBuiltInTypeSuccess) {
+ EXPECT_PRED4(PredFunctor4(),
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED4 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED4Test, FunctorOnUserTypeSuccess) {
+ EXPECT_PRED4(PredFunctor4(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_));
+ finished_ = true;
+}
+
+// Tests a failed EXPECT_PRED4 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED4Test, FunctionOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED4(PredFunction4Int,
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED4 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED4Test, FunctionOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED4(PredFunction4Bool,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED4 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED4Test, FunctorOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED4(PredFunctor4(),
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED4 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED4Test, FunctorOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED4(PredFunctor4(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful ASSERT_PRED4 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED4Test, FunctionOnBuiltInTypeSuccess) {
+ ASSERT_PRED4(PredFunction4Int,
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED4 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED4Test, FunctionOnUserTypeSuccess) {
+ ASSERT_PRED4(PredFunction4Bool,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_));
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED4 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED4Test, FunctorOnBuiltInTypeSuccess) {
+ ASSERT_PRED4(PredFunctor4(),
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED4 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED4Test, FunctorOnUserTypeSuccess) {
+ ASSERT_PRED4(PredFunctor4(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_));
+ finished_ = true;
+}
+
+// Tests a failed ASSERT_PRED4 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED4Test, FunctionOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED4(PredFunction4Int,
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED4 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED4Test, FunctionOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED4(PredFunction4Bool,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED4 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED4Test, FunctorOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED4(PredFunctor4(),
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED4 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED4Test, FunctorOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED4(PredFunctor4(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful EXPECT_PRED_FORMAT4 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnBuiltInTypeSuccess) {
+ EXPECT_PRED_FORMAT4(PredFormatFunction4,
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT4 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnUserTypeSuccess) {
+ EXPECT_PRED_FORMAT4(PredFormatFunction4,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_));
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT4 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnBuiltInTypeSuccess) {
+ EXPECT_PRED_FORMAT4(PredFormatFunctor4(),
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT4 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnUserTypeSuccess) {
+ EXPECT_PRED_FORMAT4(PredFormatFunctor4(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_));
+ finished_ = true;
+}
+
+// Tests a failed EXPECT_PRED_FORMAT4 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT4(PredFormatFunction4,
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT4 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT4(PredFormatFunction4,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT4 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT4(PredFormatFunctor4(),
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT4 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT4(PredFormatFunctor4(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful ASSERT_PRED_FORMAT4 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnBuiltInTypeSuccess) {
+ ASSERT_PRED_FORMAT4(PredFormatFunction4,
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT4 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnUserTypeSuccess) {
+ ASSERT_PRED_FORMAT4(PredFormatFunction4,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_));
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT4 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnBuiltInTypeSuccess) {
+ ASSERT_PRED_FORMAT4(PredFormatFunctor4(),
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT4 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnUserTypeSuccess) {
+ ASSERT_PRED_FORMAT4(PredFormatFunctor4(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_));
+ finished_ = true;
+}
+
+// Tests a failed ASSERT_PRED_FORMAT4 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT4(PredFormatFunction4,
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT4 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT4(PredFormatFunction4,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT4 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT4(PredFormatFunctor4(),
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT4 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT4(PredFormatFunctor4(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ }, "");
+}
+
+// Sample functions/functors for testing 5-ary predicate assertions.
+
+// A 5-ary predicate function.
+template <typename T1, typename T2, typename T3, typename T4, typename T5>
+bool PredFunction5(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5) {
+ return v1 + v2 + v3 + v4 + v5 > 0;
+}
+
+// The following two functions are needed to circumvent a bug in
+// gcc 2.95.3, which sometimes has problems with the above template
+// function.
+bool PredFunction5Int(int v1, int v2, int v3, int v4, int v5) {
+ return v1 + v2 + v3 + v4 + v5 > 0;
+}
+bool PredFunction5Bool(Bool v1, Bool v2, Bool v3, Bool v4, Bool v5) {
+ return v1 + v2 + v3 + v4 + v5 > 0;
+}
+
+// A 5-ary predicate functor.
+struct PredFunctor5 {
+ template <typename T1, typename T2, typename T3, typename T4, typename T5>
+ bool operator()(const T1& v1,
+ const T2& v2,
+ const T3& v3,
+ const T4& v4,
+ const T5& v5) {
+ return v1 + v2 + v3 + v4 + v5 > 0;
+ }
+};
+
+// A 5-ary predicate-formatter function.
+template <typename T1, typename T2, typename T3, typename T4, typename T5>
+testing::AssertionResult PredFormatFunction5(const char* e1,
+ const char* e2,
+ const char* e3,
+ const char* e4,
+ const char* e5,
+ const T1& v1,
+ const T2& v2,
+ const T3& v3,
+ const T4& v4,
+ const T5& v5) {
+ if (PredFunction5(v1, v2, v3, v4, v5))
+ return testing::AssertionSuccess();
+
+ testing::Message msg;
+ msg << e1 << " + " << e2 << " + " << e3 << " + " << e4 << " + " << e5
+ << " is expected to be positive, but evaluates to "
+ << v1 + v2 + v3 + v4 + v5 << ".";
+ return testing::AssertionFailure(msg);
+}
+
+// A 5-ary predicate-formatter functor.
+struct PredFormatFunctor5 {
+ template <typename T1, typename T2, typename T3, typename T4, typename T5>
+ testing::AssertionResult operator()(const char* e1,
+ const char* e2,
+ const char* e3,
+ const char* e4,
+ const char* e5,
+ const T1& v1,
+ const T2& v2,
+ const T3& v3,
+ const T4& v4,
+ const T5& v5) const {
+ return PredFormatFunction5(e1, e2, e3, e4, e5, v1, v2, v3, v4, v5);
+ }
+};
+
+// Tests for {EXPECT|ASSERT}_PRED_FORMAT5.
+
+class Predicate5Test : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ expected_to_finish_ = true;
+ finished_ = false;
+ n1_ = n2_ = n3_ = n4_ = n5_ = 0;
+ }
+
+ virtual void TearDown() {
+ // Verifies that each of the predicate's arguments was evaluated
+ // exactly once.
+ EXPECT_EQ(1, n1_) <<
+ "The predicate assertion didn't evaluate argument 2 "
+ "exactly once.";
+ EXPECT_EQ(1, n2_) <<
+ "The predicate assertion didn't evaluate argument 3 "
+ "exactly once.";
+ EXPECT_EQ(1, n3_) <<
+ "The predicate assertion didn't evaluate argument 4 "
+ "exactly once.";
+ EXPECT_EQ(1, n4_) <<
+ "The predicate assertion didn't evaluate argument 5 "
+ "exactly once.";
+ EXPECT_EQ(1, n5_) <<
+ "The predicate assertion didn't evaluate argument 6 "
+ "exactly once.";
+
+    // Verifies that the control flow in the test function is as expected.
+    if (expected_to_finish_ && !finished_) {
+      FAIL() << "The predicate assertion unexpectedly aborted the test.";
+ } else if (!expected_to_finish_ && finished_) {
+ FAIL() << "The failed predicate assertion didn't abort the test "
+ "as expected.";
+ }
+ }
+
+  // true iff the test function is expected to run to completion.
+  static bool expected_to_finish_;
+
+  // true iff the test function did run to completion.
+ static bool finished_;
+
+ static int n1_;
+ static int n2_;
+ static int n3_;
+ static int n4_;
+ static int n5_;
+};
+
+bool Predicate5Test::expected_to_finish_;
+bool Predicate5Test::finished_;
+int Predicate5Test::n1_;
+int Predicate5Test::n2_;
+int Predicate5Test::n3_;
+int Predicate5Test::n4_;
+int Predicate5Test::n5_;
+
+typedef Predicate5Test EXPECT_PRED_FORMAT5Test;
+typedef Predicate5Test ASSERT_PRED_FORMAT5Test;
+typedef Predicate5Test EXPECT_PRED5Test;
+typedef Predicate5Test ASSERT_PRED5Test;
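+
+// These typedefs alias the same fixture under a separate name for each
+// assertion macro, so every group of tests below reuses the
+// evaluated-exactly-once checks in SetUp()/TearDown() while its failures
+// are reported under a distinct test case name.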
+
+// Tests a successful EXPECT_PRED5 where the
+// predicate is a function on a built-in type (int).
+TEST_F(EXPECT_PRED5Test, FunctionOnBuiltInTypeSuccess) {
+ EXPECT_PRED5(PredFunction5Int,
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_,
+ ++n5_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED5 where the
+// predicate is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED5Test, FunctionOnUserTypeSuccess) {
+ EXPECT_PRED5(PredFunction5Bool,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_),
+ Bool(++n5_));
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED5 where the
+// predicate is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED5Test, FunctorOnBuiltInTypeSuccess) {
+ EXPECT_PRED5(PredFunctor5(),
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_,
+ ++n5_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED5 where the
+// predicate is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED5Test, FunctorOnUserTypeSuccess) {
+ EXPECT_PRED5(PredFunctor5(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_),
+ Bool(++n5_));
+ finished_ = true;
+}
+
+// Tests a failed EXPECT_PRED5 where the
+// predicate is a function on a built-in type (int).
+TEST_F(EXPECT_PRED5Test, FunctionOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED5(PredFunction5Int,
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++,
+ n5_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED5 where the
+// predicate is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED5Test, FunctionOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED5(PredFunction5Bool,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++),
+ Bool(n5_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED5 where the
+// predicate is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED5Test, FunctorOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED5(PredFunctor5(),
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++,
+ n5_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED5 where the
+// predicate is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED5Test, FunctorOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED5(PredFunctor5(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++),
+ Bool(n5_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful ASSERT_PRED5 where the
+// predicate is a function on a built-in type (int).
+TEST_F(ASSERT_PRED5Test, FunctionOnBuiltInTypeSuccess) {
+ ASSERT_PRED5(PredFunction5Int,
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_,
+ ++n5_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED5 where the
+// predicate is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED5Test, FunctionOnUserTypeSuccess) {
+ ASSERT_PRED5(PredFunction5Bool,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_),
+ Bool(++n5_));
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED5 where the
+// predicate is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED5Test, FunctorOnBuiltInTypeSuccess) {
+ ASSERT_PRED5(PredFunctor5(),
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_,
+ ++n5_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED5 where the
+// predicate is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED5Test, FunctorOnUserTypeSuccess) {
+ ASSERT_PRED5(PredFunctor5(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_),
+ Bool(++n5_));
+ finished_ = true;
+}
+
+// Tests a failed ASSERT_PRED5 where the
+// predicate is a function on a built-in type (int).
+TEST_F(ASSERT_PRED5Test, FunctionOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED5(PredFunction5Int,
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++,
+ n5_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED5 where the
+// predicate is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED5Test, FunctionOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED5(PredFunction5Bool,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++),
+ Bool(n5_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED5 where the
+// predicate is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED5Test, FunctorOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED5(PredFunctor5(),
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++,
+ n5_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED5 where the
+// predicate is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED5Test, FunctorOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED5(PredFunctor5(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++),
+ Bool(n5_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful EXPECT_PRED_FORMAT5 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnBuiltInTypeSuccess) {
+ EXPECT_PRED_FORMAT5(PredFormatFunction5,
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_,
+ ++n5_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT5 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnUserTypeSuccess) {
+ EXPECT_PRED_FORMAT5(PredFormatFunction5,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_),
+ Bool(++n5_));
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT5 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnBuiltInTypeSuccess) {
+ EXPECT_PRED_FORMAT5(PredFormatFunctor5(),
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_,
+ ++n5_);
+ finished_ = true;
+}
+
+// Tests a successful EXPECT_PRED_FORMAT5 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnUserTypeSuccess) {
+ EXPECT_PRED_FORMAT5(PredFormatFunctor5(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_),
+ Bool(++n5_));
+ finished_ = true;
+}
+
+// Tests a failed EXPECT_PRED_FORMAT5 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT5(PredFormatFunction5,
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++,
+ n5_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT5 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT5(PredFormatFunction5,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++),
+ Bool(n5_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT5 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnBuiltInTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT5(PredFormatFunctor5(),
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++,
+ n5_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed EXPECT_PRED_FORMAT5 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnUserTypeFailure) {
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT5(PredFormatFunctor5(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++),
+ Bool(n5_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a successful ASSERT_PRED_FORMAT5 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnBuiltInTypeSuccess) {
+ ASSERT_PRED_FORMAT5(PredFormatFunction5,
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_,
+ ++n5_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT5 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnUserTypeSuccess) {
+ ASSERT_PRED_FORMAT5(PredFormatFunction5,
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_),
+ Bool(++n5_));
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT5 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnBuiltInTypeSuccess) {
+ ASSERT_PRED_FORMAT5(PredFormatFunctor5(),
+ ++n1_,
+ ++n2_,
+ ++n3_,
+ ++n4_,
+ ++n5_);
+ finished_ = true;
+}
+
+// Tests a successful ASSERT_PRED_FORMAT5 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnUserTypeSuccess) {
+ ASSERT_PRED_FORMAT5(PredFormatFunctor5(),
+ Bool(++n1_),
+ Bool(++n2_),
+ Bool(++n3_),
+ Bool(++n4_),
+ Bool(++n5_));
+ finished_ = true;
+}
+
+// Tests a failed ASSERT_PRED_FORMAT5 where the
+// predicate-formatter is a function on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT5(PredFormatFunction5,
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++,
+ n5_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT5 where the
+// predicate-formatter is a function on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT5(PredFormatFunction5,
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++),
+ Bool(n5_++));
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT5 where the
+// predicate-formatter is a functor on a built-in type (int).
+TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnBuiltInTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT5(PredFormatFunctor5(),
+ n1_++,
+ n2_++,
+ n3_++,
+ n4_++,
+ n5_++);
+ finished_ = true;
+ }, "");
+}
+
+// Tests a failed ASSERT_PRED_FORMAT5 where the
+// predicate-formatter is a functor on a user-defined type (Bool).
+TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnUserTypeFailure) {
+ expected_to_finish_ = false;
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT5(PredFormatFunctor5(),
+ Bool(n1_++),
+ Bool(n2_++),
+ Bool(n3_++),
+ Bool(n4_++),
+ Bool(n5_++));
+ finished_ = true;
+ }, "");
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_prod_test.cc b/Source/ThirdParty/gtest/test/gtest_prod_test.cc
new file mode 100644
index 000000000..bc3201d0f
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_prod_test.cc
@@ -0,0 +1,57 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+//
+// Unit test for include/gtest/gtest_prod.h.
+
+#include <gtest/gtest.h>
+#include "test/production.h"
+
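+// PrivateCode (defined in test/production.h) is expected to name these tests
+// in FRIEND_TEST() declarations; FRIEND_TEST(TestCaseName, TestName) expands
+// to a friend declaration for the generated test class, which is what makes
+// the a.x_ accesses below compile.
+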
+// Tests that private members can be accessed from a TEST declared as
+// a friend of the class.
+TEST(PrivateCodeTest, CanAccessPrivateMembers) {
+ PrivateCode a;
+ EXPECT_EQ(0, a.x_);
+
+ a.set_x(1);
+ EXPECT_EQ(1, a.x_);
+}
+
+typedef testing::Test PrivateCodeFixtureTest;
+
+// Tests that private members can be accessed from a TEST_F declared
+// as a friend of the class.
+TEST_F(PrivateCodeFixtureTest, CanAccessPrivateMembers) {
+ PrivateCode a;
+ EXPECT_EQ(0, a.x_);
+
+ a.set_x(2);
+ EXPECT_EQ(2, a.x_);
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_repeat_test.cc b/Source/ThirdParty/gtest/test/gtest_repeat_test.cc
new file mode 100644
index 000000000..df6868b83
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_repeat_test.cc
@@ -0,0 +1,253 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// Tests the --gtest_repeat=number flag.
+
+#include <stdlib.h>
+#include <iostream>
+#include <gtest/gtest.h>
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+namespace testing {
+
+GTEST_DECLARE_string_(death_test_style);
+GTEST_DECLARE_string_(filter);
+GTEST_DECLARE_int32_(repeat);
+
+} // namespace testing
+
+using testing::GTEST_FLAG(death_test_style);
+using testing::GTEST_FLAG(filter);
+using testing::GTEST_FLAG(repeat);
+
+namespace {
+
+// We need this when we are testing Google Test itself and therefore
+// cannot use Google Test assertions.
+#define GTEST_CHECK_INT_EQ_(expected, actual) \
+ do {\
+ const int expected_val = (expected);\
+ const int actual_val = (actual);\
+ if (::testing::internal::IsTrue(expected_val != actual_val)) {\
+ ::std::cout << "Value of: " #actual "\n"\
+ << " Actual: " << actual_val << "\n"\
+ << "Expected: " #expected "\n"\
+ << "Which is: " << expected_val << "\n";\
+ abort();\
+ }\
+ } while(::testing::internal::AlwaysFalse())
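+// The do { ... } while (AlwaysFalse()) wrapper makes the macro expand to a
+// single statement that requires a trailing semicolon at the call site, and
+// AlwaysFalse() (instead of a literal false) avoids "condition is constant"
+// compiler warnings.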
+
+
+// Used for verifying that global environment set-up and tear-down are
+// inside the gtest_repeat loop.
+
+int g_environment_set_up_count = 0;
+int g_environment_tear_down_count = 0;
+
+class MyEnvironment : public testing::Environment {
+ public:
+ MyEnvironment() {}
+ virtual void SetUp() { g_environment_set_up_count++; }
+ virtual void TearDown() { g_environment_tear_down_count++; }
+};
+
+// A test that should fail.
+
+int g_should_fail_count = 0;
+
+TEST(FooTest, ShouldFail) {
+ g_should_fail_count++;
+ EXPECT_EQ(0, 1) << "Expected failure.";
+}
+
+// A test that should pass.
+
+int g_should_pass_count = 0;
+
+TEST(FooTest, ShouldPass) {
+ g_should_pass_count++;
+}
+
+// A test that contains a thread-safe death test and a fast death
+// test. It should pass.
+
+int g_death_test_count = 0;
+
+TEST(BarDeathTest, ThreadSafeAndFast) {
+ g_death_test_count++;
+
+ GTEST_FLAG(death_test_style) = "threadsafe";
+ EXPECT_DEATH_IF_SUPPORTED(abort(), "");
+
+ GTEST_FLAG(death_test_style) = "fast";
+ EXPECT_DEATH_IF_SUPPORTED(abort(), "");
+}
+
+#if GTEST_HAS_PARAM_TEST
+int g_param_test_count = 0;
+
+const int kNumberOfParamTests = 10;
+
+class MyParamTest : public testing::TestWithParam<int> {};
+
+TEST_P(MyParamTest, ShouldPass) {
+ // TODO(vladl@google.com): Make parameter value checking robust
+ // WRT order of tests.
+ GTEST_CHECK_INT_EQ_(g_param_test_count % kNumberOfParamTests, GetParam());
+ g_param_test_count++;
+}
+INSTANTIATE_TEST_CASE_P(MyParamSequence,
+ MyParamTest,
+ testing::Range(0, kNumberOfParamTests));
+#endif // GTEST_HAS_PARAM_TEST
+
+// Resets the count for each test.
+void ResetCounts() {
+ g_environment_set_up_count = 0;
+ g_environment_tear_down_count = 0;
+ g_should_fail_count = 0;
+ g_should_pass_count = 0;
+ g_death_test_count = 0;
+#if GTEST_HAS_PARAM_TEST
+ g_param_test_count = 0;
+#endif // GTEST_HAS_PARAM_TEST
+}
+
+// Checks that the count for each test is expected.
+void CheckCounts(int expected) {
+ GTEST_CHECK_INT_EQ_(expected, g_environment_set_up_count);
+ GTEST_CHECK_INT_EQ_(expected, g_environment_tear_down_count);
+ GTEST_CHECK_INT_EQ_(expected, g_should_fail_count);
+ GTEST_CHECK_INT_EQ_(expected, g_should_pass_count);
+ GTEST_CHECK_INT_EQ_(expected, g_death_test_count);
+#if GTEST_HAS_PARAM_TEST
+ GTEST_CHECK_INT_EQ_(expected * kNumberOfParamTests, g_param_test_count);
+#endif // GTEST_HAS_PARAM_TEST
+}
+
+// Tests the behavior of Google Test when --gtest_repeat is not specified.
+void TestRepeatUnspecified() {
+ ResetCounts();
+ GTEST_CHECK_INT_EQ_(1, RUN_ALL_TESTS());
+ CheckCounts(1);
+}
+
+// Tests the behavior of Google Test when --gtest_repeat has the given value.
+void TestRepeat(int repeat) {
+ GTEST_FLAG(repeat) = repeat;
+
+ ResetCounts();
+ GTEST_CHECK_INT_EQ_(repeat > 0 ? 1 : 0, RUN_ALL_TESTS());
+ CheckCounts(repeat);
+}
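+// (RUN_ALL_TESTS() is expected to return 1 above whenever FooTest.ShouldFail
+// actually runs and fails; with --gtest_repeat=0 no iteration runs, so it
+// returns 0.)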
+
+// Tests using --gtest_repeat when --gtest_filter specifies an empty
+// set of tests.
+void TestRepeatWithEmptyFilter(int repeat) {
+ GTEST_FLAG(repeat) = repeat;
+ GTEST_FLAG(filter) = "None";
+
+ ResetCounts();
+ GTEST_CHECK_INT_EQ_(0, RUN_ALL_TESTS());
+ CheckCounts(0);
+}
+
+// Tests using --gtest_repeat when --gtest_filter specifies a set of
+// successful tests.
+void TestRepeatWithFilterForSuccessfulTests(int repeat) {
+ GTEST_FLAG(repeat) = repeat;
+ GTEST_FLAG(filter) = "*-*ShouldFail";
+
+ ResetCounts();
+ GTEST_CHECK_INT_EQ_(0, RUN_ALL_TESTS());
+ GTEST_CHECK_INT_EQ_(repeat, g_environment_set_up_count);
+ GTEST_CHECK_INT_EQ_(repeat, g_environment_tear_down_count);
+ GTEST_CHECK_INT_EQ_(0, g_should_fail_count);
+ GTEST_CHECK_INT_EQ_(repeat, g_should_pass_count);
+ GTEST_CHECK_INT_EQ_(repeat, g_death_test_count);
+#if GTEST_HAS_PARAM_TEST
+ GTEST_CHECK_INT_EQ_(repeat * kNumberOfParamTests, g_param_test_count);
+#endif // GTEST_HAS_PARAM_TEST
+}
+
+// Tests using --gtest_repeat when --gtest_filter specifies a set of
+// failed tests.
+void TestRepeatWithFilterForFailedTests(int repeat) {
+ GTEST_FLAG(repeat) = repeat;
+ GTEST_FLAG(filter) = "*ShouldFail";
+
+ ResetCounts();
+ GTEST_CHECK_INT_EQ_(1, RUN_ALL_TESTS());
+ GTEST_CHECK_INT_EQ_(repeat, g_environment_set_up_count);
+ GTEST_CHECK_INT_EQ_(repeat, g_environment_tear_down_count);
+ GTEST_CHECK_INT_EQ_(repeat, g_should_fail_count);
+ GTEST_CHECK_INT_EQ_(0, g_should_pass_count);
+ GTEST_CHECK_INT_EQ_(0, g_death_test_count);
+#if GTEST_HAS_PARAM_TEST
+ GTEST_CHECK_INT_EQ_(0, g_param_test_count);
+#endif // GTEST_HAS_PARAM_TEST
+}
+
+} // namespace
+
+int main(int argc, char **argv) {
+ testing::InitGoogleTest(&argc, argv);
+ testing::AddGlobalTestEnvironment(new MyEnvironment);
+
+ TestRepeatUnspecified();
+ TestRepeat(0);
+ TestRepeat(1);
+ TestRepeat(5);
+
+ TestRepeatWithEmptyFilter(2);
+ TestRepeatWithEmptyFilter(3);
+
+ TestRepeatWithFilterForSuccessfulTests(3);
+
+ TestRepeatWithFilterForFailedTests(4);
+
+ // It would be nice to verify that the tests indeed loop forever
+ // when GTEST_FLAG(repeat) is negative, but this test will be quite
+ // complicated to write. Since this flag is for interactive
+ // debugging only and doesn't affect the normal test result, such a
+  // test would be overkill.
+
+ printf("PASS\n");
+ return 0;
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_shuffle_test.py b/Source/ThirdParty/gtest/test/gtest_shuffle_test.py
new file mode 100755
index 000000000..30d0303d1
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_shuffle_test.py
@@ -0,0 +1,325 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Verifies that test shuffling works."""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import gtest_test_utils
+
+# Command to run the gtest_shuffle_test_ program.
+COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
+
+# The environment variables for test sharding.
+TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
+SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
+
+TEST_FILTER = 'A*.A:A*.B:C*'
+
+ALL_TESTS = []
+ACTIVE_TESTS = []
+FILTERED_TESTS = []
+SHARDED_TESTS = []
+
+SHUFFLED_ALL_TESTS = []
+SHUFFLED_ACTIVE_TESTS = []
+SHUFFLED_FILTERED_TESTS = []
+SHUFFLED_SHARDED_TESTS = []
+
+
+def AlsoRunDisabledTestsFlag():
+ return '--gtest_also_run_disabled_tests'
+
+
+def FilterFlag(test_filter):
+ return '--gtest_filter=%s' % (test_filter,)
+
+
+def RepeatFlag(n):
+ return '--gtest_repeat=%s' % (n,)
+
+
+def ShuffleFlag():
+ return '--gtest_shuffle'
+
+
+def RandomSeedFlag(n):
+ return '--gtest_random_seed=%s' % (n,)
+
+
+def RunAndReturnOutput(extra_env, args):
+ """Runs the test program and returns its output."""
+
+ environ_copy = os.environ.copy()
+ environ_copy.update(extra_env)
+
+ return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
+
+
+def GetTestsForAllIterations(extra_env, args):
+ """Runs the test program and returns a list of test lists.
+
+ Args:
+ extra_env: a map from environment variables to their values
+ args: command line flags to pass to gtest_shuffle_test_
+
+ Returns:
+ A list where the i-th element is the list of tests run in the i-th
+ test iteration.
+ """
+
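+  # The '----' markers recognized below are printed by the companion binary
+  # (gtest_shuffle_test_) at the start of each test iteration, so everything
+  # between two consecutive markers is the list of tests run in that iteration.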
+ test_iterations = []
+ for line in RunAndReturnOutput(extra_env, args).split('\n'):
+ if line.startswith('----'):
+ tests = []
+ test_iterations.append(tests)
+ elif line.strip():
+ tests.append(line.strip()) # 'TestCaseName.TestName'
+
+ return test_iterations
+
+
+def GetTestCases(tests):
+ """Returns a list of test cases in the given full test names.
+
+ Args:
+ tests: a list of full test names
+
+ Returns:
+ A list of test cases from 'tests', in their original order.
+ Consecutive duplicates are removed.
+ """
+
+ test_cases = []
+ for test in tests:
+ test_case = test.split('.')[0]
+ if not test_case in test_cases:
+ test_cases.append(test_case)
+
+ return test_cases
+
+
+def CalculateTestLists():
+ """Calculates the list of tests run under different flags."""
+
+ if not ALL_TESTS:
+ ALL_TESTS.extend(
+ GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
+
+ if not ACTIVE_TESTS:
+ ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
+
+ if not FILTERED_TESTS:
+ FILTERED_TESTS.extend(
+ GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
+
+ if not SHARDED_TESTS:
+ SHARDED_TESTS.extend(
+ GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
+ SHARD_INDEX_ENV_VAR: '1'},
+ [])[0])
+
+ if not SHUFFLED_ALL_TESTS:
+ SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
+ {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
+
+ if not SHUFFLED_ACTIVE_TESTS:
+ SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
+ {}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
+
+ if not SHUFFLED_FILTERED_TESTS:
+ SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
+ {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
+
+ if not SHUFFLED_SHARDED_TESTS:
+ SHUFFLED_SHARDED_TESTS.extend(
+ GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
+ SHARD_INDEX_ENV_VAR: '1'},
+ [ShuffleFlag(), RandomSeedFlag(1)])[0])
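+
+# CalculateTestLists() runs from setUp() before every test, but the
+# 'if not ...' guards above ensure each invocation of the test binary happens
+# only once; the test methods below then share the cached module-level lists.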
+
+
+class GTestShuffleUnitTest(gtest_test_utils.TestCase):
+ """Tests test shuffling."""
+
+ def setUp(self):
+ CalculateTestLists()
+
+ def testShufflePreservesNumberOfTests(self):
+ self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
+ self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
+ self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
+ self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
+
+ def testShuffleChangesTestOrder(self):
+ self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
+ self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
+ self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
+ SHUFFLED_FILTERED_TESTS)
+ self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
+ SHUFFLED_SHARDED_TESTS)
+
+ def testShuffleChangesTestCaseOrder(self):
+ self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
+ GetTestCases(SHUFFLED_ALL_TESTS))
+ self.assert_(
+ GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
+ GetTestCases(SHUFFLED_ACTIVE_TESTS))
+ self.assert_(
+ GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
+ GetTestCases(SHUFFLED_FILTERED_TESTS))
+ self.assert_(
+ GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
+ GetTestCases(SHUFFLED_SHARDED_TESTS))
+
+ def testShuffleDoesNotRepeatTest(self):
+ for test in SHUFFLED_ALL_TESTS:
+ self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
+ '%s appears more than once' % (test,))
+ for test in SHUFFLED_ACTIVE_TESTS:
+ self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
+ '%s appears more than once' % (test,))
+ for test in SHUFFLED_FILTERED_TESTS:
+ self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
+ '%s appears more than once' % (test,))
+ for test in SHUFFLED_SHARDED_TESTS:
+ self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
+ '%s appears more than once' % (test,))
+
+ def testShuffleDoesNotCreateNewTest(self):
+ for test in SHUFFLED_ALL_TESTS:
+ self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
+ for test in SHUFFLED_ACTIVE_TESTS:
+ self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
+ for test in SHUFFLED_FILTERED_TESTS:
+ self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
+ for test in SHUFFLED_SHARDED_TESTS:
+ self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
+
+ def testShuffleIncludesAllTests(self):
+ for test in ALL_TESTS:
+ self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
+ for test in ACTIVE_TESTS:
+ self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
+ for test in FILTERED_TESTS:
+ self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
+ for test in SHARDED_TESTS:
+ self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
+
+ def testShuffleLeavesDeathTestsAtFront(self):
+ non_death_test_found = False
+ for test in SHUFFLED_ACTIVE_TESTS:
+ if 'DeathTest.' in test:
+ self.assert_(not non_death_test_found,
+ '%s appears after a non-death test' % (test,))
+ else:
+ non_death_test_found = True
+
+ def _VerifyTestCasesDoNotInterleave(self, tests):
+ test_cases = []
+ for test in tests:
+ [test_case, _] = test.split('.')
+ if test_cases and test_cases[-1] != test_case:
+ test_cases.append(test_case)
+ self.assertEqual(1, test_cases.count(test_case),
+ 'Test case %s is not grouped together in %s' %
+ (test_case, tests))
+
+ def testShuffleDoesNotInterleaveTestCases(self):
+ self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
+ self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
+ self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
+ self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)
+
+ def testShuffleRestoresOrderAfterEachIteration(self):
+ # Get the test lists in all 3 iterations, using random seed 1, 2,
+ # and 3 respectively. Google Test picks a different seed in each
+ # iteration, and this test depends on the current implementation
+ # picking successive numbers. This dependency is not ideal, but
+ # makes the test much easier to write.
+ [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
+ GetTestsForAllIterations(
+ {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
+
+ # Make sure running the tests with random seed 1 gets the same
+ # order as in iteration 1 above.
+ [tests_with_seed1] = GetTestsForAllIterations(
+ {}, [ShuffleFlag(), RandomSeedFlag(1)])
+ self.assertEqual(tests_in_iteration1, tests_with_seed1)
+
+ # Make sure running the tests with random seed 2 gets the same
+ # order as in iteration 2 above. Success means that Google Test
+ # correctly restores the test order before re-shuffling at the
+ # beginning of iteration 2.
+ [tests_with_seed2] = GetTestsForAllIterations(
+ {}, [ShuffleFlag(), RandomSeedFlag(2)])
+ self.assertEqual(tests_in_iteration2, tests_with_seed2)
+
+ # Make sure running the tests with random seed 3 gets the same
+ # order as in iteration 3 above. Success means that Google Test
+ # correctly restores the test order before re-shuffling at the
+ # beginning of iteration 3.
+ [tests_with_seed3] = GetTestsForAllIterations(
+ {}, [ShuffleFlag(), RandomSeedFlag(3)])
+ self.assertEqual(tests_in_iteration3, tests_with_seed3)
+
+ def testShuffleGeneratesNewOrderInEachIteration(self):
+ [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
+ GetTestsForAllIterations(
+ {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
+
+ self.assert_(tests_in_iteration1 != tests_in_iteration2,
+ tests_in_iteration1)
+ self.assert_(tests_in_iteration1 != tests_in_iteration3,
+ tests_in_iteration1)
+ self.assert_(tests_in_iteration2 != tests_in_iteration3,
+ tests_in_iteration2)
+
+ def testShuffleShardedTestsPreservesPartition(self):
+ # If we run M tests on N shards, the same M tests should be run in
+ # total, regardless of the random seeds used by the shards.
+ [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
+ SHARD_INDEX_ENV_VAR: '0'},
+ [ShuffleFlag(), RandomSeedFlag(1)])
+ [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
+ SHARD_INDEX_ENV_VAR: '1'},
+ [ShuffleFlag(), RandomSeedFlag(20)])
+ [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
+ SHARD_INDEX_ENV_VAR: '2'},
+ [ShuffleFlag(), RandomSeedFlag(25)])
+ sorted_sharded_tests = tests1 + tests2 + tests3
+ sorted_sharded_tests.sort()
+ sorted_active_tests = []
+ sorted_active_tests.extend(ACTIVE_TESTS)
+ sorted_active_tests.sort()
+ self.assertEqual(sorted_active_tests, sorted_sharded_tests)
+
+if __name__ == '__main__':
+ gtest_test_utils.Main()
diff --git a/Source/ThirdParty/gtest/test/gtest_shuffle_test_.cc b/Source/ThirdParty/gtest/test/gtest_shuffle_test_.cc
new file mode 100644
index 000000000..53ecf7770
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_shuffle_test_.cc
@@ -0,0 +1,104 @@
+// Copyright 2009, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// Verifies that test shuffling works.
+
+#include <gtest/gtest.h>
+
+namespace {
+
+using ::testing::EmptyTestEventListener;
+using ::testing::InitGoogleTest;
+using ::testing::Message;
+using ::testing::Test;
+using ::testing::TestEventListeners;
+using ::testing::TestInfo;
+using ::testing::UnitTest;
+using ::testing::internal::String;
+using ::testing::internal::scoped_ptr;
+
+// The test methods are empty, as the sole purpose of this program is
+// to print the test names before/after shuffling.
+
+class A : public Test {};
+TEST_F(A, A) {}
+TEST_F(A, B) {}
+
+TEST(ADeathTest, A) {}
+TEST(ADeathTest, B) {}
+TEST(ADeathTest, C) {}
+
+TEST(B, A) {}
+TEST(B, B) {}
+TEST(B, C) {}
+TEST(B, DISABLED_D) {}
+TEST(B, DISABLED_E) {}
+
+TEST(BDeathTest, A) {}
+TEST(BDeathTest, B) {}
+
+TEST(C, A) {}
+TEST(C, B) {}
+TEST(C, C) {}
+TEST(C, DISABLED_D) {}
+
+TEST(CDeathTest, A) {}
+
+TEST(DISABLED_D, A) {}
+TEST(DISABLED_D, DISABLED_B) {}
+
+// This printer prints the full test names only, starting each test
+// iteration with a "----" marker.
+class TestNamePrinter : public EmptyTestEventListener {
+ public:
+ virtual void OnTestIterationStart(const UnitTest& /* unit_test */,
+ int /* iteration */) {
+ printf("----\n");
+ }
+
+ virtual void OnTestStart(const TestInfo& test_info) {
+ printf("%s.%s\n", test_info.test_case_name(), test_info.name());
+ }
+};
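+
+// gtest_shuffle_test.py parses this "----"-delimited output to recover the
+// list of tests run in each iteration.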
+
+} // namespace
+
+int main(int argc, char **argv) {
+ InitGoogleTest(&argc, argv);
+
+ // Replaces the default printer with TestNamePrinter, which prints
+ // the test name only.
+ TestEventListeners& listeners = UnitTest::GetInstance()->listeners();
+ delete listeners.Release(listeners.default_result_printer());
+ listeners.Append(new TestNamePrinter);
+
+ return RUN_ALL_TESTS();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_sole_header_test.cc b/Source/ThirdParty/gtest/test/gtest_sole_header_test.cc
new file mode 100644
index 000000000..de91e800f
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_sole_header_test.cc
@@ -0,0 +1,57 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: mheule@google.com (Markus Heule)
+//
+// This test verifies that it's possible to use Google Test by including
+// the gtest.h header file alone.
+
+#include <gtest/gtest.h>
+
+namespace {
+
+void Subroutine() {
+ EXPECT_EQ(42, 42);
+}
+
+TEST(NoFatalFailureTest, ExpectNoFatalFailure) {
+ EXPECT_NO_FATAL_FAILURE(;);
+ EXPECT_NO_FATAL_FAILURE(SUCCEED());
+ EXPECT_NO_FATAL_FAILURE(Subroutine());
+ EXPECT_NO_FATAL_FAILURE({ SUCCEED(); });
+}
+
+TEST(NoFatalFailureTest, AssertNoFatalFailure) {
+ ASSERT_NO_FATAL_FAILURE(;);
+ ASSERT_NO_FATAL_FAILURE(SUCCEED());
+ ASSERT_NO_FATAL_FAILURE(Subroutine());
+ ASSERT_NO_FATAL_FAILURE({ SUCCEED(); });
+}
+
+} // namespace
diff --git a/Source/ThirdParty/gtest/test/gtest_stress_test.cc b/Source/ThirdParty/gtest/test/gtest_stress_test.cc
new file mode 100644
index 000000000..f5af78cc6
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_stress_test.cc
@@ -0,0 +1,257 @@
+// Copyright 2007, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// Tests that SCOPED_TRACE() and various Google Test assertions can be
+// used in a large number of threads concurrently.
+
+#include <gtest/gtest.h>
+
+#include <iostream>
+#include <vector>
+
+// We must define this macro in order to #include
+// gtest-internal-inl.h. This is how Google Test prevents a user from
+// accidentally depending on its internal implementation.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+#if GTEST_IS_THREADSAFE
+
+namespace testing {
+namespace {
+
+using internal::Notification;
+using internal::String;
+using internal::TestPropertyKeyIs;
+using internal::ThreadWithParam;
+using internal::scoped_ptr;
+
+// To run the tests in this file on a platform where Google Test is
+// thread-safe but ThreadWithParam is not yet available, implement
+// ThreadWithParam for that platform. See the description of its API in
+// gtest-port.h, where it is defined for the already supported platforms.
+
+// How many threads to create?
+const int kThreadCount = 50;
+
+String IdToKey(int id, const char* suffix) {
+ Message key;
+ key << "key_" << id << "_" << suffix;
+ return key.GetString();
+}
+
+String IdToString(int id) {
+ Message id_message;
+ id_message << id;
+ return id_message.GetString();
+}
+
+void ExpectKeyAndValueWereRecordedForId(
+ const std::vector<TestProperty>& properties,
+ int id, const char* suffix) {
+ TestPropertyKeyIs matches_key(IdToKey(id, suffix).c_str());
+ const std::vector<TestProperty>::const_iterator property =
+ std::find_if(properties.begin(), properties.end(), matches_key);
+ ASSERT_TRUE(property != properties.end())
+ << "expecting " << suffix << " value for id " << id;
+ EXPECT_STREQ(IdToString(id).c_str(), property->value());
+}
+
+// Calls a large number of Google Test assertions, where exactly one of them
+// will fail.
+void ManyAsserts(int id) {
+ GTEST_LOG_(INFO) << "Thread #" << id << " running...";
+
+ SCOPED_TRACE(Message() << "Thread #" << id);
+
+ for (int i = 0; i < kThreadCount; i++) {
+ SCOPED_TRACE(Message() << "Iteration #" << i);
+
+ // A bunch of assertions that should succeed.
+ EXPECT_TRUE(true);
+ ASSERT_FALSE(false) << "This shouldn't fail.";
+ EXPECT_STREQ("a", "a");
+ ASSERT_LE(5, 6);
+ EXPECT_EQ(i, i) << "This shouldn't fail.";
+
+ // RecordProperty() should interact safely with other threads as well.
+ // The shared_key forces property updates.
+ Test::RecordProperty(IdToKey(id, "string").c_str(), IdToString(id).c_str());
+ Test::RecordProperty(IdToKey(id, "int").c_str(), id);
+ Test::RecordProperty("shared_key", IdToString(id).c_str());
+
+ // This assertion should fail kThreadCount times per thread. It
+ // is for testing whether Google Test can handle failed assertions in a
+ // multi-threaded context.
+ EXPECT_LT(i, 0) << "This should always fail.";
+ }
+}
+
+void CheckTestFailureCount(int expected_failures) {
+ const TestInfo* const info = UnitTest::GetInstance()->current_test_info();
+ const TestResult* const result = info->result();
+ GTEST_CHECK_(expected_failures == result->total_part_count())
+      << "Logged " << result->total_part_count() << " failures"
+      << " vs. " << expected_failures << " expected";
+}
+
+// Tests using SCOPED_TRACE() and Google Test assertions in many threads
+// concurrently.
+TEST(StressTest, CanUseScopedTraceAndAssertionsInManyThreads) {
+ {
+ scoped_ptr<ThreadWithParam<int> > threads[kThreadCount];
+ Notification threads_can_start;
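+    // Every thread blocks on threads_can_start before calling ManyAsserts(),
+    // so all kThreadCount threads begin asserting at roughly the same time
+    // and genuinely run concurrently.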
+ for (int i = 0; i != kThreadCount; i++)
+ threads[i].reset(new ThreadWithParam<int>(&ManyAsserts,
+ i,
+ &threads_can_start));
+
+ threads_can_start.Notify();
+
+ // Blocks until all the threads are done.
+ for (int i = 0; i != kThreadCount; i++)
+ threads[i]->Join();
+ }
+
+ // Ensures that kThreadCount*kThreadCount failures have been reported.
+ const TestInfo* const info = UnitTest::GetInstance()->current_test_info();
+ const TestResult* const result = info->result();
+
+ std::vector<TestProperty> properties;
+ // We have no access to the TestResult's list of properties but we can
+ // copy them one by one.
+ for (int i = 0; i < result->test_property_count(); ++i)
+ properties.push_back(result->GetTestProperty(i));
+
+ EXPECT_EQ(kThreadCount * 2 + 1, result->test_property_count())
+ << "String and int values recorded on each thread, "
+ << "as well as one shared_key";
+ for (int i = 0; i < kThreadCount; ++i) {
+ ExpectKeyAndValueWereRecordedForId(properties, i, "string");
+ ExpectKeyAndValueWereRecordedForId(properties, i, "int");
+ }
+ CheckTestFailureCount(kThreadCount*kThreadCount);
+}
+
+void FailingThread(bool is_fatal) {
+ if (is_fatal)
+ FAIL() << "Fatal failure in some other thread. "
+ << "(This failure is expected.)";
+ else
+ ADD_FAILURE() << "Non-fatal failure in some other thread. "
+ << "(This failure is expected.)";
+}
+
+void GenerateFatalFailureInAnotherThread(bool is_fatal) {
+ ThreadWithParam<bool> thread(&FailingThread, is_fatal, NULL);
+ thread.Join();
+}
+
+TEST(NoFatalFailureTest, ExpectNoFatalFailureIgnoresFailuresInOtherThreads) {
+ EXPECT_NO_FATAL_FAILURE(GenerateFatalFailureInAnotherThread(true));
+ // We should only have one failure (the one from
+ // GenerateFatalFailureInAnotherThread()), since the EXPECT_NO_FATAL_FAILURE
+ // should succeed.
+ CheckTestFailureCount(1);
+}
+
+void AssertNoFatalFailureIgnoresFailuresInOtherThreads() {
+ ASSERT_NO_FATAL_FAILURE(GenerateFatalFailureInAnotherThread(true));
+}
+TEST(NoFatalFailureTest, AssertNoFatalFailureIgnoresFailuresInOtherThreads) {
+  // Using a subroutine to make sure that the test continues.
+ AssertNoFatalFailureIgnoresFailuresInOtherThreads();
+  // We should only have one failure (the one from
+  // GenerateFatalFailureInAnotherThread()), since the ASSERT_NO_FATAL_FAILURE
+  // in the subroutine should succeed.
+ CheckTestFailureCount(1);
+}
+
+TEST(FatalFailureTest, ExpectFatalFailureIgnoresFailuresInOtherThreads) {
+ // This statement should fail, since the current thread doesn't generate a
+ // fatal failure, only another one does.
+ EXPECT_FATAL_FAILURE(GenerateFatalFailureInAnotherThread(true), "expected");
+ CheckTestFailureCount(2);
+}
+
+TEST(FatalFailureOnAllThreadsTest, ExpectFatalFailureOnAllThreads) {
+ // This statement should succeed, because failures in all threads are
+ // considered.
+ EXPECT_FATAL_FAILURE_ON_ALL_THREADS(
+ GenerateFatalFailureInAnotherThread(true), "expected");
+ CheckTestFailureCount(0);
+ // We need to add a failure, because main() checks that there are failures.
+ // But when only this test is run, we shouldn't have any failures.
+ ADD_FAILURE() << "This is an expected non-fatal failure.";
+}
+
+TEST(NonFatalFailureTest, ExpectNonFatalFailureIgnoresFailuresInOtherThreads) {
+  // This statement should fail, since the current thread doesn't generate a
+  // non-fatal failure, only another thread does.
+ EXPECT_NONFATAL_FAILURE(GenerateFatalFailureInAnotherThread(false),
+ "expected");
+ CheckTestFailureCount(2);
+}
+
+TEST(NonFatalFailureOnAllThreadsTest, ExpectNonFatalFailureOnAllThreads) {
+ // This statement should succeed, because failures in all threads are
+ // considered.
+ EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(
+ GenerateFatalFailureInAnotherThread(false), "expected");
+ CheckTestFailureCount(0);
+  // We need to add a failure, because main() checks that there are failures.
+ // But when only this test is run, we shouldn't have any failures.
+ ADD_FAILURE() << "This is an expected non-fatal failure.";
+}
+
+} // namespace
+} // namespace testing
+
+int main(int argc, char **argv) {
+ testing::InitGoogleTest(&argc, argv);
+
+ const int result = RUN_ALL_TESTS(); // Expected to fail.
+ GTEST_CHECK_(result == 1) << "RUN_ALL_TESTS() did not fail as expected";
+
+ printf("\nPASS\n");
+ return 0;
+}
+
+#else
+TEST(StressTest,
+ DISABLED_ThreadSafetyTestsAreSkippedWhenGoogleTestIsNotThreadSafe) {
+}
+
+int main(int argc, char **argv) {
+ testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
+#endif // GTEST_IS_THREADSAFE
diff --git a/Source/ThirdParty/gtest/test/gtest_test_utils.py b/Source/ThirdParty/gtest/test/gtest_test_utils.py
new file mode 100755
index 000000000..e0f5973e7
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_test_utils.py
@@ -0,0 +1,309 @@
+#!/usr/bin/env python
+#
+# Copyright 2006, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Unit test utilities for Google C++ Testing Framework."""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import atexit
+import os
+import shutil
+import sys
+import tempfile
+import unittest
+_test_module = unittest
+
+# Suppresses the 'Import not at the top of the file' lint complaint.
+# pylint: disable-msg=C6204
+try:
+ import subprocess
+ _SUBPROCESS_MODULE_AVAILABLE = True
+except:
+ import popen2
+ _SUBPROCESS_MODULE_AVAILABLE = False
+# pylint: enable-msg=C6204
+
+GTEST_OUTPUT_VAR_NAME = 'GTEST_OUTPUT'
+
+IS_WINDOWS = os.name == 'nt'
+IS_CYGWIN = os.name == 'posix' and 'CYGWIN' in os.uname()[0]
+
+# Here we expose a class from a particular module, depending on the
+# environment. The comment suppresses the 'Invalid variable name' lint
+# complaint.
+TestCase = _test_module.TestCase # pylint: disable-msg=C6409
+
+# Initially maps a flag to its default value. After
+# _ParseAndStripGTestFlags() is called, maps a flag to its actual value.
+_flag_map = {'gtest_source_dir': os.path.dirname(sys.argv[0]),
+ 'gtest_build_dir': os.path.dirname(sys.argv[0])}
+_gtest_flags_are_parsed = False
+
+
+def _ParseAndStripGTestFlags(argv):
+ """Parses and strips Google Test flags from argv. This is idempotent."""
+
+ # Suppresses the lint complaint about a global variable since we need it
+ # here to maintain module-wide state.
+ global _gtest_flags_are_parsed # pylint: disable-msg=W0603
+ if _gtest_flags_are_parsed:
+ return
+
+ _gtest_flags_are_parsed = True
+ for flag in _flag_map:
+ # The environment variable overrides the default value.
+ if flag.upper() in os.environ:
+ _flag_map[flag] = os.environ[flag.upper()]
+
+ # The command line flag overrides the environment variable.
+ i = 1 # Skips the program name.
+ while i < len(argv):
+ prefix = '--' + flag + '='
+ if argv[i].startswith(prefix):
+ _flag_map[flag] = argv[i][len(prefix):]
+ del argv[i]
+ break
+ else:
+ # We don't increment i in case we just found a --gtest_* flag
+ # and removed it from argv.
+ i += 1
+
+
+def GetFlag(flag):
+ """Returns the value of the given flag."""
+
+ # In case GetFlag() is called before Main(), we always call
+ # _ParseAndStripGTestFlags() here to make sure the --gtest_* flags
+ # are parsed.
+ _ParseAndStripGTestFlags(sys.argv)
+
+ return _flag_map[flag]
+
+
+def GetSourceDir():
+ """Returns the absolute path of the directory where the .py files are."""
+
+ return os.path.abspath(GetFlag('gtest_source_dir'))
+
+
+def GetBuildDir():
+ """Returns the absolute path of the directory where the test binaries are."""
+
+ return os.path.abspath(GetFlag('gtest_build_dir'))
+
+
+_temp_dir = None
+
+def _RemoveTempDir():
+ if _temp_dir:
+ shutil.rmtree(_temp_dir, ignore_errors=True)
+
+atexit.register(_RemoveTempDir)
+
+
+def GetTempDir():
+ """Returns a directory for temporary files."""
+
+ global _temp_dir
+ if not _temp_dir:
+ _temp_dir = tempfile.mkdtemp()
+ return _temp_dir
+
+
+def GetTestExecutablePath(executable_name, build_dir=None):
+ """Returns the absolute path of the test binary given its name.
+
+ The function will print a message and abort the program if the resulting file
+ doesn't exist.
+
+ Args:
+ executable_name: name of the test binary that the test script runs.
+ build_dir: directory where to look for executables, by default
+ the result of GetBuildDir().
+
+ Returns:
+ The absolute path of the test binary.
+ """
+
+ path = os.path.abspath(os.path.join(build_dir or GetBuildDir(),
+ executable_name))
+ if (IS_WINDOWS or IS_CYGWIN) and not path.endswith('.exe'):
+ path += '.exe'
+
+ if not os.path.exists(path):
+ message = (
+ 'Unable to find the test binary. Please make sure to provide path\n'
+ 'to the binary via the --gtest_build_dir flag or the GTEST_BUILD_DIR\n'
+ 'environment variable. For convenient use, invoke this script via\n'
+ 'mk_test.py.\n'
+ # TODO(vladl@google.com): change mk_test.py to test.py after renaming
+ # the file.
+ 'Please run mk_test.py -h for help.')
+ print >> sys.stderr, message
+ sys.exit(1)
+
+ return path
+
+
+def GetExitStatus(exit_code):
+ """Returns the argument to exit(), or -1 if exit() wasn't called.
+
+ Args:
+ exit_code: the result value of os.system(command).
+ """
+
+ if os.name == 'nt':
+ # On Windows, os.WEXITSTATUS() doesn't work and os.system() returns
+ # the argument to exit() directly.
+ return exit_code
+ else:
+ # On Unix, os.WEXITSTATUS() must be used to extract the exit status
+ # from the result of os.system().
+ if os.WIFEXITED(exit_code):
+ return os.WEXITSTATUS(exit_code)
+ else:
+ return -1
+
+
+class Subprocess:
+ def __init__(self, command, working_dir=None, capture_stderr=True, env=None):
+ """Changes into a specified directory, if provided, and executes a command.
+
+ Restores the old directory afterwards.
+
+ Args:
+ command: The command to run, in the form of sys.argv.
+ working_dir: The directory to change into.
+ capture_stderr: Determines whether to capture stderr in the output member
+ or to discard it.
+ env: Dictionary with environment to pass to the subprocess.
+
+ Returns:
+ An object that represents outcome of the executed process. It has the
+ following attributes:
+ terminated_by_signal True iff the child process has been terminated
+ by a signal.
+      signal                 Signal that terminated the child process.
+ exited True iff the child process exited normally.
+ exit_code The code with which the child process exited.
+ output Child process's stdout and stderr output
+ combined in a string.
+ """
+
+    # The subprocess module is the preferable way of running programs
+    # since it is available and behaves consistently on all platforms,
+    # including Windows. But it is only available starting in Python 2.4.
+    # In earlier Python versions, we fall back to the popen2 module, which
+    # is available in Python 2.0 and later but doesn't provide the required
+    # functionality (Popen4) under Windows. This allows us to support Mac
+    # OS X 10.4 Tiger, which has Python 2.3 installed.
+ if _SUBPROCESS_MODULE_AVAILABLE:
+ if capture_stderr:
+ stderr = subprocess.STDOUT
+ else:
+ stderr = subprocess.PIPE
+
+ p = subprocess.Popen(command,
+ stdout=subprocess.PIPE, stderr=stderr,
+ cwd=working_dir, universal_newlines=True, env=env)
+      # communicate() returns a tuple whose first element is the child's
+      # combined stdout (and, when captured, stderr) output.
+ self.output = p.communicate()[0]
+ self._return_code = p.returncode
+ else:
+ old_dir = os.getcwd()
+
+ def _ReplaceEnvDict(dest, src):
+ # Changes made by os.environ.clear are not inheritable by child
+ # processes until Python 2.6. To produce inheritable changes we have
+ # to delete environment items with the del statement.
+ for key in dest:
+ del dest[key]
+ dest.update(src)
+
+ # When 'env' is not None, backup the environment variables and replace
+ # them with the passed 'env'. When 'env' is None, we simply use the
+ # current 'os.environ' for compatibility with the subprocess.Popen
+ # semantics used above.
+ if env is not None:
+ old_environ = os.environ.copy()
+ _ReplaceEnvDict(os.environ, env)
+
+ try:
+ if working_dir is not None:
+ os.chdir(working_dir)
+ if capture_stderr:
+ p = popen2.Popen4(command)
+ else:
+ p = popen2.Popen3(command)
+ p.tochild.close()
+ self.output = p.fromchild.read()
+ ret_code = p.wait()
+ finally:
+ os.chdir(old_dir)
+
+ # Restore the old environment variables
+ # if they were replaced.
+ if env is not None:
+ _ReplaceEnvDict(os.environ, old_environ)
+
+ # Converts ret_code to match the semantics of
+ # subprocess.Popen.returncode.
+ if os.WIFSIGNALED(ret_code):
+ self._return_code = -os.WTERMSIG(ret_code)
+ else: # os.WIFEXITED(ret_code) should return True here.
+ self._return_code = os.WEXITSTATUS(ret_code)
+
+ if self._return_code < 0:
+ self.terminated_by_signal = True
+ self.exited = False
+ self.signal = -self._return_code
+ else:
+ self.terminated_by_signal = False
+ self.exited = True
+ self.exit_code = self._return_code
+
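+# Illustration only, with a hypothetical binary name: the Subprocess wrapper
+# above is typically used like
+#
+#   p = gtest_test_utils.Subprocess(['./gtest_foo_test_'])
+#   if p.exited and p.exit_code == 0:
+#     print p.output                       # stdout (and captured stderr)
+#   elif p.terminated_by_signal:
+#     print 'killed by signal %d' % p.signal
+#
+# regardless of which of the two code paths (subprocess or popen2) was taken.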
+
+def Main():
+ """Runs the unit test."""
+
+ # We must call _ParseAndStripGTestFlags() before calling
+ # unittest.main(). Otherwise the latter will be confused by the
+ # --gtest_* flags.
+ _ParseAndStripGTestFlags(sys.argv)
+ # The tested binaries should not be writing XML output files unless the
+ # script explicitly instructs them to.
+ # TODO(vladl@google.com): Move this into Subprocess when we implement
+ # passing environment into it as a parameter.
+ if GTEST_OUTPUT_VAR_NAME in os.environ:
+ del os.environ[GTEST_OUTPUT_VAR_NAME]
+
+ _test_module.main()
diff --git a/Source/ThirdParty/gtest/test/gtest_throw_on_failure_ex_test.cc b/Source/ThirdParty/gtest/test/gtest_throw_on_failure_ex_test.cc
new file mode 100644
index 000000000..8bf9dc905
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_throw_on_failure_ex_test.cc
@@ -0,0 +1,92 @@
+// Copyright 2009, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// Tests Google Test's throw-on-failure mode with exceptions enabled.
+
+#include <gtest/gtest.h>
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <stdexcept>
+
+// Prints the given failure message and exits the program with
+// non-zero. We use this instead of a Google Test assertion to
+// indicate a failure, as the latter is being tested here and cannot be
+// relied on.
+void Fail(const char* msg) {
+ printf("FAILURE: %s\n", msg);
+ fflush(stdout);
+ exit(1);
+}
+
+// Tests that an assertion failure throws a subclass of
+// std::runtime_error.
+void TestFailureThrowsRuntimeError() {
+ testing::GTEST_FLAG(throw_on_failure) = true;
+
+ // A successful assertion shouldn't throw.
+ try {
+ EXPECT_EQ(3, 3);
+ } catch(...) {
+ Fail("A successful assertion wrongfully threw.");
+ }
+
+ // A failed assertion should throw a subclass of std::runtime_error.
+ try {
+ EXPECT_EQ(2, 3) << "Expected failure";
+ } catch(const std::runtime_error& e) {
+ if (strstr(e.what(), "Expected failure") != NULL)
+ return;
+
+ printf("%s",
+ "A failed assertion did throw an exception of the right type, "
+ "but the message is incorrect. Instead of containing \"Expected "
+ "failure\", it is:\n");
+ Fail(e.what());
+ } catch(...) {
+ Fail("A failed assertion threw the wrong type of exception.");
+ }
+ Fail("A failed assertion should've thrown but didn't.");
+}
+
+int main(int argc, char** argv) {
+ testing::InitGoogleTest(&argc, argv);
+
+ // We want to ensure that people can use Google Test assertions in
+ // other testing frameworks, as long as they initialize Google Test
+  // properly and set the throw-on-failure mode. Therefore, we don't
+ // use Google Test's constructs for defining and running tests
+ // (e.g. TEST and RUN_ALL_TESTS) here.
+
+ TestFailureThrowsRuntimeError();
+ return 0;
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_throw_on_failure_test.py b/Source/ThirdParty/gtest/test/gtest_throw_on_failure_test.py
new file mode 100755
index 000000000..5678ffeaf
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_throw_on_failure_test.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Tests Google Test's throw-on-failure mode with exceptions disabled.
+
+This script invokes gtest_throw_on_failure_test_ (a program written with
+Google Test) with different environments and command line flags.
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import gtest_test_utils
+
+
+# Constants.
+
+# The command line flag for enabling/disabling the throw-on-failure mode.
+THROW_ON_FAILURE = 'gtest_throw_on_failure'
+
+# Path to the gtest_throw_on_failure_test_ program, compiled with
+# exceptions disabled.
+EXE_PATH = gtest_test_utils.GetTestExecutablePath(
+ 'gtest_throw_on_failure_test_')
+
+
+# Utilities.
+
+
+def SetEnvVar(env_var, value):
+ """Sets an environment variable to a given value; unsets it when the
+ given value is None.
+ """
+
+ env_var = env_var.upper()
+ if value is not None:
+ os.environ[env_var] = value
+ elif env_var in os.environ:
+ del os.environ[env_var]
+
+
+def Run(command):
+ """Runs a command; returns True/False if its exit code is/isn't 0."""
+
+ print 'Running "%s". . .' % ' '.join(command)
+ p = gtest_test_utils.Subprocess(command)
+ return p.exited and p.exit_code == 0
+
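+# Illustration only: RunAndVerify() below combines these two helpers,
+# roughly as
+#
+#   SetEnvVar(THROW_ON_FAILURE, '1')   # exports GTEST_THROW_ON_FAILURE=1
+#   passed = Run([EXE_PATH])           # True iff the exit code was 0
+#   SetEnvVar(THROW_ON_FAILURE, None)  # unsets the variable again
+#
+# so the throw-on-failure mode can be exercised through either the
+# environment variable or the --gtest_throw_on_failure flag.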
+
+# The tests. TODO(wan@google.com): refactor the class to share common
+# logic with code in gtest_break_on_failure_unittest.py.
+class ThrowOnFailureTest(gtest_test_utils.TestCase):
+ """Tests the throw-on-failure mode."""
+
+ def RunAndVerify(self, env_var_value, flag_value, should_fail):
+ """Runs gtest_throw_on_failure_test_ and verifies that it does
+ (or does not) exit with a non-zero code.
+
+ Args:
+      env_var_value:    value of the GTEST_THROW_ON_FAILURE environment
+                        variable; None if the variable should be unset.
+      flag_value:       value of the --gtest_throw_on_failure flag;
+                        None if the flag should not be present.
+ should_fail: True iff the program is expected to fail.
+ """
+
+ SetEnvVar(THROW_ON_FAILURE, env_var_value)
+
+ if env_var_value is None:
+ env_var_value_msg = ' is not set'
+ else:
+ env_var_value_msg = '=' + env_var_value
+
+ if flag_value is None:
+ flag = ''
+ elif flag_value == '0':
+ flag = '--%s=0' % THROW_ON_FAILURE
+ else:
+ flag = '--%s' % THROW_ON_FAILURE
+
+ command = [EXE_PATH]
+ if flag:
+ command.append(flag)
+
+ if should_fail:
+ should_or_not = 'should'
+ else:
+ should_or_not = 'should not'
+
+ failed = not Run(command)
+
+ SetEnvVar(THROW_ON_FAILURE, None)
+
+ msg = ('when %s%s, an assertion failure in "%s" %s cause a non-zero '
+ 'exit code.' %
+ (THROW_ON_FAILURE, env_var_value_msg, ' '.join(command),
+ should_or_not))
+ self.assert_(failed == should_fail, msg)
+
+ def testDefaultBehavior(self):
+ """Tests the behavior of the default mode."""
+
+ self.RunAndVerify(env_var_value=None, flag_value=None, should_fail=False)
+
+ def testThrowOnFailureEnvVar(self):
+ """Tests using the GTEST_THROW_ON_FAILURE environment variable."""
+
+ self.RunAndVerify(env_var_value='0',
+ flag_value=None,
+ should_fail=False)
+ self.RunAndVerify(env_var_value='1',
+ flag_value=None,
+ should_fail=True)
+
+ def testThrowOnFailureFlag(self):
+ """Tests using the --gtest_throw_on_failure flag."""
+
+ self.RunAndVerify(env_var_value=None,
+ flag_value='0',
+ should_fail=False)
+ self.RunAndVerify(env_var_value=None,
+ flag_value='1',
+ should_fail=True)
+
+ def testThrowOnFailureFlagOverridesEnvVar(self):
+ """Tests that --gtest_throw_on_failure overrides GTEST_THROW_ON_FAILURE."""
+
+ self.RunAndVerify(env_var_value='0',
+ flag_value='0',
+ should_fail=False)
+ self.RunAndVerify(env_var_value='0',
+ flag_value='1',
+ should_fail=True)
+ self.RunAndVerify(env_var_value='1',
+ flag_value='0',
+ should_fail=False)
+ self.RunAndVerify(env_var_value='1',
+ flag_value='1',
+ should_fail=True)
+
+
+if __name__ == '__main__':
+ gtest_test_utils.Main()
diff --git a/Source/ThirdParty/gtest/test/gtest_throw_on_failure_test_.cc b/Source/ThirdParty/gtest/test/gtest_throw_on_failure_test_.cc
new file mode 100644
index 000000000..88fbd5a76
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_throw_on_failure_test_.cc
@@ -0,0 +1,56 @@
+// Copyright 2009, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// Tests Google Test's throw-on-failure mode with exceptions disabled.
+//
+// This program must be compiled with exceptions disabled. It will be
+// invoked by gtest_throw_on_failure_test.py, and is expected to exit
+// with a non-zero code in the throw-on-failure mode, or 0 otherwise.
+
+#include <gtest/gtest.h>
+
+int main(int argc, char** argv) {
+ testing::InitGoogleTest(&argc, argv);
+
+ // We want to ensure that people can use Google Test assertions in
+ // other testing frameworks, as long as they initialize Google Test
+  // properly and set the throw-on-failure mode. Therefore, we don't
+ // use Google Test's constructs for defining and running tests
+ // (e.g. TEST and RUN_ALL_TESTS) here.
+
+ // In the throw-on-failure mode with exceptions disabled, this
+ // assertion will cause the program to exit with a non-zero code.
+ EXPECT_EQ(2, 3);
+
+  // When not in the throw-on-failure mode, control will reach
+  // here.
+ return 0;
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_uninitialized_test.py b/Source/ThirdParty/gtest/test/gtest_uninitialized_test.py
new file mode 100755
index 000000000..6ae57eeed
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_uninitialized_test.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+#
+# Copyright 2008, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Verifies that Google Test warns the user when not initialized properly."""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import gtest_test_utils
+
+
+COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_uninitialized_test_')
+
+
+def Assert(condition):
+ if not condition:
+ raise AssertionError
+
+
+def AssertEq(expected, actual):
+ if expected != actual:
+ print 'Expected: %s' % (expected,)
+ print ' Actual: %s' % (actual,)
+ raise AssertionError
+
+
+def TestExitCodeAndOutput(command):
+ """Runs the given command and verifies its exit code and output."""
+
+ # Verifies that 'command' exits with code 1.
+ p = gtest_test_utils.Subprocess(command)
+ Assert(p.exited)
+ AssertEq(1, p.exit_code)
+ Assert('InitGoogleTest' in p.output)
+
+
+class GTestUninitializedTest(gtest_test_utils.TestCase):
+ def testExitCodeAndOutput(self):
+ TestExitCodeAndOutput(COMMAND)
+
+
+if __name__ == '__main__':
+ gtest_test_utils.Main()
diff --git a/Source/ThirdParty/gtest/test/gtest_uninitialized_test_.cc b/Source/ThirdParty/gtest/test/gtest_uninitialized_test_.cc
new file mode 100644
index 000000000..e8b2aa81d
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_uninitialized_test_.cc
@@ -0,0 +1,43 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+#include <gtest/gtest.h>
+
+TEST(DummyTest, Dummy) {
+ // This test doesn't verify anything. We just need it to create a
+ // realistic stage for testing the behavior of Google Test when
+ // RUN_ALL_TESTS() is called without testing::InitGoogleTest() being
+ // called first.
+}
+
+int main() {
+ return RUN_ALL_TESTS();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_unittest.cc b/Source/ThirdParty/gtest/test/gtest_unittest.cc
new file mode 100644
index 000000000..a14f065a2
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_unittest.cc
@@ -0,0 +1,6718 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+//
+// Tests for Google Test itself. This verifies that the basic constructs of
+// Google Test work.
+
+#include <gtest/gtest.h>
+#include <vector>
+
+// Verifies that the command line flag variables can be accessed
+// in code once <gtest/gtest.h> has been #included.
+// Do not move it after other #includes.
+TEST(CommandLineFlagsTest, CanBeAccessedInCodeOnceGTestHIsIncluded) {
+ bool dummy = testing::GTEST_FLAG(also_run_disabled_tests)
+ || testing::GTEST_FLAG(break_on_failure)
+ || testing::GTEST_FLAG(catch_exceptions)
+ || testing::GTEST_FLAG(color) != "unknown"
+ || testing::GTEST_FLAG(filter) != "unknown"
+ || testing::GTEST_FLAG(list_tests)
+ || testing::GTEST_FLAG(output) != "unknown"
+ || testing::GTEST_FLAG(print_time)
+ || testing::GTEST_FLAG(random_seed)
+ || testing::GTEST_FLAG(repeat) > 0
+ || testing::GTEST_FLAG(show_internal_stack_frames)
+ || testing::GTEST_FLAG(shuffle)
+ || testing::GTEST_FLAG(stack_trace_depth) > 0
+ || testing::GTEST_FLAG(throw_on_failure);
+ EXPECT_TRUE(dummy || !dummy); // Suppresses warning that dummy is unused.
+}
+
+#include <gtest/gtest-spi.h>
+
+// Indicates that this translation unit is part of Google Test's
+// implementation. It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error. This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#include "src/gtest-internal-inl.h"
+#undef GTEST_IMPLEMENTATION_
+
+#include <limits.h> // For INT_MAX.
+#include <stdlib.h>
+#include <time.h>
+
+#include <map>
+
+namespace testing {
+namespace internal {
+
+// Provides access to otherwise private parts of the TestEventListeners class
+// that are needed to test it.
+class TestEventListenersAccessor {
+ public:
+ static TestEventListener* GetRepeater(TestEventListeners* listeners) {
+ return listeners->repeater();
+ }
+
+ static void SetDefaultResultPrinter(TestEventListeners* listeners,
+ TestEventListener* listener) {
+ listeners->SetDefaultResultPrinter(listener);
+ }
+ static void SetDefaultXmlGenerator(TestEventListeners* listeners,
+ TestEventListener* listener) {
+ listeners->SetDefaultXmlGenerator(listener);
+ }
+
+ static bool EventForwardingEnabled(const TestEventListeners& listeners) {
+ return listeners.EventForwardingEnabled();
+ }
+
+ static void SuppressEventForwarding(TestEventListeners* listeners) {
+ listeners->SuppressEventForwarding();
+ }
+};
+
+} // namespace internal
+} // namespace testing
+
+using testing::AssertionFailure;
+using testing::AssertionResult;
+using testing::AssertionSuccess;
+using testing::DoubleLE;
+using testing::EmptyTestEventListener;
+using testing::FloatLE;
+using testing::GTEST_FLAG(also_run_disabled_tests);
+using testing::GTEST_FLAG(break_on_failure);
+using testing::GTEST_FLAG(catch_exceptions);
+using testing::GTEST_FLAG(color);
+using testing::GTEST_FLAG(death_test_use_fork);
+using testing::GTEST_FLAG(filter);
+using testing::GTEST_FLAG(list_tests);
+using testing::GTEST_FLAG(output);
+using testing::GTEST_FLAG(print_time);
+using testing::GTEST_FLAG(random_seed);
+using testing::GTEST_FLAG(repeat);
+using testing::GTEST_FLAG(show_internal_stack_frames);
+using testing::GTEST_FLAG(shuffle);
+using testing::GTEST_FLAG(stack_trace_depth);
+using testing::GTEST_FLAG(throw_on_failure);
+using testing::IsNotSubstring;
+using testing::IsSubstring;
+using testing::Message;
+using testing::ScopedFakeTestPartResultReporter;
+using testing::StaticAssertTypeEq;
+using testing::Test;
+using testing::TestEventListeners;
+using testing::TestCase;
+using testing::TestPartResult;
+using testing::TestPartResultArray;
+using testing::TestProperty;
+using testing::TestResult;
+using testing::UnitTest;
+using testing::kMaxStackTraceDepth;
+using testing::internal::AlwaysFalse;
+using testing::internal::AlwaysTrue;
+using testing::internal::AppendUserMessage;
+using testing::internal::CodePointToUtf8;
+using testing::internal::CountIf;
+using testing::internal::EqFailure;
+using testing::internal::FloatingPoint;
+using testing::internal::FormatTimeInMillisAsSeconds;
+using testing::internal::ForEach;
+using testing::internal::GTestFlagSaver;
+using testing::internal::GetCurrentOsStackTraceExceptTop;
+using testing::internal::GetElementOr;
+using testing::internal::GetNextRandomSeed;
+using testing::internal::GetRandomSeedFromFlag;
+using testing::internal::GetTestTypeId;
+using testing::internal::GetTypeId;
+using testing::internal::GetUnitTestImpl;
+using testing::internal::Int32;
+using testing::internal::Int32FromEnvOrDie;
+using testing::internal::ParseInt32Flag;
+using testing::internal::ShouldRunTestOnShard;
+using testing::internal::ShouldShard;
+using testing::internal::ShouldUseColor;
+using testing::internal::Shuffle;
+using testing::internal::ShuffleRange;
+using testing::internal::StreamableToString;
+using testing::internal::String;
+using testing::internal::TestEventListenersAccessor;
+using testing::internal::TestResultAccessor;
+using testing::internal::UInt32;
+using testing::internal::WideStringToUtf8;
+using testing::internal::kMaxRandomSeed;
+using testing::internal::kTestTypeIdInGoogleTest;
+using testing::internal::scoped_ptr;
+
+#if GTEST_HAS_STREAM_REDIRECTION_
+using testing::internal::CaptureStdout;
+using testing::internal::GetCapturedStdout;
+#endif // GTEST_HAS_STREAM_REDIRECTION_
+
+#if GTEST_IS_THREADSAFE
+using testing::internal::ThreadWithParam;
+#endif
+
+class TestingVector : public std::vector<int> {
+};
+
+::std::ostream& operator<<(::std::ostream& os,
+ const TestingVector& vector) {
+ os << "{ ";
+ for (size_t i = 0; i < vector.size(); i++) {
+ os << vector[i] << " ";
+ }
+ os << "}";
+ return os;
+}
+
+// This line tests that we can define tests in an unnamed namespace.
+namespace {
+
+TEST(GetRandomSeedFromFlagTest, HandlesZero) {
+ const int seed = GetRandomSeedFromFlag(0);
+ EXPECT_LE(1, seed);
+ EXPECT_LE(seed, static_cast<int>(kMaxRandomSeed));
+}
+
+TEST(GetRandomSeedFromFlagTest, PreservesValidSeed) {
+ EXPECT_EQ(1, GetRandomSeedFromFlag(1));
+ EXPECT_EQ(2, GetRandomSeedFromFlag(2));
+ EXPECT_EQ(kMaxRandomSeed - 1, GetRandomSeedFromFlag(kMaxRandomSeed - 1));
+ EXPECT_EQ(static_cast<int>(kMaxRandomSeed),
+ GetRandomSeedFromFlag(kMaxRandomSeed));
+}
+
+TEST(GetRandomSeedFromFlagTest, NormalizesInvalidSeed) {
+ const int seed1 = GetRandomSeedFromFlag(-1);
+ EXPECT_LE(1, seed1);
+ EXPECT_LE(seed1, static_cast<int>(kMaxRandomSeed));
+
+ const int seed2 = GetRandomSeedFromFlag(kMaxRandomSeed + 1);
+ EXPECT_LE(1, seed2);
+ EXPECT_LE(seed2, static_cast<int>(kMaxRandomSeed));
+}
+
+TEST(GetNextRandomSeedTest, WorksForValidInput) {
+ EXPECT_EQ(2, GetNextRandomSeed(1));
+ EXPECT_EQ(3, GetNextRandomSeed(2));
+ EXPECT_EQ(static_cast<int>(kMaxRandomSeed),
+ GetNextRandomSeed(kMaxRandomSeed - 1));
+ EXPECT_EQ(1, GetNextRandomSeed(kMaxRandomSeed));
+
+ // We deliberately don't test GetNextRandomSeed() with invalid
+ // inputs, as that requires death tests, which are expensive. This
+ // is fine as GetNextRandomSeed() is internal and has a
+ // straightforward definition.
+}
+
+static void ClearCurrentTestPartResults() {
+ TestResultAccessor::ClearTestPartResults(
+ GetUnitTestImpl()->current_test_result());
+}
+
+// Tests GetTypeId.
+
+TEST(GetTypeIdTest, ReturnsSameValueForSameType) {
+ EXPECT_EQ(GetTypeId<int>(), GetTypeId<int>());
+ EXPECT_EQ(GetTypeId<Test>(), GetTypeId<Test>());
+}
+
+class SubClassOfTest : public Test {};
+class AnotherSubClassOfTest : public Test {};
+
+TEST(GetTypeIdTest, ReturnsDifferentValuesForDifferentTypes) {
+ EXPECT_NE(GetTypeId<int>(), GetTypeId<const int>());
+ EXPECT_NE(GetTypeId<int>(), GetTypeId<char>());
+ EXPECT_NE(GetTypeId<int>(), GetTestTypeId());
+ EXPECT_NE(GetTypeId<SubClassOfTest>(), GetTestTypeId());
+ EXPECT_NE(GetTypeId<AnotherSubClassOfTest>(), GetTestTypeId());
+ EXPECT_NE(GetTypeId<AnotherSubClassOfTest>(), GetTypeId<SubClassOfTest>());
+}
+
+// Verifies that GetTestTypeId() returns the same value, no matter whether
+// it is called from inside Google Test or outside of it.
+TEST(GetTestTypeIdTest, ReturnsTheSameValueInsideOrOutsideOfGoogleTest) {
+ EXPECT_EQ(kTestTypeIdInGoogleTest, GetTestTypeId());
+}
+
+// Tests FormatTimeInMillisAsSeconds().
+
+TEST(FormatTimeInMillisAsSecondsTest, FormatsZero) {
+ EXPECT_EQ("0", FormatTimeInMillisAsSeconds(0));
+}
+
+TEST(FormatTimeInMillisAsSecondsTest, FormatsPositiveNumber) {
+ EXPECT_EQ("0.003", FormatTimeInMillisAsSeconds(3));
+ EXPECT_EQ("0.01", FormatTimeInMillisAsSeconds(10));
+ EXPECT_EQ("0.2", FormatTimeInMillisAsSeconds(200));
+ EXPECT_EQ("1.2", FormatTimeInMillisAsSeconds(1200));
+ EXPECT_EQ("3", FormatTimeInMillisAsSeconds(3000));
+}
+
+TEST(FormatTimeInMillisAsSecondsTest, FormatsNegativeNumber) {
+ EXPECT_EQ("-0.003", FormatTimeInMillisAsSeconds(-3));
+ EXPECT_EQ("-0.01", FormatTimeInMillisAsSeconds(-10));
+ EXPECT_EQ("-0.2", FormatTimeInMillisAsSeconds(-200));
+ EXPECT_EQ("-1.2", FormatTimeInMillisAsSeconds(-1200));
+ EXPECT_EQ("-3", FormatTimeInMillisAsSeconds(-3000));
+}
+
+#if GTEST_CAN_COMPARE_NULL
+
+#ifdef __BORLANDC__
+// Silences warnings: "Condition is always true", "Unreachable code"
+#pragma option push -w-ccc -w-rch
+#endif
+
+// Tests that GTEST_IS_NULL_LITERAL_(x) is true when x is a null
+// pointer literal.
+TEST(NullLiteralTest, IsTrueForNullLiterals) {
+ EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(NULL));
+ EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(0));
+ EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(0U));
+ EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(0L));
+ EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(false));
+#ifndef __BORLANDC__
+ // Some compilers may fail to detect some null pointer literals;
+ // as long as users of the framework don't use such literals, this
+ // is harmless.
+ EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(1 - 1));
+ EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(true && false));
+#endif
+}
+
+// Tests that GTEST_IS_NULL_LITERAL_(x) is false when x is not a null
+// pointer literal.
+TEST(NullLiteralTest, IsFalseForNonNullLiterals) {
+ EXPECT_FALSE(GTEST_IS_NULL_LITERAL_(1));
+ EXPECT_FALSE(GTEST_IS_NULL_LITERAL_(0.0));
+ EXPECT_FALSE(GTEST_IS_NULL_LITERAL_('a'));
+ EXPECT_FALSE(GTEST_IS_NULL_LITERAL_(static_cast<void*>(NULL)));
+}
+
+#ifdef __BORLANDC__
+// Restores warnings after previous "#pragma option push" suppressed them.
+#pragma option pop
+#endif
+
+#endif // GTEST_CAN_COMPARE_NULL
+
+// Tests CodePointToUtf8().
+
+// Tests that the NUL character L'\0' is encoded correctly.
+TEST(CodePointToUtf8Test, CanEncodeNul) {
+ char buffer[32];
+ EXPECT_STREQ("", CodePointToUtf8(L'\0', buffer));
+}
+
+// Tests that ASCII characters are encoded correctly.
+TEST(CodePointToUtf8Test, CanEncodeAscii) {
+ char buffer[32];
+ EXPECT_STREQ("a", CodePointToUtf8(L'a', buffer));
+ EXPECT_STREQ("Z", CodePointToUtf8(L'Z', buffer));
+ EXPECT_STREQ("&", CodePointToUtf8(L'&', buffer));
+ EXPECT_STREQ("\x7F", CodePointToUtf8(L'\x7F', buffer));
+}
+
+// Tests that Unicode code-points that have 8 to 11 bits are encoded
+// as 110xxxxx 10xxxxxx.
+TEST(CodePointToUtf8Test, CanEncode8To11Bits) {
+ char buffer[32];
+ // 000 1101 0011 => 110-00011 10-010011
+ EXPECT_STREQ("\xC3\x93", CodePointToUtf8(L'\xD3', buffer));
+
+ // 101 0111 0110 => 110-10101 10-110110
+ EXPECT_STREQ("\xD5\xB6", CodePointToUtf8(L'\x576', buffer));
+}
+
+// Tests that Unicode code-points that have 12 to 16 bits are encoded
+// as 1110xxxx 10xxxxxx 10xxxxxx.
+TEST(CodePointToUtf8Test, CanEncode12To16Bits) {
+ char buffer[32];
+ // 0000 1000 1101 0011 => 1110-0000 10-100011 10-010011
+ EXPECT_STREQ("\xE0\xA3\x93", CodePointToUtf8(L'\x8D3', buffer));
+
+ // 1100 0111 0100 1101 => 1110-1100 10-011101 10-001101
+ EXPECT_STREQ("\xEC\x9D\x8D", CodePointToUtf8(L'\xC74D', buffer));
+}
+
+#if !GTEST_WIDE_STRING_USES_UTF16_
+// Tests in this group require a wchar_t to hold > 16 bits, and thus
+// are skipped on Windows, Cygwin, and Symbian, where a wchar_t is
+// 16 bits wide. This code may not compile on those systems.
+
+// Tests that Unicode code-points that have 17 to 21 bits are encoded
+// as 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx.
+TEST(CodePointToUtf8Test, CanEncode17To21Bits) {
+ char buffer[32];
+ // 0 0001 0000 1000 1101 0011 => 11110-000 10-010000 10-100011 10-010011
+ EXPECT_STREQ("\xF0\x90\xA3\x93", CodePointToUtf8(L'\x108D3', buffer));
+
+ // 0 0001 0000 0100 0000 0000 => 11110-000 10-010000 10-010000 10-000000
+ EXPECT_STREQ("\xF0\x90\x90\x80", CodePointToUtf8(L'\x10400', buffer));
+
+ // 1 0000 1000 0110 0011 0100 => 11110-100 10-001000 10-011000 10-110100
+ EXPECT_STREQ("\xF4\x88\x98\xB4", CodePointToUtf8(L'\x108634', buffer));
+}
+
+// Tests that encoding an invalid code-point generates the expected result.
+TEST(CodePointToUtf8Test, CanEncodeInvalidCodePoint) {
+ char buffer[32];
+ EXPECT_STREQ("(Invalid Unicode 0x1234ABCD)",
+ CodePointToUtf8(L'\x1234ABCD', buffer));
+}
+
+#endif // !GTEST_WIDE_STRING_USES_UTF16_
+
+// Tests WideStringToUtf8().
+
+// Tests that the NUL character L'\0' is encoded correctly.
+TEST(WideStringToUtf8Test, CanEncodeNul) {
+ EXPECT_STREQ("", WideStringToUtf8(L"", 0).c_str());
+ EXPECT_STREQ("", WideStringToUtf8(L"", -1).c_str());
+}
+
+// Tests that ASCII strings are encoded correctly.
+TEST(WideStringToUtf8Test, CanEncodeAscii) {
+ EXPECT_STREQ("a", WideStringToUtf8(L"a", 1).c_str());
+ EXPECT_STREQ("ab", WideStringToUtf8(L"ab", 2).c_str());
+ EXPECT_STREQ("a", WideStringToUtf8(L"a", -1).c_str());
+ EXPECT_STREQ("ab", WideStringToUtf8(L"ab", -1).c_str());
+}
+
+// Tests that Unicode code-points that have 8 to 11 bits are encoded
+// as 110xxxxx 10xxxxxx.
+TEST(WideStringToUtf8Test, CanEncode8To11Bits) {
+ // 000 1101 0011 => 110-00011 10-010011
+ EXPECT_STREQ("\xC3\x93", WideStringToUtf8(L"\xD3", 1).c_str());
+ EXPECT_STREQ("\xC3\x93", WideStringToUtf8(L"\xD3", -1).c_str());
+
+ // 101 0111 0110 => 110-10101 10-110110
+ EXPECT_STREQ("\xD5\xB6", WideStringToUtf8(L"\x576", 1).c_str());
+ EXPECT_STREQ("\xD5\xB6", WideStringToUtf8(L"\x576", -1).c_str());
+}
+
+// Tests that Unicode code-points that have 12 to 16 bits are encoded
+// as 1110xxxx 10xxxxxx 10xxxxxx.
+TEST(WideStringToUtf8Test, CanEncode12To16Bits) {
+ // 0000 1000 1101 0011 => 1110-0000 10-100011 10-010011
+ EXPECT_STREQ("\xE0\xA3\x93", WideStringToUtf8(L"\x8D3", 1).c_str());
+ EXPECT_STREQ("\xE0\xA3\x93", WideStringToUtf8(L"\x8D3", -1).c_str());
+
+ // 1100 0111 0100 1101 => 1110-1100 10-011101 10-001101
+ EXPECT_STREQ("\xEC\x9D\x8D", WideStringToUtf8(L"\xC74D", 1).c_str());
+ EXPECT_STREQ("\xEC\x9D\x8D", WideStringToUtf8(L"\xC74D", -1).c_str());
+}
+
+// Tests that the conversion stops when the function encounters \0 character.
+TEST(WideStringToUtf8Test, StopsOnNulCharacter) {
+ EXPECT_STREQ("ABC", WideStringToUtf8(L"ABC\0XYZ", 100).c_str());
+}
+
+// Tests that the conversion stops when the function reaches the limit
+// specified by the 'length' parameter.
+TEST(WideStringToUtf8Test, StopsWhenLengthLimitReached) {
+ EXPECT_STREQ("ABC", WideStringToUtf8(L"ABCDEF", 3).c_str());
+}
+
+
+#if !GTEST_WIDE_STRING_USES_UTF16_
+// Tests that Unicode code-points that have 17 to 21 bits are encoded
+// as 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx. This code may not compile
+// on systems using UTF-16 encoding.
+TEST(WideStringToUtf8Test, CanEncode17To21Bits) {
+ // 0 0001 0000 1000 1101 0011 => 11110-000 10-010000 10-100011 10-010011
+ EXPECT_STREQ("\xF0\x90\xA3\x93", WideStringToUtf8(L"\x108D3", 1).c_str());
+ EXPECT_STREQ("\xF0\x90\xA3\x93", WideStringToUtf8(L"\x108D3", -1).c_str());
+
+ // 1 0000 1000 0110 0011 0100 => 11110-100 10-001000 10-011000 10-110100
+ EXPECT_STREQ("\xF4\x88\x98\xB4", WideStringToUtf8(L"\x108634", 1).c_str());
+ EXPECT_STREQ("\xF4\x88\x98\xB4", WideStringToUtf8(L"\x108634", -1).c_str());
+}
+
+// Tests that encoding an invalid code-point generates the expected result.
+TEST(WideStringToUtf8Test, CanEncodeInvalidCodePoint) {
+ EXPECT_STREQ("(Invalid Unicode 0xABCDFF)",
+ WideStringToUtf8(L"\xABCDFF", -1).c_str());
+}
+#else // !GTEST_WIDE_STRING_USES_UTF16_
+// Tests that surrogate pairs are encoded correctly on systems that use
+// UTF-16 encoding for wide strings.
+TEST(WideStringToUtf8Test, CanEncodeValidUtf16SurrogatePairs) {
+ EXPECT_STREQ("\xF0\x90\x90\x80",
+ WideStringToUtf8(L"\xD801\xDC00", -1).c_str());
+}
+
+// Tests that encoding an invalid UTF-16 surrogate pair
+// generates the expected result.
+TEST(WideStringToUtf8Test, CanEncodeInvalidUtf16SurrogatePair) {
+ // Leading surrogate is at the end of the string.
+ EXPECT_STREQ("\xED\xA0\x80", WideStringToUtf8(L"\xD800", -1).c_str());
+ // Leading surrogate is not followed by the trailing surrogate.
+ EXPECT_STREQ("\xED\xA0\x80$", WideStringToUtf8(L"\xD800$", -1).c_str());
+  // Trailing surrogate appears without a leading surrogate.
+ EXPECT_STREQ("\xED\xB0\x80PQR", WideStringToUtf8(L"\xDC00PQR", -1).c_str());
+}
+#endif // !GTEST_WIDE_STRING_USES_UTF16_
+
+// Tests that codepoint concatenation works correctly.
+#if !GTEST_WIDE_STRING_USES_UTF16_
+TEST(WideStringToUtf8Test, ConcatenatesCodepointsCorrectly) {
+ EXPECT_STREQ(
+ "\xF4\x88\x98\xB4"
+ "\xEC\x9D\x8D"
+ "\n"
+ "\xD5\xB6"
+ "\xE0\xA3\x93"
+ "\xF4\x88\x98\xB4",
+ WideStringToUtf8(L"\x108634\xC74D\n\x576\x8D3\x108634", -1).c_str());
+}
+#else
+TEST(WideStringToUtf8Test, ConcatenatesCodepointsCorrectly) {
+ EXPECT_STREQ(
+ "\xEC\x9D\x8D" "\n" "\xD5\xB6" "\xE0\xA3\x93",
+ WideStringToUtf8(L"\xC74D\n\x576\x8D3", -1).c_str());
+}
+#endif // !GTEST_WIDE_STRING_USES_UTF16_
+
+// Tests the Random class.
+
+TEST(RandomDeathTest, GeneratesCrashesOnInvalidRange) {
+ testing::internal::Random random(42);
+ EXPECT_DEATH_IF_SUPPORTED(
+ random.Generate(0),
+ "Cannot generate a number in the range \\[0, 0\\)");
+ EXPECT_DEATH_IF_SUPPORTED(
+ random.Generate(testing::internal::Random::kMaxRange + 1),
+ "Generation of a number in \\[0, 2147483649\\) was requested, "
+ "but this can only generate numbers in \\[0, 2147483648\\)");
+}
+
+TEST(RandomTest, GeneratesNumbersWithinRange) {
+ const UInt32 kRange = 10000;
+ testing::internal::Random random(12345);
+ for (int i = 0; i < 10; i++) {
+ EXPECT_LT(random.Generate(kRange), kRange) << " for iteration " << i;
+ }
+
+ testing::internal::Random random2(testing::internal::Random::kMaxRange);
+ for (int i = 0; i < 10; i++) {
+ EXPECT_LT(random2.Generate(kRange), kRange) << " for iteration " << i;
+ }
+}
+
+TEST(RandomTest, RepeatsWhenReseeded) {
+ const int kSeed = 123;
+ const int kArraySize = 10;
+ const UInt32 kRange = 10000;
+ UInt32 values[kArraySize];
+
+ testing::internal::Random random(kSeed);
+ for (int i = 0; i < kArraySize; i++) {
+ values[i] = random.Generate(kRange);
+ }
+
+ random.Reseed(kSeed);
+ for (int i = 0; i < kArraySize; i++) {
+ EXPECT_EQ(values[i], random.Generate(kRange)) << " for iteration " << i;
+ }
+}
+
+// Tests STL container utilities.
+
+// Tests CountIf().
+
+static bool IsPositive(int n) { return n > 0; }
+
+TEST(ContainerUtilityTest, CountIf) {
+ std::vector<int> v;
+ EXPECT_EQ(0, CountIf(v, IsPositive)); // Works for an empty container.
+
+ v.push_back(-1);
+ v.push_back(0);
+ EXPECT_EQ(0, CountIf(v, IsPositive)); // Works when no value satisfies.
+
+ v.push_back(2);
+ v.push_back(-10);
+ v.push_back(10);
+ EXPECT_EQ(2, CountIf(v, IsPositive));
+}
+
+// Tests ForEach().
+
+static int g_sum = 0;
+static void Accumulate(int n) { g_sum += n; }
+
+TEST(ContainerUtilityTest, ForEach) {
+ std::vector<int> v;
+ g_sum = 0;
+ ForEach(v, Accumulate);
+  EXPECT_EQ(0, g_sum);  // Works for an empty container.
+
+ g_sum = 0;
+ v.push_back(1);
+ ForEach(v, Accumulate);
+ EXPECT_EQ(1, g_sum); // Works for a container with one element.
+
+ g_sum = 0;
+ v.push_back(20);
+ v.push_back(300);
+ ForEach(v, Accumulate);
+ EXPECT_EQ(321, g_sum);
+}
+
+// Tests GetElementOr().
+TEST(ContainerUtilityTest, GetElementOr) {
+ std::vector<char> a;
+ EXPECT_EQ('x', GetElementOr(a, 0, 'x'));
+
+ a.push_back('a');
+ a.push_back('b');
+ EXPECT_EQ('a', GetElementOr(a, 0, 'x'));
+ EXPECT_EQ('b', GetElementOr(a, 1, 'x'));
+ EXPECT_EQ('x', GetElementOr(a, -2, 'x'));
+ EXPECT_EQ('x', GetElementOr(a, 2, 'x'));
+}
+
+TEST(ContainerUtilityDeathTest, ShuffleRange) {
+ std::vector<int> a;
+ a.push_back(0);
+ a.push_back(1);
+ a.push_back(2);
+ testing::internal::Random random(1);
+
+ EXPECT_DEATH_IF_SUPPORTED(
+ ShuffleRange(&random, -1, 1, &a),
+ "Invalid shuffle range start -1: must be in range \\[0, 3\\]");
+ EXPECT_DEATH_IF_SUPPORTED(
+ ShuffleRange(&random, 4, 4, &a),
+ "Invalid shuffle range start 4: must be in range \\[0, 3\\]");
+ EXPECT_DEATH_IF_SUPPORTED(
+ ShuffleRange(&random, 3, 2, &a),
+ "Invalid shuffle range finish 2: must be in range \\[3, 3\\]");
+ EXPECT_DEATH_IF_SUPPORTED(
+ ShuffleRange(&random, 3, 4, &a),
+ "Invalid shuffle range finish 4: must be in range \\[3, 3\\]");
+}
+
+class VectorShuffleTest : public Test {
+ protected:
+ static const int kVectorSize = 20;
+
+ VectorShuffleTest() : random_(1) {
+ for (int i = 0; i < kVectorSize; i++) {
+ vector_.push_back(i);
+ }
+ }
+
+ static bool VectorIsCorrupt(const TestingVector& vector) {
+ if (kVectorSize != static_cast<int>(vector.size())) {
+ return true;
+ }
+
+ bool found_in_vector[kVectorSize] = { false };
+ for (size_t i = 0; i < vector.size(); i++) {
+ const int e = vector[i];
+ if (e < 0 || e >= kVectorSize || found_in_vector[e]) {
+ return true;
+ }
+ found_in_vector[e] = true;
+ }
+
+ // Vector size is correct, elements' range is correct, no
+ // duplicate elements. Therefore no corruption has occurred.
+ return false;
+ }
+
+ static bool VectorIsNotCorrupt(const TestingVector& vector) {
+ return !VectorIsCorrupt(vector);
+ }
+
+ static bool RangeIsShuffled(const TestingVector& vector, int begin, int end) {
+ for (int i = begin; i < end; i++) {
+ if (i != vector[i]) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ static bool RangeIsUnshuffled(
+ const TestingVector& vector, int begin, int end) {
+ return !RangeIsShuffled(vector, begin, end);
+ }
+
+ static bool VectorIsShuffled(const TestingVector& vector) {
+ return RangeIsShuffled(vector, 0, static_cast<int>(vector.size()));
+ }
+
+ static bool VectorIsUnshuffled(const TestingVector& vector) {
+ return !VectorIsShuffled(vector);
+ }
+
+ testing::internal::Random random_;
+ TestingVector vector_;
+}; // class VectorShuffleTest
+
+const int VectorShuffleTest::kVectorSize;
+
+TEST_F(VectorShuffleTest, HandlesEmptyRange) {
+ // Tests an empty range at the beginning...
+ ShuffleRange(&random_, 0, 0, &vector_);
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ ASSERT_PRED1(VectorIsUnshuffled, vector_);
+
+ // ...in the middle...
+ ShuffleRange(&random_, kVectorSize/2, kVectorSize/2, &vector_);
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ ASSERT_PRED1(VectorIsUnshuffled, vector_);
+
+ // ...at the end...
+ ShuffleRange(&random_, kVectorSize - 1, kVectorSize - 1, &vector_);
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ ASSERT_PRED1(VectorIsUnshuffled, vector_);
+
+ // ...and past the end.
+ ShuffleRange(&random_, kVectorSize, kVectorSize, &vector_);
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ ASSERT_PRED1(VectorIsUnshuffled, vector_);
+}
+
+TEST_F(VectorShuffleTest, HandlesRangeOfSizeOne) {
+ // Tests a size one range at the beginning...
+ ShuffleRange(&random_, 0, 1, &vector_);
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ ASSERT_PRED1(VectorIsUnshuffled, vector_);
+
+ // ...in the middle...
+ ShuffleRange(&random_, kVectorSize/2, kVectorSize/2 + 1, &vector_);
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ ASSERT_PRED1(VectorIsUnshuffled, vector_);
+
+ // ...and at the end.
+ ShuffleRange(&random_, kVectorSize - 1, kVectorSize, &vector_);
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ ASSERT_PRED1(VectorIsUnshuffled, vector_);
+}
+
+// Because we use our own random number generator and a fixed seed,
+// we can guarantee that the following "random" tests will succeed.
+
+TEST_F(VectorShuffleTest, ShufflesEntireVector) {
+ Shuffle(&random_, &vector_);
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ EXPECT_FALSE(VectorIsUnshuffled(vector_)) << vector_;
+
+ // Tests the first and last elements in particular to ensure that
+ // there are no off-by-one problems in our shuffle algorithm.
+ EXPECT_NE(0, vector_[0]);
+ EXPECT_NE(kVectorSize - 1, vector_[kVectorSize - 1]);
+}
+
+TEST_F(VectorShuffleTest, ShufflesStartOfVector) {
+ const int kRangeSize = kVectorSize/2;
+
+ ShuffleRange(&random_, 0, kRangeSize, &vector_);
+
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ EXPECT_PRED3(RangeIsShuffled, vector_, 0, kRangeSize);
+ EXPECT_PRED3(RangeIsUnshuffled, vector_, kRangeSize, kVectorSize);
+}
+
+TEST_F(VectorShuffleTest, ShufflesEndOfVector) {
+ const int kRangeSize = kVectorSize / 2;
+ ShuffleRange(&random_, kRangeSize, kVectorSize, &vector_);
+
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ EXPECT_PRED3(RangeIsUnshuffled, vector_, 0, kRangeSize);
+ EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize, kVectorSize);
+}
+
+TEST_F(VectorShuffleTest, ShufflesMiddleOfVector) {
+ int kRangeSize = kVectorSize/3;
+ ShuffleRange(&random_, kRangeSize, 2*kRangeSize, &vector_);
+
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ EXPECT_PRED3(RangeIsUnshuffled, vector_, 0, kRangeSize);
+ EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize, 2*kRangeSize);
+ EXPECT_PRED3(RangeIsUnshuffled, vector_, 2*kRangeSize, kVectorSize);
+}
+
+TEST_F(VectorShuffleTest, ShufflesRepeatably) {
+ TestingVector vector2;
+ for (int i = 0; i < kVectorSize; i++) {
+ vector2.push_back(i);
+ }
+
+ random_.Reseed(1234);
+ Shuffle(&random_, &vector_);
+ random_.Reseed(1234);
+ Shuffle(&random_, &vector2);
+
+ ASSERT_PRED1(VectorIsNotCorrupt, vector_);
+ ASSERT_PRED1(VectorIsNotCorrupt, vector2);
+
+ for (int i = 0; i < kVectorSize; i++) {
+ EXPECT_EQ(vector_[i], vector2[i]) << " where i is " << i;
+ }
+}
+
+// Tests the size of the AssertHelper class.
+
+TEST(AssertHelperTest, AssertHelperIsSmall) {
+ // To avoid breaking clients that use lots of assertions in one
+ // function, we cannot grow the size of AssertHelper.
+ EXPECT_LE(sizeof(testing::internal::AssertHelper), sizeof(void*));
+}
+
+// Tests the String class.
+
+// Tests String's constructors.
+TEST(StringTest, Constructors) {
+ // Default ctor.
+ String s1;
+ // We aren't using EXPECT_EQ(NULL, s1.c_str()) because comparing
+ // pointers with NULL isn't supported on all platforms.
+ EXPECT_EQ(0U, s1.length());
+ EXPECT_TRUE(NULL == s1.c_str());
+
+ // Implicitly constructs from a C-string.
+ String s2 = "Hi";
+ EXPECT_EQ(2U, s2.length());
+ EXPECT_STREQ("Hi", s2.c_str());
+
+ // Constructs from a C-string and a length.
+ String s3("hello", 3);
+ EXPECT_EQ(3U, s3.length());
+ EXPECT_STREQ("hel", s3.c_str());
+
+ // The empty String should be created when String is constructed with
+ // a NULL pointer and length 0.
+ EXPECT_EQ(0U, String(NULL, 0).length());
+ EXPECT_FALSE(String(NULL, 0).c_str() == NULL);
+
+ // Constructs a String that contains '\0'.
+ String s4("a\0bcd", 4);
+ EXPECT_EQ(4U, s4.length());
+ EXPECT_EQ('a', s4.c_str()[0]);
+ EXPECT_EQ('\0', s4.c_str()[1]);
+ EXPECT_EQ('b', s4.c_str()[2]);
+ EXPECT_EQ('c', s4.c_str()[3]);
+
+ // Copy ctor where the source is NULL.
+ const String null_str;
+ String s5 = null_str;
+ EXPECT_TRUE(s5.c_str() == NULL);
+
+ // Copy ctor where the source isn't NULL.
+ String s6 = s3;
+ EXPECT_EQ(3U, s6.length());
+ EXPECT_STREQ("hel", s6.c_str());
+
+ // Copy ctor where the source contains '\0'.
+ String s7 = s4;
+ EXPECT_EQ(4U, s7.length());
+ EXPECT_EQ('a', s7.c_str()[0]);
+ EXPECT_EQ('\0', s7.c_str()[1]);
+ EXPECT_EQ('b', s7.c_str()[2]);
+ EXPECT_EQ('c', s7.c_str()[3]);
+}
+
+TEST(StringTest, ConvertsFromStdString) {
+ // An empty std::string.
+ const std::string src1("");
+ const String dest1 = src1;
+ EXPECT_EQ(0U, dest1.length());
+ EXPECT_STREQ("", dest1.c_str());
+
+ // A normal std::string.
+ const std::string src2("Hi");
+ const String dest2 = src2;
+ EXPECT_EQ(2U, dest2.length());
+ EXPECT_STREQ("Hi", dest2.c_str());
+
+ // An std::string with an embedded NUL character.
+ const char src3[] = "a\0b";
+ const String dest3 = std::string(src3, sizeof(src3));
+ EXPECT_EQ(sizeof(src3), dest3.length());
+ EXPECT_EQ('a', dest3.c_str()[0]);
+ EXPECT_EQ('\0', dest3.c_str()[1]);
+ EXPECT_EQ('b', dest3.c_str()[2]);
+}
+
+TEST(StringTest, ConvertsToStdString) {
+ // An empty String.
+ const String src1("");
+ const std::string dest1 = src1;
+ EXPECT_EQ("", dest1);
+
+ // A normal String.
+ const String src2("Hi");
+ const std::string dest2 = src2;
+ EXPECT_EQ("Hi", dest2);
+
+ // A String containing a '\0'.
+ const String src3("x\0y", 3);
+ const std::string dest3 = src3;
+ EXPECT_EQ(std::string("x\0y", 3), dest3);
+}
+
+#if GTEST_HAS_GLOBAL_STRING
+
+TEST(StringTest, ConvertsFromGlobalString) {
+ // An empty ::string.
+ const ::string src1("");
+ const String dest1 = src1;
+ EXPECT_EQ(0U, dest1.length());
+ EXPECT_STREQ("", dest1.c_str());
+
+ // A normal ::string.
+ const ::string src2("Hi");
+ const String dest2 = src2;
+ EXPECT_EQ(2U, dest2.length());
+ EXPECT_STREQ("Hi", dest2.c_str());
+
+ // An ::string with an embedded NUL character.
+ const char src3[] = "x\0y";
+ const String dest3 = ::string(src3, sizeof(src3));
+ EXPECT_EQ(sizeof(src3), dest3.length());
+ EXPECT_EQ('x', dest3.c_str()[0]);
+ EXPECT_EQ('\0', dest3.c_str()[1]);
+ EXPECT_EQ('y', dest3.c_str()[2]);
+}
+
+TEST(StringTest, ConvertsToGlobalString) {
+ // An empty String.
+ const String src1("");
+ const ::string dest1 = src1;
+ EXPECT_EQ("", dest1);
+
+ // A normal String.
+ const String src2("Hi");
+ const ::string dest2 = src2;
+ EXPECT_EQ("Hi", dest2);
+
+ const String src3("x\0y", 3);
+ const ::string dest3 = src3;
+ EXPECT_EQ(::string("x\0y", 3), dest3);
+}
+
+#endif // GTEST_HAS_GLOBAL_STRING
+
+// Tests String::ShowCStringQuoted().
+TEST(StringTest, ShowCStringQuoted) {
+ EXPECT_STREQ("(null)",
+ String::ShowCStringQuoted(NULL).c_str());
+ EXPECT_STREQ("\"\"",
+ String::ShowCStringQuoted("").c_str());
+ EXPECT_STREQ("\"foo\"",
+ String::ShowCStringQuoted("foo").c_str());
+}
+
+// Tests String::empty().
+TEST(StringTest, Empty) {
+ EXPECT_TRUE(String("").empty());
+ EXPECT_FALSE(String().empty());
+ EXPECT_FALSE(String(NULL).empty());
+ EXPECT_FALSE(String("a").empty());
+ EXPECT_FALSE(String("\0", 1).empty());
+}
+
+// Tests String::Compare().
+TEST(StringTest, Compare) {
+ // NULL vs NULL.
+ EXPECT_EQ(0, String().Compare(String()));
+
+ // NULL vs non-NULL.
+ EXPECT_EQ(-1, String().Compare(String("")));
+
+ // Non-NULL vs NULL.
+ EXPECT_EQ(1, String("").Compare(String()));
+
+ // The following covers non-NULL vs non-NULL.
+
+ // "" vs "".
+ EXPECT_EQ(0, String("").Compare(String("")));
+
+ // "" vs non-"".
+ EXPECT_EQ(-1, String("").Compare(String("\0", 1)));
+ EXPECT_EQ(-1, String("").Compare(" "));
+
+ // Non-"" vs "".
+ EXPECT_EQ(1, String("a").Compare(String("")));
+
+ // The following covers non-"" vs non-"".
+
+ // Same length and equal.
+ EXPECT_EQ(0, String("a").Compare(String("a")));
+
+ // Same length and different.
+ EXPECT_EQ(-1, String("a\0b", 3).Compare(String("a\0c", 3)));
+ EXPECT_EQ(1, String("b").Compare(String("a")));
+
+ // Different lengths.
+ EXPECT_EQ(-1, String("a").Compare(String("ab")));
+ EXPECT_EQ(-1, String("a").Compare(String("a\0", 2)));
+ EXPECT_EQ(1, String("abc").Compare(String("aacd")));
+}
+
+// Tests String::operator==().
+TEST(StringTest, Equals) {
+ const String null(NULL);
+ EXPECT_TRUE(null == NULL); // NOLINT
+ EXPECT_FALSE(null == ""); // NOLINT
+ EXPECT_FALSE(null == "bar"); // NOLINT
+
+ const String empty("");
+ EXPECT_FALSE(empty == NULL); // NOLINT
+ EXPECT_TRUE(empty == ""); // NOLINT
+ EXPECT_FALSE(empty == "bar"); // NOLINT
+
+ const String foo("foo");
+ EXPECT_FALSE(foo == NULL); // NOLINT
+ EXPECT_FALSE(foo == ""); // NOLINT
+ EXPECT_FALSE(foo == "bar"); // NOLINT
+ EXPECT_TRUE(foo == "foo"); // NOLINT
+
+ const String bar("x\0y", 3);
+ EXPECT_FALSE(bar == "x");
+}
+
+// Tests String::operator!=().
+TEST(StringTest, NotEquals) {
+ const String null(NULL);
+ EXPECT_FALSE(null != NULL); // NOLINT
+ EXPECT_TRUE(null != ""); // NOLINT
+ EXPECT_TRUE(null != "bar"); // NOLINT
+
+ const String empty("");
+ EXPECT_TRUE(empty != NULL); // NOLINT
+ EXPECT_FALSE(empty != ""); // NOLINT
+ EXPECT_TRUE(empty != "bar"); // NOLINT
+
+ const String foo("foo");
+ EXPECT_TRUE(foo != NULL); // NOLINT
+ EXPECT_TRUE(foo != ""); // NOLINT
+ EXPECT_TRUE(foo != "bar"); // NOLINT
+ EXPECT_FALSE(foo != "foo"); // NOLINT
+
+ const String bar("x\0y", 3);
+ EXPECT_TRUE(bar != "x");
+}
+
+// Tests String::length().
+TEST(StringTest, Length) {
+ EXPECT_EQ(0U, String().length());
+ EXPECT_EQ(0U, String("").length());
+ EXPECT_EQ(2U, String("ab").length());
+ EXPECT_EQ(3U, String("a\0b", 3).length());
+}
+
+// Tests String::EndsWith().
+TEST(StringTest, EndsWith) {
+ EXPECT_TRUE(String("foobar").EndsWith("bar"));
+ EXPECT_TRUE(String("foobar").EndsWith(""));
+ EXPECT_TRUE(String("").EndsWith(""));
+
+ EXPECT_FALSE(String("foobar").EndsWith("foo"));
+ EXPECT_FALSE(String("").EndsWith("foo"));
+}
+
+// Tests String::EndsWithCaseInsensitive().
+TEST(StringTest, EndsWithCaseInsensitive) {
+ EXPECT_TRUE(String("foobar").EndsWithCaseInsensitive("BAR"));
+ EXPECT_TRUE(String("foobaR").EndsWithCaseInsensitive("bar"));
+ EXPECT_TRUE(String("foobar").EndsWithCaseInsensitive(""));
+ EXPECT_TRUE(String("").EndsWithCaseInsensitive(""));
+
+ EXPECT_FALSE(String("Foobar").EndsWithCaseInsensitive("foo"));
+ EXPECT_FALSE(String("foobar").EndsWithCaseInsensitive("Foo"));
+ EXPECT_FALSE(String("").EndsWithCaseInsensitive("foo"));
+}
+
+// C++Builder's preprocessor is buggy; it fails to expand macros that
+// appear in macro parameters after wide char literals. Provide an alias
+// for NULL as a workaround.
+static const wchar_t* const kNull = NULL;
+
+// Tests String::CaseInsensitiveWideCStringEquals
+TEST(StringTest, CaseInsensitiveWideCStringEquals) {
+ EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(NULL, NULL));
+ EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(kNull, L""));
+ EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(L"", kNull));
+ EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(kNull, L"foobar"));
+ EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(L"foobar", kNull));
+ EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L"foobar", L"foobar"));
+ EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L"foobar", L"FOOBAR"));
+ EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L"FOOBAR", L"foobar"));
+}
+
+// Tests that NULL can be assigned to a String.
+TEST(StringTest, CanBeAssignedNULL) {
+ const String src(NULL);
+ String dest;
+
+ dest = src;
+ EXPECT_STREQ(NULL, dest.c_str());
+}
+
+// Tests that the empty string "" can be assigned to a String.
+TEST(StringTest, CanBeAssignedEmpty) {
+ const String src("");
+ String dest;
+
+ dest = src;
+ EXPECT_STREQ("", dest.c_str());
+}
+
+// Tests that a non-empty string can be assigned to a String.
+TEST(StringTest, CanBeAssignedNonEmpty) {
+ const String src("hello");
+ String dest;
+ dest = src;
+ EXPECT_EQ(5U, dest.length());
+ EXPECT_STREQ("hello", dest.c_str());
+
+ const String src2("x\0y", 3);
+ String dest2;
+ dest2 = src2;
+ EXPECT_EQ(3U, dest2.length());
+ EXPECT_EQ('x', dest2.c_str()[0]);
+ EXPECT_EQ('\0', dest2.c_str()[1]);
+ EXPECT_EQ('y', dest2.c_str()[2]);
+}
+
+// Tests that a String can be assigned to itself.
+TEST(StringTest, CanBeAssignedSelf) {
+ String dest("hello");
+
+ dest = dest;
+ EXPECT_STREQ("hello", dest.c_str());
+}
+
+// Sun Studio < 12 incorrectly rejects this code due to an overloading
+// ambiguity.
+#if !(defined(__SUNPRO_CC) && __SUNPRO_CC < 0x590)
+// Tests streaming a String.
+TEST(StringTest, Streams) {
+ EXPECT_EQ(StreamableToString(String()), "(null)");
+ EXPECT_EQ(StreamableToString(String("")), "");
+ EXPECT_EQ(StreamableToString(String("a\0b", 3)), "a\\0b");
+}
+#endif
+
+// Tests that String::Format() works.
+TEST(StringTest, FormatWorks) {
+ // Normal case: the format spec is valid, the arguments match the
+ // spec, and the result is < 4095 characters.
+ EXPECT_STREQ("Hello, 42", String::Format("%s, %d", "Hello", 42).c_str());
+
+ // Edge case: the result is 4095 characters.
+ char buffer[4096];
+ const size_t kSize = sizeof(buffer);
+ memset(buffer, 'a', kSize - 1);
+ buffer[kSize - 1] = '\0';
+ EXPECT_STREQ(buffer, String::Format("%s", buffer).c_str());
+
+ // The result needs to be 4096 characters, exceeding Format()'s limit.
+ EXPECT_STREQ("<formatting error or buffer exceeded>",
+ String::Format("x%s", buffer).c_str());
+
+#if GTEST_OS_LINUX
+ // On Linux, an invalid format spec should lead to an error message.
+ // In other environments (e.g. MSVC on Windows), String::Format() may
+ // simply ignore a bad format spec, so this assertion is run on
+ // Linux only.
+ EXPECT_STREQ("<formatting error or buffer exceeded>",
+ String::Format("%").c_str());
+#endif
+}
+
+#if GTEST_OS_WINDOWS
+
+// Tests String::ShowWideCString().
+TEST(StringTest, ShowWideCString) {
+ EXPECT_STREQ("(null)",
+ String::ShowWideCString(NULL).c_str());
+ EXPECT_STREQ("", String::ShowWideCString(L"").c_str());
+ EXPECT_STREQ("foo", String::ShowWideCString(L"foo").c_str());
+}
+
+// Tests String::ShowWideCStringQuoted().
+TEST(StringTest, ShowWideCStringQuoted) {
+ EXPECT_STREQ("(null)",
+ String::ShowWideCStringQuoted(NULL).c_str());
+ EXPECT_STREQ("L\"\"",
+ String::ShowWideCStringQuoted(L"").c_str());
+ EXPECT_STREQ("L\"foo\"",
+ String::ShowWideCStringQuoted(L"foo").c_str());
+}
+
+#if GTEST_OS_WINDOWS_MOBILE
+TEST(StringTest, AnsiAndUtf16Null) {
+ EXPECT_EQ(NULL, String::AnsiToUtf16(NULL));
+ EXPECT_EQ(NULL, String::Utf16ToAnsi(NULL));
+}
+
+TEST(StringTest, AnsiAndUtf16ConvertBasic) {
+ const char* ansi = String::Utf16ToAnsi(L"str");
+ EXPECT_STREQ("str", ansi);
+ delete [] ansi;
+ const WCHAR* utf16 = String::AnsiToUtf16("str");
+ EXPECT_EQ(0, wcsncmp(L"str", utf16, 3));
+ delete [] utf16;
+}
+
+TEST(StringTest, AnsiAndUtf16ConvertPathChars) {
+ const char* ansi = String::Utf16ToAnsi(L".:\\ \"*?");
+ EXPECT_STREQ(".:\\ \"*?", ansi);
+ delete [] ansi;
+ const WCHAR* utf16 = String::AnsiToUtf16(".:\\ \"*?");
+ EXPECT_EQ(0, wcsncmp(L".:\\ \"*?", utf16, 3));
+ delete [] utf16;
+}
+#endif // GTEST_OS_WINDOWS_MOBILE
+
+#endif // GTEST_OS_WINDOWS
+
+// Tests TestProperty construction.
+TEST(TestPropertyTest, StringValue) {
+ TestProperty property("key", "1");
+ EXPECT_STREQ("key", property.key());
+ EXPECT_STREQ("1", property.value());
+}
+
+// Tests TestProperty replacing a value.
+TEST(TestPropertyTest, ReplaceStringValue) {
+ TestProperty property("key", "1");
+ EXPECT_STREQ("1", property.value());
+ property.SetValue("2");
+ EXPECT_STREQ("2", property.value());
+}
+
+// AddFatalFailure() and AddNonfatalFailure() must be stand-alone
+// functions (i.e. their definitions cannot be inlined at the call
+// sites), or C++Builder won't compile the code.
+static void AddFatalFailure() {
+ FAIL() << "Expected fatal failure.";
+}
+
+static void AddNonfatalFailure() {
+ ADD_FAILURE() << "Expected non-fatal failure.";
+}
+
+class ScopedFakeTestPartResultReporterTest : public Test {
+ public: // Must be public and not protected due to a bug in g++ 3.4.2.
+ enum FailureMode {
+ FATAL_FAILURE,
+ NONFATAL_FAILURE
+ };
+ static void AddFailure(FailureMode failure) {
+ if (failure == FATAL_FAILURE) {
+ AddFatalFailure();
+ } else {
+ AddNonfatalFailure();
+ }
+ }
+};
+
+// Tests that ScopedFakeTestPartResultReporter intercepts test
+// failures.
+TEST_F(ScopedFakeTestPartResultReporterTest, InterceptsTestFailures) {
+ TestPartResultArray results;
+ {
+ ScopedFakeTestPartResultReporter reporter(
+ ScopedFakeTestPartResultReporter::INTERCEPT_ONLY_CURRENT_THREAD,
+ &results);
+ AddFailure(NONFATAL_FAILURE);
+ AddFailure(FATAL_FAILURE);
+ }
+
+ EXPECT_EQ(2, results.size());
+ EXPECT_TRUE(results.GetTestPartResult(0).nonfatally_failed());
+ EXPECT_TRUE(results.GetTestPartResult(1).fatally_failed());
+}
+
+TEST_F(ScopedFakeTestPartResultReporterTest, DeprecatedConstructor) {
+ TestPartResultArray results;
+ {
+ // Tests that the deprecated constructor still works.
+ ScopedFakeTestPartResultReporter reporter(&results);
+ AddFailure(NONFATAL_FAILURE);
+ }
+ EXPECT_EQ(1, results.size());
+}
+
+#if GTEST_IS_THREADSAFE
+
+class ScopedFakeTestPartResultReporterWithThreadsTest
+ : public ScopedFakeTestPartResultReporterTest {
+ protected:
+ static void AddFailureInOtherThread(FailureMode failure) {
+ ThreadWithParam<FailureMode> thread(&AddFailure, failure, NULL);
+ thread.Join();
+ }
+};
+
+TEST_F(ScopedFakeTestPartResultReporterWithThreadsTest,
+ InterceptsTestFailuresInAllThreads) {
+ TestPartResultArray results;
+ {
+ ScopedFakeTestPartResultReporter reporter(
+ ScopedFakeTestPartResultReporter::INTERCEPT_ALL_THREADS, &results);
+ AddFailure(NONFATAL_FAILURE);
+ AddFailure(FATAL_FAILURE);
+ AddFailureInOtherThread(NONFATAL_FAILURE);
+ AddFailureInOtherThread(FATAL_FAILURE);
+ }
+
+ EXPECT_EQ(4, results.size());
+ EXPECT_TRUE(results.GetTestPartResult(0).nonfatally_failed());
+ EXPECT_TRUE(results.GetTestPartResult(1).fatally_failed());
+ EXPECT_TRUE(results.GetTestPartResult(2).nonfatally_failed());
+ EXPECT_TRUE(results.GetTestPartResult(3).fatally_failed());
+}
+
+#endif // GTEST_IS_THREADSAFE
+
+// Tests EXPECT_FATAL_FAILURE{,_ON_ALL_THREADS}. Makes sure that they
+// work even if the failure is generated in a called function rather than
+// the current context.
+
+typedef ScopedFakeTestPartResultReporterTest ExpectFatalFailureTest;
+
+TEST_F(ExpectFatalFailureTest, CatchesFatalFailure) {
+ EXPECT_FATAL_FAILURE(AddFatalFailure(), "Expected fatal failure.");
+}
+
+TEST_F(ExpectFatalFailureTest, CatchesFatalFailureOnAllThreads) {
+ // We have another test below to verify that the macro catches fatal
+ // failures generated on another thread.
+ EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFatalFailure(),
+ "Expected fatal failure.");
+}
+
+#ifdef __BORLANDC__
+// Silences warnings: "Condition is always true"
+#pragma option push -w-ccc
+#endif
+
+// Tests that EXPECT_FATAL_FAILURE() can be used in a non-void
+// function even when the statement in it contains ASSERT_*.
+
+int NonVoidFunction() {
+ EXPECT_FATAL_FAILURE(ASSERT_TRUE(false), "");
+ EXPECT_FATAL_FAILURE_ON_ALL_THREADS(FAIL(), "");
+ return 0;
+}
+
+TEST_F(ExpectFatalFailureTest, CanBeUsedInNonVoidFunction) {
+ NonVoidFunction();
+}
+
+// Tests that EXPECT_FATAL_FAILURE(statement, ...) doesn't abort the
+// current function even though 'statement' generates a fatal failure.
+
+void DoesNotAbortHelper(bool* aborted) {
+ EXPECT_FATAL_FAILURE(ASSERT_TRUE(false), "");
+ EXPECT_FATAL_FAILURE_ON_ALL_THREADS(FAIL(), "");
+
+ *aborted = false;
+}
+
+#ifdef __BORLANDC__
+// Restores warnings after previous "#pragma option push" suppressed them.
+#pragma option pop
+#endif
+
+TEST_F(ExpectFatalFailureTest, DoesNotAbort) {
+ bool aborted = true;
+ DoesNotAbortHelper(&aborted);
+ EXPECT_FALSE(aborted);
+}
+
+// Tests that the EXPECT_FATAL_FAILURE{,_ON_ALL_THREADS} macros accept a
+// statement that contains a macro which expands to code containing an
+// unprotected comma.
+
+static int global_var = 0;
+#define GTEST_USE_UNPROTECTED_COMMA_ global_var++, global_var++
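+// Note: the comma above is deliberately left unparenthesized, so the macro
+// expands to an expression containing a top-level comma. The tests below
+// verify that the failure-catching macros can accept such a statement.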
+
+TEST_F(ExpectFatalFailureTest, AcceptsMacroThatExpandsToUnprotectedComma) {
+#if !defined(__BORLANDC__) || __BORLANDC__ >= 0x600
+ // The statement below triggers an internal compiler error (ICE) in
+ // C++Builder 2007, so it is skipped there.
+ EXPECT_FATAL_FAILURE({
+ GTEST_USE_UNPROTECTED_COMMA_;
+ AddFatalFailure();
+ }, "");
+#endif
+
+ EXPECT_FATAL_FAILURE_ON_ALL_THREADS({
+ GTEST_USE_UNPROTECTED_COMMA_;
+ AddFatalFailure();
+ }, "");
+}
+
+// Tests EXPECT_NONFATAL_FAILURE{,_ON_ALL_THREADS}.
+
+typedef ScopedFakeTestPartResultReporterTest ExpectNonfatalFailureTest;
+
+TEST_F(ExpectNonfatalFailureTest, CatchesNonfatalFailure) {
+ EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(),
+ "Expected non-fatal failure.");
+}
+
+TEST_F(ExpectNonfatalFailureTest, CatchesNonfatalFailureOnAllThreads) {
+ // We have another test below to verify that the macro catches
+ // non-fatal failures generated on another thread.
+ EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddNonfatalFailure(),
+ "Expected non-fatal failure.");
+}
+
+// Tests that the EXPECT_NONFATAL_FAILURE{,_ON_ALL_THREADS} macros accept a
+// statement that contains a macro which expands to code containing an
+// unprotected comma.
+TEST_F(ExpectNonfatalFailureTest, AcceptsMacroThatExpandsToUnprotectedComma) {
+ EXPECT_NONFATAL_FAILURE({
+ GTEST_USE_UNPROTECTED_COMMA_;
+ AddNonfatalFailure();
+ }, "");
+
+ EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS({
+ GTEST_USE_UNPROTECTED_COMMA_;
+ AddNonfatalFailure();
+ }, "");
+}
+
+#if GTEST_IS_THREADSAFE
+
+typedef ScopedFakeTestPartResultReporterWithThreadsTest
+ ExpectFailureWithThreadsTest;
+
+TEST_F(ExpectFailureWithThreadsTest, ExpectFatalFailureOnAllThreads) {
+ EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailureInOtherThread(FATAL_FAILURE),
+ "Expected fatal failure.");
+}
+
+TEST_F(ExpectFailureWithThreadsTest, ExpectNonFatalFailureOnAllThreads) {
+ EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(
+ AddFailureInOtherThread(NONFATAL_FAILURE), "Expected non-fatal failure.");
+}
+
+#endif // GTEST_IS_THREADSAFE
+
+// Tests the TestProperty class.
+
+TEST(TestPropertyTest, ConstructorWorks) {
+ const TestProperty property("key", "value");
+ EXPECT_STREQ("key", property.key());
+ EXPECT_STREQ("value", property.value());
+}
+
+TEST(TestPropertyTest, SetValue) {
+ TestProperty property("key", "value_1");
+ EXPECT_STREQ("key", property.key());
+ property.SetValue("value_2");
+ EXPECT_STREQ("key", property.key());
+ EXPECT_STREQ("value_2", property.value());
+}
+
+// Tests the TestResult class
+
+// The test fixture for testing TestResult.
+class TestResultTest : public Test {
+ protected:
+ typedef std::vector<TestPartResult> TPRVector;
+
+ // We make use of 2 TestPartResult objects,
+ TestPartResult * pr1, * pr2;
+
+ // ... and 3 TestResult objects.
+ TestResult * r0, * r1, * r2;
+
+ virtual void SetUp() {
+ // pr1 is for success.
+ pr1 = new TestPartResult(TestPartResult::kSuccess,
+ "foo/bar.cc",
+ 10,
+ "Success!");
+
+ // pr2 is for fatal failure.
+ pr2 = new TestPartResult(TestPartResult::kFatalFailure,
+ "foo/bar.cc",
+ -1, // This line number means "unknown"
+ "Failure!");
+
+ // Creates the TestResult objects.
+ r0 = new TestResult();
+ r1 = new TestResult();
+ r2 = new TestResult();
+
+ // In order to test TestResult, we need to modify its internal
+ // state, in particular the TestPartResult vector it holds.
+ // test_part_results() returns a const reference to this vector.
+ // We cast it to a non-const object so that it can be modified (yes,
+ // this is a hack).
+ TPRVector* results1 = const_cast<TPRVector*>(
+ &TestResultAccessor::test_part_results(*r1));
+ TPRVector* results2 = const_cast<TPRVector*>(
+ &TestResultAccessor::test_part_results(*r2));
+
+ // r0 is an empty TestResult.
+
+ // r1 contains a single SUCCESS TestPartResult.
+ results1->push_back(*pr1);
+
+ // r2 contains a SUCCESS, and a FAILURE.
+ results2->push_back(*pr1);
+ results2->push_back(*pr2);
+ }
+
+ virtual void TearDown() {
+ delete pr1;
+ delete pr2;
+
+ delete r0;
+ delete r1;
+ delete r2;
+ }
+
+ // Helper that compares two TestPartResults.
+ static void CompareTestPartResult(const TestPartResult& expected,
+ const TestPartResult& actual) {
+ EXPECT_EQ(expected.type(), actual.type());
+ EXPECT_STREQ(expected.file_name(), actual.file_name());
+ EXPECT_EQ(expected.line_number(), actual.line_number());
+ EXPECT_STREQ(expected.summary(), actual.summary());
+ EXPECT_STREQ(expected.message(), actual.message());
+ EXPECT_EQ(expected.passed(), actual.passed());
+ EXPECT_EQ(expected.failed(), actual.failed());
+ EXPECT_EQ(expected.nonfatally_failed(), actual.nonfatally_failed());
+ EXPECT_EQ(expected.fatally_failed(), actual.fatally_failed());
+ }
+};
+
+// Tests TestResult::total_part_count().
+TEST_F(TestResultTest, total_part_count) {
+ ASSERT_EQ(0, r0->total_part_count());
+ ASSERT_EQ(1, r1->total_part_count());
+ ASSERT_EQ(2, r2->total_part_count());
+}
+
+// Tests TestResult::Passed().
+TEST_F(TestResultTest, Passed) {
+ ASSERT_TRUE(r0->Passed());
+ ASSERT_TRUE(r1->Passed());
+ ASSERT_FALSE(r2->Passed());
+}
+
+// Tests TestResult::Failed().
+TEST_F(TestResultTest, Failed) {
+ ASSERT_FALSE(r0->Failed());
+ ASSERT_FALSE(r1->Failed());
+ ASSERT_TRUE(r2->Failed());
+}
+
+// Tests TestResult::GetTestPartResult().
+
+typedef TestResultTest TestResultDeathTest;
+
+TEST_F(TestResultDeathTest, GetTestPartResult) {
+ CompareTestPartResult(*pr1, r2->GetTestPartResult(0));
+ CompareTestPartResult(*pr2, r2->GetTestPartResult(1));
+ EXPECT_DEATH_IF_SUPPORTED(r2->GetTestPartResult(2), "");
+ EXPECT_DEATH_IF_SUPPORTED(r2->GetTestPartResult(-1), "");
+}
+
+// Tests TestResult has no properties when none are added.
+TEST(TestResultPropertyTest, NoPropertiesFoundWhenNoneAreAdded) {
+ TestResult test_result;
+ ASSERT_EQ(0, test_result.test_property_count());
+}
+
+// Tests TestResult has the expected property when added.
+TEST(TestResultPropertyTest, OnePropertyFoundWhenAdded) {
+ TestResult test_result;
+ TestProperty property("key_1", "1");
+ TestResultAccessor::RecordProperty(&test_result, property);
+ ASSERT_EQ(1, test_result.test_property_count());
+ const TestProperty& actual_property = test_result.GetTestProperty(0);
+ EXPECT_STREQ("key_1", actual_property.key());
+ EXPECT_STREQ("1", actual_property.value());
+}
+
+// Tests TestResult has multiple properties when added.
+TEST(TestResultPropertyTest, MultiplePropertiesFoundWhenAdded) {
+ TestResult test_result;
+ TestProperty property_1("key_1", "1");
+ TestProperty property_2("key_2", "2");
+ TestResultAccessor::RecordProperty(&test_result, property_1);
+ TestResultAccessor::RecordProperty(&test_result, property_2);
+ ASSERT_EQ(2, test_result.test_property_count());
+ const TestProperty& actual_property_1 = test_result.GetTestProperty(0);
+ EXPECT_STREQ("key_1", actual_property_1.key());
+ EXPECT_STREQ("1", actual_property_1.value());
+
+ const TestProperty& actual_property_2 = test_result.GetTestProperty(1);
+ EXPECT_STREQ("key_2", actual_property_2.key());
+ EXPECT_STREQ("2", actual_property_2.value());
+}
+
+// Tests TestResult::RecordProperty() overrides values for duplicate keys.
+TEST(TestResultPropertyTest, OverridesValuesForDuplicateKeys) {
+ TestResult test_result;
+ TestProperty property_1_1("key_1", "1");
+ TestProperty property_2_1("key_2", "2");
+ TestProperty property_1_2("key_1", "12");
+ TestProperty property_2_2("key_2", "22");
+ TestResultAccessor::RecordProperty(&test_result, property_1_1);
+ TestResultAccessor::RecordProperty(&test_result, property_2_1);
+ TestResultAccessor::RecordProperty(&test_result, property_1_2);
+ TestResultAccessor::RecordProperty(&test_result, property_2_2);
+
+ ASSERT_EQ(2, test_result.test_property_count());
+ const TestProperty& actual_property_1 = test_result.GetTestProperty(0);
+ EXPECT_STREQ("key_1", actual_property_1.key());
+ EXPECT_STREQ("12", actual_property_1.value());
+
+ const TestProperty& actual_property_2 = test_result.GetTestProperty(1);
+ EXPECT_STREQ("key_2", actual_property_2.key());
+ EXPECT_STREQ("22", actual_property_2.value());
+}
+
+// Tests TestResult::GetTestProperty().
+TEST(TestResultPropertyDeathTest, GetTestProperty) {
+ TestResult test_result;
+ TestProperty property_1("key_1", "1");
+ TestProperty property_2("key_2", "2");
+ TestProperty property_3("key_3", "3");
+ TestResultAccessor::RecordProperty(&test_result, property_1);
+ TestResultAccessor::RecordProperty(&test_result, property_2);
+ TestResultAccessor::RecordProperty(&test_result, property_3);
+
+ const TestProperty& fetched_property_1 = test_result.GetTestProperty(0);
+ const TestProperty& fetched_property_2 = test_result.GetTestProperty(1);
+ const TestProperty& fetched_property_3 = test_result.GetTestProperty(2);
+
+ EXPECT_STREQ("key_1", fetched_property_1.key());
+ EXPECT_STREQ("1", fetched_property_1.value());
+
+ EXPECT_STREQ("key_2", fetched_property_2.key());
+ EXPECT_STREQ("2", fetched_property_2.value());
+
+ EXPECT_STREQ("key_3", fetched_property_3.key());
+ EXPECT_STREQ("3", fetched_property_3.value());
+
+ EXPECT_DEATH_IF_SUPPORTED(test_result.GetTestProperty(3), "");
+ EXPECT_DEATH_IF_SUPPORTED(test_result.GetTestProperty(-1), "");
+}
+
+// When a property with a reserved key is supplied to this function, it
+// verifies that recording it adds a non-fatal (not a fatal) failure and
+// that the property is not recorded.
+void ExpectNonFatalFailureRecordingPropertyWithReservedKey(const char* key) {
+ TestResult test_result;
+ TestProperty property(key, "1");
+ EXPECT_NONFATAL_FAILURE(
+ TestResultAccessor::RecordProperty(&test_result, property),
+ "Reserved key");
+ ASSERT_EQ(0, test_result.test_property_count()) << "Not recorded";
+}
+
+// Attempting to recording a property with the Reserved literal "name"
+// should add a non-fatal failure and the property should not be recorded.
+TEST(TestResultPropertyTest, AddFailureWhenUsingReservedKeyCalledName) {
+ ExpectNonFatalFailureRecordingPropertyWithReservedKey("name");
+}
+
+// Attempting to recording a property with the Reserved literal "status"
+// should add a non-fatal failure and the property should not be recorded.
+TEST(TestResultPropertyTest, AddFailureWhenUsingReservedKeyCalledStatus) {
+ ExpectNonFatalFailureRecordingPropertyWithReservedKey("status");
+}
+
+// Attempting to recording a property with the Reserved literal "time"
+// should add a non-fatal failure and the property should not be recorded.
+TEST(TestResultPropertyTest, AddFailureWhenUsingReservedKeyCalledTime) {
+ ExpectNonFatalFailureRecordingPropertyWithReservedKey("time");
+}
+
+// Attempting to recording a property with the Reserved literal "classname"
+// should add a non-fatal failure and the property should not be recorded.
+TEST(TestResultPropertyTest, AddFailureWhenUsingReservedKeyCalledClassname) {
+ ExpectNonFatalFailureRecordingPropertyWithReservedKey("classname");
+}
+
+// Tests that GTestFlagSaver saves and restores the Google Test flags.
+
+class GTestFlagSaverTest : public Test {
+ protected:
+ // Saves the Google Test flags such that we can restore them later, and
+ // then sets them to their default values. This will be called
+ // before the first test in this test case is run.
+ static void SetUpTestCase() {
+ saver_ = new GTestFlagSaver;
+
+ GTEST_FLAG(also_run_disabled_tests) = false;
+ GTEST_FLAG(break_on_failure) = false;
+ GTEST_FLAG(catch_exceptions) = false;
+ GTEST_FLAG(death_test_use_fork) = false;
+ GTEST_FLAG(color) = "auto";
+ GTEST_FLAG(filter) = "";
+ GTEST_FLAG(list_tests) = false;
+ GTEST_FLAG(output) = "";
+ GTEST_FLAG(print_time) = true;
+ GTEST_FLAG(random_seed) = 0;
+ GTEST_FLAG(repeat) = 1;
+ GTEST_FLAG(shuffle) = false;
+ GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth;
+ GTEST_FLAG(throw_on_failure) = false;
+ }
+
+ // Restores the Google Test flags that the tests have modified. This will
+ // be called after the last test in this test case is run.
+ static void TearDownTestCase() {
+ delete saver_;
+ saver_ = NULL;
+ }
+
+ // Verifies that the Google Test flags have their default values, and then
+ // modifies each of them.
+ void VerifyAndModifyFlags() {
+ EXPECT_FALSE(GTEST_FLAG(also_run_disabled_tests));
+ EXPECT_FALSE(GTEST_FLAG(break_on_failure));
+ EXPECT_FALSE(GTEST_FLAG(catch_exceptions));
+ EXPECT_STREQ("auto", GTEST_FLAG(color).c_str());
+ EXPECT_FALSE(GTEST_FLAG(death_test_use_fork));
+ EXPECT_STREQ("", GTEST_FLAG(filter).c_str());
+ EXPECT_FALSE(GTEST_FLAG(list_tests));
+ EXPECT_STREQ("", GTEST_FLAG(output).c_str());
+ EXPECT_TRUE(GTEST_FLAG(print_time));
+ EXPECT_EQ(0, GTEST_FLAG(random_seed));
+ EXPECT_EQ(1, GTEST_FLAG(repeat));
+ EXPECT_FALSE(GTEST_FLAG(shuffle));
+ EXPECT_EQ(kMaxStackTraceDepth, GTEST_FLAG(stack_trace_depth));
+ EXPECT_FALSE(GTEST_FLAG(throw_on_failure));
+
+ GTEST_FLAG(also_run_disabled_tests) = true;
+ GTEST_FLAG(break_on_failure) = true;
+ GTEST_FLAG(catch_exceptions) = true;
+ GTEST_FLAG(color) = "no";
+ GTEST_FLAG(death_test_use_fork) = true;
+ GTEST_FLAG(filter) = "abc";
+ GTEST_FLAG(list_tests) = true;
+ GTEST_FLAG(output) = "xml:foo.xml";
+ GTEST_FLAG(print_time) = false;
+ GTEST_FLAG(random_seed) = 1;
+ GTEST_FLAG(repeat) = 100;
+ GTEST_FLAG(shuffle) = true;
+ GTEST_FLAG(stack_trace_depth) = 1;
+ GTEST_FLAG(throw_on_failure) = true;
+ }
+ private:
+ // For saving Google Test flags during this test case.
+ static GTestFlagSaver* saver_;
+};
+
+GTestFlagSaver* GTestFlagSaverTest::saver_ = NULL;
+
+// Google Test doesn't guarantee the order of tests. The following two
+// tests are designed to work regardless of their order.
+
+// Modifies the Google Test flags in the test body.
+TEST_F(GTestFlagSaverTest, ModifyGTestFlags) {
+ VerifyAndModifyFlags();
+}
+
+// Verifies that the Google Test flags in the body of the previous test were
+// restored to their original values.
+TEST_F(GTestFlagSaverTest, VerifyGTestFlags) {
+ VerifyAndModifyFlags();
+}
+
+// Sets an environment variable with the given name to the given
+// value. If the value argument is "", unsets the environment
+// variable. The caller must ensure that both arguments are not NULL.
+static void SetEnv(const char* name, const char* value) {
+#if GTEST_OS_WINDOWS_MOBILE
+ // Environment variables are not supported on Windows CE.
+ return;
+#elif defined(__BORLANDC__) || defined(__SunOS_5_8) || defined(__SunOS_5_9)
+ // C++Builder's putenv only stores a pointer to its parameter; we have to
+ // ensure that the string remains valid as long as it might be needed.
+ // We use an std::map to do so.
+ static std::map<String, String*> added_env;
+
+ // Because putenv stores a pointer to the string buffer, we can't delete the
+ // previous string (if present) until after it's replaced.
+ String *prev_env = NULL;
+ if (added_env.find(name) != added_env.end()) {
+ prev_env = added_env[name];
+ }
+ added_env[name] = new String((Message() << name << "=" << value).GetString());
+
+ // The standard signature of putenv accepts a 'char*' argument. Other
+ // implementations, like C++Builder's, accept a 'const char*'.
+ // We cast away the 'const' so that the call compiles with both variants.
+ putenv(const_cast<char*>(added_env[name]->c_str()));
+ delete prev_env;
+#elif GTEST_OS_WINDOWS // If we are on Windows proper.
+ _putenv((Message() << name << "=" << value).GetString().c_str());
+#else
+ if (*value == '\0') {
+ unsetenv(name);
+ } else {
+ setenv(name, value, 1);
+ }
+#endif // GTEST_OS_WINDOWS_MOBILE
+}
+
+#if !GTEST_OS_WINDOWS_MOBILE
+// Environment variables are not supported on Windows CE.
+
+using testing::internal::Int32FromGTestEnv;
+
+// Tests Int32FromGTestEnv().
+
+// Tests that Int32FromGTestEnv() returns the default value when the
+// environment variable is not set.
+TEST(Int32FromGTestEnvTest, ReturnsDefaultWhenVariableIsNotSet) {
+ SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "");
+ EXPECT_EQ(10, Int32FromGTestEnv("temp", 10));
+}
+
+// Tests that Int32FromGTestEnv() returns the default value when the
+// environment variable overflows as an Int32.
+TEST(Int32FromGTestEnvTest, ReturnsDefaultWhenValueOverflows) {
+ printf("(expecting 2 warnings)\n");
+
+ SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "12345678987654321");
+ EXPECT_EQ(20, Int32FromGTestEnv("temp", 20));
+
+ SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "-12345678987654321");
+ EXPECT_EQ(30, Int32FromGTestEnv("temp", 30));
+}
+
+// Tests that Int32FromGTestEnv() returns the default value when the
+// environment variable does not represent a valid decimal integer.
+TEST(Int32FromGTestEnvTest, ReturnsDefaultWhenValueIsInvalid) {
+ printf("(expecting 2 warnings)\n");
+
+ SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "A1");
+ EXPECT_EQ(40, Int32FromGTestEnv("temp", 40));
+
+ SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "12X");
+ EXPECT_EQ(50, Int32FromGTestEnv("temp", 50));
+}
+
+// Tests that Int32FromGTestEnv() parses and returns the value of the
+// environment variable when it represents a valid decimal integer in
+// the range of an Int32.
+TEST(Int32FromGTestEnvTest, ParsesAndReturnsValidValue) {
+ SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "123");
+ EXPECT_EQ(123, Int32FromGTestEnv("temp", 0));
+
+ SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "-321");
+ EXPECT_EQ(-321, Int32FromGTestEnv("temp", 0));
+}
+#endif // !GTEST_OS_WINDOWS_MOBILE
+
+// Tests ParseInt32Flag().
+
+// Tests that ParseInt32Flag() returns false and doesn't change the
+// output value when the flag is in the wrong format.
+TEST(ParseInt32FlagTest, ReturnsFalseForInvalidFlag) {
+ Int32 value = 123;
+ EXPECT_FALSE(ParseInt32Flag("--a=100", "b", &value));
+ EXPECT_EQ(123, value);
+
+ EXPECT_FALSE(ParseInt32Flag("a=100", "a", &value));
+ EXPECT_EQ(123, value);
+}
+
+// Tests that ParseInt32Flag() returns false and doesn't change the
+// output value when the flag overflows as an Int32.
+TEST(ParseInt32FlagTest, ReturnsDefaultWhenValueOverflows) {
+ printf("(expecting 2 warnings)\n");
+
+ Int32 value = 123;
+ EXPECT_FALSE(ParseInt32Flag("--abc=12345678987654321", "abc", &value));
+ EXPECT_EQ(123, value);
+
+ EXPECT_FALSE(ParseInt32Flag("--abc=-12345678987654321", "abc", &value));
+ EXPECT_EQ(123, value);
+}
+
+// Tests that ParseInt32Flag() returns false and doesn't change the
+// output value when the flag does not represent a valid decimal
+// integer.
+TEST(ParseInt32FlagTest, ReturnsDefaultWhenValueIsInvalid) {
+ printf("(expecting 2 warnings)\n");
+
+ Int32 value = 123;
+ EXPECT_FALSE(ParseInt32Flag("--abc=A1", "abc", &value));
+ EXPECT_EQ(123, value);
+
+ EXPECT_FALSE(ParseInt32Flag("--abc=12X", "abc", &value));
+ EXPECT_EQ(123, value);
+}
+
+// Tests that ParseInt32Flag() parses the value of the flag and
+// returns true when the flag represents a valid decimal integer in
+// the range of an Int32.
+TEST(ParseInt32FlagTest, ParsesAndReturnsValidValue) {
+ Int32 value = 123;
+ EXPECT_TRUE(ParseInt32Flag("--" GTEST_FLAG_PREFIX_ "abc=456", "abc", &value));
+ EXPECT_EQ(456, value);
+
+ EXPECT_TRUE(ParseInt32Flag("--" GTEST_FLAG_PREFIX_ "abc=-789",
+ "abc", &value));
+ EXPECT_EQ(-789, value);
+}
+
+// Tests that Int32FromEnvOrDie() parses the value of the var or
+// returns the correct default.
+// Environment variables are not supported on Windows CE.
+#if !GTEST_OS_WINDOWS_MOBILE
+TEST(Int32FromEnvOrDieTest, ParsesAndReturnsValidValue) {
+ EXPECT_EQ(333, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", 333));
+ SetEnv(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", "123");
+ EXPECT_EQ(123, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", 333));
+ SetEnv(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", "-123");
+ EXPECT_EQ(-123, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", 333));
+}
+#endif // !GTEST_OS_WINDOWS_MOBILE
+
+// Tests that Int32FromEnvOrDie() aborts with an error message
+// if the variable is not an Int32.
+TEST(Int32FromEnvOrDieDeathTest, AbortsOnFailure) {
+ SetEnv(GTEST_FLAG_PREFIX_UPPER_ "VAR", "xxx");
+ EXPECT_DEATH_IF_SUPPORTED(
+ Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "VAR", 123),
+ ".*");
+}
+
+// Tests that Int32FromEnvOrDie() aborts with an error message
+// if the variable cannot be represented as an Int32.
+TEST(Int32FromEnvOrDieDeathTest, AbortsOnInt32Overflow) {
+ SetEnv(GTEST_FLAG_PREFIX_UPPER_ "VAR", "1234567891234567891234");
+ EXPECT_DEATH_IF_SUPPORTED(
+ Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "VAR", 123),
+ ".*");
+}
+
+// Tests that ShouldRunTestOnShard() selects all tests
+// when there is only one shard.
+TEST(ShouldRunTestOnShardTest, IsPartitionWhenThereIsOneShard) {
+ EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 0));
+ EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 1));
+ EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 2));
+ EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 3));
+ EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 4));
+}
+
+class ShouldShardTest : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ index_var_ = GTEST_FLAG_PREFIX_UPPER_ "INDEX";
+ total_var_ = GTEST_FLAG_PREFIX_UPPER_ "TOTAL";
+ }
+
+ virtual void TearDown() {
+ SetEnv(index_var_, "");
+ SetEnv(total_var_, "");
+ }
+
+ const char* index_var_;
+ const char* total_var_;
+};
+
+// Tests that sharding is disabled if neither of the environment variables
+// is set.
+TEST_F(ShouldShardTest, ReturnsFalseWhenNeitherEnvVarIsSet) {
+ SetEnv(index_var_, "");
+ SetEnv(total_var_, "");
+
+ EXPECT_FALSE(ShouldShard(total_var_, index_var_, false));
+ EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));
+}
+
+// Tests that sharding is not enabled if total_shards == 1.
+TEST_F(ShouldShardTest, ReturnsFalseWhenTotalShardIsOne) {
+ SetEnv(index_var_, "0");
+ SetEnv(total_var_, "1");
+ EXPECT_FALSE(ShouldShard(total_var_, index_var_, false));
+ EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));
+}
+
+// Tests that sharding is enabled if total_shards > 1 and
+// we are not in a death test subprocess.
+// Environment variables are not supported on Windows CE.
+#if !GTEST_OS_WINDOWS_MOBILE
+TEST_F(ShouldShardTest, WorksWhenShardEnvVarsAreValid) {
+ SetEnv(index_var_, "4");
+ SetEnv(total_var_, "22");
+ EXPECT_TRUE(ShouldShard(total_var_, index_var_, false));
+ EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));
+
+ SetEnv(index_var_, "8");
+ SetEnv(total_var_, "9");
+ EXPECT_TRUE(ShouldShard(total_var_, index_var_, false));
+ EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));
+
+ SetEnv(index_var_, "0");
+ SetEnv(total_var_, "9");
+ EXPECT_TRUE(ShouldShard(total_var_, index_var_, false));
+ EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));
+}
+#endif // !GTEST_OS_WINDOWS_MOBILE
+
+// Tests that we exit in error if the sharding values are not valid.
+
+typedef ShouldShardTest ShouldShardDeathTest;
+
+TEST_F(ShouldShardDeathTest, AbortsWhenShardingEnvVarsAreInvalid) {
+ SetEnv(index_var_, "4");
+ SetEnv(total_var_, "4");
+ EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*");
+
+ SetEnv(index_var_, "4");
+ SetEnv(total_var_, "-2");
+ EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*");
+
+ SetEnv(index_var_, "5");
+ SetEnv(total_var_, "");
+ EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*");
+
+ SetEnv(index_var_, "");
+ SetEnv(total_var_, "5");
+ EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*");
+}
+
+// Tests that ShouldRunTestOnShard is a partition when 5
+// shards are used.
+TEST(ShouldRunTestOnShardTest, IsPartitionWhenThereAreFiveShards) {
+ // Choose an arbitrary number of tests and shards.
+ const int num_tests = 17;
+ const int num_shards = 5;
+
+ // Check partitioning: each test should be on exactly 1 shard.
+ for (int test_id = 0; test_id < num_tests; test_id++) {
+ int prev_selected_shard_index = -1;
+ for (int shard_index = 0; shard_index < num_shards; shard_index++) {
+ if (ShouldRunTestOnShard(num_shards, shard_index, test_id)) {
+ if (prev_selected_shard_index < 0) {
+ prev_selected_shard_index = shard_index;
+ } else {
+ ADD_FAILURE() << "Shard " << prev_selected_shard_index << " and "
+ << shard_index << " are both selected to run test " << test_id;
+ }
+ }
+ }
+ }
+
+ // Check balance: This is not required by the sharding protocol, but is a
+ // desirable property for performance.
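+ // With num_tests = 17 and num_shards = 5, each shard should receive at
+ // least 17 / 5 = 3 tests (integer division), which EXPECT_GE checks below.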
+ for (int shard_index = 0; shard_index < num_shards; shard_index++) {
+ int num_tests_on_shard = 0;
+ for (int test_id = 0; test_id < num_tests; test_id++) {
+ num_tests_on_shard +=
+ ShouldRunTestOnShard(num_shards, shard_index, test_id);
+ }
+ EXPECT_GE(num_tests_on_shard, num_tests / num_shards);
+ }
+}
+
+// For the same reason we are not explicitly testing everything in the
+// Test class, there are no separate tests for the following classes
+// (except for some trivial cases):
+//
+// TestCase, UnitTest, UnitTestResultPrinter.
+//
+// Similarly, there are no separate tests for the following macros:
+//
+// TEST, TEST_F, RUN_ALL_TESTS
+
+TEST(UnitTestTest, CanGetOriginalWorkingDir) {
+ ASSERT_TRUE(UnitTest::GetInstance()->original_working_dir() != NULL);
+ EXPECT_STRNE(UnitTest::GetInstance()->original_working_dir(), "");
+}
+
+// This group of tests is for predicate assertions (ASSERT_PRED*, etc)
+// of various arities. They do not attempt to be exhaustive. Rather,
+// view them as smoke tests that can be easily reviewed and verified.
+// A more complete set of tests for predicate assertions can be found
+// in gtest_pred_impl_unittest.cc.
+
+// First, some predicates and predicate-formatters needed by the tests.
+
+// Returns true iff the argument is an even number.
+bool IsEven(int n) {
+ return (n % 2) == 0;
+}
+
+// A functor that returns true iff the argument is an even number.
+struct IsEvenFunctor {
+ bool operator()(int n) { return IsEven(n); }
+};
+
+// A predicate-formatter function that asserts the argument is an even
+// number.
+AssertionResult AssertIsEven(const char* expr, int n) {
+ if (IsEven(n)) {
+ return AssertionSuccess();
+ }
+
+ Message msg;
+ msg << expr << " evaluates to " << n << ", which is not even.";
+ return AssertionFailure(msg);
+}
+
+// A predicate function that returns AssertionResult for use in
+// EXPECT/ASSERT_TRUE/FALSE.
+AssertionResult ResultIsEven(int n) {
+ if (IsEven(n))
+ return AssertionSuccess() << n << " is even";
+ else
+ return AssertionFailure() << n << " is odd";
+}
+
+// A predicate function that returns AssertionResult but gives no
+// explanation why it succeeds. Needed for testing that
+// EXPECT/ASSERT_FALSE handles such functions correctly.
+AssertionResult ResultIsEvenNoExplanation(int n) {
+ if (IsEven(n))
+ return AssertionSuccess();
+ else
+ return AssertionFailure() << n << " is odd";
+}
+
+// A predicate-formatter functor that asserts the argument is an even
+// number.
+struct AssertIsEvenFunctor {
+ AssertionResult operator()(const char* expr, int n) {
+ return AssertIsEven(expr, n);
+ }
+};
+
+// Returns true iff the sum of the arguments is an even number.
+bool SumIsEven2(int n1, int n2) {
+ return IsEven(n1 + n2);
+}
+
+// A functor that returns true iff the sum of the arguments is an even
+// number.
+struct SumIsEven3Functor {
+ bool operator()(int n1, int n2, int n3) {
+ return IsEven(n1 + n2 + n3);
+ }
+};
+
+// A predicate-formatter function that asserts the sum of the
+// arguments is an even number.
+AssertionResult AssertSumIsEven4(
+ const char* e1, const char* e2, const char* e3, const char* e4,
+ int n1, int n2, int n3, int n4) {
+ const int sum = n1 + n2 + n3 + n4;
+ if (IsEven(sum)) {
+ return AssertionSuccess();
+ }
+
+ Message msg;
+ msg << e1 << " + " << e2 << " + " << e3 << " + " << e4
+ << " (" << n1 << " + " << n2 << " + " << n3 << " + " << n4
+ << ") evaluates to " << sum << ", which is not even.";
+ return AssertionFailure(msg);
+}
+
+// A predicate-formatter functor that asserts the sum of the arguments
+// is an even number.
+struct AssertSumIsEven5Functor {
+ AssertionResult operator()(
+ const char* e1, const char* e2, const char* e3, const char* e4,
+ const char* e5, int n1, int n2, int n3, int n4, int n5) {
+ const int sum = n1 + n2 + n3 + n4 + n5;
+ if (IsEven(sum)) {
+ return AssertionSuccess();
+ }
+
+ Message msg;
+ msg << e1 << " + " << e2 << " + " << e3 << " + " << e4 << " + " << e5
+ << " ("
+ << n1 << " + " << n2 << " + " << n3 << " + " << n4 << " + " << n5
+ << ") evaluates to " << sum << ", which is not even.";
+ return AssertionFailure(msg);
+ }
+};
+
+
+// Tests unary predicate assertions.
+
+// Tests unary predicate assertions that don't use a custom formatter.
+TEST(Pred1Test, WithoutFormat) {
+ // Success cases.
+ EXPECT_PRED1(IsEvenFunctor(), 2) << "This failure is UNEXPECTED!";
+ ASSERT_PRED1(IsEven, 4);
+
+ // Failure cases.
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED1(IsEven, 5) << "This failure is expected.";
+ }, "This failure is expected.");
+ EXPECT_FATAL_FAILURE(ASSERT_PRED1(IsEvenFunctor(), 5),
+ "evaluates to false");
+}
+
+// Tests unary predicate assertions that use a custom formatter.
+TEST(Pred1Test, WithFormat) {
+ // Success cases.
+ EXPECT_PRED_FORMAT1(AssertIsEven, 2);
+ ASSERT_PRED_FORMAT1(AssertIsEvenFunctor(), 4)
+ << "This failure is UNEXPECTED!";
+
+ // Failure cases.
+ const int n = 5;
+ EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT1(AssertIsEvenFunctor(), n),
+ "n evaluates to 5, which is not even.");
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT1(AssertIsEven, 5) << "This failure is expected.";
+ }, "This failure is expected.");
+}
+
+// Tests that unary predicate assertions evaluate their arguments
+// exactly once.
+TEST(Pred1Test, SingleEvaluationOnFailure) {
+ // A success case.
+ static int n = 0;
+ EXPECT_PRED1(IsEven, n++);
+ EXPECT_EQ(1, n) << "The argument is not evaluated exactly once.";
+
+ // A failure case.
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT1(AssertIsEvenFunctor(), n++)
+ << "This failure is expected.";
+ }, "This failure is expected.");
+ EXPECT_EQ(2, n) << "The argument is not evaluated exactly once.";
+}
+
+
+// Tests predicate assertions whose arity is >= 2.
+
+// Tests predicate assertions that don't use a custom formatter.
+TEST(PredTest, WithoutFormat) {
+ // Success cases.
+ ASSERT_PRED2(SumIsEven2, 2, 4) << "This failure is UNEXPECTED!";
+ EXPECT_PRED3(SumIsEven3Functor(), 4, 6, 8);
+
+ // Failure cases.
+ const int n1 = 1;
+ const int n2 = 2;
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED2(SumIsEven2, n1, n2) << "This failure is expected.";
+ }, "This failure is expected.");
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED3(SumIsEven3Functor(), 1, 2, 4);
+ }, "evaluates to false");
+}
+
+// Tests predicate assertions that use a custom formatter.
+TEST(PredTest, WithFormat) {
+ // Success cases.
+ ASSERT_PRED_FORMAT4(AssertSumIsEven4, 4, 6, 8, 10) <<
+ "This failure is UNEXPECTED!";
+ EXPECT_PRED_FORMAT5(AssertSumIsEven5Functor(), 2, 4, 6, 8, 10);
+
+ // Failure cases.
+ const int n1 = 1;
+ const int n2 = 2;
+ const int n3 = 4;
+ const int n4 = 6;
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT4(AssertSumIsEven4, n1, n2, n3, n4);
+ }, "evaluates to 13, which is not even.");
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(), 1, 2, 4, 6, 8)
+ << "This failure is expected.";
+ }, "This failure is expected.");
+}
+
+// Tests that predicate assertions evaluate their arguments
+// exactly once.
+TEST(PredTest, SingleEvaluationOnFailure) {
+ // A success case.
+ int n1 = 0;
+ int n2 = 0;
+ EXPECT_PRED2(SumIsEven2, n1++, n2++);
+ EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once.";
+ EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once.";
+
+ // Another success case.
+ n1 = n2 = 0;
+ int n3 = 0;
+ int n4 = 0;
+ int n5 = 0;
+ ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(),
+ n1++, n2++, n3++, n4++, n5++)
+ << "This failure is UNEXPECTED!";
+ EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once.";
+ EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once.";
+ EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once.";
+ EXPECT_EQ(1, n4) << "Argument 4 is not evaluated exactly once.";
+ EXPECT_EQ(1, n5) << "Argument 5 is not evaluated exactly once.";
+
+ // A failure case.
+ n1 = n2 = n3 = 0;
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED3(SumIsEven3Functor(), ++n1, n2++, n3++)
+ << "This failure is expected.";
+ }, "This failure is expected.");
+ EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once.";
+ EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once.";
+ EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once.";
+
+ // Another failure case.
+ n1 = n2 = n3 = n4 = 0;
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT4(AssertSumIsEven4, ++n1, n2++, n3++, n4++);
+ }, "evaluates to 1, which is not even.");
+ EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once.";
+ EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once.";
+ EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once.";
+ EXPECT_EQ(1, n4) << "Argument 4 is not evaluated exactly once.";
+}
+
+
+// Some helper functions for testing using overloaded/template
+// functions with ASSERT_PREDn and EXPECT_PREDn.
+
+bool IsPositive(double x) {
+ return x > 0;
+}
+
+template <typename T>
+bool IsNegative(T x) {
+ return x < 0;
+}
+
+template <typename T1, typename T2>
+bool GreaterThan(T1 x1, T2 x2) {
+ return x1 > x2;
+}
+
+// Tests that overloaded functions can be used in *_PRED* as long as
+// their types are explicitly specified.
+TEST(PredicateAssertionTest, AcceptsOverloadedFunction) {
+ // C++Builder requires C-style casts rather than static_cast.
+ EXPECT_PRED1((bool (*)(int))(IsPositive), 5); // NOLINT
+ ASSERT_PRED1((bool (*)(double))(IsPositive), 6.0); // NOLINT
+}
+
+// Tests that template functions can be used in *_PRED* as long as
+// their types are explicitly specified.
+TEST(PredicateAssertionTest, AcceptsTemplateFunction) {
+ EXPECT_PRED1(IsNegative<int>, -5);
+ // Makes sure that we can handle templates with more than one
+ // parameter.
+ ASSERT_PRED2((GreaterThan<int, int>), 5, 0);
+}
+
+
+// Some helper functions for testing using overloaded/template
+// functions with ASSERT_PRED_FORMATn and EXPECT_PRED_FORMATn.
+
+AssertionResult IsPositiveFormat(const char* /* expr */, int n) {
+ return n > 0 ? AssertionSuccess() :
+ AssertionFailure(Message() << "Failure");
+}
+
+AssertionResult IsPositiveFormat(const char* /* expr */, double x) {
+ return x > 0 ? AssertionSuccess() :
+ AssertionFailure(Message() << "Failure");
+}
+
+template <typename T>
+AssertionResult IsNegativeFormat(const char* /* expr */, T x) {
+ return x < 0 ? AssertionSuccess() :
+ AssertionFailure(Message() << "Failure");
+}
+
+template <typename T1, typename T2>
+AssertionResult EqualsFormat(const char* /* expr1 */, const char* /* expr2 */,
+ const T1& x1, const T2& x2) {
+ return x1 == x2 ? AssertionSuccess() :
+ AssertionFailure(Message() << "Failure");
+}
+
+// Tests that overloaded functions can be used in *_PRED_FORMAT*
+// without explicitly specifying their types.
+TEST(PredicateFormatAssertionTest, AcceptsOverloadedFunction) {
+ EXPECT_PRED_FORMAT1(IsPositiveFormat, 5);
+ ASSERT_PRED_FORMAT1(IsPositiveFormat, 6.0);
+}
+
+// Tests that template functions can be used in *_PRED_FORMAT* without
+// explicitly specifying their types.
+TEST(PredicateFormatAssertionTest, AcceptsTemplateFunction) {
+ EXPECT_PRED_FORMAT1(IsNegativeFormat, -5);
+ ASSERT_PRED_FORMAT2(EqualsFormat, 3, 3);
+}
+
+
+// Tests string assertions.
+
+// Tests ASSERT_STREQ with non-NULL arguments.
+TEST(StringAssertionTest, ASSERT_STREQ) {
+ const char * const p1 = "good";
+ ASSERT_STREQ(p1, p1);
+
+ // Let p2 have the same content as p1, but be at a different address.
+ const char p2[] = "good";
+ ASSERT_STREQ(p1, p2);
+
+ EXPECT_FATAL_FAILURE(ASSERT_STREQ("bad", "good"),
+ "Expected: \"bad\"");
+}
+
+// Tests ASSERT_STREQ with NULL arguments.
+TEST(StringAssertionTest, ASSERT_STREQ_Null) {
+ ASSERT_STREQ(static_cast<const char *>(NULL), NULL);
+ EXPECT_FATAL_FAILURE(ASSERT_STREQ(NULL, "non-null"),
+ "non-null");
+}
+
+// Tests ASSERT_STREQ with NULL arguments.
+TEST(StringAssertionTest, ASSERT_STREQ_Null2) {
+ EXPECT_FATAL_FAILURE(ASSERT_STREQ("non-null", NULL),
+ "non-null");
+}
+
+// Tests ASSERT_STRNE.
+TEST(StringAssertionTest, ASSERT_STRNE) {
+ ASSERT_STRNE("hi", "Hi");
+ ASSERT_STRNE("Hi", NULL);
+ ASSERT_STRNE(NULL, "Hi");
+ ASSERT_STRNE("", NULL);
+ ASSERT_STRNE(NULL, "");
+ ASSERT_STRNE("", "Hi");
+ ASSERT_STRNE("Hi", "");
+ EXPECT_FATAL_FAILURE(ASSERT_STRNE("Hi", "Hi"),
+ "\"Hi\" vs \"Hi\"");
+}
+
+// Tests ASSERT_STRCASEEQ.
+TEST(StringAssertionTest, ASSERT_STRCASEEQ) {
+ ASSERT_STRCASEEQ("hi", "Hi");
+ ASSERT_STRCASEEQ(static_cast<const char *>(NULL), NULL);
+
+ ASSERT_STRCASEEQ("", "");
+ EXPECT_FATAL_FAILURE(ASSERT_STRCASEEQ("Hi", "hi2"),
+ "(ignoring case)");
+}
+
+// Tests ASSERT_STRCASENE.
+TEST(StringAssertionTest, ASSERT_STRCASENE) {
+ ASSERT_STRCASENE("hi1", "Hi2");
+ ASSERT_STRCASENE("Hi", NULL);
+ ASSERT_STRCASENE(NULL, "Hi");
+ ASSERT_STRCASENE("", NULL);
+ ASSERT_STRCASENE(NULL, "");
+ ASSERT_STRCASENE("", "Hi");
+ ASSERT_STRCASENE("Hi", "");
+ EXPECT_FATAL_FAILURE(ASSERT_STRCASENE("Hi", "hi"),
+ "(ignoring case)");
+}
+
+// Tests *_STREQ on wide strings.
+TEST(StringAssertionTest, STREQ_Wide) {
+ // NULL strings.
+ ASSERT_STREQ(static_cast<const wchar_t *>(NULL), NULL);
+
+ // Empty strings.
+ ASSERT_STREQ(L"", L"");
+
+ // Non-null vs NULL.
+ EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"non-null", NULL),
+ "non-null");
+
+ // Equal strings.
+ EXPECT_STREQ(L"Hi", L"Hi");
+
+ // Unequal strings.
+ EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"abc", L"Abc"),
+ "Abc");
+
+ // Strings containing wide characters.
+ EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"abc\x8119", L"abc\x8120"),
+ "abc");
+}
+
+// Tests *_STRNE on wide strings.
+TEST(StringAssertionTest, STRNE_Wide) {
+ // NULL strings.
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_STRNE(static_cast<const wchar_t *>(NULL), NULL);
+ }, "");
+
+ // Empty strings.
+ EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"", L""),
+ "L\"\"");
+
+ // Non-null vs NULL.
+ ASSERT_STRNE(L"non-null", NULL);
+
+ // Equal strings.
+ EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"Hi", L"Hi"),
+ "L\"Hi\"");
+
+ // Unequal strings.
+ EXPECT_STRNE(L"abc", L"Abc");
+
+ // Strings containing wide characters.
+ EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"abc\x8119", L"abc\x8119"),
+ "abc");
+}
+
+// Tests for ::testing::IsSubstring().
+
+// Tests that IsSubstring() returns the correct result when the input
+// argument type is const char*.
+TEST(IsSubstringTest, ReturnsCorrectResultForCString) {
+ EXPECT_FALSE(IsSubstring("", "", NULL, "a"));
+ EXPECT_FALSE(IsSubstring("", "", "b", NULL));
+ EXPECT_FALSE(IsSubstring("", "", "needle", "haystack"));
+
+ EXPECT_TRUE(IsSubstring("", "", static_cast<const char*>(NULL), NULL));
+ EXPECT_TRUE(IsSubstring("", "", "needle", "two needles"));
+}
+
+// Tests that IsSubstring() returns the correct result when the input
+// argument type is const wchar_t*.
+TEST(IsSubstringTest, ReturnsCorrectResultForWideCString) {
+ EXPECT_FALSE(IsSubstring("", "", kNull, L"a"));
+ EXPECT_FALSE(IsSubstring("", "", L"b", kNull));
+ EXPECT_FALSE(IsSubstring("", "", L"needle", L"haystack"));
+
+ EXPECT_TRUE(IsSubstring("", "", static_cast<const wchar_t*>(NULL), NULL));
+ EXPECT_TRUE(IsSubstring("", "", L"needle", L"two needles"));
+}
+
+// Tests that IsSubstring() generates the correct message when the input
+// argument type is const char*.
+TEST(IsSubstringTest, GeneratesCorrectMessageForCString) {
+ EXPECT_STREQ("Value of: needle_expr\n"
+ " Actual: \"needle\"\n"
+ "Expected: a substring of haystack_expr\n"
+ "Which is: \"haystack\"",
+ IsSubstring("needle_expr", "haystack_expr",
+ "needle", "haystack").failure_message());
+}
+
+// Tests that IsSubstring returns the correct result when the input
+// argument type is ::std::string.
+TEST(IsSubstringTest, ReturnsCorrectResultsForStdString) {
+ EXPECT_TRUE(IsSubstring("", "", std::string("hello"), "ahellob"));
+ EXPECT_FALSE(IsSubstring("", "", "hello", std::string("world")));
+}
+
+#if GTEST_HAS_STD_WSTRING
+// Tests that IsSubstring returns the correct result when the input
+// argument type is ::std::wstring.
+TEST(IsSubstringTest, ReturnsCorrectResultForStdWstring) {
+ EXPECT_TRUE(IsSubstring("", "", ::std::wstring(L"needle"), L"two needles"));
+ EXPECT_FALSE(IsSubstring("", "", L"needle", ::std::wstring(L"haystack")));
+}
+
+// Tests that IsSubstring() generates the correct message when the input
+// argument type is ::std::wstring.
+TEST(IsSubstringTest, GeneratesCorrectMessageForWstring) {
+ EXPECT_STREQ("Value of: needle_expr\n"
+ " Actual: L\"needle\"\n"
+ "Expected: a substring of haystack_expr\n"
+ "Which is: L\"haystack\"",
+ IsSubstring(
+ "needle_expr", "haystack_expr",
+ ::std::wstring(L"needle"), L"haystack").failure_message());
+}
+
+#endif // GTEST_HAS_STD_WSTRING
+
+// Tests for ::testing::IsNotSubstring().
+
+// Tests that IsNotSubstring() returns the correct result when the input
+// argument type is const char*.
+TEST(IsNotSubstringTest, ReturnsCorrectResultForCString) {
+ EXPECT_TRUE(IsNotSubstring("", "", "needle", "haystack"));
+ EXPECT_FALSE(IsNotSubstring("", "", "needle", "two needles"));
+}
+
+// Tests that IsNotSubstring() returns the correct result when the input
+// argument type is const wchar_t*.
+TEST(IsNotSubstringTest, ReturnsCorrectResultForWideCString) {
+ EXPECT_TRUE(IsNotSubstring("", "", L"needle", L"haystack"));
+ EXPECT_FALSE(IsNotSubstring("", "", L"needle", L"two needles"));
+}
+
+// Tests that IsNotSubstring() generates the correct message when the input
+// argument type is const wchar_t*.
+TEST(IsNotSubstringTest, GeneratesCorrectMessageForWideCString) {
+ EXPECT_STREQ("Value of: needle_expr\n"
+ " Actual: L\"needle\"\n"
+ "Expected: not a substring of haystack_expr\n"
+ "Which is: L\"two needles\"",
+ IsNotSubstring(
+ "needle_expr", "haystack_expr",
+ L"needle", L"two needles").failure_message());
+}
+
+// Tests that IsNotSubstring returns the correct result when the input
+// argument type is ::std::string.
+TEST(IsNotSubstringTest, ReturnsCorrectResultsForStdString) {
+ EXPECT_FALSE(IsNotSubstring("", "", std::string("hello"), "ahellob"));
+ EXPECT_TRUE(IsNotSubstring("", "", "hello", std::string("world")));
+}
+
+// Tests that IsNotSubstring() generates the correct message when the input
+// argument type is ::std::string.
+TEST(IsNotSubstringTest, GeneratesCorrectMessageForStdString) {
+ EXPECT_STREQ("Value of: needle_expr\n"
+ " Actual: \"needle\"\n"
+ "Expected: not a substring of haystack_expr\n"
+ "Which is: \"two needles\"",
+ IsNotSubstring(
+ "needle_expr", "haystack_expr",
+ ::std::string("needle"), "two needles").failure_message());
+}
+
+#if GTEST_HAS_STD_WSTRING
+
+// Tests that IsNotSubstring returns the correct result when the input
+// argument type is ::std::wstring.
+TEST(IsNotSubstringTest, ReturnsCorrectResultForStdWstring) {
+ EXPECT_FALSE(
+ IsNotSubstring("", "", ::std::wstring(L"needle"), L"two needles"));
+ EXPECT_TRUE(IsNotSubstring("", "", L"needle", ::std::wstring(L"haystack")));
+}
+
+#endif // GTEST_HAS_STD_WSTRING
+
+// Tests floating-point assertions.
+
+template <typename RawType>
+class FloatingPointTest : public Test {
+ protected:
+
+ // Pre-calculated numbers to be used by the tests.
+ struct TestValues {
+ RawType close_to_positive_zero;
+ RawType close_to_negative_zero;
+ RawType further_from_negative_zero;
+
+ RawType close_to_one;
+ RawType further_from_one;
+
+ RawType infinity;
+ RawType close_to_infinity;
+ RawType further_from_infinity;
+
+ RawType nan1;
+ RawType nan2;
+ };
+
+ typedef typename testing::internal::FloatingPoint<RawType> Floating;
+ typedef typename Floating::Bits Bits;
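+ // Bits is an unsigned integer type with the same size as RawType (verified
+ // by TestSize() below); it lets the tests construct values a given number
+ // of ULPs away from a reference value via ReinterpretBits().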
+
+ virtual void SetUp() {
+ const size_t max_ulps = Floating::kMaxUlps;
+
+ // The bits that represent 0.0.
+ const Bits zero_bits = Floating(0).bits();
+
+ // Makes some numbers close to 0.0.
+ values_.close_to_positive_zero = Floating::ReinterpretBits(
+ zero_bits + max_ulps/2);
+ values_.close_to_negative_zero = -Floating::ReinterpretBits(
+ zero_bits + max_ulps - max_ulps/2);
+ values_.further_from_negative_zero = -Floating::ReinterpretBits(
+ zero_bits + max_ulps + 1 - max_ulps/2);
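+ // close_to_positive_zero and close_to_negative_zero straddle 0 and are
+ // kMaxUlps apart in total, so *_FLOAT_EQ should treat them as equal;
+ // further_from_negative_zero is one ULP beyond that tolerance.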
+
+ // The bits that represent 1.0.
+ const Bits one_bits = Floating(1).bits();
+
+ // Makes some numbers close to 1.0.
+ values_.close_to_one = Floating::ReinterpretBits(one_bits + max_ulps);
+ values_.further_from_one = Floating::ReinterpretBits(
+ one_bits + max_ulps + 1);
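+ // close_to_one is exactly kMaxUlps ULPs away from 1.0 and should still
+ // compare equal under *_FLOAT_EQ; further_from_one is one ULP beyond.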
+
+ // +infinity.
+ values_.infinity = Floating::Infinity();
+
+ // The bits that represent +infinity.
+ const Bits infinity_bits = Floating(values_.infinity).bits();
+
+ // Makes some numbers close to infinity.
+ values_.close_to_infinity = Floating::ReinterpretBits(
+ infinity_bits - max_ulps);
+ values_.further_from_infinity = Floating::ReinterpretBits(
+ infinity_bits - max_ulps - 1);
+
+ // Makes some NaNs. Sets the most significant bit of the fraction so that
+ // our NaNs are quiet; trying to process a signaling NaN would raise an
+ // exception if our environment enables floating point exceptions.
+ values_.nan1 = Floating::ReinterpretBits(Floating::kExponentBitMask
+ | (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 1);
+ values_.nan2 = Floating::ReinterpretBits(Floating::kExponentBitMask
+ | (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 200);
+ }
+
+ void TestSize() {
+ EXPECT_EQ(sizeof(RawType), sizeof(Bits));
+ }
+
+ static TestValues values_;
+};
+
+template <typename RawType>
+typename FloatingPointTest<RawType>::TestValues
+ FloatingPointTest<RawType>::values_;
+
+// Instantiates FloatingPointTest for testing *_FLOAT_EQ.
+typedef FloatingPointTest<float> FloatTest;
+
+// Tests that the size of Float::Bits matches the size of float.
+TEST_F(FloatTest, Size) {
+ TestSize();
+}
+
+// Tests comparing with +0 and -0.
+TEST_F(FloatTest, Zeros) {
+ EXPECT_FLOAT_EQ(0.0, -0.0);
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(-0.0, 1.0),
+ "1.0");
+ EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(0.0, 1.5),
+ "1.5");
+}
+
+// Tests comparing numbers close to 0.
+//
+// This ensures that *_FLOAT_EQ handles the sign correctly and no
+// overflow occurs when comparing numbers whose absolute value is very
+// small.
+TEST_F(FloatTest, AlmostZeros) {
+ // In C++Builder, names within local classes (such as used by
+ // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the
+ // scoping class. Use a static local alias as a workaround.
+ // We use the assignment syntax since some compilers, like Sun Studio,
+ // don't allow initializing references using construction syntax
+ // (parentheses).
+ static const FloatTest::TestValues& v = this->values_;
+
+ EXPECT_FLOAT_EQ(0.0, v.close_to_positive_zero);
+ EXPECT_FLOAT_EQ(-0.0, v.close_to_negative_zero);
+ EXPECT_FLOAT_EQ(v.close_to_positive_zero, v.close_to_negative_zero);
+
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_FLOAT_EQ(v.close_to_positive_zero,
+ v.further_from_negative_zero);
+ }, "v.further_from_negative_zero");
+}
+
+// Tests comparing numbers close to each other.
+TEST_F(FloatTest, SmallDiff) {
+ EXPECT_FLOAT_EQ(1.0, values_.close_to_one);
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(1.0, values_.further_from_one),
+ "values_.further_from_one");
+}
+
+// Tests comparing numbers far apart.
+TEST_F(FloatTest, LargeDiff) {
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(2.5, 3.0),
+ "3.0");
+}
+
+// Tests comparing with infinity.
+//
+// This ensures that no overflow occurs when comparing numbers whose
+// absolute value is very large.
+TEST_F(FloatTest, Infinity) {
+ EXPECT_FLOAT_EQ(values_.infinity, values_.close_to_infinity);
+ EXPECT_FLOAT_EQ(-values_.infinity, -values_.close_to_infinity);
+#if !GTEST_OS_SYMBIAN
+ // Nokia's STLport crashes if we try to output infinity or NaN.
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.infinity, -values_.infinity),
+ "-values_.infinity");
+
+ // This is interesting as the representations of infinity and nan1
+ // are only 1 ULP apart.
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.infinity, values_.nan1),
+ "values_.nan1");
+#endif // !GTEST_OS_SYMBIAN
+}
+
+// Tests that comparing with NAN always returns false.
+TEST_F(FloatTest, NaN) {
+#if !GTEST_OS_SYMBIAN
+// Nokia's STLport crashes if we try to output infinity or NaN.
+
+ // In C++Builder, names within local classes (such as used by
+ // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the
+ // scoping class. Use a static local alias as a workaround.
+ // We use the assignment syntax since some compilers, like Sun Studio,
+ // don't allow initializing references using construction syntax
+ // (parentheses).
+ static const FloatTest::TestValues& v = this->values_;
+
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan1),
+ "v.nan1");
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan2),
+ "v.nan2");
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(1.0, v.nan1),
+ "v.nan1");
+
+ EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(v.nan1, v.infinity),
+ "v.infinity");
+#endif // !GTEST_OS_SYMBIAN
+}
+
+// Tests that *_FLOAT_EQ are reflexive.
+TEST_F(FloatTest, Reflexive) {
+ EXPECT_FLOAT_EQ(0.0, 0.0);
+ EXPECT_FLOAT_EQ(1.0, 1.0);
+ ASSERT_FLOAT_EQ(values_.infinity, values_.infinity);
+}
+
+// Tests that *_FLOAT_EQ are commutative.
+TEST_F(FloatTest, Commutative) {
+ // We already tested EXPECT_FLOAT_EQ(1.0, values_.close_to_one).
+ EXPECT_FLOAT_EQ(values_.close_to_one, 1.0);
+
+ // We already tested EXPECT_FLOAT_EQ(1.0, values_.further_from_one).
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.further_from_one, 1.0),
+ "1.0");
+}
+
+// Tests EXPECT_NEAR.
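+// (EXPECT_NEAR(val1, val2, abs_error) verifies that the absolute difference
+// between val1 and val2 does not exceed abs_error.)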
+TEST_F(FloatTest, EXPECT_NEAR) {
+ EXPECT_NEAR(-1.0f, -1.1f, 0.2f);
+ EXPECT_NEAR(2.0f, 3.0f, 1.0f);
+ EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0f,1.5f, 0.25f), // NOLINT
+ "The difference between 1.0f and 1.5f is 0.5, "
+ "which exceeds 0.25f");
+ // To work around a bug in gcc 2.95.0, there is intentionally no
+ // space after the first comma in the previous line.
+}
+
+// Tests ASSERT_NEAR.
+TEST_F(FloatTest, ASSERT_NEAR) {
+ ASSERT_NEAR(-1.0f, -1.1f, 0.2f);
+ ASSERT_NEAR(2.0f, 3.0f, 1.0f);
+ EXPECT_FATAL_FAILURE(ASSERT_NEAR(1.0f,1.5f, 0.25f), // NOLINT
+ "The difference between 1.0f and 1.5f is 0.5, "
+ "which exceeds 0.25f");
+ // To work around a bug in gcc 2.95.0, there is intentionally no
+ // space after the first comma in the previous line.
+}
+
+// Tests the cases where FloatLE() should succeed.
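+// FloatLE is a predicate-formatter: used with {EXPECT,ASSERT}_PRED_FORMAT2 it
+// asserts that the first value is less than, or almost equal to, the second.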
+TEST_F(FloatTest, FloatLESucceeds) {
+ EXPECT_PRED_FORMAT2(FloatLE, 1.0f, 2.0f); // When val1 < val2,
+ ASSERT_PRED_FORMAT2(FloatLE, 1.0f, 1.0f); // val1 == val2,
+
+  // or when val1 is greater than, but almost equal to, val2.
+ EXPECT_PRED_FORMAT2(FloatLE, values_.close_to_positive_zero, 0.0f);
+}
+
+// Tests the cases where FloatLE() should fail.
+TEST_F(FloatTest, FloatLEFails) {
+ // When val1 is greater than val2 by a large margin,
+ EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT2(FloatLE, 2.0f, 1.0f),
+ "(2.0f) <= (1.0f)");
+
+ // or by a small yet non-negligible margin,
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT2(FloatLE, values_.further_from_one, 1.0f);
+ }, "(values_.further_from_one) <= (1.0f)");
+
+#if !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
+ // Nokia's STLport crashes if we try to output infinity or NaN.
+ // C++Builder gives bad results for ordered comparisons involving NaNs
+ // due to compiler bugs.
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT2(FloatLE, values_.nan1, values_.infinity);
+ }, "(values_.nan1) <= (values_.infinity)");
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT2(FloatLE, -values_.infinity, values_.nan1);
+ }, "(-values_.infinity) <= (values_.nan1)");
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT2(FloatLE, values_.nan1, values_.nan1);
+ }, "(values_.nan1) <= (values_.nan1)");
+#endif // !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
+}
+
+// Instantiates FloatingPointTest for testing *_DOUBLE_EQ.
+typedef FloatingPointTest<double> DoubleTest;
+
+// Tests that the size of Double::Bits matches the size of double.
+TEST_F(DoubleTest, Size) {
+ TestSize();
+}
+
+// Tests comparing with +0 and -0.
+TEST_F(DoubleTest, Zeros) {
+ EXPECT_DOUBLE_EQ(0.0, -0.0);
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(-0.0, 1.0),
+ "1.0");
+ EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(0.0, 1.0),
+ "1.0");
+}
+
+// Tests comparing numbers close to 0.
+//
+// This ensures that *_DOUBLE_EQ handles the sign correctly and no
+// overflow occurs when comparing numbers whose absolute value is very
+// small.
+TEST_F(DoubleTest, AlmostZeros) {
+ // In C++Builder, names within local classes (such as used by
+ // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the
+ // scoping class. Use a static local alias as a workaround.
+ // We use the assignment syntax since some compilers, like Sun Studio,
+ // don't allow initializing references using construction syntax
+ // (parentheses).
+ static const DoubleTest::TestValues& v = this->values_;
+
+ EXPECT_DOUBLE_EQ(0.0, v.close_to_positive_zero);
+ EXPECT_DOUBLE_EQ(-0.0, v.close_to_negative_zero);
+ EXPECT_DOUBLE_EQ(v.close_to_positive_zero, v.close_to_negative_zero);
+
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_DOUBLE_EQ(v.close_to_positive_zero,
+ v.further_from_negative_zero);
+ }, "v.further_from_negative_zero");
+}
+
+// Tests comparing numbers close to each other.
+TEST_F(DoubleTest, SmallDiff) {
+ EXPECT_DOUBLE_EQ(1.0, values_.close_to_one);
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1.0, values_.further_from_one),
+ "values_.further_from_one");
+}
+
+// Tests comparing numbers far apart.
+TEST_F(DoubleTest, LargeDiff) {
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(2.0, 3.0),
+ "3.0");
+}
+
+// Tests comparing with infinity.
+//
+// This ensures that no overflow occurs when comparing numbers whose
+// absolute value is very large.
+TEST_F(DoubleTest, Infinity) {
+ EXPECT_DOUBLE_EQ(values_.infinity, values_.close_to_infinity);
+ EXPECT_DOUBLE_EQ(-values_.infinity, -values_.close_to_infinity);
+#if !GTEST_OS_SYMBIAN
+ // Nokia's STLport crashes if we try to output infinity or NaN.
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, -values_.infinity),
+ "-values_.infinity");
+
+  // This is interesting as the representations of values_.infinity and
+  // values_.nan1 are only 1 ULP (unit in the last place) apart.
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, values_.nan1),
+ "values_.nan1");
+#endif // !GTEST_OS_SYMBIAN
+}
+
+// Tests that comparing with NAN always returns false.
+TEST_F(DoubleTest, NaN) {
+#if !GTEST_OS_SYMBIAN
+ // In C++Builder, names within local classes (such as used by
+ // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the
+ // scoping class. Use a static local alias as a workaround.
+ // We use the assignment syntax since some compilers, like Sun Studio,
+ // don't allow initializing references using construction syntax
+ // (parentheses).
+ static const DoubleTest::TestValues& v = this->values_;
+
+ // Nokia's STLport crashes if we try to output infinity or NaN.
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan1),
+ "v.nan1");
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan2), "v.nan2");
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1.0, v.nan1), "v.nan1");
+ EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(v.nan1, v.infinity),
+ "v.infinity");
+#endif // !GTEST_OS_SYMBIAN
+}
+
+// Tests that *_DOUBLE_EQ are reflexive.
+TEST_F(DoubleTest, Reflexive) {
+ EXPECT_DOUBLE_EQ(0.0, 0.0);
+ EXPECT_DOUBLE_EQ(1.0, 1.0);
+#if !GTEST_OS_SYMBIAN
+ // Nokia's STLport crashes if we try to output infinity or NaN.
+ ASSERT_DOUBLE_EQ(values_.infinity, values_.infinity);
+#endif // !GTEST_OS_SYMBIAN
+}
+
+// Tests that *_DOUBLE_EQ are commutative.
+TEST_F(DoubleTest, Commutative) {
+ // We already tested EXPECT_DOUBLE_EQ(1.0, values_.close_to_one).
+ EXPECT_DOUBLE_EQ(values_.close_to_one, 1.0);
+
+ // We already tested EXPECT_DOUBLE_EQ(1.0, values_.further_from_one).
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.further_from_one, 1.0),
+ "1.0");
+}
+
+// Tests EXPECT_NEAR.
+TEST_F(DoubleTest, EXPECT_NEAR) {
+ EXPECT_NEAR(-1.0, -1.1, 0.2);
+ EXPECT_NEAR(2.0, 3.0, 1.0);
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0,1.5, 0.25),  // NOLINT
+ "The difference between 1.0 and 1.5 is 0.5, "
+ "which exceeds 0.25");
+ // To work around a bug in gcc 2.95.0, there is intentionally no
+ // space after the first comma in the previous statement.
+}
+
+// Tests ASSERT_NEAR.
+TEST_F(DoubleTest, ASSERT_NEAR) {
+ ASSERT_NEAR(-1.0, -1.1, 0.2);
+ ASSERT_NEAR(2.0, 3.0, 1.0);
+  EXPECT_FATAL_FAILURE(ASSERT_NEAR(1.0,1.5, 0.25),  // NOLINT
+ "The difference between 1.0 and 1.5 is 0.5, "
+ "which exceeds 0.25");
+ // To work around a bug in gcc 2.95.0, there is intentionally no
+ // space after the first comma in the previous statement.
+}
+
+// Tests the cases where DoubleLE() should succeed.
+TEST_F(DoubleTest, DoubleLESucceeds) {
+ EXPECT_PRED_FORMAT2(DoubleLE, 1.0, 2.0); // When val1 < val2,
+ ASSERT_PRED_FORMAT2(DoubleLE, 1.0, 1.0); // val1 == val2,
+
+  // or when val1 is greater than, but almost equal to, val2.
+ EXPECT_PRED_FORMAT2(DoubleLE, values_.close_to_positive_zero, 0.0);
+}
+
+// Tests the cases where DoubleLE() should fail.
+TEST_F(DoubleTest, DoubleLEFails) {
+ // When val1 is greater than val2 by a large margin,
+ EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT2(DoubleLE, 2.0, 1.0),
+ "(2.0) <= (1.0)");
+
+ // or by a small yet non-negligible margin,
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT2(DoubleLE, values_.further_from_one, 1.0);
+ }, "(values_.further_from_one) <= (1.0)");
+
+#if !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
+ // Nokia's STLport crashes if we try to output infinity or NaN.
+ // C++Builder gives bad results for ordered comparisons involving NaNs
+ // due to compiler bugs.
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.infinity);
+ }, "(values_.nan1) <= (values_.infinity)");
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_PRED_FORMAT2(DoubleLE, -values_.infinity, values_.nan1);
+ }, " (-values_.infinity) <= (values_.nan1)");
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.nan1);
+ }, "(values_.nan1) <= (values_.nan1)");
+#endif // !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
+}
+
+
+// Verifies that a test or test case whose name starts with DISABLED_ is
+// not run.
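+// (Disabled tests are still compiled and reported as disabled; they can be
+// forced to run with the --gtest_also_run_disabled_tests flag.)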
+
+// A test whose name starts with DISABLED_.
+// Should not run.
+TEST(DisabledTest, DISABLED_TestShouldNotRun) {
+ FAIL() << "Unexpected failure: Disabled test should not be run.";
+}
+
+// A test whose name does not start with DISABLED_.
+// Should run.
+TEST(DisabledTest, NotDISABLED_TestShouldRun) {
+ EXPECT_EQ(1, 1);
+}
+
+// A test case whose name starts with DISABLED_.
+// Should not run.
+TEST(DISABLED_TestCase, TestShouldNotRun) {
+ FAIL() << "Unexpected failure: Test in disabled test case should not be run.";
+}
+
+// A test case and test whose names start with DISABLED_.
+// Should not run.
+TEST(DISABLED_TestCase, DISABLED_TestShouldNotRun) {
+ FAIL() << "Unexpected failure: Test in disabled test case should not be run.";
+}
+
+// Checks that when all tests in a test case are disabled, SetUpTestCase() and
+// TearDownTestCase() are not called.
+class DisabledTestsTest : public Test {
+ protected:
+ static void SetUpTestCase() {
+ FAIL() << "Unexpected failure: All tests disabled in test case. "
+ "SetupTestCase() should not be called.";
+ }
+
+ static void TearDownTestCase() {
+ FAIL() << "Unexpected failure: All tests disabled in test case. "
+ "TearDownTestCase() should not be called.";
+ }
+};
+
+TEST_F(DisabledTestsTest, DISABLED_TestShouldNotRun_1) {
+ FAIL() << "Unexpected failure: Disabled test should not be run.";
+}
+
+TEST_F(DisabledTestsTest, DISABLED_TestShouldNotRun_2) {
+ FAIL() << "Unexpected failure: Disabled test should not be run.";
+}
+
+// Tests that disabled typed tests aren't run.
+
+#if GTEST_HAS_TYPED_TEST
+
+template <typename T>
+class TypedTest : public Test {
+};
+
+typedef testing::Types<int, double> NumericTypes;
+TYPED_TEST_CASE(TypedTest, NumericTypes);
+
+TYPED_TEST(TypedTest, DISABLED_ShouldNotRun) {
+ FAIL() << "Unexpected failure: Disabled typed test should not run.";
+}
+
+template <typename T>
+class DISABLED_TypedTest : public Test {
+};
+
+TYPED_TEST_CASE(DISABLED_TypedTest, NumericTypes);
+
+TYPED_TEST(DISABLED_TypedTest, ShouldNotRun) {
+ FAIL() << "Unexpected failure: Disabled typed test should not run.";
+}
+
+#endif // GTEST_HAS_TYPED_TEST
+
+// Tests that disabled type-parameterized tests aren't run.
+
+#if GTEST_HAS_TYPED_TEST_P
+
+template <typename T>
+class TypedTestP : public Test {
+};
+
+TYPED_TEST_CASE_P(TypedTestP);
+
+TYPED_TEST_P(TypedTestP, DISABLED_ShouldNotRun) {
+ FAIL() << "Unexpected failure: "
+ << "Disabled type-parameterized test should not run.";
+}
+
+REGISTER_TYPED_TEST_CASE_P(TypedTestP, DISABLED_ShouldNotRun);
+
+INSTANTIATE_TYPED_TEST_CASE_P(My, TypedTestP, NumericTypes);
+
+template <typename T>
+class DISABLED_TypedTestP : public Test {
+};
+
+TYPED_TEST_CASE_P(DISABLED_TypedTestP);
+
+TYPED_TEST_P(DISABLED_TypedTestP, ShouldNotRun) {
+ FAIL() << "Unexpected failure: "
+ << "Disabled type-parameterized test should not run.";
+}
+
+REGISTER_TYPED_TEST_CASE_P(DISABLED_TypedTestP, ShouldNotRun);
+
+INSTANTIATE_TYPED_TEST_CASE_P(My, DISABLED_TypedTestP, NumericTypes);
+
+#endif // GTEST_HAS_TYPED_TEST_P
+
+// Tests that assertion macros evaluate their arguments exactly once.
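+// The fixture below resets static pointers/counters in its constructor; each
+// test then passes post-increment expressions to an assertion and checks the
+// side effects afterwards to confirm every argument was evaluated once.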
+
+class SingleEvaluationTest : public Test {
+ public: // Must be public and not protected due to a bug in g++ 3.4.2.
+ // This helper function is needed by the FailedASSERT_STREQ test
+ // below. It's public to work around C++Builder's bug with scoping local
+ // classes.
+ static void CompareAndIncrementCharPtrs() {
+ ASSERT_STREQ(p1_++, p2_++);
+ }
+
+ // This helper function is needed by the FailedASSERT_NE test below. It's
+ // public to work around C++Builder's bug with scoping local classes.
+ static void CompareAndIncrementInts() {
+ ASSERT_NE(a_++, b_++);
+ }
+
+ protected:
+ SingleEvaluationTest() {
+ p1_ = s1_;
+ p2_ = s2_;
+ a_ = 0;
+ b_ = 0;
+ }
+
+ static const char* const s1_;
+ static const char* const s2_;
+ static const char* p1_;
+ static const char* p2_;
+
+ static int a_;
+ static int b_;
+};
+
+const char* const SingleEvaluationTest::s1_ = "01234";
+const char* const SingleEvaluationTest::s2_ = "abcde";
+const char* SingleEvaluationTest::p1_;
+const char* SingleEvaluationTest::p2_;
+int SingleEvaluationTest::a_;
+int SingleEvaluationTest::b_;
+
+// Tests that when ASSERT_STREQ fails, it evaluates its arguments
+// exactly once.
+TEST_F(SingleEvaluationTest, FailedASSERT_STREQ) {
+ EXPECT_FATAL_FAILURE(SingleEvaluationTest::CompareAndIncrementCharPtrs(),
+ "p2_++");
+ EXPECT_EQ(s1_ + 1, p1_);
+ EXPECT_EQ(s2_ + 1, p2_);
+}
+
+// Tests that string assertion arguments are evaluated exactly once.
+TEST_F(SingleEvaluationTest, ASSERT_STR) {
+ // successful EXPECT_STRNE
+ EXPECT_STRNE(p1_++, p2_++);
+ EXPECT_EQ(s1_ + 1, p1_);
+ EXPECT_EQ(s2_ + 1, p2_);
+
+ // failed EXPECT_STRCASEEQ
+ EXPECT_NONFATAL_FAILURE(EXPECT_STRCASEEQ(p1_++, p2_++),
+ "ignoring case");
+ EXPECT_EQ(s1_ + 2, p1_);
+ EXPECT_EQ(s2_ + 2, p2_);
+}
+
+// Tests that when ASSERT_NE fails, it evaluates its arguments exactly
+// once.
+TEST_F(SingleEvaluationTest, FailedASSERT_NE) {
+ EXPECT_FATAL_FAILURE(SingleEvaluationTest::CompareAndIncrementInts(),
+ "(a_++) != (b_++)");
+ EXPECT_EQ(1, a_);
+ EXPECT_EQ(1, b_);
+}
+
+// Tests that assertion arguments are evaluated exactly once.
+TEST_F(SingleEvaluationTest, OtherCases) {
+ // successful EXPECT_TRUE
+ EXPECT_TRUE(0 == a_++); // NOLINT
+ EXPECT_EQ(1, a_);
+
+ // failed EXPECT_TRUE
+ EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(-1 == a_++), "-1 == a_++");
+ EXPECT_EQ(2, a_);
+
+ // successful EXPECT_GT
+ EXPECT_GT(a_++, b_++);
+ EXPECT_EQ(3, a_);
+ EXPECT_EQ(1, b_);
+
+ // failed EXPECT_LT
+ EXPECT_NONFATAL_FAILURE(EXPECT_LT(a_++, b_++), "(a_++) < (b_++)");
+ EXPECT_EQ(4, a_);
+ EXPECT_EQ(2, b_);
+
+ // successful ASSERT_TRUE
+ ASSERT_TRUE(0 < a_++); // NOLINT
+ EXPECT_EQ(5, a_);
+
+ // successful ASSERT_GT
+ ASSERT_GT(a_++, b_++);
+ EXPECT_EQ(6, a_);
+ EXPECT_EQ(3, b_);
+}
+
+#if GTEST_HAS_EXCEPTIONS
+
+void ThrowAnInteger() {
+ throw 1;
+}
+
+// Tests that assertion arguments are evaluated exactly once.
+TEST_F(SingleEvaluationTest, ExceptionTests) {
+ // successful EXPECT_THROW
+ EXPECT_THROW({ // NOLINT
+ a_++;
+ ThrowAnInteger();
+ }, int);
+ EXPECT_EQ(1, a_);
+
+ // failed EXPECT_THROW, throws different
+ EXPECT_NONFATAL_FAILURE(EXPECT_THROW({ // NOLINT
+ a_++;
+ ThrowAnInteger();
+ }, bool), "throws a different type");
+ EXPECT_EQ(2, a_);
+
+ // failed EXPECT_THROW, throws nothing
+ EXPECT_NONFATAL_FAILURE(EXPECT_THROW(a_++, bool), "throws nothing");
+ EXPECT_EQ(3, a_);
+
+ // successful EXPECT_NO_THROW
+ EXPECT_NO_THROW(a_++);
+ EXPECT_EQ(4, a_);
+
+ // failed EXPECT_NO_THROW
+ EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW({ // NOLINT
+ a_++;
+ ThrowAnInteger();
+ }), "it throws");
+ EXPECT_EQ(5, a_);
+
+ // successful EXPECT_ANY_THROW
+ EXPECT_ANY_THROW({ // NOLINT
+ a_++;
+ ThrowAnInteger();
+ });
+ EXPECT_EQ(6, a_);
+
+ // failed EXPECT_ANY_THROW
+ EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(a_++), "it doesn't");
+ EXPECT_EQ(7, a_);
+}
+
+#endif // GTEST_HAS_EXCEPTIONS
+
+// Tests {ASSERT|EXPECT}_NO_FATAL_FAILURE.
+class NoFatalFailureTest : public Test {
+ protected:
+ void Succeeds() {}
+ void FailsNonFatal() {
+ ADD_FAILURE() << "some non-fatal failure";
+ }
+ void Fails() {
+ FAIL() << "some fatal failure";
+ }
+
+ void DoAssertNoFatalFailureOnFails() {
+ ASSERT_NO_FATAL_FAILURE(Fails());
+ ADD_FAILURE() << "shold not reach here.";
+ }
+
+ void DoExpectNoFatalFailureOnFails() {
+ EXPECT_NO_FATAL_FAILURE(Fails());
+ ADD_FAILURE() << "other failure";
+ }
+};
+
+TEST_F(NoFatalFailureTest, NoFailure) {
+ EXPECT_NO_FATAL_FAILURE(Succeeds());
+ ASSERT_NO_FATAL_FAILURE(Succeeds());
+}
+
+TEST_F(NoFatalFailureTest, NonFatalIsNoFailure) {
+ EXPECT_NONFATAL_FAILURE(
+ EXPECT_NO_FATAL_FAILURE(FailsNonFatal()),
+ "some non-fatal failure");
+ EXPECT_NONFATAL_FAILURE(
+ ASSERT_NO_FATAL_FAILURE(FailsNonFatal()),
+ "some non-fatal failure");
+}
+
+TEST_F(NoFatalFailureTest, AssertNoFatalFailureOnFatalFailure) {
+ TestPartResultArray gtest_failures;
+ {
+ ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);
+ DoAssertNoFatalFailureOnFails();
+ }
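+  // Two failures are expected: the fatal failure raised by Fails(), plus the
+  // fatal failure that ASSERT_NO_FATAL_FAILURE itself reports; the trailing
+  // ADD_FAILURE() is never reached because the ASSERT_ variant aborts.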
+ ASSERT_EQ(2, gtest_failures.size());
+ EXPECT_EQ(TestPartResult::kFatalFailure,
+ gtest_failures.GetTestPartResult(0).type());
+ EXPECT_EQ(TestPartResult::kFatalFailure,
+ gtest_failures.GetTestPartResult(1).type());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "some fatal failure",
+ gtest_failures.GetTestPartResult(0).message());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "it does",
+ gtest_failures.GetTestPartResult(1).message());
+}
+
+TEST_F(NoFatalFailureTest, ExpectNoFatalFailureOnFatalFailure) {
+ TestPartResultArray gtest_failures;
+ {
+ ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);
+ DoExpectNoFatalFailureOnFails();
+ }
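+  // Three failures are expected: the fatal failure raised by Fails(), the
+  // non-fatal failure reported by EXPECT_NO_FATAL_FAILURE, and the "other
+  // failure" added afterwards (the EXPECT_ variant does not abort).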
+ ASSERT_EQ(3, gtest_failures.size());
+ EXPECT_EQ(TestPartResult::kFatalFailure,
+ gtest_failures.GetTestPartResult(0).type());
+ EXPECT_EQ(TestPartResult::kNonFatalFailure,
+ gtest_failures.GetTestPartResult(1).type());
+ EXPECT_EQ(TestPartResult::kNonFatalFailure,
+ gtest_failures.GetTestPartResult(2).type());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "some fatal failure",
+ gtest_failures.GetTestPartResult(0).message());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "it does",
+ gtest_failures.GetTestPartResult(1).message());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "other failure",
+ gtest_failures.GetTestPartResult(2).message());
+}
+
+TEST_F(NoFatalFailureTest, MessageIsStreamable) {
+ TestPartResultArray gtest_failures;
+ {
+ ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);
+ EXPECT_NO_FATAL_FAILURE(FAIL() << "foo") << "my message";
+ }
+ ASSERT_EQ(2, gtest_failures.size());
+ EXPECT_EQ(TestPartResult::kNonFatalFailure,
+ gtest_failures.GetTestPartResult(0).type());
+ EXPECT_EQ(TestPartResult::kNonFatalFailure,
+ gtest_failures.GetTestPartResult(1).type());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "foo",
+ gtest_failures.GetTestPartResult(0).message());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "my message",
+ gtest_failures.GetTestPartResult(1).message());
+}
+
+// Tests non-string assertions.
+
+// Tests EqFailure(), used for implementing *EQ* assertions.
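+// The cases below cover expressions whose text differs from their value (the
+// "Actual:"/"Which is:" lines are printed) versus literal arguments (those
+// lines are omitted), with and without the ignoring-case flag.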
+TEST(AssertionTest, EqFailure) {
+ const String foo_val("5"), bar_val("6");
+ const String msg1(
+ EqFailure("foo", "bar", foo_val, bar_val, false)
+ .failure_message());
+ EXPECT_STREQ(
+ "Value of: bar\n"
+ " Actual: 6\n"
+ "Expected: foo\n"
+ "Which is: 5",
+ msg1.c_str());
+
+ const String msg2(
+ EqFailure("foo", "6", foo_val, bar_val, false)
+ .failure_message());
+ EXPECT_STREQ(
+ "Value of: 6\n"
+ "Expected: foo\n"
+ "Which is: 5",
+ msg2.c_str());
+
+ const String msg3(
+ EqFailure("5", "bar", foo_val, bar_val, false)
+ .failure_message());
+ EXPECT_STREQ(
+ "Value of: bar\n"
+ " Actual: 6\n"
+ "Expected: 5",
+ msg3.c_str());
+
+ const String msg4(
+ EqFailure("5", "6", foo_val, bar_val, false).failure_message());
+ EXPECT_STREQ(
+ "Value of: 6\n"
+ "Expected: 5",
+ msg4.c_str());
+
+ const String msg5(
+ EqFailure("foo", "bar",
+ String("\"x\""), String("\"y\""),
+ true).failure_message());
+ EXPECT_STREQ(
+ "Value of: bar\n"
+ " Actual: \"y\"\n"
+ "Expected: foo (ignoring case)\n"
+ "Which is: \"x\"",
+ msg5.c_str());
+}
+
+// Tests AppendUserMessage(), used for implementing the *EQ* macros.
+TEST(AssertionTest, AppendUserMessage) {
+ const String foo("foo");
+
+ Message msg;
+ EXPECT_STREQ("foo",
+ AppendUserMessage(foo, msg).c_str());
+
+ msg << "bar";
+ EXPECT_STREQ("foo\nbar",
+ AppendUserMessage(foo, msg).c_str());
+}
+
+#ifdef __BORLANDC__
+// Silences warnings: "Condition is always true", "Unreachable code"
+#pragma option push -w-ccc -w-rch
+#endif
+
+// Tests ASSERT_TRUE.
+TEST(AssertionTest, ASSERT_TRUE) {
+ ASSERT_TRUE(2 > 1); // NOLINT
+ EXPECT_FATAL_FAILURE(ASSERT_TRUE(2 < 1),
+ "2 < 1");
+}
+
+// Tests ASSERT_TRUE(predicate) for predicates returning AssertionResult.
+TEST(AssertionTest, AssertTrueWithAssertionResult) {
+ ASSERT_TRUE(ResultIsEven(2));
+#if !defined(__BORLANDC__) || __BORLANDC__ >= 0x600
+  // ICEs in C++Builder 2007.
+ EXPECT_FATAL_FAILURE(ASSERT_TRUE(ResultIsEven(3)),
+ "Value of: ResultIsEven(3)\n"
+ " Actual: false (3 is odd)\n"
+ "Expected: true");
+#endif
+ ASSERT_TRUE(ResultIsEvenNoExplanation(2));
+ EXPECT_FATAL_FAILURE(ASSERT_TRUE(ResultIsEvenNoExplanation(3)),
+ "Value of: ResultIsEvenNoExplanation(3)\n"
+ " Actual: false (3 is odd)\n"
+ "Expected: true");
+}
+
+// Tests ASSERT_FALSE.
+TEST(AssertionTest, ASSERT_FALSE) {
+ ASSERT_FALSE(2 < 1); // NOLINT
+ EXPECT_FATAL_FAILURE(ASSERT_FALSE(2 > 1),
+ "Value of: 2 > 1\n"
+ " Actual: true\n"
+ "Expected: false");
+}
+
+// Tests ASSERT_FALSE(predicate) for predicates returning AssertionResult.
+TEST(AssertionTest, AssertFalseWithAssertionResult) {
+ ASSERT_FALSE(ResultIsEven(3));
+#if !defined(__BORLANDC__) || __BORLANDC__ >= 0x600
+  // ICEs in C++Builder 2007.
+ EXPECT_FATAL_FAILURE(ASSERT_FALSE(ResultIsEven(2)),
+ "Value of: ResultIsEven(2)\n"
+ " Actual: true (2 is even)\n"
+ "Expected: false");
+#endif
+ ASSERT_FALSE(ResultIsEvenNoExplanation(3));
+ EXPECT_FATAL_FAILURE(ASSERT_FALSE(ResultIsEvenNoExplanation(2)),
+ "Value of: ResultIsEvenNoExplanation(2)\n"
+ " Actual: true\n"
+ "Expected: false");
+}
+
+#ifdef __BORLANDC__
+// Restores warnings after previous "#pragma option push" suppressed them
+#pragma option pop
+#endif
+
+// Tests using ASSERT_EQ on double values. The purpose is to make
+// sure that the specialization we did for integer and anonymous enums
+// isn't used for double arguments.
+TEST(ExpectTest, ASSERT_EQ_Double) {
+ // A success.
+ ASSERT_EQ(5.6, 5.6);
+
+ // A failure.
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(5.1, 5.2),
+ "5.1");
+}
+
+// Tests ASSERT_EQ.
+TEST(AssertionTest, ASSERT_EQ) {
+ ASSERT_EQ(5, 2 + 3);
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(5, 2*3),
+ "Value of: 2*3\n"
+ " Actual: 6\n"
+ "Expected: 5");
+}
+
+// Tests ASSERT_EQ(NULL, pointer).
+#if GTEST_CAN_COMPARE_NULL
+TEST(AssertionTest, ASSERT_EQ_NULL) {
+ // A success.
+ const char* p = NULL;
+  // Some older GCC versions may issue a spurious warning in this or the next
+ // assertion statement. This warning should not be suppressed with
+ // static_cast since the test verifies the ability to use bare NULL as the
+ // expected parameter to the macro.
+ ASSERT_EQ(NULL, p);
+
+ // A failure.
+ static int n = 0;
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(NULL, &n),
+ "Value of: &n\n");
+}
+#endif // GTEST_CAN_COMPARE_NULL
+
+// Tests ASSERT_EQ(0, non_pointer). Since the literal 0 can be
+// treated as a null pointer by the compiler, we need to make sure
+// that ASSERT_EQ(0, non_pointer) isn't interpreted by Google Test as
+// ASSERT_EQ(static_cast<void*>(NULL), non_pointer).
+TEST(ExpectTest, ASSERT_EQ_0) {
+ int n = 0;
+
+ // A success.
+ ASSERT_EQ(0, n);
+
+ // A failure.
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(0, 5.6),
+ "Expected: 0");
+}
+
+// Tests ASSERT_NE.
+TEST(AssertionTest, ASSERT_NE) {
+ ASSERT_NE(6, 7);
+ EXPECT_FATAL_FAILURE(ASSERT_NE('a', 'a'),
+ "Expected: ('a') != ('a'), "
+ "actual: 'a' (97, 0x61) vs 'a' (97, 0x61)");
+}
+
+// Tests ASSERT_LE.
+TEST(AssertionTest, ASSERT_LE) {
+ ASSERT_LE(2, 3);
+ ASSERT_LE(2, 2);
+ EXPECT_FATAL_FAILURE(ASSERT_LE(2, 0),
+ "Expected: (2) <= (0), actual: 2 vs 0");
+}
+
+// Tests ASSERT_LT.
+TEST(AssertionTest, ASSERT_LT) {
+ ASSERT_LT(2, 3);
+ EXPECT_FATAL_FAILURE(ASSERT_LT(2, 2),
+ "Expected: (2) < (2), actual: 2 vs 2");
+}
+
+// Tests ASSERT_GE.
+TEST(AssertionTest, ASSERT_GE) {
+ ASSERT_GE(2, 1);
+ ASSERT_GE(2, 2);
+ EXPECT_FATAL_FAILURE(ASSERT_GE(2, 3),
+ "Expected: (2) >= (3), actual: 2 vs 3");
+}
+
+// Tests ASSERT_GT.
+TEST(AssertionTest, ASSERT_GT) {
+ ASSERT_GT(2, 1);
+ EXPECT_FATAL_FAILURE(ASSERT_GT(2, 2),
+ "Expected: (2) > (2), actual: 2 vs 2");
+}
+
+#if GTEST_HAS_EXCEPTIONS
+
+void ThrowNothing() {}
+
+// Tests ASSERT_THROW.
+TEST(AssertionTest, ASSERT_THROW) {
+ ASSERT_THROW(ThrowAnInteger(), int);
+
+#ifndef __BORLANDC__
+  // ICEs in C++Builder 2007 and 2009.
+ EXPECT_FATAL_FAILURE(
+ ASSERT_THROW(ThrowAnInteger(), bool),
+ "Expected: ThrowAnInteger() throws an exception of type bool.\n"
+ " Actual: it throws a different type.");
+#endif
+
+ EXPECT_FATAL_FAILURE(
+ ASSERT_THROW(ThrowNothing(), bool),
+ "Expected: ThrowNothing() throws an exception of type bool.\n"
+ " Actual: it throws nothing.");
+}
+
+// Tests ASSERT_NO_THROW.
+TEST(AssertionTest, ASSERT_NO_THROW) {
+ ASSERT_NO_THROW(ThrowNothing());
+ EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowAnInteger()),
+ "Expected: ThrowAnInteger() doesn't throw an exception."
+ "\n Actual: it throws.");
+}
+
+// Tests ASSERT_ANY_THROW.
+TEST(AssertionTest, ASSERT_ANY_THROW) {
+ ASSERT_ANY_THROW(ThrowAnInteger());
+ EXPECT_FATAL_FAILURE(
+ ASSERT_ANY_THROW(ThrowNothing()),
+ "Expected: ThrowNothing() throws an exception.\n"
+ " Actual: it doesn't.");
+}
+
+#endif // GTEST_HAS_EXCEPTIONS
+
+// Makes sure we deal with the precedence of <<. This test should
+// compile.
+TEST(AssertionTest, AssertPrecedence) {
+ ASSERT_EQ(1 < 2, true);
+ ASSERT_EQ(true && false, false);
+}
+
+// A subroutine used by the following test.
+void TestEq1(int x) {
+ ASSERT_EQ(1, x);
+}
+
+// Tests calling a test subroutine that's not part of a fixture.
+TEST(AssertionTest, NonFixtureSubroutine) {
+ EXPECT_FATAL_FAILURE(TestEq1(2),
+ "Value of: x");
+}
+
+// An uncopyable class.
+class Uncopyable {
+ public:
+ explicit Uncopyable(int a_value) : value_(a_value) {}
+
+ int value() const { return value_; }
+ bool operator==(const Uncopyable& rhs) const {
+ return value() == rhs.value();
+ }
+ private:
+ // This constructor deliberately has no implementation, as we don't
+ // want this class to be copyable.
+ Uncopyable(const Uncopyable&); // NOLINT
+
+ int value_;
+};
+
+::std::ostream& operator<<(::std::ostream& os, const Uncopyable& value) {
+ return os << value.value();
+}
+
+
+bool IsPositiveUncopyable(const Uncopyable& x) {
+ return x.value() > 0;
+}
+
+// A subroutine used by the following test.
+void TestAssertNonPositive() {
+ Uncopyable y(-1);
+ ASSERT_PRED1(IsPositiveUncopyable, y);
+}
+// A subroutine used by the following test.
+void TestAssertEqualsUncopyable() {
+ Uncopyable x(5);
+ Uncopyable y(-1);
+ ASSERT_EQ(x, y);
+}
+
+// Tests that uncopyable objects can be used in assertions.
+TEST(AssertionTest, AssertWorksWithUncopyableObject) {
+ Uncopyable x(5);
+ ASSERT_PRED1(IsPositiveUncopyable, x);
+ ASSERT_EQ(x, x);
+ EXPECT_FATAL_FAILURE(TestAssertNonPositive(),
+ "IsPositiveUncopyable(y) evaluates to false, where\ny evaluates to -1");
+ EXPECT_FATAL_FAILURE(TestAssertEqualsUncopyable(),
+ "Value of: y\n Actual: -1\nExpected: x\nWhich is: 5");
+}
+
+// Tests that uncopyable objects can be used in expects.
+TEST(AssertionTest, ExpectWorksWithUncopyableObject) {
+ Uncopyable x(5);
+ EXPECT_PRED1(IsPositiveUncopyable, x);
+ Uncopyable y(-1);
+ EXPECT_NONFATAL_FAILURE(EXPECT_PRED1(IsPositiveUncopyable, y),
+ "IsPositiveUncopyable(y) evaluates to false, where\ny evaluates to -1");
+ EXPECT_EQ(x, x);
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(x, y),
+ "Value of: y\n Actual: -1\nExpected: x\nWhich is: 5");
+}
+
+
+// The version of gcc used in XCode 2.2 has a bug and doesn't allow
+// anonymous enums in assertions. Therefore the following test is not
+// done on Mac.
+// Sun Studio also rejects this code.
+#if !GTEST_OS_MAC && !defined(__SUNPRO_CC)
+
+// Tests using assertions with anonymous enums.
+enum {
+ CASE_A = -1,
+#if GTEST_OS_LINUX
+ // We want to test the case where the size of the anonymous enum is
+ // larger than sizeof(int), to make sure our implementation of the
+ // assertions doesn't truncate the enums. However, MSVC
+ // (incorrectly) doesn't allow an enum value to exceed the range of
+ // an int, so this has to be conditionally compiled.
+ //
+ // On Linux, CASE_B and CASE_A have the same value when truncated to
+ // int size. We want to test whether this will confuse the
+ // assertions.
+ CASE_B = testing::internal::kMaxBiggestInt,
+#else
+ CASE_B = INT_MAX,
+#endif // GTEST_OS_LINUX
+};
+
+TEST(AssertionTest, AnonymousEnum) {
+#if GTEST_OS_LINUX
+ EXPECT_EQ(static_cast<int>(CASE_A), static_cast<int>(CASE_B));
+#endif // GTEST_OS_LINUX
+
+ EXPECT_EQ(CASE_A, CASE_A);
+ EXPECT_NE(CASE_A, CASE_B);
+ EXPECT_LT(CASE_A, CASE_B);
+ EXPECT_LE(CASE_A, CASE_B);
+ EXPECT_GT(CASE_B, CASE_A);
+ EXPECT_GE(CASE_A, CASE_A);
+ EXPECT_NONFATAL_FAILURE(EXPECT_GE(CASE_A, CASE_B),
+ "(CASE_A) >= (CASE_B)");
+
+ ASSERT_EQ(CASE_A, CASE_A);
+ ASSERT_NE(CASE_A, CASE_B);
+ ASSERT_LT(CASE_A, CASE_B);
+ ASSERT_LE(CASE_A, CASE_B);
+ ASSERT_GT(CASE_B, CASE_A);
+ ASSERT_GE(CASE_A, CASE_A);
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(CASE_A, CASE_B),
+ "Value of: CASE_B");
+}
+
+#endif // !GTEST_OS_MAC && !defined(__SUNPRO_CC)
+
+#if GTEST_OS_WINDOWS
+
+static HRESULT UnexpectedHRESULTFailure() {
+ return E_UNEXPECTED;
+}
+
+static HRESULT OkHRESULTSuccess() {
+ return S_OK;
+}
+
+static HRESULT FalseHRESULTSuccess() {
+ return S_FALSE;
+}
+
+// The HRESULT assertion tests cover both zero and non-zero success codes,
+// as well as the failure message produced for each.
+//
+// Windows CE doesn't support message texts.
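+// (S_OK is 0 and S_FALSE is 1; both satisfy SUCCEEDED(), which is why the
+// assertions below accept either value.)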
+TEST(HRESULTAssertionTest, EXPECT_HRESULT_SUCCEEDED) {
+ EXPECT_HRESULT_SUCCEEDED(S_OK);
+ EXPECT_HRESULT_SUCCEEDED(S_FALSE);
+
+ EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_SUCCEEDED(UnexpectedHRESULTFailure()),
+ "Expected: (UnexpectedHRESULTFailure()) succeeds.\n"
+ " Actual: 0x8000FFFF");
+}
+
+TEST(HRESULTAssertionTest, ASSERT_HRESULT_SUCCEEDED) {
+ ASSERT_HRESULT_SUCCEEDED(S_OK);
+ ASSERT_HRESULT_SUCCEEDED(S_FALSE);
+
+ EXPECT_FATAL_FAILURE(ASSERT_HRESULT_SUCCEEDED(UnexpectedHRESULTFailure()),
+ "Expected: (UnexpectedHRESULTFailure()) succeeds.\n"
+ " Actual: 0x8000FFFF");
+}
+
+TEST(HRESULTAssertionTest, EXPECT_HRESULT_FAILED) {
+ EXPECT_HRESULT_FAILED(E_UNEXPECTED);
+
+ EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_FAILED(OkHRESULTSuccess()),
+ "Expected: (OkHRESULTSuccess()) fails.\n"
+ " Actual: 0x00000000");
+ EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_FAILED(FalseHRESULTSuccess()),
+ "Expected: (FalseHRESULTSuccess()) fails.\n"
+ " Actual: 0x00000001");
+}
+
+TEST(HRESULTAssertionTest, ASSERT_HRESULT_FAILED) {
+ ASSERT_HRESULT_FAILED(E_UNEXPECTED);
+
+#ifndef __BORLANDC__
+  // ICEs in C++Builder 2007 and 2009.
+ EXPECT_FATAL_FAILURE(ASSERT_HRESULT_FAILED(OkHRESULTSuccess()),
+ "Expected: (OkHRESULTSuccess()) fails.\n"
+ " Actual: 0x00000000");
+#endif
+ EXPECT_FATAL_FAILURE(ASSERT_HRESULT_FAILED(FalseHRESULTSuccess()),
+ "Expected: (FalseHRESULTSuccess()) fails.\n"
+ " Actual: 0x00000001");
+}
+
+// Tests that streaming to the HRESULT macros works.
+TEST(HRESULTAssertionTest, Streaming) {
+ EXPECT_HRESULT_SUCCEEDED(S_OK) << "unexpected failure";
+ ASSERT_HRESULT_SUCCEEDED(S_OK) << "unexpected failure";
+ EXPECT_HRESULT_FAILED(E_UNEXPECTED) << "unexpected failure";
+ ASSERT_HRESULT_FAILED(E_UNEXPECTED) << "unexpected failure";
+
+ EXPECT_NONFATAL_FAILURE(
+ EXPECT_HRESULT_SUCCEEDED(E_UNEXPECTED) << "expected failure",
+ "expected failure");
+
+#ifndef __BORLANDC__
+  // ICEs in C++Builder 2007 and 2009.
+ EXPECT_FATAL_FAILURE(
+ ASSERT_HRESULT_SUCCEEDED(E_UNEXPECTED) << "expected failure",
+ "expected failure");
+#endif
+
+ EXPECT_NONFATAL_FAILURE(
+ EXPECT_HRESULT_FAILED(S_OK) << "expected failure",
+ "expected failure");
+
+ EXPECT_FATAL_FAILURE(
+ ASSERT_HRESULT_FAILED(S_OK) << "expected failure",
+ "expected failure");
+}
+
+#endif // GTEST_OS_WINDOWS
+
+#ifdef __BORLANDC__
+// Silences warnings: "Condition is always true", "Unreachable code"
+#pragma option push -w-ccc -w-rch
+#endif
+
+// Tests that the assertion macros behave like single statements.
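+// The if/else branches below deliberately omit braces: if a macro expanded to
+// more than one statement, these snippets would either fail to compile or
+// attach the else clause to the wrong if.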
+TEST(AssertionSyntaxTest, BasicAssertionsBehavesLikeSingleStatement) {
+ if (AlwaysFalse())
+ ASSERT_TRUE(false) << "This should never be executed; "
+ "It's a compilation test only.";
+
+ if (AlwaysTrue())
+ EXPECT_FALSE(false);
+ else
+ ; // NOLINT
+
+ if (AlwaysFalse())
+ ASSERT_LT(1, 3);
+
+ if (AlwaysFalse())
+ ; // NOLINT
+ else
+ EXPECT_GT(3, 2) << "";
+}
+
+#if GTEST_HAS_EXCEPTIONS
+// Tests that the compiler will not complain about unreachable code in the
+// EXPECT_THROW/EXPECT_ANY_THROW/EXPECT_NO_THROW macros.
+TEST(ExpectThrowTest, DoesNotGenerateUnreachableCodeWarning) {
+ int n = 0;
+
+ EXPECT_THROW(throw 1, int);
+ EXPECT_NONFATAL_FAILURE(EXPECT_THROW(n++, int), "");
+ EXPECT_NONFATAL_FAILURE(EXPECT_THROW(throw 1, const char*), "");
+ EXPECT_NO_THROW(n++);
+ EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(throw 1), "");
+ EXPECT_ANY_THROW(throw 1);
+ EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(n++), "");
+}
+
+TEST(AssertionSyntaxTest, ExceptionAssertionsBehavesLikeSingleStatement) {
+ if (AlwaysFalse())
+ EXPECT_THROW(ThrowNothing(), bool);
+
+ if (AlwaysTrue())
+ EXPECT_THROW(ThrowAnInteger(), int);
+ else
+ ; // NOLINT
+
+ if (AlwaysFalse())
+ EXPECT_NO_THROW(ThrowAnInteger());
+
+ if (AlwaysTrue())
+ EXPECT_NO_THROW(ThrowNothing());
+ else
+ ; // NOLINT
+
+ if (AlwaysFalse())
+ EXPECT_ANY_THROW(ThrowNothing());
+
+ if (AlwaysTrue())
+ EXPECT_ANY_THROW(ThrowAnInteger());
+ else
+ ; // NOLINT
+}
+#endif // GTEST_HAS_EXCEPTIONS
+
+TEST(AssertionSyntaxTest, NoFatalFailureAssertionsBehavesLikeSingleStatement) {
+ if (AlwaysFalse())
+ EXPECT_NO_FATAL_FAILURE(FAIL()) << "This should never be executed. "
+ << "It's a compilation test only.";
+ else
+ ; // NOLINT
+
+ if (AlwaysFalse())
+ ASSERT_NO_FATAL_FAILURE(FAIL()) << "";
+ else
+ ; // NOLINT
+
+ if (AlwaysTrue())
+ EXPECT_NO_FATAL_FAILURE(SUCCEED());
+ else
+ ; // NOLINT
+
+ if (AlwaysFalse())
+ ; // NOLINT
+ else
+ ASSERT_NO_FATAL_FAILURE(SUCCEED());
+}
+
+// Tests that the assertion macros work well with switch statements.
+TEST(AssertionSyntaxTest, WorksWithSwitch) {
+ switch (0) {
+ case 1:
+ break;
+ default:
+ ASSERT_TRUE(true);
+ }
+
+ switch (0)
+ case 0:
+ EXPECT_FALSE(false) << "EXPECT_FALSE failed in switch case";
+
+ // Binary assertions are implemented using a different code path
+ // than the Boolean assertions. Hence we test them separately.
+ switch (0) {
+ case 1:
+ default:
+ ASSERT_EQ(1, 1) << "ASSERT_EQ failed in default switch handler";
+ }
+
+ switch (0)
+ case 0:
+ EXPECT_NE(1, 2);
+}
+
+#if GTEST_HAS_EXCEPTIONS
+
+void ThrowAString() {
+ throw "String";
+}
+
+// Test that the exception assertion macros compile and work with const
+// type qualifier.
+TEST(AssertionSyntaxTest, WorksWithConst) {
+ ASSERT_THROW(ThrowAString(), const char*);
+
+ EXPECT_THROW(ThrowAString(), const char*);
+}
+
+#endif // GTEST_HAS_EXCEPTIONS
+
+} // namespace
+
+namespace testing {
+
+// Tests that Google Test tracks SUCCEED*.
+TEST(SuccessfulAssertionTest, SUCCEED) {
+ SUCCEED();
+ SUCCEED() << "OK";
+ EXPECT_EQ(2, GetUnitTestImpl()->current_test_result()->total_part_count());
+}
+
+// Tests that Google Test doesn't track successful EXPECT_*.
+TEST(SuccessfulAssertionTest, EXPECT) {
+ EXPECT_TRUE(true);
+ EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
+}
+
+// Tests that Google Test doesn't track successful EXPECT_STR*.
+TEST(SuccessfulAssertionTest, EXPECT_STR) {
+ EXPECT_STREQ("", "");
+ EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
+}
+
+// Tests that Google Test doesn't track successful ASSERT_*.
+TEST(SuccessfulAssertionTest, ASSERT) {
+ ASSERT_TRUE(true);
+ EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
+}
+
+// Tests that Google Test doesn't track successful ASSERT_STR*.
+TEST(SuccessfulAssertionTest, ASSERT_STR) {
+ ASSERT_STREQ("", "");
+ EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
+}
+
+} // namespace testing
+
+namespace {
+
+// Tests EXPECT_TRUE.
+TEST(ExpectTest, EXPECT_TRUE) {
+ EXPECT_TRUE(2 > 1); // NOLINT
+ EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(2 < 1),
+ "Value of: 2 < 1\n"
+ " Actual: false\n"
+ "Expected: true");
+ EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(2 > 3),
+ "2 > 3");
+}
+
+// Tests EXPECT_TRUE(predicate) for predicates returning AssertionResult.
+TEST(ExpectTest, ExpectTrueWithAssertionResult) {
+ EXPECT_TRUE(ResultIsEven(2));
+ EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(ResultIsEven(3)),
+ "Value of: ResultIsEven(3)\n"
+ " Actual: false (3 is odd)\n"
+ "Expected: true");
+ EXPECT_TRUE(ResultIsEvenNoExplanation(2));
+ EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(ResultIsEvenNoExplanation(3)),
+ "Value of: ResultIsEvenNoExplanation(3)\n"
+ " Actual: false (3 is odd)\n"
+ "Expected: true");
+}
+
+// Tests EXPECT_FALSE.
+TEST(ExpectTest, EXPECT_FALSE) {
+ EXPECT_FALSE(2 < 1); // NOLINT
+ EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(2 > 1),
+ "Value of: 2 > 1\n"
+ " Actual: true\n"
+ "Expected: false");
+ EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(2 < 3),
+ "2 < 3");
+}
+
+// Tests EXPECT_FALSE(predicate) for predicates returning AssertionResult.
+TEST(ExpectTest, ExpectFalseWithAssertionResult) {
+ EXPECT_FALSE(ResultIsEven(3));
+ EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(ResultIsEven(2)),
+ "Value of: ResultIsEven(2)\n"
+ " Actual: true (2 is even)\n"
+ "Expected: false");
+ EXPECT_FALSE(ResultIsEvenNoExplanation(3));
+ EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(ResultIsEvenNoExplanation(2)),
+ "Value of: ResultIsEvenNoExplanation(2)\n"
+ " Actual: true\n"
+ "Expected: false");
+}
+
+#ifdef __BORLANDC__
+// Restores warnings after previous "#pragma option push" suppressed them
+#pragma option pop
+#endif
+
+// Tests EXPECT_EQ.
+TEST(ExpectTest, EXPECT_EQ) {
+ EXPECT_EQ(5, 2 + 3);
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5, 2*3),
+ "Value of: 2*3\n"
+ " Actual: 6\n"
+ "Expected: 5");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5, 2 - 3),
+ "2 - 3");
+}
+
+// Tests using EXPECT_EQ on double values. The purpose is to make
+// sure that the specialization we did for integer and anonymous enums
+// isn't used for double arguments.
+TEST(ExpectTest, EXPECT_EQ_Double) {
+ // A success.
+ EXPECT_EQ(5.6, 5.6);
+
+ // A failure.
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5.1, 5.2),
+ "5.1");
+}
+
+#if GTEST_CAN_COMPARE_NULL
+// Tests EXPECT_EQ(NULL, pointer).
+TEST(ExpectTest, EXPECT_EQ_NULL) {
+ // A success.
+ const char* p = NULL;
+  // Some older GCC versions may issue a spurious warning in this or the next
+ // assertion statement. This warning should not be suppressed with
+ // static_cast since the test verifies the ability to use bare NULL as the
+ // expected parameter to the macro.
+ EXPECT_EQ(NULL, p);
+
+ // A failure.
+ int n = 0;
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(NULL, &n),
+ "Value of: &n\n");
+}
+#endif // GTEST_CAN_COMPARE_NULL
+
+// Tests EXPECT_EQ(0, non_pointer). Since the literal 0 can be
+// treated as a null pointer by the compiler, we need to make sure
+// that EXPECT_EQ(0, non_pointer) isn't interpreted by Google Test as
+// EXPECT_EQ(static_cast<void*>(NULL), non_pointer).
+TEST(ExpectTest, EXPECT_EQ_0) {
+ int n = 0;
+
+ // A success.
+ EXPECT_EQ(0, n);
+
+ // A failure.
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(0, 5.6),
+ "Expected: 0");
+}
+
+// Tests EXPECT_NE.
+TEST(ExpectTest, EXPECT_NE) {
+ EXPECT_NE(6, 7);
+
+ EXPECT_NONFATAL_FAILURE(EXPECT_NE('a', 'a'),
+ "Expected: ('a') != ('a'), "
+ "actual: 'a' (97, 0x61) vs 'a' (97, 0x61)");
+ EXPECT_NONFATAL_FAILURE(EXPECT_NE(2, 2),
+ "2");
+ char* const p0 = NULL;
+ EXPECT_NONFATAL_FAILURE(EXPECT_NE(p0, p0),
+ "p0");
+  // The only way to get the Nokia compiler to compile the cast is to have a
+  // separate void* variable first. Putting the two casts on the same line
+  // doesn't work, and neither does a direct C-style cast to char*.
+ void* pv1 = (void*)0x1234; // NOLINT
+ char* const p1 = reinterpret_cast<char*>(pv1);
+ EXPECT_NONFATAL_FAILURE(EXPECT_NE(p1, p1),
+ "p1");
+}
+
+// Tests EXPECT_LE.
+TEST(ExpectTest, EXPECT_LE) {
+ EXPECT_LE(2, 3);
+ EXPECT_LE(2, 2);
+ EXPECT_NONFATAL_FAILURE(EXPECT_LE(2, 0),
+ "Expected: (2) <= (0), actual: 2 vs 0");
+ EXPECT_NONFATAL_FAILURE(EXPECT_LE(1.1, 0.9),
+ "(1.1) <= (0.9)");
+}
+
+// Tests EXPECT_LT.
+TEST(ExpectTest, EXPECT_LT) {
+ EXPECT_LT(2, 3);
+ EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 2),
+ "Expected: (2) < (2), actual: 2 vs 2");
+ EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 1),
+ "(2) < (1)");
+}
+
+// Tests EXPECT_GE.
+TEST(ExpectTest, EXPECT_GE) {
+ EXPECT_GE(2, 1);
+ EXPECT_GE(2, 2);
+ EXPECT_NONFATAL_FAILURE(EXPECT_GE(2, 3),
+ "Expected: (2) >= (3), actual: 2 vs 3");
+ EXPECT_NONFATAL_FAILURE(EXPECT_GE(0.9, 1.1),
+ "(0.9) >= (1.1)");
+}
+
+// Tests EXPECT_GT.
+TEST(ExpectTest, EXPECT_GT) {
+ EXPECT_GT(2, 1);
+ EXPECT_NONFATAL_FAILURE(EXPECT_GT(2, 2),
+ "Expected: (2) > (2), actual: 2 vs 2");
+ EXPECT_NONFATAL_FAILURE(EXPECT_GT(2, 3),
+ "(2) > (3)");
+}
+
+#if GTEST_HAS_EXCEPTIONS
+
+// Tests EXPECT_THROW.
+TEST(ExpectTest, EXPECT_THROW) {
+ EXPECT_THROW(ThrowAnInteger(), int);
+ EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowAnInteger(), bool),
+ "Expected: ThrowAnInteger() throws an exception of "
+ "type bool.\n Actual: it throws a different type.");
+ EXPECT_NONFATAL_FAILURE(
+ EXPECT_THROW(ThrowNothing(), bool),
+ "Expected: ThrowNothing() throws an exception of type bool.\n"
+ " Actual: it throws nothing.");
+}
+
+// Tests EXPECT_NO_THROW.
+TEST(ExpectTest, EXPECT_NO_THROW) {
+ EXPECT_NO_THROW(ThrowNothing());
+ EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowAnInteger()),
+ "Expected: ThrowAnInteger() doesn't throw an "
+ "exception.\n Actual: it throws.");
+}
+
+// Tests EXPECT_ANY_THROW.
+TEST(ExpectTest, EXPECT_ANY_THROW) {
+ EXPECT_ANY_THROW(ThrowAnInteger());
+ EXPECT_NONFATAL_FAILURE(
+ EXPECT_ANY_THROW(ThrowNothing()),
+ "Expected: ThrowNothing() throws an exception.\n"
+ " Actual: it doesn't.");
+}
+
+#endif // GTEST_HAS_EXCEPTIONS
+
+// Make sure we deal with the precedence of <<.
+TEST(ExpectTest, ExpectPrecedence) {
+ EXPECT_EQ(1 < 2, true);
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(true, true && false),
+ "Value of: true && false");
+}
+
+
+// Tests the StreamableToString() function.
+
+// Tests using StreamableToString() on a scalar.
+TEST(StreamableToStringTest, Scalar) {
+ EXPECT_STREQ("5", StreamableToString(5).c_str());
+}
+
+// Tests using StreamableToString() on a non-char pointer.
+TEST(StreamableToStringTest, Pointer) {
+ int n = 0;
+ int* p = &n;
+ EXPECT_STRNE("(null)", StreamableToString(p).c_str());
+}
+
+// Tests using StreamableToString() on a NULL non-char pointer.
+TEST(StreamableToStringTest, NullPointer) {
+ int* p = NULL;
+ EXPECT_STREQ("(null)", StreamableToString(p).c_str());
+}
+
+// Tests using StreamableToString() on a C string.
+TEST(StreamableToStringTest, CString) {
+ EXPECT_STREQ("Foo", StreamableToString("Foo").c_str());
+}
+
+// Tests using StreamableToString() on a NULL C string.
+TEST(StreamableToStringTest, NullCString) {
+ char* p = NULL;
+ EXPECT_STREQ("(null)", StreamableToString(p).c_str());
+}
+
+// Tests using streamable values as assertion messages.
+
+// Tests using std::string as an assertion message.
+TEST(StreamableTest, string) {
+ static const std::string str(
+ "This failure message is a std::string, and is expected.");
+ EXPECT_FATAL_FAILURE(FAIL() << str,
+ str.c_str());
+}
+
+// Tests that we can output strings containing embedded NULs.
+// Limited to Linux because we can only do this with std::strings.
+TEST(StreamableTest, stringWithEmbeddedNUL) {
+ static const char char_array_with_nul[] =
+ "Here's a NUL\0 and some more string";
+ static const std::string string_with_nul(char_array_with_nul,
+ sizeof(char_array_with_nul)
+ - 1); // drops the trailing NUL
+ EXPECT_FATAL_FAILURE(FAIL() << string_with_nul,
+ "Here's a NUL\\0 and some more string");
+}
+
+// Tests that we can output a NUL char.
+TEST(StreamableTest, NULChar) {
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ FAIL() << "A NUL" << '\0' << " and some more string";
+ }, "A NUL\\0 and some more string");
+}
+
+// Tests using int as an assertion message.
+TEST(StreamableTest, int) {
+ EXPECT_FATAL_FAILURE(FAIL() << 900913,
+ "900913");
+}
+
+// Tests using NULL char pointer as an assertion message.
+//
+// In MSVC, streaming a NULL char * causes an access violation. Google Test
+// implemented a workaround (substituting "(null)" for NULL). This
+// tests whether the workaround works.
+TEST(StreamableTest, NullCharPtr) {
+ EXPECT_FATAL_FAILURE(FAIL() << static_cast<const char*>(NULL),
+ "(null)");
+}
+
+// Tests that basic IO manipulators (endl, ends, and flush) can be
+// streamed to testing::Message.
+TEST(StreamableTest, BasicIoManip) {
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ FAIL() << "Line 1." << std::endl
+ << "A NUL char " << std::ends << std::flush << " in line 2.";
+ }, "Line 1.\nA NUL char \\0 in line 2.");
+}
+
+// Tests the macros that haven't been covered so far.
+
+void AddFailureHelper(bool* aborted) {
+ *aborted = true;
+ ADD_FAILURE() << "Failure";
+ *aborted = false;
+}
+
+// Tests ADD_FAILURE.
+TEST(MacroTest, ADD_FAILURE) {
+ bool aborted = true;
+ EXPECT_NONFATAL_FAILURE(AddFailureHelper(&aborted),
+ "Failure");
+ EXPECT_FALSE(aborted);
+}
+
+// Tests FAIL.
+TEST(MacroTest, FAIL) {
+ EXPECT_FATAL_FAILURE(FAIL(),
+ "Failed");
+ EXPECT_FATAL_FAILURE(FAIL() << "Intentional failure.",
+ "Intentional failure.");
+}
+
+// Tests SUCCEED
+TEST(MacroTest, SUCCEED) {
+ SUCCEED();
+ SUCCEED() << "Explicit success.";
+}
+
+
+// Tests for EXPECT_EQ() and ASSERT_EQ().
+//
+// These tests fail *intentionally*, so that the failure messages can be
+// generated and tested.
+//
+// We have different tests for different argument types.
+
+// Tests using bool values in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, Bool) {
+ EXPECT_EQ(true, true);
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(false, true),
+ "Value of: true");
+}
+
+// Tests using int values in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, Int) {
+ ASSERT_EQ(32, 32);
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(32, 33),
+ "33");
+}
+
+// Tests using time_t values in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, Time_T) {
+ EXPECT_EQ(static_cast<time_t>(0),
+ static_cast<time_t>(0));
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<time_t>(0),
+ static_cast<time_t>(1234)),
+ "1234");
+}
+
+// Tests using char values in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, Char) {
+ ASSERT_EQ('z', 'z');
+ const char ch = 'b';
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ('\0', ch),
+ "ch");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ('a', ch),
+ "ch");
+}
+
+// Tests using wchar_t values in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, WideChar) {
+ EXPECT_EQ(L'b', L'b');
+
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(L'\0', L'x'),
+ "Value of: L'x'\n"
+ " Actual: L'x' (120, 0x78)\n"
+ "Expected: L'\0'\n"
+ "Which is: L'\0' (0, 0x0)");
+
+ static wchar_t wchar;
+ wchar = L'b';
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(L'a', wchar),
+ "wchar");
+ wchar = L'\x8119';
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(L'\x8120', wchar),
+ "Value of: wchar");
+}
+
+// Tests using ::std::string values in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, StdString) {
+ // Compares a const char* to an std::string that has identical
+ // content.
+ ASSERT_EQ("Test", ::std::string("Test"));
+
+ // Compares two identical std::strings.
+ static const ::std::string str1("A * in the middle");
+ static const ::std::string str2(str1);
+ EXPECT_EQ(str1, str2);
+
+ // Compares a const char* to an std::string that has different
+ // content
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ("Test", ::std::string("test")),
+ "::std::string(\"test\")");
+
+ // Compares an std::string to a char* that has different content.
+ char* const p1 = const_cast<char*>("foo");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(::std::string("bar"), p1),
+ "p1");
+
+  // Compares two std::strings that have different contents, one of
+  // which has a NUL character in the middle. This should fail.
+ static ::std::string str3(str1);
+ str3.at(2) = '\0';
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(str1, str3),
+ "Value of: str3\n"
+ " Actual: \"A \\0 in the middle\"");
+}
+
+#if GTEST_HAS_STD_WSTRING
+
+// Tests using ::std::wstring values in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, StdWideString) {
+ // Compares an std::wstring to a const wchar_t* that has identical
+ // content.
+ EXPECT_EQ(::std::wstring(L"Test\x8119"), L"Test\x8119");
+
+ // Compares two identical std::wstrings.
+ const ::std::wstring wstr1(L"A * in the middle");
+ const ::std::wstring wstr2(wstr1);
+ ASSERT_EQ(wstr1, wstr2);
+
+ // Compares an std::wstring to a const wchar_t* that has different
+ // content.
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_EQ(::std::wstring(L"Test\x8119"), L"Test\x8120");
+ }, "L\"Test\\x8120\"");
+
+  // Compares two std::wstrings that have different contents, one of
+  // which has a NUL character in the middle.
+ ::std::wstring wstr3(wstr1);
+ wstr3.at(2) = L'\0';
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(wstr1, wstr3),
+ "wstr3");
+
+ // Compares a wchar_t* to an std::wstring that has different
+ // content.
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_EQ(const_cast<wchar_t*>(L"foo"), ::std::wstring(L"bar"));
+ }, "");
+}
+
+#endif // GTEST_HAS_STD_WSTRING
+
+#if GTEST_HAS_GLOBAL_STRING
+// Tests using ::string values in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, GlobalString) {
+ // Compares a const char* to a ::string that has identical content.
+ EXPECT_EQ("Test", ::string("Test"));
+
+ // Compares two identical ::strings.
+ const ::string str1("A * in the middle");
+ const ::string str2(str1);
+ ASSERT_EQ(str1, str2);
+
+ // Compares a ::string to a const char* that has different content.
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(::string("Test"), "test"),
+ "test");
+
+  // Compares two ::strings that have different contents, one of which
+  // has a NUL character in the middle.
+ ::string str3(str1);
+ str3.at(2) = '\0';
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(str1, str3),
+ "str3");
+
+ // Compares a ::string to a char* that has different content.
+ EXPECT_FATAL_FAILURE({ // NOLINT
+ ASSERT_EQ(::string("bar"), const_cast<char*>("foo"));
+ }, "");
+}
+
+#endif // GTEST_HAS_GLOBAL_STRING
+
+#if GTEST_HAS_GLOBAL_WSTRING
+
+// Tests using ::wstring values in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, GlobalWideString) {
+ // Compares a const wchar_t* to a ::wstring that has identical content.
+ ASSERT_EQ(L"Test\x8119", ::wstring(L"Test\x8119"));
+
+ // Compares two identical ::wstrings.
+ static const ::wstring wstr1(L"A * in the middle");
+ static const ::wstring wstr2(wstr1);
+ EXPECT_EQ(wstr1, wstr2);
+
+ // Compares a const wchar_t* to a ::wstring that has different
+ // content.
+ EXPECT_NONFATAL_FAILURE({ // NOLINT
+ EXPECT_EQ(L"Test\x8120", ::wstring(L"Test\x8119"));
+ }, "Test\\x8119");
+
+ // Compares a wchar_t* to a ::wstring that has different content.
+ wchar_t* const p1 = const_cast<wchar_t*>(L"foo");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, ::wstring(L"bar")),
+ "bar");
+
+  // Compares two ::wstrings that have different contents, one of which
+  // has a NUL character in the middle.
+ static ::wstring wstr3;
+ wstr3 = wstr1;
+ wstr3.at(2) = L'\0';
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(wstr1, wstr3),
+ "wstr3");
+}
+
+#endif // GTEST_HAS_GLOBAL_WSTRING
+
+// Tests using char pointers in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, CharPointer) {
+ char* const p0 = NULL;
+  // The only way to get the Nokia compiler to compile the cast is to have a
+  // separate void* variable first. Putting the two casts on the same line
+  // doesn't work, and neither does a direct C-style cast to char*.
+ void* pv1 = (void*)0x1234; // NOLINT
+ void* pv2 = (void*)0xABC0; // NOLINT
+ char* const p1 = reinterpret_cast<char*>(pv1);
+ char* const p2 = reinterpret_cast<char*>(pv2);
+ ASSERT_EQ(p1, p1);
+
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2),
+ "Value of: p2");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2),
+ "p2");
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(reinterpret_cast<char*>(0x1234),
+ reinterpret_cast<char*>(0xABC0)),
+ "ABC0");
+}
+
+// Tests using wchar_t pointers in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, WideCharPointer) {
+ wchar_t* const p0 = NULL;
+  // The only way to get the Nokia compiler to compile the cast is to have a
+  // separate void* variable first. Putting the two casts on the same line
+  // doesn't work, and neither does a direct C-style cast to wchar_t*.
+ void* pv1 = (void*)0x1234; // NOLINT
+ void* pv2 = (void*)0xABC0; // NOLINT
+ wchar_t* const p1 = reinterpret_cast<wchar_t*>(pv1);
+ wchar_t* const p2 = reinterpret_cast<wchar_t*>(pv2);
+ EXPECT_EQ(p0, p0);
+
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2),
+ "Value of: p2");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2),
+ "p2");
+ void* pv3 = (void*)0x1234; // NOLINT
+ void* pv4 = (void*)0xABC0; // NOLINT
+ const wchar_t* p3 = reinterpret_cast<const wchar_t*>(pv3);
+ const wchar_t* p4 = reinterpret_cast<const wchar_t*>(pv4);
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p3, p4),
+ "p4");
+}
+
+// Tests using other types of pointers in {EXPECT|ASSERT}_EQ.
+TEST(EqAssertionTest, OtherPointer) {
+ ASSERT_EQ(static_cast<const int*>(NULL),
+ static_cast<const int*>(NULL));
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<const int*>(NULL),
+ reinterpret_cast<const int*>(0x1234)),
+ "0x1234");
+}
+
+// Tests the FRIEND_TEST macro.
+
+// This class has a private member we want to test. We will test it
+// both in a TEST and in a TEST_F.
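+// (FRIEND_TEST(Suite, Name) essentially expands to a friend declaration for
+// the generated Suite_Name_Test class, which is what grants access here.)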
+class Foo {
+ public:
+ Foo() {}
+
+ private:
+ int Bar() const { return 1; }
+
+ // Declares the friend tests that can access the private member
+ // Bar().
+ FRIEND_TEST(FRIEND_TEST_Test, TEST);
+ FRIEND_TEST(FRIEND_TEST_Test2, TEST_F);
+};
+
+// Tests that the FRIEND_TEST declaration allows a TEST to access a
+// class's private members. This should compile.
+TEST(FRIEND_TEST_Test, TEST) {
+ ASSERT_EQ(1, Foo().Bar());
+}
+
+// The fixture needed to test using FRIEND_TEST with TEST_F.
+class FRIEND_TEST_Test2 : public Test {
+ protected:
+ Foo foo;
+};
+
+// Tests that the FRIEND_TEST declaration allows a TEST_F to access a
+// class's private members. This should compile.
+TEST_F(FRIEND_TEST_Test2, TEST_F) {
+ ASSERT_EQ(1, foo.Bar());
+}
+
+// Tests the life cycle of Test objects.
+
+// The test fixture for testing the life cycle of Test objects.
+//
+// This class counts the number of live test objects that use this
+// fixture.
+class TestLifeCycleTest : public Test {
+ protected:
+  // Constructor. Increments the number of test objects that use
+  // this fixture.
+ TestLifeCycleTest() { count_++; }
+
+  // Destructor. Decrements the number of test objects that use this
+  // fixture.
+ ~TestLifeCycleTest() { count_--; }
+
+  // Returns the number of live test objects that use this fixture.
+ int count() const { return count_; }
+
+ private:
+ static int count_;
+};
+
+int TestLifeCycleTest::count_ = 0;
+
+// Tests the life cycle of test objects.
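+// Google Test constructs a fresh fixture object for each TEST_F and destroys
+// it before the next test runs, so exactly one instance should be alive here.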
+TEST_F(TestLifeCycleTest, Test1) {
+ // There should be only one test object in this test case that's
+ // currently alive.
+ ASSERT_EQ(1, count());
+}
+
+// Tests the life cycle of test objects.
+TEST_F(TestLifeCycleTest, Test2) {
+ // After Test1 is done and Test2 is started, there should still be
+ // only one live test object, as the object for Test1 should've been
+ // deleted.
+ ASSERT_EQ(1, count());
+}
+
+} // namespace
+
+// Tests that the copy constructor works when it is NOT optimized away by
+// the compiler.
+TEST(AssertionResultTest, CopyConstructorWorksWhenNotOptimized) {
+ // Checks that the copy constructor doesn't try to dereference NULL pointers
+ // in the source object.
+ AssertionResult r1 = AssertionSuccess();
+ AssertionResult r2 = r1;
+ // The following line is added to prevent the compiler from optimizing
+ // away the constructor call.
+ r1 << "abc";
+
+ AssertionResult r3 = r1;
+ EXPECT_EQ(static_cast<bool>(r3), static_cast<bool>(r1));
+ EXPECT_STREQ("abc", r1.message());
+}
+
+// Tests that AssertionSuccess and AssertionFailure construct
+// AssertionResult objects as expected.
+TEST(AssertionResultTest, ConstructionWorks) {
+ AssertionResult r1 = AssertionSuccess();
+ EXPECT_TRUE(r1);
+ EXPECT_STREQ("", r1.message());
+
+ AssertionResult r2 = AssertionSuccess() << "abc";
+ EXPECT_TRUE(r2);
+ EXPECT_STREQ("abc", r2.message());
+
+ AssertionResult r3 = AssertionFailure();
+ EXPECT_FALSE(r3);
+ EXPECT_STREQ("", r3.message());
+
+ AssertionResult r4 = AssertionFailure() << "def";
+ EXPECT_FALSE(r4);
+ EXPECT_STREQ("def", r4.message());
+
+ AssertionResult r5 = AssertionFailure(Message() << "ghi");
+ EXPECT_FALSE(r5);
+ EXPECT_STREQ("ghi", r5.message());
+}
+
+// Tests that the negation flips the predicate result but keeps the message.
+TEST(AssertionResultTest, NegationWorks) {
+ AssertionResult r1 = AssertionSuccess() << "abc";
+ EXPECT_FALSE(!r1);
+ EXPECT_STREQ("abc", (!r1).message());
+
+ AssertionResult r2 = AssertionFailure() << "def";
+ EXPECT_TRUE(!r2);
+ EXPECT_STREQ("def", (!r2).message());
+}
+
+TEST(AssertionResultTest, StreamingWorks) {
+ AssertionResult r = AssertionSuccess();
+ r << "abc" << 'd' << 0 << true;
+ EXPECT_STREQ("abcd0true", r.message());
+}
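+
+// An illustrative sketch, not part of the upstream test suite: a
+// user-defined predicate can return AssertionResult and stream an
+// explanation into it, and EXPECT_TRUE() should then include that text in
+// its failure message.  The IsEvenSketch helper name is hypothetical.
+static AssertionResult IsEvenSketch(int n) {
+  if (n % 2 == 0)
+    return AssertionSuccess() << n << " is even";
+  return AssertionFailure() << n << " is odd";
+}
+
+TEST(AssertionResultTest, SketchPredicateStreamsExplanation) {
+  EXPECT_TRUE(IsEvenSketch(4));
+  // The streamed explanation is expected to appear in the failure text.
+  EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(IsEvenSketch(3)), "3 is odd");
+}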
+
+// Tests streaming a user type whose definition and operator << are
+// both in the global namespace.
+class Base {
+ public:
+ explicit Base(int an_x) : x_(an_x) {}
+ int x() const { return x_; }
+ private:
+ int x_;
+};
+std::ostream& operator<<(std::ostream& os,
+ const Base& val) {
+ return os << val.x();
+}
+std::ostream& operator<<(std::ostream& os,
+ const Base* pointer) {
+ return os << "(" << pointer->x() << ")";
+}
+
+TEST(MessageTest, CanStreamUserTypeInGlobalNameSpace) {
+ Message msg;
+ Base a(1);
+
+ msg << a << &a; // Uses ::operator<<.
+ EXPECT_STREQ("1(1)", msg.GetString().c_str());
+}
+
+// Tests streaming a user type whose definition and operator<< are
+// both in an unnamed namespace.
+namespace {
+class MyTypeInUnnamedNameSpace : public Base {
+ public:
+ explicit MyTypeInUnnamedNameSpace(int an_x): Base(an_x) {}
+};
+std::ostream& operator<<(std::ostream& os,
+ const MyTypeInUnnamedNameSpace& val) {
+ return os << val.x();
+}
+std::ostream& operator<<(std::ostream& os,
+ const MyTypeInUnnamedNameSpace* pointer) {
+ return os << "(" << pointer->x() << ")";
+}
+} // namespace
+
+TEST(MessageTest, CanStreamUserTypeInUnnamedNameSpace) {
+ Message msg;
+ MyTypeInUnnamedNameSpace a(1);
+
+ msg << a << &a; // Uses <unnamed_namespace>::operator<<.
+ EXPECT_STREQ("1(1)", msg.GetString().c_str());
+}
+
+// Tests streaming a user type whose definition and operator<< are
+// both in a user namespace.
+namespace namespace1 {
+class MyTypeInNameSpace1 : public Base {
+ public:
+ explicit MyTypeInNameSpace1(int an_x): Base(an_x) {}
+};
+std::ostream& operator<<(std::ostream& os,
+ const MyTypeInNameSpace1& val) {
+ return os << val.x();
+}
+std::ostream& operator<<(std::ostream& os,
+ const MyTypeInNameSpace1* pointer) {
+ return os << "(" << pointer->x() << ")";
+}
+} // namespace namespace1
+
+TEST(MessageTest, CanStreamUserTypeInUserNameSpace) {
+ Message msg;
+ namespace1::MyTypeInNameSpace1 a(1);
+
+ msg << a << &a; // Uses namespace1::operator<<.
+ EXPECT_STREQ("1(1)", msg.GetString().c_str());
+}
+
+// Tests streaming a user type whose definition is in a user namespace
+// but whose operator<< is in the global namespace.
+namespace namespace2 {
+class MyTypeInNameSpace2 : public ::Base {
+ public:
+ explicit MyTypeInNameSpace2(int an_x): Base(an_x) {}
+};
+} // namespace namespace2
+std::ostream& operator<<(std::ostream& os,
+ const namespace2::MyTypeInNameSpace2& val) {
+ return os << val.x();
+}
+std::ostream& operator<<(std::ostream& os,
+ const namespace2::MyTypeInNameSpace2* pointer) {
+ return os << "(" << pointer->x() << ")";
+}
+
+TEST(MessageTest, CanStreamUserTypeInUserNameSpaceWithStreamOperatorInGlobal) {
+ Message msg;
+ namespace2::MyTypeInNameSpace2 a(1);
+
+ msg << a << &a; // Uses ::operator<<.
+ EXPECT_STREQ("1(1)", msg.GetString().c_str());
+}
+
+// Tests streaming NULL pointers to testing::Message.
+TEST(MessageTest, NullPointers) {
+ Message msg;
+ char* const p1 = NULL;
+ unsigned char* const p2 = NULL;
+ int* p3 = NULL;
+ double* p4 = NULL;
+ bool* p5 = NULL;
+ Message* p6 = NULL;
+
+ msg << p1 << p2 << p3 << p4 << p5 << p6;
+ ASSERT_STREQ("(null)(null)(null)(null)(null)(null)",
+ msg.GetString().c_str());
+}
+
+// Tests streaming wide strings to testing::Message.
+TEST(MessageTest, WideStrings) {
+ // Streams a NULL of type const wchar_t*.
+ const wchar_t* const_wstr = NULL;
+ EXPECT_STREQ("(null)",
+ (Message() << const_wstr).GetString().c_str());
+
+ // Streams a NULL of type wchar_t*.
+ wchar_t* wstr = NULL;
+ EXPECT_STREQ("(null)",
+ (Message() << wstr).GetString().c_str());
+
+ // Streams a non-NULL of type const wchar_t*.
+ const_wstr = L"abc\x8119";
+ EXPECT_STREQ("abc\xe8\x84\x99",
+ (Message() << const_wstr).GetString().c_str());
+
+ // Streams a non-NULL of type wchar_t*.
+ wstr = const_cast<wchar_t*>(const_wstr);
+ EXPECT_STREQ("abc\xe8\x84\x99",
+ (Message() << wstr).GetString().c_str());
+}
+
+
+// This line tests that we can define tests in the testing namespace.
+namespace testing {
+
+// Tests the TestInfo class.
+
+class TestInfoTest : public Test {
+ protected:
+ static const TestInfo* GetTestInfo(const char* test_name) {
+ const TestCase* const test_case = GetUnitTestImpl()->
+ GetTestCase("TestInfoTest", "", NULL, NULL);
+
+ for (int i = 0; i < test_case->total_test_count(); ++i) {
+ const TestInfo* const test_info = test_case->GetTestInfo(i);
+ if (strcmp(test_name, test_info->name()) == 0)
+ return test_info;
+ }
+ return NULL;
+ }
+
+ static const TestResult* GetTestResult(
+ const TestInfo* test_info) {
+ return test_info->result();
+ }
+};
+
+// Tests TestInfo::test_case_name() and TestInfo::name().
+TEST_F(TestInfoTest, Names) {
+ const TestInfo* const test_info = GetTestInfo("Names");
+
+ ASSERT_STREQ("TestInfoTest", test_info->test_case_name());
+ ASSERT_STREQ("Names", test_info->name());
+}
+
+// Tests TestInfo::result().
+TEST_F(TestInfoTest, result) {
+ const TestInfo* const test_info = GetTestInfo("result");
+
+ // Initially, there is no TestPartResult for this test.
+ ASSERT_EQ(0, GetTestResult(test_info)->total_part_count());
+
+ // After the previous assertion, there is still none.
+ ASSERT_EQ(0, GetTestResult(test_info)->total_part_count());
+}
+
+// Tests setting up and tearing down a test case.
+
+class SetUpTestCaseTest : public Test {
+ protected:
+ // This will be called once before the first test in this test case
+ // is run.
+ static void SetUpTestCase() {
+ printf("Setting up the test case . . .\n");
+
+ // Initializes some shared resource. In this simple example, we
+ // just create a C string. More complex stuff can be done if
+ // desired.
+ shared_resource_ = "123";
+
+ // Increments the number of test cases that have been set up.
+ counter_++;
+
+ // SetUpTestCase() should be called only once.
+ EXPECT_EQ(1, counter_);
+ }
+
+ // This will be called once after the last test in this test case is
+ // run.
+ static void TearDownTestCase() {
+ printf("Tearing down the test case . . .\n");
+
+ // Decrements the number of test cases that have been set up.
+ counter_--;
+
+ // TearDownTestCase() should be called only once.
+ EXPECT_EQ(0, counter_);
+
+ // Cleans up the shared resource.
+ shared_resource_ = NULL;
+ }
+
+ // This will be called before each test in this test case.
+ virtual void SetUp() {
+ // SetUpTestCase() should be called only once, so counter_ should
+ // always be 1.
+ EXPECT_EQ(1, counter_);
+ }
+
+ // Number of test cases that have been set up.
+ static int counter_;
+
+ // Some resource to be shared by all tests in this test case.
+ static const char* shared_resource_;
+};
+
+int SetUpTestCaseTest::counter_ = 0;
+const char* SetUpTestCaseTest::shared_resource_ = NULL;
+
+// A test that uses the shared resource.
+TEST_F(SetUpTestCaseTest, Test1) {
+ EXPECT_STRNE(NULL, shared_resource_);
+}
+
+// Another test that uses the shared resource.
+TEST_F(SetUpTestCaseTest, Test2) {
+ EXPECT_STREQ("123", shared_resource_);
+}
+
+// The InitGoogleTestTest test case tests testing::InitGoogleTest().
+
+// The Flags struct stores a copy of all Google Test flags.
+struct Flags {
+ // Constructs a Flags struct where each flag has its default value.
+ Flags() : also_run_disabled_tests(false),
+ break_on_failure(false),
+ catch_exceptions(false),
+ death_test_use_fork(false),
+ filter(""),
+ list_tests(false),
+ output(""),
+ print_time(true),
+ random_seed(0),
+ repeat(1),
+ shuffle(false),
+ stack_trace_depth(kMaxStackTraceDepth),
+ throw_on_failure(false) {}
+
+ // Factory methods.
+
+ // Creates a Flags struct where the gtest_also_run_disabled_tests flag has
+ // the given value.
+ static Flags AlsoRunDisabledTests(bool also_run_disabled_tests) {
+ Flags flags;
+ flags.also_run_disabled_tests = also_run_disabled_tests;
+ return flags;
+ }
+
+ // Creates a Flags struct where the gtest_break_on_failure flag has
+ // the given value.
+ static Flags BreakOnFailure(bool break_on_failure) {
+ Flags flags;
+ flags.break_on_failure = break_on_failure;
+ return flags;
+ }
+
+ // Creates a Flags struct where the gtest_catch_exceptions flag has
+ // the given value.
+ static Flags CatchExceptions(bool catch_exceptions) {
+ Flags flags;
+ flags.catch_exceptions = catch_exceptions;
+ return flags;
+ }
+
+ // Creates a Flags struct where the gtest_death_test_use_fork flag has
+ // the given value.
+ static Flags DeathTestUseFork(bool death_test_use_fork) {
+ Flags flags;
+ flags.death_test_use_fork = death_test_use_fork;
+ return flags;
+ }
+
+ // Creates a Flags struct where the gtest_filter flag has the given
+ // value.
+ static Flags Filter(const char* filter) {
+ Flags flags;
+ flags.filter = filter;
+ return flags;
+ }
+
+ // Creates a Flags struct where the gtest_list_tests flag has the
+ // given value.
+ static Flags ListTests(bool list_tests) {
+ Flags flags;
+ flags.list_tests = list_tests;
+ return flags;
+ }
+
+ // Creates a Flags struct where the gtest_output flag has the given
+ // value.
+ static Flags Output(const char* output) {
+ Flags flags;
+ flags.output = output;
+ return flags;
+ }
+
+ // Creates a Flags struct where the gtest_print_time flag has the given
+ // value.
+ static Flags PrintTime(bool print_time) {
+ Flags flags;
+ flags.print_time = print_time;
+ return flags;
+ }
+
+ // Creates a Flags struct where the gtest_random_seed flag has
+ // the given value.
+ static Flags RandomSeed(Int32 random_seed) {
+ Flags flags;
+ flags.random_seed = random_seed;
+ return flags;
+ }
+
+ // Creates a Flags struct where the gtest_repeat flag has the given
+ // value.
+ static Flags Repeat(Int32 repeat) {
+ Flags flags;
+ flags.repeat = repeat;
+ return flags;
+ }
+
+ // Creates a Flags struct where the gtest_shuffle flag has
+ // the given value.
+ static Flags Shuffle(bool shuffle) {
+ Flags flags;
+ flags.shuffle = shuffle;
+ return flags;
+ }
+
+ // Creates a Flags struct where the GTEST_FLAG(stack_trace_depth) flag has
+ // the given value.
+ static Flags StackTraceDepth(Int32 stack_trace_depth) {
+ Flags flags;
+ flags.stack_trace_depth = stack_trace_depth;
+ return flags;
+ }
+
+ // Creates a Flags struct where the gtest_throw_on_failure flag has
+ // the given value.
+ static Flags ThrowOnFailure(bool throw_on_failure) {
+ Flags flags;
+ flags.throw_on_failure = throw_on_failure;
+ return flags;
+ }
+
+ // These fields store the flag values.
+ bool also_run_disabled_tests;
+ bool break_on_failure;
+ bool catch_exceptions;
+ bool death_test_use_fork;
+ const char* filter;
+ bool list_tests;
+ const char* output;
+ bool print_time;
+ Int32 random_seed;
+ Int32 repeat;
+ bool shuffle;
+ Int32 stack_trace_depth;
+ bool throw_on_failure;
+};
+
+// Fixture for testing InitGoogleTest().
+class InitGoogleTestTest : public Test {
+ protected:
+ // Clears the flags before each test.
+ virtual void SetUp() {
+ GTEST_FLAG(also_run_disabled_tests) = false;
+ GTEST_FLAG(break_on_failure) = false;
+ GTEST_FLAG(catch_exceptions) = false;
+ GTEST_FLAG(death_test_use_fork) = false;
+ GTEST_FLAG(filter) = "";
+ GTEST_FLAG(list_tests) = false;
+ GTEST_FLAG(output) = "";
+ GTEST_FLAG(print_time) = true;
+ GTEST_FLAG(random_seed) = 0;
+ GTEST_FLAG(repeat) = 1;
+ GTEST_FLAG(shuffle) = false;
+ GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth;
+ GTEST_FLAG(throw_on_failure) = false;
+ }
+
+ // Asserts that two narrow or wide string arrays are equal.
+ template <typename CharType>
+ static void AssertStringArrayEq(size_t size1, CharType** array1,
+ size_t size2, CharType** array2) {
+ ASSERT_EQ(size1, size2) << " Array sizes different.";
+
+ for (size_t i = 0; i != size1; i++) {
+ ASSERT_STREQ(array1[i], array2[i]) << " where i == " << i;
+ }
+ }
+
+ // Verifies that the flag values match the expected values.
+ static void CheckFlags(const Flags& expected) {
+ EXPECT_EQ(expected.also_run_disabled_tests,
+ GTEST_FLAG(also_run_disabled_tests));
+ EXPECT_EQ(expected.break_on_failure, GTEST_FLAG(break_on_failure));
+ EXPECT_EQ(expected.catch_exceptions, GTEST_FLAG(catch_exceptions));
+ EXPECT_EQ(expected.death_test_use_fork, GTEST_FLAG(death_test_use_fork));
+ EXPECT_STREQ(expected.filter, GTEST_FLAG(filter).c_str());
+ EXPECT_EQ(expected.list_tests, GTEST_FLAG(list_tests));
+ EXPECT_STREQ(expected.output, GTEST_FLAG(output).c_str());
+ EXPECT_EQ(expected.print_time, GTEST_FLAG(print_time));
+ EXPECT_EQ(expected.random_seed, GTEST_FLAG(random_seed));
+ EXPECT_EQ(expected.repeat, GTEST_FLAG(repeat));
+ EXPECT_EQ(expected.shuffle, GTEST_FLAG(shuffle));
+ EXPECT_EQ(expected.throw_on_failure, GTEST_FLAG(throw_on_failure));
+ EXPECT_EQ(expected.stack_trace_depth, GTEST_FLAG(stack_trace_depth));
+ }
+
+  // Parses a command line (specified by argc1 and argv1), then
+  // verifies that the flag values are as expected and that the
+  // recognized flags have been removed from the command line.
+ template <typename CharType>
+ static void TestParsingFlags(int argc1, const CharType** argv1,
+ int argc2, const CharType** argv2,
+ const Flags& expected, bool should_print_help) {
+ const bool saved_help_flag = ::testing::internal::g_help_flag;
+ ::testing::internal::g_help_flag = false;
+
+#if GTEST_HAS_STREAM_REDIRECTION_
+ CaptureStdout();
+#endif // GTEST_HAS_STREAM_REDIRECTION_
+
+ // Parses the command line.
+ internal::ParseGoogleTestFlagsOnly(&argc1, const_cast<CharType**>(argv1));
+
+#if GTEST_HAS_STREAM_REDIRECTION_
+ const String captured_stdout = GetCapturedStdout();
+#endif // GTEST_HAS_STREAM_REDIRECTION_
+
+ // Verifies the flag values.
+ CheckFlags(expected);
+
+ // Verifies that the recognized flags are removed from the command
+ // line.
+ AssertStringArrayEq(argc1 + 1, argv1, argc2 + 1, argv2);
+
+    // Verifies that g_help_flag was set if and only if this command line
+    // should trigger printing of the help message.
+ EXPECT_EQ(should_print_help, ::testing::internal::g_help_flag);
+
+#if GTEST_HAS_STREAM_REDIRECTION_
+ const char* const expected_help_fragment =
+ "This program contains tests written using";
+ if (should_print_help) {
+ EXPECT_PRED_FORMAT2(IsSubstring, expected_help_fragment, captured_stdout);
+ } else {
+ EXPECT_PRED_FORMAT2(IsNotSubstring,
+ expected_help_fragment, captured_stdout);
+ }
+#endif // GTEST_HAS_STREAM_REDIRECTION_
+
+ ::testing::internal::g_help_flag = saved_help_flag;
+ }
+
+  // This macro wraps TestParsingFlags so that the caller doesn't need
+  // to specify the array sizes.
+#define GTEST_TEST_PARSING_FLAGS_(argv1, argv2, expected, should_print_help) \
+ TestParsingFlags(sizeof(argv1)/sizeof(*argv1) - 1, argv1, \
+ sizeof(argv2)/sizeof(*argv2) - 1, argv2, \
+ expected, should_print_help)
+};
+
+// Tests parsing an empty command line.
+TEST_F(InitGoogleTestTest, Empty) {
+ const char* argv[] = {
+ NULL
+ };
+
+ const char* argv2[] = {
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false);
+}
+
+// Tests parsing a command line that has no flag.
+TEST_F(InitGoogleTestTest, NoFlag) {
+ const char* argv[] = {
+ "foo.exe",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false);
+}
+
+// Tests parsing a bad --gtest_filter flag.
+TEST_F(InitGoogleTestTest, FilterBad) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_filter",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ "--gtest_filter",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(""), true);
+}
+
+// Tests parsing an empty --gtest_filter flag.
+TEST_F(InitGoogleTestTest, FilterEmpty) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_filter=",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(""), false);
+}
+
+// Tests parsing a non-empty --gtest_filter flag.
+TEST_F(InitGoogleTestTest, FilterNonEmpty) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_filter=abc",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter("abc"), false);
+}
+
+// Tests parsing --gtest_break_on_failure.
+TEST_F(InitGoogleTestTest, BreakOnFailureWithoutValue) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_break_on_failure",
+ NULL
+  };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(true), false);
+}
+
+// Tests parsing --gtest_break_on_failure=0.
+TEST_F(InitGoogleTestTest, BreakOnFailureFalse_0) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_break_on_failure=0",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false);
+}
+
+// Tests parsing --gtest_break_on_failure=f.
+TEST_F(InitGoogleTestTest, BreakOnFailureFalse_f) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_break_on_failure=f",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false);
+}
+
+// Tests parsing --gtest_break_on_failure=F.
+TEST_F(InitGoogleTestTest, BreakOnFailureFalse_F) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_break_on_failure=F",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false);
+}
+
+// Tests parsing a --gtest_break_on_failure flag that has a "true" value.
+TEST_F(InitGoogleTestTest, BreakOnFailureTrue) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_break_on_failure=1",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(true), false);
+}
+
+// Tests parsing --gtest_catch_exceptions.
+TEST_F(InitGoogleTestTest, CatchExceptions) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_catch_exceptions",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::CatchExceptions(true), false);
+}
+
+// Tests parsing --gtest_death_test_use_fork.
+TEST_F(InitGoogleTestTest, DeathTestUseFork) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_death_test_use_fork",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::DeathTestUseFork(true), false);
+}
+
+// Tests having the same flag twice with different values. The
+// expected behavior is that the one coming last takes precedence.
+TEST_F(InitGoogleTestTest, DuplicatedFlags) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_filter=a",
+ "--gtest_filter=b",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter("b"), false);
+}
+
+// Tests having an unrecognized flag on the command line.
+TEST_F(InitGoogleTestTest, UnrecognizedFlag) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_break_on_failure",
+ "bar", // Unrecognized by Google Test.
+ "--gtest_filter=b",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ "bar",
+ NULL
+ };
+
+ Flags flags;
+ flags.break_on_failure = true;
+ flags.filter = "b";
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, flags, false);
+}
+
+// Tests having a --gtest_list_tests flag
+TEST_F(InitGoogleTestTest, ListTestsFlag) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_list_tests",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(true), false);
+}
+
+// Tests having a --gtest_list_tests flag with a "true" value
+TEST_F(InitGoogleTestTest, ListTestsTrue) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_list_tests=1",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(true), false);
+}
+
+// Tests having a --gtest_list_tests flag with a "false" value
+TEST_F(InitGoogleTestTest, ListTestsFalse) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_list_tests=0",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false);
+}
+
+// Tests parsing --gtest_list_tests=f.
+TEST_F(InitGoogleTestTest, ListTestsFalse_f) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_list_tests=f",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false);
+}
+
+// Tests parsing --gtest_list_tests=F.
+TEST_F(InitGoogleTestTest, ListTestsFalse_F) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_list_tests=F",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false);
+}
+
+// Tests parsing --gtest_output (invalid).
+TEST_F(InitGoogleTestTest, OutputEmpty) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_output",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ "--gtest_output",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), true);
+}
+
+// Tests parsing --gtest_output=xml
+TEST_F(InitGoogleTestTest, OutputXml) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_output=xml",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Output("xml"), false);
+}
+
+// Tests parsing --gtest_output=xml:file
+TEST_F(InitGoogleTestTest, OutputXmlFile) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_output=xml:file",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Output("xml:file"), false);
+}
+
+// Tests parsing --gtest_output=xml:directory/path/
+TEST_F(InitGoogleTestTest, OutputXmlDirectory) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_output=xml:directory/path/",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2,
+ Flags::Output("xml:directory/path/"), false);
+}
+
+// Tests having a --gtest_print_time flag
+TEST_F(InitGoogleTestTest, PrintTimeFlag) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_print_time",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(true), false);
+}
+
+// Tests having a --gtest_print_time flag with a "true" value
+TEST_F(InitGoogleTestTest, PrintTimeTrue) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_print_time=1",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(true), false);
+}
+
+// Tests having a --gtest_print_time flag with a "false" value
+TEST_F(InitGoogleTestTest, PrintTimeFalse) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_print_time=0",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false);
+}
+
+// Tests parsing --gtest_print_time=f.
+TEST_F(InitGoogleTestTest, PrintTimeFalse_f) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_print_time=f",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false);
+}
+
+// Tests parsing --gtest_print_time=F.
+TEST_F(InitGoogleTestTest, PrintTimeFalse_F) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_print_time=F",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false);
+}
+
+// Tests parsing --gtest_random_seed=number
+TEST_F(InitGoogleTestTest, RandomSeed) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_random_seed=1000",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::RandomSeed(1000), false);
+}
+
+// Tests parsing --gtest_repeat=number
+TEST_F(InitGoogleTestTest, Repeat) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_repeat=1000",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Repeat(1000), false);
+}
+
+// Tests having a --gtest_also_run_disabled_tests flag
+TEST_F(InitGoogleTestTest, AlsoRunDisabledTestsFlag) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_also_run_disabled_tests",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2,
+ Flags::AlsoRunDisabledTests(true), false);
+}
+
+// Tests having a --gtest_also_run_disabled_tests flag with a "true" value
+TEST_F(InitGoogleTestTest, AlsoRunDisabledTestsTrue) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_also_run_disabled_tests=1",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2,
+ Flags::AlsoRunDisabledTests(true), false);
+}
+
+// Tests having a --gtest_also_run_disabled_tests flag with a "false" value
+TEST_F(InitGoogleTestTest, AlsoRunDisabledTestsFalse) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_also_run_disabled_tests=0",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2,
+ Flags::AlsoRunDisabledTests(false), false);
+}
+
+// Tests parsing --gtest_shuffle.
+TEST_F(InitGoogleTestTest, ShuffleWithoutValue) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_shuffle",
+ NULL
+  };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(true), false);
+}
+
+// Tests parsing --gtest_shuffle=0.
+TEST_F(InitGoogleTestTest, ShuffleFalse_0) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_shuffle=0",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(false), false);
+}
+
+// Tests parsing a --gtest_shuffle flag that has a "true" value.
+TEST_F(InitGoogleTestTest, ShuffleTrue) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_shuffle=1",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(true), false);
+}
+
+// Tests parsing --gtest_stack_trace_depth=number.
+TEST_F(InitGoogleTestTest, StackTraceDepth) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_stack_trace_depth=5",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::StackTraceDepth(5), false);
+}
+
+// Tests parsing --gtest_throw_on_failure.
+TEST_F(InitGoogleTestTest, ThrowOnFailureWithoutValue) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_throw_on_failure",
+ NULL
+  };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(true), false);
+}
+
+// Tests parsing --gtest_throw_on_failure=0.
+TEST_F(InitGoogleTestTest, ThrowOnFailureFalse_0) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_throw_on_failure=0",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(false), false);
+}
+
+// Tests parsing a --gtest_throw_on_failure flag that has a "true" value.
+TEST_F(InitGoogleTestTest, ThrowOnFailureTrue) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_throw_on_failure=1",
+ NULL
+ };
+
+ const char* argv2[] = {
+ "foo.exe",
+ NULL
+ };
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(true), false);
+}
+
+#if GTEST_OS_WINDOWS
+// Tests parsing wide strings.
+TEST_F(InitGoogleTestTest, WideStrings) {
+ const wchar_t* argv[] = {
+ L"foo.exe",
+ L"--gtest_filter=Foo*",
+ L"--gtest_list_tests=1",
+ L"--gtest_break_on_failure",
+ L"--non_gtest_flag",
+ NULL
+ };
+
+ const wchar_t* argv2[] = {
+ L"foo.exe",
+ L"--non_gtest_flag",
+ NULL
+ };
+
+ Flags expected_flags;
+ expected_flags.break_on_failure = true;
+ expected_flags.filter = "Foo*";
+ expected_flags.list_tests = true;
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, expected_flags, false);
+}
+#endif // GTEST_OS_WINDOWS
+
+// Tests current_test_info() in UnitTest.
+class CurrentTestInfoTest : public Test {
+ protected:
+ // Tests that current_test_info() returns NULL before the first test in
+ // the test case is run.
+ static void SetUpTestCase() {
+ // There should be no tests running at this point.
+ const TestInfo* test_info =
+ UnitTest::GetInstance()->current_test_info();
+ EXPECT_TRUE(test_info == NULL)
+ << "There should be no tests running at this point.";
+ }
+
+ // Tests that current_test_info() returns NULL after the last test in
+ // the test case has run.
+ static void TearDownTestCase() {
+ const TestInfo* test_info =
+ UnitTest::GetInstance()->current_test_info();
+ EXPECT_TRUE(test_info == NULL)
+ << "There should be no tests running at this point.";
+ }
+};
+
+// Tests that current_test_info() returns TestInfo for currently running
+// test by checking the expected test name against the actual one.
+TEST_F(CurrentTestInfoTest, WorksForFirstTestInATestCase) {
+ const TestInfo* test_info =
+ UnitTest::GetInstance()->current_test_info();
+ ASSERT_TRUE(NULL != test_info)
+ << "There is a test running so we should have a valid TestInfo.";
+ EXPECT_STREQ("CurrentTestInfoTest", test_info->test_case_name())
+ << "Expected the name of the currently running test case.";
+ EXPECT_STREQ("WorksForFirstTestInATestCase", test_info->name())
+ << "Expected the name of the currently running test.";
+}
+
+// Tests that current_test_info() returns TestInfo for currently running
+// test by checking the expected test name against the actual one. We
+// use this test to see that the TestInfo object actually changed from
+// the previous invocation.
+TEST_F(CurrentTestInfoTest, WorksForSecondTestInATestCase) {
+ const TestInfo* test_info =
+ UnitTest::GetInstance()->current_test_info();
+ ASSERT_TRUE(NULL != test_info)
+ << "There is a test running so we should have a valid TestInfo.";
+ EXPECT_STREQ("CurrentTestInfoTest", test_info->test_case_name())
+ << "Expected the name of the currently running test case.";
+ EXPECT_STREQ("WorksForSecondTestInATestCase", test_info->name())
+ << "Expected the name of the currently running test.";
+}
+
+} // namespace testing
+
+// These two lines test that we can define tests in a namespace that
+// has the name "testing" and is nested in another namespace.
+namespace my_namespace {
+namespace testing {
+
+// Makes sure that TEST knows to use ::testing::Test instead of
+// ::my_namespace::testing::Test.
+class Test {};
+
+// Makes sure that an assertion knows to use ::testing::Message instead of
+// ::my_namespace::testing::Message.
+class Message {};
+
+// Makes sure that an assertion knows to use
+// ::testing::AssertionResult instead of
+// ::my_namespace::testing::AssertionResult.
+class AssertionResult {};
+
+// Tests that an assertion that should succeed works as expected.
+TEST(NestedTestingNamespaceTest, Success) {
+ EXPECT_EQ(1, 1) << "This shouldn't fail.";
+}
+
+// Tests that an assertion that should fail works as expected.
+TEST(NestedTestingNamespaceTest, Failure) {
+ EXPECT_FATAL_FAILURE(FAIL() << "This failure is expected.",
+ "This failure is expected.");
+}
+
+} // namespace testing
+} // namespace my_namespace
+
+// Tests that one can call superclass SetUp and TearDown methods--
+// that is, that they are not private.
+// No tests are based on this fixture; the test "passes" if it compiles
+// successfully.
+class ProtectedFixtureMethodsTest : public Test {
+ protected:
+ virtual void SetUp() {
+ Test::SetUp();
+ }
+ virtual void TearDown() {
+ Test::TearDown();
+ }
+};
+
+// StreamingAssertionsTest tests the streaming versions of a representative
+// sample of assertions.
+TEST(StreamingAssertionsTest, Unconditional) {
+ SUCCEED() << "expected success";
+ EXPECT_NONFATAL_FAILURE(ADD_FAILURE() << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(FAIL() << "expected failure",
+ "expected failure");
+}
+
+#ifdef __BORLANDC__
+// Silences warnings: "Condition is always true", "Unreachable code"
+#pragma option push -w-ccc -w-rch
+#endif
+
+TEST(StreamingAssertionsTest, Truth) {
+ EXPECT_TRUE(true) << "unexpected failure";
+ ASSERT_TRUE(true) << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(false) << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_TRUE(false) << "expected failure",
+ "expected failure");
+}
+
+TEST(StreamingAssertionsTest, Truth2) {
+ EXPECT_FALSE(false) << "unexpected failure";
+ ASSERT_FALSE(false) << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(true) << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_FALSE(true) << "expected failure",
+ "expected failure");
+}
+
+#ifdef __BORLANDC__
+// Restores warnings after the previous "#pragma option push" suppressed them.
+#pragma option pop
+#endif
+
+TEST(StreamingAssertionsTest, IntegerEquals) {
+ EXPECT_EQ(1, 1) << "unexpected failure";
+ ASSERT_EQ(1, 1) << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(1, 2) << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(1, 2) << "expected failure",
+ "expected failure");
+}
+
+TEST(StreamingAssertionsTest, IntegerLessThan) {
+ EXPECT_LT(1, 2) << "unexpected failure";
+ ASSERT_LT(1, 2) << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 1) << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_LT(2, 1) << "expected failure",
+ "expected failure");
+}
+
+TEST(StreamingAssertionsTest, StringsEqual) {
+ EXPECT_STREQ("foo", "foo") << "unexpected failure";
+ ASSERT_STREQ("foo", "foo") << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_STREQ("foo", "bar") << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_STREQ("foo", "bar") << "expected failure",
+ "expected failure");
+}
+
+TEST(StreamingAssertionsTest, StringsNotEqual) {
+ EXPECT_STRNE("foo", "bar") << "unexpected failure";
+ ASSERT_STRNE("foo", "bar") << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_STRNE("foo", "foo") << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_STRNE("foo", "foo") << "expected failure",
+ "expected failure");
+}
+
+TEST(StreamingAssertionsTest, StringsEqualIgnoringCase) {
+ EXPECT_STRCASEEQ("foo", "FOO") << "unexpected failure";
+ ASSERT_STRCASEEQ("foo", "FOO") << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_STRCASEEQ("foo", "bar") << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_STRCASEEQ("foo", "bar") << "expected failure",
+ "expected failure");
+}
+
+TEST(StreamingAssertionsTest, StringNotEqualIgnoringCase) {
+ EXPECT_STRCASENE("foo", "bar") << "unexpected failure";
+ ASSERT_STRCASENE("foo", "bar") << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_STRCASENE("foo", "FOO") << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_STRCASENE("bar", "BAR") << "expected failure",
+ "expected failure");
+}
+
+TEST(StreamingAssertionsTest, FloatingPointEquals) {
+ EXPECT_FLOAT_EQ(1.0, 1.0) << "unexpected failure";
+ ASSERT_FLOAT_EQ(1.0, 1.0) << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(0.0, 1.0) << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(0.0, 1.0) << "expected failure",
+ "expected failure");
+}
+
+#if GTEST_HAS_EXCEPTIONS
+
+TEST(StreamingAssertionsTest, Throw) {
+ EXPECT_THROW(ThrowAnInteger(), int) << "unexpected failure";
+ ASSERT_THROW(ThrowAnInteger(), int) << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowAnInteger(), bool) <<
+ "expected failure", "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_THROW(ThrowAnInteger(), bool) <<
+ "expected failure", "expected failure");
+}
+
+TEST(StreamingAssertionsTest, NoThrow) {
+ EXPECT_NO_THROW(ThrowNothing()) << "unexpected failure";
+ ASSERT_NO_THROW(ThrowNothing()) << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowAnInteger()) <<
+ "expected failure", "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowAnInteger()) <<
+ "expected failure", "expected failure");
+}
+
+TEST(StreamingAssertionsTest, AnyThrow) {
+ EXPECT_ANY_THROW(ThrowAnInteger()) << "unexpected failure";
+ ASSERT_ANY_THROW(ThrowAnInteger()) << "unexpected failure";
+ EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(ThrowNothing()) <<
+ "expected failure", "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_ANY_THROW(ThrowNothing()) <<
+ "expected failure", "expected failure");
+}
+
+#endif // GTEST_HAS_EXCEPTIONS
+
+// Tests that Google Test correctly decides whether to use colors in the output.
+
+TEST(ColoredOutputTest, UsesColorsWhenGTestColorFlagIsYes) {
+ GTEST_FLAG(color) = "yes";
+
+ SetEnv("TERM", "xterm"); // TERM supports colors.
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+ EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY.
+
+ SetEnv("TERM", "dumb"); // TERM doesn't support colors.
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+ EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY.
+}
+
+TEST(ColoredOutputTest, UsesColorsWhenGTestColorFlagIsAliasOfYes) {
+ SetEnv("TERM", "dumb"); // TERM doesn't support colors.
+
+ GTEST_FLAG(color) = "True";
+ EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY.
+
+ GTEST_FLAG(color) = "t";
+ EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY.
+
+ GTEST_FLAG(color) = "1";
+ EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY.
+}
+
+TEST(ColoredOutputTest, UsesNoColorWhenGTestColorFlagIsNo) {
+ GTEST_FLAG(color) = "no";
+
+ SetEnv("TERM", "xterm"); // TERM supports colors.
+ EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
+ EXPECT_FALSE(ShouldUseColor(false)); // Stdout is not a TTY.
+
+ SetEnv("TERM", "dumb"); // TERM doesn't support colors.
+ EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
+ EXPECT_FALSE(ShouldUseColor(false)); // Stdout is not a TTY.
+}
+
+TEST(ColoredOutputTest, UsesNoColorWhenGTestColorFlagIsInvalid) {
+ SetEnv("TERM", "xterm"); // TERM supports colors.
+
+ GTEST_FLAG(color) = "F";
+ EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
+
+ GTEST_FLAG(color) = "0";
+ EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
+
+ GTEST_FLAG(color) = "unknown";
+ EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
+}
+
+TEST(ColoredOutputTest, UsesColorsWhenStdoutIsTty) {
+ GTEST_FLAG(color) = "auto";
+
+ SetEnv("TERM", "xterm"); // TERM supports colors.
+ EXPECT_FALSE(ShouldUseColor(false)); // Stdout is not a TTY.
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+}
+
+TEST(ColoredOutputTest, UsesColorsWhenTermSupportsColors) {
+ GTEST_FLAG(color) = "auto";
+
+#if GTEST_OS_WINDOWS
+ // On Windows, we ignore the TERM variable as it's usually not set.
+
+ SetEnv("TERM", "dumb");
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+
+ SetEnv("TERM", "");
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+
+ SetEnv("TERM", "xterm");
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+#else
+ // On non-Windows platforms, we rely on TERM to determine if the
+ // terminal supports colors.
+
+ SetEnv("TERM", "dumb"); // TERM doesn't support colors.
+ EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
+
+ SetEnv("TERM", "emacs"); // TERM doesn't support colors.
+ EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
+
+ SetEnv("TERM", "vt100"); // TERM doesn't support colors.
+ EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
+
+ SetEnv("TERM", "xterm-mono"); // TERM doesn't support colors.
+ EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
+
+ SetEnv("TERM", "xterm"); // TERM supports colors.
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+
+ SetEnv("TERM", "xterm-color"); // TERM supports colors.
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+
+ SetEnv("TERM", "linux"); // TERM supports colors.
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+#endif // GTEST_OS_WINDOWS
+}
+
+// Verifies that StaticAssertTypeEq works in a namespace scope.
+
+static bool dummy1 = StaticAssertTypeEq<bool, bool>();
+static bool dummy2 = StaticAssertTypeEq<const int, const int>();
+
+// Verifies that StaticAssertTypeEq works in a class.
+
+template <typename T>
+class StaticAssertTypeEqTestHelper {
+ public:
+ StaticAssertTypeEqTestHelper() { StaticAssertTypeEq<bool, T>(); }
+};
+
+TEST(StaticAssertTypeEqTest, WorksInClass) {
+ StaticAssertTypeEqTestHelper<bool>();
+}
+
+// Verifies that StaticAssertTypeEq works inside a function.
+
+typedef int IntAlias;
+
+TEST(StaticAssertTypeEqTest, CompilesForEqualTypes) {
+ StaticAssertTypeEq<int, IntAlias>();
+ StaticAssertTypeEq<int*, IntAlias*>();
+}
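+
+// An illustrative note, not part of the upstream test suite: the converse
+// case cannot be exercised at runtime, because StaticAssertTypeEq with
+// mismatched types fails to compile.  For example, adding
+//
+//   StaticAssertTypeEq<int, IntAlias*>();  // does not compile
+//
+// to the test above should produce a compile-time error, not a test failure.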
+
+TEST(GetCurrentOsStackTraceExceptTopTest, ReturnsTheStackTrace) {
+ testing::UnitTest* const unit_test = testing::UnitTest::GetInstance();
+
+ // We don't have a stack walker in Google Test yet.
+ EXPECT_STREQ("", GetCurrentOsStackTraceExceptTop(unit_test, 0).c_str());
+ EXPECT_STREQ("", GetCurrentOsStackTraceExceptTop(unit_test, 1).c_str());
+}
+
+TEST(HasNonfatalFailureTest, ReturnsFalseWhenThereIsNoFailure) {
+ EXPECT_FALSE(HasNonfatalFailure());
+}
+
+static void FailFatally() { FAIL(); }
+
+TEST(HasNonfatalFailureTest, ReturnsFalseWhenThereIsOnlyFatalFailure) {
+ FailFatally();
+ const bool has_nonfatal_failure = HasNonfatalFailure();
+ ClearCurrentTestPartResults();
+ EXPECT_FALSE(has_nonfatal_failure);
+}
+
+TEST(HasNonfatalFailureTest, ReturnsTrueWhenThereIsNonfatalFailure) {
+ ADD_FAILURE();
+ const bool has_nonfatal_failure = HasNonfatalFailure();
+ ClearCurrentTestPartResults();
+ EXPECT_TRUE(has_nonfatal_failure);
+}
+
+TEST(HasNonfatalFailureTest, ReturnsTrueWhenThereAreFatalAndNonfatalFailures) {
+ FailFatally();
+ ADD_FAILURE();
+ const bool has_nonfatal_failure = HasNonfatalFailure();
+ ClearCurrentTestPartResults();
+ EXPECT_TRUE(has_nonfatal_failure);
+}
+
+// A wrapper for calling HasNonfatalFailure outside of a test body.
+static bool HasNonfatalFailureHelper() {
+ return testing::Test::HasNonfatalFailure();
+}
+
+TEST(HasNonfatalFailureTest, WorksOutsideOfTestBody) {
+ EXPECT_FALSE(HasNonfatalFailureHelper());
+}
+
+TEST(HasNonfatalFailureTest, WorksOutsideOfTestBody2) {
+ ADD_FAILURE();
+ const bool has_nonfatal_failure = HasNonfatalFailureHelper();
+ ClearCurrentTestPartResults();
+ EXPECT_TRUE(has_nonfatal_failure);
+}
+
+TEST(HasFailureTest, ReturnsFalseWhenThereIsNoFailure) {
+ EXPECT_FALSE(HasFailure());
+}
+
+TEST(HasFailureTest, ReturnsTrueWhenThereIsFatalFailure) {
+ FailFatally();
+ const bool has_failure = HasFailure();
+ ClearCurrentTestPartResults();
+ EXPECT_TRUE(has_failure);
+}
+
+TEST(HasFailureTest, ReturnsTrueWhenThereIsNonfatalFailure) {
+ ADD_FAILURE();
+ const bool has_failure = HasFailure();
+ ClearCurrentTestPartResults();
+ EXPECT_TRUE(has_failure);
+}
+
+TEST(HasFailureTest, ReturnsTrueWhenThereAreFatalAndNonfatalFailures) {
+ FailFatally();
+ ADD_FAILURE();
+ const bool has_failure = HasFailure();
+ ClearCurrentTestPartResults();
+ EXPECT_TRUE(has_failure);
+}
+
+// A wrapper for calling HasFailure outside of a test body.
+static bool HasFailureHelper() { return testing::Test::HasFailure(); }
+
+TEST(HasFailureTest, WorksOutsideOfTestBody) {
+ EXPECT_FALSE(HasFailureHelper());
+}
+
+TEST(HasFailureTest, WorksOutsideOfTestBody2) {
+ ADD_FAILURE();
+ const bool has_failure = HasFailureHelper();
+ ClearCurrentTestPartResults();
+ EXPECT_TRUE(has_failure);
+}
+
+class TestListener : public EmptyTestEventListener {
+ public:
+ TestListener() : on_start_counter_(NULL), is_destroyed_(NULL) {}
+ TestListener(int* on_start_counter, bool* is_destroyed)
+ : on_start_counter_(on_start_counter),
+ is_destroyed_(is_destroyed) {}
+
+ virtual ~TestListener() {
+ if (is_destroyed_)
+ *is_destroyed_ = true;
+ }
+
+ protected:
+ virtual void OnTestProgramStart(const UnitTest& /*unit_test*/) {
+ if (on_start_counter_ != NULL)
+ (*on_start_counter_)++;
+ }
+
+ private:
+ int* on_start_counter_;
+ bool* is_destroyed_;
+};
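+
+// An illustrative sketch, not part of the upstream test suite: in user code
+// a listener like TestListener is normally registered with the global
+// UnitTest before RUN_ALL_TESTS(), after which Google Test owns it:
+//
+//   int main(int argc, char** argv) {
+//     testing::InitGoogleTest(&argc, argv);
+//     testing::TestEventListeners& listeners =
+//         testing::UnitTest::GetInstance()->listeners();
+//     listeners.Append(new TestListener(NULL, NULL));  // Owned by gtest.
+//     return RUN_ALL_TESTS();
+//   }
+//
+// The tests below instead drive standalone TestEventListeners instances
+// through TestEventListenersAccessor so the API can be exercised in
+// isolation.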
+
+// Tests the constructor.
+TEST(TestEventListenersTest, ConstructionWorks) {
+ TestEventListeners listeners;
+
+ EXPECT_TRUE(TestEventListenersAccessor::GetRepeater(&listeners) != NULL);
+ EXPECT_TRUE(listeners.default_result_printer() == NULL);
+ EXPECT_TRUE(listeners.default_xml_generator() == NULL);
+}
+
+// Tests that the TestEventListeners destructor deletes all the listeners it
+// owns.
+TEST(TestEventListenersTest, DestructionWorks) {
+ bool default_result_printer_is_destroyed = false;
+ bool default_xml_printer_is_destroyed = false;
+ bool extra_listener_is_destroyed = false;
+ TestListener* default_result_printer = new TestListener(
+ NULL, &default_result_printer_is_destroyed);
+ TestListener* default_xml_printer = new TestListener(
+ NULL, &default_xml_printer_is_destroyed);
+ TestListener* extra_listener = new TestListener(
+ NULL, &extra_listener_is_destroyed);
+
+ {
+ TestEventListeners listeners;
+ TestEventListenersAccessor::SetDefaultResultPrinter(&listeners,
+ default_result_printer);
+ TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners,
+ default_xml_printer);
+ listeners.Append(extra_listener);
+ }
+ EXPECT_TRUE(default_result_printer_is_destroyed);
+ EXPECT_TRUE(default_xml_printer_is_destroyed);
+ EXPECT_TRUE(extra_listener_is_destroyed);
+}
+
+// Tests that a listener Append'ed to a TestEventListeners list starts
+// receiving events.
+TEST(TestEventListenersTest, Append) {
+ int on_start_counter = 0;
+ bool is_destroyed = false;
+ TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
+ {
+ TestEventListeners listeners;
+ listeners.Append(listener);
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
+ *UnitTest::GetInstance());
+ EXPECT_EQ(1, on_start_counter);
+ }
+ EXPECT_TRUE(is_destroyed);
+}
+
+// Tests that listeners receive events in the order they were appended to
+// the list, except for *End requests, which must be received in the reverse
+// order.
+class SequenceTestingListener : public EmptyTestEventListener {
+ public:
+ SequenceTestingListener(std::vector<String>* vector, const char* id)
+ : vector_(vector), id_(id) {}
+
+ protected:
+ virtual void OnTestProgramStart(const UnitTest& /*unit_test*/) {
+ vector_->push_back(GetEventDescription("OnTestProgramStart"));
+ }
+
+ virtual void OnTestProgramEnd(const UnitTest& /*unit_test*/) {
+ vector_->push_back(GetEventDescription("OnTestProgramEnd"));
+ }
+
+ virtual void OnTestIterationStart(const UnitTest& /*unit_test*/,
+ int /*iteration*/) {
+ vector_->push_back(GetEventDescription("OnTestIterationStart"));
+ }
+
+ virtual void OnTestIterationEnd(const UnitTest& /*unit_test*/,
+ int /*iteration*/) {
+ vector_->push_back(GetEventDescription("OnTestIterationEnd"));
+ }
+
+ private:
+ String GetEventDescription(const char* method) {
+ Message message;
+ message << id_ << "." << method;
+ return message.GetString();
+ }
+
+ std::vector<String>* vector_;
+ const char* const id_;
+
+ GTEST_DISALLOW_COPY_AND_ASSIGN_(SequenceTestingListener);
+};
+
+TEST(EventListenerTest, AppendKeepsOrder) {
+ std::vector<String> vec;
+ TestEventListeners listeners;
+ listeners.Append(new SequenceTestingListener(&vec, "1st"));
+ listeners.Append(new SequenceTestingListener(&vec, "2nd"));
+ listeners.Append(new SequenceTestingListener(&vec, "3rd"));
+
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
+ *UnitTest::GetInstance());
+ ASSERT_EQ(3U, vec.size());
+ EXPECT_STREQ("1st.OnTestProgramStart", vec[0].c_str());
+ EXPECT_STREQ("2nd.OnTestProgramStart", vec[1].c_str());
+ EXPECT_STREQ("3rd.OnTestProgramStart", vec[2].c_str());
+
+ vec.clear();
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramEnd(
+ *UnitTest::GetInstance());
+ ASSERT_EQ(3U, vec.size());
+ EXPECT_STREQ("3rd.OnTestProgramEnd", vec[0].c_str());
+ EXPECT_STREQ("2nd.OnTestProgramEnd", vec[1].c_str());
+ EXPECT_STREQ("1st.OnTestProgramEnd", vec[2].c_str());
+
+ vec.clear();
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestIterationStart(
+ *UnitTest::GetInstance(), 0);
+ ASSERT_EQ(3U, vec.size());
+ EXPECT_STREQ("1st.OnTestIterationStart", vec[0].c_str());
+ EXPECT_STREQ("2nd.OnTestIterationStart", vec[1].c_str());
+ EXPECT_STREQ("3rd.OnTestIterationStart", vec[2].c_str());
+
+ vec.clear();
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestIterationEnd(
+ *UnitTest::GetInstance(), 0);
+ ASSERT_EQ(3U, vec.size());
+ EXPECT_STREQ("3rd.OnTestIterationEnd", vec[0].c_str());
+ EXPECT_STREQ("2nd.OnTestIterationEnd", vec[1].c_str());
+ EXPECT_STREQ("1st.OnTestIterationEnd", vec[2].c_str());
+}
+
+// Tests that a listener removed from a TestEventListeners list stops receiving
+// events and is not deleted when the list is destroyed.
+TEST(TestEventListenersTest, Release) {
+ int on_start_counter = 0;
+ bool is_destroyed = false;
+ // Although Append passes the ownership of this object to the list,
+ // the following calls release it, and we need to delete it before the
+ // test ends.
+ TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
+ {
+ TestEventListeners listeners;
+ listeners.Append(listener);
+ EXPECT_EQ(listener, listeners.Release(listener));
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
+ *UnitTest::GetInstance());
+ EXPECT_TRUE(listeners.Release(listener) == NULL);
+ }
+ EXPECT_EQ(0, on_start_counter);
+ EXPECT_FALSE(is_destroyed);
+ delete listener;
+}
+
+// Tests that no events are forwarded when event forwarding is disabled.
+TEST(EventListenerTest, SuppressEventForwarding) {
+ int on_start_counter = 0;
+ TestListener* listener = new TestListener(&on_start_counter, NULL);
+
+ TestEventListeners listeners;
+ listeners.Append(listener);
+ ASSERT_TRUE(TestEventListenersAccessor::EventForwardingEnabled(listeners));
+ TestEventListenersAccessor::SuppressEventForwarding(&listeners);
+ ASSERT_FALSE(TestEventListenersAccessor::EventForwardingEnabled(listeners));
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
+ *UnitTest::GetInstance());
+ EXPECT_EQ(0, on_start_counter);
+}
+
+// Tests that events generated by Google Test are not forwarded in
+// death test subprocesses.
+TEST(EventListenerDeathTest, EventsNotForwardedInDeathTestSubprocesses) {
+ EXPECT_DEATH_IF_SUPPORTED({
+ GTEST_CHECK_(TestEventListenersAccessor::EventForwardingEnabled(
+ *GetUnitTestImpl()->listeners())) << "expected failure";},
+ "expected failure");
+}
+
+// Tests that a listener installed via SetDefaultResultPrinter() starts
+// receiving events and is returned via default_result_printer() and that
+// the previous default_result_printer is removed from the list and deleted.
+TEST(EventListenerTest, default_result_printer) {
+ int on_start_counter = 0;
+ bool is_destroyed = false;
+ TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
+
+ TestEventListeners listeners;
+ TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, listener);
+
+ EXPECT_EQ(listener, listeners.default_result_printer());
+
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
+ *UnitTest::GetInstance());
+
+ EXPECT_EQ(1, on_start_counter);
+
+ // Replacing default_result_printer with something else should remove it
+ // from the list and destroy it.
+ TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, NULL);
+
+ EXPECT_TRUE(listeners.default_result_printer() == NULL);
+ EXPECT_TRUE(is_destroyed);
+
+ // After broadcasting an event the counter is still the same, indicating
+ // the listener is not in the list anymore.
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
+ *UnitTest::GetInstance());
+ EXPECT_EQ(1, on_start_counter);
+}
+
+// Tests that the default_result_printer listener stops receiving events
+// when removed via Release and that it is no longer owned by the list.
+TEST(EventListenerTest, RemovingDefaultResultPrinterWorks) {
+ int on_start_counter = 0;
+ bool is_destroyed = false;
+  // Although SetDefaultResultPrinter passes the ownership of this object
+  // to the list, the following calls release it, and we need to delete it
+  // before the test ends.
+ TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
+ {
+ TestEventListeners listeners;
+ TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, listener);
+
+ EXPECT_EQ(listener, listeners.Release(listener));
+ EXPECT_TRUE(listeners.default_result_printer() == NULL);
+ EXPECT_FALSE(is_destroyed);
+
+ // Broadcasting events now should not affect default_result_printer.
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
+ *UnitTest::GetInstance());
+ EXPECT_EQ(0, on_start_counter);
+ }
+  // Destroying the list should not affect the listener now either.
+ EXPECT_FALSE(is_destroyed);
+ delete listener;
+}
+
+// Tests that a listener installed via SetDefaultXmlGenerator() starts
+// receiving events and is returned via default_xml_generator() and that
+// the previous default_xml_generator is removed from the list and deleted.
+TEST(EventListenerTest, default_xml_generator) {
+ int on_start_counter = 0;
+ bool is_destroyed = false;
+ TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
+
+ TestEventListeners listeners;
+ TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, listener);
+
+ EXPECT_EQ(listener, listeners.default_xml_generator());
+
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
+ *UnitTest::GetInstance());
+
+ EXPECT_EQ(1, on_start_counter);
+
+ // Replacing default_xml_generator with something else should remove it
+ // from the list and destroy it.
+ TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, NULL);
+
+ EXPECT_TRUE(listeners.default_xml_generator() == NULL);
+ EXPECT_TRUE(is_destroyed);
+
+ // After broadcasting an event the counter is still the same, indicating
+ // the listener is not in the list anymore.
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
+ *UnitTest::GetInstance());
+ EXPECT_EQ(1, on_start_counter);
+}
+
+// Tests that the default_xml_generator listener stops receiving events
+// when removed via Release and that it is no longer owned by the list.
+TEST(EventListenerTest, RemovingDefaultXmlGeneratorWorks) {
+ int on_start_counter = 0;
+ bool is_destroyed = false;
+  // Although SetDefaultXmlGenerator passes the ownership of this object
+  // to the list, the following calls release it, and we need to delete it
+  // before the test ends.
+ TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
+ {
+ TestEventListeners listeners;
+ TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, listener);
+
+ EXPECT_EQ(listener, listeners.Release(listener));
+ EXPECT_TRUE(listeners.default_xml_generator() == NULL);
+ EXPECT_FALSE(is_destroyed);
+
+ // Broadcasting events now should not affect default_xml_generator.
+ TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
+ *UnitTest::GetInstance());
+ EXPECT_EQ(0, on_start_counter);
+ }
+  // Destroying the list should not affect the listener now either.
+ EXPECT_FALSE(is_destroyed);
+ delete listener;
+}
+
+// Sanity tests to ensure that the alternative, verbose spellings of
+// some of the macros work. We don't test them thoroughly as that
+// would be quite involved. Since their implementations are
+// straightforward, and they are rarely used, we'll just rely on the
+// users to tell us when they are broken.
+GTEST_TEST(AlternativeNameTest, Works) { // GTEST_TEST is the same as TEST.
+ GTEST_SUCCEED() << "OK"; // GTEST_SUCCEED is the same as SUCCEED.
+
+ // GTEST_FAIL is the same as FAIL.
+ EXPECT_FATAL_FAILURE(GTEST_FAIL() << "An expected failure",
+ "An expected failure");
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_xml_outfile1_test_.cc b/Source/ThirdParty/gtest/test/gtest_xml_outfile1_test_.cc
new file mode 100644
index 000000000..664baad2a
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_xml_outfile1_test_.cc
@@ -0,0 +1,49 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: keith.ray@gmail.com (Keith Ray)
+//
+// gtest_xml_outfile1_test_ writes some XML via TestProperty; it is used by
+// gtest_xml_outfiles_test.py.
+
+#include <gtest/gtest.h>
+
+class PropertyOne : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ RecordProperty("SetUpProp", 1);
+ }
+ virtual void TearDown() {
+ RecordProperty("TearDownProp", 1);
+ }
+};
+
+TEST_F(PropertyOne, TestSomeProperties) {
+ RecordProperty("TestSomeProperty", 1);
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_xml_outfile2_test_.cc b/Source/ThirdParty/gtest/test/gtest_xml_outfile2_test_.cc
new file mode 100644
index 000000000..3411a3d38
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_xml_outfile2_test_.cc
@@ -0,0 +1,49 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: keith.ray@gmail.com (Keith Ray)
+//
+// gtest_xml_outfile2_test_ writes some XML via TestProperty; it is used by
+// gtest_xml_outfiles_test.py.
+
+#include <gtest/gtest.h>
+
+class PropertyTwo : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ RecordProperty("SetUpProp", 2);
+ }
+ virtual void TearDown() {
+ RecordProperty("TearDownProp", 2);
+ }
+};
+
+TEST_F(PropertyTwo, TestSomeProperties) {
+ RecordProperty("TestSomeProperty", 2);
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_xml_outfiles_test.py b/Source/ThirdParty/gtest/test/gtest_xml_outfiles_test.py
new file mode 100755
index 000000000..0fe947f08
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_xml_outfiles_test.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+#
+# Copyright 2008, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Unit test for the gtest_xml_output module."""
+
+__author__ = "keith.ray@gmail.com (Keith Ray)"
+
+import os
+from xml.dom import minidom, Node
+
+import gtest_test_utils
+import gtest_xml_test_utils
+
+
+GTEST_OUTPUT_SUBDIR = "xml_outfiles"
+GTEST_OUTPUT_1_TEST = "gtest_xml_outfile1_test_"
+GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_"
+
+EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
+<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" name="AllTests">
+ <testsuite name="PropertyOne" tests="1" failures="0" disabled="0" errors="0" time="*">
+ <testcase name="TestSomeProperties" status="run" time="*" classname="PropertyOne" SetUpProp="1" TestSomeProperty="1" TearDownProp="1" />
+ </testsuite>
+</testsuites>
+"""
+
+EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?>
+<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" name="AllTests">
+ <testsuite name="PropertyTwo" tests="1" failures="0" disabled="0" errors="0" time="*">
+ <testcase name="TestSomeProperties" status="run" time="*" classname="PropertyTwo" SetUpProp="2" TestSomeProperty="2" TearDownProp="2" />
+ </testsuite>
+</testsuites>
+"""
+
+
+class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
+ """Unit test for Google Test's XML output functionality."""
+
+ def setUp(self):
+    # We want the trailing '/' that the last "" provides in os.path.join, to
+    # tell Google Test to create an output directory instead of a single file
+    # for XML output.
+ self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),
+ GTEST_OUTPUT_SUBDIR, "")
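+    # Editorial note (not part of the original file): with the trailing "",
+    # os.path.join(gtest_test_utils.GetTempDir(), GTEST_OUTPUT_SUBDIR, "")
+    # yields something like "/tmp/.../xml_outfiles/"; the trailing separator
+    # is what makes --gtest_output=xml: treat the value as a directory rather
+    # than as a single output file.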
+ self.DeleteFilesAndDir()
+
+ def tearDown(self):
+ self.DeleteFilesAndDir()
+
+ def DeleteFilesAndDir(self):
+ try:
+ os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + ".xml"))
+ except os.error:
+ pass
+ try:
+ os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + ".xml"))
+ except os.error:
+ pass
+ try:
+ os.rmdir(self.output_dir_)
+ except os.error:
+ pass
+
+ def testOutfile1(self):
+ self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_XML_1)
+
+ def testOutfile2(self):
+ self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_XML_2)
+
+ def _TestOutFile(self, test_name, expected_xml):
+ gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)
+ command = [gtest_prog_path, "--gtest_output=xml:%s" % self.output_dir_]
+ p = gtest_test_utils.Subprocess(command,
+ working_dir=gtest_test_utils.GetTempDir())
+ self.assert_(p.exited)
+ self.assertEquals(0, p.exit_code)
+
+ # TODO(wan@google.com): libtool causes the built test binary to be
+ # named lt-gtest_xml_outfiles_test_ instead of
+    # gtest_xml_outfiles_test_. To account for this possibility, we
+ # allow both names in the following code. We should remove this
+ # hack when Chandler Carruth's libtool replacement tool is ready.
+ output_file_name1 = test_name + ".xml"
+ output_file1 = os.path.join(self.output_dir_, output_file_name1)
+ output_file_name2 = 'lt-' + output_file_name1
+ output_file2 = os.path.join(self.output_dir_, output_file_name2)
+ self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
+ output_file1)
+
+ expected = minidom.parseString(expected_xml)
+ if os.path.isfile(output_file1):
+ actual = minidom.parse(output_file1)
+ else:
+ actual = minidom.parse(output_file2)
+ self.NormalizeXml(actual.documentElement)
+ self.AssertEquivalentNodes(expected.documentElement,
+ actual.documentElement)
+ expected.unlink()
+ actual.unlink()
+
+
+if __name__ == "__main__":
+ os.environ["GTEST_STACK_TRACE_DEPTH"] = "0"
+ gtest_test_utils.Main()
diff --git a/Source/ThirdParty/gtest/test/gtest_xml_output_unittest.py b/Source/ThirdParty/gtest/test/gtest_xml_output_unittest.py
new file mode 100755
index 000000000..6d44929ca
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_xml_output_unittest.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python
+#
+# Copyright 2006, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Unit test for the gtest_xml_output module"""
+
+__author__ = 'eefacm@gmail.com (Sean Mcafee)'
+
+import errno
+import os
+import sys
+from xml.dom import minidom, Node
+
+import gtest_test_utils
+import gtest_xml_test_utils
+
+
+GTEST_OUTPUT_FLAG = "--gtest_output"
+GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
+GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"
+
+SUPPORTS_STACK_TRACES = False
+
+if SUPPORTS_STACK_TRACES:
+ STACK_TRACE_TEMPLATE = "\nStack trace:\n*"
+else:
+ STACK_TRACE_TEMPLATE = ""
+
+EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
+<testsuites tests="15" failures="4" disabled="2" errors="0" time="*" name="AllTests">
+ <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
+ <testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
+ </testsuite>
+ <testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
+ <testcase name="Fails" status="run" time="*" classname="FailedTest">
+ <failure message="Value of: 2&#x0A;Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+Value of: 2
+Expected: 1%(stack)s]]></failure>
+ </testcase>
+ </testsuite>
+ <testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
+ <testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
+ <testcase name="Fails" status="run" time="*" classname="MixedResultTest">
+ <failure message="Value of: 2&#x0A;Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+Value of: 2
+Expected: 1%(stack)s]]></failure>
+ <failure message="Value of: 3&#x0A;Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+Value of: 3
+Expected: 2%(stack)s]]></failure>
+ </testcase>
+ <testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
+ </testsuite>
+ <testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
+ <testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
+ <failure message="Failed&#x0A;XML output: &lt;?xml encoding=&quot;utf-8&quot;&gt;&lt;top&gt;&lt;![CDATA[cdata text]]&gt;&lt;/top&gt;" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+Failed
+XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]&gt;<![CDATA[</top>%(stack)s]]></failure>
+ </testcase>
+ </testsuite>
+ <testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
+ <testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
+ <failure message="Failed&#x0A;Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+Failed
+Invalid characters in brackets []%(stack)s]]></failure>
+ </testcase>
+ </testsuite>
+ <testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
+ <testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
+ </testsuite>
+ <testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*">
+ <testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
+ <testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
+ <testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
+ <testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
+ </testsuite>
+ <testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
+ <testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
+ <testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
+ <testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
+ </testsuite>
+</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}
+
+
+EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
+<testsuites tests="0" failures="0" disabled="0" errors="0" time="*" name="AllTests">
+</testsuites>"""
+
+
+class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
+ """
+ Unit test for Google Test's XML output functionality.
+ """
+
+ def testNonEmptyXmlOutput(self):
+ """
+    Runs a test program that generates non-empty XML output, and
+    verifies that the XML output matches the expected XML.
+ """
+ self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)
+
+ def testEmptyXmlOutput(self):
+ """
+    Runs a test program that generates empty XML output, and
+    verifies that the XML output matches the expected XML.
+ """
+
+ self._TestXmlOutput("gtest_no_test_unittest",
+ EXPECTED_EMPTY_XML, 0)
+
+ def testDefaultOutputFile(self):
+ """
+ Confirms that Google Test produces an XML output file with the expected
+ default name if no name is explicitly specified.
+ """
+ output_file = os.path.join(gtest_test_utils.GetTempDir(),
+ GTEST_DEFAULT_OUTPUT_FILE)
+ gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
+ "gtest_no_test_unittest")
+ try:
+ os.remove(output_file)
+ except OSError, e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ p = gtest_test_utils.Subprocess(
+ [gtest_prog_path, "%s=xml" % GTEST_OUTPUT_FLAG],
+ working_dir=gtest_test_utils.GetTempDir())
+ self.assert_(p.exited)
+ self.assertEquals(0, p.exit_code)
+ self.assert_(os.path.isfile(output_file))
+
+ def testSuppressedXmlOutput(self):
+ """
+ Tests that no XML file is generated if the default XML listener is
+ shut down before RUN_ALL_TESTS is invoked.
+ """
+
+ xml_path = os.path.join(gtest_test_utils.GetTempDir(),
+ GTEST_PROGRAM_NAME + "out.xml")
+ if os.path.isfile(xml_path):
+ os.remove(xml_path)
+
+ gtest_prog_path = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
+
+ command = [gtest_prog_path,
+ "%s=xml:%s" % (GTEST_OUTPUT_FLAG, xml_path),
+ "--shut_down_xml"]
+ p = gtest_test_utils.Subprocess(command)
+ if p.terminated_by_signal:
+ self.assert_(False,
+ "%s was killed by signal %d" % (gtest_prog_name, p.signal))
+ else:
+ self.assert_(p.exited)
+ self.assertEquals(1, p.exit_code,
+ "'%s' exited with code %s, which doesn't match "
+ "the expected exit code %s."
+ % (command, p.exit_code, 1))
+
+ self.assert_(not os.path.isfile(xml_path))
+
+
+ def _TestXmlOutput(self, gtest_prog_name, expected_xml, expected_exit_code):
+ """
+ Asserts that the XML document generated by running the program
+ gtest_prog_name matches expected_xml, a string containing another
+ XML document. Furthermore, the program's exit code must be
+ expected_exit_code.
+ """
+ xml_path = os.path.join(gtest_test_utils.GetTempDir(),
+ gtest_prog_name + "out.xml")
+ gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
+
+ command = [gtest_prog_path, "%s=xml:%s" % (GTEST_OUTPUT_FLAG, xml_path)]
+ p = gtest_test_utils.Subprocess(command)
+ if p.terminated_by_signal:
+ self.assert_(False,
+ "%s was killed by signal %d" % (gtest_prog_name, p.signal))
+ else:
+ self.assert_(p.exited)
+ self.assertEquals(expected_exit_code, p.exit_code,
+ "'%s' exited with code %s, which doesn't match "
+ "the expected exit code %s."
+ % (command, p.exit_code, expected_exit_code))
+
+ expected = minidom.parseString(expected_xml)
+ actual = minidom.parse(xml_path)
+ self.NormalizeXml(actual.documentElement)
+ self.AssertEquivalentNodes(expected.documentElement,
+ actual.documentElement)
+ expected.unlink()
+ actual .unlink()
+
+
+
+if __name__ == '__main__':
+ os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
+ gtest_test_utils.Main()
diff --git a/Source/ThirdParty/gtest/test/gtest_xml_output_unittest_.cc b/Source/ThirdParty/gtest/test/gtest_xml_output_unittest_.cc
new file mode 100644
index 000000000..fc07ef46b
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_xml_output_unittest_.cc
@@ -0,0 +1,145 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: eefacm@gmail.com (Sean Mcafee)
+
+// Unit test for Google Test XML output.
+//
+// A user can specify XML output in a Google Test program to run via
+// either the GTEST_OUTPUT environment variable or the --gtest_output
+// flag. This is used for testing such functionality.
+//
+// This program will be invoked from a Python unit test. Don't run it
+// directly.
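+//
+// Illustrative note (editorial addition, not part of the original file):
+// XML output is typically requested either on the command line, e.g.
+//   ./gtest_xml_output_unittest_ --gtest_output=xml:test_detail.xml
+// or through the environment, e.g.
+//   GTEST_OUTPUT=xml:test_detail.xml ./gtest_xml_output_unittest_
+// The accompanying Python driver passes the flag form explicitly.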
+
+#include <gtest/gtest.h>
+
+using ::testing::InitGoogleTest;
+using ::testing::TestEventListeners;
+using ::testing::UnitTest;
+
+class SuccessfulTest : public testing::Test {
+};
+
+TEST_F(SuccessfulTest, Succeeds) {
+ SUCCEED() << "This is a success.";
+ ASSERT_EQ(1, 1);
+}
+
+class FailedTest : public testing::Test {
+};
+
+TEST_F(FailedTest, Fails) {
+ ASSERT_EQ(1, 2);
+}
+
+class DisabledTest : public testing::Test {
+};
+
+TEST_F(DisabledTest, DISABLED_test_not_run) {
+ FAIL() << "Unexpected failure: Disabled test should not be run";
+}
+
+TEST(MixedResultTest, Succeeds) {
+ EXPECT_EQ(1, 1);
+ ASSERT_EQ(1, 1);
+}
+
+TEST(MixedResultTest, Fails) {
+ EXPECT_EQ(1, 2);
+ ASSERT_EQ(2, 3);
+}
+
+TEST(MixedResultTest, DISABLED_test) {
+ FAIL() << "Unexpected failure: Disabled test should not be run";
+}
+
+TEST(XmlQuotingTest, OutputsCData) {
+ FAIL() << "XML output: "
+ "<?xml encoding=\"utf-8\"><top><![CDATA[cdata text]]></top>";
+}
+
+// Helps to test that invalid characters produced by test code do not make
+// it into the XML file.
+TEST(InvalidCharactersTest, InvalidCharactersInMessage) {
+ FAIL() << "Invalid characters in brackets [\x1\x2]";
+}
+
+class PropertyRecordingTest : public testing::Test {
+};
+
+TEST_F(PropertyRecordingTest, OneProperty) {
+ RecordProperty("key_1", "1");
+}
+
+TEST_F(PropertyRecordingTest, IntValuedProperty) {
+ RecordProperty("key_int", 1);
+}
+
+TEST_F(PropertyRecordingTest, ThreeProperties) {
+ RecordProperty("key_1", "1");
+ RecordProperty("key_2", "2");
+ RecordProperty("key_3", "3");
+}
+
+TEST_F(PropertyRecordingTest, TwoValuesForOneKeyUsesLastValue) {
+ RecordProperty("key_1", "1");
+ RecordProperty("key_1", "2");
+}
+
+TEST(NoFixtureTest, RecordProperty) {
+ RecordProperty("key", "1");
+}
+
+void ExternalUtilityThatCallsRecordProperty(const char* key, int value) {
+ testing::Test::RecordProperty(key, value);
+}
+
+void ExternalUtilityThatCallsRecordProperty(const char* key,
+ const char* value) {
+ testing::Test::RecordProperty(key, value);
+}
+
+TEST(NoFixtureTest, ExternalUtilityThatCallsRecordIntValuedProperty) {
+ ExternalUtilityThatCallsRecordProperty("key_for_utility_int", 1);
+}
+
+TEST(NoFixtureTest, ExternalUtilityThatCallsRecordStringValuedProperty) {
+ ExternalUtilityThatCallsRecordProperty("key_for_utility_string", "1");
+}
+
+int main(int argc, char** argv) {
+ InitGoogleTest(&argc, argv);
+
+ if (argc > 1 && strcmp(argv[1], "--shut_down_xml") == 0) {
+ TestEventListeners& listeners = UnitTest::GetInstance()->listeners();
+ delete listeners.Release(listeners.default_xml_generator());
+ }
+ return RUN_ALL_TESTS();
+}
diff --git a/Source/ThirdParty/gtest/test/gtest_xml_test_utils.py b/Source/ThirdParty/gtest/test/gtest_xml_test_utils.py
new file mode 100755
index 000000000..c83c3b7ea
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/gtest_xml_test_utils.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python
+#
+# Copyright 2006, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Unit test utilities for gtest_xml_output"""
+
+__author__ = 'eefacm@gmail.com (Sean Mcafee)'
+
+import re
+from xml.dom import minidom, Node
+
+import gtest_test_utils
+
+
+GTEST_OUTPUT_FLAG = "--gtest_output"
+GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
+
+class GTestXMLTestCase(gtest_test_utils.TestCase):
+ """
+ Base class for tests of Google Test's XML output functionality.
+ """
+
+
+ def AssertEquivalentNodes(self, expected_node, actual_node):
+ """
+ Asserts that actual_node (a DOM node object) is equivalent to
+ expected_node (another DOM node object), in that either both of
+ them are CDATA nodes and have the same value, or both are DOM
+ elements and actual_node meets all of the following conditions:
+
+ * It has the same tag name as expected_node.
+ * It has the same set of attributes as expected_node, each with
+ the same value as the corresponding attribute of expected_node.
+      An exception is any attribute named "time", which only needs to be
+      convertible to a floating-point number.
+    * It has a set of child nodes (including elements and CDATA
+      sections) equivalent to that of expected_node.  Note that we ignore
+      the order of the children, as they are not guaranteed to be in any
+      particular order.
+ """
+
+ if expected_node.nodeType == Node.CDATA_SECTION_NODE:
+ self.assertEquals(Node.CDATA_SECTION_NODE, actual_node.nodeType)
+ self.assertEquals(expected_node.nodeValue, actual_node.nodeValue)
+ return
+
+ self.assertEquals(Node.ELEMENT_NODE, actual_node.nodeType)
+ self.assertEquals(Node.ELEMENT_NODE, expected_node.nodeType)
+ self.assertEquals(expected_node.tagName, actual_node.tagName)
+
+ expected_attributes = expected_node.attributes
+ actual_attributes = actual_node .attributes
+ self.assertEquals(
+ expected_attributes.length, actual_attributes.length,
+ "attribute numbers differ in element " + actual_node.tagName)
+ for i in range(expected_attributes.length):
+ expected_attr = expected_attributes.item(i)
+ actual_attr = actual_attributes.get(expected_attr.name)
+ self.assert_(
+ actual_attr is not None,
+ "expected attribute %s not found in element %s" %
+ (expected_attr.name, actual_node.tagName))
+ self.assertEquals(expected_attr.value, actual_attr.value,
+ " values of attribute %s in element %s differ" %
+ (expected_attr.name, actual_node.tagName))
+
+ expected_children = self._GetChildren(expected_node)
+ actual_children = self._GetChildren(actual_node)
+ self.assertEquals(
+ len(expected_children), len(actual_children),
+ "number of child elements differ in element " + actual_node.tagName)
+ for child_id, child in expected_children.iteritems():
+ self.assert_(child_id in actual_children,
+ '<%s> is not in <%s> (in element %s)' %
+ (child_id, actual_children, actual_node.tagName))
+ self.AssertEquivalentNodes(child, actual_children[child_id])
+
+ identifying_attribute = {
+ "testsuites": "name",
+ "testsuite": "name",
+ "testcase": "name",
+ "failure": "message",
+ }
+
+ def _GetChildren(self, element):
+ """
+ Fetches all of the child nodes of element, a DOM Element object.
+ Returns them as the values of a dictionary keyed by the IDs of the
+ children. For <testsuites>, <testsuite> and <testcase> elements, the ID
+ is the value of their "name" attribute; for <failure> elements, it is
+ the value of the "message" attribute; CDATA sections and non-whitespace
+ text nodes are concatenated into a single CDATA section with ID
+ "detail". An exception is raised if any element other than the above
+ four is encountered, if two child elements with the same identifying
+ attributes are encountered, or if any other type of node is encountered.
+ """
+
+ children = {}
+ for child in element.childNodes:
+ if child.nodeType == Node.ELEMENT_NODE:
+ self.assert_(child.tagName in self.identifying_attribute,
+ "Encountered unknown element <%s>" % child.tagName)
+ childID = child.getAttribute(self.identifying_attribute[child.tagName])
+ self.assert_(childID not in children)
+ children[childID] = child
+ elif child.nodeType in [Node.TEXT_NODE, Node.CDATA_SECTION_NODE]:
+ if "detail" not in children:
+ if (child.nodeType == Node.CDATA_SECTION_NODE or
+ not child.nodeValue.isspace()):
+ children["detail"] = child.ownerDocument.createCDATASection(
+ child.nodeValue)
+ else:
+ children["detail"].nodeValue += child.nodeValue
+ else:
+ self.fail("Encountered unexpected node type %d" % child.nodeType)
+ return children
+
+ def NormalizeXml(self, element):
+ """
+ Normalizes Google Test's XML output to eliminate references to transient
+ information that may change from run to run.
+
+ * The "time" attribute of <testsuites>, <testsuite> and <testcase>
+ elements is replaced with a single asterisk, if it contains
+ only digit characters.
+ * The line number reported in the first line of the "message"
+ attribute of <failure> elements is replaced with a single asterisk.
+ * The directory names in file paths are removed.
+ * The stack traces are removed.
+ """
+
+ if element.tagName in ("testsuites", "testsuite", "testcase"):
+ time = element.getAttributeNode("time")
+ time.value = re.sub(r"^\d+(\.\d+)?$", "*", time.value)
+ elif element.tagName == "failure":
+ for child in element.childNodes:
+ if child.nodeType == Node.CDATA_SECTION_NODE:
+ # Removes the source line number.
+ cdata = re.sub(r"^.*[/\\](.*:)\d+\n", "\\1*\n", child.nodeValue)
+ # Removes the actual stack trace.
+ child.nodeValue = re.sub(r"\nStack trace:\n(.|\n)*",
+ "", cdata)
+ for child in element.childNodes:
+ if child.nodeType == Node.ELEMENT_NODE:
+ self.NormalizeXml(child)
diff --git a/Source/ThirdParty/gtest/test/production.cc b/Source/ThirdParty/gtest/test/production.cc
new file mode 100644
index 000000000..8b8a40b44
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/production.cc
@@ -0,0 +1,36 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+//
+// This is part of the unit test for include/gtest/gtest_prod.h.
+
+#include "production.h"
+
+PrivateCode::PrivateCode() : x_(0) {}
diff --git a/Source/ThirdParty/gtest/test/production.h b/Source/ThirdParty/gtest/test/production.h
new file mode 100644
index 000000000..8f16fffa2
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/production.h
@@ -0,0 +1,55 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+//
+// This is part of the unit test for include/gtest/gtest_prod.h.
+
+#ifndef GTEST_TEST_PRODUCTION_H_
+#define GTEST_TEST_PRODUCTION_H_
+
+#include <gtest/gtest_prod.h>
+
+class PrivateCode {
+ public:
+ // Declares a friend test that does not use a fixture.
+ FRIEND_TEST(PrivateCodeTest, CanAccessPrivateMembers);
+
+ // Declares a friend test that uses a fixture.
+ FRIEND_TEST(PrivateCodeFixtureTest, CanAccessPrivateMembers);
+
+ PrivateCode();
+
+ int x() const { return x_; }
+ private:
+ void set_x(int an_x) { x_ = an_x; }
+ int x_;
+};
+
+#endif // GTEST_TEST_PRODUCTION_H_
diff --git a/Source/ThirdParty/gtest/test/run_tests_util.py b/Source/ThirdParty/gtest/test/run_tests_util.py
new file mode 100755
index 000000000..9e57931eb
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/run_tests_util.py
@@ -0,0 +1,466 @@
+# Copyright 2008 Google Inc. All Rights Reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Provides facilities for running SCons-built Google Test/Mock tests."""
+
+
+import optparse
+import os
+import re
+import sets
+import sys
+
+try:
+  # The subprocess module is the preferred way to invoke subprocesses, but it
+  # may not be available on Mac OS X 10.4.
+ # Suppresses the 'Import not at the top of the file' lint complaint.
+ # pylint: disable-msg=C6204
+ import subprocess
+except ImportError:
+ subprocess = None
+
+HELP_MSG = """Runs the specified tests for %(proj)s.
+
+SYNOPSIS
+ run_tests.py [OPTION]... [BUILD_DIR]... [TEST]...
+
+DESCRIPTION
+ Runs the specified tests (either binary or Python), and prints a
+ summary of the results. BUILD_DIRS will be used to search for the
+ binaries. If no TESTs are specified, all binary tests found in
+ BUILD_DIRs and all Python tests found in the directory test/ (in the
+ %(proj)s root) are run.
+
+  TEST is the name of either a binary or a Python test. A binary test is
+  an executable file named *_test or *_unittest (with the .exe
+  extension on Windows). A Python test is a script named *_test.py or
+  *_unittest.py.
+
+OPTIONS
+ -h, --help
+ Print this help message.
+ -c CONFIGURATIONS
+ Specify build directories via build configurations.
+ CONFIGURATIONS is either a comma-separated list of build
+ configurations or 'all'. Each configuration is equivalent to
+ adding 'scons/build/<configuration>/%(proj)s/scons' to BUILD_DIRs.
+ Specifying -c=all is equivalent to providing all directories
+      listed in the KNOWN BUILD DIRECTORIES section below.
+ -a
+ Equivalent to -c=all
+ -b
+ Equivalent to -c=all with the exception that the script will not
+      fail if some of the KNOWN BUILD DIRECTORIES do not exist; the
+ script will simply not run the tests there. 'b' stands for
+ 'built directories'.
+
+RETURN VALUE
+ Returns 0 if all tests are successful; otherwise returns 1.
+
+EXAMPLES
+ run_tests.py
+ Runs all tests for the default build configuration.
+ run_tests.py -a
+ Runs all tests with binaries in KNOWN BUILD DIRECTORIES.
+ run_tests.py -b
+ Runs all tests in KNOWN BUILD DIRECTORIES that have been
+ built.
+ run_tests.py foo/
+ Runs all tests in the foo/ directory and all Python tests in
+ the directory test. The Python tests are instructed to look
+ for binaries in foo/.
+ run_tests.py bar_test.exe test/baz_test.exe foo/ bar/
+ Runs foo/bar_test.exe, bar/bar_test.exe, foo/baz_test.exe, and
+ bar/baz_test.exe.
+ run_tests.py foo bar test/foo_test.py
+      Runs test/foo_test.py twice, instructing it to look for its
+      test binaries in the directories foo and bar,
+      respectively.
+
+KNOWN BUILD DIRECTORIES
+ run_tests.py knows about directories where the SCons build script
+ deposits its products. These are the directories where run_tests.py
+ will be looking for its binaries. Currently, %(proj)s's SConstruct file
+ defines them as follows (the default build directory is the first one
+ listed in each group):
+ On Windows:
+ <%(proj)s root>/scons/build/win-dbg8/%(proj)s/scons/
+ <%(proj)s root>/scons/build/win-opt8/%(proj)s/scons/
+ On Mac:
+ <%(proj)s root>/scons/build/mac-dbg/%(proj)s/scons/
+ <%(proj)s root>/scons/build/mac-opt/%(proj)s/scons/
+ On other platforms:
+ <%(proj)s root>/scons/build/dbg/%(proj)s/scons/
+ <%(proj)s root>/scons/build/opt/%(proj)s/scons/"""
+
+IS_WINDOWS = os.name == 'nt'
+IS_MAC = os.name == 'posix' and os.uname()[0] == 'Darwin'
+IS_CYGWIN = os.name == 'posix' and 'CYGWIN' in os.uname()[0]
+
+# Definition of CONFIGS must match that of the build directory names in the
+# SConstruct script. The first list item is the default build configuration.
+if IS_WINDOWS:
+ CONFIGS = ('win-dbg8', 'win-opt8')
+elif IS_MAC:
+ CONFIGS = ('mac-dbg', 'mac-opt')
+else:
+ CONFIGS = ('dbg', 'opt')
+
+if IS_WINDOWS or IS_CYGWIN:
+ PYTHON_TEST_REGEX = re.compile(r'_(unit)?test\.py$', re.IGNORECASE)
+ BINARY_TEST_REGEX = re.compile(r'_(unit)?test(\.exe)?$', re.IGNORECASE)
+ BINARY_TEST_SEARCH_REGEX = re.compile(r'_(unit)?test\.exe$', re.IGNORECASE)
+else:
+ PYTHON_TEST_REGEX = re.compile(r'_(unit)?test\.py$')
+ BINARY_TEST_REGEX = re.compile(r'_(unit)?test$')
+ BINARY_TEST_SEARCH_REGEX = BINARY_TEST_REGEX
+
+
+def _GetGtestBuildDir(injected_os, script_dir, config):
+ """Calculates path to the Google Test SCons build directory."""
+
+ return injected_os.path.normpath(injected_os.path.join(script_dir,
+ 'scons/build',
+ config,
+ 'gtest/scons'))
+
+
+def _GetConfigFromBuildDir(build_dir):
+ """Extracts the configuration name from the build directory."""
+
+ # We don't want to depend on build_dir containing the correct path
+ # separators.
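+  # Editorial example (assumption matching the SCons layout described in
+  # HELP_MSG): 'scons/build/dbg/gtest/scons' matches and yields 'dbg'.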
+ m = re.match(r'.*[\\/]([^\\/]+)[\\/][^\\/]+[\\/]scons[\\/]?$', build_dir)
+ if m:
+ return m.group(1)
+ else:
+ print >>sys.stderr, ('%s is an invalid build directory that does not '
+ 'correspond to any configuration.' % (build_dir,))
+ return ''
+
+
+# All paths in this script are either absolute or relative to the current
+# working directory, unless otherwise specified.
+class TestRunner(object):
+ """Provides facilities for running Python and binary tests for Google Test."""
+
+ def __init__(self,
+ script_dir,
+ build_dir_var_name='GTEST_BUILD_DIR',
+ injected_os=os,
+ injected_subprocess=subprocess,
+ injected_build_dir_finder=_GetGtestBuildDir):
+ """Initializes a TestRunner instance.
+
+ Args:
+ script_dir: File path to the calling script.
+ build_dir_var_name: Name of the env variable used to pass the
+                          build directory path to the invoked
+ tests.
+ injected_os: standard os module or a mock/stub for
+ testing.
+ injected_subprocess: standard subprocess module or a mock/stub
+                           for testing.
+ injected_build_dir_finder: function that determines the path to
+ the build directory.
+ """
+
+ self.os = injected_os
+ self.subprocess = injected_subprocess
+ self.build_dir_finder = injected_build_dir_finder
+ self.build_dir_var_name = build_dir_var_name
+ self.script_dir = script_dir
+
+ def _GetBuildDirForConfig(self, config):
+ """Returns the build directory for a given configuration."""
+
+ return self.build_dir_finder(self.os, self.script_dir, config)
+
+ def _Run(self, args):
+ """Runs the executable with given args (args[0] is the executable name).
+
+ Args:
+ args: Command line arguments for the process.
+
+ Returns:
+ Process's exit code if it exits normally, or -signal if the process is
+ killed by a signal.
+ """
+
+ if self.subprocess:
+ return self.subprocess.Popen(args).wait()
+ else:
+ return self.os.spawnv(self.os.P_WAIT, args[0], args)
+
+ def _RunBinaryTest(self, test):
+ """Runs the binary test given its path.
+
+ Args:
+ test: Path to the test binary.
+
+ Returns:
+ Process's exit code if it exits normally, or -signal if the process is
+ killed by a signal.
+ """
+
+ return self._Run([test])
+
+ def _RunPythonTest(self, test, build_dir):
+ """Runs the Python test script with the specified build directory.
+
+ Args:
+ test: Path to the test's Python script.
+ build_dir: Path to the directory where the test binary is to be found.
+
+ Returns:
+ Process's exit code if it exits normally, or -signal if the process is
+ killed by a signal.
+ """
+
+ old_build_dir = self.os.environ.get(self.build_dir_var_name)
+
+ try:
+ self.os.environ[self.build_dir_var_name] = build_dir
+
+ # If this script is run on a Windows machine that has no association
+ # between the .py extension and a python interpreter, simply passing
+ # the script name into subprocess.Popen/os.spawn will not work.
+ print 'Running %s . . .' % (test,)
+ return self._Run([sys.executable, test])
+
+ finally:
+ if old_build_dir is None:
+ del self.os.environ[self.build_dir_var_name]
+ else:
+ self.os.environ[self.build_dir_var_name] = old_build_dir
+
+ def _FindFilesByRegex(self, directory, regex):
+ """Returns files in a directory whose names match a regular expression.
+
+ Args:
+ directory: Path to the directory to search for files.
+ regex: Regular expression to filter file names.
+
+ Returns:
+ The list of the paths to the files in the directory.
+ """
+
+ return [self.os.path.join(directory, file_name)
+ for file_name in self.os.listdir(directory)
+ if re.search(regex, file_name)]
+
+ # TODO(vladl@google.com): Implement parsing of scons/SConscript to run all
+ # tests defined there when no tests are specified.
+ # TODO(vladl@google.com): Update the docstring after the code is changed to
+ # try to test all builds defined in scons/SConscript.
+ def GetTestsToRun(self,
+ args,
+ named_configurations,
+ built_configurations,
+ available_configurations=CONFIGS,
+ python_tests_to_skip=None):
+ """Determines what tests should be run.
+
+ Args:
+ args: The list of non-option arguments from the command line.
+ named_configurations: The list of configurations specified via -c or -a.
+ built_configurations: True if -b has been specified.
+ available_configurations: a list of configurations available on the
+ current platform, injectable for testing.
+ python_tests_to_skip: a collection of (configuration, python test name)s
+ that need to be skipped.
+
+ Returns:
+ A tuple with 2 elements: the list of Python tests to run and the list of
+ binary tests to run.
+ """
+
+ if named_configurations == 'all':
+ named_configurations = ','.join(available_configurations)
+
+ normalized_args = [self.os.path.normpath(arg) for arg in args]
+
+ # A final list of build directories which will be searched for the test
+ # binaries. First, add directories specified directly on the command
+ # line.
+ build_dirs = filter(self.os.path.isdir, normalized_args)
+
+ # Adds build directories specified via their build configurations using
+ # the -c or -a options.
+ if named_configurations:
+ build_dirs += [self._GetBuildDirForConfig(config)
+ for config in named_configurations.split(',')]
+
+ # Adds KNOWN BUILD DIRECTORIES if -b is specified.
+ if built_configurations:
+ build_dirs += [self._GetBuildDirForConfig(config)
+ for config in available_configurations
+ if self.os.path.isdir(self._GetBuildDirForConfig(config))]
+
+ # If no directories were specified either via -a, -b, -c, or directly, use
+ # the default configuration.
+ elif not build_dirs:
+ build_dirs = [self._GetBuildDirForConfig(available_configurations[0])]
+
+ # Makes sure there are no duplications.
+ build_dirs = sets.Set(build_dirs)
+
+ errors_found = False
+ listed_python_tests = [] # All Python tests listed on the command line.
+ listed_binary_tests = [] # All binary tests listed on the command line.
+
+ test_dir = self.os.path.normpath(self.os.path.join(self.script_dir, 'test'))
+
+ # Sifts through non-directory arguments fishing for any Python or binary
+ # tests and detecting errors.
+ for argument in sets.Set(normalized_args) - build_dirs:
+ if re.search(PYTHON_TEST_REGEX, argument):
+ python_path = self.os.path.join(test_dir,
+ self.os.path.basename(argument))
+ if self.os.path.isfile(python_path):
+ listed_python_tests.append(python_path)
+ else:
+ sys.stderr.write('Unable to find Python test %s' % argument)
+ errors_found = True
+ elif re.search(BINARY_TEST_REGEX, argument):
+ # This script also accepts binary test names prefixed with test/ for
+ # the convenience of typing them (can use path completions in the
+ # shell). Strips test/ prefix from the binary test names.
+ listed_binary_tests.append(self.os.path.basename(argument))
+ else:
+        sys.stderr.write('%s is neither a test nor a build directory' % argument)
+ errors_found = True
+
+ if errors_found:
+ return None
+
+ user_has_listed_tests = listed_python_tests or listed_binary_tests
+
+ if user_has_listed_tests:
+ selected_python_tests = listed_python_tests
+ else:
+ selected_python_tests = self._FindFilesByRegex(test_dir,
+ PYTHON_TEST_REGEX)
+
+ # TODO(vladl@google.com): skip unbuilt Python tests when -b is specified.
+ python_test_pairs = []
+ for directory in build_dirs:
+ for test in selected_python_tests:
+ config = _GetConfigFromBuildDir(directory)
+ file_name = os.path.basename(test)
+ if python_tests_to_skip and (config, file_name) in python_tests_to_skip:
+ print ('NOTE: %s is skipped for configuration %s, as it does not '
+ 'work there.' % (file_name, config))
+ else:
+ python_test_pairs.append((directory, test))
+
+ binary_test_pairs = []
+ for directory in build_dirs:
+ if user_has_listed_tests:
+ binary_test_pairs.extend(
+ [(directory, self.os.path.join(directory, test))
+ for test in listed_binary_tests])
+ else:
+ tests = self._FindFilesByRegex(directory, BINARY_TEST_SEARCH_REGEX)
+ binary_test_pairs.extend([(directory, test) for test in tests])
+
+ return (python_test_pairs, binary_test_pairs)
+
+ def RunTests(self, python_tests, binary_tests):
+ """Runs Python and binary tests and reports results to the standard output.
+
+ Args:
+ python_tests: List of Python tests to run in the form of tuples
+ (build directory, Python test script).
+ binary_tests: List of binary tests to run in the form of tuples
+ (build directory, binary file).
+
+ Returns:
+ The exit code the program should pass into sys.exit().
+ """
+
+ if python_tests or binary_tests:
+ results = []
+ for directory, test in python_tests:
+ results.append((directory,
+ test,
+ self._RunPythonTest(test, directory) == 0))
+ for directory, test in binary_tests:
+ results.append((directory,
+ self.os.path.basename(test),
+ self._RunBinaryTest(test) == 0))
+
+ failed = [(directory, test)
+ for (directory, test, success) in results
+ if not success]
+ print
+ print '%d tests run.' % len(results)
+ if failed:
+ print 'The following %d tests failed:' % len(failed)
+ for (directory, test) in failed:
+ print '%s in %s' % (test, directory)
+ return 1
+ else:
+ print 'All tests passed!'
+ else: # No tests defined
+ print 'Nothing to test - no tests specified!'
+
+ return 0
+
+
+def ParseArgs(project_name, argv=None, help_callback=None):
+ """Parses the options run_tests.py uses."""
+
+ # Suppresses lint warning on unused arguments. These arguments are
+ # required by optparse, even though they are unused.
+ # pylint: disable-msg=W0613
+ def PrintHelp(option, opt, value, parser):
+ print HELP_MSG % {'proj': project_name}
+ sys.exit(1)
+
+ parser = optparse.OptionParser()
+ parser.add_option('-c',
+ action='store',
+ dest='configurations',
+ default=None)
+ parser.add_option('-a',
+ action='store_const',
+ dest='configurations',
+ default=None,
+ const='all')
+ parser.add_option('-b',
+ action='store_const',
+ dest='built_configurations',
+ default=False,
+ const=True)
+ # Replaces the built-in help with ours.
+ parser.remove_option('-h')
+ parser.add_option('-h', '--help',
+ action='callback',
+ callback=help_callback or PrintHelp)
+ return parser.parse_args(argv)
diff --git a/Source/ThirdParty/gtest/test/run_tests_util_test.py b/Source/ThirdParty/gtest/test/run_tests_util_test.py
new file mode 100755
index 000000000..9c55726fd
--- /dev/null
+++ b/Source/ThirdParty/gtest/test/run_tests_util_test.py
@@ -0,0 +1,676 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Tests for run_tests_util.py test runner script."""
+
+__author__ = 'vladl@google.com (Vlad Losev)'
+
+import os
+import re
+import sets
+import unittest
+
+import run_tests_util
+
+
+GTEST_DBG_DIR = 'scons/build/dbg/gtest/scons'
+GTEST_OPT_DIR = 'scons/build/opt/gtest/scons'
+GTEST_OTHER_DIR = 'scons/build/other/gtest/scons'
+
+
+def AddExeExtension(path):
+ """Appends .exe to the path on Windows or Cygwin."""
+
+ if run_tests_util.IS_WINDOWS or run_tests_util.IS_CYGWIN:
+ return path + '.exe'
+ else:
+ return path
+
+
+class FakePath(object):
+ """A fake os.path module for testing."""
+
+ def __init__(self, current_dir=os.getcwd(), known_paths=None):
+ self.current_dir = current_dir
+ self.tree = {}
+ self.path_separator = os.sep
+
+ # known_paths contains either absolute or relative paths. Relative paths
+ # are absolutized with self.current_dir.
+ if known_paths:
+ self._AddPaths(known_paths)
+
+ def _AddPath(self, path):
+ ends_with_slash = path.endswith('/')
+ path = self.abspath(path)
+ if ends_with_slash:
+ path += self.path_separator
+ name_list = path.split(self.path_separator)
+ tree = self.tree
+ for name in name_list[:-1]:
+ if not name:
+ continue
+ if name in tree:
+ tree = tree[name]
+ else:
+ tree[name] = {}
+ tree = tree[name]
+
+ name = name_list[-1]
+ if name:
+ if name in tree:
+ assert tree[name] == 1
+ else:
+ tree[name] = 1
+
+ def _AddPaths(self, paths):
+ for path in paths:
+ self._AddPath(path)
+
+ def PathElement(self, path):
+ """Returns an internal representation of directory tree entry for path."""
+ tree = self.tree
+ name_list = self.abspath(path).split(self.path_separator)
+ for name in name_list:
+ if not name:
+ continue
+ tree = tree.get(name, None)
+ if tree is None:
+ break
+
+ return tree
+
+ # Silences pylint warning about using standard names.
+ # pylint: disable-msg=C6409
+ def normpath(self, path):
+ return os.path.normpath(path)
+
+ def abspath(self, path):
+ return self.normpath(os.path.join(self.current_dir, path))
+
+ def isfile(self, path):
+ return self.PathElement(self.abspath(path)) == 1
+
+ def isdir(self, path):
+ return type(self.PathElement(self.abspath(path))) == type(dict())
+
+ def basename(self, path):
+ return os.path.basename(path)
+
+ def dirname(self, path):
+ return os.path.dirname(path)
+
+ def join(self, *kargs):
+ return os.path.join(*kargs)
+
+
+class FakeOs(object):
+ """A fake os module for testing."""
+ P_WAIT = os.P_WAIT
+
+ def __init__(self, fake_path_module):
+ self.path = fake_path_module
+
+ # Some methods/attributes are delegated to the real os module.
+ self.environ = os.environ
+
+ # pylint: disable-msg=C6409
+ def listdir(self, path):
+ assert self.path.isdir(path)
+ return self.path.PathElement(path).iterkeys()
+
+ def spawnv(self, wait, executable, *kargs):
+ assert wait == FakeOs.P_WAIT
+ return self.spawn_impl(executable, kargs)
+
+
+class GetTestsToRunTest(unittest.TestCase):
+ """Exercises TestRunner.GetTestsToRun."""
+
+ def NormalizeGetTestsToRunResults(self, results):
+ """Normalizes path data returned from GetTestsToRun for comparison."""
+
+ def NormalizePythonTestPair(pair):
+ """Normalizes path data in the (directory, python_script) pair."""
+
+ return (os.path.normpath(pair[0]), os.path.normpath(pair[1]))
+
+ def NormalizeBinaryTestPair(pair):
+ """Normalizes path data in the (directory, binary_executable) pair."""
+
+ directory, executable = map(os.path.normpath, pair)
+
+ # On Windows and Cygwin, the test file names have the .exe extension, but
+ # they can be invoked either by name or by name+extension. Our test must
+ # accommodate both situations.
+ if run_tests_util.IS_WINDOWS or run_tests_util.IS_CYGWIN:
+ executable = re.sub(r'\.exe$', '', executable)
+ return (directory, executable)
+
+ python_tests = sets.Set(map(NormalizePythonTestPair, results[0]))
+ binary_tests = sets.Set(map(NormalizeBinaryTestPair, results[1]))
+ return (python_tests, binary_tests)
+
+ def AssertResultsEqual(self, results, expected):
+ """Asserts results returned by GetTestsToRun equal to expected results."""
+
+ self.assertEqual(self.NormalizeGetTestsToRunResults(results),
+ self.NormalizeGetTestsToRunResults(expected),
+ 'Incorrect set of tests returned:\n%s\nexpected:\n%s' %
+ (results, expected))
+
+ def setUp(self):
+ self.fake_os = FakeOs(FakePath(
+ current_dir=os.path.abspath(os.path.dirname(run_tests_util.__file__)),
+ known_paths=[AddExeExtension(GTEST_DBG_DIR + '/gtest_unittest'),
+ AddExeExtension(GTEST_OPT_DIR + '/gtest_unittest'),
+ 'test/gtest_color_test.py']))
+ self.fake_configurations = ['dbg', 'opt']
+ self.test_runner = run_tests_util.TestRunner(script_dir='.',
+ injected_os=self.fake_os,
+ injected_subprocess=None)
+
+ def testBinaryTestsOnly(self):
+ """Exercises GetTestsToRun with parameters designating binary tests only."""
+
+ # A default build.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_unittest'],
+ '',
+ False,
+ available_configurations=self.fake_configurations),
+ ([],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
+
+ # An explicitly specified directory.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ [GTEST_DBG_DIR, 'gtest_unittest'],
+ '',
+ False,
+ available_configurations=self.fake_configurations),
+ ([],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
+
+ # A particular configuration.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_unittest'],
+ 'other',
+ False,
+ available_configurations=self.fake_configurations),
+ ([],
+ [(GTEST_OTHER_DIR, GTEST_OTHER_DIR + '/gtest_unittest')]))
+
+ # All available configurations
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_unittest'],
+ 'all',
+ False,
+ available_configurations=self.fake_configurations),
+ ([],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+ (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
+
+ # All built configurations (unbuilt don't cause failure).
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_unittest'],
+ '',
+ True,
+ available_configurations=self.fake_configurations + ['unbuilt']),
+ ([],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+ (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
+
+ # A combination of an explicit directory and a configuration.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ [GTEST_DBG_DIR, 'gtest_unittest'],
+ 'opt',
+ False,
+ available_configurations=self.fake_configurations),
+ ([],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+ (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
+
+ # Same test specified in an explicit directory and via a configuration.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ [GTEST_DBG_DIR, 'gtest_unittest'],
+ 'dbg',
+ False,
+ available_configurations=self.fake_configurations),
+ ([],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
+
+ # All built configurations + explicit directory + explicit configuration.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ [GTEST_DBG_DIR, 'gtest_unittest'],
+ 'opt',
+ True,
+ available_configurations=self.fake_configurations),
+ ([],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+ (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
+
+ def testPythonTestsOnly(self):
+ """Exercises GetTestsToRun with parameters designating Python tests only."""
+
+ # A default build.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_color_test.py'],
+ '',
+ False,
+ available_configurations=self.fake_configurations),
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+ []))
+
+ # An explicitly specified directory.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ [GTEST_DBG_DIR, 'test/gtest_color_test.py'],
+ '',
+ False,
+ available_configurations=self.fake_configurations),
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+ []))
+
+ # A particular configuration.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_color_test.py'],
+ 'other',
+ False,
+ available_configurations=self.fake_configurations),
+ ([(GTEST_OTHER_DIR, 'test/gtest_color_test.py')],
+ []))
+
+ # All available configurations
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['test/gtest_color_test.py'],
+ 'all',
+ False,
+ available_configurations=self.fake_configurations),
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+ (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
+ []))
+
+ # All built configurations (unbuilt don't cause failure).
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_color_test.py'],
+ '',
+ True,
+ available_configurations=self.fake_configurations + ['unbuilt']),
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+ (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
+ []))
+
+ # A combination of an explicit directory and a configuration.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ [GTEST_DBG_DIR, 'gtest_color_test.py'],
+ 'opt',
+ False,
+ available_configurations=self.fake_configurations),
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+ (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
+ []))
+
+ # Same test specified in an explicit directory and via a configuration.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ [GTEST_DBG_DIR, 'gtest_color_test.py'],
+ 'dbg',
+ False,
+ available_configurations=self.fake_configurations),
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+ []))
+
+ # All built configurations + explicit directory + explicit configuration.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ [GTEST_DBG_DIR, 'gtest_color_test.py'],
+ 'opt',
+ True,
+ available_configurations=self.fake_configurations),
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+ (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
+ []))
+
+ def testCombinationOfBinaryAndPythonTests(self):
+ """Exercises GetTestsToRun with mixed binary/Python tests."""
+
+ # Use only default configuration for this test.
+
+ # Neither binary nor Python tests are specified so find all.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ [],
+ '',
+ False,
+ available_configurations=self.fake_configurations),
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
+
+ # Specifying both binary and Python tests.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_unittest', 'gtest_color_test.py'],
+ '',
+ False,
+ available_configurations=self.fake_configurations),
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
+
+ # Specifying binary tests suppresses Python tests.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_unittest'],
+ '',
+ False,
+ available_configurations=self.fake_configurations),
+ ([],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
+
+ # Specifying Python tests suppresses binary tests.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_color_test.py'],
+ '',
+ False,
+ available_configurations=self.fake_configurations),
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+ []))
+
+ def testIgnoresNonTestFiles(self):
+ """Verifies that GetTestsToRun ignores non-test files in the filesystem."""
+
+ self.fake_os = FakeOs(FakePath(
+ current_dir=os.path.abspath(os.path.dirname(run_tests_util.__file__)),
+ known_paths=[AddExeExtension(GTEST_DBG_DIR + '/gtest_nontest'),
+ 'test/']))
+ self.test_runner = run_tests_util.TestRunner(script_dir='.',
+ injected_os=self.fake_os,
+ injected_subprocess=None)
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ [],
+ '',
+ True,
+ available_configurations=self.fake_configurations),
+ ([], []))
+
+ def testWorksFromDifferentDir(self):
+ """Exercises GetTestsToRun from a directory different from run_test.py's."""
+
+    # Here we simulate a test script in directory /d/ called from the
+ # directory /a/b/c/.
+ self.fake_os = FakeOs(FakePath(
+ current_dir=os.path.abspath('/a/b/c'),
+ known_paths=[
+ '/a/b/c/',
+ AddExeExtension('/d/' + GTEST_DBG_DIR + '/gtest_unittest'),
+ AddExeExtension('/d/' + GTEST_OPT_DIR + '/gtest_unittest'),
+ '/d/test/gtest_color_test.py']))
+ self.fake_configurations = ['dbg', 'opt']
+ self.test_runner = run_tests_util.TestRunner(script_dir='/d/',
+ injected_os=self.fake_os,
+ injected_subprocess=None)
+ # A binary test.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_unittest'],
+ '',
+ False,
+ available_configurations=self.fake_configurations),
+ ([],
+ [('/d/' + GTEST_DBG_DIR, '/d/' + GTEST_DBG_DIR + '/gtest_unittest')]))
+
+ # A Python test.
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ ['gtest_color_test.py'],
+ '',
+ False,
+ available_configurations=self.fake_configurations),
+ ([('/d/' + GTEST_DBG_DIR, '/d/test/gtest_color_test.py')], []))
+
+ def testNonTestBinary(self):
+ """Exercises GetTestsToRun with a non-test parameter."""
+
+ self.assert_(
+ not self.test_runner.GetTestsToRun(
+ ['gtest_unittest_not_really'],
+ '',
+ False,
+ available_configurations=self.fake_configurations))
+
+ def testNonExistingPythonTest(self):
+ """Exercises GetTestsToRun with a non-existent Python test parameter."""
+
+ self.assert_(
+ not self.test_runner.GetTestsToRun(
+ ['nonexistent_test.py'],
+ '',
+ False,
+ available_configurations=self.fake_configurations))
+
+ if run_tests_util.IS_WINDOWS or run_tests_util.IS_CYGWIN:
+
+ def testDoesNotPickNonExeFilesOnWindows(self):
+ """Verifies that GetTestsToRun does not find _test files on Windows."""
+
+ self.fake_os = FakeOs(FakePath(
+ current_dir=os.path.abspath(os.path.dirname(run_tests_util.__file__)),
+ known_paths=['/d/' + GTEST_DBG_DIR + '/gtest_test', 'test/']))
+ self.test_runner = run_tests_util.TestRunner(script_dir='.',
+ injected_os=self.fake_os,
+ injected_subprocess=None)
+ self.AssertResultsEqual(
+ self.test_runner.GetTestsToRun(
+ [],
+ '',
+ True,
+ available_configurations=self.fake_configurations),
+ ([], []))
+
+
+class RunTestsTest(unittest.TestCase):
+ """Exercises TestRunner.RunTests."""
+
+ def SpawnSuccess(self, unused_executable, unused_argv):
+ """Fakes test success by returning 0 as an exit code."""
+
+ self.num_spawn_calls += 1
+ return 0
+
+ def SpawnFailure(self, unused_executable, unused_argv):
+ """Fakes test success by returning 1 as an exit code."""
+
+ self.num_spawn_calls += 1
+ return 1
+
+ def setUp(self):
+ self.fake_os = FakeOs(FakePath(
+ current_dir=os.path.abspath(os.path.dirname(run_tests_util.__file__)),
+ known_paths=[
+ AddExeExtension(GTEST_DBG_DIR + '/gtest_unittest'),
+ AddExeExtension(GTEST_OPT_DIR + '/gtest_unittest'),
+ 'test/gtest_color_test.py']))
+ self.fake_configurations = ['dbg', 'opt']
+ self.test_runner = run_tests_util.TestRunner(
+ script_dir=os.path.dirname(__file__) or '.',
+ injected_os=self.fake_os,
+ injected_subprocess=None)
+    self.num_spawn_calls = 0  # The number of calls made to the fake spawn.
+
+ def testRunPythonTestSuccess(self):
+ """Exercises RunTests to handle a Python test success."""
+
+ self.fake_os.spawn_impl = self.SpawnSuccess
+ self.assertEqual(
+ self.test_runner.RunTests(
+ [(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+ []),
+ 0)
+ self.assertEqual(self.num_spawn_calls, 1)
+
+ def testRunBinaryTestSuccess(self):
+ """Exercises RunTests to handle a binary test success."""
+
+ self.fake_os.spawn_impl = self.SpawnSuccess
+ self.assertEqual(
+ self.test_runner.RunTests(
+ [],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
+ 0)
+ self.assertEqual(self.num_spawn_calls, 1)
+
+  def testRunPythonTestFailure(self):
+ """Exercises RunTests to handle a Python test failure."""
+
+ self.fake_os.spawn_impl = self.SpawnFailure
+ self.assertEqual(
+ self.test_runner.RunTests(
+ [(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+ []),
+ 1)
+ self.assertEqual(self.num_spawn_calls, 1)
+
+ def testRunBinaryTestFailure(self):
+ """Exercises RunTests to handle a binary test failure."""
+
+ self.fake_os.spawn_impl = self.SpawnFailure
+ self.assertEqual(
+ self.test_runner.RunTests(
+ [],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
+ 1)
+ self.assertEqual(self.num_spawn_calls, 1)
+
+ def testCombinedTestSuccess(self):
+ """Exercises RunTests to handle a success of both Python and binary test."""
+
+ self.fake_os.spawn_impl = self.SpawnSuccess
+ self.assertEqual(
+ self.test_runner.RunTests(
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
+ 0)
+ self.assertEqual(self.num_spawn_calls, 2)
+
+ def testCombinedTestSuccessAndFailure(self):
+ """Exercises RunTests to handle a success of both Python and binary test."""
+
+ def SpawnImpl(executable, argv):
+ self.num_spawn_calls += 1
+ # Simulates failure of a Python test and success of a binary test.
+ if '.py' in executable or '.py' in argv[0]:
+ return 1
+ else:
+ return 0
+
+ self.fake_os.spawn_impl = SpawnImpl
+ self.assertEqual(
+ self.test_runner.RunTests(
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
+ 0)
+ self.assertEqual(self.num_spawn_calls, 2)
+
+
+class ParseArgsTest(unittest.TestCase):
+ """Exercises ParseArgs."""
+
+ def testNoOptions(self):
+ options, args = run_tests_util.ParseArgs('gtest', argv=['script.py'])
+ self.assertEqual(args, ['script.py'])
+ self.assert_(options.configurations is None)
+ self.assertFalse(options.built_configurations)
+
+ def testOptionC(self):
+ options, args = run_tests_util.ParseArgs(
+ 'gtest', argv=['script.py', '-c', 'dbg'])
+ self.assertEqual(args, ['script.py'])
+ self.assertEqual(options.configurations, 'dbg')
+ self.assertFalse(options.built_configurations)
+
+ def testOptionA(self):
+ options, args = run_tests_util.ParseArgs('gtest', argv=['script.py', '-a'])
+ self.assertEqual(args, ['script.py'])
+ self.assertEqual(options.configurations, 'all')
+ self.assertFalse(options.built_configurations)
+
+ def testOptionB(self):
+ options, args = run_tests_util.ParseArgs('gtest', argv=['script.py', '-b'])
+ self.assertEqual(args, ['script.py'])
+ self.assert_(options.configurations is None)
+ self.assertTrue(options.built_configurations)
+
+ def testOptionCAndOptionB(self):
+ options, args = run_tests_util.ParseArgs(
+ 'gtest', argv=['script.py', '-c', 'dbg', '-b'])
+ self.assertEqual(args, ['script.py'])
+ self.assertEqual(options.configurations, 'dbg')
+ self.assertTrue(options.built_configurations)
+
+ def testOptionH(self):
+ help_called = [False]
+
+ # Suppresses lint warning on unused arguments. These arguments are
+ # required by optparse, even though they are unused.
+ # pylint: disable-msg=W0613
+ def VerifyHelp(option, opt, value, parser):
+ help_called[0] = True
+
+ # Verifies that -h causes the help callback to be called.
+ help_called[0] = False
+ _, args = run_tests_util.ParseArgs(
+ 'gtest', argv=['script.py', '-h'], help_callback=VerifyHelp)
+ self.assertEqual(args, ['script.py'])
+ self.assertTrue(help_called[0])
+
+ # Verifies that --help causes the help callback to be called.
+ help_called[0] = False
+ _, args = run_tests_util.ParseArgs(
+ 'gtest', argv=['script.py', '--help'], help_callback=VerifyHelp)
+ self.assertEqual(args, ['script.py'])
+ self.assertTrue(help_called[0])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/runtests.sh b/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/runtests.sh
new file mode 100755
index 000000000..4a0d413e5
--- /dev/null
+++ b/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/runtests.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+#
+# Copyright 2008, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Executes the samples and tests for the Google Test Framework.
+
+# Help the dynamic linker find the path to the libraries.
+export DYLD_FRAMEWORK_PATH=$BUILT_PRODUCTS_DIR
+export DYLD_LIBRARY_PATH=$BUILT_PRODUCTS_DIR
+
+# Collect the test executables to run from the command-line arguments.
+test_executables=("$@")
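+# For illustration only, with hypothetical paths, the script might be invoked
+# as:
+#   ./runtests.sh "$BUILT_PRODUCTS_DIR/gtest_unittest" \
+#                 "$BUILT_PRODUCTS_DIR/sample1_unittest-static"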
+
+# Now execute each one in turn keeping track of how many succeeded and failed.
+succeeded=0
+failed=0
+failed_list=()
+for test in "${test_executables[@]}"; do
+ "$test"
+ result=$?
+ if [ $result -eq 0 ]; then
+ succeeded=$(( $succeeded + 1 ))
+ else
+ failed=$(( failed + 1 ))
+ failed_list="$failed_list $test"
+ fi
+done
+
+# Report the successes and failures to the console.
+echo "Tests complete with $succeeded successes and $failed failures."
+if [ $failed -ne 0 ]; then
+ echo "The following tests failed:"
+ echo $failed_list
+fi
+exit $failed
diff --git a/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget.cc b/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget.cc
new file mode 100644
index 000000000..bfc4e7fcf
--- /dev/null
+++ b/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget.cc
@@ -0,0 +1,63 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: preston.a.jackson@gmail.com (Preston Jackson)
+//
+// Google Test - FrameworkSample
+// widget.cc
+//
+
+// Widget is a very simple class used for demonstrating the use of gtest
+
+#include "widget.h"
+
+Widget::Widget(int number, const std::string& name)
+ : number_(number),
+ name_(name) {}
+
+Widget::~Widget() {}
+
+float Widget::GetFloatValue() const {
+ return number_;
+}
+
+int Widget::GetIntValue() const {
+ return static_cast<int>(number_);
+}
+
+std::string Widget::GetStringValue() const {
+ return name_;
+}
+
+void Widget::GetCharPtrValue(char* buffer, size_t max_size) const {
+  // Copy up to max_size - 1 characters of name_ into buffer, null-terminated.
+ strncpy(buffer, name_.c_str(), max_size-1);
+ buffer[max_size-1] = '\0';
+ return;
+}
diff --git a/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget.h b/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget.h
new file mode 100644
index 000000000..0c55cdc8c
--- /dev/null
+++ b/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget.h
@@ -0,0 +1,59 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: preston.a.jackson@gmail.com (Preston Jackson)
+//
+// Google Test - FrameworkSample
+// widget.h
+//
+
+// Widget is a very simple class used for demonstrating the use of gtest. It
+// simply stores two values, a string and a number, which are returned via
+// public accessors in multiple forms.
+
+#import <string>
+
+class Widget {
+ public:
+ Widget(int number, const std::string& name);
+ ~Widget();
+
+ // Public accessors to number data
+ float GetFloatValue() const;
+ int GetIntValue() const;
+
+ // Public accessors to the string data
+ std::string GetStringValue() const;
+ void GetCharPtrValue(char* buffer, size_t max_size) const;
+
+ private:
+ // Data members
+ float number_;
+ std::string name_;
+};
diff --git a/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget_test.cc b/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget_test.cc
new file mode 100644
index 000000000..61c0d2ffd
--- /dev/null
+++ b/Source/ThirdParty/gtest/xcode/Samples/FrameworkSample/widget_test.cc
@@ -0,0 +1,68 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: preston.a.jackson@gmail.com (Preston Jackson)
+//
+// Google Test - FrameworkSample
+// widget_test.cc
+//
+
+// This is a simple test file for the Widget class in the Widget.framework
+
+#include <string>
+#include <gtest/gtest.h>
+
+#include <Widget/widget.h>
+
+// This test verifies that the constructor sets the internal state of the
+// Widget class correctly.
+TEST(WidgetInitializerTest, TestConstructor) {
+ Widget widget(1.0f, "name");
+ EXPECT_FLOAT_EQ(1.0f, widget.GetFloatValue());
+ EXPECT_EQ(std::string("name"), widget.GetStringValue());
+}
+
+// This test verifies the conversion of the float and string values to int and
+// char*, respectively.
+TEST(WidgetInitializerTest, TestConversion) {
+ Widget widget(1.0f, "name");
+ EXPECT_EQ(1, widget.GetIntValue());
+
+ size_t max_size = 128;
+ char buffer[max_size];
+ widget.GetCharPtrValue(buffer, max_size);
+ EXPECT_STREQ("name", buffer);
+}
+
+// Use the Google Test main that is linked into the framework. It does something
+// like this:
+// int main(int argc, char** argv) {
+// testing::InitGoogleTest(&argc, argv);
+// return RUN_ALL_TESTS();
+// }
diff --git a/Source/ThirdParty/gtest/xcode/Scripts/runtests.sh b/Source/ThirdParty/gtest/xcode/Scripts/runtests.sh
new file mode 100755
index 000000000..3fc229f1d
--- /dev/null
+++ b/Source/ThirdParty/gtest/xcode/Scripts/runtests.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+#
+# Copyright 2008, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Executes the samples and tests for the Google Test Framework.
+
+# Help the dynamic linker find the path to the libraries.
+export DYLD_FRAMEWORK_PATH=$BUILT_PRODUCTS_DIR
+export DYLD_LIBRARY_PATH=$BUILT_PRODUCTS_DIR
+
+# List the test executables to run.
+test_executables=("$BUILT_PRODUCTS_DIR/gtest_unittest-framework"
+ "$BUILT_PRODUCTS_DIR/gtest_unittest"
+ "$BUILT_PRODUCTS_DIR/sample1_unittest-framework"
+ "$BUILT_PRODUCTS_DIR/sample1_unittest-static")
+
+# Now execute each one in turn keeping track of how many succeeded and failed.
+succeeded=0
+failed=0
+failed_list=()
+for test in "${test_executables[@]}"; do
+ "$test"
+ result=$?
+ if [ $result -eq 0 ]; then
+ succeeded=$(( $succeeded + 1 ))
+ else
+ failed=$(( failed + 1 ))
+ failed_list="$failed_list $test"
+ fi
+done
+
+# Report the successes and failures to the console.
+echo "Tests complete with $succeeded successes and $failed failures."
+if [ $failed -ne 0 ]; then
+ echo "The following tests failed:"
+ echo $failed_list
+fi
+exit $failed
diff --git a/Source/ThirdParty/gtest/xcode/Scripts/versiongenerate.py b/Source/ThirdParty/gtest/xcode/Scripts/versiongenerate.py
new file mode 100644
index 000000000..81de8c96a
--- /dev/null
+++ b/Source/ThirdParty/gtest/xcode/Scripts/versiongenerate.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+#
+# Copyright 2008, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""A script to prepare version informtion for use the gtest Info.plist file.
+
+ This script extracts the version information from the configure.ac file and
+ uses it to generate a header file containing the same information. The
+  #defines in this header file will be included during the generation of
+ the Info.plist of the framework, giving the correct value to the version
+ shown in the Finder.
+
+ This script makes the following assumptions (these are faults of the script,
+  not problems with Autoconf):
+ 1. The AC_INIT macro will be contained within the first 1024 characters
+ of configure.ac
+ 2. The version string will be 3 integers separated by periods and will be
+     surrounded by square brackets, "[" and "]" (e.g. [1.0.1]). The first
+ segment represents the major version, the second represents the minor
+ version and the third represents the fix version.
+ 3. No ")" character exists between the opening "(" and closing ")" of
+ AC_INIT, including in comments and character strings.
+"""
+
+import sys
+import re
+
+# Read the command line arguments (the input directory holding configure.ac
+# and the output directory for Version.h).
+if (len(sys.argv) < 3):
+ print "Usage: versiongenerate.py input_dir output_dir"
+ sys.exit(1)
+else:
+ input_dir = sys.argv[1]
+ output_dir = sys.argv[2]
+
+# Read the first 1024 characters of the configure.ac file
+config_file = open("%s/configure.ac" % input_dir, 'r')
+buffer_size = 1024
+opening_string = config_file.read(buffer_size)
+config_file.close()
+
+# Extract the version string from the AC_INIT macro
+# The following init_expression means:
+#  Extract three integers separated by periods and surrounded by square
+#  brackets (e.g. "[1.0.1]") between "AC_INIT(" and ")". Do not be greedy
+# (*? is the non-greedy flag) since that would pull in everything between
+# the first "(" and the last ")" in the file.
+version_expression = re.compile(r"AC_INIT\(.*?\[(\d+)\.(\d+)\.(\d+)\].*?\)",
+ re.DOTALL)
+version_values = version_expression.search(opening_string)
+major_version = version_values.group(1)
+minor_version = version_values.group(2)
+fix_version = version_values.group(3)
+
+# Write the version information to a header file to be included in the
+# Info.plist file.
+file_data = """//
+// DO NOT MODIFY THIS FILE (but you can delete it)
+//
+// This file is autogenerated by the versiongenerate.py script. This script
+// is executed in a "Run Script" build phase when creating gtest.framework. This
+// header file is not used during compilation of C source. Rather, it simply
+// defines some version strings for substitution in the Info.plist. Because of
+// this, we are not restricted to C syntax nor are we using include guards.
+//
+
+#define GTEST_VERSIONINFO_SHORT %s.%s
+#define GTEST_VERSIONINFO_LONG %s.%s.%s
+
+""" % (major_version, minor_version, major_version, minor_version, fix_version)
+version_file = open("%s/Version.h" % output_dir, 'w')
+version_file.write(file_data)
+version_file.close()