Diffstat (limited to 'googletest/test')
-rw-r--r--  googletest/test/BUILD.bazel  7
-rwxr-xr-x  googletest/test/googletest-break-on-failure-unittest.py  79
-rw-r--r--  googletest/test/googletest-break-on-failure-unittest_.cc  25
-rwxr-xr-x  googletest/test/googletest-catch-exceptions-test.py  261
-rw-r--r--  googletest/test/googletest-catch-exceptions-test_.cc  16
-rwxr-xr-x  googletest/test/googletest-color-test.py  70
-rw-r--r--  googletest/test/googletest-color-test_.cc  4
-rw-r--r--  googletest/test/googletest-death-test-test.cc  448
-rw-r--r--  googletest/test/googletest-death-test_ex_test.cc  29
-rwxr-xr-x  googletest/test/googletest-env-var-test.py  4
-rw-r--r--  googletest/test/googletest-env-var-test_.cc  30
-rwxr-xr-x  googletest/test/googletest-failfast-unittest.py  203
-rw-r--r--  googletest/test/googletest-failfast-unittest_.cc  3
-rw-r--r--  googletest/test/googletest-filepath-test.cc  220
-rwxr-xr-x  googletest/test/googletest-filter-unittest.py  439
-rw-r--r--  googletest/test/googletest-filter-unittest_.cc  71
-rw-r--r--  googletest/test/googletest-global-environment-unittest.py  81
-rw-r--r--  googletest/test/googletest-json-outfiles-test.py  165
-rw-r--r--  googletest/test/googletest-json-output-unittest.py  1274
-rwxr-xr-x  googletest/test/googletest-list-tests-unittest.py  102
-rw-r--r--  googletest/test/googletest-list-tests-unittest_.cc  93
-rw-r--r--  googletest/test/googletest-listener-test.cc  45
-rw-r--r--  googletest/test/googletest-message-test.cc  44
-rw-r--r--  googletest/test/googletest-options-test.cc  87
-rw-r--r--  googletest/test/googletest-output-test-golden-lin.txt  30
-rwxr-xr-x  googletest/test/googletest-output-test.py  175
-rw-r--r--  googletest/test/googletest-output-test_.cc  280
-rw-r--r--  googletest/test/googletest-param-test-invalid-name1-test.py  4
-rw-r--r--  googletest/test/googletest-param-test-invalid-name1-test_.cc  8
-rw-r--r--  googletest/test/googletest-param-test-invalid-name2-test.py  5
-rw-r--r--  googletest/test/googletest-param-test-invalid-name2-test_.cc  11
-rw-r--r--  googletest/test/googletest-param-test-test.cc  218
-rw-r--r--  googletest/test/googletest-param-test-test.h  6
-rw-r--r--  googletest/test/googletest-param-test2-test.cc  9
-rw-r--r--  googletest/test/googletest-port-test.cc  237
-rw-r--r--  googletest/test/googletest-printers-test.cc  506
-rwxr-xr-x  googletest/test/googletest-setuptestsuite-test.py  18
-rw-r--r--  googletest/test/googletest-setuptestsuite-test_.cc  9
-rwxr-xr-x  googletest/test/googletest-shuffle-test.py  203
-rw-r--r--  googletest/test/googletest-shuffle-test_.cc  4
-rw-r--r--  googletest/test/googletest-test-part-test.cc  22
-rwxr-xr-x  googletest/test/googletest-throw-on-failure-test.py  70
-rw-r--r--  googletest/test/googletest-throw-on-failure-test_.cc  10
-rwxr-xr-x  googletest/test/googletest-uninitialized-test.py  9
-rw-r--r--  googletest/test/googletest-uninitialized-test_.cc  5
-rw-r--r--  googletest/test/gtest-typed-test2_test.cc  3
-rw-r--r--  googletest/test/gtest-typed-test_test.cc  58
-rw-r--r--  googletest/test/gtest-typed-test_test.h  11
-rw-r--r--  googletest/test/gtest-unittest-api_test.cc  24
-rw-r--r--  googletest/test/gtest_assert_by_exception_test.cc  24
-rw-r--r--  googletest/test/gtest_dirs_test.cc  101
-rw-r--r--  googletest/test/gtest_environment_test.cc  23
-rwxr-xr-x  googletest/test/gtest_help_test.py  155
-rw-r--r--  googletest/test/gtest_help_test_.cc  3
-rw-r--r--  googletest/test/gtest_json_test_utils.py  4
-rw-r--r--  googletest/test/gtest_list_output_unittest.py  29
-rw-r--r--  googletest/test/gtest_main_unittest.cc  4
-rw-r--r--  googletest/test/gtest_pred_impl_unittest.cc  1728
-rw-r--r--  googletest/test/gtest_premature_exit_test.cc  22
-rw-r--r--  googletest/test/gtest_repeat_test.cc  69
-rwxr-xr-x  googletest/test/gtest_skip_check_output_test.py  5
-rwxr-xr-x  googletest/test/gtest_skip_environment_check_output_test.py  5
-rw-r--r--  googletest/test/gtest_skip_in_environment_setup_test.cc  1
-rw-r--r--  googletest/test/gtest_skip_test.cc  8
-rw-r--r--  googletest/test/gtest_sole_header_test.cc  4
-rw-r--r--  googletest/test/gtest_stress_test.cc  31
-rw-r--r--  googletest/test/gtest_test_macro_stack_footprint_test.cc  58
-rwxr-xr-x  googletest/test/gtest_test_utils.py  124
-rwxr-xr-x  googletest/test/gtest_testbridge_test.py  4
-rw-r--r--  googletest/test/gtest_testbridge_test_.cc  1
-rw-r--r--  googletest/test/gtest_throw_on_failure_ex_test.cc  16
-rw-r--r--  googletest/test/gtest_unittest.cc  2079
-rw-r--r--  googletest/test/gtest_xml_outfile2_test_.cc  38
-rwxr-xr-x  googletest/test/gtest_xml_outfiles_test.py  44
-rwxr-xr-x  googletest/test/gtest_xml_output_unittest.py  264
-rw-r--r--  googletest/test/gtest_xml_output_unittest_.cc  10
-rwxr-xr-x  googletest/test/gtest_xml_test_utils.py  159
-rw-r--r--  googletest/test/production.h  1
78 files changed, 5497 insertions, 5252 deletions
diff --git a/googletest/test/BUILD.bazel b/googletest/test/BUILD.bazel
index b06a00a1..1890b6ff 100644
--- a/googletest/test/BUILD.bazel
+++ b/googletest/test/BUILD.bazel
@@ -30,7 +30,6 @@
#
# Bazel BUILD for The Google C++ Testing Framework (Google Test)
-load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_test")
load("@rules_python//python:defs.bzl", "py_library", "py_test")
licenses(["notice"])
@@ -65,6 +64,7 @@ cc_test(
"googletest-global-environment-unittest_.cc",
"googletest-break-on-failure-unittest_.cc",
"googletest-listener-test.cc",
+ "googletest-message-test.cc",
"googletest-output-test_.cc",
"googletest-list-tests-unittest_.cc",
"googletest-shuffle-test_.cc",
@@ -95,6 +95,7 @@ cc_test(
"googletest/test",
],
linkopts = select({
+ "//:qnx": [],
"//:windows": [],
"//conditions:default": ["-pthread"],
}),
@@ -174,6 +175,10 @@ py_test(
name = "gtest_help_test",
size = "small",
srcs = ["gtest_help_test.py"],
+ args = select({
+ "//:has_absl": ["--has_absl_flags"],
+ "//conditions:default": [],
+ }),
data = [":gtest_help_test_"],
deps = [":gtest_test_utils"],
)
diff --git a/googletest/test/googletest-break-on-failure-unittest.py b/googletest/test/googletest-break-on-failure-unittest.py
index a5dfbc69..e314b5cc 100755
--- a/googletest/test/googletest-break-on-failure-unittest.py
+++ b/googletest/test/googletest-break-on-failure-unittest.py
@@ -39,7 +39,7 @@ Google Test) with different environments and command line flags.
"""
import os
-import gtest_test_utils
+from googletest.test import gtest_test_utils
# Constants.
@@ -59,7 +59,8 @@ CATCH_EXCEPTIONS_ENV_VAR = 'GTEST_CATCH_EXCEPTIONS'
# Path to the googletest-break-on-failure-unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
- 'googletest-break-on-failure-unittest_')
+ 'googletest-break-on-failure-unittest_'
+)
environ = gtest_test_utils.environ
@@ -87,22 +88,26 @@ def Run(command):
class GTestBreakOnFailureUnitTest(gtest_test_utils.TestCase):
- """Tests using the GTEST_BREAK_ON_FAILURE environment variable or
+ """Unit test for Google Test's break-on-failure mode.
+
+ Tests using the GTEST_BREAK_ON_FAILURE environment variable or
the --gtest_break_on_failure flag to turn assertion failures into
segmentation faults.
"""
def RunAndVerify(self, env_var_value, flag_value, expect_seg_fault):
- """Runs googletest-break-on-failure-unittest_ and verifies that it does
+ """Runs googletest-break-on-failure-unittest_ and verifies its behavior.
+
+ Runs googletest-break-on-failure-unittest_ and verifies that it does
(or does not) have a seg-fault.
Args:
env_var_value: value of the GTEST_BREAK_ON_FAILURE environment
- variable; None if the variable should be unset.
- flag_value: value of the --gtest_break_on_failure flag;
- None if the flag should not be present.
- expect_seg_fault: 1 if the program is expected to generate a seg-fault;
- 0 otherwise.
+ variable; None if the variable should be unset.
+ flag_value: value of the --gtest_break_on_failure flag; None if the
+ flag should not be present.
+ expect_seg_fault: 1 if the program is expected to generate a seg-fault; 0
+ otherwise.
"""
SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, env_var_value)
@@ -132,74 +137,56 @@ class GTestBreakOnFailureUnitTest(gtest_test_utils.TestCase):
SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, None)
- msg = ('when %s%s, an assertion failure in "%s" %s cause a seg-fault.' %
- (BREAK_ON_FAILURE_ENV_VAR, env_var_value_msg, ' '.join(command),
- should_or_not))
- self.assert_(has_seg_fault == expect_seg_fault, msg)
+ msg = 'when %s%s, an assertion failure in "%s" %s cause a seg-fault.' % (
+ BREAK_ON_FAILURE_ENV_VAR,
+ env_var_value_msg,
+ ' '.join(command),
+ should_or_not,
+ )
+ self.assertTrue(has_seg_fault == expect_seg_fault, msg)
def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
- self.RunAndVerify(env_var_value=None,
- flag_value=None,
- expect_seg_fault=0)
+ self.RunAndVerify(env_var_value=None, flag_value=None, expect_seg_fault=0)
def testEnvVar(self):
"""Tests using the GTEST_BREAK_ON_FAILURE environment variable."""
- self.RunAndVerify(env_var_value='0',
- flag_value=None,
- expect_seg_fault=0)
- self.RunAndVerify(env_var_value='1',
- flag_value=None,
- expect_seg_fault=1)
+ self.RunAndVerify(env_var_value='0', flag_value=None, expect_seg_fault=0)
+ self.RunAndVerify(env_var_value='1', flag_value=None, expect_seg_fault=1)
def testFlag(self):
"""Tests using the --gtest_break_on_failure flag."""
- self.RunAndVerify(env_var_value=None,
- flag_value='0',
- expect_seg_fault=0)
- self.RunAndVerify(env_var_value=None,
- flag_value='1',
- expect_seg_fault=1)
+ self.RunAndVerify(env_var_value=None, flag_value='0', expect_seg_fault=0)
+ self.RunAndVerify(env_var_value=None, flag_value='1', expect_seg_fault=1)
def testFlagOverridesEnvVar(self):
"""Tests that the flag overrides the environment variable."""
- self.RunAndVerify(env_var_value='0',
- flag_value='0',
- expect_seg_fault=0)
- self.RunAndVerify(env_var_value='0',
- flag_value='1',
- expect_seg_fault=1)
- self.RunAndVerify(env_var_value='1',
- flag_value='0',
- expect_seg_fault=0)
- self.RunAndVerify(env_var_value='1',
- flag_value='1',
- expect_seg_fault=1)
+ self.RunAndVerify(env_var_value='0', flag_value='0', expect_seg_fault=0)
+ self.RunAndVerify(env_var_value='0', flag_value='1', expect_seg_fault=1)
+ self.RunAndVerify(env_var_value='1', flag_value='0', expect_seg_fault=0)
+ self.RunAndVerify(env_var_value='1', flag_value='1', expect_seg_fault=1)
def testBreakOnFailureOverridesThrowOnFailure(self):
"""Tests that gtest_break_on_failure overrides gtest_throw_on_failure."""
SetEnvVar(THROW_ON_FAILURE_ENV_VAR, '1')
try:
- self.RunAndVerify(env_var_value=None,
- flag_value='1',
- expect_seg_fault=1)
+ self.RunAndVerify(env_var_value=None, flag_value='1', expect_seg_fault=1)
finally:
SetEnvVar(THROW_ON_FAILURE_ENV_VAR, None)
if IS_WINDOWS:
+
def testCatchExceptionsDoesNotInterfere(self):
"""Tests that gtest_catch_exceptions doesn't interfere."""
SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, '1')
try:
- self.RunAndVerify(env_var_value='1',
- flag_value='1',
- expect_seg_fault=1)
+ self.RunAndVerify(env_var_value='1', flag_value='1', expect_seg_fault=1)
finally:
SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, None)
diff --git a/googletest/test/googletest-break-on-failure-unittest_.cc b/googletest/test/googletest-break-on-failure-unittest_.cc
index f84957a2..337e34c3 100644
--- a/googletest/test/googletest-break-on-failure-unittest_.cc
+++ b/googletest/test/googletest-break-on-failure-unittest_.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// Unit test for Google Test's break-on-failure mode.
//
// A user can ask Google Test to seg-fault when an assertion fails, using
@@ -40,35 +39,33 @@
#include "gtest/gtest.h"
-#if GTEST_OS_WINDOWS
-# include <windows.h>
-# include <stdlib.h>
+#ifdef GTEST_OS_WINDOWS
+#include <stdlib.h>
+#include <windows.h>
#endif
namespace {
// A test that's expected to fail.
-TEST(Foo, Bar) {
- EXPECT_EQ(2, 3);
-}
+TEST(Foo, Bar) { EXPECT_EQ(2, 3); }
-#if GTEST_HAS_SEH && !GTEST_OS_WINDOWS_MOBILE
+#if GTEST_HAS_SEH && !defined(GTEST_OS_WINDOWS_MOBILE)
// On Windows Mobile global exception handlers are not supported.
-LONG WINAPI ExitWithExceptionCode(
- struct _EXCEPTION_POINTERS* exception_pointers) {
+LONG WINAPI
+ExitWithExceptionCode(struct _EXCEPTION_POINTERS* exception_pointers) {
exit(exception_pointers->ExceptionRecord->ExceptionCode);
}
#endif
} // namespace
-int main(int argc, char **argv) {
-#if GTEST_OS_WINDOWS
+int main(int argc, char** argv) {
+#ifdef GTEST_OS_WINDOWS
// Suppresses display of the Windows error dialog upon encountering
// a general protection fault (segment violation).
SetErrorMode(SEM_NOGPFAULTERRORBOX | SEM_FAILCRITICALERRORS);
-# if GTEST_HAS_SEH && !GTEST_OS_WINDOWS_MOBILE
+#if GTEST_HAS_SEH && !defined(GTEST_OS_WINDOWS_MOBILE)
// The default unhandled exception filter does not always exit
// with the exception code as exit code - for example it exits with
@@ -78,7 +75,7 @@ int main(int argc, char **argv) {
// exceptions.
SetUnhandledExceptionFilter(ExitWithExceptionCode);
-# endif
+#endif
#endif // GTEST_OS_WINDOWS
testing::InitGoogleTest(&argc, argv);
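
A recurring mechanical change in this file and the C++ files below is the move from
value checks such as "#if GTEST_OS_WINDOWS" to definedness checks ("#ifdef" /
"defined()"). The likely motivation is that "#if" silently treats an undefined macro
as 0 and trips -Wundef, while a definedness check does neither. A minimal standalone
sketch of the difference, using an illustrative macro name rather than a real gtest one:

    // demo_ifdef.cc - build with: c++ -Wundef demo_ifdef.cc
    #include <cstdio>

    int main() {
    #ifdef EXAMPLE_OS_QNX  // definedness check: quiet whether or not defined
      std::printf("QNX\n");
    #endif
    #if defined(EXAMPLE_OS_QNX) && !defined(EXAMPLE_OS_QNX_MOBILE)
      std::printf("QNX, not mobile\n");  // the combined form used in the diff
    #endif
      // By contrast, a value check like "#if EXAMPLE_OS_QNX" would warn under
      // -Wundef here, because the macro is not defined in this translation unit.
      return 0;
    }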
diff --git a/googletest/test/googletest-catch-exceptions-test.py b/googletest/test/googletest-catch-exceptions-test.py
index 94a5b33f..180e18de 100755
--- a/googletest/test/googletest-catch-exceptions-test.py
+++ b/googletest/test/googletest-catch-exceptions-test.py
@@ -35,7 +35,7 @@ googletest-catch-exceptions-ex-test_ (programs written with
Google Test) and verifies their output.
"""
-import gtest_test_utils
+from googletest.test import gtest_test_utils
# Constants.
FLAG_PREFIX = '--gtest_'
@@ -46,12 +46,14 @@ FILTER_FLAG = FLAG_PREFIX + 'filter'
# Path to the googletest-catch-exceptions-ex-test_ binary, compiled with
# exceptions enabled.
EX_EXE_PATH = gtest_test_utils.GetTestExecutablePath(
- 'googletest-catch-exceptions-ex-test_')
+ 'googletest-catch-exceptions-ex-test_'
+)
# Path to the googletest-catch-exceptions-test_ binary, compiled with
# exceptions disabled.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
- 'googletest-catch-exceptions-no-ex-test_')
+ 'googletest-catch-exceptions-no-ex-test_'
+)
environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar
@@ -64,7 +66,8 @@ SetEnvVar = gtest_test_utils.SetEnvVar
SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
TEST_LIST = gtest_test_utils.Subprocess(
- [EXE_PATH, LIST_TESTS_FLAG], env=environ).output
+ [EXE_PATH, LIST_TESTS_FLAG], env=environ
+).output
SUPPORTS_SEH_EXCEPTIONS = 'ThrowsSehException' in TEST_LIST
@@ -72,33 +75,47 @@ if SUPPORTS_SEH_EXCEPTIONS:
BINARY_OUTPUT = gtest_test_utils.Subprocess([EXE_PATH], env=environ).output
EX_BINARY_OUTPUT = gtest_test_utils.Subprocess(
- [EX_EXE_PATH], env=environ).output
+ [EX_EXE_PATH], env=environ
+).output
# The tests.
if SUPPORTS_SEH_EXCEPTIONS:
- # pylint:disable-msg=C6302
+
class CatchSehExceptionsTest(gtest_test_utils.TestCase):
"""Tests exception-catching behavior."""
-
def TestSehExceptions(self, test_output):
- self.assert_('SEH exception with code 0x2a thrown '
- 'in the test fixture\'s constructor'
- in test_output)
- self.assert_('SEH exception with code 0x2a thrown '
- 'in the test fixture\'s destructor'
- in test_output)
- self.assert_('SEH exception with code 0x2a thrown in SetUpTestSuite()'
- in test_output)
- self.assert_('SEH exception with code 0x2a thrown in TearDownTestSuite()'
- in test_output)
- self.assert_('SEH exception with code 0x2a thrown in SetUp()'
- in test_output)
- self.assert_('SEH exception with code 0x2a thrown in TearDown()'
- in test_output)
- self.assert_('SEH exception with code 0x2a thrown in the test body'
- in test_output)
+ self.assertIn(
+ (
+ 'SEH exception with code 0x2a thrown '
+ "in the test fixture's constructor"
+ ),
+ test_output,
+ )
+ self.assertIn(
+ (
+ 'SEH exception with code 0x2a thrown '
+ "in the test fixture's destructor"
+ ),
+ test_output,
+ )
+ self.assertIn(
+ 'SEH exception with code 0x2a thrown in SetUpTestSuite()', test_output
+ )
+ self.assertIn(
+ 'SEH exception with code 0x2a thrown in TearDownTestSuite()',
+ test_output,
+ )
+ self.assertIn(
+ 'SEH exception with code 0x2a thrown in SetUp()', test_output
+ )
+ self.assertIn(
+ 'SEH exception with code 0x2a thrown in TearDown()', test_output
+ )
+ self.assertIn(
+ 'SEH exception with code 0x2a thrown in the test body', test_output
+ )
def testCatchesSehExceptionsWithCxxExceptionsEnabled(self):
self.TestSehExceptions(EX_BINARY_OUTPUT)
@@ -110,126 +127,188 @@ if SUPPORTS_SEH_EXCEPTIONS:
class CatchCxxExceptionsTest(gtest_test_utils.TestCase):
"""Tests C++ exception-catching behavior.
- Tests in this test case verify that:
- * C++ exceptions are caught and logged as C++ (not SEH) exceptions
- * Exception thrown affect the remainder of the test work flow in the
- expected manner.
+ Tests in this test case verify that:
+ * C++ exceptions are caught and logged as C++ (not SEH) exceptions
+ * Exception thrown affect the remainder of the test work flow in the
+ expected manner.
"""
def testCatchesCxxExceptionsInFixtureConstructor(self):
self.assertTrue(
'C++ exception with description '
'"Standard C++ exception" thrown '
- 'in the test fixture\'s constructor' in EX_BINARY_OUTPUT,
- EX_BINARY_OUTPUT)
- self.assert_('unexpected' not in EX_BINARY_OUTPUT,
- 'This failure belongs in this test only if '
- '"CxxExceptionInConstructorTest" (no quotes) '
- 'appears on the same line as words "called unexpectedly"')
-
- if ('CxxExceptionInDestructorTest.ThrowsExceptionInDestructor' in
- EX_BINARY_OUTPUT):
+ "in the test fixture's constructor"
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
+ self.assertTrue(
+ 'unexpected' not in EX_BINARY_OUTPUT,
+ (
+ 'This failure belongs in this test only if '
+ '"CxxExceptionInConstructorTest" (no quotes) '
+ 'appears on the same line as words "called unexpectedly"'
+ ),
+ )
+
+ if (
+ 'CxxExceptionInDestructorTest.ThrowsExceptionInDestructor'
+ in EX_BINARY_OUTPUT
+ ):
def testCatchesCxxExceptionsInFixtureDestructor(self):
self.assertTrue(
'C++ exception with description '
'"Standard C++ exception" thrown '
- 'in the test fixture\'s destructor' in EX_BINARY_OUTPUT,
- EX_BINARY_OUTPUT)
+ "in the test fixture's destructor"
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
self.assertTrue(
'CxxExceptionInDestructorTest::TearDownTestSuite() '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ 'called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
def testCatchesCxxExceptionsInSetUpTestCase(self):
self.assertTrue(
'C++ exception with description "Standard C++ exception"'
- ' thrown in SetUpTestSuite()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
- self.assertTrue(
- 'CxxExceptionInConstructorTest::TearDownTestSuite() '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ ' thrown in SetUpTestSuite()'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
self.assertTrue(
- 'CxxExceptionInSetUpTestSuiteTest constructor '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
- self.assertTrue(
- 'CxxExceptionInSetUpTestSuiteTest destructor '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
- self.assertTrue(
- 'CxxExceptionInSetUpTestSuiteTest::SetUp() '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
- self.assertTrue(
- 'CxxExceptionInSetUpTestSuiteTest::TearDown() '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
- self.assertTrue(
- 'CxxExceptionInSetUpTestSuiteTest test body '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ 'CxxExceptionInConstructorTest::TearDownTestSuite() called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
+ self.assertFalse(
+ 'CxxExceptionInSetUpTestSuiteTest constructor called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
+ self.assertFalse(
+ 'CxxExceptionInSetUpTestSuiteTest destructor called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
+ self.assertFalse(
+ 'CxxExceptionInSetUpTestSuiteTest::SetUp() called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
+ self.assertFalse(
+ 'CxxExceptionInSetUpTestSuiteTest::TearDown() called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
+ self.assertFalse(
+ 'CxxExceptionInSetUpTestSuiteTest test body called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
def testCatchesCxxExceptionsInTearDownTestCase(self):
self.assertTrue(
'C++ exception with description "Standard C++ exception"'
- ' thrown in TearDownTestSuite()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ ' thrown in TearDownTestSuite()'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
def testCatchesCxxExceptionsInSetUp(self):
self.assertTrue(
'C++ exception with description "Standard C++ exception"'
- ' thrown in SetUp()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ ' thrown in SetUp()'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
+ self.assertTrue(
+ 'CxxExceptionInSetUpTest::TearDownTestSuite() called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
self.assertTrue(
- 'CxxExceptionInSetUpTest::TearDownTestSuite() '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ 'CxxExceptionInSetUpTest destructor called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
self.assertTrue(
- 'CxxExceptionInSetUpTest destructor '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ 'CxxExceptionInSetUpTest::TearDown() called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
self.assertTrue(
- 'CxxExceptionInSetUpTest::TearDown() '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
- self.assert_('unexpected' not in EX_BINARY_OUTPUT,
- 'This failure belongs in this test only if '
- '"CxxExceptionInSetUpTest" (no quotes) '
- 'appears on the same line as words "called unexpectedly"')
+ 'unexpected' not in EX_BINARY_OUTPUT,
+ (
+ 'This failure belongs in this test only if '
+ '"CxxExceptionInSetUpTest" (no quotes) '
+ 'appears on the same line as words "called unexpectedly"'
+ ),
+ )
def testCatchesCxxExceptionsInTearDown(self):
self.assertTrue(
'C++ exception with description "Standard C++ exception"'
- ' thrown in TearDown()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ ' thrown in TearDown()'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
self.assertTrue(
- 'CxxExceptionInTearDownTest::TearDownTestSuite() '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ 'CxxExceptionInTearDownTest::TearDownTestSuite() called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
self.assertTrue(
- 'CxxExceptionInTearDownTest destructor '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ 'CxxExceptionInTearDownTest destructor called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
def testCatchesCxxExceptionsInTestBody(self):
self.assertTrue(
'C++ exception with description "Standard C++ exception"'
- ' thrown in the test body' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ ' thrown in the test body'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
self.assertTrue(
- 'CxxExceptionInTestBodyTest::TearDownTestSuite() '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ 'CxxExceptionInTestBodyTest::TearDownTestSuite() called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
self.assertTrue(
- 'CxxExceptionInTestBodyTest destructor '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ 'CxxExceptionInTestBodyTest destructor called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
self.assertTrue(
- 'CxxExceptionInTestBodyTest::TearDown() '
- 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ 'CxxExceptionInTestBodyTest::TearDown() called as expected.'
+ in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT,
+ )
def testCatchesNonStdCxxExceptions(self):
self.assertTrue(
'Unknown C++ exception thrown in the test body' in EX_BINARY_OUTPUT,
- EX_BINARY_OUTPUT)
+ EX_BINARY_OUTPUT,
+ )
def testUnhandledCxxExceptionsAbortTheProgram(self):
# Filters out SEH exception tests on Windows. Unhandled SEH exceptions
# cause tests to show pop-up windows there.
- FITLER_OUT_SEH_TESTS_FLAG = FILTER_FLAG + '=-*Seh*'
+ filter_out_seh_tests_flag = FILTER_FLAG + '=-*Seh*'
# By default, Google Test doesn't catch the exceptions.
uncaught_exceptions_ex_binary_output = gtest_test_utils.Subprocess(
- [EX_EXE_PATH,
- NO_CATCH_EXCEPTIONS_FLAG,
- FITLER_OUT_SEH_TESTS_FLAG],
- env=environ).output
-
- self.assert_('Unhandled C++ exception terminating the program'
- in uncaught_exceptions_ex_binary_output)
- self.assert_('unexpected' not in uncaught_exceptions_ex_binary_output)
+ [EX_EXE_PATH, NO_CATCH_EXCEPTIONS_FLAG, filter_out_seh_tests_flag],
+ env=environ,
+ ).output
+
+ self.assertIn(
+ 'Unhandled C++ exception terminating the program',
+ uncaught_exceptions_ex_binary_output,
+ )
+ self.assertNotIn('unexpected', uncaught_exceptions_ex_binary_output)
if __name__ == '__main__':
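
For context, the output strings this driver searches for come from tests like the
following sketch (illustrative, not part of the patch): when the binary is built with
exceptions and run with --gtest_catch_exceptions enabled (the default), a throw from
SetUp() is reported as a test failure that names SetUp() instead of terminating the run.

    #include <stdexcept>

    #include "gtest/gtest.h"

    class ThrowsInSetUpTest : public testing::Test {
     protected:
      void SetUp() override {
        throw std::runtime_error("Standard C++ exception");
      }
    };

    // The body never runs; the run continues and the logged failure reads:
    //   C++ exception with description "Standard C++ exception" thrown in SetUp()
    TEST_F(ThrowsInSetUpTest, BodyIsSkipped) {}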
diff --git a/googletest/test/googletest-catch-exceptions-test_.cc b/googletest/test/googletest-catch-exceptions-test_.cc
index 8c127d40..3c8f4f4b 100644
--- a/googletest/test/googletest-catch-exceptions-test_.cc
+++ b/googletest/test/googletest-catch-exceptions-test_.cc
@@ -32,18 +32,18 @@
// exceptions, and the output is verified by
// googletest-catch-exceptions-test.py.
-#include <stdio.h> // NOLINT
+#include <stdio.h> // NOLINT
#include <stdlib.h> // For exit().
#include "gtest/gtest.h"
#if GTEST_HAS_SEH
-# include <windows.h>
+#include <windows.h>
#endif
#if GTEST_HAS_EXCEPTIONS
-# include <exception> // For set_terminate().
-# include <stdexcept>
+#include <exception> // For set_terminate().
+#include <stdexcept>
#endif
using testing::Test;
@@ -93,9 +93,7 @@ class SehExceptionInTearDownTest : public Test {
TEST_F(SehExceptionInTearDownTest, ThrowsExceptionInTearDown) {}
-TEST(SehExceptionTest, ThrowsSehException) {
- RaiseException(42, 0, 0, NULL);
-}
+TEST(SehExceptionTest, ThrowsSehException) { RaiseException(42, 0, 0, NULL); }
#endif // GTEST_HAS_SEH
@@ -269,9 +267,7 @@ TEST_F(CxxExceptionInTestBodyTest, ThrowsStdCxxException) {
throw std::runtime_error("Standard C++ exception");
}
-TEST(CxxExceptionTest, ThrowsNonStdCxxException) {
- throw "C-string";
-}
+TEST(CxxExceptionTest, ThrowsNonStdCxxException) { throw "C-string"; }
// This terminate handler aborts the program using exit() rather than abort().
// This avoids showing pop-ups on Windows systems and core dumps on Unix-like
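
The terminate handler referred to in that trailing comment follows the standard
std::set_terminate mechanism. A self-contained sketch of the pattern (the handler
name here is illustrative):

    #include <cstdlib>
    #include <exception>

    // The default handler calls abort(), which can mean a core dump on
    // Unix-likes or an error pop-up on Windows; exiting normally avoids both.
    [[noreturn]] void ExitQuietly() { std::exit(1); }

    int main() {
      std::set_terminate(ExitQuietly);
      throw "unhandled";  // std::terminate() invokes ExitQuietly -> exit(1)
    }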
diff --git a/googletest/test/googletest-color-test.py b/googletest/test/googletest-color-test.py
index f3b7c999..8926a481 100755
--- a/googletest/test/googletest-color-test.py
+++ b/googletest/test/googletest-color-test.py
@@ -32,7 +32,7 @@
"""Verifies that Google Test correctly determines whether to use colors."""
import os
-import gtest_test_utils
+from googletest.test import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
@@ -65,62 +65,64 @@ def UsesColor(term, color_env_var, color_flag):
class GTestColorTest(gtest_test_utils.TestCase):
+
def testNoEnvVarNoFlag(self):
"""Tests the case when there's neither GTEST_COLOR nor --gtest_color."""
if not IS_WINDOWS:
- self.assert_(not UsesColor('dumb', None, None))
- self.assert_(not UsesColor('emacs', None, None))
- self.assert_(not UsesColor('xterm-mono', None, None))
- self.assert_(not UsesColor('unknown', None, None))
- self.assert_(not UsesColor(None, None, None))
- self.assert_(UsesColor('linux', None, None))
- self.assert_(UsesColor('cygwin', None, None))
- self.assert_(UsesColor('xterm', None, None))
- self.assert_(UsesColor('xterm-color', None, None))
- self.assert_(UsesColor('xterm-256color', None, None))
+ self.assertTrue(not UsesColor('dumb', None, None))
+ self.assertTrue(not UsesColor('emacs', None, None))
+ self.assertTrue(not UsesColor('xterm-mono', None, None))
+ self.assertTrue(not UsesColor('unknown', None, None))
+ self.assertTrue(not UsesColor(None, None, None))
+ self.assertTrue(UsesColor('linux', None, None))
+ self.assertTrue(UsesColor('cygwin', None, None))
+ self.assertTrue(UsesColor('xterm', None, None))
+ self.assertTrue(UsesColor('xterm-color', None, None))
+ self.assertTrue(UsesColor('xterm-kitty', None, None))
+ self.assertTrue(UsesColor('xterm-256color', None, None))
def testFlagOnly(self):
"""Tests the case when there's --gtest_color but not GTEST_COLOR."""
- self.assert_(not UsesColor('dumb', None, 'no'))
- self.assert_(not UsesColor('xterm-color', None, 'no'))
+ self.assertTrue(not UsesColor('dumb', None, 'no'))
+ self.assertTrue(not UsesColor('xterm-color', None, 'no'))
if not IS_WINDOWS:
- self.assert_(not UsesColor('emacs', None, 'auto'))
- self.assert_(UsesColor('xterm', None, 'auto'))
- self.assert_(UsesColor('dumb', None, 'yes'))
- self.assert_(UsesColor('xterm', None, 'yes'))
+ self.assertTrue(not UsesColor('emacs', None, 'auto'))
+ self.assertTrue(UsesColor('xterm', None, 'auto'))
+ self.assertTrue(UsesColor('dumb', None, 'yes'))
+ self.assertTrue(UsesColor('xterm', None, 'yes'))
def testEnvVarOnly(self):
"""Tests the case when there's GTEST_COLOR but not --gtest_color."""
- self.assert_(not UsesColor('dumb', 'no', None))
- self.assert_(not UsesColor('xterm-color', 'no', None))
+ self.assertTrue(not UsesColor('dumb', 'no', None))
+ self.assertTrue(not UsesColor('xterm-color', 'no', None))
if not IS_WINDOWS:
- self.assert_(not UsesColor('dumb', 'auto', None))
- self.assert_(UsesColor('xterm-color', 'auto', None))
- self.assert_(UsesColor('dumb', 'yes', None))
- self.assert_(UsesColor('xterm-color', 'yes', None))
+ self.assertTrue(not UsesColor('dumb', 'auto', None))
+ self.assertTrue(UsesColor('xterm-color', 'auto', None))
+ self.assertTrue(UsesColor('dumb', 'yes', None))
+ self.assertTrue(UsesColor('xterm-color', 'yes', None))
def testEnvVarAndFlag(self):
"""Tests the case when there are both GTEST_COLOR and --gtest_color."""
- self.assert_(not UsesColor('xterm-color', 'no', 'no'))
- self.assert_(UsesColor('dumb', 'no', 'yes'))
- self.assert_(UsesColor('xterm-color', 'no', 'auto'))
+ self.assertTrue(not UsesColor('xterm-color', 'no', 'no'))
+ self.assertTrue(UsesColor('dumb', 'no', 'yes'))
+ self.assertTrue(UsesColor('xterm-color', 'no', 'auto'))
def testAliasesOfYesAndNo(self):
"""Tests using aliases in specifying --gtest_color."""
- self.assert_(UsesColor('dumb', None, 'true'))
- self.assert_(UsesColor('dumb', None, 'YES'))
- self.assert_(UsesColor('dumb', None, 'T'))
- self.assert_(UsesColor('dumb', None, '1'))
+ self.assertTrue(UsesColor('dumb', None, 'true'))
+ self.assertTrue(UsesColor('dumb', None, 'YES'))
+ self.assertTrue(UsesColor('dumb', None, 'T'))
+ self.assertTrue(UsesColor('dumb', None, '1'))
- self.assert_(not UsesColor('xterm', None, 'f'))
- self.assert_(not UsesColor('xterm', None, 'false'))
- self.assert_(not UsesColor('xterm', None, '0'))
- self.assert_(not UsesColor('xterm', None, 'unknown'))
+ self.assertTrue(not UsesColor('xterm', None, 'f'))
+ self.assertTrue(not UsesColor('xterm', None, 'false'))
+ self.assertTrue(not UsesColor('xterm', None, '0'))
+ self.assertTrue(not UsesColor('xterm', None, 'unknown'))
if __name__ == '__main__':
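
The helper binary this driver runs (see its diff just below) wraps the internal
predicate testing::internal::ShouldUseColor(bool stdout_is_tty). A minimal sketch of
such a wrapper, assuming the internal header src/gtest-internal-inl.h is reachable on
the include path; this is internal API and may change between releases:

    #include <cstdio>

    #include "gtest/gtest.h"
    #include "src/gtest-internal-inl.h"  // testing::internal::ShouldUseColor

    int main(int argc, char** argv) {
      testing::InitGoogleTest(&argc, argv);  // consumes --gtest_color
      // Pass true to pretend stdout is a terminal, so the answer depends only
      // on --gtest_color, GTEST_COLOR, and TERM.
      const bool use_color = testing::internal::ShouldUseColor(true);
      std::printf("%s\n", use_color ? "YES" : "NO");
      return use_color ? 1 : 0;  // matches the helper's documented convention
    }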
diff --git a/googletest/test/googletest-color-test_.cc b/googletest/test/googletest-color-test_.cc
index 220a3a00..55657b72 100644
--- a/googletest/test/googletest-color-test_.cc
+++ b/googletest/test/googletest-color-test_.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// A helper program for testing how Google Test determines whether to use
// colors in the output. It prints "YES" and returns 1 if Google Test
// decides to use colors, and prints "NO" and returns 0 otherwise.
@@ -43,8 +42,7 @@ using testing::internal::ShouldUseColor;
// created before main() is entered, and thus that ShouldUseColor()
// works the same way as in a real Google-Test-based test. We don't actual
// run the TEST itself.
-TEST(GTestColorTest, Dummy) {
-}
+TEST(GTestColorTest, Dummy) {}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
diff --git a/googletest/test/googletest-death-test-test.cc b/googletest/test/googletest-death-test-test.cc
index c0b3d1f2..4cc81b72 100644
--- a/googletest/test/googletest-death-test-test.cc
+++ b/googletest/test/googletest-death-test-test.cc
@@ -31,34 +31,36 @@
// Tests for death tests.
#include "gtest/gtest-death-test.h"
-
#include "gtest/gtest.h"
#include "gtest/internal/gtest-filepath.h"
using testing::internal::AlwaysFalse;
using testing::internal::AlwaysTrue;
-#if GTEST_HAS_DEATH_TEST
+#ifdef GTEST_HAS_DEATH_TEST
-# if GTEST_OS_WINDOWS
-# include <fcntl.h> // For O_BINARY
-# include <direct.h> // For chdir().
-# include <io.h>
-# else
-# include <unistd.h>
-# include <sys/wait.h> // For waitpid.
-# endif // GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
+#include <direct.h> // For chdir().
+#include <fcntl.h> // For O_BINARY
+#include <io.h>
+#else
+#include <sys/wait.h> // For waitpid.
+#include <unistd.h>
+#endif // GTEST_OS_WINDOWS
-# include <limits.h>
-# include <signal.h>
-# include <stdio.h>
+#include <limits.h>
+#include <signal.h>
+#include <stdio.h>
-# if GTEST_OS_LINUX
-# include <sys/time.h>
-# endif // GTEST_OS_LINUX
+#include <string>
+#include <vector>
-# include "gtest/gtest-spi.h"
-# include "src/gtest-internal-inl.h"
+#ifdef GTEST_OS_LINUX
+#include <sys/time.h>
+#endif // GTEST_OS_LINUX
+
+#include "gtest/gtest-spi.h"
+#include "src/gtest-internal-inl.h"
namespace posix = ::testing::internal::posix;
@@ -90,6 +92,7 @@ class ReplaceDeathTestFactory {
unit_test_impl_->death_test_factory_.release();
unit_test_impl_->death_test_factory_.reset(old_factory_);
}
+
private:
// Prevents copying ReplaceDeathTestFactory objects.
ReplaceDeathTestFactory(const ReplaceDeathTestFactory&);
@@ -116,8 +119,7 @@ void DieWithMessage(const ::std::string& message) {
// Some compilers can recognize that _exit() never returns and issue the
// 'unreachable code' warning for code following this function, unless
// fooled by a fake condition.
- if (AlwaysTrue())
- _exit(1);
+ if (AlwaysTrue()) _exit(1);
}
void DieInside(const ::std::string& function) {
@@ -137,8 +139,7 @@ class TestForDeathTest : public testing::Test {
// A method of the test fixture that may die.
void MemberFunction() {
- if (should_die_)
- DieInside("MemberFunction");
+ if (should_die_) DieInside("MemberFunction");
}
// True if and only if MemberFunction() should die.
@@ -153,8 +154,7 @@ class MayDie {
// A member function that may die.
void MemberFunction() const {
- if (should_die_)
- DieInside("MayDie::MemberFunction");
+ if (should_die_) DieInside("MayDie::MemberFunction");
}
private:
@@ -173,8 +173,7 @@ int NonVoidFunction() {
// A unary function that may die.
void DieIf(bool should_die) {
- if (should_die)
- DieInside("DieIf");
+ if (should_die) DieInside("DieIf");
}
// A binary function that may die.
@@ -195,16 +194,16 @@ void DeathTestSubroutine() {
int DieInDebugElse12(int* sideeffect) {
if (sideeffect) *sideeffect = 12;
-# ifndef NDEBUG
+#ifndef NDEBUG
DieInside("DieInDebugElse12");
-# endif // NDEBUG
+#endif // NDEBUG
return 12;
}
-# if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
// Death in dbg due to Windows CRT assertion failure, not opt.
int DieInCRTDebugElse12(int* sideeffect) {
@@ -224,7 +223,7 @@ int DieInCRTDebugElse12(int* sideeffect) {
#endif // GTEST_OS_WINDOWS
-# if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA
+#if defined(GTEST_OS_WINDOWS) || defined(GTEST_OS_FUCHSIA)
// Tests the ExitedWithCode predicate.
TEST(ExitStatusPredicateTest, ExitedWithCode) {
@@ -237,7 +236,7 @@ TEST(ExitStatusPredicateTest, ExitedWithCode) {
EXPECT_FALSE(testing::ExitedWithCode(1)(0));
}
-# else
+#else
// Returns the exit status of a process that calls _exit(2) with a
// given exit code. This is a helper function for the
@@ -270,14 +269,14 @@ static int KilledExitStatus(int signum) {
// Tests the ExitedWithCode predicate.
TEST(ExitStatusPredicateTest, ExitedWithCode) {
- const int status0 = NormalExitStatus(0);
- const int status1 = NormalExitStatus(1);
+ const int status0 = NormalExitStatus(0);
+ const int status1 = NormalExitStatus(1);
const int status42 = NormalExitStatus(42);
const testing::ExitedWithCode pred0(0);
const testing::ExitedWithCode pred1(1);
const testing::ExitedWithCode pred42(42);
- EXPECT_PRED1(pred0, status0);
- EXPECT_PRED1(pred1, status1);
+ EXPECT_PRED1(pred0, status0);
+ EXPECT_PRED1(pred1, status1);
EXPECT_PRED1(pred42, status42);
EXPECT_FALSE(pred0(status1));
EXPECT_FALSE(pred42(status0));
@@ -296,7 +295,7 @@ TEST(ExitStatusPredicateTest, KilledBySignal) {
EXPECT_FALSE(pred_kill(status_segv));
}
-# endif // GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA
+#endif // GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA
// The following code intentionally tests a suboptimal syntax.
#ifdef __GNUC__
@@ -320,8 +319,7 @@ TEST_F(TestForDeathTest, SingleStatement) {
// doesn't expand into an "if" statement without an "else"
;
- if (AlwaysFalse())
- ASSERT_DEATH(return, "") << "did not die";
+ if (AlwaysFalse()) ASSERT_DEATH(return, "") << "did not die";
if (AlwaysFalse())
;
@@ -332,23 +330,6 @@ TEST_F(TestForDeathTest, SingleStatement) {
#pragma GCC diagnostic pop
#endif
-# if GTEST_USES_PCRE
-
-void DieWithEmbeddedNul() {
- fprintf(stderr, "Hello%cmy null world.\n", '\0');
- fflush(stderr);
- _exit(1);
-}
-
-// Tests that EXPECT_DEATH and ASSERT_DEATH work when the error
-// message has a NUL character in it.
-TEST_F(TestForDeathTest, EmbeddedNulInMessage) {
- EXPECT_DEATH(DieWithEmbeddedNul(), "my null world");
- ASSERT_DEATH(DieWithEmbeddedNul(), "my null world");
-}
-
-# endif // GTEST_USES_PCRE
-
// Tests that death test macros expand to code which interacts well with switch
// statements.
TEST_F(TestForDeathTest, SwitchStatement) {
@@ -357,12 +338,12 @@ TEST_F(TestForDeathTest, SwitchStatement) {
GTEST_DISABLE_MSC_WARNINGS_PUSH_(4065)
switch (0)
- default:
- ASSERT_DEATH(_exit(1), "") << "exit in default switch handler";
+ default:
+ ASSERT_DEATH(_exit(1), "") << "exit in default switch handler";
switch (0)
- case 0:
- EXPECT_DEATH(_exit(1), "") << "exit in switch case";
+ case 0:
+ EXPECT_DEATH(_exit(1), "") << "exit in switch case";
GTEST_DISABLE_MSC_WARNINGS_POP_()
}
@@ -370,14 +351,14 @@ TEST_F(TestForDeathTest, SwitchStatement) {
// Tests that a static member function can be used in a "fast" style
// death test.
TEST_F(TestForDeathTest, StaticMemberFunctionFastStyle) {
- testing::GTEST_FLAG(death_test_style) = "fast";
+ GTEST_FLAG_SET(death_test_style, "fast");
ASSERT_DEATH(StaticMemberFunction(), "death.*StaticMember");
}
// Tests that a method of the test fixture can be used in a "fast"
// style death test.
TEST_F(TestForDeathTest, MemberFunctionFastStyle) {
- testing::GTEST_FLAG(death_test_style) = "fast";
+ GTEST_FLAG_SET(death_test_style, "fast");
should_die_ = true;
EXPECT_DEATH(MemberFunction(), "inside.*MemberFunction");
}
@@ -387,7 +368,7 @@ void ChangeToRootDir() { posix::ChDir(GTEST_PATH_SEP_); }
// Tests that death tests work even if the current directory has been
// changed.
TEST_F(TestForDeathTest, FastDeathTestInChangedDir) {
- testing::GTEST_FLAG(death_test_style) = "fast";
+ GTEST_FLAG_SET(death_test_style, "fast");
ChangeToRootDir();
EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), "");
@@ -396,8 +377,9 @@ TEST_F(TestForDeathTest, FastDeathTestInChangedDir) {
ASSERT_DEATH(_exit(1), "");
}
-# if GTEST_OS_LINUX
-void SigprofAction(int, siginfo_t*, void*) { /* no op */ }
+#ifdef GTEST_OS_LINUX
+void SigprofAction(int, siginfo_t*, void*) { /* no op */
+}
// Sets SIGPROF action and ITIMER_PROF timer (interval: 1ms).
void SetSigprofActionAndTimer() {
@@ -432,7 +414,7 @@ void DisableSigprofActionAndTimer(struct sigaction* old_signal_action) {
// Tests that death tests work when SIGPROF handler and timer are set.
TEST_F(TestForDeathTest, FastSigprofActionSet) {
- testing::GTEST_FLAG(death_test_style) = "fast";
+ GTEST_FLAG_SET(death_test_style, "fast");
SetSigprofActionAndTimer();
EXPECT_DEATH(_exit(1), "");
struct sigaction old_signal_action;
@@ -441,37 +423,37 @@ TEST_F(TestForDeathTest, FastSigprofActionSet) {
}
TEST_F(TestForDeathTest, ThreadSafeSigprofActionSet) {
- testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ GTEST_FLAG_SET(death_test_style, "threadsafe");
SetSigprofActionAndTimer();
EXPECT_DEATH(_exit(1), "");
struct sigaction old_signal_action;
DisableSigprofActionAndTimer(&old_signal_action);
EXPECT_TRUE(old_signal_action.sa_sigaction == SigprofAction);
}
-# endif // GTEST_OS_LINUX
+#endif // GTEST_OS_LINUX
// Repeats a representative sample of death tests in the "threadsafe" style:
TEST_F(TestForDeathTest, StaticMemberFunctionThreadsafeStyle) {
- testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ GTEST_FLAG_SET(death_test_style, "threadsafe");
ASSERT_DEATH(StaticMemberFunction(), "death.*StaticMember");
}
TEST_F(TestForDeathTest, MemberFunctionThreadsafeStyle) {
- testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ GTEST_FLAG_SET(death_test_style, "threadsafe");
should_die_ = true;
EXPECT_DEATH(MemberFunction(), "inside.*MemberFunction");
}
TEST_F(TestForDeathTest, ThreadsafeDeathTestInLoop) {
- testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ GTEST_FLAG_SET(death_test_style, "threadsafe");
for (int i = 0; i < 3; ++i)
EXPECT_EXIT(_exit(i), testing::ExitedWithCode(i), "") << ": i = " << i;
}
TEST_F(TestForDeathTest, ThreadsafeDeathTestInChangedDir) {
- testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ GTEST_FLAG_SET(death_test_style, "threadsafe");
ChangeToRootDir();
EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), "");
@@ -481,23 +463,21 @@ TEST_F(TestForDeathTest, ThreadsafeDeathTestInChangedDir) {
}
TEST_F(TestForDeathTest, MixedStyles) {
- testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ GTEST_FLAG_SET(death_test_style, "threadsafe");
EXPECT_DEATH(_exit(1), "");
- testing::GTEST_FLAG(death_test_style) = "fast";
+ GTEST_FLAG_SET(death_test_style, "fast");
EXPECT_DEATH(_exit(1), "");
}
-# if GTEST_HAS_CLONE && GTEST_HAS_PTHREAD
+#if GTEST_HAS_CLONE && GTEST_HAS_PTHREAD
bool pthread_flag;
-void SetPthreadFlag() {
- pthread_flag = true;
-}
+void SetPthreadFlag() { pthread_flag = true; }
TEST_F(TestForDeathTest, DoesNotExecuteAtforkHooks) {
- if (!testing::GTEST_FLAG(death_test_use_fork)) {
- testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ if (!GTEST_FLAG_GET(death_test_use_fork)) {
+ GTEST_FLAG_SET(death_test_style, "threadsafe");
pthread_flag = false;
ASSERT_EQ(0, pthread_atfork(&SetPthreadFlag, nullptr, nullptr));
ASSERT_DEATH(_exit(1), "");
@@ -505,7 +485,7 @@ TEST_F(TestForDeathTest, DoesNotExecuteAtforkHooks) {
}
}
-# endif // GTEST_HAS_CLONE && GTEST_HAS_PTHREAD
+#endif // GTEST_HAS_CLONE && GTEST_HAS_PTHREAD
// Tests that a method of another class can be used in a death test.
TEST_F(TestForDeathTest, MethodOfAnotherClass) {
@@ -527,16 +507,12 @@ TEST_F(TestForDeathTest, AcceptsAnythingConvertibleToRE) {
const testing::internal::RE regex(regex_c_str);
EXPECT_DEATH(GlobalFunction(), regex);
-# if !GTEST_USES_PCRE
-
const ::std::string regex_std_str(regex_c_str);
EXPECT_DEATH(GlobalFunction(), regex_std_str);
// This one is tricky; a temporary pointer into another temporary. Reference
// lifetime extension of the pointer is not sufficient.
EXPECT_DEATH(GlobalFunction(), ::std::string(regex_c_str).c_str());
-
-# endif // !GTEST_USES_PCRE
}
// Tests that a non-void function can be used in a death test.
@@ -551,9 +527,7 @@ TEST_F(TestForDeathTest, FunctionWithParameter) {
}
// Tests that ASSERT_DEATH can be used outside a TEST, TEST_F, or test fixture.
-TEST_F(TestForDeathTest, OutsideFixture) {
- DeathTestSubroutine();
-}
+TEST_F(TestForDeathTest, OutsideFixture) { DeathTestSubroutine(); }
// Tests that death tests can be done inside a loop.
TEST_F(TestForDeathTest, InsideLoop) {
@@ -564,25 +538,28 @@ TEST_F(TestForDeathTest, InsideLoop) {
// Tests that a compound statement can be used in a death test.
TEST_F(TestForDeathTest, CompoundStatement) {
- EXPECT_DEATH({ // NOLINT
- const int x = 2;
- const int y = x + 1;
- DieIfLessThan(x, y);
- },
- "DieIfLessThan");
+ EXPECT_DEATH(
+ { // NOLINT
+ const int x = 2;
+ const int y = x + 1;
+ DieIfLessThan(x, y);
+ },
+ "DieIfLessThan");
}
// Tests that code that doesn't die causes a death test to fail.
TEST_F(TestForDeathTest, DoesNotDie) {
- EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(DieIf(false), "DieIf"),
- "failed to die");
+ EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(DieIf(false), "DieIf"), "failed to die");
}
// Tests that a death test fails when the error message isn't expected.
TEST_F(TestForDeathTest, ErrorMessageMismatch) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_DEATH(DieIf(true), "DieIfLessThan") << "End of death test message.";
- }, "died but not with expected error");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_DEATH(DieIf(true), "DieIfLessThan")
+ << "End of death test message.";
+ },
+ "died but not with expected error");
}
// On exit, *aborted will be true if and only if the EXPECT_DEATH()
@@ -596,19 +573,20 @@ void ExpectDeathTestHelper(bool* aborted) {
// Tests that EXPECT_DEATH doesn't abort the test on failure.
TEST_F(TestForDeathTest, EXPECT_DEATH) {
bool aborted = true;
- EXPECT_NONFATAL_FAILURE(ExpectDeathTestHelper(&aborted),
- "failed to die");
+ EXPECT_NONFATAL_FAILURE(ExpectDeathTestHelper(&aborted), "failed to die");
EXPECT_FALSE(aborted);
}
// Tests that ASSERT_DEATH does abort the test on failure.
TEST_F(TestForDeathTest, ASSERT_DEATH) {
static bool aborted;
- EXPECT_FATAL_FAILURE({ // NOLINT
- aborted = true;
- ASSERT_DEATH(DieIf(false), "DieIf"); // This assertion should fail.
- aborted = false;
- }, "failed to die");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ aborted = true;
+ ASSERT_DEATH(DieIf(false), "DieIf"); // This assertion should fail.
+ aborted = false;
+ },
+ "failed to die");
EXPECT_TRUE(aborted);
}
@@ -653,52 +631,36 @@ TEST_F(TestForDeathTest, TestExpectDebugDeath) {
EXPECT_DEBUG_DEATH(DieInDebugElse12(&sideeffect), regex)
<< "Must accept a streamed message";
-# ifdef NDEBUG
+#ifdef NDEBUG
// Checks that the assignment occurs in opt mode (sideeffect).
EXPECT_EQ(12, sideeffect);
-# else
+#else
// Checks that the assignment does not occur in dbg mode (no sideeffect).
EXPECT_EQ(0, sideeffect);
-# endif
+#endif
}
-# if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
-// Tests that EXPECT_DEBUG_DEATH works as expected when in debug mode
-// the Windows CRT crashes the process with an assertion failure.
+// https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/crtsetreportmode
+// In debug mode, the calls to _CrtSetReportMode and _CrtSetReportFile enable
+// the dumping of assertions to stderr. Tests that EXPECT_DEATH works as
+// expected when in CRT debug mode (compiled with /MTd or /MDd, which defines
+// _DEBUG) the Windows CRT crashes the process with an assertion failure.
// 1. Asserts on death.
// 2. Has no side effect (doesn't pop up a window or wait for user input).
-//
-// And in opt mode, it:
-// 1. Has side effects but does not assert.
+#ifdef _DEBUG
TEST_F(TestForDeathTest, CRTDebugDeath) {
- int sideeffect = 0;
-
- // Put the regex in a local variable to make sure we don't get an "unused"
- // warning in opt mode.
- const char* regex = "dup.* : Assertion failed";
-
- EXPECT_DEBUG_DEATH(DieInCRTDebugElse12(&sideeffect), regex)
+ EXPECT_DEATH(DieInCRTDebugElse12(nullptr), "dup.* : Assertion failed")
<< "Must accept a streamed message";
-
-# ifdef NDEBUG
-
- // Checks that the assignment occurs in opt mode (sideeffect).
- EXPECT_EQ(12, sideeffect);
-
-# else
-
- // Checks that the assignment does not occur in dbg mode (no sideeffect).
- EXPECT_EQ(0, sideeffect);
-
-# endif
}
+#endif // _DEBUG
-# endif // GTEST_OS_WINDOWS
+#endif // GTEST_OS_WINDOWS
// Tests that ASSERT_DEBUG_DEATH works as expected, that is, you can stream a
// message to it, and in debug mode it:
@@ -713,20 +675,20 @@ TEST_F(TestForDeathTest, TestAssertDebugDeath) {
ASSERT_DEBUG_DEATH(DieInDebugElse12(&sideeffect), "death.*DieInDebugElse12")
<< "Must accept a streamed message";
-# ifdef NDEBUG
+#ifdef NDEBUG
// Checks that the assignment occurs in opt mode (sideeffect).
EXPECT_EQ(12, sideeffect);
-# else
+#else
// Checks that the assignment does not occur in dbg mode (no sideeffect).
EXPECT_EQ(0, sideeffect);
-# endif
+#endif
}
-# ifndef NDEBUG
+#ifndef NDEBUG
void ExpectDebugDeathHelper(bool* aborted) {
*aborted = true;
@@ -734,18 +696,21 @@ void ExpectDebugDeathHelper(bool* aborted) {
*aborted = false;
}
-# if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
TEST(PopUpDeathTest, DoesNotShowPopUpOnAbort) {
- printf("This test should be considered failing if it shows "
- "any pop-up dialogs.\n");
+ printf(
+ "This test should be considered failing if it shows "
+ "any pop-up dialogs.\n");
fflush(stdout);
- EXPECT_DEATH({
- testing::GTEST_FLAG(catch_exceptions) = false;
- abort();
- }, "");
+ EXPECT_DEATH(
+ {
+ GTEST_FLAG_SET(catch_exceptions, false);
+ abort();
+ },
+ "");
}
-# endif // GTEST_OS_WINDOWS
+#endif // GTEST_OS_WINDOWS
// Tests that EXPECT_DEBUG_DEATH in debug mode does not abort
// the function.
@@ -836,83 +801,86 @@ TEST_F(TestForDeathTest, AssertDebugDeathAborts10) {
EXPECT_TRUE(aborted);
}
-# endif // _NDEBUG
+#endif // _NDEBUG
// Tests the *_EXIT family of macros, using a variety of predicates.
static void TestExitMacros() {
- EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), "");
+ EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), "");
ASSERT_EXIT(_exit(42), testing::ExitedWithCode(42), "");
-# if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
// Of all signals effects on the process exit code, only those of SIGABRT
// are documented on Windows.
// See https://msdn.microsoft.com/en-us/query-bi/m/dwwzkt4c.
EXPECT_EXIT(raise(SIGABRT), testing::ExitedWithCode(3), "") << "b_ar";
-# elif !GTEST_OS_FUCHSIA
+#elif !defined(GTEST_OS_FUCHSIA)
// Fuchsia has no unix signals.
EXPECT_EXIT(raise(SIGKILL), testing::KilledBySignal(SIGKILL), "") << "foo";
ASSERT_EXIT(raise(SIGUSR2), testing::KilledBySignal(SIGUSR2), "") << "bar";
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_EXIT(_exit(0), testing::KilledBySignal(SIGSEGV), "")
- << "This failure is expected, too.";
- }, "This failure is expected, too.");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_EXIT(_exit(0), testing::KilledBySignal(SIGSEGV), "")
+ << "This failure is expected, too.";
+ },
+ "This failure is expected, too.");
-# endif // GTEST_OS_WINDOWS
+#endif // GTEST_OS_WINDOWS
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_EXIT(raise(SIGSEGV), testing::ExitedWithCode(0), "")
- << "This failure is expected.";
- }, "This failure is expected.");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_EXIT(raise(SIGSEGV), testing::ExitedWithCode(0), "")
+ << "This failure is expected.";
+ },
+ "This failure is expected.");
}
-TEST_F(TestForDeathTest, ExitMacros) {
- TestExitMacros();
-}
+TEST_F(TestForDeathTest, ExitMacros) { TestExitMacros(); }
TEST_F(TestForDeathTest, ExitMacrosUsingFork) {
- testing::GTEST_FLAG(death_test_use_fork) = true;
+ GTEST_FLAG_SET(death_test_use_fork, true);
TestExitMacros();
}
TEST_F(TestForDeathTest, InvalidStyle) {
- testing::GTEST_FLAG(death_test_style) = "rococo";
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_DEATH(_exit(0), "") << "This failure is expected.";
- }, "This failure is expected.");
+ GTEST_FLAG_SET(death_test_style, "rococo");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_DEATH(_exit(0), "") << "This failure is expected.";
+ },
+ "This failure is expected.");
}
TEST_F(TestForDeathTest, DeathTestFailedOutput) {
- testing::GTEST_FLAG(death_test_style) = "fast";
+ GTEST_FLAG_SET(death_test_style, "fast");
EXPECT_NONFATAL_FAILURE(
- EXPECT_DEATH(DieWithMessage("death\n"),
- "expected message"),
+ EXPECT_DEATH(DieWithMessage("death\n"), "expected message"),
"Actual msg:\n"
"[ DEATH ] death\n");
}
TEST_F(TestForDeathTest, DeathTestUnexpectedReturnOutput) {
- testing::GTEST_FLAG(death_test_style) = "fast";
- EXPECT_NONFATAL_FAILURE(
- EXPECT_DEATH({
- fprintf(stderr, "returning\n");
- fflush(stderr);
- return;
- }, ""),
- " Result: illegal return in test statement.\n"
- " Error msg:\n"
- "[ DEATH ] returning\n");
+ GTEST_FLAG_SET(death_test_style, "fast");
+ EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(
+ {
+ fprintf(stderr, "returning\n");
+ fflush(stderr);
+ return;
+ },
+ ""),
+ " Result: illegal return in test statement.\n"
+ " Error msg:\n"
+ "[ DEATH ] returning\n");
}
TEST_F(TestForDeathTest, DeathTestBadExitCodeOutput) {
- testing::GTEST_FLAG(death_test_style) = "fast";
+ GTEST_FLAG_SET(death_test_style, "fast");
EXPECT_NONFATAL_FAILURE(
EXPECT_EXIT(DieWithMessage("exiting with rc 1\n"),
- testing::ExitedWithCode(3),
- "expected message"),
+ testing::ExitedWithCode(3), "expected message"),
" Result: died but not with expected exit code:\n"
" Exited with exit status 1\n"
"Actual msg:\n"
@@ -920,7 +888,7 @@ TEST_F(TestForDeathTest, DeathTestBadExitCodeOutput) {
}
TEST_F(TestForDeathTest, DeathTestMultiLineMatchFail) {
- testing::GTEST_FLAG(death_test_style) = "fast";
+ GTEST_FLAG_SET(death_test_style, "fast");
EXPECT_NONFATAL_FAILURE(
EXPECT_DEATH(DieWithMessage("line 1\nline 2\nline 3\n"),
"line 1\nxyz\nline 3\n"),
@@ -931,7 +899,7 @@ TEST_F(TestForDeathTest, DeathTestMultiLineMatchFail) {
}
TEST_F(TestForDeathTest, DeathTestMultiLineMatchPass) {
- testing::GTEST_FLAG(death_test_style) = "fast";
+ GTEST_FLAG_SET(death_test_style, "fast");
EXPECT_DEATH(DieWithMessage("line 1\nline 2\nline 3\n"),
"line 1\nline 2\nline 3\n");
}
@@ -945,8 +913,8 @@ class MockDeathTestFactory : public DeathTestFactory {
int line, DeathTest** test) override;
// Sets the parameters for subsequent calls to Create.
- void SetParameters(bool create, DeathTest::TestRole role,
- int status, bool passed);
+ void SetParameters(bool create, DeathTest::TestRole role, int status,
+ bool passed);
// Accessors.
int AssumeRoleCalls() const { return assume_role_calls_; }
@@ -988,17 +956,15 @@ class MockDeathTestFactory : public DeathTestFactory {
bool test_deleted_;
};
-
// A DeathTest implementation useful in testing. It returns values set
// at its creation from its various inherited DeathTest methods, and
// reports calls to those methods to its parent MockDeathTestFactory
// object.
class MockDeathTest : public DeathTest {
public:
- MockDeathTest(MockDeathTestFactory *parent,
- TestRole role, int status, bool passed) :
- parent_(parent), role_(role), status_(status), passed_(passed) {
- }
+ MockDeathTest(MockDeathTestFactory* parent, TestRole role, int status,
+ bool passed)
+ : parent_(parent), role_(role), status_(status), passed_(passed) {}
~MockDeathTest() override { parent_->test_deleted_ = true; }
TestRole AssumeRole() override {
++parent_->assume_role_calls_;
@@ -1023,7 +989,6 @@ class MockDeathTest : public DeathTest {
const bool passed_;
};
-
// MockDeathTestFactory constructor.
MockDeathTestFactory::MockDeathTestFactory()
: create_(true),
@@ -1033,13 +998,10 @@ MockDeathTestFactory::MockDeathTestFactory()
assume_role_calls_(0),
wait_calls_(0),
passed_args_(),
- abort_args_() {
-}
-
+ abort_args_() {}
// Sets the parameters for subsequent calls to Create.
-void MockDeathTestFactory::SetParameters(bool create,
- DeathTest::TestRole role,
+void MockDeathTestFactory::SetParameters(bool create, DeathTest::TestRole role,
int status, bool passed) {
create_ = create;
role_ = role;
@@ -1052,7 +1014,6 @@ void MockDeathTestFactory::SetParameters(bool create,
abort_args_.clear();
}
-
// Sets test to NULL (if create_ is false) or to the address of a new
// MockDeathTest object with parameters taken from the last call
// to SetParameters (if create_ is true). Always returns true.
@@ -1092,10 +1053,12 @@ class MacroLogicDeathTest : public testing::Test {
// test cannot be run directly from a test routine that uses a
// MockDeathTest, or the remainder of the routine will not be executed.
static void RunReturningDeathTest(bool* flag) {
- ASSERT_DEATH({ // NOLINT
- *flag = true;
- return;
- }, "");
+ ASSERT_DEATH(
+ { // NOLINT
+ *flag = true;
+ return;
+ },
+ "");
}
};
@@ -1180,8 +1143,7 @@ TEST_F(MacroLogicDeathTest, ChildDoesNotDie) {
// _exit(2) is called in that case by ForkingDeathTest, but not by
// our MockDeathTest.
ASSERT_EQ(2U, factory_->AbortCalls());
- EXPECT_EQ(DeathTest::TEST_DID_NOT_DIE,
- factory_->AbortArgument(0));
+ EXPECT_EQ(DeathTest::TEST_DID_NOT_DIE, factory_->AbortArgument(0));
EXPECT_EQ(DeathTest::TEST_ENCOUNTERED_RETURN_STATEMENT,
factory_->AbortArgument(1));
EXPECT_TRUE(factory_->TestDeleted());
@@ -1197,12 +1159,16 @@ TEST(SuccessRegistrationDeathTest, NoSuccessPart) {
TEST(StreamingAssertionsDeathTest, DeathTest) {
EXPECT_DEATH(_exit(1), "") << "unexpected failure";
ASSERT_DEATH(_exit(1), "") << "unexpected failure";
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_DEATH(_exit(0), "") << "expected failure";
- }, "expected failure");
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_DEATH(_exit(0), "") << "expected failure";
- }, "expected failure");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_DEATH(_exit(0), "") << "expected failure";
+ },
+ "expected failure");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_DEATH(_exit(0), "") << "expected failure";
+ },
+ "expected failure");
}
// Tests that GetLastErrnoDescription returns an empty string when the
@@ -1214,7 +1180,7 @@ TEST(GetLastErrnoDescription, GetLastErrnoDescriptionWorks) {
EXPECT_STREQ("", GetLastErrnoDescription().c_str());
}
-# if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
TEST(AutoHandleTest, AutoHandleWorks) {
HANDLE handle = ::CreateEvent(NULL, FALSE, FALSE, NULL);
ASSERT_NE(INVALID_HANDLE_VALUE, handle);
@@ -1239,15 +1205,15 @@ TEST(AutoHandleTest, AutoHandleWorks) {
testing::internal::AutoHandle auto_handle2;
EXPECT_EQ(INVALID_HANDLE_VALUE, auto_handle2.Get());
}
-# endif // GTEST_OS_WINDOWS
+#endif // GTEST_OS_WINDOWS
-# if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
typedef unsigned __int64 BiggestParsable;
typedef signed __int64 BiggestSignedParsable;
-# else
+#else
typedef unsigned long long BiggestParsable;
typedef signed long long BiggestSignedParsable;
-# endif // GTEST_OS_WINDOWS
+#endif // GTEST_OS_WINDOWS
// We cannot use std::numeric_limits<T>::max() as it clashes with the
// max() macro defined by <windows.h>.
@@ -1338,11 +1304,11 @@ TEST(ParseNaturalNumberTest, WorksForShorterIntegers) {
EXPECT_EQ(123, char_result);
}
-# if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
TEST(EnvironmentTest, HandleFitsIntoSizeT) {
ASSERT_TRUE(sizeof(HANDLE) <= sizeof(size_t));
}
-# endif // GTEST_OS_WINDOWS
+#endif // GTEST_OS_WINDOWS
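[Editor's note] On the comment above about std::numeric_limits<T>::max() clashing with the max() macro from <windows.h>: the usual workarounds are the parenthesized call or NOMINMAX. A minimal sketch, illustrative and not part of this diff:

    #include <limits>

    // The extra parentheses keep the preprocessor from recognizing "max" as
    // a function-like macro invocation, so the member function is called.
    const unsigned long long kBiggest =
        (std::numeric_limits<unsigned long long>::max)();

    // Alternative: #define NOMINMAX before including <windows.h>, which
    // suppresses its min()/max() macros entirely.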
// Tests that EXPECT_DEATH_IF_SUPPORTED/ASSERT_DEATH_IF_SUPPORTED trigger
// failures when death tests are available on the system.
@@ -1358,23 +1324,27 @@ TEST(ConditionalDeathMacrosDeathTest, ExpectsDeathWhenDeathTestsAvailable) {
}
TEST(InDeathTestChildDeathTest, ReportsDeathTestCorrectlyInFastStyle) {
- testing::GTEST_FLAG(death_test_style) = "fast";
+ GTEST_FLAG_SET(death_test_style, "fast");
EXPECT_FALSE(InDeathTestChild());
- EXPECT_DEATH({
- fprintf(stderr, InDeathTestChild() ? "Inside" : "Outside");
- fflush(stderr);
- _exit(1);
- }, "Inside");
+ EXPECT_DEATH(
+ {
+ fprintf(stderr, InDeathTestChild() ? "Inside" : "Outside");
+ fflush(stderr);
+ _exit(1);
+ },
+ "Inside");
}
TEST(InDeathTestChildDeathTest, ReportsDeathTestCorrectlyInThreadSafeStyle) {
- testing::GTEST_FLAG(death_test_style) = "threadsafe";
+ GTEST_FLAG_SET(death_test_style, "threadsafe");
EXPECT_FALSE(InDeathTestChild());
- EXPECT_DEATH({
- fprintf(stderr, InDeathTestChild() ? "Inside" : "Outside");
- fflush(stderr);
- _exit(1);
- }, "Inside");
+ EXPECT_DEATH(
+ {
+ fprintf(stderr, InDeathTestChild() ? "Inside" : "Outside");
+ fflush(stderr);
+ _exit(1);
+ },
+ "Inside");
}
void DieWithMessage(const char* message) {
@@ -1386,7 +1356,7 @@ void DieWithMessage(const char* message) {
TEST(MatcherDeathTest, DoesNotBreakBareRegexMatching) {
// googletest tests this, of course; here we ensure that including googlemock
// has not broken it.
-#if GTEST_USES_POSIX_RE
+#ifdef GTEST_USES_POSIX_RE
EXPECT_DEATH(DieWithMessage("O, I die, Horatio."), "I d[aeiou]e");
#else
EXPECT_DEATH(DieWithMessage("O, I die, Horatio."), "I di?e");
@@ -1502,8 +1472,7 @@ TEST(ConditionalDeathMacrosSyntaxDeathTest, SingleStatement) {
// doesn't expand into an "if" statement without an "else"
; // NOLINT
- if (AlwaysFalse())
- ASSERT_DEATH_IF_SUPPORTED(return, "") << "did not die";
+ if (AlwaysFalse()) ASSERT_DEATH_IF_SUPPORTED(return, "") << "did not die";
if (AlwaysFalse())
; // NOLINT
@@ -1522,21 +1491,18 @@ TEST(ConditionalDeathMacrosSyntaxDeathTest, SwitchStatement) {
GTEST_DISABLE_MSC_WARNINGS_PUSH_(4065)
switch (0)
- default:
- ASSERT_DEATH_IF_SUPPORTED(_exit(1), "")
- << "exit in default switch handler";
+ default:
+ ASSERT_DEATH_IF_SUPPORTED(_exit(1), "") << "exit in default switch handler";
switch (0)
- case 0:
- EXPECT_DEATH_IF_SUPPORTED(_exit(1), "") << "exit in switch case";
+ case 0:
+ EXPECT_DEATH_IF_SUPPORTED(_exit(1), "") << "exit in switch case";
GTEST_DISABLE_MSC_WARNINGS_POP_()
}
// Tests that a test case whose name ends with "DeathTest" works fine
// on Windows.
-TEST(NotADeathTest, Test) {
- SUCCEED();
-}
+TEST(NotADeathTest, Test) { SUCCEED(); }
} // namespace
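[Editor's note] The recurring flag change in this file replaces direct reads and assignments through testing::GTEST_FLAG(name) with the GTEST_FLAG_SET/GTEST_FLAG_GET accessor macros. A minimal before/after sketch, illustrative only:

    #include "gtest/gtest.h"

    TEST(FlagAccessorSketch, SetThenGet) {
      // Old style, as removed above:
      //   testing::GTEST_FLAG(death_test_style) = "fast";
      GTEST_FLAG_SET(death_test_style, "fast");
      EXPECT_EQ("fast", GTEST_FLAG_GET(death_test_style));
    }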
diff --git a/googletest/test/googletest-death-test_ex_test.cc b/googletest/test/googletest-death-test_ex_test.cc
index 7219680d..34d55013 100644
--- a/googletest/test/googletest-death-test_ex_test.cc
+++ b/googletest/test/googletest-death-test_ex_test.cc
@@ -33,17 +33,17 @@
#include "gtest/gtest-death-test.h"
#include "gtest/gtest.h"
-#if GTEST_HAS_DEATH_TEST
+#ifdef GTEST_HAS_DEATH_TEST
-# if GTEST_HAS_SEH
-# include <windows.h> // For RaiseException().
-# endif
+#if GTEST_HAS_SEH
+#include <windows.h> // For RaiseException().
+#endif
-# include "gtest/gtest-spi.h"
+#include "gtest/gtest-spi.h"
-# if GTEST_HAS_EXCEPTIONS
+#if GTEST_HAS_EXCEPTIONS
-# include <exception> // For std::exception.
+#include <exception> // For std::exception.
// Tests that death tests report thrown exceptions as failures and that the
// exceptions do not escape death test macros.
@@ -53,7 +53,7 @@ TEST(CxxExceptionDeathTest, ExceptionIsFailure) {
} catch (...) { // NOLINT
FAIL() << "An exception escaped a death test macro invocation "
<< "with catch_exceptions "
- << (testing::GTEST_FLAG(catch_exceptions) ? "enabled" : "disabled");
+ << (GTEST_FLAG_GET(catch_exceptions) ? "enabled" : "disabled");
}
}
@@ -67,26 +67,25 @@ TEST(CxxExceptionDeathTest, PrintsMessageForStdExceptions) {
EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw TestException(), ""),
"exceptional message");
// Verifies that the location is mentioned in the failure text.
- EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw TestException(), ""),
- __FILE__);
+ EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw TestException(), ""), __FILE__);
}
-# endif // GTEST_HAS_EXCEPTIONS
+#endif // GTEST_HAS_EXCEPTIONS
-# if GTEST_HAS_SEH
+#if GTEST_HAS_SEH
// Tests that enabling interception of SEH exceptions with the
// catch_exceptions flag does not interfere with SEH exceptions being
// treated as death by death tests.
TEST(SehExceptionDeasTest, CatchExceptionsDoesNotInterfere) {
EXPECT_DEATH(RaiseException(42, 0x0, 0, NULL), "")
<< "with catch_exceptions "
- << (testing::GTEST_FLAG(catch_exceptions) ? "enabled" : "disabled");
+ << (GTEST_FLAG_GET(catch_exceptions) ? "enabled" : "disabled");
}
-# endif
+#endif
#endif // GTEST_HAS_DEATH_TEST
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
- testing::GTEST_FLAG(catch_exceptions) = GTEST_ENABLE_CATCH_EXCEPTIONS_ != 0;
+ GTEST_FLAG_SET(catch_exceptions, GTEST_ENABLE_CATCH_EXCEPTIONS_ != 0);
return RUN_ALL_TESTS();
}
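[Editor's note] The `# if` to `#ifdef` churn here and in the files below reflects macros such as GTEST_OS_WINDOWS and GTEST_HAS_DEATH_TEST becoming define-only: each is now either defined or absent rather than guaranteed to carry a 0/1 value, so conditionals must test defined-ness (a value test treats an undefined name as 0 and can warn under -Wundef). The pattern, sketched:

    // New convention: test whether the macro is defined, not its value.
    #ifdef GTEST_OS_WINDOWS
    // Windows-specific code.
    #elif defined(GTEST_OS_LINUX)
    // Linux-specific code; defined() is needed after #elif.
    #endif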
diff --git a/googletest/test/googletest-env-var-test.py b/googletest/test/googletest-env-var-test.py
index 02c3655c..24d8edbb 100755
--- a/googletest/test/googletest-env-var-test.py
+++ b/googletest/test/googletest-env-var-test.py
@@ -32,7 +32,7 @@
"""Verifies that Google Test correctly parses environment variables."""
import os
-import gtest_test_utils
+from googletest.test import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
@@ -101,7 +101,6 @@ class GTestEnvVarTest(gtest_test_utils.TestCase):
TestFlag('death_test_use_fork', '1', '0')
TestFlag('stack_trace_depth', '0', '100')
-
def testXmlOutputFile(self):
"""Tests that $XML_OUTPUT_FILE affects the output flag."""
@@ -116,5 +115,6 @@ class GTestEnvVarTest(gtest_test_utils.TestCase):
SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml')
AssertEq('xml:tmp/foo.xml', GetFlag('output'))
+
if __name__ == '__main__':
gtest_test_utils.Main()
diff --git a/googletest/test/googletest-env-var-test_.cc b/googletest/test/googletest-env-var-test_.cc
index 52f95864..36533750 100644
--- a/googletest/test/googletest-env-var-test_.cc
+++ b/googletest/test/googletest-env-var-test_.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// A helper program for testing that Google Test parses the environment
// variables correctly.
@@ -43,72 +42,71 @@ namespace testing {
// The purpose of this is to make the test more realistic by ensuring
// that the UnitTest singleton is created before main() is entered.
// We don't actually run the TEST itself.
-TEST(GTestEnvVarTest, Dummy) {
-}
+TEST(GTestEnvVarTest, Dummy) {}
void PrintFlag(const char* flag) {
if (strcmp(flag, "break_on_failure") == 0) {
- cout << GTEST_FLAG(break_on_failure);
+ cout << GTEST_FLAG_GET(break_on_failure);
return;
}
if (strcmp(flag, "catch_exceptions") == 0) {
- cout << GTEST_FLAG(catch_exceptions);
+ cout << GTEST_FLAG_GET(catch_exceptions);
return;
}
if (strcmp(flag, "color") == 0) {
- cout << GTEST_FLAG(color);
+ cout << GTEST_FLAG_GET(color);
return;
}
if (strcmp(flag, "death_test_style") == 0) {
- cout << GTEST_FLAG(death_test_style);
+ cout << GTEST_FLAG_GET(death_test_style);
return;
}
if (strcmp(flag, "death_test_use_fork") == 0) {
- cout << GTEST_FLAG(death_test_use_fork);
+ cout << GTEST_FLAG_GET(death_test_use_fork);
return;
}
if (strcmp(flag, "fail_fast") == 0) {
- cout << GTEST_FLAG(fail_fast);
+ cout << GTEST_FLAG_GET(fail_fast);
return;
}
if (strcmp(flag, "filter") == 0) {
- cout << GTEST_FLAG(filter);
+ cout << GTEST_FLAG_GET(filter);
return;
}
if (strcmp(flag, "output") == 0) {
- cout << GTEST_FLAG(output);
+ cout << GTEST_FLAG_GET(output);
return;
}
if (strcmp(flag, "brief") == 0) {
- cout << GTEST_FLAG(brief);
+ cout << GTEST_FLAG_GET(brief);
return;
}
if (strcmp(flag, "print_time") == 0) {
- cout << GTEST_FLAG(print_time);
+ cout << GTEST_FLAG_GET(print_time);
return;
}
if (strcmp(flag, "repeat") == 0) {
- cout << GTEST_FLAG(repeat);
+ cout << GTEST_FLAG_GET(repeat);
return;
}
if (strcmp(flag, "stack_trace_depth") == 0) {
- cout << GTEST_FLAG(stack_trace_depth);
+ cout << GTEST_FLAG_GET(stack_trace_depth);
return;
}
if (strcmp(flag, "throw_on_failure") == 0) {
- cout << GTEST_FLAG(throw_on_failure);
+ cout << GTEST_FLAG_GET(throw_on_failure);
return;
}
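[Editor's note] For context on the PrintFlag helper above: each flag it prints can be seeded through an environment variable named GTEST_ plus the upper-cased flag name (e.g. GTEST_FILTER for filter), which the Python side of this test manipulates. A hedged sketch of observing the result in-process:

    #include <iostream>
    #include "gtest/gtest.h"

    // Run as:  GTEST_FILTER='FooTest.*' ./this_binary
    // The environment variable supplies the flag's default, visible through
    // the getter after initialization (an explicit --gtest_filter overrides).
    int main(int argc, char** argv) {
      testing::InitGoogleTest(&argc, argv);
      std::cout << "filter = " << GTEST_FLAG_GET(filter) << "\n";
      return RUN_ALL_TESTS();
    }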
diff --git a/googletest/test/googletest-failfast-unittest.py b/googletest/test/googletest-failfast-unittest.py
index 3aeb2dff..cdbce0c5 100755
--- a/googletest/test/googletest-failfast-unittest.py
+++ b/googletest/test/googletest-failfast-unittest.py
@@ -41,7 +41,7 @@ line flags.
"""
import os
-import gtest_test_utils
+from googletest.test import gtest_test_utils
# Constants.
@@ -62,15 +62,18 @@ FILTER_FLAG = 'gtest_filter'
# Command to run the googletest-failfast-unittest_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath(
- 'googletest-failfast-unittest_')
+ 'googletest-failfast-unittest_'
+)
# The command line flag to tell Google Test to output the list of tests it
# will run.
LIST_TESTS_FLAG = '--gtest_list_tests'
# Indicates whether Google Test supports death tests.
-SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
- [COMMAND, LIST_TESTS_FLAG]).output
+SUPPORTS_DEATH_TESTS = (
+ 'HasDeathTest'
+ in gtest_test_utils.Subprocess([COMMAND, LIST_TESTS_FLAG]).output
+)
# Utilities.
@@ -90,8 +93,9 @@ def RunAndReturnOutput(test_suite=None, fail_fast=None, run_disabled=False):
"""Runs the test program and returns its output."""
args = []
- xml_path = os.path.join(gtest_test_utils.GetTempDir(),
- '.GTestFailFastUnitTest.xml')
+ xml_path = os.path.join(
+ gtest_test_utils.GetTempDir(), '.GTestFailFastUnitTest.xml'
+ )
args += ['--gtest_output=xml:' + xml_path]
if fail_fast is not None:
if isinstance(fail_fast, str):
@@ -188,49 +192,63 @@ class GTestFailFastUnitTest(gtest_test_utils.TestCase):
txt, _ = RunAndReturnOutput(test_suite='HasSkipTest', fail_fast=True)
self.assertIn('1 FAILED TEST', txt)
self.assertIn('[ SKIPPED ] 3 tests', txt)
- for expected_count, callback in [(1, 'OnTestSuiteStart'),
- (5, 'OnTestStart'),
- (5, 'OnTestEnd'),
- (5, 'OnTestPartResult'),
- (1, 'OnTestSuiteEnd')]:
+ for expected_count, callback in [
+ (1, 'OnTestSuiteStart'),
+ (5, 'OnTestStart'),
+ (5, 'OnTestEnd'),
+ (5, 'OnTestPartResult'),
+ (1, 'OnTestSuiteEnd'),
+ ]:
self.assertEqual(
- expected_count, txt.count(callback),
- 'Expected %d calls to callback %s match count on output: %s ' %
- (expected_count, callback, txt))
+ expected_count,
+ txt.count(callback),
+ 'Expected %d calls to callback %s match count on output: %s '
+ % (expected_count, callback, txt),
+ )
txt, _ = RunAndReturnOutput(test_suite='HasSkipTest', fail_fast=False)
self.assertIn('3 FAILED TEST', txt)
self.assertIn('[ SKIPPED ] 1 test', txt)
- for expected_count, callback in [(1, 'OnTestSuiteStart'),
- (5, 'OnTestStart'),
- (5, 'OnTestEnd'),
- (5, 'OnTestPartResult'),
- (1, 'OnTestSuiteEnd')]:
+ for expected_count, callback in [
+ (1, 'OnTestSuiteStart'),
+ (5, 'OnTestStart'),
+ (5, 'OnTestEnd'),
+ (5, 'OnTestPartResult'),
+ (1, 'OnTestSuiteEnd'),
+ ]:
self.assertEqual(
- expected_count, txt.count(callback),
- 'Expected %d calls to callback %s match count on output: %s ' %
- (expected_count, callback, txt))
+ expected_count,
+ txt.count(callback),
+ 'Expected %d calls to callback %s match count on output: %s '
+ % (expected_count, callback, txt),
+ )
def assertXmlResultCount(self, result, count, xml):
self.assertEqual(
- count, xml.count('result="%s"' % result),
- 'Expected \'result="%s"\' match count of %s: %s ' %
- (result, count, xml))
+ count,
+ xml.count('result="%s"' % result),
+ 'Expected \'result="%s"\' match count of %s: %s '
+ % (result, count, xml),
+ )
def assertXmlStatusCount(self, status, count, xml):
self.assertEqual(
- count, xml.count('status="%s"' % status),
- 'Expected \'status="%s"\' match count of %s: %s ' %
- (status, count, xml))
-
- def assertFailFastXmlAndTxtOutput(self,
- fail_fast,
- test_suite,
- passed_count,
- failure_count,
- skipped_count,
- suppressed_count,
- run_disabled=False):
+ count,
+ xml.count('status="%s"' % status),
+ 'Expected \'status="%s"\' match count of %s: %s '
+ % (status, count, xml),
+ )
+
+ def assertFailFastXmlAndTxtOutput(
+ self,
+ fail_fast,
+ test_suite,
+ passed_count,
+ failure_count,
+ skipped_count,
+ suppressed_count,
+ run_disabled=False,
+ ):
"""Assert XML and text output of a test execution."""
txt, xml = RunAndReturnOutput(test_suite, fail_fast, run_disabled)
@@ -240,40 +258,57 @@ class GTestFailFastUnitTest(gtest_test_utils.TestCase):
self.assertIn('%s DISABLED TEST' % suppressed_count, txt)
if skipped_count > 0:
self.assertIn('[ SKIPPED ] %s tests' % skipped_count, txt)
- self.assertXmlStatusCount('run',
- passed_count + failure_count + skipped_count, xml)
+ self.assertXmlStatusCount(
+ 'run', passed_count + failure_count + skipped_count, xml
+ )
self.assertXmlStatusCount('notrun', suppressed_count, xml)
self.assertXmlResultCount('completed', passed_count + failure_count, xml)
self.assertXmlResultCount('skipped', skipped_count, xml)
self.assertXmlResultCount('suppressed', suppressed_count, xml)
- def assertFailFastBehavior(self,
- test_suite,
- passed_count,
- failure_count,
- skipped_count,
- suppressed_count,
- run_disabled=False):
+ def assertFailFastBehavior(
+ self,
+ test_suite,
+ passed_count,
+ failure_count,
+ skipped_count,
+ suppressed_count,
+ run_disabled=False,
+ ):
"""Assert --fail_fast via flag."""
for fail_fast in ('true', '1', 't', True):
- self.assertFailFastXmlAndTxtOutput(fail_fast, test_suite, passed_count,
- failure_count, skipped_count,
- suppressed_count, run_disabled)
-
- def assertNotFailFastBehavior(self,
- test_suite,
- passed_count,
- failure_count,
- skipped_count,
- suppressed_count,
- run_disabled=False):
+ self.assertFailFastXmlAndTxtOutput(
+ fail_fast,
+ test_suite,
+ passed_count,
+ failure_count,
+ skipped_count,
+ suppressed_count,
+ run_disabled,
+ )
+
+ def assertNotFailFastBehavior(
+ self,
+ test_suite,
+ passed_count,
+ failure_count,
+ skipped_count,
+ suppressed_count,
+ run_disabled=False,
+ ):
"""Assert --nofail_fast via flag."""
for fail_fast in ('false', '0', 'f', False):
- self.assertFailFastXmlAndTxtOutput(fail_fast, test_suite, passed_count,
- failure_count, skipped_count,
- suppressed_count, run_disabled)
+ self.assertFailFastXmlAndTxtOutput(
+ fail_fast,
+ test_suite,
+ passed_count,
+ failure_count,
+ skipped_count,
+ suppressed_count,
+ run_disabled,
+ )
def testFlag_HasFixtureTest(self):
"""Tests the behavior of fail_fast and TEST_F."""
@@ -282,13 +317,15 @@ class GTestFailFastUnitTest(gtest_test_utils.TestCase):
passed_count=1,
failure_count=1,
skipped_count=3,
- suppressed_count=0)
+ suppressed_count=0,
+ )
self.assertNotFailFastBehavior(
test_suite='HasFixtureTest',
passed_count=1,
failure_count=4,
skipped_count=0,
- suppressed_count=0)
+ suppressed_count=0,
+ )
def testFlag_HasSimpleTest(self):
"""Tests the behavior of fail_fast and TEST."""
@@ -297,13 +334,15 @@ class GTestFailFastUnitTest(gtest_test_utils.TestCase):
passed_count=1,
failure_count=1,
skipped_count=3,
- suppressed_count=0)
+ suppressed_count=0,
+ )
self.assertNotFailFastBehavior(
test_suite='HasSimpleTest',
passed_count=1,
failure_count=4,
skipped_count=0,
- suppressed_count=0)
+ suppressed_count=0,
+ )
def testFlag_HasParametersTest(self):
"""Tests the behavior of fail_fast and TEST_P."""
@@ -312,13 +351,15 @@ class GTestFailFastUnitTest(gtest_test_utils.TestCase):
passed_count=0,
failure_count=1,
skipped_count=3,
- suppressed_count=0)
+ suppressed_count=0,
+ )
self.assertNotFailFastBehavior(
test_suite='HasParametersSuite/HasParametersTest',
passed_count=0,
failure_count=4,
skipped_count=0,
- suppressed_count=0)
+ suppressed_count=0,
+ )
def testFlag_HasDisabledTest(self):
"""Tests the behavior of fail_fast and Disabled test cases."""
@@ -328,14 +369,16 @@ class GTestFailFastUnitTest(gtest_test_utils.TestCase):
failure_count=1,
skipped_count=2,
suppressed_count=1,
- run_disabled=False)
+ run_disabled=False,
+ )
self.assertNotFailFastBehavior(
test_suite='HasDisabledTest',
passed_count=1,
failure_count=3,
skipped_count=0,
suppressed_count=1,
- run_disabled=False)
+ run_disabled=False,
+ )
def testFlag_HasDisabledRunDisabledTest(self):
"""Tests the behavior of fail_fast and Disabled test cases enabled."""
@@ -345,14 +388,16 @@ class GTestFailFastUnitTest(gtest_test_utils.TestCase):
failure_count=1,
skipped_count=3,
suppressed_count=0,
- run_disabled=True)
+ run_disabled=True,
+ )
self.assertNotFailFastBehavior(
test_suite='HasDisabledTest',
passed_count=1,
failure_count=4,
skipped_count=0,
suppressed_count=0,
- run_disabled=True)
+ run_disabled=True,
+ )
def testFlag_HasDisabledSuiteTest(self):
"""Tests the behavior of fail_fast and Disabled test suites."""
@@ -362,14 +407,16 @@ class GTestFailFastUnitTest(gtest_test_utils.TestCase):
failure_count=0,
skipped_count=0,
suppressed_count=5,
- run_disabled=False)
+ run_disabled=False,
+ )
self.assertNotFailFastBehavior(
test_suite='DISABLED_HasDisabledSuite',
passed_count=0,
failure_count=0,
skipped_count=0,
suppressed_count=5,
- run_disabled=False)
+ run_disabled=False,
+ )
def testFlag_HasDisabledSuiteRunDisabledTest(self):
"""Tests the behavior of fail_fast and Disabled test suites enabled."""
@@ -379,14 +426,16 @@ class GTestFailFastUnitTest(gtest_test_utils.TestCase):
failure_count=1,
skipped_count=3,
suppressed_count=0,
- run_disabled=True)
+ run_disabled=True,
+ )
self.assertNotFailFastBehavior(
test_suite='DISABLED_HasDisabledSuite',
passed_count=1,
failure_count=4,
skipped_count=0,
suppressed_count=0,
- run_disabled=True)
+ run_disabled=True,
+ )
if SUPPORTS_DEATH_TESTS:
@@ -397,13 +446,15 @@ class GTestFailFastUnitTest(gtest_test_utils.TestCase):
passed_count=1,
failure_count=1,
skipped_count=3,
- suppressed_count=0)
+ suppressed_count=0,
+ )
self.assertNotFailFastBehavior(
test_suite='HasDeathTest',
passed_count=1,
failure_count=4,
skipped_count=0,
- suppressed_count=0)
+ suppressed_count=0,
+ )
if __name__ == '__main__':
diff --git a/googletest/test/googletest-failfast-unittest_.cc b/googletest/test/googletest-failfast-unittest_.cc
index 0b2c951b..3bd05a8e 100644
--- a/googletest/test/googletest-failfast-unittest_.cc
+++ b/googletest/test/googletest-failfast-unittest_.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// Unit test for Google Test test filters.
//
// A user can specify which test(s) in a Google Test program to run via
@@ -160,7 +159,7 @@ TEST(HasSkipTest, Test4) { FAIL() << "Expected failure."; }
} // namespace
-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
::testing::InitGoogleTest(&argc, argv);
::testing::UnitTest::GetInstance()->listeners().Append(new MyTestListener());
return RUN_ALL_TESTS();
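[Editor's note] As a reminder of the behavior the Python harness above asserts: with fail_fast enabled, the first failing test stops the run and the remaining tests are reported as skipped, which the harness counts via the `[  SKIPPED ]` banner and `result="skipped"` in the XML. A minimal sketch with hypothetical test names:

    #include "gtest/gtest.h"

    TEST(FailFastSketch, Fails) { FAIL() << "Expected failure."; }
    TEST(FailFastSketch, NeverReached) { SUCCEED(); }  // skipped under fail_fast

    int main(int argc, char** argv) {
      testing::InitGoogleTest(&argc, argv);
      GTEST_FLAG_SET(fail_fast, true);  // same effect as --gtest_fail_fast
      return RUN_ALL_TESTS();
    }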
diff --git a/googletest/test/googletest-filepath-test.cc b/googletest/test/googletest-filepath-test.cc
index aafad36f..3e9c79f0 100644
--- a/googletest/test/googletest-filepath-test.cc
+++ b/googletest/test/googletest-filepath-test.cc
@@ -35,36 +35,38 @@
// This file is #included from gtest-internal.h.
// Do not #include this file anywhere else!
-#include "gtest/internal/gtest-filepath.h"
+#include <string>
+
#include "gtest/gtest.h"
+#include "gtest/internal/gtest-filepath.h"
#include "src/gtest-internal-inl.h"
-#if GTEST_OS_WINDOWS_MOBILE
-# include <windows.h> // NOLINT
-#elif GTEST_OS_WINDOWS
-# include <direct.h> // NOLINT
-#endif // GTEST_OS_WINDOWS_MOBILE
+#ifdef GTEST_OS_WINDOWS_MOBILE
+#include <windows.h> // NOLINT
+#elif defined(GTEST_OS_WINDOWS)
+#include <direct.h> // NOLINT
+#endif // GTEST_OS_WINDOWS_MOBILE
namespace testing {
namespace internal {
namespace {
-#if GTEST_OS_WINDOWS_MOBILE
+#ifdef GTEST_OS_WINDOWS_MOBILE
// Windows CE doesn't have the remove C function.
int remove(const char* path) {
LPCWSTR wpath = String::AnsiToUtf16(path);
int ret = DeleteFile(wpath) ? 0 : -1;
- delete [] wpath;
+ delete[] wpath;
return ret;
}
// Windows CE doesn't have the _rmdir C function.
int _rmdir(const char* path) {
FilePath filepath(path);
- LPCWSTR wpath = String::AnsiToUtf16(
- filepath.RemoveTrailingPathSeparator().c_str());
+ LPCWSTR wpath =
+ String::AnsiToUtf16(filepath.RemoveTrailingPathSeparator().c_str());
int ret = RemoveDirectory(wpath) ? 0 : -1;
- delete [] wpath;
+ delete[] wpath;
return ret;
}
@@ -78,18 +80,18 @@ TEST(GetCurrentDirTest, ReturnsCurrentDir) {
const FilePath cwd = FilePath::GetCurrentDir();
posix::ChDir(original_dir.c_str());
-# if GTEST_OS_WINDOWS || GTEST_OS_OS2
+#if defined(GTEST_OS_WINDOWS) || defined(GTEST_OS_OS2)
// Skips the ":".
const char* const cwd_without_drive = strchr(cwd.c_str(), ':');
ASSERT_TRUE(cwd_without_drive != NULL);
EXPECT_STREQ(GTEST_PATH_SEP_, cwd_without_drive + 1);
-# else
+#else
EXPECT_EQ(GTEST_PATH_SEP_, cwd.string());
-# endif
+#endif
}
#endif // GTEST_OS_WINDOWS_MOBILE
@@ -112,33 +114,34 @@ TEST(RemoveDirectoryNameTest, WhenEmptyName) {
// RemoveDirectoryName "afile" -> "afile"
TEST(RemoveDirectoryNameTest, ButNoDirectory) {
- EXPECT_EQ("afile",
- FilePath("afile").RemoveDirectoryName().string());
+ EXPECT_EQ("afile", FilePath("afile").RemoveDirectoryName().string());
}
// RemoveDirectoryName "/afile" -> "afile"
TEST(RemoveDirectoryNameTest, RootFileShouldGiveFileName) {
EXPECT_EQ("afile",
- FilePath(GTEST_PATH_SEP_ "afile").RemoveDirectoryName().string());
+ FilePath(GTEST_PATH_SEP_ "afile").RemoveDirectoryName().string());
}
// RemoveDirectoryName "adir/" -> ""
TEST(RemoveDirectoryNameTest, WhereThereIsNoFileName) {
EXPECT_EQ("",
- FilePath("adir" GTEST_PATH_SEP_).RemoveDirectoryName().string());
+ FilePath("adir" GTEST_PATH_SEP_).RemoveDirectoryName().string());
}
// RemoveDirectoryName "adir/afile" -> "afile"
TEST(RemoveDirectoryNameTest, ShouldGiveFileName) {
- EXPECT_EQ("afile",
+ EXPECT_EQ(
+ "afile",
FilePath("adir" GTEST_PATH_SEP_ "afile").RemoveDirectoryName().string());
}
// RemoveDirectoryName "adir/subdir/afile" -> "afile"
TEST(RemoveDirectoryNameTest, ShouldAlsoGiveFileName) {
EXPECT_EQ("afile",
- FilePath("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_ "afile")
- .RemoveDirectoryName().string());
+ FilePath("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_ "afile")
+ .RemoveDirectoryName()
+ .string());
}
#if GTEST_HAS_ALT_PATH_SEP_
@@ -171,7 +174,7 @@ TEST(RemoveDirectoryNameTest, ShouldAlsoGiveFileNameForAlternateSeparator) {
// RemoveFileName "" -> "./"
TEST(RemoveFileNameTest, EmptyName) {
-#if GTEST_OS_WINDOWS_MOBILE
+#ifdef GTEST_OS_WINDOWS_MOBILE
// On Windows CE, we use the root as the current directory.
EXPECT_EQ(GTEST_PATH_SEP_, FilePath("").RemoveFileName().string());
#else
@@ -182,7 +185,7 @@ TEST(RemoveFileNameTest, EmptyName) {
// RemoveFileName "adir/" -> "adir/"
TEST(RemoveFileNameTest, ButNoFile) {
EXPECT_EQ("adir" GTEST_PATH_SEP_,
- FilePath("adir" GTEST_PATH_SEP_).RemoveFileName().string());
+ FilePath("adir" GTEST_PATH_SEP_).RemoveFileName().string());
}
// RemoveFileName "adir/afile" -> "adir/"
@@ -194,14 +197,15 @@ TEST(RemoveFileNameTest, GivesDirName) {
// RemoveFileName "adir/subdir/afile" -> "adir/subdir/"
TEST(RemoveFileNameTest, GivesDirAndSubDirName) {
EXPECT_EQ("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_,
- FilePath("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_ "afile")
- .RemoveFileName().string());
+ FilePath("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_ "afile")
+ .RemoveFileName()
+ .string());
}
// RemoveFileName "/afile" -> "/"
TEST(RemoveFileNameTest, GivesRootDir) {
EXPECT_EQ(GTEST_PATH_SEP_,
- FilePath(GTEST_PATH_SEP_ "afile").RemoveFileName().string());
+ FilePath(GTEST_PATH_SEP_ "afile").RemoveFileName().string());
}
#if GTEST_HAS_ALT_PATH_SEP_
@@ -235,44 +239,43 @@ TEST(RemoveFileNameTest, GivesRootDirForAlternateSeparator) {
#endif
TEST(MakeFileNameTest, GenerateWhenNumberIsZero) {
- FilePath actual = FilePath::MakeFileName(FilePath("foo"), FilePath("bar"),
- 0, "xml");
+ FilePath actual =
+ FilePath::MakeFileName(FilePath("foo"), FilePath("bar"), 0, "xml");
EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar.xml", actual.string());
}
TEST(MakeFileNameTest, GenerateFileNameNumberGtZero) {
- FilePath actual = FilePath::MakeFileName(FilePath("foo"), FilePath("bar"),
- 12, "xml");
+ FilePath actual =
+ FilePath::MakeFileName(FilePath("foo"), FilePath("bar"), 12, "xml");
EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar_12.xml", actual.string());
}
TEST(MakeFileNameTest, GenerateFileNameWithSlashNumberIsZero) {
FilePath actual = FilePath::MakeFileName(FilePath("foo" GTEST_PATH_SEP_),
- FilePath("bar"), 0, "xml");
+ FilePath("bar"), 0, "xml");
EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar.xml", actual.string());
}
TEST(MakeFileNameTest, GenerateFileNameWithSlashNumberGtZero) {
FilePath actual = FilePath::MakeFileName(FilePath("foo" GTEST_PATH_SEP_),
- FilePath("bar"), 12, "xml");
+ FilePath("bar"), 12, "xml");
EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar_12.xml", actual.string());
}
TEST(MakeFileNameTest, GenerateWhenNumberIsZeroAndDirIsEmpty) {
- FilePath actual = FilePath::MakeFileName(FilePath(""), FilePath("bar"),
- 0, "xml");
+ FilePath actual =
+ FilePath::MakeFileName(FilePath(""), FilePath("bar"), 0, "xml");
EXPECT_EQ("bar.xml", actual.string());
}
TEST(MakeFileNameTest, GenerateWhenNumberIsNotZeroAndDirIsEmpty) {
- FilePath actual = FilePath::MakeFileName(FilePath(""), FilePath("bar"),
- 14, "xml");
+ FilePath actual =
+ FilePath::MakeFileName(FilePath(""), FilePath("bar"), 14, "xml");
EXPECT_EQ("bar_14.xml", actual.string());
}
TEST(ConcatPathsTest, WorksWhenDirDoesNotEndWithPathSep) {
- FilePath actual = FilePath::ConcatPaths(FilePath("foo"),
- FilePath("bar.xml"));
+ FilePath actual = FilePath::ConcatPaths(FilePath("foo"), FilePath("bar.xml"));
EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar.xml", actual.string());
}
@@ -283,8 +286,7 @@ TEST(ConcatPathsTest, WorksWhenPath1EndsWithPathSep) {
}
TEST(ConcatPathsTest, Path1BeingEmpty) {
- FilePath actual = FilePath::ConcatPaths(FilePath(""),
- FilePath("bar.xml"));
+ FilePath actual = FilePath::ConcatPaths(FilePath(""), FilePath("bar.xml"));
EXPECT_EQ("bar.xml", actual.string());
}
@@ -294,8 +296,7 @@ TEST(ConcatPathsTest, Path2BeingEmpty) {
}
TEST(ConcatPathsTest, BothPathBeingEmpty) {
- FilePath actual = FilePath::ConcatPaths(FilePath(""),
- FilePath(""));
+ FilePath actual = FilePath::ConcatPaths(FilePath(""), FilePath(""));
EXPECT_EQ("", actual.string());
}
@@ -307,16 +308,16 @@ TEST(ConcatPathsTest, Path1ContainsPathSep) {
}
TEST(ConcatPathsTest, Path2ContainsPathSep) {
- FilePath actual = FilePath::ConcatPaths(
- FilePath("foo" GTEST_PATH_SEP_),
- FilePath("bar" GTEST_PATH_SEP_ "bar.xml"));
+ FilePath actual =
+ FilePath::ConcatPaths(FilePath("foo" GTEST_PATH_SEP_),
+ FilePath("bar" GTEST_PATH_SEP_ "bar.xml"));
EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar" GTEST_PATH_SEP_ "bar.xml",
actual.string());
}
TEST(ConcatPathsTest, Path2EndsWithPathSep) {
- FilePath actual = FilePath::ConcatPaths(FilePath("foo"),
- FilePath("bar" GTEST_PATH_SEP_));
+ FilePath actual =
+ FilePath::ConcatPaths(FilePath("foo"), FilePath("bar" GTEST_PATH_SEP_));
EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar" GTEST_PATH_SEP_, actual.string());
}
@@ -332,7 +333,8 @@ TEST(RemoveTrailingPathSeparatorTest, FileNoSlashString) {
// RemoveTrailingPathSeparator "foo/" -> "foo"
TEST(RemoveTrailingPathSeparatorTest, ShouldRemoveTrailingSeparator) {
- EXPECT_EQ("foo",
+ EXPECT_EQ(
+ "foo",
FilePath("foo" GTEST_PATH_SEP_).RemoveTrailingPathSeparator().string());
#if GTEST_HAS_ALT_PATH_SEP_
EXPECT_EQ("foo", FilePath("foo/").RemoveTrailingPathSeparator().string());
@@ -343,18 +345,19 @@ TEST(RemoveTrailingPathSeparatorTest, ShouldRemoveTrailingSeparator) {
TEST(RemoveTrailingPathSeparatorTest, ShouldRemoveLastSeparator) {
EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar",
FilePath("foo" GTEST_PATH_SEP_ "bar" GTEST_PATH_SEP_)
- .RemoveTrailingPathSeparator().string());
+ .RemoveTrailingPathSeparator()
+ .string());
}
// RemoveTrailingPathSeparator "foo/bar" -> "foo/bar"
TEST(RemoveTrailingPathSeparatorTest, ShouldReturnUnmodified) {
- EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar",
- FilePath("foo" GTEST_PATH_SEP_ "bar")
- .RemoveTrailingPathSeparator().string());
+ EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar", FilePath("foo" GTEST_PATH_SEP_ "bar")
+ .RemoveTrailingPathSeparator()
+ .string());
}
TEST(DirectoryTest, RootDirectoryExists) {
-#if GTEST_OS_WINDOWS // We are on Windows.
+#ifdef GTEST_OS_WINDOWS // We are on Windows.
char current_drive[_MAX_PATH]; // NOLINT
current_drive[0] = static_cast<char>(_getdrive() + 'A' - 1);
current_drive[1] = ':';
@@ -366,7 +369,7 @@ TEST(DirectoryTest, RootDirectoryExists) {
#endif // GTEST_OS_WINDOWS
}
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
TEST(DirectoryTest, RootOfWrongDriveDoesNotExists) {
const int saved_drive_ = _getdrive();
// Find a drive that doesn't exist. Start with 'Z' to avoid common ones.
@@ -384,7 +387,7 @@ TEST(DirectoryTest, RootOfWrongDriveDoesNotExists) {
}
#endif // GTEST_OS_WINDOWS
-#if !GTEST_OS_WINDOWS_MOBILE
+#ifndef GTEST_OS_WINDOWS_MOBILE
// Windows CE _does_ consider an empty directory to exist.
TEST(DirectoryTest, EmptyPathDirectoryDoesNotExist) {
EXPECT_FALSE(FilePath("").DirectoryExists());
@@ -392,13 +395,13 @@ TEST(DirectoryTest, EmptyPathDirectoryDoesNotExist) {
#endif // !GTEST_OS_WINDOWS_MOBILE
TEST(DirectoryTest, CurrentDirectoryExists) {
-#if GTEST_OS_WINDOWS // We are on Windows.
-# ifndef _WIN32_CE // Windows CE doesn't have a current directory.
+#ifdef GTEST_OS_WINDOWS // We are on Windows.
+#ifndef _WIN32_CE // Windows CE doesn't have a current directory.
EXPECT_TRUE(FilePath(".").DirectoryExists());
EXPECT_TRUE(FilePath(".\\").DirectoryExists());
-# endif // _WIN32_CE
+#endif // _WIN32_CE
#else
EXPECT_TRUE(FilePath(".").DirectoryExists());
EXPECT_TRUE(FilePath("./").DirectoryExists());
@@ -406,34 +409,40 @@ TEST(DirectoryTest, CurrentDirectoryExists) {
}
// "foo/bar" == foo//bar" == "foo///bar"
-TEST(NormalizeTest, MultipleConsecutiveSepaparatorsInMidstring) {
+TEST(NormalizeTest, MultipleConsecutiveSeparatorsInMidstring) {
EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar",
FilePath("foo" GTEST_PATH_SEP_ "bar").string());
EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar",
FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").string());
- EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar",
- FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_
- GTEST_PATH_SEP_ "bar").string());
+ EXPECT_EQ(
+ "foo" GTEST_PATH_SEP_ "bar",
+ FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar")
+ .string());
}
// "/bar" == //bar" == "///bar"
-TEST(NormalizeTest, MultipleConsecutiveSepaparatorsAtStringStart) {
- EXPECT_EQ(GTEST_PATH_SEP_ "bar",
- FilePath(GTEST_PATH_SEP_ "bar").string());
- EXPECT_EQ(GTEST_PATH_SEP_ "bar",
- FilePath(GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").string());
+TEST(NormalizeTest, MultipleConsecutiveSeparatorsAtStringStart) {
+ EXPECT_EQ(GTEST_PATH_SEP_ "bar", FilePath(GTEST_PATH_SEP_ "bar").string());
+#ifdef GTEST_OS_WINDOWS
+ EXPECT_EQ(GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar",
+ FilePath(GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").string());
+#else
EXPECT_EQ(GTEST_PATH_SEP_ "bar",
- FilePath(GTEST_PATH_SEP_ GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").string());
+ FilePath(GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").string());
+#endif
+ EXPECT_EQ(
+ GTEST_PATH_SEP_ "bar",
+ FilePath(GTEST_PATH_SEP_ GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").string());
}
// "foo/" == foo//" == "foo///"
-TEST(NormalizeTest, MultipleConsecutiveSepaparatorsAtStringEnd) {
- EXPECT_EQ("foo" GTEST_PATH_SEP_,
- FilePath("foo" GTEST_PATH_SEP_).string());
+TEST(NormalizeTest, MultipleConsecutiveSeparatorsAtStringEnd) {
+ EXPECT_EQ("foo" GTEST_PATH_SEP_, FilePath("foo" GTEST_PATH_SEP_).string());
EXPECT_EQ("foo" GTEST_PATH_SEP_,
- FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_).string());
- EXPECT_EQ("foo" GTEST_PATH_SEP_,
- FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_ GTEST_PATH_SEP_).string());
+ FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_).string());
+ EXPECT_EQ(
+ "foo" GTEST_PATH_SEP_,
+ FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_ GTEST_PATH_SEP_).string());
}
#if GTEST_HAS_ALT_PATH_SEP_
@@ -442,12 +451,10 @@ TEST(NormalizeTest, MultipleConsecutiveSepaparatorsAtStringEnd) {
// regardless of their combination (e.g. "foo\" == "foo/\" ==
// "foo\\/").
TEST(NormalizeTest, MixAlternateSeparatorAtStringEnd) {
- EXPECT_EQ("foo" GTEST_PATH_SEP_,
- FilePath("foo/").string());
+ EXPECT_EQ("foo" GTEST_PATH_SEP_, FilePath("foo/").string());
EXPECT_EQ("foo" GTEST_PATH_SEP_,
FilePath("foo" GTEST_PATH_SEP_ "/").string());
- EXPECT_EQ("foo" GTEST_PATH_SEP_,
- FilePath("foo//" GTEST_PATH_SEP_).string());
+ EXPECT_EQ("foo" GTEST_PATH_SEP_, FilePath("foo//" GTEST_PATH_SEP_).string());
}
#endif
@@ -478,15 +485,15 @@ TEST(AssignmentOperatorTest, ConstAssignedToNonConst) {
class DirectoryCreationTest : public Test {
protected:
void SetUp() override {
- testdata_path_.Set(FilePath(
- TempDir() + GetCurrentExecutableName().string() +
- "_directory_creation" GTEST_PATH_SEP_ "test" GTEST_PATH_SEP_));
+ testdata_path_.Set(
+ FilePath(TempDir() + GetCurrentExecutableName().string() +
+ "_directory_creation" GTEST_PATH_SEP_ "test" GTEST_PATH_SEP_));
testdata_file_.Set(testdata_path_.RemoveTrailingPathSeparator());
- unique_file0_.Set(FilePath::MakeFileName(testdata_path_, FilePath("unique"),
- 0, "txt"));
- unique_file1_.Set(FilePath::MakeFileName(testdata_path_, FilePath("unique"),
- 1, "txt"));
+ unique_file0_.Set(
+ FilePath::MakeFileName(testdata_path_, FilePath("unique"), 0, "txt"));
+ unique_file1_.Set(
+ FilePath::MakeFileName(testdata_path_, FilePath("unique"), 1, "txt"));
remove(testdata_file_.c_str());
remove(unique_file0_.c_str());
@@ -508,12 +515,12 @@ class DirectoryCreationTest : public Test {
}
// Strings representing a directory and a file, with identical paths
- // except for the trailing separator character that distinquishes
+ // except for the trailing separator character that distinguishes
// a directory named 'test' from a file named 'test'. Example names:
FilePath testdata_path_; // "/tmp/directory_creation/test/"
FilePath testdata_file_; // "/tmp/directory_creation/test"
- FilePath unique_file0_; // "/tmp/directory_creation/test/unique.txt"
- FilePath unique_file1_; // "/tmp/directory_creation/test/unique_1.txt"
+ FilePath unique_file0_; // "/tmp/directory_creation/test/unique.txt"
+ FilePath unique_file1_; // "/tmp/directory_creation/test/unique_1.txt"
};
TEST_F(DirectoryCreationTest, CreateDirectoriesRecursively) {
@@ -530,8 +537,8 @@ TEST_F(DirectoryCreationTest, CreateDirectoriesForAlreadyExistingPath) {
}
TEST_F(DirectoryCreationTest, CreateDirectoriesAndUniqueFilename) {
- FilePath file_path(FilePath::GenerateUniqueFileName(testdata_path_,
- FilePath("unique"), "txt"));
+ FilePath file_path(FilePath::GenerateUniqueFileName(
+ testdata_path_, FilePath("unique"), "txt"));
EXPECT_EQ(unique_file0_.string(), file_path.string());
EXPECT_FALSE(file_path.FileOrDirectoryExists()); // file not there
@@ -540,8 +547,8 @@ TEST_F(DirectoryCreationTest, CreateDirectoriesAndUniqueFilename) {
CreateTextFile(file_path.c_str());
EXPECT_TRUE(file_path.FileOrDirectoryExists());
- FilePath file_path2(FilePath::GenerateUniqueFileName(testdata_path_,
- FilePath("unique"), "txt"));
+ FilePath file_path2(FilePath::GenerateUniqueFileName(
+ testdata_path_, FilePath("unique"), "txt"));
EXPECT_EQ(unique_file1_.string(), file_path2.string());
EXPECT_FALSE(file_path2.FileOrDirectoryExists()); // file not there
CreateTextFile(file_path2.c_str());
@@ -613,20 +620,25 @@ TEST(FilePathTest, IsDirectory) {
TEST(FilePathTest, IsAbsolutePath) {
EXPECT_FALSE(FilePath("is" GTEST_PATH_SEP_ "relative").IsAbsolutePath());
EXPECT_FALSE(FilePath("").IsAbsolutePath());
-#if GTEST_OS_WINDOWS
- EXPECT_TRUE(FilePath("c:\\" GTEST_PATH_SEP_ "is_not"
- GTEST_PATH_SEP_ "relative").IsAbsolutePath());
+#ifdef GTEST_OS_WINDOWS
+ EXPECT_TRUE(
+ FilePath("c:\\" GTEST_PATH_SEP_ "is_not" GTEST_PATH_SEP_ "relative")
+ .IsAbsolutePath());
EXPECT_FALSE(FilePath("c:foo" GTEST_PATH_SEP_ "bar").IsAbsolutePath());
- EXPECT_TRUE(FilePath("c:/" GTEST_PATH_SEP_ "is_not"
- GTEST_PATH_SEP_ "relative").IsAbsolutePath());
+ EXPECT_TRUE(
+ FilePath("c:/" GTEST_PATH_SEP_ "is_not" GTEST_PATH_SEP_ "relative")
+ .IsAbsolutePath());
+ EXPECT_TRUE(FilePath("d:/Windows").IsAbsolutePath());
+ EXPECT_TRUE(FilePath("\\\\Host\\Share").IsAbsolutePath());
+ EXPECT_TRUE(FilePath("\\\\Host\\Share\\Folder").IsAbsolutePath());
#else
EXPECT_TRUE(FilePath(GTEST_PATH_SEP_ "is_not" GTEST_PATH_SEP_ "relative")
- .IsAbsolutePath());
+ .IsAbsolutePath());
#endif // GTEST_OS_WINDOWS
}
TEST(FilePathTest, IsRootDirectory) {
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
EXPECT_TRUE(FilePath("a:\\").IsRootDirectory());
EXPECT_TRUE(FilePath("Z:/").IsRootDirectory());
EXPECT_TRUE(FilePath("e://").IsRootDirectory());
@@ -635,6 +647,16 @@ TEST(FilePathTest, IsRootDirectory) {
EXPECT_FALSE(FilePath("b:a").IsRootDirectory());
EXPECT_FALSE(FilePath("8:/").IsRootDirectory());
EXPECT_FALSE(FilePath("c|/").IsRootDirectory());
+ EXPECT_TRUE(FilePath("c:/").IsRootDirectory());
+ EXPECT_FALSE(FilePath("d:/Windows").IsRootDirectory());
+
+ // This is for backward compatibility, since callers (even in this library)
+ // have assumed IsRootDirectory() implies a trailing directory separator.
+ EXPECT_FALSE(FilePath("\\\\Host\\Share").IsRootDirectory());
+
+ EXPECT_TRUE(FilePath("\\\\Host\\Share\\").IsRootDirectory());
+ EXPECT_FALSE(FilePath("\\\\Host\\Share\\.").IsRootDirectory());
+ EXPECT_FALSE(FilePath("\\\\Host\\Share\\C$\\").IsRootDirectory());
#else
EXPECT_TRUE(FilePath("/").IsRootDirectory());
EXPECT_TRUE(FilePath("//").IsRootDirectory());
diff --git a/googletest/test/googletest-filter-unittest.py b/googletest/test/googletest-filter-unittest.py
index 6b32f2d2..f1f3c7a5 100755
--- a/googletest/test/googletest-filter-unittest.py
+++ b/googletest/test/googletest-filter-unittest.py
@@ -42,12 +42,13 @@ we test that here also.
import os
import re
+
try:
from sets import Set as set # For Python 2.3 compatibility
except ImportError:
pass
import sys
-import gtest_test_utils
+from googletest.test import gtest_test_utils
# Constants.
@@ -60,7 +61,8 @@ CAN_PASS_EMPTY_ENV = False
if sys.executable:
os.environ['EMPTY_VAR'] = ''
child = gtest_test_utils.Subprocess(
- [sys.executable, '-c', 'import os; print(\'EMPTY_VAR\' in os.environ)'])
+ [sys.executable, '-c', "import os; print('EMPTY_VAR' in os.environ)"]
+ )
CAN_PASS_EMPTY_ENV = eval(child.output)
@@ -75,8 +77,8 @@ if sys.executable:
os.environ['UNSET_VAR'] = 'X'
del os.environ['UNSET_VAR']
child = gtest_test_utils.Subprocess(
- [sys.executable, '-c', 'import os; print(\'UNSET_VAR\' not in os.environ)'
- ])
+ [sys.executable, '-c', "import os; print('UNSET_VAR' not in os.environ)"]
+ )
CAN_UNSET_ENV = eval(child.output)
@@ -84,7 +86,7 @@ if sys.executable:
# make sense on platforms that cannot pass empty env variables (Win32)
# and on platforms that cannot unset variables (since we cannot tell
# the difference between "" and NULL -- Borland and Solaris < 5.10)
-CAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV)
+CAN_TEST_EMPTY_FILTER = CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV
# The environment variable for specifying the test filters.
@@ -113,13 +115,18 @@ TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)')
# Regex for parsing test names from Google Test's output.
TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)')
+# Regex for parsing disabled banner from Google Test's output
+DISABLED_BANNER_REGEX = re.compile(r'^\[\s*DISABLED\s*\] (.*)')
+
# The command line flag to tell Google Test to output the list of tests it
# will run.
LIST_TESTS_FLAG = '--gtest_list_tests'
# Indicates whether Google Test supports death tests.
-SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
- [COMMAND, LIST_TESTS_FLAG]).output
+SUPPORTS_DEATH_TESTS = (
+ 'HasDeathTest'
+ in gtest_test_utils.Subprocess([COMMAND, LIST_TESTS_FLAG]).output
+)
# Full names of all tests in googletest-filter-unittests_.
PARAM_TESTS = [
@@ -131,7 +138,7 @@ PARAM_TESTS = [
'SeqQ/ParamTest.TestX/1',
'SeqQ/ParamTest.TestY/0',
'SeqQ/ParamTest.TestY/1',
- ]
+]
DISABLED_TESTS = [
'BarTest.DISABLED_TestFour',
@@ -140,29 +147,31 @@ DISABLED_TESTS = [
'DISABLED_FoobarTest.Test1',
'DISABLED_FoobarTest.DISABLED_Test2',
'DISABLED_FoobarbazTest.TestA',
- ]
+]
if SUPPORTS_DEATH_TESTS:
DEATH_TESTS = [
- 'HasDeathTest.Test1',
- 'HasDeathTest.Test2',
- ]
+ 'HasDeathTest.Test1',
+ 'HasDeathTest.Test2',
+ ]
else:
DEATH_TESTS = []
# All the non-disabled tests.
-ACTIVE_TESTS = [
- 'FooTest.Abc',
- 'FooTest.Xyz',
-
- 'BarTest.TestOne',
- 'BarTest.TestTwo',
- 'BarTest.TestThree',
-
- 'BazTest.TestOne',
- 'BazTest.TestA',
- 'BazTest.TestB',
- ] + DEATH_TESTS + PARAM_TESTS
+ACTIVE_TESTS = (
+ [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+ 'BarTest.TestOne',
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+ 'BazTest.TestOne',
+ 'BazTest.TestA',
+ 'BazTest.TestB',
+ ]
+ + DEATH_TESTS
+ + PARAM_TESTS
+)
param_tests_present = None
@@ -180,14 +189,15 @@ def SetEnvVar(env_var, value):
del environ[env_var]
-def RunAndReturnOutput(args = None):
+def RunAndReturnOutput(args=None):
"""Runs the test program and returns its output."""
- return gtest_test_utils.Subprocess([COMMAND] + (args or []),
- env=environ).output
+ return gtest_test_utils.Subprocess(
+ [COMMAND] + (args or []), env=environ
+ ).output
-def RunAndExtractTestList(args = None):
+def RunAndExtractTestList(args=None):
"""Runs the test program and returns its exit code and a list of tests run."""
p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ)
@@ -206,6 +216,17 @@ def RunAndExtractTestList(args = None):
return (tests_run, p.exit_code)
+def RunAndExtractDisabledBannerList(args=None):
+ """Runs the test program and returns tests that printed a disabled banner."""
+ p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ)
+ banners_printed = []
+ for line in p.output.split('\n'):
+ match = DISABLED_BANNER_REGEX.match(line)
+ if match is not None:
+ banners_printed.append(match.group(1))
+ return banners_printed
+
+
def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
"""Runs the given function and arguments in a modified environment."""
try:
@@ -220,10 +241,13 @@ def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
def RunWithSharding(total_shards, shard_index, command):
"""Runs a test program shard and returns exit code and a list of tests run."""
- extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index),
- TOTAL_SHARDS_ENV_VAR: str(total_shards)}
+ extra_env = {
+ SHARD_INDEX_ENV_VAR: str(shard_index),
+ TOTAL_SHARDS_ENV_VAR: str(total_shards),
+ }
return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command)
+
# The unit test.
@@ -236,10 +260,10 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
"""Asserts that two sets are equal."""
for elem in lhs:
- self.assert_(elem in rhs, '%s in %s' % (elem, rhs))
+ self.assertTrue(elem in rhs, '%s in %s' % (elem, rhs))
for elem in rhs:
- self.assert_(elem in lhs, '%s in %s' % (elem, lhs))
+ self.assertTrue(elem in lhs, '%s in %s' % (elem, lhs))
def AssertPartitionIsValid(self, set_var, list_of_sets):
"""Asserts that list_of_sets is a valid partition of set_var."""
@@ -271,13 +295,13 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
# into a process using the environment variable. However, we can still
# test the case when the variable is not supplied (i.e., gtest_filter is
# None).
- # pylint: disable-msg=C6403
+ # pylint: disable=g-explicit-bool-comparison
if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
SetEnvVar(FILTER_ENV_VAR, gtest_filter)
tests_run = RunAndExtractTestList()[0]
SetEnvVar(FILTER_ENV_VAR, None)
self.AssertSetEqual(tests_run, tests_to_run)
- # pylint: enable-msg=C6403
+ # pylint: enable=g-explicit-bool-comparison
# Next, tests using the command line flag.
@@ -289,8 +313,14 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
tests_run = RunAndExtractTestList(args)[0]
self.AssertSetEqual(tests_run, tests_to_run)
- def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
- args=None, check_exit_0=False):
+ def RunAndVerifyWithSharding(
+ self,
+ gtest_filter,
+ total_shards,
+ tests_to_run,
+ args=None,
+ check_exit_0=False,
+ ):
"""Checks that binary runs correct tests for the given filter and shard.
Runs all shards of googletest-filter-unittest_ with the given filter, and
@@ -302,9 +332,9 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
gtest_filter: A filter to apply to the tests.
total_shards: A total number of shards to split test run into.
tests_to_run: A set of tests expected to run.
- args : Arguments to pass to the to the test binary.
- check_exit_0: When set to a true value, make sure that all shards
- return 0.
+      args: Arguments to pass to the test binary.
+ check_exit_0: When set to a true value, make sure that all shards return
+ 0.
"""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
@@ -314,7 +344,7 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
# into a process using the environment variable. However, we can still
# test the case when the variable is not supplied (i.e., gtest_filter is
# None).
- # pylint: disable-msg=C6403
+ # pylint: disable=g-explicit-bool-comparison
if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
SetEnvVar(FILTER_ENV_VAR, gtest_filter)
partition = []
@@ -326,7 +356,7 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
self.AssertPartitionIsValid(tests_to_run, partition)
SetEnvVar(FILTER_ENV_VAR, None)
- # pylint: enable-msg=C6403
+ # pylint: enable=g-explicit-bool-comparison
def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):
"""Checks that the binary runs correct set of tests for the given filter.
@@ -358,8 +388,9 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
global param_tests_present
if param_tests_present is None:
- param_tests_present = PARAM_TEST_REGEX.search(
- RunAndReturnOutput()) is not None
+ param_tests_present = (
+ PARAM_TEST_REGEX.search(RunAndReturnOutput()) is not None
+ )
def testDefaultBehavior(self):
"""Tests the behavior of not specifying the filter."""
@@ -411,8 +442,9 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']
self.RunAndVerify('BazTest.*', BAZ_TESTS)
- self.RunAndVerifyAllowingDisabled('BazTest.*',
- BAZ_TESTS + ['BazTest.DISABLED_TestC'])
+ self.RunAndVerifyAllowingDisabled(
+ 'BazTest.*', BAZ_TESTS + ['BazTest.DISABLED_TestC']
+ )
def testFilterByTest(self):
"""Tests filtering by test name."""
@@ -423,38 +455,50 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
"""Select only the disabled tests to run."""
self.RunAndVerify('DISABLED_FoobarTest.Test1', [])
- self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',
- ['DISABLED_FoobarTest.Test1'])
+ self.RunAndVerifyAllowingDisabled(
+ 'DISABLED_FoobarTest.Test1', ['DISABLED_FoobarTest.Test1']
+ )
self.RunAndVerify('*DISABLED_*', [])
self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)
self.RunAndVerify('*.DISABLED_*', [])
- self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [
- 'BarTest.DISABLED_TestFour',
- 'BarTest.DISABLED_TestFive',
- 'BazTest.DISABLED_TestC',
- 'DISABLED_FoobarTest.DISABLED_Test2',
- ])
+ self.RunAndVerifyAllowingDisabled(
+ '*.DISABLED_*',
+ [
+ 'BarTest.DISABLED_TestFour',
+ 'BarTest.DISABLED_TestFive',
+ 'BazTest.DISABLED_TestC',
+ 'DISABLED_FoobarTest.DISABLED_Test2',
+ ],
+ )
self.RunAndVerify('DISABLED_*', [])
- self.RunAndVerifyAllowingDisabled('DISABLED_*', [
- 'DISABLED_FoobarTest.Test1',
- 'DISABLED_FoobarTest.DISABLED_Test2',
- 'DISABLED_FoobarbazTest.TestA',
- ])
+ self.RunAndVerifyAllowingDisabled(
+ 'DISABLED_*',
+ [
+ 'DISABLED_FoobarTest.Test1',
+ 'DISABLED_FoobarTest.DISABLED_Test2',
+ 'DISABLED_FoobarbazTest.TestA',
+ ],
+ )
def testWildcardInTestCaseName(self):
"""Tests using wildcard in the test case name."""
- self.RunAndVerify('*a*.*', [
- 'BarTest.TestOne',
- 'BarTest.TestTwo',
- 'BarTest.TestThree',
-
- 'BazTest.TestOne',
- 'BazTest.TestA',
- 'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS)
+ self.RunAndVerify(
+ '*a*.*',
+ [
+ 'BarTest.TestOne',
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+ 'BazTest.TestOne',
+ 'BazTest.TestA',
+ 'BazTest.TestB',
+ ]
+ + DEATH_TESTS
+ + PARAM_TESTS,
+ )
def testWildcardInTestName(self):
"""Tests using wildcard in the test name."""
@@ -464,23 +508,27 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
def testFilterWithoutDot(self):
"""Tests a filter that has no '.' in it."""
- self.RunAndVerify('*z*', [
- 'FooTest.Xyz',
-
- 'BazTest.TestOne',
- 'BazTest.TestA',
- 'BazTest.TestB',
- ])
+ self.RunAndVerify(
+ '*z*',
+ [
+ 'FooTest.Xyz',
+ 'BazTest.TestOne',
+ 'BazTest.TestA',
+ 'BazTest.TestB',
+ ],
+ )
def testTwoPatterns(self):
"""Tests filters that consist of two patterns."""
- self.RunAndVerify('Foo*.*:*A*', [
- 'FooTest.Abc',
- 'FooTest.Xyz',
-
- 'BazTest.TestA',
- ])
+ self.RunAndVerify(
+ 'Foo*.*:*A*',
+ [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+ 'BazTest.TestA',
+ ],
+ )
# An empty pattern + a non-empty one
self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])
@@ -488,83 +536,109 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
def testThreePatterns(self):
"""Tests filters that consist of three patterns."""
- self.RunAndVerify('*oo*:*A*:*One', [
- 'FooTest.Abc',
- 'FooTest.Xyz',
-
- 'BarTest.TestOne',
-
- 'BazTest.TestOne',
- 'BazTest.TestA',
- ])
+ self.RunAndVerify(
+ '*oo*:*A*:*One',
+ [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+ 'BarTest.TestOne',
+ 'BazTest.TestOne',
+ 'BazTest.TestA',
+ ],
+ )
# The 2nd pattern is empty.
- self.RunAndVerify('*oo*::*One', [
- 'FooTest.Abc',
- 'FooTest.Xyz',
-
- 'BarTest.TestOne',
-
- 'BazTest.TestOne',
- ])
+ self.RunAndVerify(
+ '*oo*::*One',
+ [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+ 'BarTest.TestOne',
+ 'BazTest.TestOne',
+ ],
+ )
# The last 2 patterns are empty.
- self.RunAndVerify('*oo*::', [
- 'FooTest.Abc',
- 'FooTest.Xyz',
- ])
+ self.RunAndVerify(
+ '*oo*::',
+ [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+ ],
+ )
def testNegativeFilters(self):
- self.RunAndVerify('*-BazTest.TestOne', [
- 'FooTest.Abc',
- 'FooTest.Xyz',
-
- 'BarTest.TestOne',
- 'BarTest.TestTwo',
- 'BarTest.TestThree',
-
- 'BazTest.TestA',
- 'BazTest.TestB',
- ] + DEATH_TESTS + PARAM_TESTS)
-
- self.RunAndVerify('*-FooTest.Abc:BazTest.*', [
- 'FooTest.Xyz',
-
- 'BarTest.TestOne',
- 'BarTest.TestTwo',
- 'BarTest.TestThree',
- ] + DEATH_TESTS + PARAM_TESTS)
-
- self.RunAndVerify('BarTest.*-BarTest.TestOne', [
- 'BarTest.TestTwo',
- 'BarTest.TestThree',
- ])
+ self.RunAndVerify(
+ '*-BazTest.TestOne',
+ [
+ 'FooTest.Abc',
+ 'FooTest.Xyz',
+ 'BarTest.TestOne',
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+ 'BazTest.TestA',
+ 'BazTest.TestB',
+ ]
+ + DEATH_TESTS
+ + PARAM_TESTS,
+ )
+
+ self.RunAndVerify(
+ '*-FooTest.Abc:BazTest.*',
+ [
+ 'FooTest.Xyz',
+ 'BarTest.TestOne',
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+ ]
+ + DEATH_TESTS
+ + PARAM_TESTS,
+ )
+
+ self.RunAndVerify(
+ 'BarTest.*-BarTest.TestOne',
+ [
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+ ],
+ )
# Tests without leading '*'.
- self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [
- 'BarTest.TestOne',
- 'BarTest.TestTwo',
- 'BarTest.TestThree',
- ] + DEATH_TESTS + PARAM_TESTS)
+ self.RunAndVerify(
+ '-FooTest.Abc:FooTest.Xyz:BazTest.*',
+ [
+ 'BarTest.TestOne',
+ 'BarTest.TestTwo',
+ 'BarTest.TestThree',
+ ]
+ + DEATH_TESTS
+ + PARAM_TESTS,
+ )
# Value parameterized tests.
self.RunAndVerify('*/*', PARAM_TESTS)
# Value parameterized tests filtering by the sequence name.
- self.RunAndVerify('SeqP/*', [
- 'SeqP/ParamTest.TestX/0',
- 'SeqP/ParamTest.TestX/1',
- 'SeqP/ParamTest.TestY/0',
- 'SeqP/ParamTest.TestY/1',
- ])
+ self.RunAndVerify(
+ 'SeqP/*',
+ [
+ 'SeqP/ParamTest.TestX/0',
+ 'SeqP/ParamTest.TestX/1',
+ 'SeqP/ParamTest.TestY/0',
+ 'SeqP/ParamTest.TestY/1',
+ ],
+ )
# Value parameterized tests filtering by the test name.
- self.RunAndVerify('*/0', [
- 'SeqP/ParamTest.TestX/0',
- 'SeqP/ParamTest.TestY/0',
- 'SeqQ/ParamTest.TestX/0',
- 'SeqQ/ParamTest.TestY/0',
- ])
+ self.RunAndVerify(
+ '*/0',
+ [
+ 'SeqP/ParamTest.TestX/0',
+ 'SeqP/ParamTest.TestY/0',
+ 'SeqQ/ParamTest.TestX/0',
+ 'SeqQ/ParamTest.TestY/0',
+ ],
+ )
def testFlagOverridesEnvVar(self):
"""Tests that the filter flag overrides the filtering env. variable."""
@@ -579,41 +653,70 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
def testShardStatusFileIsCreated(self):
"""Tests that the shard file is created if specified in the environment."""
- shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
- 'shard_status_file')
- self.assert_(not os.path.exists(shard_status_file))
+ shard_status_file = os.path.join(
+ gtest_test_utils.GetTempDir(), 'shard_status_file'
+ )
+ self.assertTrue(not os.path.exists(shard_status_file))
extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
try:
InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
finally:
- self.assert_(os.path.exists(shard_status_file))
+ self.assertTrue(os.path.exists(shard_status_file))
os.remove(shard_status_file)
def testShardStatusFileIsCreatedWithListTests(self):
"""Tests that the shard file is created with the "list_tests" flag."""
- shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
- 'shard_status_file2')
- self.assert_(not os.path.exists(shard_status_file))
+ shard_status_file = os.path.join(
+ gtest_test_utils.GetTempDir(), 'shard_status_file2'
+ )
+ self.assertTrue(not os.path.exists(shard_status_file))
extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
try:
- output = InvokeWithModifiedEnv(extra_env,
- RunAndReturnOutput,
- [LIST_TESTS_FLAG])
+ output = InvokeWithModifiedEnv(
+ extra_env, RunAndReturnOutput, [LIST_TESTS_FLAG]
+ )
finally:
# This assertion ensures that Google Test enumerated the tests as
# opposed to running them.
- self.assert_('[==========]' not in output,
- 'Unexpected output during test enumeration.\n'
- 'Please ensure that LIST_TESTS_FLAG is assigned the\n'
- 'correct flag value for listing Google Test tests.')
-
- self.assert_(os.path.exists(shard_status_file))
+ self.assertTrue(
+ '[==========]' not in output,
+ (
+ 'Unexpected output during test enumeration.\n'
+ 'Please ensure that LIST_TESTS_FLAG is assigned the\n'
+ 'correct flag value for listing Google Test tests.'
+ ),
+ )
+
+ self.assertTrue(os.path.exists(shard_status_file))
os.remove(shard_status_file)
+ def testDisabledBanner(self):
+ """Tests that the disabled banner prints only tests that match filter."""
+ make_filter = lambda s: ['--%s=%s' % (FILTER_FLAG, s)]
+
+ banners = RunAndExtractDisabledBannerList(make_filter('*'))
+ self.AssertSetEqual(
+ banners,
+ [
+ 'BarTest.DISABLED_TestFour',
+ 'BarTest.DISABLED_TestFive',
+ 'BazTest.DISABLED_TestC',
+ ],
+ )
+
+ banners = RunAndExtractDisabledBannerList(make_filter('Bar*'))
+ self.AssertSetEqual(
+ banners, ['BarTest.DISABLED_TestFour', 'BarTest.DISABLED_TestFive']
+ )
+
+ banners = RunAndExtractDisabledBannerList(make_filter('*-Bar*'))
+ self.AssertSetEqual(banners, ['BazTest.DISABLED_TestC'])
+
if SUPPORTS_DEATH_TESTS:
+
def testShardingWorksWithDeathTests(self):
"""Tests integration with death tests and sharding."""
@@ -621,19 +724,23 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
expected_tests = [
'HasDeathTest.Test1',
'HasDeathTest.Test2',
-
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
- ]
-
- for flag in ['--gtest_death_test_style=threadsafe',
- '--gtest_death_test_style=fast']:
- self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
- check_exit_0=True, args=[flag])
- self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
- check_exit_0=True, args=[flag])
+ ]
+
+ for flag in [
+ '--gtest_death_test_style=threadsafe',
+ '--gtest_death_test_style=fast',
+ ]:
+ self.RunAndVerifyWithSharding(
+ gtest_filter, 3, expected_tests, check_exit_0=True, args=[flag]
+ )
+ self.RunAndVerifyWithSharding(
+ gtest_filter, 5, expected_tests, check_exit_0=True, args=[flag]
+ )
+
if __name__ == '__main__':
gtest_test_utils.Main()
diff --git a/googletest/test/googletest-filter-unittest_.cc b/googletest/test/googletest-filter-unittest_.cc
index d30ec9c7..bc7aa594 100644
--- a/googletest/test/googletest-filter-unittest_.cc
+++ b/googletest/test/googletest-filter-unittest_.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// Unit test for Google Test test filters.
//
// A user can specify which test(s) in a Google Test program to run via
@@ -43,87 +42,57 @@ namespace {
// Test case FooTest.
-class FooTest : public testing::Test {
-};
+class FooTest : public testing::Test {};
-TEST_F(FooTest, Abc) {
-}
+TEST_F(FooTest, Abc) {}
-TEST_F(FooTest, Xyz) {
- FAIL() << "Expected failure.";
-}
+TEST_F(FooTest, Xyz) { FAIL() << "Expected failure."; }
// Test case BarTest.
-TEST(BarTest, TestOne) {
-}
+TEST(BarTest, TestOne) {}
-TEST(BarTest, TestTwo) {
-}
+TEST(BarTest, TestTwo) {}
-TEST(BarTest, TestThree) {
-}
+TEST(BarTest, TestThree) {}
-TEST(BarTest, DISABLED_TestFour) {
- FAIL() << "Expected failure.";
-}
+TEST(BarTest, DISABLED_TestFour) { FAIL() << "Expected failure."; }
-TEST(BarTest, DISABLED_TestFive) {
- FAIL() << "Expected failure.";
-}
+TEST(BarTest, DISABLED_TestFive) { FAIL() << "Expected failure."; }
// Test case BazTest.
-TEST(BazTest, TestOne) {
- FAIL() << "Expected failure.";
-}
+TEST(BazTest, TestOne) { FAIL() << "Expected failure."; }
-TEST(BazTest, TestA) {
-}
+TEST(BazTest, TestA) {}
-TEST(BazTest, TestB) {
-}
+TEST(BazTest, TestB) {}
-TEST(BazTest, DISABLED_TestC) {
- FAIL() << "Expected failure.";
-}
+TEST(BazTest, DISABLED_TestC) { FAIL() << "Expected failure."; }
// Test case HasDeathTest
-TEST(HasDeathTest, Test1) {
- EXPECT_DEATH_IF_SUPPORTED(exit(1), ".*");
-}
+TEST(HasDeathTest, Test1) { EXPECT_DEATH_IF_SUPPORTED(exit(1), ".*"); }
// We need at least two death tests to make sure that the death tests
// aren't all on the first shard.
-TEST(HasDeathTest, Test2) {
- EXPECT_DEATH_IF_SUPPORTED(exit(1), ".*");
-}
+TEST(HasDeathTest, Test2) { EXPECT_DEATH_IF_SUPPORTED(exit(1), ".*"); }
// Test case FoobarTest
-TEST(DISABLED_FoobarTest, Test1) {
- FAIL() << "Expected failure.";
-}
+TEST(DISABLED_FoobarTest, Test1) { FAIL() << "Expected failure."; }
-TEST(DISABLED_FoobarTest, DISABLED_Test2) {
- FAIL() << "Expected failure.";
-}
+TEST(DISABLED_FoobarTest, DISABLED_Test2) { FAIL() << "Expected failure."; }
// Test case FoobarbazTest
-TEST(DISABLED_FoobarbazTest, TestA) {
- FAIL() << "Expected failure.";
-}
+TEST(DISABLED_FoobarbazTest, TestA) { FAIL() << "Expected failure."; }
-class ParamTest : public testing::TestWithParam<int> {
-};
+class ParamTest : public testing::TestWithParam<int> {};
-TEST_P(ParamTest, TestX) {
-}
+TEST_P(ParamTest, TestX) {}
-TEST_P(ParamTest, TestY) {
-}
+TEST_P(ParamTest, TestY) {}
INSTANTIATE_TEST_SUITE_P(SeqP, ParamTest, testing::Values(1, 2));
INSTANTIATE_TEST_SUITE_P(SeqQ, ParamTest, testing::Values(5, 6));
diff --git a/googletest/test/googletest-global-environment-unittest.py b/googletest/test/googletest-global-environment-unittest.py
index 32ba6285..bd73a2e1 100644
--- a/googletest/test/googletest-global-environment-unittest.py
+++ b/googletest/test/googletest-global-environment-unittest.py
@@ -35,16 +35,21 @@ This script tests such functionality by invoking
googletest-global-environment-unittest_ (a program written with Google Test).
"""
-import gtest_test_utils
+import re
+from googletest.test import gtest_test_utils
-def RunAndReturnOutput():
+def RunAndReturnOutput(args=None):
"""Runs the test program and returns its output."""
- return gtest_test_utils.Subprocess([
- gtest_test_utils.GetTestExecutablePath(
- 'googletest-global-environment-unittest_')
- ]).output
+ return gtest_test_utils.Subprocess(
+ [
+ gtest_test_utils.GetTestExecutablePath(
+ 'googletest-global-environment-unittest_'
+ )
+ ]
+ + (args or [])
+ ).output
class GTestGlobalEnvironmentUnitTest(gtest_test_utils.TestCase):
@@ -67,6 +72,70 @@ class GTestGlobalEnvironmentUnitTest(gtest_test_utils.TestCase):
# The test case shouldn't have been run.
self.assertNotIn('Unexpected call', txt)
+ def testEnvironmentSetUpAndTornDownForEachRepeat(self):
+ """Tests the behavior of test environments and gtest_repeat."""
+
+ # When --gtest_recreate_environments_when_repeating is true, the global test
+ # environment should be set up and torn down for each iteration.
+ txt = RunAndReturnOutput([
+ '--gtest_repeat=2',
+ '--gtest_recreate_environments_when_repeating=true',
+ ])
+
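+ # "(.|\n)*" matches any run of characters, newlines included.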
+ expected_pattern = (
+ '(.|\n)*'
+ r'Repeating all tests \(iteration 1\)'
+ '(.|\n)*'
+ 'Global test environment set-up.'
+ '(.|\n)*'
+ 'SomeTest.DoesFoo'
+ '(.|\n)*'
+ 'Global test environment tear-down'
+ '(.|\n)*'
+ r'Repeating all tests \(iteration 2\)'
+ '(.|\n)*'
+ 'Global test environment set-up.'
+ '(.|\n)*'
+ 'SomeTest.DoesFoo'
+ '(.|\n)*'
+ 'Global test environment tear-down'
+ '(.|\n)*'
+ )
+ self.assertRegex(txt, expected_pattern)
+
+ def testEnvironmentSetUpAndTornDownOnce(self):
+ """Tests environment and --gtest_recreate_environments_when_repeating."""
+
+ # By default the environment should only be set up and torn down once, at
+ # the start and end of the test program, respectively.
+ txt = RunAndReturnOutput(
+ [
+ '--gtest_repeat=2',
+ ]
+ )
+
+ expected_pattern = (
+ '(.|\n)*'
+ r'Repeating all tests \(iteration 1\)'
+ '(.|\n)*'
+ 'Global test environment set-up.'
+ '(.|\n)*'
+ 'SomeTest.DoesFoo'
+ '(.|\n)*'
+ r'Repeating all tests \(iteration 2\)'
+ '(.|\n)*'
+ 'SomeTest.DoesFoo'
+ '(.|\n)*'
+ 'Global test environment tear-down'
+ '(.|\n)*'
+ )
+ self.assertRegex(txt, expected_pattern)
+
+ self.assertEqual(len(re.findall('Global test environment set-up', txt)), 1)
+ self.assertEqual(
+ len(re.findall('Global test environment tear-down', txt)), 1
+ )
+
if __name__ == '__main__':
gtest_test_utils.Main()
diff --git a/googletest/test/googletest-json-outfiles-test.py b/googletest/test/googletest-json-outfiles-test.py
index 8ef47b8f..5626004e 100644
--- a/googletest/test/googletest-json-outfiles-test.py
+++ b/googletest/test/googletest-json-outfiles-test.py
@@ -32,97 +32,82 @@
import json
import os
-import gtest_json_test_utils
-import gtest_test_utils
+from googletest.test import gtest_json_test_utils
+from googletest.test import gtest_test_utils
GTEST_OUTPUT_SUBDIR = 'json_outfiles'
GTEST_OUTPUT_1_TEST = 'gtest_xml_outfile1_test_'
GTEST_OUTPUT_2_TEST = 'gtest_xml_outfile2_test_'
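+# A '*' value is a wildcard for fields, such as time and timestamp, that vary
+# from run to run and are normalized before comparison.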
EXPECTED_1 = {
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'name':
- u'AllTests',
- u'testsuites': [{
- u'name':
- u'PropertyOne',
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'TestSomeProperties',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'PropertyOne',
- u'SetUpProp': u'1',
- u'TestSomeProperty': u'1',
- u'TearDownProp': u'1',
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'name': 'AllTests',
+ 'testsuites': [{
+ 'name': 'PropertyOne',
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'TestSomeProperties',
+ 'file': 'gtest_xml_outfile1_test_.cc',
+ 'line': 41,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'PropertyOne',
+ 'SetUpProp': '1',
+ 'TestSomeProperty': '1',
+ 'TearDownProp': '1',
}],
}],
}
EXPECTED_2 = {
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'name':
- u'AllTests',
- u'testsuites': [{
- u'name':
- u'PropertyTwo',
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'TestSomeProperties',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'timestamp': u'*',
- u'time': u'*',
- u'classname': u'PropertyTwo',
- u'SetUpProp': u'2',
- u'TestSomeProperty': u'2',
- u'TearDownProp': u'2',
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'name': 'AllTests',
+ 'testsuites': [{
+ 'name': 'PropertyTwo',
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'TestInt64ConvertibleProperties',
+ 'file': 'gtest_xml_outfile2_test_.cc',
+ 'line': 43,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'timestamp': '*',
+ 'time': '*',
+ 'classname': 'PropertyTwo',
+ 'SetUpProp': '2',
+ 'TestFloatProperty': '3.25',
+ 'TestDoubleProperty': '4.75',
+ 'TestSizetProperty': '5',
+ 'TestBoolProperty': 'true',
+ 'TestCharProperty': 'A',
+ 'TestInt16Property': '6',
+ 'TestInt32Property': '7',
+ 'TestInt64Property': '8',
+ 'TestEnumProperty': '9',
+ 'TestAtomicIntProperty': '10',
+ 'TearDownProp': '2',
}],
}],
}
@@ -135,8 +120,9 @@ class GTestJsonOutFilesTest(gtest_test_utils.TestCase):
# We want the trailing '/' that the last "" provides in os.path.join, for
# telling Google Test to create an output directory instead of a single file
# for JSON output.
- self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),
- GTEST_OUTPUT_SUBDIR, '')
+ self.output_dir_ = os.path.join(
+ gtest_test_utils.GetTempDir(), GTEST_OUTPUT_SUBDIR, ''
+ )
self.DeleteFilesAndDir()
def tearDown(self):
@@ -165,17 +151,20 @@ class GTestJsonOutFilesTest(gtest_test_utils.TestCase):
def _TestOutFile(self, test_name, expected):
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)
command = [gtest_prog_path, '--gtest_output=json:%s' % self.output_dir_]
- p = gtest_test_utils.Subprocess(command,
- working_dir=gtest_test_utils.GetTempDir())
- self.assert_(p.exited)
- self.assertEquals(0, p.exit_code)
+ p = gtest_test_utils.Subprocess(
+ command, working_dir=gtest_test_utils.GetTempDir()
+ )
+ self.assertTrue(p.exited)
+ self.assertEqual(0, p.exit_code)
output_file_name1 = test_name + '.json'
output_file1 = os.path.join(self.output_dir_, output_file_name1)
output_file_name2 = 'lt-' + output_file_name1
output_file2 = os.path.join(self.output_dir_, output_file_name2)
- self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
- output_file1)
+ self.assertTrue(
+ os.path.isfile(output_file1) or os.path.isfile(output_file2),
+ output_file1,
+ )
if os.path.isfile(output_file1):
with open(output_file1) as f:
diff --git a/googletest/test/googletest-json-output-unittest.py b/googletest/test/googletest-json-output-unittest.py
index 41c85651..cb976945 100644
--- a/googletest/test/googletest-json-output-unittest.py
+++ b/googletest/test/googletest-json-output-unittest.py
@@ -37,8 +37,8 @@ import os
import re
import sys
-import gtest_json_test_utils
-import gtest_test_utils
+from googletest.test import gtest_json_test_utils
+from googletest.test import gtest_test_utils
GTEST_FILTER_FLAG = '--gtest_filter'
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
@@ -54,628 +54,573 @@ SUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv
if SUPPORTS_STACK_TRACES:
STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
else:
- STACK_TRACE_TEMPLATE = ''
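+ # Expected failure messages still end with a newline even without stack traces.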
+ STACK_TRACE_TEMPLATE = '\n'
EXPECTED_NON_EMPTY = {
- u'tests':
- 26,
- u'failures':
- 5,
- u'disabled':
- 2,
- u'errors':
- 0,
- u'timestamp':
- u'*',
- u'time':
- u'*',
- u'ad_hoc_property':
- u'42',
- u'name':
- u'AllTests',
- u'testsuites': [{
- u'name':
- u'SuccessfulTest',
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'Succeeds',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'SuccessfulTest'
- }]
- }, {
- u'name':
- u'FailedTest',
- u'tests':
- 1,
- u'failures':
- 1,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name':
- u'Fails',
- u'status':
- u'RUN',
- u'result':
- u'COMPLETED',
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'classname':
- u'FailedTest',
- u'failures': [{
- u'failure': u'gtest_xml_output_unittest_.cc:*\n'
- u'Expected equality of these values:\n'
- u' 1\n 2' + STACK_TRACE_TEMPLATE,
- u'type': u''
- }]
- }]
- }, {
- u'name':
- u'DisabledTest',
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 1,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'DISABLED_test_not_run',
- u'status': u'NOTRUN',
- u'result': u'SUPPRESSED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'DisabledTest'
- }]
- }, {
- u'name':
- u'SkippedTest',
- u'tests':
- 3,
- u'failures':
- 1,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'Skipped',
- u'status': u'RUN',
- u'result': u'SKIPPED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'SkippedTest'
- }, {
- u'name': u'SkippedWithMessage',
- u'status': u'RUN',
- u'result': u'SKIPPED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'SkippedTest'
- }, {
- u'name':
- u'SkippedAfterFailure',
- u'status':
- u'RUN',
- u'result':
- u'COMPLETED',
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'classname':
- u'SkippedTest',
- u'failures': [{
- u'failure': u'gtest_xml_output_unittest_.cc:*\n'
- u'Expected equality of these values:\n'
- u' 1\n 2' + STACK_TRACE_TEMPLATE,
- u'type': u''
- }]
- }]
- }, {
- u'name':
- u'MixedResultTest',
- u'tests':
- 3,
- u'failures':
- 1,
- u'disabled':
- 1,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'Succeeds',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'MixedResultTest'
- }, {
- u'name':
- u'Fails',
- u'status':
- u'RUN',
- u'result':
- u'COMPLETED',
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'classname':
- u'MixedResultTest',
- u'failures': [{
- u'failure': u'gtest_xml_output_unittest_.cc:*\n'
- u'Expected equality of these values:\n'
- u' 1\n 2' + STACK_TRACE_TEMPLATE,
- u'type': u''
- }, {
- u'failure': u'gtest_xml_output_unittest_.cc:*\n'
- u'Expected equality of these values:\n'
- u' 2\n 3' + STACK_TRACE_TEMPLATE,
- u'type': u''
- }]
- }, {
- u'name': u'DISABLED_test',
- u'status': u'NOTRUN',
- u'result': u'SUPPRESSED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'MixedResultTest'
- }]
- }, {
- u'name':
- u'XmlQuotingTest',
- u'tests':
- 1,
- u'failures':
- 1,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name':
- u'OutputsCData',
- u'status':
- u'RUN',
- u'result':
- u'COMPLETED',
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'classname':
- u'XmlQuotingTest',
- u'failures': [{
- u'failure': u'gtest_xml_output_unittest_.cc:*\n'
- u'Failed\nXML output: <?xml encoding="utf-8">'
- u'<top><![CDATA[cdata text]]></top>' +
- STACK_TRACE_TEMPLATE,
- u'type': u''
- }]
- }]
- }, {
- u'name':
- u'InvalidCharactersTest',
- u'tests':
- 1,
- u'failures':
- 1,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name':
- u'InvalidCharactersInMessage',
- u'status':
- u'RUN',
- u'result':
- u'COMPLETED',
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'classname':
- u'InvalidCharactersTest',
- u'failures': [{
- u'failure': u'gtest_xml_output_unittest_.cc:*\n'
- u'Failed\nInvalid characters in brackets'
- u' [\x01\x02]' + STACK_TRACE_TEMPLATE,
- u'type': u''
- }]
- }]
- }, {
- u'name':
- u'PropertyRecordingTest',
- u'tests':
- 4,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'SetUpTestSuite':
- u'yes',
- u'TearDownTestSuite':
- u'aye',
- u'testsuite': [{
- u'name': u'OneProperty',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'PropertyRecordingTest',
- u'key_1': u'1'
- }, {
- u'name': u'IntValuedProperty',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'PropertyRecordingTest',
- u'key_int': u'1'
- }, {
- u'name': u'ThreeProperties',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'PropertyRecordingTest',
- u'key_1': u'1',
- u'key_2': u'2',
- u'key_3': u'3'
- }, {
- u'name': u'TwoValuesForOneKeyUsesLastValue',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'PropertyRecordingTest',
- u'key_1': u'2'
- }]
- }, {
- u'name':
- u'NoFixtureTest',
- u'tests':
- 3,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'RecordProperty',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'NoFixtureTest',
- u'key': u'1'
- }, {
- u'name': u'ExternalUtilityThatCallsRecordIntValuedProperty',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'NoFixtureTest',
- u'key_for_utility_int': u'1'
- }, {
- u'name': u'ExternalUtilityThatCallsRecordStringValuedProperty',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'NoFixtureTest',
- u'key_for_utility_string': u'1'
- }]
- }, {
- u'name':
- u'TypedTest/0',
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'HasTypeParamAttribute',
- u'type_param': u'int',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'TypedTest/0'
- }]
- }, {
- u'name':
- u'TypedTest/1',
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'HasTypeParamAttribute',
- u'type_param': u'long',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'TypedTest/1'
- }]
- }, {
- u'name':
- u'Single/TypeParameterizedTestSuite/0',
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'HasTypeParamAttribute',
- u'type_param': u'int',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'Single/TypeParameterizedTestSuite/0'
- }]
- }, {
- u'name':
- u'Single/TypeParameterizedTestSuite/1',
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'HasTypeParamAttribute',
- u'type_param': u'long',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'Single/TypeParameterizedTestSuite/1'
- }]
- }, {
- u'name':
- u'Single/ValueParamTest',
- u'tests':
- 4,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'HasValueParamAttribute/0',
- u'value_param': u'33',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'Single/ValueParamTest'
- }, {
- u'name': u'HasValueParamAttribute/1',
- u'value_param': u'42',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'Single/ValueParamTest'
- }, {
- u'name': u'AnotherTestThatHasValueParamAttribute/0',
- u'value_param': u'33',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'Single/ValueParamTest'
- }, {
- u'name': u'AnotherTestThatHasValueParamAttribute/1',
- u'value_param': u'42',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'Single/ValueParamTest'
- }]
- }]
+ 'tests': 26,
+ 'failures': 5,
+ 'disabled': 2,
+ 'errors': 0,
+ 'timestamp': '*',
+ 'time': '*',
+ 'ad_hoc_property': '42',
+ 'name': 'AllTests',
+ 'testsuites': [
+ {
+ 'name': 'SuccessfulTest',
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'Succeeds',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 53,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'SuccessfulTest',
+ }],
+ },
+ {
+ 'name': 'FailedTest',
+ 'tests': 1,
+ 'failures': 1,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'Fails',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 61,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'FailedTest',
+ 'failures': [{
+ 'failure': (
+ 'gtest_xml_output_unittest_.cc:*\n'
+ 'Expected equality of these values:\n'
+ ' 1\n 2'
+ + STACK_TRACE_TEMPLATE
+ ),
+ 'type': '',
+ }],
+ }],
+ },
+ {
+ 'name': 'DisabledTest',
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 1,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'DISABLED_test_not_run',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 68,
+ 'status': 'NOTRUN',
+ 'result': 'SUPPRESSED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'DisabledTest',
+ }],
+ },
+ {
+ 'name': 'SkippedTest',
+ 'tests': 3,
+ 'failures': 1,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [
+ {
+ 'name': 'Skipped',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 75,
+ 'status': 'RUN',
+ 'result': 'SKIPPED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'SkippedTest',
+ },
+ {
+ 'name': 'SkippedWithMessage',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 79,
+ 'status': 'RUN',
+ 'result': 'SKIPPED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'SkippedTest',
+ },
+ {
+ 'name': 'SkippedAfterFailure',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 83,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'SkippedTest',
+ 'failures': [{
+ 'failure': (
+ 'gtest_xml_output_unittest_.cc:*\n'
+ 'Expected equality of these values:\n'
+ ' 1\n 2'
+ + STACK_TRACE_TEMPLATE
+ ),
+ 'type': '',
+ }],
+ },
+ ],
+ },
+ {
+ 'name': 'MixedResultTest',
+ 'tests': 3,
+ 'failures': 1,
+ 'disabled': 1,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [
+ {
+ 'name': 'Succeeds',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 88,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'MixedResultTest',
+ },
+ {
+ 'name': 'Fails',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 93,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'MixedResultTest',
+ 'failures': [
+ {
+ 'failure': (
+ 'gtest_xml_output_unittest_.cc:*\n'
+ 'Expected equality of these values:\n'
+ ' 1\n 2'
+ + STACK_TRACE_TEMPLATE
+ ),
+ 'type': '',
+ },
+ {
+ 'failure': (
+ 'gtest_xml_output_unittest_.cc:*\n'
+ 'Expected equality of these values:\n'
+ ' 2\n 3'
+ + STACK_TRACE_TEMPLATE
+ ),
+ 'type': '',
+ },
+ ],
+ },
+ {
+ 'name': 'DISABLED_test',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 98,
+ 'status': 'NOTRUN',
+ 'result': 'SUPPRESSED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'MixedResultTest',
+ },
+ ],
+ },
+ {
+ 'name': 'XmlQuotingTest',
+ 'tests': 1,
+ 'failures': 1,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'OutputsCData',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 102,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'XmlQuotingTest',
+ 'failures': [{
+ 'failure': (
+ 'gtest_xml_output_unittest_.cc:*\n'
+ 'Failed\nXML output: <?xml encoding="utf-8">'
+ '<top><![CDATA[cdata text]]></top>'
+ + STACK_TRACE_TEMPLATE
+ ),
+ 'type': '',
+ }],
+ }],
+ },
+ {
+ 'name': 'InvalidCharactersTest',
+ 'tests': 1,
+ 'failures': 1,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'InvalidCharactersInMessage',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 109,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'InvalidCharactersTest',
+ 'failures': [{
+ 'failure': (
+ 'gtest_xml_output_unittest_.cc:*\n'
+ 'Failed\nInvalid characters in brackets'
+ ' [\x01\x02]'
+ + STACK_TRACE_TEMPLATE
+ ),
+ 'type': '',
+ }],
+ }],
+ },
+ {
+ 'name': 'PropertyRecordingTest',
+ 'tests': 4,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'SetUpTestSuite': 'yes',
+ 'TearDownTestSuite': 'aye',
+ 'testsuite': [
+ {
+ 'name': 'OneProperty',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 121,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'PropertyRecordingTest',
+ 'key_1': '1',
+ },
+ {
+ 'name': 'IntValuedProperty',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 125,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'PropertyRecordingTest',
+ 'key_int': '1',
+ },
+ {
+ 'name': 'ThreeProperties',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 129,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'PropertyRecordingTest',
+ 'key_1': '1',
+ 'key_2': '2',
+ 'key_3': '3',
+ },
+ {
+ 'name': 'TwoValuesForOneKeyUsesLastValue',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 135,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'PropertyRecordingTest',
+ 'key_1': '2',
+ },
+ ],
+ },
+ {
+ 'name': 'NoFixtureTest',
+ 'tests': 3,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [
+ {
+ 'name': 'RecordProperty',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 140,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'NoFixtureTest',
+ 'key': '1',
+ },
+ {
+ 'name': 'ExternalUtilityThatCallsRecordIntValuedProperty',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 153,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'NoFixtureTest',
+ 'key_for_utility_int': '1',
+ },
+ {
+ 'name': (
+ 'ExternalUtilityThatCallsRecordStringValuedProperty'
+ ),
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 157,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'NoFixtureTest',
+ 'key_for_utility_string': '1',
+ },
+ ],
+ },
+ {
+ 'name': 'TypedTest/0',
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'HasTypeParamAttribute',
+ 'type_param': 'int',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 173,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'TypedTest/0',
+ }],
+ },
+ {
+ 'name': 'TypedTest/1',
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'HasTypeParamAttribute',
+ 'type_param': 'long',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 173,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'TypedTest/1',
+ }],
+ },
+ {
+ 'name': 'Single/TypeParameterizedTestSuite/0',
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'HasTypeParamAttribute',
+ 'type_param': 'int',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 180,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'Single/TypeParameterizedTestSuite/0',
+ }],
+ },
+ {
+ 'name': 'Single/TypeParameterizedTestSuite/1',
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'HasTypeParamAttribute',
+ 'type_param': 'long',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 180,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'Single/TypeParameterizedTestSuite/1',
+ }],
+ },
+ {
+ 'name': 'Single/ValueParamTest',
+ 'tests': 4,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [
+ {
+ 'name': 'HasValueParamAttribute/0',
+ 'value_param': '33',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 164,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'Single/ValueParamTest',
+ },
+ {
+ 'name': 'HasValueParamAttribute/1',
+ 'value_param': '42',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 164,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'Single/ValueParamTest',
+ },
+ {
+ 'name': 'AnotherTestThatHasValueParamAttribute/0',
+ 'value_param': '33',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 165,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'Single/ValueParamTest',
+ },
+ {
+ 'name': 'AnotherTestThatHasValueParamAttribute/1',
+ 'value_param': '42',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 165,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'Single/ValueParamTest',
+ },
+ ],
+ },
+ ],
}
EXPECTED_FILTERED = {
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'name':
- u'AllTests',
- u'ad_hoc_property':
- u'42',
- u'testsuites': [{
- u'name':
- u'SuccessfulTest',
- u'tests':
- 1,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name': u'Succeeds',
- u'status': u'RUN',
- u'result': u'COMPLETED',
- u'time': u'*',
- u'timestamp': u'*',
- u'classname': u'SuccessfulTest',
- }]
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'name': 'AllTests',
+ 'ad_hoc_property': '42',
+ 'testsuites': [{
+ 'name': 'SuccessfulTest',
+ 'tests': 1,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': 'Succeeds',
+ 'file': 'gtest_xml_output_unittest_.cc',
+ 'line': 53,
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': 'SuccessfulTest',
+ }],
}],
}
EXPECTED_NO_TEST = {
- u'tests':
- 0,
- u'failures':
- 0,
- u'disabled':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'name':
- u'AllTests',
- u'testsuites': [{
- u'name':
- u'NonTestSuiteFailure',
- u'tests':
- 1,
- u'failures':
- 1,
- u'disabled':
- 0,
- u'skipped':
- 0,
- u'errors':
- 0,
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'testsuite': [{
- u'name':
- u'',
- u'status':
- u'RUN',
- u'result':
- u'COMPLETED',
- u'time':
- u'*',
- u'timestamp':
- u'*',
- u'classname':
- u'',
- u'failures': [{
- u'failure': u'gtest_no_test_unittest.cc:*\n'
- u'Expected equality of these values:\n'
- u' 1\n 2' + STACK_TRACE_TEMPLATE,
- u'type': u'',
- }]
- }]
+ 'tests': 0,
+ 'failures': 0,
+ 'disabled': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'name': 'AllTests',
+ 'testsuites': [{
+ 'name': 'NonTestSuiteFailure',
+ 'tests': 1,
+ 'failures': 1,
+ 'disabled': 0,
+ 'skipped': 0,
+ 'errors': 0,
+ 'time': '*',
+ 'timestamp': '*',
+ 'testsuite': [{
+ 'name': '',
+ 'status': 'RUN',
+ 'result': 'COMPLETED',
+ 'time': '*',
+ 'timestamp': '*',
+ 'classname': '',
+ 'failures': [{
+ 'failure': (
+ 'gtest_no_test_unittest.cc:*\n'
+ 'Expected equality of these values:\n'
+ ' 1\n 2'
+ + STACK_TRACE_TEMPLATE
+ ),
+ 'type': '',
+ }],
+ }],
}],
}
GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
-SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
- [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
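+# Typed-test support is detected by listing the tests and checking for "TypedTest".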
+SUPPORTS_TYPED_TESTS = (
+ 'TypedTest'
+ in gtest_test_utils.Subprocess(
+ [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False
+ ).output
+)
class GTestJsonOutputUnitTest(gtest_test_utils.TestCase):
- """Unit test for Google Test's JSON output functionality.
- """
+ """Unit test for Google Test's JSON output functionality."""
# This test currently breaks on platforms that do not support typed and
# type-parameterized tests, so we don't run it under them.
@@ -711,16 +656,23 @@ class GTestJsonOutputUnitTest(gtest_test_utils.TestCase):
match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
self.assertTrue(
match,
- 'JSON datettime string %s has incorrect format' % date_time_str)
+ 'JSON datetime string %s has an incorrect format' % date_time_str,
+ )
date_time_from_json = datetime.datetime(
- year=int(match.group(1)), month=int(match.group(2)),
- day=int(match.group(3)), hour=int(match.group(4)),
- minute=int(match.group(5)), second=int(match.group(6)))
+ year=int(match.group(1)),
+ month=int(match.group(2)),
+ day=int(match.group(3)),
+ hour=int(match.group(4)),
+ minute=int(match.group(5)),
+ second=int(match.group(6)),
+ )
time_delta = abs(datetime.datetime.now() - date_time_from_json)
# The timestamp value should be near the current local time.
- self.assertTrue(time_delta < datetime.timedelta(seconds=600),
- 'time_delta is %s' % time_delta)
+ self.assertTrue(
+ time_delta < datetime.timedelta(seconds=600),
+ 'time_delta is %s' % time_delta,
+ )
def testDefaultOutputFile(self):
"""Verifies the default output file name.
@@ -728,10 +680,12 @@ class GTestJsonOutputUnitTest(gtest_test_utils.TestCase):
Confirms that Google Test produces a JSON output file with the expected
default name if no name is explicitly specified.
"""
- output_file = os.path.join(gtest_test_utils.GetTempDir(),
- GTEST_DEFAULT_OUTPUT_FILE)
+ output_file = os.path.join(
+ gtest_test_utils.GetTempDir(), GTEST_DEFAULT_OUTPUT_FILE
+ )
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
- 'gtest_no_test_unittest')
+ 'gtest_no_test_unittest'
+ )
try:
os.remove(output_file)
except OSError:
@@ -741,10 +695,11 @@ class GTestJsonOutputUnitTest(gtest_test_utils.TestCase):
p = gtest_test_utils.Subprocess(
[gtest_prog_path, '%s=json' % GTEST_OUTPUT_FLAG],
- working_dir=gtest_test_utils.GetTempDir())
- self.assert_(p.exited)
- self.assertEquals(0, p.exit_code)
- self.assert_(os.path.isfile(output_file))
+ working_dir=gtest_test_utils.GetTempDir(),
+ )
+ self.assertTrue(p.exited)
+ self.assertEqual(0, p.exit_code)
+ self.assertTrue(os.path.isfile(output_file))
def testSuppressedJsonOutput(self):
"""Verifies that no JSON output is generated.
@@ -753,28 +708,34 @@ class GTestJsonOutputUnitTest(gtest_test_utils.TestCase):
shut down before RUN_ALL_TESTS is invoked.
"""
- json_path = os.path.join(gtest_test_utils.GetTempDir(),
- GTEST_PROGRAM_NAME + 'out.json')
+ json_path = os.path.join(
+ gtest_test_utils.GetTempDir(), GTEST_PROGRAM_NAME + 'out.json'
+ )
if os.path.isfile(json_path):
os.remove(json_path)
- command = [GTEST_PROGRAM_PATH,
- '%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path),
- '--shut_down_xml']
+ command = [
+ GTEST_PROGRAM_PATH,
+ '%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path),
+ '--shut_down_xml',
+ ]
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
# p.signal is available only if p.terminated_by_signal is True.
self.assertFalse(
p.terminated_by_signal,
- '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
+ '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal),
+ )
else:
- self.assert_(p.exited)
- self.assertEquals(1, p.exit_code,
- "'%s' exited with code %s, which doesn't match "
- 'the expected exit code %s.'
- % (command, p.exit_code, 1))
+ self.assertTrue(p.exited)
+ self.assertEqual(
+ 1,
+ p.exit_code,
+ "'%s' exited with code %s, which doesn't match "
+ 'the expected exit code %s.' % (command, p.exit_code, 1),
+ )
- self.assert_(not os.path.isfile(json_path))
+ self.assertFalse(os.path.isfile(json_path))
def testFilteredTestJsonOutput(self):
"""Verifies JSON output when a filter is applied.
@@ -783,8 +744,12 @@ class GTestJsonOutputUnitTest(gtest_test_utils.TestCase):
non-selected tests do not show up in the JSON output.
"""
- self._TestJsonOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED, 0,
- extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])
+ self._TestJsonOutput(
+ GTEST_PROGRAM_NAME,
+ EXPECTED_FILTERED,
+ 0,
+ extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG],
+ )
def _GetJsonOutput(self, gtest_prog_name, extra_args, expected_exit_code):
"""Returns the JSON output generated by running the program gtest_prog_name.
@@ -796,30 +761,36 @@ class GTestJsonOutputUnitTest(gtest_test_utils.TestCase):
extra_args: extra arguments to binary invocation.
expected_exit_code: program's exit code.
"""
- json_path = os.path.join(gtest_test_utils.GetTempDir(),
- gtest_prog_name + 'out.json')
+ json_path = os.path.join(
+ gtest_test_utils.GetTempDir(), gtest_prog_name + 'out.json'
+ )
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
- command = (
- [gtest_prog_path, '%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path)] +
- extra_args
- )
+ command = [
+ gtest_prog_path,
+ '%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path),
+ ] + extra_args
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
- self.assert_(False,
- '%s was killed by signal %d' % (gtest_prog_name, p.signal))
+ self.fail(
+ '%s was killed by signal %d' % (gtest_prog_name, p.signal)
+ )
else:
- self.assert_(p.exited)
- self.assertEquals(expected_exit_code, p.exit_code,
- "'%s' exited with code %s, which doesn't match "
- 'the expected exit code %s.'
- % (command, p.exit_code, expected_exit_code))
+ self.assertTrue(p.exited)
+ self.assertEqual(
+ expected_exit_code,
+ p.exit_code,
+ "'%s' exited with code %s, which doesn't match "
+ 'the expected exit code %s.'
+ % (command, p.exit_code, expected_exit_code),
+ )
with open(json_path) as f:
actual = json.load(f)
return actual
- def _TestJsonOutput(self, gtest_prog_name, expected,
- expected_exit_code, extra_args=None):
+ def _TestJsonOutput(
+ self, gtest_prog_name, expected, expected_exit_code, extra_args=None
+ ):
"""Checks the JSON output generated by the Google Test binary.
Asserts that the JSON document generated by running the program
@@ -834,8 +805,9 @@ class GTestJsonOutputUnitTest(gtest_test_utils.TestCase):
extra_args: extra arguments to binary invocation.
"""
- actual = self._GetJsonOutput(gtest_prog_name, extra_args or [],
- expected_exit_code)
+ actual = self._GetJsonOutput(
+ gtest_prog_name, extra_args or [], expected_exit_code
+ )
self.assertEqual(expected, gtest_json_test_utils.normalize(actual))
diff --git a/googletest/test/googletest-list-tests-unittest.py b/googletest/test/googletest-list-tests-unittest.py
index 81423a33..977e57f0 100755
--- a/googletest/test/googletest-list-tests-unittest.py
+++ b/googletest/test/googletest-list-tests-unittest.py
@@ -38,7 +38,7 @@ Google Test) the command line flags.
"""
import re
-import gtest_test_utils
+from googletest.test import gtest_test_utils
# Constants.
@@ -46,11 +46,14 @@ import gtest_test_utils
LIST_TESTS_FLAG = 'gtest_list_tests'
# Path to the googletest-list-tests-unittest_ program.
-EXE_PATH = gtest_test_utils.GetTestExecutablePath('googletest-list-tests-unittest_')
+EXE_PATH = gtest_test_utils.GetTestExecutablePath(
+ 'googletest-list-tests-unittest_'
+)
# The expected output when running googletest-list-tests-unittest_ with
# --gtest_list_tests
-EXPECTED_OUTPUT_NO_FILTER_RE = re.compile(r"""FooDeathTest\.
+EXPECTED_OUTPUT_NO_FILTER_RE = re.compile(
+ r"""FooDeathTest\.
Test1
Foo\.
Bar1
@@ -90,11 +93,13 @@ MyInstantiation/ValueParamTest\.
TestB/0 # GetParam\(\) = one line
TestB/1 # GetParam\(\) = two\\nlines
TestB/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
-""")
+"""
+)
# The expected output when running googletest-list-tests-unittest_ with
# --gtest_list_tests and --gtest_filter=Foo*.
-EXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(r"""FooDeathTest\.
+EXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(
+ r"""FooDeathTest\.
Test1
Foo\.
Bar1
@@ -106,7 +111,8 @@ FooTest\.
Test1
DISABLED_Test2
Test3
-""")
+"""
+)
# Utilities.
@@ -114,8 +120,9 @@ FooTest\.
def Run(args):
"""Runs googletest-list-tests-unittest_ and returns the list of tests printed."""
- return gtest_test_utils.Subprocess([EXE_PATH] + args,
- capture_stderr=False).output
+ return gtest_test_utils.Subprocess(
+ [EXE_PATH] + args, capture_stderr=False
+ ).output
# The unit test.
@@ -125,17 +132,18 @@ class GTestListTestsUnitTest(gtest_test_utils.TestCase):
"""Tests using the --gtest_list_tests flag to list all tests."""
def RunAndVerify(self, flag_value, expected_output_re, other_flag):
- """Runs googletest-list-tests-unittest_ and verifies that it prints
+ """Run googletest-list-tests-unittest_ and verify the output.
+
+ Runs googletest-list-tests-unittest_ and verifies that it prints
the correct tests.
Args:
- flag_value: value of the --gtest_list_tests flag;
- None if the flag should not be present.
- expected_output_re: regular expression that matches the expected
- output after running command;
- other_flag: a different flag to be passed to command
- along with gtest_list_tests;
- None if the flag should not be present.
+ flag_value: value of the --gtest_list_tests flag; None if the flag
+ should not be present.
+ expected_output_re: regular expression that matches the expected output
+ after running the command.
+ other_flag: a different flag to be passed to command along with
+ gtest_list_tests; None if the flag should not be present.
"""
if flag_value is None:
@@ -156,49 +164,61 @@ class GTestListTestsUnitTest(gtest_test_utils.TestCase):
output = Run(args)
if expected_output_re:
- self.assert_(
+ self.assertTrue(
expected_output_re.match(output),
- ('when %s is %s, the output of "%s" is "%s",\n'
- 'which does not match regex "%s"' %
- (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output,
- expected_output_re.pattern)))
+ 'when %s is %s, the output of "%s" is "%s",\n'
+ 'which does not match regex "%s"'
+ % (
+ LIST_TESTS_FLAG,
+ flag_expression,
+ ' '.join(args),
+ output,
+ expected_output_re.pattern,
+ ),
+ )
else:
- self.assert_(
+ self.assertTrue(
not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),
- ('when %s is %s, the output of "%s" is "%s"'%
- (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output)))
+ 'when %s is %s, the output of "%s" is "%s"'
+ % (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output),
+ )
def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
- self.RunAndVerify(flag_value=None,
- expected_output_re=None,
- other_flag=None)
+ self.RunAndVerify(flag_value=None, expected_output_re=None, other_flag=None)
def testFlag(self):
"""Tests using the --gtest_list_tests flag."""
- self.RunAndVerify(flag_value='0',
- expected_output_re=None,
- other_flag=None)
- self.RunAndVerify(flag_value='1',
- expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
- other_flag=None)
+ self.RunAndVerify(flag_value='0', expected_output_re=None, other_flag=None)
+ self.RunAndVerify(
+ flag_value='1',
+ expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
+ other_flag=None,
+ )
def testOverrideNonFilterFlags(self):
"""Tests that --gtest_list_tests overrides the non-filter flags."""
- self.RunAndVerify(flag_value='1',
- expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
- other_flag='--gtest_break_on_failure')
+ self.RunAndVerify(
+ flag_value='1',
+ expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
+ other_flag='--gtest_break_on_failure',
+ )
def testWithFilterFlags(self):
- """Tests that --gtest_list_tests takes into account the
- --gtest_filter flag."""
+ """Tests that --gtest_list_tests takes into account the filter flags.
+
+ Tests that --gtest_list_tests takes into account the
+ --gtest_filter flag.
+ """
- self.RunAndVerify(flag_value='1',
- expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,
- other_flag='--gtest_filter=Foo*')
+ self.RunAndVerify(
+ flag_value='1',
+ expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,
+ other_flag='--gtest_filter=Foo*',
+ )
if __name__ == '__main__':
diff --git a/googletest/test/googletest-list-tests-unittest_.cc b/googletest/test/googletest-list-tests-unittest_.cc
index 493c6f00..a1ea6cf4 100644
--- a/googletest/test/googletest-list-tests-unittest_.cc
+++ b/googletest/test/googletest-list-tests-unittest_.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// Unit test for Google Test's --gtest_list_tests flag.
//
// A user can ask Google Test to list all tests that will run
@@ -37,41 +36,33 @@
// This program will be invoked from a Python unit test.
// Don't run it directly.
+#include <ostream>
+#include <string>
+
#include "gtest/gtest.h"
// Several different test cases and tests that will be listed.
-TEST(Foo, Bar1) {
-}
+TEST(Foo, Bar1) {}
-TEST(Foo, Bar2) {
-}
+TEST(Foo, Bar2) {}
-TEST(Foo, DISABLED_Bar3) {
-}
+TEST(Foo, DISABLED_Bar3) {}
-TEST(Abc, Xyz) {
-}
+TEST(Abc, Xyz) {}
-TEST(Abc, Def) {
-}
+TEST(Abc, Def) {}
-TEST(FooBar, Baz) {
-}
+TEST(FooBar, Baz) {}
-class FooTest : public testing::Test {
-};
+class FooTest : public testing::Test {};
-TEST_F(FooTest, Test1) {
-}
+TEST_F(FooTest, Test1) {}
-TEST_F(FooTest, DISABLED_Test2) {
-}
+TEST_F(FooTest, DISABLED_Test2) {}
-TEST_F(FooTest, Test3) {
-}
+TEST_F(FooTest, Test3) {}
-TEST(FooDeathTest, Test1) {
-}
+TEST(FooDeathTest, Test1) {}
// A group of value-parameterized tests.
@@ -86,70 +77,66 @@ class MyType {
};
// Teaches Google Test how to print a MyType.
-void PrintTo(const MyType& x, std::ostream* os) {
- *os << x.value();
-}
+void PrintTo(const MyType& x, std::ostream* os) { *os << x.value(); }
-class ValueParamTest : public testing::TestWithParam<MyType> {
-};
+class ValueParamTest : public testing::TestWithParam<MyType> {};
-TEST_P(ValueParamTest, TestA) {
-}
+TEST_P(ValueParamTest, TestA) {}
-TEST_P(ValueParamTest, TestB) {
-}
+TEST_P(ValueParamTest, TestB) {}
INSTANTIATE_TEST_SUITE_P(
MyInstantiation, ValueParamTest,
- testing::Values(MyType("one line"),
- MyType("two\nlines"),
- MyType("a very\nloooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong line"))); // NOLINT
+ testing::Values(
+ MyType("one line"), MyType("two\nlines"),
+ MyType("a "
+ "very\nloooooooooooooooooooooooooooooooooooooooooooooooooooooooo"
+ "ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo"
+ "ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo"
+ "ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo"
+ "ooooong line"))); // NOLINT
// A group of typed tests.
// A deliberately long type name for testing the line-truncating
// behavior when printing a type parameter.
-class VeryLoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooogName { // NOLINT
+class
+ VeryLoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooogName { // NOLINT
};
template <typename T>
-class TypedTest : public testing::Test {
-};
+class TypedTest : public testing::Test {};
template <typename T, int kSize>
-class MyArray {
-};
+class MyArray {};
-typedef testing::Types<VeryLoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooogName, // NOLINT
- int*, MyArray<bool, 42> > MyTypes;
+typedef testing::Types<
+ VeryLoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooogName, // NOLINT
+ int*, MyArray<bool, 42> >
+ MyTypes;
TYPED_TEST_SUITE(TypedTest, MyTypes);
-TYPED_TEST(TypedTest, TestA) {
-}
+TYPED_TEST(TypedTest, TestA) {}
-TYPED_TEST(TypedTest, TestB) {
-}
+TYPED_TEST(TypedTest, TestB) {}
// A group of type-parameterized tests.
template <typename T>
-class TypeParamTest : public testing::Test {
-};
+class TypeParamTest : public testing::Test {};
TYPED_TEST_SUITE_P(TypeParamTest);
-TYPED_TEST_P(TypeParamTest, TestA) {
-}
+TYPED_TEST_P(TypeParamTest, TestA) {}
-TYPED_TEST_P(TypeParamTest, TestB) {
-}
+TYPED_TEST_P(TypeParamTest, TestB) {}
REGISTER_TYPED_TEST_SUITE_P(TypeParamTest, TestA, TestB);
INSTANTIATE_TYPED_TEST_SUITE_P(My, TypeParamTest, MyTypes);
-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
diff --git a/googletest/test/googletest-listener-test.cc b/googletest/test/googletest-listener-test.cc
index 10457afe..d7c47c21 100644
--- a/googletest/test/googletest-listener-test.cc
+++ b/googletest/test/googletest-listener-test.cc
@@ -32,19 +32,14 @@
// This file verifies Google Test event listeners receive events at the
// right times.
+#include <string>
#include <vector>
#include "gtest/gtest.h"
#include "gtest/internal/custom/gtest.h"
using ::testing::AddGlobalTestEnvironment;
-using ::testing::Environment;
using ::testing::InitGoogleTest;
-using ::testing::Test;
-using ::testing::TestSuite;
-using ::testing::TestEventListener;
-using ::testing::TestInfo;
-using ::testing::TestPartResult;
using ::testing::UnitTest;
// Used by tests to register their events.
@@ -65,8 +60,8 @@ class EventRecordingListener : public TestEventListener {
void OnTestIterationStart(const UnitTest& /*unit_test*/,
int iteration) override {
Message message;
- message << GetFullMethodName("OnTestIterationStart")
- << "(" << iteration << ")";
+ message << GetFullMethodName("OnTestIterationStart") << "(" << iteration
+ << ")";
g_events->push_back(message.GetString());
}
@@ -112,8 +107,8 @@ class EventRecordingListener : public TestEventListener {
void OnTestIterationEnd(const UnitTest& /*unit_test*/,
int iteration) override {
Message message;
- message << GetFullMethodName("OnTestIterationEnd")
- << "(" << iteration << ")";
+ message << GetFullMethodName("OnTestIterationEnd") << "(" << iteration
+ << ")";
g_events->push_back(message.GetString());
}
@@ -122,9 +117,7 @@ class EventRecordingListener : public TestEventListener {
}
private:
- std::string GetFullMethodName(const char* name) {
- return name_ + "." + name;
- }
+ std::string GetFullMethodName(const char* name) { return name_ + "." + name; }
std::string name_;
};
@@ -252,22 +245,21 @@ void VerifyResults(const std::vector<std::string>& data,
EXPECT_EQ(expected_data_size, actual_size);
// Compares the common prefix.
- const size_t shorter_size = expected_data_size <= actual_size ?
- expected_data_size : actual_size;
+ const size_t shorter_size =
+ expected_data_size <= actual_size ? expected_data_size : actual_size;
size_t i = 0;
for (; i < shorter_size; ++i) {
- ASSERT_STREQ(expected_data[i], data[i].c_str())
- << "at position " << i;
+ ASSERT_STREQ(expected_data[i], data[i].c_str()) << "at position " << i;
}
// Prints extra elements in the actual data.
for (; i < actual_size; ++i) {
- printf(" Actual event #%lu: %s\n",
- static_cast<unsigned long>(i), data[i].c_str());
+ printf(" Actual event #%lu: %s\n", static_cast<unsigned long>(i),
+ data[i].c_str());
}
}
-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
std::vector<std::string> events;
g_events = &events;
InitGoogleTest(&argc, argv);
@@ -281,10 +273,11 @@ int main(int argc, char **argv) {
AddGlobalTestEnvironment(new EnvironmentInvocationCatcher);
- GTEST_CHECK_(events.size() == 0)
+ GTEST_CHECK_(events.empty())
<< "AddGlobalTestEnvironment should not generate any events itself.";
- ::testing::GTEST_FLAG(repeat) = 2;
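+ // Repeat all tests twice, recreating the test environments for each iteration.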
+ GTEST_FLAG_SET(repeat, 2);
+ GTEST_FLAG_SET(recreate_environments_when_repeating, true);
int ret_val = RUN_ALL_TESTS();
#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
@@ -505,14 +498,12 @@ int main(int argc, char **argv) {
"1st.OnTestProgramEnd"};
#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
- VerifyResults(events,
- expected_events,
- sizeof(expected_events)/sizeof(expected_events[0]));
+ VerifyResults(events, expected_events,
+ sizeof(expected_events) / sizeof(expected_events[0]));
// We need to check manually for ad hoc test failures that happen after
// RUN_ALL_TESTS finishes.
- if (UnitTest::GetInstance()->Failed())
- ret_val = 1;
+ if (UnitTest::GetInstance()->Failed()) ret_val = 1;
return ret_val;
}
diff --git a/googletest/test/googletest-message-test.cc b/googletest/test/googletest-message-test.cc
index 962d5191..bf1f094c 100644
--- a/googletest/test/googletest-message-test.cc
+++ b/googletest/test/googletest-message-test.cc
@@ -30,14 +30,32 @@
//
// Tests for the Message class.
-#include "gtest/gtest-message.h"
+#include <sstream>
+#include <string>
+#include "gtest/gtest-message.h"
#include "gtest/gtest.h"
+#ifdef GTEST_HAS_ABSL
+#include "absl/strings/str_format.h"
+#endif // GTEST_HAS_ABSL
+
namespace {
using ::testing::Message;
+#ifdef GTEST_HAS_ABSL
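+// A type that Message can print via the AbslStringify extension point.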
+struct AbslStringifiablePoint {
+ template <typename Sink>
+ friend void AbslStringify(Sink& sink, const AbslStringifiablePoint& p) {
+ absl::Format(&sink, "(%d, %d)", p.x, p.y);
+ }
+
+ int x;
+ int y;
+};
+#endif // GTEST_HAS_ABSL
+
// Tests the testing::Message class
// Tests the default constructor.
@@ -69,8 +87,9 @@ TEST(MessageTest, StreamsFloat) {
// Tests streaming a double.
TEST(MessageTest, StreamsDouble) {
- const std::string s = (Message() << 1260570880.4555497 << " "
- << 1260572265.1954534).GetString();
+ const std::string s =
+ (Message() << 1260570880.4555497 << " " << 1260572265.1954534)
+ .GetString();
// Both numbers should be printed with enough precision.
EXPECT_PRED_FORMAT2(testing::IsSubstring, "1260570880.45", s.c_str());
EXPECT_PRED_FORMAT2(testing::IsSubstring, " 1260572265.19", s.c_str());
@@ -108,8 +127,7 @@ TEST(MessageTest, StreamsString) {
// Tests that we can output strings containing embedded NULs.
TEST(MessageTest, StreamsStringWithEmbeddedNUL) {
- const char char_array_with_nul[] =
- "Here's a NUL\0 and some more string";
+ const char char_array_with_nul[] = "Here's a NUL\0 and some more string";
const ::std::string string_with_nul(char_array_with_nul,
sizeof(char_array_with_nul) - 1);
EXPECT_EQ("Here's a NUL\\0 and some more string",
@@ -126,13 +144,21 @@ TEST(MessageTest, StreamsInt) {
EXPECT_EQ("123", (Message() << 123).GetString());
}
+#ifdef GTEST_HAS_ABSL
+// Tests streaming a type with an AbslStringify definition.
+TEST(MessageTest, StreamsAbslStringify) {
+ EXPECT_EQ("(1, 2)", (Message() << AbslStringifiablePoint{1, 2}).GetString());
+}
+#endif // GTEST_HAS_ABSL
+
// Tests that basic IO manipulators (endl, ends, and flush) can be
// streamed to Message.
TEST(MessageTest, StreamsBasicIoManip) {
- EXPECT_EQ("Line 1.\nA NUL char \\0 in line 2.",
- (Message() << "Line 1." << std::endl
- << "A NUL char " << std::ends << std::flush
- << " in line 2.").GetString());
+ EXPECT_EQ(
+ "Line 1.\nA NUL char \\0 in line 2.",
+ (Message() << "Line 1." << std::endl
+ << "A NUL char " << std::ends << std::flush << " in line 2.")
+ .GetString());
}
// Tests Message::GetString()
diff --git a/googletest/test/googletest-options-test.cc b/googletest/test/googletest-options-test.cc
index c09d5723..722c5b55 100644
--- a/googletest/test/googletest-options-test.cc
+++ b/googletest/test/googletest-options-test.cc
@@ -36,13 +36,15 @@
// make-files on Windows and other platforms. Do not #include this file
// anywhere else!
+#include <string>
+
#include "gtest/gtest.h"
-#if GTEST_OS_WINDOWS_MOBILE
-# include <windows.h>
-#elif GTEST_OS_WINDOWS
-# include <direct.h>
-#elif GTEST_OS_OS2
+#ifdef GTEST_OS_WINDOWS_MOBILE
+#include <windows.h>
+#elif defined(GTEST_OS_WINDOWS)
+#include <direct.h>
+#elif defined(GTEST_OS_OS2)
// For strcasecmp on OS/2
#include <strings.h>
#endif // GTEST_OS_WINDOWS_MOBILE
@@ -61,36 +63,36 @@ FilePath GetAbsolutePathOf(const FilePath& relative_path) {
// Testing UnitTestOptions::GetOutputFormat/GetOutputFile.
TEST(XmlOutputTest, GetOutputFormatDefault) {
- GTEST_FLAG(output) = "";
+ GTEST_FLAG_SET(output, "");
EXPECT_STREQ("", UnitTestOptions::GetOutputFormat().c_str());
}
TEST(XmlOutputTest, GetOutputFormat) {
- GTEST_FLAG(output) = "xml:filename";
+ GTEST_FLAG_SET(output, "xml:filename");
EXPECT_STREQ("xml", UnitTestOptions::GetOutputFormat().c_str());
}
TEST(XmlOutputTest, GetOutputFileDefault) {
- GTEST_FLAG(output) = "";
+ GTEST_FLAG_SET(output, "");
EXPECT_EQ(GetAbsolutePathOf(FilePath("test_detail.xml")).string(),
UnitTestOptions::GetAbsolutePathToOutputFile());
}
TEST(XmlOutputTest, GetOutputFileSingleFile) {
- GTEST_FLAG(output) = "xml:filename.abc";
+ GTEST_FLAG_SET(output, "xml:filename.abc");
EXPECT_EQ(GetAbsolutePathOf(FilePath("filename.abc")).string(),
UnitTestOptions::GetAbsolutePathToOutputFile());
}
TEST(XmlOutputTest, GetOutputFileFromDirectoryPath) {
- GTEST_FLAG(output) = "xml:path" GTEST_PATH_SEP_;
+ GTEST_FLAG_SET(output, "xml:path" GTEST_PATH_SEP_);
const std::string expected_output_file =
- GetAbsolutePathOf(
- FilePath(std::string("path") + GTEST_PATH_SEP_ +
- GetCurrentExecutableName().string() + ".xml")).string();
+ GetAbsolutePathOf(FilePath(std::string("path") + GTEST_PATH_SEP_ +
+ GetCurrentExecutableName().string() + ".xml"))
+ .string();
const std::string& output_file =
UnitTestOptions::GetAbsolutePathToOutputFile();
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());
#else
EXPECT_EQ(expected_output_file, output_file.c_str());
@@ -99,19 +101,19 @@ TEST(XmlOutputTest, GetOutputFileFromDirectoryPath) {
TEST(OutputFileHelpersTest, GetCurrentExecutableName) {
const std::string exe_str = GetCurrentExecutableName().string();
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
const bool success =
_strcmpi("googletest-options-test", exe_str.c_str()) == 0 ||
_strcmpi("gtest-options-ex_test", exe_str.c_str()) == 0 ||
_strcmpi("gtest_all_test", exe_str.c_str()) == 0 ||
_strcmpi("gtest_dll_test", exe_str.c_str()) == 0;
-#elif GTEST_OS_OS2
+#elif defined(GTEST_OS_OS2)
const bool success =
strcasecmp("googletest-options-test", exe_str.c_str()) == 0 ||
strcasecmp("gtest-options-ex_test", exe_str.c_str()) == 0 ||
strcasecmp("gtest_all_test", exe_str.c_str()) == 0 ||
strcasecmp("gtest_dll_test", exe_str.c_str()) == 0;
-#elif GTEST_OS_FUCHSIA
+#elif defined(GTEST_OS_FUCHSIA)
const bool success = exe_str == "app";
#else
const bool success =
@@ -126,11 +128,10 @@ TEST(OutputFileHelpersTest, GetCurrentExecutableName) {
#endif
;
#endif // GTEST_OS_WINDOWS
- if (!success)
- FAIL() << "GetCurrentExecutableName() returns " << exe_str;
+ if (!success) FAIL() << "GetCurrentExecutableName() returns " << exe_str;
}
-#if !GTEST_OS_FUCHSIA
+#ifndef GTEST_OS_FUCHSIA
class XmlOutputChangeDirTest : public Test {
protected:
@@ -150,36 +151,40 @@ class XmlOutputChangeDirTest : public Test {
};
TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithDefault) {
- GTEST_FLAG(output) = "";
- EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_,
- FilePath("test_detail.xml")).string(),
- UnitTestOptions::GetAbsolutePathToOutputFile());
+ GTEST_FLAG_SET(output, "");
+ EXPECT_EQ(
+ FilePath::ConcatPaths(original_working_dir_, FilePath("test_detail.xml"))
+ .string(),
+ UnitTestOptions::GetAbsolutePathToOutputFile());
}
TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithDefaultXML) {
- GTEST_FLAG(output) = "xml";
- EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_,
- FilePath("test_detail.xml")).string(),
- UnitTestOptions::GetAbsolutePathToOutputFile());
+ GTEST_FLAG_SET(output, "xml");
+ EXPECT_EQ(
+ FilePath::ConcatPaths(original_working_dir_, FilePath("test_detail.xml"))
+ .string(),
+ UnitTestOptions::GetAbsolutePathToOutputFile());
}
TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithRelativeFile) {
- GTEST_FLAG(output) = "xml:filename.abc";
- EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_,
- FilePath("filename.abc")).string(),
- UnitTestOptions::GetAbsolutePathToOutputFile());
+ GTEST_FLAG_SET(output, "xml:filename.abc");
+ EXPECT_EQ(
+ FilePath::ConcatPaths(original_working_dir_, FilePath("filename.abc"))
+ .string(),
+ UnitTestOptions::GetAbsolutePathToOutputFile());
}
TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithRelativePath) {
- GTEST_FLAG(output) = "xml:path" GTEST_PATH_SEP_;
+ GTEST_FLAG_SET(output, "xml:path" GTEST_PATH_SEP_);
const std::string expected_output_file =
FilePath::ConcatPaths(
original_working_dir_,
FilePath(std::string("path") + GTEST_PATH_SEP_ +
- GetCurrentExecutableName().string() + ".xml")).string();
+ GetCurrentExecutableName().string() + ".xml"))
+ .string();
const std::string& output_file =
UnitTestOptions::GetAbsolutePathToOutputFile();
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());
#else
EXPECT_EQ(expected_output_file, output_file.c_str());
@@ -187,31 +192,31 @@ TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithRelativePath) {
}
TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithAbsoluteFile) {
-#if GTEST_OS_WINDOWS
- GTEST_FLAG(output) = "xml:c:\\tmp\\filename.abc";
+#ifdef GTEST_OS_WINDOWS
+ GTEST_FLAG_SET(output, "xml:c:\\tmp\\filename.abc");
EXPECT_EQ(FilePath("c:\\tmp\\filename.abc").string(),
UnitTestOptions::GetAbsolutePathToOutputFile());
#else
- GTEST_FLAG(output) ="xml:/tmp/filename.abc";
+ GTEST_FLAG_SET(output, "xml:/tmp/filename.abc");
EXPECT_EQ(FilePath("/tmp/filename.abc").string(),
UnitTestOptions::GetAbsolutePathToOutputFile());
#endif
}
TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithAbsolutePath) {
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
const std::string path = "c:\\tmp\\";
#else
const std::string path = "/tmp/";
#endif
- GTEST_FLAG(output) = "xml:" + path;
+ GTEST_FLAG_SET(output, "xml:" + path);
const std::string expected_output_file =
path + GetCurrentExecutableName().string() + ".xml";
const std::string& output_file =
UnitTestOptions::GetAbsolutePathToOutputFile();
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());
#else
EXPECT_EQ(expected_output_file, output_file.c_str());
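Two mechanical migrations repeat throughout this file: GTEST_FLAG(name) = value becomes GTEST_FLAG_SET(name, value), and platform macros are tested with #ifdef / defined() because they are now defined-or-absent rather than always-defined 0/1 values. A minimal sketch of the new flag idiom (hypothetical test; UnitTestOptions comes from the internal header, as in this file):

#include "gtest/gtest.h"
#include "src/gtest-internal-inl.h"  // for UnitTestOptions

TEST(FlagStyleExample, SetAndReadOutputFlag) {
  // Accessor macros replace direct assignment through GTEST_FLAG(output).
  GTEST_FLAG_SET(output, "xml:report.xml");
  EXPECT_EQ("xml:report.xml", GTEST_FLAG_GET(output));
  EXPECT_EQ("xml", testing::internal::UnitTestOptions::GetOutputFormat());
}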
diff --git a/googletest/test/googletest-output-test-golden-lin.txt b/googletest/test/googletest-output-test-golden-lin.txt
index 3fab3b97..6ddf822f 100644
--- a/googletest/test/googletest-output-test-golden-lin.txt
+++ b/googletest/test/googletest-output-test-golden-lin.txt
@@ -12,7 +12,7 @@ Expected equality of these values:
3
Stack trace: (omitted)
-[==========] Running 88 tests from 41 test suites.
+[==========] Running 90 tests from 43 test suites.
[----------] Global test environment set-up.
FooEnvironment::SetUp() called.
BarEnvironment::SetUp() called.
@@ -956,6 +956,26 @@ Stack trace: (omitted)
~DynamicFixture()
[ FAILED ] BadDynamicFixture2.Derived
DynamicFixture::TearDownTestSuite
+[----------] 1 test from TestSuiteThatFailsToSetUp
+googletest-output-test_.cc:#: Failure
+Value of: false
+ Actual: false
+Expected: true
+Stack trace: (omitted)
+
+[ RUN ] TestSuiteThatFailsToSetUp.ShouldNotRun
+googletest-output-test_.cc:#: Skipped
+
+[ SKIPPED ] TestSuiteThatFailsToSetUp.ShouldNotRun
+[----------] 1 test from TestSuiteThatSkipsInSetUp
+googletest-output-test_.cc:#: Skipped
+Skip entire test suite
+Stack trace: (omitted)
+
+[ RUN ] TestSuiteThatSkipsInSetUp.ShouldNotRun
+googletest-output-test_.cc:#: Skipped
+
+[ SKIPPED ] TestSuiteThatSkipsInSetUp.ShouldNotRun
[----------] 1 test from PrintingFailingParams/FailingParamTest
[ RUN ] PrintingFailingParams/FailingParamTest.Fails/0
googletest-output-test_.cc:#: Failure
@@ -1032,8 +1052,11 @@ Failed
Expected fatal failure.
Stack trace: (omitted)
-[==========] 88 tests from 41 test suites ran.
+[==========] 90 tests from 43 test suites ran.
[ PASSED ] 31 tests.
+[ SKIPPED ] 2 tests, listed below:
+[ SKIPPED ] TestSuiteThatFailsToSetUp.ShouldNotRun
+[ SKIPPED ] TestSuiteThatSkipsInSetUp.ShouldNotRun
[ FAILED ] 57 tests, listed below:
[ FAILED ] NonfatalFailureTest.EscapesStringOperands
[ FAILED ] NonfatalFailureTest.DiffForLongStrings
@@ -1094,6 +1117,9 @@ Stack trace: (omitted)
[ FAILED ] GoogleTestVerification.UninstantiatedTypeParameterizedTestSuite<DetectNotInstantiatedTypesTest>
57 FAILED TESTS
+[ FAILED ] TestSuiteThatFailsToSetUp: SetUpTestSuite or TearDownTestSuite
+
+ 1 FAILED TEST SUITE
 YOU HAVE 1 DISABLED TEST
Note: Google Test filter = FatalFailureTest.*:LoggingTest.*
diff --git a/googletest/test/googletest-output-test.py b/googletest/test/googletest-output-test.py
index 09028f66..6d80d532 100755
--- a/googletest/test/googletest-output-test.py
+++ b/googletest/test/googletest-output-test.py
@@ -42,7 +42,7 @@ import difflib
import os
import re
import sys
-import gtest_test_utils
+from googletest.test import gtest_test_utils
# The flag for generating the golden file
@@ -63,20 +63,32 @@ PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('googletest-output-test_')
# 'internal_skip_environment_and_ad_hoc_tests' argument.
COMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests'])
COMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes'])
-COMMAND_WITH_TIME = ({}, [PROGRAM_PATH,
- '--gtest_print_time',
- 'internal_skip_environment_and_ad_hoc_tests',
- '--gtest_filter=FatalFailureTest.*:LoggingTest.*'])
+COMMAND_WITH_TIME = (
+ {},
+ [
+ PROGRAM_PATH,
+ '--gtest_print_time',
+ 'internal_skip_environment_and_ad_hoc_tests',
+ '--gtest_filter=FatalFailureTest.*:LoggingTest.*',
+ ],
+)
COMMAND_WITH_DISABLED = (
- {}, [PROGRAM_PATH,
- '--gtest_also_run_disabled_tests',
- 'internal_skip_environment_and_ad_hoc_tests',
- '--gtest_filter=*DISABLED_*'])
+ {},
+ [
+ PROGRAM_PATH,
+ '--gtest_also_run_disabled_tests',
+ 'internal_skip_environment_and_ad_hoc_tests',
+ '--gtest_filter=*DISABLED_*',
+ ],
+)
COMMAND_WITH_SHARDING = (
{'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
- [PROGRAM_PATH,
- 'internal_skip_environment_and_ad_hoc_tests',
- '--gtest_filter=PassingTest.*'])
+ [
+ PROGRAM_PATH,
+ 'internal_skip_environment_and_ad_hoc_tests',
+ '--gtest_filter=PassingTest.*',
+ ],
+)
GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME)
@@ -100,23 +112,27 @@ def RemoveLocations(test_output):
'FILE_NAME:#: '.
"""
- return re.sub(r'.*[/\\]((googletest-output-test_|gtest).cc)(\:\d+|\(\d+\))\: ',
- r'\1:#: ', test_output)
+ return re.sub(
+ r'.*[/\\]((googletest-output-test_|gtest).cc)(\:\d+|\(\d+\))\: ',
+ r'\1:#: ',
+ test_output,
+ )
def RemoveStackTraceDetails(output):
"""Removes all stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
- return re.sub(r'Stack trace:(.|\n)*?\n\n',
- 'Stack trace: (omitted)\n\n', output)
+ return re.sub(
+ r'Stack trace:(.|\n)*?\n\n', 'Stack trace: (omitted)\n\n', output
+ )
def RemoveStackTraces(output):
"""Removes all traces of stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
- return re.sub(r'Stack trace:(.|\n)*?\n\n', '', output)
+ return re.sub(r'Stack trace:(.|\n)*?\n', '', output)
def RemoveTime(output):
@@ -156,14 +172,12 @@ def NormalizeToCurrentPlatform(test_output):
def RemoveTestCounts(output):
"""Removes test counts from a Google Test program's output."""
- output = re.sub(r'\d+ tests?, listed below',
- '? tests, listed below', output)
- output = re.sub(r'\d+ FAILED TESTS',
- '? FAILED TESTS', output)
- output = re.sub(r'\d+ tests? from \d+ test cases?',
- '? tests from ? test cases', output)
- output = re.sub(r'\d+ tests? from ([a-zA-Z_])',
- r'? tests from \1', output)
+ output = re.sub(r'\d+ tests?, listed below', '? tests, listed below', output)
+ output = re.sub(r'\d+ FAILED TESTS', '? FAILED TESTS', output)
+ output = re.sub(
+ r'\d+ tests? from \d+ test cases?', '? tests from ? test cases', output
+ )
+ output = re.sub(r'\d+ tests? from ([a-zA-Z_])', r'? tests from \1', output)
return re.sub(r'\d+ tests?\.', '? tests.', output)
@@ -175,18 +189,19 @@ def RemoveMatchingTests(test_output, pattern):
Args:
test_output: A string containing the test output.
- pattern: A regex string that matches names of test cases or
- tests to remove.
+ pattern: A regex string that matches names of test cases or tests
+ to remove.
Returns:
Contents of test_output with tests whose names match pattern removed.
"""
test_output = re.sub(
- r'.*\[ RUN \] .*%s(.|\n)*?\[( FAILED | OK )\] .*%s.*\n' % (
- pattern, pattern),
+ r'.*\[ RUN \] .*%s(.|\n)*?\[( FAILED | OK )\] .*%s.*\n'
+ % (pattern, pattern),
'',
- test_output)
+ test_output,
+ )
return re.sub(r'.*%s.*\n' % pattern, '', test_output)
@@ -205,8 +220,8 @@ def GetShellCommandOutput(env_cmd):
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
- environment variables to set, and element 1 is a string with
- the command and any flags.
+ environment variables to set, and element 1 is a string with the command
+ and any flags.
Returns:
A string with the command's combined standard and diagnostic output.
@@ -222,13 +237,16 @@ def GetShellCommandOutput(env_cmd):
def GetCommandOutput(env_cmd):
- """Runs a command and returns its output with all file location
- info stripped off.
+ """Runs a command and returns output with all file location info stripped off.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
- environment variables to set, and element 1 is a string with
- the command and any flags.
+ environment variables to set, and element 1 is a string with the command
+ and any flags.
+
+ Returns:
+ A string with the command's combined standard and diagnostic output. File
+ location info is stripped.
"""
# Disables exception pop-ups on Windows.
@@ -241,10 +259,12 @@ def GetCommandOutput(env_cmd):
def GetOutputOfAllCommands():
"""Returns concatenated output from several representative commands."""
- return (GetCommandOutput(COMMAND_WITH_COLOR) +
- GetCommandOutput(COMMAND_WITH_TIME) +
- GetCommandOutput(COMMAND_WITH_DISABLED) +
- GetCommandOutput(COMMAND_WITH_SHARDING))
+ return (
+ GetCommandOutput(COMMAND_WITH_COLOR)
+ + GetCommandOutput(COMMAND_WITH_TIME)
+ + GetCommandOutput(COMMAND_WITH_DISABLED)
+ + GetCommandOutput(COMMAND_WITH_SHARDING)
+ )
test_list = GetShellCommandOutput(COMMAND_LIST_TESTS)
@@ -253,12 +273,16 @@ SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list
SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list
SUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv
-CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and
- SUPPORTS_TYPED_TESTS and
- SUPPORTS_THREADS and
- SUPPORTS_STACK_TRACES)
+CAN_GENERATE_GOLDEN_FILE = (
+ SUPPORTS_DEATH_TESTS
+ and SUPPORTS_TYPED_TESTS
+ and SUPPORTS_THREADS
+ and SUPPORTS_STACK_TRACES
+)
+
class GTestOutputTest(gtest_test_utils.TestCase):
+
def RemoveUnsupportedTests(self, test_output):
if not SUPPORTS_DEATH_TESTS:
test_output = RemoveMatchingTests(test_output, 'DeathTest')
@@ -267,12 +291,13 @@ class GTestOutputTest(gtest_test_utils.TestCase):
test_output = RemoveMatchingTests(test_output, 'TypedDeathTest')
test_output = RemoveMatchingTests(test_output, 'TypeParamDeathTest')
if not SUPPORTS_THREADS:
- test_output = RemoveMatchingTests(test_output,
- 'ExpectFailureWithThreadsTest')
- test_output = RemoveMatchingTests(test_output,
- 'ScopedFakeTestPartResultReporterTest')
- test_output = RemoveMatchingTests(test_output,
- 'WorksConcurrently')
+ test_output = RemoveMatchingTests(
+ test_output, 'ExpectFailureWithThreadsTest'
+ )
+ test_output = RemoveMatchingTests(
+ test_output, 'ScopedFakeTestPartResultReporterTest'
+ )
+ test_output = RemoveMatchingTests(test_output, 'WorksConcurrently')
if not SUPPORTS_STACK_TRACES:
test_output = RemoveStackTraces(test_output)
@@ -297,27 +322,42 @@ class GTestOutputTest(gtest_test_utils.TestCase):
normalized_golden = RemoveTypeInfoDetails(golden)
if CAN_GENERATE_GOLDEN_FILE:
- self.assertEqual(normalized_golden, normalized_actual,
- '\n'.join(difflib.unified_diff(
- normalized_golden.split('\n'),
- normalized_actual.split('\n'),
- 'golden', 'actual')))
+ self.assertEqual(
+ normalized_golden,
+ normalized_actual,
+ '\n'.join(
+ difflib.unified_diff(
+ normalized_golden.split('\n'),
+ normalized_actual.split('\n'),
+ 'golden',
+ 'actual',
+ )
+ ),
+ )
else:
normalized_actual = NormalizeToCurrentPlatform(
- RemoveTestCounts(normalized_actual))
+ RemoveTestCounts(normalized_actual)
+ )
normalized_golden = NormalizeToCurrentPlatform(
- RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden)))
+ RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden))
+ )
# This code is very handy when debugging golden file differences:
if os.getenv('DEBUG_GTEST_OUTPUT_TEST'):
- open(os.path.join(
- gtest_test_utils.GetSourceDir(),
- '_googletest-output-test_normalized_actual.txt'), 'wb').write(
- normalized_actual)
- open(os.path.join(
- gtest_test_utils.GetSourceDir(),
- '_googletest-output-test_normalized_golden.txt'), 'wb').write(
- normalized_golden)
+ open(
+ os.path.join(
+ gtest_test_utils.GetSourceDir(),
+ '_googletest-output-test_normalized_actual.txt',
+ ),
+ 'wb',
+ ).write(normalized_actual)
+ open(
+ os.path.join(
+ gtest_test_utils.GetSourceDir(),
+ '_googletest-output-test_normalized_golden.txt',
+ ),
+ 'wb',
+ ).write(normalized_golden)
self.assertEqual(normalized_golden, normalized_actual)
@@ -334,11 +374,10 @@ if __name__ == '__main__':
golden_file.write(output.encode())
golden_file.close()
else:
- message = (
- """Unable to write a golden file when compiled in an environment
+ message = """Unable to write a golden file when compiled in an environment
that does not support all the required features (death tests,
typed tests, stack traces, and multiple threads).
-Please build this test and generate the golden file using Blaze on Linux.""")
+Please build this test and generate the golden file using Blaze on Linux."""
sys.stderr.write(message)
sys.exit(1)
diff --git a/googletest/test/googletest-output-test_.cc b/googletest/test/googletest-output-test_.cc
index 074f64ef..e3560c01 100644
--- a/googletest/test/googletest-output-test_.cc
+++ b/googletest/test/googletest-output-test_.cc
@@ -33,17 +33,18 @@
// desired messages. Therefore, most tests in this file are MEANT TO
// FAIL.
+#include <stdlib.h>
+
+#include <algorithm>
+#include <string>
+
#include "gtest/gtest-spi.h"
#include "gtest/gtest.h"
#include "src/gtest-internal-inl.h"
-#include <stdlib.h>
-
-#if _MSC_VER
GTEST_DISABLE_MSC_WARNINGS_PUSH_(4127 /* conditional expression is constant */)
-#endif // _MSC_VER
-#if GTEST_IS_THREADSAFE
+#ifdef GTEST_IS_THREADSAFE
using testing::ScopedFakeTestPartResultReporter;
using testing::TestPartResultArray;
@@ -56,9 +57,7 @@ namespace posix = ::testing::internal::posix;
// Tests catching fatal failures.
// A subroutine used by the following test.
-void TestEq1(int x) {
- ASSERT_EQ(1, x);
-}
+void TestEq1(int x) { ASSERT_EQ(1, x); }
// This function calls a test subroutine, catches the fatal failure it
// generates, and then returns early.
@@ -76,24 +75,19 @@ void TryTestSubroutine() {
FAIL() << "This should never be reached.";
}
-TEST(PassingTest, PassingTest1) {
-}
+TEST(PassingTest, PassingTest1) {}
-TEST(PassingTest, PassingTest2) {
-}
+TEST(PassingTest, PassingTest2) {}
// Tests that parameters of failing parameterized tests are printed in the
// failing test summary.
class FailingParamTest : public testing::TestWithParam<int> {};
-TEST_P(FailingParamTest, Fails) {
- EXPECT_EQ(1, GetParam());
-}
+TEST_P(FailingParamTest, Fails) { EXPECT_EQ(1, GetParam()); }
// This generates a test which will fail. Google Test is expected to print
// its parameter when it outputs the list of all failed tests.
-INSTANTIATE_TEST_SUITE_P(PrintingFailingParams,
- FailingParamTest,
+INSTANTIATE_TEST_SUITE_P(PrintingFailingParams, FailingParamTest,
testing::Values(2));
// Tests that an empty value for the test suite basename yields just
@@ -146,18 +140,16 @@ TEST(FatalFailureTest, FatalFailureInNestedSubroutine) {
// Tests HasFatalFailure() after a failed EXPECT check.
TEST(FatalFailureTest, NonfatalFailureInSubroutine) {
printf("(expecting a failure on false)\n");
- EXPECT_TRUE(false); // Generates a nonfatal failure
+ EXPECT_TRUE(false); // Generates a nonfatal failure
ASSERT_FALSE(HasFatalFailure()); // This should succeed.
}
// Tests interleaving user logging and Google Test assertions.
TEST(LoggingTest, InterleavingLoggingAndAssertions) {
- static const int a[4] = {
- 3, 9, 2, 6
- };
+ static const int a[4] = {3, 9, 2, 6};
printf("(expecting 2 failures on (3) >= (a[i]))\n");
- for (int i = 0; i < static_cast<int>(sizeof(a)/sizeof(*a)); i++) {
+ for (int i = 0; i < static_cast<int>(sizeof(a) / sizeof(*a)); i++) {
printf("i == %d\n", i);
EXPECT_GE(3, a[i]);
}
@@ -258,7 +250,7 @@ TEST(SCOPED_TRACETest, CanBeRepeated) {
<< "contain trace point A, B, and D.";
}
-#if GTEST_IS_THREADSAFE
+#ifdef GTEST_IS_THREADSAFE
// Tests that SCOPED_TRACE()s can be used concurrently from multiple
// threads. Namely, an assertion should be affected by
// SCOPED_TRACE()s in its own thread only.
@@ -297,16 +289,14 @@ struct CheckPoints {
static void ThreadWithScopedTrace(CheckPoints* check_points) {
{
SCOPED_TRACE("Trace B");
- ADD_FAILURE()
- << "Expected failure #1 (in thread B, only trace B alive).";
+ ADD_FAILURE() << "Expected failure #1 (in thread B, only trace B alive).";
check_points->n1.Notify();
check_points->n2.WaitForNotification();
ADD_FAILURE()
<< "Expected failure #3 (in thread B, trace A & B both alive).";
} // Trace B dies here.
- ADD_FAILURE()
- << "Expected failure #4 (in thread B, only trace A alive).";
+ ADD_FAILURE() << "Expected failure #4 (in thread B, only trace A alive).";
check_points->n3.Notify();
}
@@ -325,11 +315,9 @@ TEST(SCOPED_TRACETest, WorksConcurrently) {
check_points.n2.Notify();
check_points.n3.WaitForNotification();
- ADD_FAILURE()
- << "Expected failure #5 (in thread A, only trace A alive).";
+ ADD_FAILURE() << "Expected failure #5 (in thread A, only trace A alive).";
} // Trace A dies here.
- ADD_FAILURE()
- << "Expected failure #6 (in thread A, no trace alive).";
+ ADD_FAILURE() << "Expected failure #6 (in thread A, no trace alive).";
thread.Join();
}
#endif // GTEST_IS_THREADSAFE
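The reflowed failure messages above all restate the invariant under test: an assertion picks up only the SCOPED_TRACE()s created in its own thread. A minimal single-threaded sketch of the building block (hypothetical helper):

#include "gtest/gtest.h"

void CheckPositive(int value) {
  // Attached to any failure raised while in scope, in this thread only.
  SCOPED_TRACE(testing::Message() << "value = " << value);
  EXPECT_GT(value, 0);
}

TEST(TraceExample, AnnotatesFailures) {
  CheckPositive(1);
  CheckPositive(2);
}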
@@ -412,9 +400,7 @@ class FatalFailureInFixtureConstructorTest : public testing::Test {
}
private:
- void Init() {
- FAIL() << "Expected failure #1, in the test fixture c'tor.";
- }
+ void Init() { FAIL() << "Expected failure #1, in the test fixture c'tor."; }
};
TEST_F(FatalFailureInFixtureConstructorTest, FailureInConstructor) {
@@ -436,9 +422,7 @@ class NonFatalFailureInSetUpTest : public testing::Test {
void TearDown() override { FAIL() << "Expected failure #3, in TearDown()."; }
private:
- void Deinit() {
- FAIL() << "Expected failure #4, in the test fixture d'tor.";
- }
+ void Deinit() { FAIL() << "Expected failure #4, in the test fixture d'tor."; }
};
TEST_F(NonFatalFailureInSetUpTest, FailureInSetUp) {
@@ -458,9 +442,7 @@ class FatalFailureInSetUpTest : public testing::Test {
void TearDown() override { FAIL() << "Expected failure #2, in TearDown()."; }
private:
- void Deinit() {
- FAIL() << "Expected failure #3, in the test fixture d'tor.";
- }
+ void Deinit() { FAIL() << "Expected failure #3, in the test fixture d'tor."; }
};
TEST_F(FatalFailureInSetUpTest, FailureInSetUp) {
@@ -488,14 +470,12 @@ TEST(GtestFailAtTest, MessageContainsSpecifiedFileAndLineNumber) {
namespace foo {
-class MixedUpTestSuiteTest : public testing::Test {
-};
+class MixedUpTestSuiteTest : public testing::Test {};
TEST_F(MixedUpTestSuiteTest, FirstTestFromNamespaceFoo) {}
TEST_F(MixedUpTestSuiteTest, SecondTestFromNamespaceFoo) {}
-class MixedUpTestSuiteWithSameTestNameTest : public testing::Test {
-};
+class MixedUpTestSuiteWithSameTestNameTest : public testing::Test {};
TEST_F(MixedUpTestSuiteWithSameTestNameTest,
TheSecondTestWithThisNameShouldFail) {}
@@ -504,16 +484,14 @@ TEST_F(MixedUpTestSuiteWithSameTestNameTest,
namespace bar {
-class MixedUpTestSuiteTest : public testing::Test {
-};
+class MixedUpTestSuiteTest : public testing::Test {};
// The following two tests are expected to fail. We rely on the
// golden file to check that Google Test generates the right error message.
TEST_F(MixedUpTestSuiteTest, ThisShouldFail) {}
TEST_F(MixedUpTestSuiteTest, ThisShouldFailToo) {}
-class MixedUpTestSuiteWithSameTestNameTest : public testing::Test {
-};
+class MixedUpTestSuiteWithSameTestNameTest : public testing::Test {};
// Expected to fail. We rely on the golden file to check that Google Test
// generates the right error message.
@@ -527,8 +505,7 @@ TEST_F(MixedUpTestSuiteWithSameTestNameTest,
// test case checks the scenario where TEST_F appears before TEST, and
// the second one checks where TEST appears before TEST_F.
-class TEST_F_before_TEST_in_same_test_case : public testing::Test {
-};
+class TEST_F_before_TEST_in_same_test_case : public testing::Test {};
TEST_F(TEST_F_before_TEST_in_same_test_case, DefinedUsingTEST_F) {}
@@ -536,15 +513,13 @@ TEST_F(TEST_F_before_TEST_in_same_test_case, DefinedUsingTEST_F) {}
// generates the right error message.
TEST(TEST_F_before_TEST_in_same_test_case, DefinedUsingTESTAndShouldFail) {}
-class TEST_before_TEST_F_in_same_test_case : public testing::Test {
-};
+class TEST_before_TEST_F_in_same_test_case : public testing::Test {};
TEST(TEST_before_TEST_F_in_same_test_case, DefinedUsingTEST) {}
// Expected to fail. We rely on the golden file to check that Google Test
// generates the right error message.
-TEST_F(TEST_before_TEST_F_in_same_test_case, DefinedUsingTEST_FAndShouldFail) {
-}
+TEST_F(TEST_before_TEST_F_in_same_test_case, DefinedUsingTEST_FAndShouldFail) {}
// Used for testing EXPECT_NONFATAL_FAILURE() and EXPECT_FATAL_FAILURE().
int global_integer = 0;
@@ -552,9 +527,9 @@ int global_integer = 0;
// Tests that EXPECT_NONFATAL_FAILURE() can reference global variables.
TEST(ExpectNonfatalFailureTest, CanReferenceGlobalVariables) {
global_integer = 0;
- EXPECT_NONFATAL_FAILURE({
- EXPECT_EQ(1, global_integer) << "Expected non-fatal failure.";
- }, "Expected non-fatal failure.");
+ EXPECT_NONFATAL_FAILURE(
+ { EXPECT_EQ(1, global_integer) << "Expected non-fatal failure."; },
+ "Expected non-fatal failure.");
}
// Tests that EXPECT_NONFATAL_FAILURE() can reference local variables
@@ -563,53 +538,48 @@ TEST(ExpectNonfatalFailureTest, CanReferenceLocalVariables) {
int m = 0;
static int n;
n = 1;
- EXPECT_NONFATAL_FAILURE({
- EXPECT_EQ(m, n) << "Expected non-fatal failure.";
- }, "Expected non-fatal failure.");
+ EXPECT_NONFATAL_FAILURE({ EXPECT_EQ(m, n) << "Expected non-fatal failure."; },
+ "Expected non-fatal failure.");
}
// Tests that EXPECT_NONFATAL_FAILURE() succeeds when there is exactly
// one non-fatal failure and no fatal failure.
TEST(ExpectNonfatalFailureTest, SucceedsWhenThereIsOneNonfatalFailure) {
- EXPECT_NONFATAL_FAILURE({
- ADD_FAILURE() << "Expected non-fatal failure.";
- }, "Expected non-fatal failure.");
+ EXPECT_NONFATAL_FAILURE({ ADD_FAILURE() << "Expected non-fatal failure."; },
+ "Expected non-fatal failure.");
}
// Tests that EXPECT_NONFATAL_FAILURE() fails when there is no
// non-fatal failure.
TEST(ExpectNonfatalFailureTest, FailsWhenThereIsNoNonfatalFailure) {
printf("(expecting a failure)\n");
- EXPECT_NONFATAL_FAILURE({
- }, "");
+ EXPECT_NONFATAL_FAILURE({}, "");
}
// Tests that EXPECT_NONFATAL_FAILURE() fails when there are two
// non-fatal failures.
TEST(ExpectNonfatalFailureTest, FailsWhenThereAreTwoNonfatalFailures) {
printf("(expecting a failure)\n");
- EXPECT_NONFATAL_FAILURE({
- ADD_FAILURE() << "Expected non-fatal failure 1.";
- ADD_FAILURE() << "Expected non-fatal failure 2.";
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ {
+ ADD_FAILURE() << "Expected non-fatal failure 1.";
+ ADD_FAILURE() << "Expected non-fatal failure 2.";
+ },
+ "");
}
// Tests that EXPECT_NONFATAL_FAILURE() fails when there is one fatal
// failure.
TEST(ExpectNonfatalFailureTest, FailsWhenThereIsOneFatalFailure) {
printf("(expecting a failure)\n");
- EXPECT_NONFATAL_FAILURE({
- FAIL() << "Expected fatal failure.";
- }, "");
+ EXPECT_NONFATAL_FAILURE({ FAIL() << "Expected fatal failure."; }, "");
}
// Tests that EXPECT_NONFATAL_FAILURE() fails when the statement being
// tested returns.
TEST(ExpectNonfatalFailureTest, FailsWhenStatementReturns) {
printf("(expecting a failure)\n");
- EXPECT_NONFATAL_FAILURE({
- return;
- }, "");
+ EXPECT_NONFATAL_FAILURE({ return; }, "");
}
#if GTEST_HAS_EXCEPTIONS
@@ -619,10 +589,8 @@ TEST(ExpectNonfatalFailureTest, FailsWhenStatementReturns) {
TEST(ExpectNonfatalFailureTest, FailsWhenStatementThrows) {
printf("(expecting a failure)\n");
try {
- EXPECT_NONFATAL_FAILURE({
- throw 0;
- }, "");
- } catch(int) { // NOLINT
+ EXPECT_NONFATAL_FAILURE({ throw 0; }, "");
+ } catch (int) { // NOLINT
}
}
@@ -631,9 +599,9 @@ TEST(ExpectNonfatalFailureTest, FailsWhenStatementThrows) {
// Tests that EXPECT_FATAL_FAILURE() can reference global variables.
TEST(ExpectFatalFailureTest, CanReferenceGlobalVariables) {
global_integer = 0;
- EXPECT_FATAL_FAILURE({
- ASSERT_EQ(1, global_integer) << "Expected fatal failure.";
- }, "Expected fatal failure.");
+ EXPECT_FATAL_FAILURE(
+ { ASSERT_EQ(1, global_integer) << "Expected fatal failure."; },
+ "Expected fatal failure.");
}
// Tests that EXPECT_FATAL_FAILURE() can reference local static
@@ -641,58 +609,51 @@ TEST(ExpectFatalFailureTest, CanReferenceGlobalVariables) {
TEST(ExpectFatalFailureTest, CanReferenceLocalStaticVariables) {
static int n;
n = 1;
- EXPECT_FATAL_FAILURE({
- ASSERT_EQ(0, n) << "Expected fatal failure.";
- }, "Expected fatal failure.");
+ EXPECT_FATAL_FAILURE({ ASSERT_EQ(0, n) << "Expected fatal failure."; },
+ "Expected fatal failure.");
}
// Tests that EXPECT_FATAL_FAILURE() succeeds when there is exactly
// one fatal failure and no non-fatal failure.
TEST(ExpectFatalFailureTest, SucceedsWhenThereIsOneFatalFailure) {
- EXPECT_FATAL_FAILURE({
- FAIL() << "Expected fatal failure.";
- }, "Expected fatal failure.");
+ EXPECT_FATAL_FAILURE({ FAIL() << "Expected fatal failure."; },
+ "Expected fatal failure.");
}
// Tests that EXPECT_FATAL_FAILURE() fails when there is no fatal
// failure.
TEST(ExpectFatalFailureTest, FailsWhenThereIsNoFatalFailure) {
printf("(expecting a failure)\n");
- EXPECT_FATAL_FAILURE({
- }, "");
+ EXPECT_FATAL_FAILURE({}, "");
}
// A helper for generating a fatal failure.
-void FatalFailure() {
- FAIL() << "Expected fatal failure.";
-}
+void FatalFailure() { FAIL() << "Expected fatal failure."; }
// Tests that EXPECT_FATAL_FAILURE() fails when there are two
// fatal failures.
TEST(ExpectFatalFailureTest, FailsWhenThereAreTwoFatalFailures) {
printf("(expecting a failure)\n");
- EXPECT_FATAL_FAILURE({
- FatalFailure();
- FatalFailure();
- }, "");
+ EXPECT_FATAL_FAILURE(
+ {
+ FatalFailure();
+ FatalFailure();
+ },
+ "");
}
// Tests that EXPECT_FATAL_FAILURE() fails when there is one non-fatal
// failure.
TEST(ExpectFatalFailureTest, FailsWhenThereIsOneNonfatalFailure) {
printf("(expecting a failure)\n");
- EXPECT_FATAL_FAILURE({
- ADD_FAILURE() << "Expected non-fatal failure.";
- }, "");
+ EXPECT_FATAL_FAILURE({ ADD_FAILURE() << "Expected non-fatal failure."; }, "");
}
// Tests that EXPECT_FATAL_FAILURE() fails when the statement being
// tested returns.
TEST(ExpectFatalFailureTest, FailsWhenStatementReturns) {
printf("(expecting a failure)\n");
- EXPECT_FATAL_FAILURE({
- return;
- }, "");
+ EXPECT_FATAL_FAILURE({ return; }, "");
}
#if GTEST_HAS_EXCEPTIONS
@@ -702,10 +663,8 @@ TEST(ExpectFatalFailureTest, FailsWhenStatementReturns) {
TEST(ExpectFatalFailureTest, FailsWhenStatementThrows) {
printf("(expecting a failure)\n");
try {
- EXPECT_FATAL_FAILURE({
- throw 0;
- }, "");
- } catch(int) { // NOLINT
+ EXPECT_FATAL_FAILURE({ throw 0; }, "");
+ } catch (int) { // NOLINT
}
}
@@ -717,21 +676,14 @@ std::string ParamNameFunc(const testing::TestParamInfo<std::string>& info) {
return info.param;
}
-class ParamTest : public testing::TestWithParam<std::string> {
-};
+class ParamTest : public testing::TestWithParam<std::string> {};
-TEST_P(ParamTest, Success) {
- EXPECT_EQ("a", GetParam());
-}
+TEST_P(ParamTest, Success) { EXPECT_EQ("a", GetParam()); }
-TEST_P(ParamTest, Failure) {
- EXPECT_EQ("b", GetParam()) << "Expected failure";
-}
+TEST_P(ParamTest, Failure) { EXPECT_EQ("b", GetParam()) << "Expected failure"; }
-INSTANTIATE_TEST_SUITE_P(PrintingStrings,
- ParamTest,
- testing::Values(std::string("a")),
- ParamNameFunc);
+INSTANTIATE_TEST_SUITE_P(PrintingStrings, ParamTest,
+ testing::Values(std::string("a")), ParamNameFunc);
// The case where a suite has INSTANTIATE_TEST_SUITE_P but not TEST_P.
using NoTests = ParamTest;
@@ -739,20 +691,17 @@ INSTANTIATE_TEST_SUITE_P(ThisIsOdd, NoTests, ::testing::Values("Hello"));
// fails under kErrorOnUninstantiatedParameterizedTest=true
class DetectNotInstantiatedTest : public testing::TestWithParam<int> {};
-TEST_P(DetectNotInstantiatedTest, Used) { }
+TEST_P(DetectNotInstantiatedTest, Used) {}
// This would make the test failure from the above go away.
// INSTANTIATE_TEST_SUITE_P(Fix, DetectNotInstantiatedTest, testing::Values(1));
template <typename T>
-class TypedTest : public testing::Test {
-};
+class TypedTest : public testing::Test {};
TYPED_TEST_SUITE(TypedTest, testing::Types<int>);
-TYPED_TEST(TypedTest, Success) {
- EXPECT_EQ(0, TypeParam());
-}
+TYPED_TEST(TypedTest, Success) { EXPECT_EQ(0, TypeParam()); }
TYPED_TEST(TypedTest, Failure) {
EXPECT_EQ(1, TypeParam()) << "Expected failure";
@@ -781,14 +730,11 @@ TYPED_TEST(TypedTestWithNames, Success) {}
TYPED_TEST(TypedTestWithNames, Failure) { FAIL(); }
template <typename T>
-class TypedTestP : public testing::Test {
-};
+class TypedTestP : public testing::Test {};
TYPED_TEST_SUITE_P(TypedTestP);
-TYPED_TEST_P(TypedTestP, Success) {
- EXPECT_EQ(0U, TypeParam());
-}
+TYPED_TEST_P(TypedTestP, Success) { EXPECT_EQ(0U, TypeParam()); }
TYPED_TEST_P(TypedTestP, Failure) {
EXPECT_EQ(1U, TypeParam()) << "Expected failure";
@@ -813,7 +759,7 @@ class TypedTestPNames {
};
INSTANTIATE_TYPED_TEST_SUITE_P(UnsignedCustomName, TypedTestP, UnsignedTypes,
- TypedTestPNames);
+ TypedTestPNames);
template <typename T>
class DetectNotInstantiatedTypesTest : public testing::Test {};
@@ -830,39 +776,33 @@ REGISTER_TYPED_TEST_SUITE_P(DetectNotInstantiatedTypesTest, Used);
// typedef ::testing::Types<char, int, unsigned int> MyTypes;
// INSTANTIATE_TYPED_TEST_SUITE_P(All, DetectNotInstantiatedTypesTest, MyTypes);
-#if GTEST_HAS_DEATH_TEST
+#ifdef GTEST_HAS_DEATH_TEST
// We rely on the golden file to verify that tests whose test case
// name ends with DeathTest are run first.
-TEST(ADeathTest, ShouldRunFirst) {
-}
+TEST(ADeathTest, ShouldRunFirst) {}
// We rely on the golden file to verify that typed tests whose test
// case name ends with DeathTest are run first.
template <typename T>
-class ATypedDeathTest : public testing::Test {
-};
+class ATypedDeathTest : public testing::Test {};
typedef testing::Types<int, double> NumericTypes;
TYPED_TEST_SUITE(ATypedDeathTest, NumericTypes);
-TYPED_TEST(ATypedDeathTest, ShouldRunFirst) {
-}
-
+TYPED_TEST(ATypedDeathTest, ShouldRunFirst) {}
// We rely on the golden file to verify that type-parameterized tests
// whose test case name ends with DeathTest are run first.
template <typename T>
-class ATypeParamDeathTest : public testing::Test {
-};
+class ATypeParamDeathTest : public testing::Test {};
TYPED_TEST_SUITE_P(ATypeParamDeathTest);
-TYPED_TEST_P(ATypeParamDeathTest, ShouldRunFirst) {
-}
+TYPED_TEST_P(ATypeParamDeathTest, ShouldRunFirst) {}
REGISTER_TYPED_TEST_SUITE_P(ATypeParamDeathTest, ShouldRunFirst);
@@ -874,10 +814,7 @@ INSTANTIATE_TYPED_TEST_SUITE_P(My, ATypeParamDeathTest, NumericTypes);
// EXPECT_{,NON}FATAL_FAILURE{,_ON_ALL_THREADS}.
class ExpectFailureTest : public testing::Test {
public: // Must be public and not protected due to a bug in g++ 3.4.2.
- enum FailureMode {
- FATAL_FAILURE,
- NONFATAL_FAILURE
- };
+ enum FailureMode { FATAL_FAILURE, NONFATAL_FAILURE };
static void AddFailure(FailureMode failure) {
if (failure == FATAL_FAILURE) {
FAIL() << "Expected fatal failure.";
@@ -893,11 +830,13 @@ TEST_F(ExpectFailureTest, ExpectFatalFailure) {
EXPECT_FATAL_FAILURE(SUCCEED(), "Expected fatal failure.");
// Expected fatal failure, but got a non-fatal failure.
printf("(expecting 1 failure)\n");
- EXPECT_FATAL_FAILURE(AddFailure(NONFATAL_FAILURE), "Expected non-fatal "
+ EXPECT_FATAL_FAILURE(AddFailure(NONFATAL_FAILURE),
+ "Expected non-fatal "
"failure.");
// Wrong message.
printf("(expecting 1 failure)\n");
- EXPECT_FATAL_FAILURE(AddFailure(FATAL_FAILURE), "Some other fatal failure "
+ EXPECT_FATAL_FAILURE(AddFailure(FATAL_FAILURE),
+ "Some other fatal failure "
"expected.");
}
@@ -910,11 +849,12 @@ TEST_F(ExpectFailureTest, ExpectNonFatalFailure) {
EXPECT_NONFATAL_FAILURE(AddFailure(FATAL_FAILURE), "Expected fatal failure.");
// Wrong message.
printf("(expecting 1 failure)\n");
- EXPECT_NONFATAL_FAILURE(AddFailure(NONFATAL_FAILURE), "Some other non-fatal "
+ EXPECT_NONFATAL_FAILURE(AddFailure(NONFATAL_FAILURE),
+ "Some other non-fatal "
"failure.");
}
-#if GTEST_IS_THREADSAFE
+#ifdef GTEST_IS_THREADSAFE
class ExpectFailureWithThreadsTest : public ExpectFailureTest {
protected:
@@ -975,7 +915,8 @@ TEST_F(ExpectFailureTest, ExpectFatalFailureOnAllThreads) {
TEST_F(ExpectFailureTest, ExpectNonFatalFailureOnAllThreads) {
// Expected non-fatal failure, but succeeds.
printf("(expecting 1 failure)\n");
- EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(SUCCEED(), "Expected non-fatal "
+ EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(SUCCEED(),
+ "Expected non-fatal "
"failure.");
// Expected non-fatal failure, but got a fatal failure.
printf("(expecting 1 failure)\n");
@@ -1060,49 +1001,58 @@ class BarEnvironment : public testing::Environment {
}
};
+class TestSuiteThatFailsToSetUp : public testing::Test {
+ public:
+ static void SetUpTestSuite() { EXPECT_TRUE(false); }
+};
+TEST_F(TestSuiteThatFailsToSetUp, ShouldNotRun) { std::abort(); }
+
+class TestSuiteThatSkipsInSetUp : public testing::Test {
+ public:
+ static void SetUpTestSuite() { GTEST_SKIP() << "Skip entire test suite"; }
+};
+TEST_F(TestSuiteThatSkipsInSetUp, ShouldNotRun) { std::abort(); }
+
// The main function.
//
// The idea is to use Google Test to run all the tests we have defined (some
// of them are intended to fail), and then compare the test results
// with the "golden" file.
-int main(int argc, char **argv) {
- testing::GTEST_FLAG(print_time) = false;
+int main(int argc, char** argv) {
+ GTEST_FLAG_SET(print_time, false);
// We just run the tests, knowing some of them are intended to fail.
// We will use a separate Python script to compare the output of
// this program with the golden file.
// It's hard to test InitGoogleTest() directly, as it has many
- // global side effects. The following line serves as a sanity test
+ // global side effects. The following line serves as a test
// for it.
testing::InitGoogleTest(&argc, argv);
bool internal_skip_environment_and_ad_hoc_tests =
std::count(argv, argv + argc,
std::string("internal_skip_environment_and_ad_hoc_tests")) > 0;
-#if GTEST_HAS_DEATH_TEST
- if (testing::internal::GTEST_FLAG(internal_run_death_test) != "") {
+#ifdef GTEST_HAS_DEATH_TEST
+ if (!GTEST_FLAG_GET(internal_run_death_test).empty()) {
// Skip the usual output capturing if we're running as the child
  // process of a threadsafe-style death test.
-# if GTEST_OS_WINDOWS
+#if defined(GTEST_OS_WINDOWS)
posix::FReopen("nul:", "w", stdout);
-# else
+#else
posix::FReopen("/dev/null", "w", stdout);
-# endif // GTEST_OS_WINDOWS
+#endif // GTEST_OS_WINDOWS
return RUN_ALL_TESTS();
}
#endif // GTEST_HAS_DEATH_TEST
- if (internal_skip_environment_and_ad_hoc_tests)
- return RUN_ALL_TESTS();
+ if (internal_skip_environment_and_ad_hoc_tests) return RUN_ALL_TESTS();
// Registers two global test environments.
// The golden file verifies that they are set up in the order they
// are registered, and torn down in the reverse order.
testing::AddGlobalTestEnvironment(new FooEnvironment);
testing::AddGlobalTestEnvironment(new BarEnvironment);
-#if _MSC_VER
-GTEST_DISABLE_MSC_WARNINGS_POP_() // 4127
-#endif // _MSC_VER
+ GTEST_DISABLE_MSC_WARNINGS_POP_() // 4127
return RunAllTests();
}
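The two fixtures added above pin down newer suite-level semantics: a failure inside SetUpTestSuite() marks the whole suite failed (the golden file's new "1 FAILED TEST SUITE" line), and GTEST_SKIP() inside SetUpTestSuite() skips every test in the suite, which is why both ShouldNotRun bodies can call std::abort(). A minimal sketch of the skip variant on its own (hypothetical fixture name):

#include <cstdlib>

#include "gtest/gtest.h"

class NeedsExternalService : public testing::Test {
 public:
  static void SetUpTestSuite() {
    // Every TEST_F in the suite is reported as [ SKIPPED ] and its body
    // never executes.
    GTEST_SKIP() << "service unavailable in this environment";
  }
};

TEST_F(NeedsExternalService, NeverRuns) { std::abort(); }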
diff --git a/googletest/test/googletest-param-test-invalid-name1-test.py b/googletest/test/googletest-param-test-invalid-name1-test.py
index 2a08477a..4886e49e 100644
--- a/googletest/test/googletest-param-test-invalid-name1-test.py
+++ b/googletest/test/googletest-param-test-invalid-name1-test.py
@@ -30,7 +30,7 @@
"""Verifies that Google Test warns the user when not initialized properly."""
-import gtest_test_utils
+from googletest.test import gtest_test_utils
binary_name = 'googletest-param-test-invalid-name1-test_'
COMMAND = gtest_test_utils.GetTestExecutablePath(binary_name)
@@ -44,7 +44,7 @@ def Assert(condition):
def TestExitCodeAndOutput(command):
"""Runs the given command and verifies its exit code and output."""
- err = ('Parameterized test name \'"InvalidWithQuotes"\' is invalid')
+ err = 'Parameterized test name \'"InvalidWithQuotes"\' is invalid'
p = gtest_test_utils.Subprocess(command)
Assert(p.terminated_by_signal)
diff --git a/googletest/test/googletest-param-test-invalid-name1-test_.cc b/googletest/test/googletest-param-test-invalid-name1-test_.cc
index 955d6999..004733a1 100644
--- a/googletest/test/googletest-param-test-invalid-name1-test_.cc
+++ b/googletest/test/googletest-param-test-invalid-name1-test_.cc
@@ -27,17 +27,14 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
#include "gtest/gtest.h"
namespace {
class DummyTest : public ::testing::TestWithParam<const char *> {};
-TEST_P(DummyTest, Dummy) {
-}
+TEST_P(DummyTest, Dummy) {}
-INSTANTIATE_TEST_SUITE_P(InvalidTestName,
- DummyTest,
+INSTANTIATE_TEST_SUITE_P(InvalidTestName, DummyTest,
::testing::Values("InvalidWithQuotes"),
::testing::PrintToStringParamName());
@@ -47,4 +44,3 @@ int main(int argc, char *argv[]) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
-
diff --git a/googletest/test/googletest-param-test-invalid-name2-test.py b/googletest/test/googletest-param-test-invalid-name2-test.py
index ab838f46..bcd8ddf0 100644
--- a/googletest/test/googletest-param-test-invalid-name2-test.py
+++ b/googletest/test/googletest-param-test-invalid-name2-test.py
@@ -30,7 +30,7 @@
"""Verifies that Google Test warns the user when not initialized properly."""
-import gtest_test_utils
+from googletest.test import gtest_test_utils
binary_name = 'googletest-param-test-invalid-name2-test_'
COMMAND = gtest_test_utils.GetTestExecutablePath(binary_name)
@@ -44,7 +44,7 @@ def Assert(condition):
def TestExitCodeAndOutput(command):
"""Runs the given command and verifies its exit code and output."""
- err = ('Duplicate parameterized test name \'a\'')
+ err = "Duplicate parameterized test name 'a'"
p = gtest_test_utils.Subprocess(command)
Assert(p.terminated_by_signal)
@@ -58,5 +58,6 @@ class GTestParamTestInvalidName2Test(gtest_test_utils.TestCase):
def testExitCodeAndOutput(self):
TestExitCodeAndOutput(COMMAND)
+
if __name__ == '__main__':
gtest_test_utils.Main()
diff --git a/googletest/test/googletest-param-test-invalid-name2-test_.cc b/googletest/test/googletest-param-test-invalid-name2-test_.cc
index 76371df5..6d88a9ce 100644
--- a/googletest/test/googletest-param-test-invalid-name2-test_.cc
+++ b/googletest/test/googletest-param-test-invalid-name2-test_.cc
@@ -27,6 +27,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#include <string>
#include "gtest/gtest.h"
@@ -34,15 +35,13 @@ namespace {
class DummyTest : public ::testing::TestWithParam<const char *> {};
std::string StringParamTestSuffix(
- const testing::TestParamInfo<const char*>& info) {
+ const testing::TestParamInfo<const char *> &info) {
return std::string(info.param);
}
-TEST_P(DummyTest, Dummy) {
-}
+TEST_P(DummyTest, Dummy) {}
-INSTANTIATE_TEST_SUITE_P(DuplicateTestNames,
- DummyTest,
+INSTANTIATE_TEST_SUITE_P(DuplicateTestNames, DummyTest,
::testing::Values("a", "b", "a", "c"),
StringParamTestSuffix);
} // namespace
@@ -51,5 +50,3 @@ int main(int argc, char *argv[]) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
-
-
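Both invalid-name binaries die on the runtime checks that generated test names be valid identifiers (alphanumeric characters and underscores only) and unique within the instantiation. A minimal sketch of a name function that satisfies both rules (hypothetical suffix scheme; duplicate parameter values are fine as long as the names differ):

#include <string>

#include "gtest/gtest.h"

class WordTest : public testing::TestWithParam<const char*> {};

TEST_P(WordTest, IsNonEmpty) { EXPECT_NE(std::string(GetParam()), ""); }

// Index-based suffixes are always alphanumeric and always unique, unlike
// the quoted and duplicated names exercised by the two tests above.
std::string IndexedName(const testing::TestParamInfo<const char*>& info) {
  return "Word" + std::to_string(info.index);
}

INSTANTIATE_TEST_SUITE_P(Valid, WordTest, testing::Values("a", "b", "a"),
                         IndexedName);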
diff --git a/googletest/test/googletest-param-test-test.cc b/googletest/test/googletest-param-test-test.cc
index 023aa46d..c9c5e78e 100644
--- a/googletest/test/googletest-param-test-test.cc
+++ b/googletest/test/googletest-param-test-test.cc
@@ -32,25 +32,27 @@
// generators objects produce correct parameter sequences and that
// Google Test runtime instantiates correct tests from those sequences.
-#include "gtest/gtest.h"
+#include "test/googletest-param-test-test.h"
-# include <algorithm>
-# include <iostream>
-# include <list>
-# include <set>
-# include <sstream>
-# include <string>
-# include <vector>
+#include <algorithm>
+#include <iostream>
+#include <list>
+#include <set>
+#include <sstream>
+#include <string>
+#include <tuple>
+#include <vector>
-# include "src/gtest-internal-inl.h" // for UnitTestOptions
-# include "test/googletest-param-test-test.h"
+#include "gtest/gtest.h"
+#include "src/gtest-internal-inl.h" // for UnitTestOptions
-using ::std::vector;
using ::std::sort;
+using ::std::vector;
using ::testing::AddGlobalTestEnvironment;
using ::testing::Bool;
using ::testing::Combine;
+using ::testing::ConvertGenerator;
using ::testing::Message;
using ::testing::Range;
using ::testing::TestWithParam;
@@ -85,15 +87,14 @@ void VerifyGenerator(const ParamGenerator<T>& generator,
// We cannot use EXPECT_EQ() here as the values may be tuples,
// which don't support <<.
EXPECT_TRUE(expected_values[i] == *it)
- << "where i is " << i
- << ", expected_values[i] is " << PrintValue(expected_values[i])
- << ", *it is " << PrintValue(*it)
+ << "where i is " << i << ", expected_values[i] is "
+ << PrintValue(expected_values[i]) << ", *it is " << PrintValue(*it)
<< ", and 'it' is an iterator created with the copy constructor.\n";
++it;
}
EXPECT_TRUE(it == generator.end())
- << "At the presumed end of sequence when accessing via an iterator "
- << "created with the copy constructor.\n";
+ << "At the presumed end of sequence when accessing via an iterator "
+ << "created with the copy constructor.\n";
// Test the iterator assignment. The following lines verify that
// the sequence accessed via an iterator initialized via the
@@ -105,15 +106,14 @@ void VerifyGenerator(const ParamGenerator<T>& generator,
<< "At element " << i << " when accessing via an iterator "
<< "created with the assignment operator.\n";
EXPECT_TRUE(expected_values[i] == *it)
- << "where i is " << i
- << ", expected_values[i] is " << PrintValue(expected_values[i])
- << ", *it is " << PrintValue(*it)
+ << "where i is " << i << ", expected_values[i] is "
+ << PrintValue(expected_values[i]) << ", *it is " << PrintValue(*it)
<< ", and 'it' is an iterator created with the copy constructor.\n";
++it;
}
EXPECT_TRUE(it == generator.end())
- << "At the presumed end of sequence when accessing via an iterator "
- << "created with the assignment operator.\n";
+ << "At the presumed end of sequence when accessing via an iterator "
+ << "created with the assignment operator.\n";
}
template <typename T>
@@ -216,18 +216,15 @@ class DogAdder {
DogAdder(const DogAdder& other) : value_(other.value_.c_str()) {}
DogAdder operator=(const DogAdder& other) {
- if (this != &other)
- value_ = other.value_;
+ if (this != &other) value_ = other.value_;
return *this;
}
DogAdder operator+(const DogAdder& other) const {
Message msg;
- msg << value_.c_str() << other.value_.c_str();
+ msg << value_ << other.value_;
return DogAdder(msg.GetString().c_str());
}
- bool operator<(const DogAdder& other) const {
- return value_ < other.value_;
- }
+ bool operator<(const DogAdder& other) const { return value_ < other.value_; }
const std::string& value() const { return value_; }
private:
@@ -372,19 +369,17 @@ TEST(ValuesTest, ValuesWorksForValuesOfCompatibleTypes) {
}
TEST(ValuesTest, ValuesWorksForMaxLengthList) {
- const ParamGenerator<int> gen = Values(
- 10, 20, 30, 40, 50, 60, 70, 80, 90, 100,
- 110, 120, 130, 140, 150, 160, 170, 180, 190, 200,
- 210, 220, 230, 240, 250, 260, 270, 280, 290, 300,
- 310, 320, 330, 340, 350, 360, 370, 380, 390, 400,
- 410, 420, 430, 440, 450, 460, 470, 480, 490, 500);
+ const ParamGenerator<int> gen =
+ Values(10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150,
+ 160, 170, 180, 190, 200, 210, 220, 230, 240, 250, 260, 270, 280,
+ 290, 300, 310, 320, 330, 340, 350, 360, 370, 380, 390, 400, 410,
+ 420, 430, 440, 450, 460, 470, 480, 490, 500);
const int expected_values[] = {
- 10, 20, 30, 40, 50, 60, 70, 80, 90, 100,
- 110, 120, 130, 140, 150, 160, 170, 180, 190, 200,
- 210, 220, 230, 240, 250, 260, 270, 280, 290, 300,
- 310, 320, 330, 340, 350, 360, 370, 380, 390, 400,
- 410, 420, 430, 440, 450, 460, 470, 480, 490, 500};
+ 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130,
+ 140, 150, 160, 170, 180, 190, 200, 210, 220, 230, 240, 250, 260,
+ 270, 280, 290, 300, 310, 320, 330, 340, 350, 360, 370, 380, 390,
+ 400, 410, 420, 430, 440, 450, 460, 470, 480, 490, 500};
VerifyGenerator(gen, expected_values);
}
@@ -409,7 +404,7 @@ TEST(BoolTest, BoolWorks) {
TEST(CombineTest, CombineWithTwoParameters) {
const char* foo = "foo";
const char* bar = "bar";
- const ParamGenerator<std::tuple<const char*, int> > gen =
+ const ParamGenerator<std::tuple<const char*, int>> gen =
Combine(Values(foo, bar), Values(3, 4));
std::tuple<const char*, int> expected_values[] = {
@@ -420,7 +415,7 @@ TEST(CombineTest, CombineWithTwoParameters) {
// Tests that Combine() with three parameters generates the expected sequence.
TEST(CombineTest, CombineWithThreeParameters) {
- const ParamGenerator<std::tuple<int, int, int> > gen =
+ const ParamGenerator<std::tuple<int, int, int>> gen =
Combine(Values(0, 1), Values(3, 4), Values(5, 6));
std::tuple<int, int, int> expected_values[] = {
std::make_tuple(0, 3, 5), std::make_tuple(0, 3, 6),
@@ -434,7 +429,7 @@ TEST(CombineTest, CombineWithThreeParameters) {
// sequence generates a sequence with the number of elements equal to the
// number of elements in the sequence generated by the second parameter.
TEST(CombineTest, CombineWithFirstParameterSingleValue) {
- const ParamGenerator<std::tuple<int, int> > gen =
+ const ParamGenerator<std::tuple<int, int>> gen =
Combine(Values(42), Values(0, 1));
std::tuple<int, int> expected_values[] = {std::make_tuple(42, 0),
@@ -446,7 +441,7 @@ TEST(CombineTest, CombineWithFirstParameterSingleValue) {
// sequence generates a sequence with the number of elements equal to the
// number of elements in the sequence generated by the first parameter.
TEST(CombineTest, CombineWithSecondParameterSingleValue) {
- const ParamGenerator<std::tuple<int, int> > gen =
+ const ParamGenerator<std::tuple<int, int>> gen =
Combine(Values(0, 1), Values(42));
std::tuple<int, int> expected_values[] = {std::make_tuple(0, 42),
@@ -457,7 +452,7 @@ TEST(CombineTest, CombineWithSecondParameterSingleValue) {
// Tests that when the first parameter produces an empty sequence,
// Combine() produces an empty sequence, too.
TEST(CombineTest, CombineWithFirstParameterEmptyRange) {
- const ParamGenerator<std::tuple<int, int> > gen =
+ const ParamGenerator<std::tuple<int, int>> gen =
Combine(Range(0, 0), Values(0, 1));
VerifyGeneratorIsEmpty(gen);
}
@@ -465,7 +460,7 @@ TEST(CombineTest, CombineWithFirstParameterEmptyRange) {
// Tests that when the second parameter produces an empty sequence,
// Combine() produces an empty sequence, too.
TEST(CombineTest, CombineWithSecondParameterEmptyRange) {
- const ParamGenerator<std::tuple<int, int> > gen =
+ const ParamGenerator<std::tuple<int, int>> gen =
Combine(Values(0, 1), Range(1, 1));
VerifyGeneratorIsEmpty(gen);
}
@@ -476,7 +471,7 @@ TEST(CombineTest, CombineWithMaxNumberOfParameters) {
const char* foo = "foo";
const char* bar = "bar";
const ParamGenerator<
- std::tuple<const char*, int, int, int, int, int, int, int, int, int> >
+ std::tuple<const char*, int, int, int, int, int, int, int, int, int>>
gen =
Combine(Values(foo, bar), Values(1), Values(2), Values(3), Values(4),
Values(5), Values(6), Values(7), Values(8), Values(9));
@@ -504,11 +499,11 @@ class NonDefaultConstructAssignString {
};
TEST(CombineTest, NonDefaultConstructAssign) {
- const ParamGenerator<std::tuple<int, NonDefaultConstructAssignString> > gen =
+ const ParamGenerator<std::tuple<int, NonDefaultConstructAssignString>> gen =
Combine(Values(0, 1), Values(NonDefaultConstructAssignString("A"),
NonDefaultConstructAssignString("B")));
- ParamGenerator<std::tuple<int, NonDefaultConstructAssignString> >::iterator
+ ParamGenerator<std::tuple<int, NonDefaultConstructAssignString>>::iterator
it = gen.begin();
EXPECT_EQ(0, std::get<0>(*it));
@@ -530,6 +525,63 @@ TEST(CombineTest, NonDefaultConstructAssign) {
EXPECT_TRUE(it == gen.end());
}
+template <typename T>
+class ConstructFromT {
+ public:
+ explicit ConstructFromT(const T& t) : t_(t) {}
+ template <typename... Args,
+ typename std::enable_if<sizeof...(Args) != 1, int>::type = 0>
+ ConstructFromT(Args&&... args) : t_(std::forward<Args>(args)...) {}
+
+ bool operator==(const ConstructFromT& other) const { return other.t_ == t_; }
+
+ const T& get() const { return t_; }
+
+ private:
+ T t_;
+};
+
+TEST(ConvertTest, CombineWithTwoParameters) {
+ const char* foo = "foo";
+ const char* bar = "bar";
+ const ParamGenerator<ConstructFromT<std::tuple<const char*, int>>> gen =
+ ConvertGenerator<std::tuple<const char*, int>>(
+ Combine(Values(foo, bar), Values(3, 4)));
+
+ ConstructFromT<std::tuple<const char*, int>> expected_values[] = {
+ {foo, 3}, {foo, 4}, {bar, 3}, {bar, 4}};
+ VerifyGenerator(gen, expected_values);
+}
+
+TEST(ConvertTest, NonDefaultConstructAssign) {
+ const ParamGenerator<
+ ConstructFromT<std::tuple<int, NonDefaultConstructAssignString>>>
+ gen = ConvertGenerator<std::tuple<int, NonDefaultConstructAssignString>>(
+ Combine(Values(0, 1), Values(NonDefaultConstructAssignString("A"),
+ NonDefaultConstructAssignString("B"))));
+
+ ParamGenerator<ConstructFromT<
+ std::tuple<int, NonDefaultConstructAssignString>>>::iterator it =
+ gen.begin();
+
+ EXPECT_EQ(0, std::get<0>(it->get()));
+ EXPECT_EQ("A", std::get<1>(it->get()).str());
+ ++it;
+
+ EXPECT_EQ(0, std::get<0>(it->get()));
+ EXPECT_EQ("B", std::get<1>(it->get()).str());
+ ++it;
+
+ EXPECT_EQ(1, std::get<0>(it->get()));
+ EXPECT_EQ("A", std::get<1>(it->get()).str());
+ ++it;
+
+ EXPECT_EQ(1, std::get<0>(it->get()));
+ EXPECT_EQ("B", std::get<1>(it->get()).str());
+ ++it;
+
+ EXPECT_TRUE(it == gen.end());
+}
// Tests that a generator produces the correct sequence after being
// assigned from another generator.
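ConvertGenerator, exercised by the new ConvertTest cases above, adapts an existing generator by constructing the test's parameter type from each generated value; here each tuple from Combine() feeds ConstructFromT's converting constructor. A minimal sketch with a plain struct (hypothetical Point type):

#include <tuple>

#include "gtest/gtest.h"

struct Point {
  int x;
  int y;
  // Converting constructor consumed by ConvertGenerator below.
  Point(std::tuple<int, int> t) : x(std::get<0>(t)), y(std::get<1>(t)) {}
};

class PointTest : public testing::TestWithParam<Point> {};

TEST_P(PointTest, CoordinatesInRange) {
  EXPECT_GE(GetParam().x, 0);
  EXPECT_GE(GetParam().y, 0);
}

INSTANTIATE_TEST_SUITE_P(Grid, PointTest,
                         testing::ConvertGenerator<std::tuple<int, int>>(
                             testing::Combine(testing::Values(0, 1),
                                              testing::Values(0, 1))));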
@@ -573,7 +625,7 @@ class TestGenerationEnvironment : public ::testing::Environment {
Message msg;
msg << "TestsExpandedAndRun/" << i;
if (UnitTestOptions::FilterMatchesTest(
- "TestExpansionModule/MultipleTestGenerationTest",
+ "TestExpansionModule/MultipleTestGenerationTest",
msg.GetString().c_str())) {
perform_check = true;
}
@@ -595,15 +647,20 @@ class TestGenerationEnvironment : public ::testing::Environment {
}
private:
- TestGenerationEnvironment() : fixture_constructor_count_(0), set_up_count_(0),
- tear_down_count_(0), test_body_count_(0) {}
+ TestGenerationEnvironment()
+ : fixture_constructor_count_(0),
+ set_up_count_(0),
+ tear_down_count_(0),
+ test_body_count_(0) {}
int fixture_constructor_count_;
int set_up_count_;
int tear_down_count_;
int test_body_count_;
- GTEST_DISALLOW_COPY_AND_ASSIGN_(TestGenerationEnvironment);
+ TestGenerationEnvironment(const TestGenerationEnvironment&) = delete;
+ TestGenerationEnvironment& operator=(const TestGenerationEnvironment&) =
+ delete;
};
const int test_generation_params[] = {36, 42, 72};
@@ -612,7 +669,7 @@ class TestGenerationTest : public TestWithParam<int> {
public:
enum {
PARAMETER_COUNT =
- sizeof(test_generation_params)/sizeof(test_generation_params[0])
+ sizeof(test_generation_params) / sizeof(test_generation_params[0])
};
typedef TestGenerationEnvironment<PARAMETER_COUNT> Environment;
@@ -636,9 +693,9 @@ class TestGenerationTest : public TestWithParam<int> {
for (int i = 0; i < PARAMETER_COUNT; ++i) {
Message test_name;
test_name << "TestsExpandedAndRun/" << i;
- if ( !UnitTestOptions::FilterMatchesTest(
- "TestExpansionModule/MultipleTestGenerationTest",
- test_name.GetString())) {
+ if (!UnitTestOptions::FilterMatchesTest(
+ "TestExpansionModule/MultipleTestGenerationTest",
+ test_name.GetString())) {
all_tests_in_test_case_selected = false;
}
}
@@ -668,7 +725,8 @@ class TestGenerationTest : public TestWithParam<int> {
static vector<int> collected_parameters_;
private:
- GTEST_DISALLOW_COPY_AND_ASSIGN_(TestGenerationTest);
+ TestGenerationTest(const TestGenerationTest&) = delete;
+ TestGenerationTest& operator=(const TestGenerationTest&) = delete;
};
vector<int> TestGenerationTest::collected_parameters_;
@@ -729,8 +787,7 @@ TEST_P(ExternalInstantiationTest, IsMultipleOf33) {
// Tests that a parameterized test case can be instantiated with multiple
// generators.
class MultipleInstantiationTest : public TestWithParam<int> {};
-TEST_P(MultipleInstantiationTest, AllowsMultipleInstances) {
-}
+TEST_P(MultipleInstantiationTest, AllowsMultipleInstances) {}
INSTANTIATE_TEST_SUITE_P(Sequence1, MultipleInstantiationTest, Values(1, 2));
INSTANTIATE_TEST_SUITE_P(Sequence2, MultipleInstantiationTest, Range(3, 5));
@@ -780,7 +837,7 @@ class NamingTest : public TestWithParam<int> {};
TEST_P(NamingTest, TestsReportCorrectNamesAndParameters) {
const ::testing::TestInfo* const test_info =
- ::testing::UnitTest::GetInstance()->current_test_info();
+ ::testing::UnitTest::GetInstance()->current_test_info();
EXPECT_STREQ("ZeroToFiveSequence/NamingTest", test_info->test_suite_name());
@@ -801,7 +858,7 @@ class MacroNamingTest : public TestWithParam<int> {};
TEST_P(PREFIX_WITH_MACRO(NamingTest), PREFIX_WITH_FOO(SomeTestName)) {
const ::testing::TestInfo* const test_info =
- ::testing::UnitTest::GetInstance()->current_test_info();
+ ::testing::UnitTest::GetInstance()->current_test_info();
EXPECT_STREQ("FortyTwo/MacroNamingTest", test_info->test_suite_name());
EXPECT_STREQ("FooSomeTestName/0", test_info->name());
@@ -815,7 +872,7 @@ class MacroNamingTestNonParametrized : public ::testing::Test {};
TEST_F(PREFIX_WITH_MACRO(NamingTestNonParametrized),
PREFIX_WITH_FOO(SomeTestName)) {
const ::testing::TestInfo* const test_info =
- ::testing::UnitTest::GetInstance()->current_test_info();
+ ::testing::UnitTest::GetInstance()->current_test_info();
EXPECT_STREQ("MacroNamingTestNonParametrized", test_info->test_suite_name());
EXPECT_STREQ("FooSomeTestName", test_info->name());
@@ -824,7 +881,7 @@ TEST_F(PREFIX_WITH_MACRO(NamingTestNonParametrized),
TEST(MacroNameing, LookupNames) {
std::set<std::string> know_suite_names, know_test_names;
- auto ins = testing::UnitTest::GetInstance();
+ const auto& ins = testing::UnitTest::GetInstance();
int ts = 0;
while (const testing::TestSuite* suite = ins->GetTestSuite(ts++)) {
know_suite_names.insert(suite->name());
@@ -839,9 +896,8 @@ TEST(MacroNameing, LookupNames) {
EXPECT_NE( //
know_suite_names.find("FortyTwo/MacroNamingTest"),
know_suite_names.end());
- EXPECT_NE(
- know_suite_names.find("MacroNamingTestNonParametrized"),
- know_suite_names.end());
+ EXPECT_NE(know_suite_names.find("MacroNamingTestNonParametrized"),
+ know_suite_names.end());
// Check that the expected form of the test name actually exists.
EXPECT_NE( //
know_test_names.find("FortyTwo/MacroNamingTest.FooSomeTestName/0"),
@@ -901,7 +957,7 @@ INSTANTIATE_TEST_SUITE_P(CustomParamNameLambda, CustomLambdaNamingTest,
});
TEST(CustomNamingTest, CheckNameRegistry) {
- ::testing::UnitTest* unit_test = ::testing::UnitTest::GetInstance();
+ const auto& unit_test = ::testing::UnitTest::GetInstance();
std::set<std::string> test_names;
for (int suite_num = 0; suite_num < unit_test->total_test_suite_count();
++suite_num) {
@@ -924,7 +980,7 @@ class CustomIntegerNamingTest : public TestWithParam<int> {};
TEST_P(CustomIntegerNamingTest, TestsReportCorrectNames) {
const ::testing::TestInfo* const test_info =
- ::testing::UnitTest::GetInstance()->current_test_info();
+ ::testing::UnitTest::GetInstance()->current_test_info();
Message test_name_stream;
test_name_stream << "TestsReportCorrectNames/" << GetParam();
EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name());
@@ -949,7 +1005,7 @@ class CustomStructNamingTest : public TestWithParam<CustomStruct> {};
TEST_P(CustomStructNamingTest, TestsReportCorrectNames) {
const ::testing::TestInfo* const test_info =
- ::testing::UnitTest::GetInstance()->current_test_info();
+ ::testing::UnitTest::GetInstance()->current_test_info();
Message test_name_stream;
test_name_stream << "TestsReportCorrectNames/" << GetParam();
EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name());
@@ -979,7 +1035,7 @@ class StatefulNamingTest : public ::testing::TestWithParam<int> {
TEST_P(StatefulNamingTest, TestsReportCorrectNames) {
const ::testing::TestInfo* const test_info =
- ::testing::UnitTest::GetInstance()->current_test_info();
+ ::testing::UnitTest::GetInstance()->current_test_info();
sum_ += GetParam();
Message test_name_stream;
test_name_stream << "TestsReportCorrectNames/" << sum_;
@@ -1007,7 +1063,7 @@ class CommentTest : public TestWithParam<Unstreamable> {};
TEST_P(CommentTest, TestsCorrectlyReportUnstreamableParams) {
const ::testing::TestInfo* const test_info =
- ::testing::UnitTest::GetInstance()->current_test_info();
+ ::testing::UnitTest::GetInstance()->current_test_info();
EXPECT_EQ(::testing::PrintToString(GetParam()), test_info->value_param());
}
@@ -1021,7 +1077,8 @@ INSTANTIATE_TEST_SUITE_P(InstantiationWithComments, CommentTest,
// perform simple tests on both.
class NonParameterizedBaseTest : public ::testing::Test {
public:
- NonParameterizedBaseTest() : n_(17) { }
+ NonParameterizedBaseTest() : n_(17) {}
+
protected:
int n_;
};
@@ -1029,16 +1086,14 @@ class NonParameterizedBaseTest : public ::testing::Test {
class ParameterizedDerivedTest : public NonParameterizedBaseTest,
public ::testing::WithParamInterface<int> {
protected:
- ParameterizedDerivedTest() : count_(0) { }
+ ParameterizedDerivedTest() : count_(0) {}
int count_;
static int global_count_;
};
int ParameterizedDerivedTest::global_count_ = 0;
-TEST_F(NonParameterizedBaseTest, FixtureIsInitialized) {
- EXPECT_EQ(17, n_);
-}
+TEST_F(NonParameterizedBaseTest, FixtureIsInitialized) { EXPECT_EQ(17, n_); }
TEST_P(ParameterizedDerivedTest, SeesSequence) {
EXPECT_EQ(17, n_);
@@ -1046,11 +1101,10 @@ TEST_P(ParameterizedDerivedTest, SeesSequence) {
EXPECT_EQ(GetParam(), global_count_++);
}
-class ParameterizedDeathTest : public ::testing::TestWithParam<int> { };
+class ParameterizedDeathTest : public ::testing::TestWithParam<int> {};
TEST_F(ParameterizedDeathTest, GetParamDiesFromTestF) {
- EXPECT_DEATH_IF_SUPPORTED(GetParam(),
- ".* value-parameterized test .*");
+ EXPECT_DEATH_IF_SUPPORTED(GetParam(), ".* value-parameterized test .*");
}
INSTANTIATE_TEST_SUITE_P(RangeZeroToFive, ParameterizedDerivedTest,
@@ -1084,11 +1138,11 @@ class NotInstantiatedTest : public testing::TestWithParam<int> {};
// ... we mark it as allowed.
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(NotInstantiatedTest);
-TEST_P(NotInstantiatedTest, Used) { }
+TEST_P(NotInstantiatedTest, Used) {}
using OtherName = NotInstantiatedTest;
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(OtherName);
-TEST_P(OtherName, Used) { }
+TEST_P(OtherName, Used) {}
// Used but not instantiated, this would fail, but...
template <typename T>
@@ -1097,11 +1151,11 @@ TYPED_TEST_SUITE_P(NotInstantiatedTypeTest);
// ... we mark it as allowed.
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(NotInstantiatedTypeTest);
-TYPED_TEST_P(NotInstantiatedTypeTest, Used) { }
+TYPED_TEST_P(NotInstantiatedTypeTest, Used) {}
REGISTER_TYPED_TEST_SUITE_P(NotInstantiatedTypeTest, Used);
} // namespace works_here
-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
// Used in TestGenerationTest test suite.
AddGlobalTestEnvironment(TestGenerationTest::Environment::Instance());
// Used in GeneratorEvaluationTest test suite. Tests that the updated value
diff --git a/googletest/test/googletest-param-test-test.h b/googletest/test/googletest-param-test-test.h
index 89193753..6d77e104 100644
--- a/googletest/test/googletest-param-test-test.h
+++ b/googletest/test/googletest-param-test-test.h
@@ -39,13 +39,11 @@
// Test fixture for testing definition and instantiation of a test
// in separate translation units.
-class ExternalInstantiationTest : public ::testing::TestWithParam<int> {
-};
+class ExternalInstantiationTest : public ::testing::TestWithParam<int> {};
// Test fixture for testing instantiation of a test in multiple
// translation units.
class InstantiationInMultipleTranslationUnitsTest
- : public ::testing::TestWithParam<int> {
-};
+ : public ::testing::TestWithParam<int> {};
#endif // GOOGLETEST_TEST_GOOGLETEST_PARAM_TEST_TEST_H_
diff --git a/googletest/test/googletest-param-test2-test.cc b/googletest/test/googletest-param-test2-test.cc
index 2a29fb1d..71727a67 100644
--- a/googletest/test/googletest-param-test2-test.cc
+++ b/googletest/test/googletest-param-test2-test.cc
@@ -46,8 +46,7 @@ ParamGenerator<int> extern_gen = Values(33);
// and instantiated in another. The test is defined in
// googletest-param-test-test.cc and the ExternalInstantiationTest fixture class is
// defined in gtest-param-test_test.h.
-INSTANTIATE_TEST_SUITE_P(MultiplesOf33,
- ExternalInstantiationTest,
+INSTANTIATE_TEST_SUITE_P(MultiplesOf33, ExternalInstantiationTest,
Values(33, 66));
// Tests that a parameterized test case can be instantiated
@@ -55,7 +54,5 @@ INSTANTIATE_TEST_SUITE_P(MultiplesOf33,
// in googletest-param-test-test.cc and the
// InstantiationInMultipleTranslationUnitsTest fixture is defined in
// gtest-param-test_test.h.
-INSTANTIATE_TEST_SUITE_P(Sequence2,
- InstantiationInMultipleTranslationUnitsTest,
- Values(42*3, 42*4, 42*5));
-
+INSTANTIATE_TEST_SUITE_P(Sequence2, InstantiationInMultipleTranslationUnitsTest,
+ Values(42 * 3, 42 * 4, 42 * 5));
diff --git a/googletest/test/googletest-port-test.cc b/googletest/test/googletest-port-test.cc
index 1e0c8616..8d210260 100644
--- a/googletest/test/googletest-port-test.cc
+++ b/googletest/test/googletest-port-test.cc
@@ -32,17 +32,20 @@
#include "gtest/internal/gtest-port.h"
-#if GTEST_OS_MAC
-# include <time.h>
+#ifdef GTEST_OS_MAC
+#include <time.h>
#endif // GTEST_OS_MAC
+#include <chrono> // NOLINT
#include <list>
#include <memory>
+#include <string>
+#include <thread> // NOLINT
#include <utility> // For std::pair and std::make_pair.
#include <vector>
-#include "gtest/gtest.h"
#include "gtest/gtest-spi.h"
+#include "gtest/gtest.h"
#include "src/gtest-internal-inl.h"
using std::make_pair;
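
A note on the #if to #ifdef conversions in this file: the GTEST_OS_* and related porting macros are defined only on the matching platform. Testing them with a plain #if relies on undefined macros evaluating to 0, which is legal but trips -Wundef; #ifdef and defined() express the intent directly. A small illustration with hypothetical FEATURE_FOO and FEATURE_BAR macros:

// Style used before: relies on "#if UNDEFINED" evaluating to 0.
#if FEATURE_FOO  // Warns under -Wundef when FEATURE_FOO is not defined.
void enabled_code();
#endif

// Style used after: asks whether the macro is defined at all.
#ifdef FEATURE_FOO
void enabled_code();
#endif

// Multi-macro conditions use defined(), as in the hunks below:
#if defined(FEATURE_FOO) || defined(FEATURE_BAR)
void either_code();
#endif
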
@@ -94,7 +97,7 @@ class Base {
explicit Base(int n) : member_(n) {}
Base(const Base&) = default;
Base& operator=(const Base&) = default;
- virtual ~Base() {}
+ virtual ~Base() = default;
int member() { return member_; }
private:
@@ -236,8 +239,8 @@ TEST(GtestCheckSyntaxTest, WorksWithSwitch) {
}
switch (0)
- case 0:
- GTEST_CHECK_(true) << "Check failed in switch case";
+ case 0:
+ GTEST_CHECK_(true) << "Check failed in switch case";
}
// Verifies behavior of FormatFileLocation.
@@ -278,9 +281,11 @@ TEST(FormatCompilerIndependentFileLocationTest, FormatsUknownFileAndLine) {
EXPECT_EQ("unknown file", FormatCompilerIndependentFileLocation(nullptr, -1));
}
-#if GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_QNX || GTEST_OS_FUCHSIA || \
- GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD || \
- GTEST_OS_NETBSD || GTEST_OS_OPENBSD
+#if defined(GTEST_OS_LINUX) || defined(GTEST_OS_MAC) || \
+ defined(GTEST_OS_QNX) || defined(GTEST_OS_FUCHSIA) || \
+ defined(GTEST_OS_DRAGONFLY) || defined(GTEST_OS_FREEBSD) || \
+ defined(GTEST_OS_GNU_KFREEBSD) || defined(GTEST_OS_NETBSD) || \
+ defined(GTEST_OS_OPENBSD) || defined(GTEST_OS_GNU_HURD)
void* ThreadFunc(void* data) {
internal::Mutex* mutex = static_cast<internal::Mutex*>(data);
mutex->Lock();
@@ -289,36 +294,61 @@ void* ThreadFunc(void* data) {
}
TEST(GetThreadCountTest, ReturnsCorrectValue) {
- const size_t starting_count = GetThreadCount();
- pthread_t thread_id;
+ size_t starting_count;
+ size_t thread_count_after_create;
+ size_t thread_count_after_join = 0;
+
+ // We can't guarantee that no other thread was created or destroyed between
+ // any two calls to GetThreadCount(). We make multiple attempts, hoping that
+ // background noise is not constant and we would see the "right" values at
+ // some point.
+ for (int attempt = 0; attempt < 20; ++attempt) {
+ starting_count = GetThreadCount();
+ pthread_t thread_id;
+
+ internal::Mutex mutex;
+ {
+ internal::MutexLock lock(&mutex);
+ pthread_attr_t attr;
+ ASSERT_EQ(0, pthread_attr_init(&attr));
+ ASSERT_EQ(0, pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE));
- internal::Mutex mutex;
- {
- internal::MutexLock lock(&mutex);
- pthread_attr_t attr;
- ASSERT_EQ(0, pthread_attr_init(&attr));
- ASSERT_EQ(0, pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE));
-
- const int status = pthread_create(&thread_id, &attr, &ThreadFunc, &mutex);
- ASSERT_EQ(0, pthread_attr_destroy(&attr));
- ASSERT_EQ(0, status);
- EXPECT_EQ(starting_count + 1, GetThreadCount());
- }
+ const int status = pthread_create(&thread_id, &attr, &ThreadFunc, &mutex);
+ ASSERT_EQ(0, pthread_attr_destroy(&attr));
+ ASSERT_EQ(0, status);
- void* dummy;
- ASSERT_EQ(0, pthread_join(thread_id, &dummy));
+ thread_count_after_create = GetThreadCount();
+ }
- // The OS may not immediately report the updated thread count after
- // joining a thread, causing flakiness in this test. To counter that, we
- // wait for up to .5 seconds for the OS to report the correct value.
- for (int i = 0; i < 5; ++i) {
- if (GetThreadCount() == starting_count)
- break;
+ void* dummy;
+ ASSERT_EQ(0, pthread_join(thread_id, &dummy));
+
+ // Join before we decide whether we need to retry the test. Retry if an
+ // arbitrary other thread was created or destroyed in the meantime.
+ if (thread_count_after_create != starting_count + 1) continue;
+
+ // The OS may not immediately report the updated thread count after
+ // joining a thread, causing flakiness in this test. To counter that, we
+ // wait for up to .5 seconds for the OS to report the correct value.
+ bool thread_count_matches = false;
+ for (int i = 0; i < 5; ++i) {
+ thread_count_after_join = GetThreadCount();
+ if (thread_count_after_join == starting_count) {
+ thread_count_matches = true;
+ break;
+ }
+
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
+ }
- SleepMilliseconds(100);
+ // Retry if an arbitrary other thread was created or destroyed.
+ if (!thread_count_matches) continue;
+
+ break;
}
- EXPECT_EQ(starting_count, GetThreadCount());
+ EXPECT_EQ(thread_count_after_create, starting_count + 1);
+ EXPECT_EQ(thread_count_after_join, starting_count);
}
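
The rewritten thread-count test above follows a general pattern for asserting on values that background activity can perturb: sample, act, re-sample, and restart the whole attempt whenever unrelated noise is detected, asserting only on the final attempt. A stripped-down, self-contained sketch of that structure; CountSomething and DoTheThing are hypothetical stand-ins for GetThreadCount() and thread creation:

#include <cassert>
#include <cstddef>

// Hypothetical stand-ins for a noisy counter and the operation under test.
static size_t g_count = 0;
static size_t CountSomething() { return g_count; }
static void DoTheThing() { ++g_count; }

int main() {
  size_t before = 0;
  size_t after = 0;
  for (int attempt = 0; attempt < 20; ++attempt) {
    before = CountSomething();       // Sample.
    DoTheThing();                    // Act.
    after = CountSomething();        // Re-sample.
    if (after == before + 1) break;  // Clean sample; keep it.
    // Otherwise something unrelated changed the count; retry from scratch.
  }
  assert(after == before + 1);  // Assert only on the last attempt.
  return 0;
}
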
#else
TEST(GetThreadCountTest, ReturnsZeroWhenUnableToCountThreads) {
@@ -330,25 +360,27 @@ TEST(GtestCheckDeathTest, DiesWithCorrectOutputOnFailure) {
const bool a_false_condition = false;
const char regex[] =
#ifdef _MSC_VER
- "googletest-port-test\\.cc\\(\\d+\\):"
-#elif GTEST_USES_POSIX_RE
- "googletest-port-test\\.cc:[0-9]+"
+ "googletest-port-test\\.cc\\(\\d+\\):"
+#elif defined(GTEST_USES_POSIX_RE)
+ "googletest-port-test\\.cc:[0-9]+"
#else
- "googletest-port-test\\.cc:\\d+"
+ "googletest-port-test\\.cc:\\d+"
#endif // _MSC_VER
- ".*a_false_condition.*Extra info.*";
+ ".*a_false_condition.*Extra info.*";
EXPECT_DEATH_IF_SUPPORTED(GTEST_CHECK_(a_false_condition) << "Extra info",
regex);
}
-#if GTEST_HAS_DEATH_TEST
+#ifdef GTEST_HAS_DEATH_TEST
TEST(GtestCheckDeathTest, LivesSilentlyOnSuccess) {
- EXPECT_EXIT({
- GTEST_CHECK_(true) << "Extra info";
- ::std::cerr << "Success\n";
- exit(0); },
+ EXPECT_EXIT(
+ {
+ GTEST_CHECK_(true) << "Extra info";
+ ::std::cerr << "Success\n";
+ exit(0);
+ },
::testing::ExitedWithCode(0), "Success");
}
@@ -358,20 +390,16 @@ TEST(GtestCheckDeathTest, LivesSilentlyOnSuccess) {
// the platform. The test will produce compiler errors in case of failure.
// For simplicity, we only cover the most important platforms here.
TEST(RegexEngineSelectionTest, SelectsCorrectRegexEngine) {
-#if !GTEST_USES_PCRE
-# if GTEST_HAS_POSIX_RE
-
+#ifdef GTEST_HAS_ABSL
+ EXPECT_TRUE(GTEST_USES_RE2);
+#elif GTEST_HAS_POSIX_RE
EXPECT_TRUE(GTEST_USES_POSIX_RE);
-
-# else
-
+#else
EXPECT_TRUE(GTEST_USES_SIMPLE_RE);
-
-# endif
-#endif // !GTEST_USES_PCRE
+#endif
}
-#if GTEST_USES_POSIX_RE
+#ifdef GTEST_USES_POSIX_RE
template <typename Str>
class RETest : public ::testing::Test {};
@@ -390,15 +418,15 @@ TYPED_TEST(RETest, ImplicitConstructorWorks) {
const RE simple(TypeParam("hello"));
EXPECT_STREQ("hello", simple.pattern());
- const RE normal(TypeParam(".*(\\w+)"));
- EXPECT_STREQ(".*(\\w+)", normal.pattern());
+ const RE normal(TypeParam(".*([[:alnum:]_]+)"));
+ EXPECT_STREQ(".*([[:alnum:]_]+)", normal.pattern());
}
// Tests that RE's constructors reject invalid regular expressions.
TYPED_TEST(RETest, RejectsInvalidRegex) {
- EXPECT_NONFATAL_FAILURE({
- const RE invalid(TypeParam("?"));
- }, "\"?\" is not a valid POSIX Extended regular expression.");
+ EXPECT_NONFATAL_FAILURE(
+ { const RE invalid(TypeParam("?")); },
+ "\"?\" is not a valid POSIX Extended regular expression.");
}
// Tests RE::FullMatch().
@@ -428,7 +456,7 @@ TYPED_TEST(RETest, PartialMatchWorks) {
EXPECT_FALSE(RE::PartialMatch(TypeParam("zza"), re));
}
-#elif GTEST_USES_SIMPLE_RE
+#elif defined(GTEST_USES_SIMPLE_RE)
TEST(IsInSetTest, NulCharIsNotInAnySet) {
EXPECT_FALSE(IsInSet('\0', ""));
@@ -792,8 +820,7 @@ TEST(MatchRegexAtHeadTest, WorksWhenRegexStartsWithRepetition) {
EXPECT_TRUE(MatchRegexAtHead("a?b", "ab"));
}
-TEST(MatchRegexAtHeadTest,
- WorksWhenRegexStartsWithRepetionOfEscapeSequence) {
+TEST(MatchRegexAtHeadTest, WorksWhenRegexStartsWithRepetionOfEscapeSequence) {
EXPECT_FALSE(MatchRegexAtHead("\\.+a", "abc"));
EXPECT_FALSE(MatchRegexAtHead("\\s?b", " b"));
@@ -849,17 +876,14 @@ TEST(RETest, ImplicitConstructorWorks) {
// Tests that RE's constructors reject invalid regular expressions.
TEST(RETest, RejectsInvalidRegex) {
- EXPECT_NONFATAL_FAILURE({
- const RE normal(NULL);
- }, "NULL is not a valid simple regular expression");
+ EXPECT_NONFATAL_FAILURE({ const RE normal(NULL); },
+ "NULL is not a valid simple regular expression");
- EXPECT_NONFATAL_FAILURE({
- const RE normal(".*(\\w+");
- }, "'(' is unsupported");
+ EXPECT_NONFATAL_FAILURE({ const RE normal(".*(\\w+"); },
+ "'(' is unsupported");
- EXPECT_NONFATAL_FAILURE({
- const RE invalid("^?");
- }, "'?' can only follow a repeatable token");
+ EXPECT_NONFATAL_FAILURE({ const RE invalid("^?"); },
+ "'?' can only follow a repeatable token");
}
// Tests RE::FullMatch().
@@ -894,7 +918,7 @@ TEST(RETest, PartialMatchWorks) {
#endif // GTEST_USES_POSIX_RE
-#if !GTEST_OS_WINDOWS_MOBILE
+#ifndef GTEST_OS_WINDOWS_MOBILE
TEST(CaptureTest, CapturesStdout) {
CaptureStdout();
@@ -955,14 +979,14 @@ TEST(ThreadLocalTest, SingleParamConstructorInitializesToParam) {
EXPECT_EQ(&i, t2.get());
}
-class NoDefaultContructor {
+class NoDefaultConstructor {
public:
- explicit NoDefaultContructor(const char*) {}
- NoDefaultContructor(const NoDefaultContructor&) {}
+ explicit NoDefaultConstructor(const char*) {}
+ NoDefaultConstructor(const NoDefaultConstructor&) = default;
};
TEST(ThreadLocalTest, ValueDefaultContructorIsNotRequiredForParamVersion) {
- ThreadLocal<NoDefaultContructor> bar(NoDefaultContructor("foo"));
+ ThreadLocal<NoDefaultConstructor> bar(NoDefaultConstructor("foo"));
bar.pointer();
}
@@ -987,7 +1011,7 @@ TEST(ThreadLocalTest, PointerAndConstPointerReturnSameValue) {
EXPECT_EQ(thread_local_string.pointer(), const_thread_local_string.pointer());
}
-#if GTEST_IS_THREADSAFE
+#ifdef GTEST_IS_THREADSAFE
void AddTwo(int* param) { *param += 2; }
@@ -1001,12 +1025,13 @@ TEST(ThreadWithParamTest, ConstructorExecutesThreadFunc) {
TEST(MutexDeathTest, AssertHeldShouldAssertWhenNotLocked) {
// AssertHeld() is flaky only in the presence of multiple threads accessing
// the lock. In this case, the test is robust.
- EXPECT_DEATH_IF_SUPPORTED({
- Mutex m;
- { MutexLock lock(&m); }
- m.AssertHeld();
- },
- "thread .*hold");
+ EXPECT_DEATH_IF_SUPPORTED(
+ {
+ Mutex m;
+ { MutexLock lock(&m); }
+ m.AssertHeld();
+ },
+ "thread .*hold");
}
TEST(MutexTest, AssertHeldShouldNotAssertWhenLocked) {
@@ -1017,15 +1042,15 @@ TEST(MutexTest, AssertHeldShouldNotAssertWhenLocked) {
class AtomicCounterWithMutex {
public:
- explicit AtomicCounterWithMutex(Mutex* mutex) :
- value_(0), mutex_(mutex), random_(42) {}
+ explicit AtomicCounterWithMutex(Mutex* mutex)
+ : value_(0), mutex_(mutex), random_(42) {}
void Increment() {
MutexLock lock(mutex_);
int temp = value_;
{
// We need to put up a memory barrier to prevent reads and writes to
- // value_ rearranged with the call to SleepMilliseconds when observed
+      // value_ from being reordered around the call to sleep_for when observed
// from other threads.
#if GTEST_HAS_PTHREAD
// On POSIX, locking a mutex puts up a memory barrier. We cannot use
@@ -1036,18 +1061,20 @@ class AtomicCounterWithMutex {
pthread_mutex_init(&memory_barrier_mutex, nullptr));
GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_lock(&memory_barrier_mutex));
- SleepMilliseconds(static_cast<int>(random_.Generate(30)));
+ std::this_thread::sleep_for(
+ std::chrono::milliseconds(random_.Generate(30)));
GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_unlock(&memory_barrier_mutex));
GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_destroy(&memory_barrier_mutex));
-#elif GTEST_OS_WINDOWS
+#elif defined(GTEST_OS_WINDOWS)
// On Windows, performing an interlocked access puts up a memory barrier.
volatile LONG dummy = 0;
::InterlockedIncrement(&dummy);
- SleepMilliseconds(static_cast<int>(random_.Generate(30)));
+ std::this_thread::sleep_for(
+ std::chrono::milliseconds(random_.Generate(30)));
::InterlockedIncrement(&dummy);
#else
-# error "Memory barrier not implemented on this platform."
+#error "Memory barrier not implemented on this platform."
#endif // GTEST_HAS_PTHREAD
}
value_ = temp + 1;
@@ -1057,12 +1084,11 @@ class AtomicCounterWithMutex {
private:
volatile int value_;
Mutex* const mutex_; // Protects value_.
- Random random_;
+ Random random_;
};
void CountingThreadFunc(pair<AtomicCounterWithMutex*, int> param) {
- for (int i = 0; i < param.second; ++i)
- param.first->Increment();
+ for (int i = 0; i < param.second; ++i) param.first->Increment();
}
// Tests that the mutex only lets one thread at a time to lock it.
@@ -1078,14 +1104,12 @@ TEST(MutexTest, OnlyOneThreadCanLockAtATime) {
// Creates and runs kThreadCount threads that increment locked_counter
// kCycleCount times each.
for (int i = 0; i < kThreadCount; ++i) {
- counting_threads[i].reset(new ThreadType(&CountingThreadFunc,
- make_pair(&locked_counter,
- kCycleCount),
- &threads_can_start));
+ counting_threads[i] = std::make_unique<ThreadType>(
+ &CountingThreadFunc, make_pair(&locked_counter, kCycleCount),
+ &threads_can_start);
}
threads_can_start.Notify();
- for (int i = 0; i < kThreadCount; ++i)
- counting_threads[i]->Join();
+ for (int i = 0; i < kThreadCount; ++i) counting_threads[i]->Join();
  // If the mutex lets more than one thread increment the counter at a
// time, they are likely to encounter a race condition and have some
@@ -1095,7 +1119,7 @@ TEST(MutexTest, OnlyOneThreadCanLockAtATime) {
}
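
The counting_threads hunk above also swaps reset(new T(...)) for std::make_unique, the usual C++14 modernization: allocation and ownership transfer happen in one expression, with no raw new in sight. In isolation (Widget is a hypothetical type):

#include <memory>
#include <string>
#include <utility>

struct Widget {
  Widget(int id, std::string name) : id_(id), name_(std::move(name)) {}
  int id_;
  std::string name_;
};

int main() {
  std::unique_ptr<Widget> w;
  w.reset(new Widget(1, "before"));          // Old style, as removed above.
  w = std::make_unique<Widget>(2, "after");  // New style, as added above.
  return 0;
}
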
template <typename T>
-void RunFromThread(void (func)(T), T param) {
+void RunFromThread(void(func)(T), T param) {
ThreadWithParam<T> thread(func, param, nullptr);
thread.Join();
}
@@ -1124,14 +1148,14 @@ class DestructorCall {
public:
DestructorCall() {
invoked_ = false;
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
wait_event_.Reset(::CreateEvent(NULL, TRUE, FALSE, NULL));
GTEST_CHECK_(wait_event_.Get() != NULL);
#endif
}
bool CheckDestroyed() const {
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
if (::WaitForSingleObject(wait_event_.Get(), 1000) != WAIT_OBJECT_0)
return false;
#endif
@@ -1140,7 +1164,7 @@ class DestructorCall {
void ReportDestroyed() {
invoked_ = true;
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
::SetEvent(wait_event_.Get());
#endif
}
@@ -1156,12 +1180,13 @@ class DestructorCall {
private:
bool invoked_;
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
AutoHandle wait_event_;
#endif
static std::vector<DestructorCall*>* const list_;
- GTEST_DISALLOW_COPY_AND_ASSIGN_(DestructorCall);
+ DestructorCall(const DestructorCall&) = delete;
+ DestructorCall& operator=(const DestructorCall&) = delete;
};
std::vector<DestructorCall*>* const DestructorCall::list_ =
@@ -1255,12 +1280,12 @@ TEST(ThreadLocalTest, ThreadLocalMutationsAffectOnlyCurrentThread) {
#endif // GTEST_IS_THREADSAFE
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
TEST(WindowsTypesTest, HANDLEIsVoidStar) {
StaticAssertTypeEq<HANDLE, void*>();
}
-#if GTEST_OS_WINDOWS_MINGW && !defined(__MINGW64_VERSION_MAJOR)
+#if defined(GTEST_OS_WINDOWS_MINGW) && !defined(__MINGW64_VERSION_MAJOR)
TEST(WindowsTypesTest, _CRITICAL_SECTIONIs_CRITICAL_SECTION) {
StaticAssertTypeEq<CRITICAL_SECTION, _CRITICAL_SECTION>();
}
diff --git a/googletest/test/googletest-printers-test.cc b/googletest/test/googletest-printers-test.cc
index e1e8e1c7..d5061bef 100644
--- a/googletest/test/googletest-printers-test.cc
+++ b/googletest/test/googletest-printers-test.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// Google Test - The Google C++ Testing and Mocking Framework
//
// This file tests the universal value printer.
@@ -38,13 +37,16 @@
#include <cstring>
#include <deque>
#include <forward_list>
+#include <functional>
#include <limits>
#include <list>
#include <map>
#include <memory>
+#include <ostream>
#include <set>
#include <sstream>
#include <string>
+#include <tuple>
#include <unordered_map>
#include <unordered_set>
#include <utility>
@@ -52,34 +54,33 @@
#include "gtest/gtest-printers.h"
#include "gtest/gtest.h"
+#include "gtest/internal/gtest-port.h"
+
+#ifdef GTEST_HAS_ABSL
+#include "absl/strings/str_format.h"
+#endif
+
+#if GTEST_INTERNAL_HAS_STD_SPAN
+#include <span> // NOLINT
+#endif // GTEST_INTERNAL_HAS_STD_SPAN
// Some user-defined types for testing the universal value printer.
// An anonymous enum type.
-enum AnonymousEnum {
- kAE1 = -1,
- kAE2 = 1
-};
+enum AnonymousEnum { kAE1 = -1, kAE2 = 1 };
// An enum without a user-defined printer.
-enum EnumWithoutPrinter {
- kEWP1 = -2,
- kEWP2 = 42
-};
+enum EnumWithoutPrinter { kEWP1 = -2, kEWP2 = 42 };
// An enum with a << operator.
-enum EnumWithStreaming {
- kEWS1 = 10
-};
+enum EnumWithStreaming { kEWS1 = 10 };
std::ostream& operator<<(std::ostream& os, EnumWithStreaming e) {
return os << (e == kEWS1 ? "kEWS1" : "invalid");
}
// An enum with a PrintTo() function.
-enum EnumWithPrintTo {
- kEWPT1 = 1
-};
+enum EnumWithPrintTo { kEWPT1 = 1 };
void PrintTo(EnumWithPrintTo e, std::ostream* os) {
*os << (e == kEWPT1 ? "kEWPT1" : "invalid");
@@ -108,6 +109,7 @@ template <typename T>
class UnprintableTemplateInGlobal {
public:
UnprintableTemplateInGlobal() : value_() {}
+
private:
T value_;
};
@@ -115,7 +117,7 @@ class UnprintableTemplateInGlobal {
// A user-defined streamable type in the global namespace.
class StreamableInGlobal {
public:
- virtual ~StreamableInGlobal() {}
+ virtual ~StreamableInGlobal() = default;
};
inline void operator<<(::std::ostream& os, const StreamableInGlobal& /* x */) {
@@ -126,6 +128,19 @@ void operator<<(::std::ostream& os, const StreamableInGlobal* /* x */) {
os << "StreamableInGlobal*";
}
+#ifdef GTEST_HAS_ABSL
+// A user-defined type with AbslStringify
+struct Point {
+ template <typename Sink>
+ friend void AbslStringify(Sink& sink, const Point& p) {
+ absl::Format(&sink, "(%d, %d)", p.x, p.y);
+ }
+
+ int x = 10;
+ int y = 20;
+};
+#endif
+
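
Point above exercises the AbslStringify extension point: when googletest is built with Abseil, any type exposing a friend AbslStringify(Sink&, const T&) is printed through it, with no operator<< or PrintTo() required. A sketch for a user-defined type (Rect is a hypothetical name, and the behavior assumes an Abseil-enabled build):

#include "absl/strings/str_format.h"

struct Rect {
  int w = 3;
  int h = 4;

  // googletest's universal printer picks this up automatically.
  template <typename Sink>
  friend void AbslStringify(Sink& sink, const Rect& r) {
    absl::Format(&sink, "%dx%d", r.w, r.h);
  }
};

// With GTEST_HAS_ABSL, testing::PrintToString(Rect()) yields "3x4" rather
// than a raw byte dump, as the PrintClassTest.AbslStringify test below
// checks for Point.
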
namespace foo {
// A user-defined unprintable type in a user namespace.
@@ -133,6 +148,7 @@ class UnprintableInFoo {
public:
UnprintableInFoo() : z_(0) { memcpy(xy_, "\xEF\x12\x0\x0\x34\xAB\x0\x0", 8); }
double z() const { return z_; }
+
private:
char xy_[8];
double z_;
@@ -149,8 +165,7 @@ void PrintTo(const PrintableViaPrintTo& x, ::std::ostream* os) {
}
// A type with a user-defined << for printing its pointer.
-struct PointerPrintable {
-};
+struct PointerPrintable {};
::std::ostream& operator<<(::std::ostream& os,
const PointerPrintable* /* x */) {
@@ -164,6 +179,7 @@ class PrintableViaPrintToTemplate {
explicit PrintableViaPrintToTemplate(const T& a_value) : value_(a_value) {}
const T& value() const { return value_; }
+
private:
T value_;
};
@@ -180,6 +196,7 @@ class StreamableTemplateInFoo {
StreamableTemplateInFoo() : value_() {}
const T& value() const { return value_; }
+
private:
T value_;
};
@@ -201,6 +218,11 @@ OutputStream& operator<<(OutputStream& os,
return os;
}
+struct StreamableInLocal {};
+void operator<<(::std::ostream& os, const StreamableInLocal& /* x */) {
+ os << "StreamableInLocal";
+}
+
// A user-defined streamable but recursively-defined container type in
// a user namespace; it therefore mimics std::filesystem::path or
// boost::filesystem::path.
@@ -216,7 +238,7 @@ class PathLike {
using value_type = char;
using const_iterator = iterator;
- PathLike() {}
+ PathLike() = default;
iterator begin() const { return iterator(); }
iterator end() const { return iterator(); }
@@ -255,7 +277,6 @@ class UniversalPrinter<Wrapper<T>> {
};
} // namespace internal
-
namespace gtest_printers_test {
using ::std::deque;
@@ -269,7 +290,6 @@ using ::std::set;
using ::std::vector;
using ::testing::PrintToString;
using ::testing::internal::FormatForComparisonFailureMessage;
-using ::testing::internal::ImplicitCast_;
using ::testing::internal::NativeArray;
using ::testing::internal::RelationToSourceReference;
using ::testing::internal::Strings;
@@ -319,6 +339,11 @@ TEST(PrintEnumTest, EnumWithPrintTo) {
EXPECT_EQ("invalid", Print(static_cast<EnumWithPrintTo>(0)));
}
+#ifdef GTEST_HAS_ABSL
+// Tests printing a class that defines AbslStringify
+TEST(PrintClassTest, AbslStringify) { EXPECT_EQ("(10, 20)", Print(Point())); }
+#endif
+
// Tests printing a class implicitly convertible to BiggestInt.
TEST(PrintClassTest, BiggestIntConvertible) {
@@ -350,29 +375,21 @@ TEST(PrintCharTest, PlainChar) {
// signed char.
TEST(PrintCharTest, SignedChar) {
EXPECT_EQ("'\\0'", Print(static_cast<signed char>('\0')));
- EXPECT_EQ("'\\xCE' (-50)",
- Print(static_cast<signed char>(-50)));
+ EXPECT_EQ("'\\xCE' (-50)", Print(static_cast<signed char>(-50)));
}
// unsigned char.
TEST(PrintCharTest, UnsignedChar) {
EXPECT_EQ("'\\0'", Print(static_cast<unsigned char>('\0')));
- EXPECT_EQ("'b' (98, 0x62)",
- Print(static_cast<unsigned char>('b')));
+ EXPECT_EQ("'b' (98, 0x62)", Print(static_cast<unsigned char>('b')));
}
-TEST(PrintCharTest, Char16) {
- EXPECT_EQ("U+0041", Print(u'A'));
-}
+TEST(PrintCharTest, Char16) { EXPECT_EQ("U+0041", Print(u'A')); }
-TEST(PrintCharTest, Char32) {
- EXPECT_EQ("U+0041", Print(U'A'));
-}
+TEST(PrintCharTest, Char32) { EXPECT_EQ("U+0041", Print(U'A')); }
-#ifdef __cpp_char8_t
-TEST(PrintCharTest, Char8) {
- EXPECT_EQ("U+0041", Print(u8'A'));
-}
+#ifdef __cpp_lib_char8_t
+TEST(PrintCharTest, Char8) { EXPECT_EQ("U+0041", Print(u8'A')); }
#endif
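
The guard change from __cpp_char8_t to __cpp_lib_char8_t in this hunk distinguishes two C++20 feature-test macros: the former is the core-language macro (the char8_t type exists), the latter the library macro (std::u8string and the u8 overloads are available), which is the stricter gate when library facilities are also in play. A sketch of the two checks; the values in the comments are the ones the standard assigns:

#include <version>  // C++20: declares the __cpp_lib_* feature-test macros.

#ifdef __cpp_char8_t  // Core language, 201811L: the char8_t type itself.
using MyChar = char8_t;
#endif

#ifdef __cpp_lib_char8_t  // Library, 201907L: std::u8string and friends.
#include <string>
using MyU8String = std::u8string;
#endif
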
// Tests printing other simple, built-in types.
@@ -414,8 +431,8 @@ TEST(PrintTypeSizeTest, Wchar_t) {
TEST(PrintBuiltInTypeTest, Integer) {
EXPECT_EQ("'\\xFF' (255)", Print(static_cast<unsigned char>(255))); // uint8
EXPECT_EQ("'\\x80' (-128)", Print(static_cast<signed char>(-128))); // int8
- EXPECT_EQ("65535", Print(std::numeric_limits<uint16_t>::max())); // uint16
- EXPECT_EQ("-32768", Print(std::numeric_limits<int16_t>::min())); // int16
+ EXPECT_EQ("65535", Print(std::numeric_limits<uint16_t>::max())); // uint16
+ EXPECT_EQ("-32768", Print(std::numeric_limits<int16_t>::min())); // int16
EXPECT_EQ("4294967295",
Print(std::numeric_limits<uint32_t>::max())); // uint32
EXPECT_EQ("-2147483648",
@@ -424,7 +441,7 @@ TEST(PrintBuiltInTypeTest, Integer) {
Print(std::numeric_limits<uint64_t>::max())); // uint64
EXPECT_EQ("-9223372036854775808",
Print(std::numeric_limits<int64_t>::min())); // int64
-#ifdef __cpp_char8_t
+#ifdef __cpp_lib_char8_t
EXPECT_EQ("U+0000",
Print(std::numeric_limits<char8_t>::min())); // char8_t
EXPECT_EQ("U+00FF",
@@ -443,18 +460,54 @@ TEST(PrintBuiltInTypeTest, Integer) {
// Size types.
TEST(PrintBuiltInTypeTest, Size_t) {
EXPECT_EQ("1", Print(sizeof('a'))); // size_t.
-#if !GTEST_OS_WINDOWS
+#ifndef GTEST_OS_WINDOWS
// Windows has no ssize_t type.
EXPECT_EQ("-2", Print(static_cast<ssize_t>(-2))); // ssize_t.
-#endif // !GTEST_OS_WINDOWS
+#endif // !GTEST_OS_WINDOWS
}
+// gcc/clang __{u,}int128_t values.
+#if defined(__SIZEOF_INT128__)
+TEST(PrintBuiltInTypeTest, Int128) {
+ // Small ones
+ EXPECT_EQ("0", Print(__int128_t{0}));
+ EXPECT_EQ("0", Print(__uint128_t{0}));
+ EXPECT_EQ("12345", Print(__int128_t{12345}));
+ EXPECT_EQ("12345", Print(__uint128_t{12345}));
+ EXPECT_EQ("-12345", Print(__int128_t{-12345}));
+
+ // Large ones
+ EXPECT_EQ("340282366920938463463374607431768211455", Print(~__uint128_t{}));
+ __int128_t max_128 = static_cast<__int128_t>(~__uint128_t{} / 2);
+ EXPECT_EQ("-170141183460469231731687303715884105728", Print(~max_128));
+ EXPECT_EQ("170141183460469231731687303715884105727", Print(max_128));
+}
+#endif // __SIZEOF_INT128__
+
// Floating-points.
TEST(PrintBuiltInTypeTest, FloatingPoints) {
- EXPECT_EQ("1.5", Print(1.5f)); // float
+ // float (32-bit precision)
+ EXPECT_EQ("1.5", Print(1.5f));
+
+ EXPECT_EQ("1.0999999", Print(1.09999990f));
+ EXPECT_EQ("1.1", Print(1.10000002f));
+ EXPECT_EQ("1.10000014", Print(1.10000014f));
+ EXPECT_EQ("9e+09", Print(9e9f));
+
+ // double
EXPECT_EQ("-2.5", Print(-2.5)); // double
}
+#if GTEST_HAS_RTTI
+TEST(PrintBuiltInTypeTest, TypeInfo) {
+ struct MyStruct {};
+ auto res = Print(typeid(MyStruct{}));
+ // We can't guarantee that we can demangle the name, but either name should
+ // contain the substring "MyStruct".
+ EXPECT_NE(res.find("MyStruct"), res.npos) << res;
+}
+#endif // GTEST_HAS_RTTI
+
// Since ::std::stringstream::operator<<(const void *) formats the pointer
// output differently with different compilers, we have to create the expected
// output first and use it as our expectation.
@@ -488,12 +541,13 @@ TEST(PrintCStringTest, Null) {
// Tests that C strings are escaped properly.
TEST(PrintCStringTest, EscapesProperly) {
const char* p = "'\"?\\\a\b\f\n\r\t\v\x7F\xFF a";
- EXPECT_EQ(PrintPointer(p) + " pointing to \"'\\\"?\\\\\\a\\b\\f"
- "\\n\\r\\t\\v\\x7F\\xFF a\"",
+ EXPECT_EQ(PrintPointer(p) +
+ " pointing to \"'\\\"?\\\\\\a\\b\\f"
+ "\\n\\r\\t\\v\\x7F\\xFF a\"",
Print(p));
}
-#ifdef __cpp_char8_t
+#ifdef __cpp_lib_char8_t
// const char8_t*.
TEST(PrintU8StringTest, Const) {
const char8_t* p = u8"界";
@@ -608,10 +662,12 @@ TEST(PrintWideCStringTest, Null) {
// Tests that wide C strings are escaped properly.
TEST(PrintWideCStringTest, EscapesProperly) {
- const wchar_t s[] = {'\'', '"', '?', '\\', '\a', '\b', '\f', '\n', '\r',
- '\t', '\v', 0xD3, 0x576, 0x8D3, 0xC74D, ' ', 'a', '\0'};
- EXPECT_EQ(PrintPointer(s) + " pointing to L\"'\\\"?\\\\\\a\\b\\f"
- "\\n\\r\\t\\v\\xD3\\x576\\x8D3\\xC74D a\"",
+ const wchar_t s[] = {'\'', '"', '?', '\\', '\a', '\b',
+ '\f', '\n', '\r', '\t', '\v', 0xD3,
+ 0x576, 0x8D3, 0xC74D, ' ', 'a', '\0'};
+ EXPECT_EQ(PrintPointer(s) +
+ " pointing to L\"'\\\"?\\\\\\a\\b\\f"
+ "\\n\\r\\t\\v\\xD3\\x576\\x8D3\\xC74D a\"",
Print(static_cast<const wchar_t*>(s)));
}
#endif // native wchar_t
@@ -693,10 +749,9 @@ TEST(PrintPointerTest, NonMemberFunctionPointer) {
// standard disallows casting between pointers to functions and
// pointers to objects, and some compilers (e.g. GCC 3.4) enforce
// this limitation.
- EXPECT_EQ(
- PrintPointer(reinterpret_cast<const void*>(
- reinterpret_cast<internal::BiggestInt>(&MyFunction))),
- Print(&MyFunction));
+ EXPECT_EQ(PrintPointer(reinterpret_cast<const void*>(
+ reinterpret_cast<internal::BiggestInt>(&MyFunction))),
+ Print(&MyFunction));
int (*p)(bool) = NULL; // NOLINT
EXPECT_EQ("NULL", Print(p));
}
@@ -705,14 +760,13 @@ TEST(PrintPointerTest, NonMemberFunctionPointer) {
// another.
template <typename StringType>
AssertionResult HasPrefix(const StringType& str, const StringType& prefix) {
- if (str.find(prefix, 0) == 0)
- return AssertionSuccess();
+ if (str.find(prefix, 0) == 0) return AssertionSuccess();
const bool is_wide_string = sizeof(prefix[0]) > 1;
const char* const begin_string_quote = is_wide_string ? "L\"" : "\"";
return AssertionFailure()
- << begin_string_quote << prefix << "\" is not a prefix of "
- << begin_string_quote << str << "\"\n";
+ << begin_string_quote << prefix << "\" is not a prefix of "
+ << begin_string_quote << str << "\"\n";
}
// Tests printing member variable pointers. Although they are called
@@ -722,7 +776,7 @@ AssertionResult HasPrefix(const StringType& str, const StringType& prefix) {
struct Foo {
public:
- virtual ~Foo() {}
+ virtual ~Foo() = default;
int MyMethod(char x) { return x + 1; }
virtual char MyVirtualMethod(int /* n */) { return 'a'; }
@@ -733,8 +787,7 @@ TEST(PrintPointerTest, MemberVariablePointer) {
EXPECT_TRUE(HasPrefix(Print(&Foo::value),
Print(sizeof(&Foo::value)) + "-byte object "));
int Foo::*p = NULL; // NOLINT
- EXPECT_TRUE(HasPrefix(Print(p),
- Print(sizeof(p)) + "-byte object "));
+ EXPECT_TRUE(HasPrefix(Print(p), Print(sizeof(p)) + "-byte object "));
}
// Tests printing member function pointers. Although they are called
@@ -748,8 +801,7 @@ TEST(PrintPointerTest, MemberFunctionPointer) {
HasPrefix(Print(&Foo::MyVirtualMethod),
Print(sizeof((&Foo::MyVirtualMethod))) + "-byte object "));
int (Foo::*p)(char) = NULL; // NOLINT
- EXPECT_TRUE(HasPrefix(Print(p),
- Print(sizeof(p)) + "-byte object "));
+ EXPECT_TRUE(HasPrefix(Print(p), Print(sizeof(p)) + "-byte object "));
}
// Tests printing C arrays.
@@ -763,29 +815,26 @@ std::string PrintArrayHelper(T (&a)[N]) {
// One-dimensional array.
TEST(PrintArrayTest, OneDimensionalArray) {
- int a[5] = { 1, 2, 3, 4, 5 };
+ int a[5] = {1, 2, 3, 4, 5};
EXPECT_EQ("{ 1, 2, 3, 4, 5 }", PrintArrayHelper(a));
}
// Two-dimensional array.
TEST(PrintArrayTest, TwoDimensionalArray) {
- int a[2][5] = {
- { 1, 2, 3, 4, 5 },
- { 6, 7, 8, 9, 0 }
- };
+ int a[2][5] = {{1, 2, 3, 4, 5}, {6, 7, 8, 9, 0}};
EXPECT_EQ("{ { 1, 2, 3, 4, 5 }, { 6, 7, 8, 9, 0 } }", PrintArrayHelper(a));
}
// Array of const elements.
TEST(PrintArrayTest, ConstArray) {
- const bool a[1] = { false };
+ const bool a[1] = {false};
EXPECT_EQ("{ false }", PrintArrayHelper(a));
}
// char array without terminating NUL.
TEST(PrintArrayTest, CharArrayWithNoTerminatingNul) {
// Array a contains '\0' in the middle and doesn't end with '\0'.
- char a[] = { 'H', '\0', 'i' };
+ char a[] = {'H', '\0', 'i'};
EXPECT_EQ("\"H\\0i\" (no terminating NUL)", PrintArrayHelper(a));
}
@@ -795,7 +844,7 @@ TEST(PrintArrayTest, CharArrayWithTerminatingNul) {
EXPECT_EQ("\"\\0Hi\"", PrintArrayHelper(a));
}
-#ifdef __cpp_char8_t
+#ifdef __cpp_lib_char8_t
// char8_t array without terminating NUL.
TEST(PrintArrayTest, Char8ArrayWithNoTerminatingNul) {
// Array a contains '\0' in the middle and doesn't end with '\0'.
@@ -806,9 +855,7 @@ TEST(PrintArrayTest, Char8ArrayWithNoTerminatingNul) {
// char8_t array with terminating NUL.
TEST(PrintArrayTest, Char8ArrayWithTerminatingNul) {
const char8_t a[] = u8"\0世界";
- EXPECT_EQ(
- "u8\"\\0\\xE4\\xB8\\x96\\xE7\\x95\\x8C\"",
- PrintArrayHelper(a));
+ EXPECT_EQ("u8\"\\0\\xE4\\xB8\\x96\\xE7\\x95\\x8C\"", PrintArrayHelper(a));
}
#endif
@@ -861,7 +908,7 @@ TEST(PrintArrayTest, ObjectArray) {
// Array with many elements.
TEST(PrintArrayTest, BigArray) {
- int a[100] = { 1, 2, 3 };
+ int a[100] = {1, 2, 3};
EXPECT_EQ("{ 1, 2, 3, 0, 0, 0, 0, 0, ..., 0, 0, 0, 0, 0, 0, 0, 0 }",
PrintArrayHelper(a));
}
@@ -881,11 +928,14 @@ TEST(PrintStringTest, StringAmbiguousHex) {
// '\x6', '\x6B', or '\x6BA'.
// a hex escaping sequence following by a decimal digit
- EXPECT_EQ("\"0\\x12\" \"3\"", Print(::std::string("0\x12" "3")));
+ EXPECT_EQ("\"0\\x12\" \"3\"", Print(::std::string("0\x12"
+ "3")));
// a hex escaping sequence following by a hex digit (lower-case)
- EXPECT_EQ("\"mm\\x6\" \"bananas\"", Print(::std::string("mm\x6" "bananas")));
+ EXPECT_EQ("\"mm\\x6\" \"bananas\"", Print(::std::string("mm\x6"
+ "bananas")));
// a hex escaping sequence following by a hex digit (upper-case)
- EXPECT_EQ("\"NOM\\x6\" \"BANANA\"", Print(::std::string("NOM\x6" "BANANA")));
+ EXPECT_EQ("\"NOM\\x6\" \"BANANA\"", Print(::std::string("NOM\x6"
+ "BANANA")));
// a hex escaping sequence following by a non-xdigit
EXPECT_EQ("\"!\\x5-!\"", Print(::std::string("!\x5-!")));
}
@@ -895,24 +945,26 @@ TEST(PrintStringTest, StringAmbiguousHex) {
// ::std::wstring.
TEST(PrintWideStringTest, StringInStdNamespace) {
const wchar_t s[] = L"'\"?\\\a\b\f\n\0\r\t\v\xD3\x576\x8D3\xC74D a";
- const ::std::wstring str(s, sizeof(s)/sizeof(wchar_t));
- EXPECT_EQ("L\"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v"
- "\\xD3\\x576\\x8D3\\xC74D a\\0\"",
- Print(str));
+ const ::std::wstring str(s, sizeof(s) / sizeof(wchar_t));
+ EXPECT_EQ(
+ "L\"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v"
+ "\\xD3\\x576\\x8D3\\xC74D a\\0\"",
+ Print(str));
}
TEST(PrintWideStringTest, StringAmbiguousHex) {
// same for wide strings.
- EXPECT_EQ("L\"0\\x12\" L\"3\"", Print(::std::wstring(L"0\x12" L"3")));
- EXPECT_EQ("L\"mm\\x6\" L\"bananas\"",
- Print(::std::wstring(L"mm\x6" L"bananas")));
- EXPECT_EQ("L\"NOM\\x6\" L\"BANANA\"",
- Print(::std::wstring(L"NOM\x6" L"BANANA")));
+ EXPECT_EQ("L\"0\\x12\" L\"3\"", Print(::std::wstring(L"0\x12"
+ L"3")));
+ EXPECT_EQ("L\"mm\\x6\" L\"bananas\"", Print(::std::wstring(L"mm\x6"
+ L"bananas")));
+ EXPECT_EQ("L\"NOM\\x6\" L\"BANANA\"", Print(::std::wstring(L"NOM\x6"
+ L"BANANA")));
EXPECT_EQ("L\"!\\x5-!\"", Print(::std::wstring(L"!\x5-!")));
}
#endif // GTEST_HAS_STD_WSTRING
-#ifdef __cpp_char8_t
+#ifdef __cpp_lib_char8_t
TEST(PrintStringTest, U8String) {
std::u8string str = u8"Hello, 世界";
EXPECT_EQ(str, str); // Verify EXPECT_EQ compiles with this type.
@@ -1021,7 +1073,6 @@ TEST(PrintStlContainerTest, NonEmptyDeque) {
EXPECT_EQ("{ 1, 3 }", Print(non_empty));
}
-
TEST(PrintStlContainerTest, OneElementHashMap) {
::std::unordered_map<int, char> map1;
map1[1] = 'a';
@@ -1037,11 +1088,9 @@ TEST(PrintStlContainerTest, HashMultiMap) {
const std::string result = Print(map1);
EXPECT_TRUE(result == "{ (5, true), (5, false) }" ||
result == "{ (5, false), (5, true) }")
- << " where Print(map1) returns \"" << result << "\".";
+ << " where Print(map1) returns \"" << result << "\".";
}
-
-
TEST(PrintStlContainerTest, HashSet) {
::std::unordered_set<int> set1;
set1.insert(1);
@@ -1050,7 +1099,7 @@ TEST(PrintStlContainerTest, HashSet) {
TEST(PrintStlContainerTest, HashMultiSet) {
const int kSize = 5;
- int a[kSize] = { 1, 1, 2, 5, 1 };
+ int a[kSize] = {1, 1, 2, 5, 1};
::std::unordered_multiset<int> set1(a, a + kSize);
// Elements of hash_multiset can be printed in any order.
@@ -1066,8 +1115,8 @@ TEST(PrintStlContainerTest, HashMultiSet) {
ASSERT_NE(isdigit(static_cast<unsigned char>(result[i])), 0);
numbers.push_back(result[i] - '0');
} else {
- EXPECT_EQ(expected_pattern[i], result[i]) << " where result is "
- << result;
+ EXPECT_EQ(expected_pattern[i], result[i])
+ << " where result is " << result;
}
}
@@ -1077,7 +1126,6 @@ TEST(PrintStlContainerTest, HashMultiSet) {
EXPECT_TRUE(std::equal(a, a + kSize, numbers.begin()));
}
-
TEST(PrintStlContainerTest, List) {
const std::string a[] = {"hello", "world"};
const list<std::string> strings(a, a + 2);
@@ -1107,20 +1155,19 @@ TEST(PrintStlContainerTest, MultiMap) {
}
TEST(PrintStlContainerTest, Set) {
- const unsigned int a[] = { 3, 0, 5 };
+ const unsigned int a[] = {3, 0, 5};
set<unsigned int> set1(a, a + 3);
EXPECT_EQ("{ 0, 3, 5 }", Print(set1));
}
TEST(PrintStlContainerTest, MultiSet) {
- const int a[] = { 1, 1, 2, 5, 1 };
+ const int a[] = {1, 1, 2, 5, 1};
multiset<int> set1(a, a + 5);
EXPECT_EQ("{ 1, 1, 1, 2, 5 }", Print(set1));
}
-
TEST(PrintStlContainerTest, SinglyLinkedList) {
- int a[] = { 9, 2, 8 };
+ int a[] = {9, 2, 8};
const std::forward_list<int> ints(a, a + 3);
EXPECT_EQ("{ 9, 2, 8 }", Print(ints));
}
@@ -1137,33 +1184,46 @@ TEST(PrintStlContainerTest, Vector) {
EXPECT_EQ("{ 1, 2 }", Print(v));
}
+TEST(PrintStlContainerTest, StdSpan) {
+#if GTEST_INTERNAL_HAS_STD_SPAN
+ int a[] = {3, 6, 5};
+ std::span<int> s = a;
+
+ EXPECT_EQ("{ 3, 6, 5 }", Print(s));
+#else
+ GTEST_SKIP() << "Does not have std::span.";
+#endif // GTEST_INTERNAL_HAS_STD_SPAN
+}
+
TEST(PrintStlContainerTest, LongSequence) {
- const int a[100] = { 1, 2, 3 };
+ const int a[100] = {1, 2, 3};
const vector<int> v(a, a + 100);
- EXPECT_EQ("{ 1, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, "
- "0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ... }", Print(v));
+ EXPECT_EQ(
+ "{ 1, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, "
+ "0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ... }",
+ Print(v));
}
TEST(PrintStlContainerTest, NestedContainer) {
- const int a1[] = { 1, 2 };
- const int a2[] = { 3, 4, 5 };
+ const int a1[] = {1, 2};
+ const int a2[] = {3, 4, 5};
const list<int> l1(a1, a1 + 2);
const list<int> l2(a2, a2 + 3);
- vector<list<int> > v;
+ vector<list<int>> v;
v.push_back(l1);
v.push_back(l2);
EXPECT_EQ("{ { 1, 2 }, { 3, 4, 5 } }", Print(v));
}
TEST(PrintStlContainerTest, OneDimensionalNativeArray) {
- const int a[3] = { 1, 2, 3 };
+ const int a[3] = {1, 2, 3};
NativeArray<int> b(a, 3, RelationToSourceReference());
EXPECT_EQ("{ 1, 2, 3 }", Print(b));
}
TEST(PrintStlContainerTest, TwoDimensionalNativeArray) {
- const int a[2][3] = { { 1, 2, 3 }, { 4, 5, 6 } };
+ const int a[2][3] = {{1, 2, 3}, {4, 5, 6}};
NativeArray<int[3]> b(a, 2, RelationToSourceReference());
EXPECT_EQ("{ { 1, 2, 3 }, { 4, 5, 6 } }", Print(b));
}
@@ -1215,20 +1275,18 @@ TEST(PrintStdTupleTest, VariousSizes) {
t10(false, 'a', static_cast<short>(3), 4, 5, 1.5F, -2.5, str, // NOLINT
nullptr, "10");
EXPECT_EQ("(false, 'a' (97, 0x61), 3, 4, 5, 1.5, -2.5, " + PrintPointer(str) +
- " pointing to \"8\", NULL, \"10\")",
+ " pointing to \"8\", NULL, \"10\")",
Print(t10));
}
// Nested tuples.
TEST(PrintStdTupleTest, NestedTuple) {
- ::std::tuple< ::std::tuple<int, bool>, char> nested(
- ::std::make_tuple(5, true), 'a');
+ ::std::tuple<::std::tuple<int, bool>, char> nested(::std::make_tuple(5, true),
+ 'a');
EXPECT_EQ("((5, true), 'a' (97, 0x61))", Print(nested));
}
-TEST(PrintNullptrT, Basic) {
- EXPECT_EQ("(nullptr)", Print(nullptr));
-}
+TEST(PrintNullptrT, Basic) { EXPECT_EQ("(nullptr)", Print(nullptr)); }
TEST(PrintReferenceWrapper, Printable) {
int x = 5;
@@ -1252,8 +1310,7 @@ TEST(PrintReferenceWrapper, Unprintable) {
// Unprintable types in the global namespace.
TEST(PrintUnprintableTypeTest, InGlobalNamespace) {
- EXPECT_EQ("1-byte object <00>",
- Print(UnprintableTemplateInGlobal<char>()));
+ EXPECT_EQ("1-byte object <00>", Print(UnprintableTemplateInGlobal<char>()));
}
// Unprintable types in a user namespace.
@@ -1270,14 +1327,15 @@ struct Big {
};
TEST(PrintUnpritableTypeTest, BigObject) {
- EXPECT_EQ("257-byte object <00-00 00-00 00-00 00-00 00-00 00-00 "
- "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
- "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
- "00-00 00-00 00-00 00-00 00-00 00-00 ... 00-00 00-00 00-00 "
- "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
- "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
- "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00>",
- Print(Big()));
+ EXPECT_EQ(
+ "257-byte object <00-00 00-00 00-00 00-00 00-00 00-00 "
+ "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
+ "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
+ "00-00 00-00 00-00 00-00 00-00 00-00 ... 00-00 00-00 00-00 "
+ "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
+ "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
+ "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00>",
+ Print(Big()));
}
// Tests printing user-defined streamable types.
@@ -1320,8 +1378,7 @@ TEST(PrintStreamableTypeTest, PathLikeInUserNamespace) {
// Tests printing user-defined types that have a PrintTo() function.
TEST(PrintPrintableTypeTest, InUserNamespace) {
- EXPECT_EQ("PrintableViaPrintTo: 0",
- Print(::foo::PrintableViaPrintTo()));
+ EXPECT_EQ("PrintableViaPrintTo: 0", Print(::foo::PrintableViaPrintTo()));
}
// Tests printing a pointer to a user-defined type that has a <<
@@ -1343,16 +1400,14 @@ TEST(PrintReferenceTest, PrintsAddressAndValue) {
int n = 5;
EXPECT_EQ("@" + PrintPointer(&n) + " 5", PrintByRef(n));
- int a[2][3] = {
- { 0, 1, 2 },
- { 3, 4, 5 }
- };
+ int a[2][3] = {{0, 1, 2}, {3, 4, 5}};
EXPECT_EQ("@" + PrintPointer(a) + " { { 0, 1, 2 }, { 3, 4, 5 } }",
PrintByRef(a));
const ::foo::UnprintableInFoo x;
- EXPECT_EQ("@" + PrintPointer(&x) + " 16-byte object "
- "<EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>",
+ EXPECT_EQ("@" + PrintPointer(&x) +
+ " 16-byte object "
+ "<EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>",
PrintByRef(x));
}
@@ -1368,33 +1423,29 @@ TEST(PrintReferenceTest, HandlesFunctionPointer) {
// this limitation.
const std::string fp_string = PrintPointer(reinterpret_cast<const void*>(
reinterpret_cast<internal::BiggestInt>(fp)));
- EXPECT_EQ("@" + fp_pointer_string + " " + fp_string,
- PrintByRef(fp));
+ EXPECT_EQ("@" + fp_pointer_string + " " + fp_string, PrintByRef(fp));
}
// Tests that the universal printer prints a member function pointer
// passed by reference.
TEST(PrintReferenceTest, HandlesMemberFunctionPointer) {
int (Foo::*p)(char ch) = &Foo::MyMethod;
- EXPECT_TRUE(HasPrefix(
- PrintByRef(p),
- "@" + PrintPointer(reinterpret_cast<const void*>(&p)) + " " +
- Print(sizeof(p)) + "-byte object "));
+ EXPECT_TRUE(HasPrefix(PrintByRef(p),
+ "@" + PrintPointer(reinterpret_cast<const void*>(&p)) +
+ " " + Print(sizeof(p)) + "-byte object "));
char (Foo::*p2)(int n) = &Foo::MyVirtualMethod;
- EXPECT_TRUE(HasPrefix(
- PrintByRef(p2),
- "@" + PrintPointer(reinterpret_cast<const void*>(&p2)) + " " +
- Print(sizeof(p2)) + "-byte object "));
+ EXPECT_TRUE(HasPrefix(PrintByRef(p2),
+ "@" + PrintPointer(reinterpret_cast<const void*>(&p2)) +
+ " " + Print(sizeof(p2)) + "-byte object "));
}
// Tests that the universal printer prints a member variable pointer
// passed by reference.
TEST(PrintReferenceTest, HandlesMemberVariablePointer) {
int Foo::*p = &Foo::value; // NOLINT
- EXPECT_TRUE(HasPrefix(
- PrintByRef(p),
- "@" + PrintPointer(&p) + " " + Print(sizeof(p)) + "-byte object "));
+ EXPECT_TRUE(HasPrefix(PrintByRef(p), "@" + PrintPointer(&p) + " " +
+ Print(sizeof(p)) + "-byte object "));
}
// Tests that FormatForComparisonFailureMessage(), which is used to print
@@ -1403,8 +1454,7 @@ TEST(PrintReferenceTest, HandlesMemberVariablePointer) {
// scalar
TEST(FormatForComparisonFailureMessageTest, WorksForScalar) {
- EXPECT_STREQ("123",
- FormatForComparisonFailureMessage(123, 124).c_str());
+ EXPECT_STREQ("123", FormatForComparisonFailureMessage(123, 124).c_str());
}
// non-char pointer
@@ -1418,9 +1468,8 @@ TEST(FormatForComparisonFailureMessageTest, WorksForNonCharPointer) {
TEST(FormatForComparisonFailureMessageTest, FormatsNonCharArrayAsPointer) {
// In expression 'array == x', 'array' is compared by pointer.
// Therefore we want to print an array operand as a pointer.
- int n[] = { 1, 2, 3 };
- EXPECT_EQ(PrintPointer(n),
- FormatForComparisonFailureMessage(n, n).c_str());
+ int n[] = {1, 2, 3};
+ EXPECT_EQ(PrintPointer(n), FormatForComparisonFailureMessage(n, n).c_str());
}
// Tests formatting a char pointer when it's compared with another pointer.
@@ -1436,8 +1485,7 @@ TEST(FormatForComparisonFailureMessageTest, WorksForCharPointerVsPointer) {
// const char*
const char* s = "hello";
- EXPECT_EQ(PrintPointer(s),
- FormatForComparisonFailureMessage(s, s).c_str());
+ EXPECT_EQ(PrintPointer(s), FormatForComparisonFailureMessage(s, s).c_str());
// char*
char ch = 'a';
@@ -1454,8 +1502,7 @@ TEST(FormatForComparisonFailureMessageTest, WorksForWCharPointerVsPointer) {
// const wchar_t*
const wchar_t* s = L"hello";
- EXPECT_EQ(PrintPointer(s),
- FormatForComparisonFailureMessage(s, s).c_str());
+ EXPECT_EQ(PrintPointer(s), FormatForComparisonFailureMessage(s, s).c_str());
// wchar_t*
wchar_t ch = L'a';
@@ -1552,13 +1599,11 @@ TEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsStdWString) {
// Useful for testing PrintToString(). We cannot use EXPECT_EQ()
// there as its implementation uses PrintToString(). The caller must
// ensure that 'value' has no side effect.
-#define EXPECT_PRINT_TO_STRING_(value, expected_string) \
- EXPECT_TRUE(PrintToString(value) == (expected_string)) \
+#define EXPECT_PRINT_TO_STRING_(value, expected_string) \
+ EXPECT_TRUE(PrintToString(value) == (expected_string)) \
<< " where " #value " prints as " << (PrintToString(value))
-TEST(PrintToStringTest, WorksForScalar) {
- EXPECT_PRINT_TO_STRING_(123, "123");
-}
+TEST(PrintToStringTest, WorksForScalar) { EXPECT_PRINT_TO_STRING_(123, "123"); }
TEST(PrintToStringTest, WorksForPointerToConstChar) {
const char* p = "hello";
@@ -1583,7 +1628,7 @@ TEST(PrintToStringTest, EscapesForPointerToNonConstChar) {
}
TEST(PrintToStringTest, WorksForArray) {
- int n[3] = { 1, 2, 3 };
+ int n[3] = {1, 2, 3};
EXPECT_PRINT_TO_STRING_(n, "{ 1, 2, 3 }");
}
@@ -1600,8 +1645,8 @@ TEST(PrintToStringTest, WorksForCharArrayWithEmbeddedNul) {
EXPECT_PRINT_TO_STRING_(mutable_str_with_nul, "\"hello\\0 world\"");
}
- TEST(PrintToStringTest, ContainsNonLatin) {
- // Sanity test with valid UTF-8. Prints both in hex and as text.
+TEST(PrintToStringTest, ContainsNonLatin) {
+ // Test with valid UTF-8. Prints both in hex and as text.
std::string non_ascii_str = ::std::string("오전 4:30");
EXPECT_PRINT_TO_STRING_(non_ascii_str,
"\"\\xEC\\x98\\xA4\\xEC\\xA0\\x84 4:30\"\n"
@@ -1612,62 +1657,86 @@ TEST(PrintToStringTest, WorksForCharArrayWithEmbeddedNul) {
"\n As Text: \"From ä — ẑ\"");
}
+TEST(PrintToStringTest, PrintStreamableInLocal) {
+ EXPECT_STREQ("StreamableInLocal",
+ PrintToString(foo::StreamableInLocal()).c_str());
+}
+
+TEST(PrintToStringTest, PrintReferenceToStreamableInLocal) {
+ foo::StreamableInLocal s;
+ std::reference_wrapper<foo::StreamableInLocal> r(s);
+ EXPECT_STREQ("StreamableInLocal", PrintToString(r).c_str());
+}
+
+TEST(PrintToStringTest, PrintReferenceToStreamableInGlobal) {
+ StreamableInGlobal s;
+ std::reference_wrapper<StreamableInGlobal> r(s);
+ EXPECT_STREQ("StreamableInGlobal", PrintToString(r).c_str());
+}
+
+#ifdef GTEST_HAS_ABSL
+TEST(PrintToStringTest, AbslStringify) {
+ EXPECT_PRINT_TO_STRING_(Point(), "(10, 20)");
+}
+#endif
+
TEST(IsValidUTF8Test, IllFormedUTF8) {
// The following test strings are ill-formed UTF-8 and are printed
// as hex only (or ASCII, in case of ASCII bytes) because IsValidUTF8() is
  // expected to fail, so the output does not contain "As Text:".
- static const char *const kTestdata[][2] = {
- // 2-byte lead byte followed by a single-byte character.
- {"\xC3\x74", "\"\\xC3t\""},
- // Valid 2-byte character followed by an orphan trail byte.
- {"\xC3\x84\xA4", "\"\\xC3\\x84\\xA4\""},
- // Lead byte without trail byte.
- {"abc\xC3", "\"abc\\xC3\""},
- // 3-byte lead byte, single-byte character, orphan trail byte.
- {"x\xE2\x70\x94", "\"x\\xE2p\\x94\""},
- // Truncated 3-byte character.
- {"\xE2\x80", "\"\\xE2\\x80\""},
- // Truncated 3-byte character followed by valid 2-byte char.
- {"\xE2\x80\xC3\x84", "\"\\xE2\\x80\\xC3\\x84\""},
- // Truncated 3-byte character followed by a single-byte character.
- {"\xE2\x80\x7A", "\"\\xE2\\x80z\""},
- // 3-byte lead byte followed by valid 3-byte character.
- {"\xE2\xE2\x80\x94", "\"\\xE2\\xE2\\x80\\x94\""},
- // 4-byte lead byte followed by valid 3-byte character.
- {"\xF0\xE2\x80\x94", "\"\\xF0\\xE2\\x80\\x94\""},
- // Truncated 4-byte character.
- {"\xF0\xE2\x80", "\"\\xF0\\xE2\\x80\""},
- // Invalid UTF-8 byte sequences embedded in other chars.
- {"abc\xE2\x80\x94\xC3\x74xyc", "\"abc\\xE2\\x80\\x94\\xC3txyc\""},
- {"abc\xC3\x84\xE2\x80\xC3\x84xyz",
- "\"abc\\xC3\\x84\\xE2\\x80\\xC3\\x84xyz\""},
- // Non-shortest UTF-8 byte sequences are also ill-formed.
- // The classics: xC0, xC1 lead byte.
- {"\xC0\x80", "\"\\xC0\\x80\""},
- {"\xC1\x81", "\"\\xC1\\x81\""},
- // Non-shortest sequences.
- {"\xE0\x80\x80", "\"\\xE0\\x80\\x80\""},
- {"\xf0\x80\x80\x80", "\"\\xF0\\x80\\x80\\x80\""},
- // Last valid code point before surrogate range, should be printed as text,
- // too.
- {"\xED\x9F\xBF", "\"\\xED\\x9F\\xBF\"\n As Text: \"퟿\""},
- // Start of surrogate lead. Surrogates are not printed as text.
- {"\xED\xA0\x80", "\"\\xED\\xA0\\x80\""},
- // Last non-private surrogate lead.
- {"\xED\xAD\xBF", "\"\\xED\\xAD\\xBF\""},
- // First private-use surrogate lead.
- {"\xED\xAE\x80", "\"\\xED\\xAE\\x80\""},
- // Last private-use surrogate lead.
- {"\xED\xAF\xBF", "\"\\xED\\xAF\\xBF\""},
- // Mid-point of surrogate trail.
- {"\xED\xB3\xBF", "\"\\xED\\xB3\\xBF\""},
- // First valid code point after surrogate range, should be printed as text,
- // too.
- {"\xEE\x80\x80", "\"\\xEE\\x80\\x80\"\n As Text: \"\""}
- };
-
- for (int i = 0; i < int(sizeof(kTestdata)/sizeof(kTestdata[0])); ++i) {
+ static const char* const kTestdata[][2] = {
+ // 2-byte lead byte followed by a single-byte character.
+ {"\xC3\x74", "\"\\xC3t\""},
+ // Valid 2-byte character followed by an orphan trail byte.
+ {"\xC3\x84\xA4", "\"\\xC3\\x84\\xA4\""},
+ // Lead byte without trail byte.
+ {"abc\xC3", "\"abc\\xC3\""},
+ // 3-byte lead byte, single-byte character, orphan trail byte.
+ {"x\xE2\x70\x94", "\"x\\xE2p\\x94\""},
+ // Truncated 3-byte character.
+ {"\xE2\x80", "\"\\xE2\\x80\""},
+ // Truncated 3-byte character followed by valid 2-byte char.
+ {"\xE2\x80\xC3\x84", "\"\\xE2\\x80\\xC3\\x84\""},
+ // Truncated 3-byte character followed by a single-byte character.
+ {"\xE2\x80\x7A", "\"\\xE2\\x80z\""},
+ // 3-byte lead byte followed by valid 3-byte character.
+ {"\xE2\xE2\x80\x94", "\"\\xE2\\xE2\\x80\\x94\""},
+ // 4-byte lead byte followed by valid 3-byte character.
+ {"\xF0\xE2\x80\x94", "\"\\xF0\\xE2\\x80\\x94\""},
+ // Truncated 4-byte character.
+ {"\xF0\xE2\x80", "\"\\xF0\\xE2\\x80\""},
+ // Invalid UTF-8 byte sequences embedded in other chars.
+ {"abc\xE2\x80\x94\xC3\x74xyc", "\"abc\\xE2\\x80\\x94\\xC3txyc\""},
+ {"abc\xC3\x84\xE2\x80\xC3\x84xyz",
+ "\"abc\\xC3\\x84\\xE2\\x80\\xC3\\x84xyz\""},
+ // Non-shortest UTF-8 byte sequences are also ill-formed.
+ // The classics: xC0, xC1 lead byte.
+ {"\xC0\x80", "\"\\xC0\\x80\""},
+ {"\xC1\x81", "\"\\xC1\\x81\""},
+ // Non-shortest sequences.
+ {"\xE0\x80\x80", "\"\\xE0\\x80\\x80\""},
+ {"\xf0\x80\x80\x80", "\"\\xF0\\x80\\x80\\x80\""},
+ // Last valid code point before surrogate range, should be printed as
+ // text,
+ // too.
+ {"\xED\x9F\xBF", "\"\\xED\\x9F\\xBF\"\n As Text: \"퟿\""},
+ // Start of surrogate lead. Surrogates are not printed as text.
+ {"\xED\xA0\x80", "\"\\xED\\xA0\\x80\""},
+ // Last non-private surrogate lead.
+ {"\xED\xAD\xBF", "\"\\xED\\xAD\\xBF\""},
+ // First private-use surrogate lead.
+ {"\xED\xAE\x80", "\"\\xED\\xAE\\x80\""},
+ // Last private-use surrogate lead.
+ {"\xED\xAF\xBF", "\"\\xED\\xAF\\xBF\""},
+ // Mid-point of surrogate trail.
+ {"\xED\xB3\xBF", "\"\\xED\\xB3\\xBF\""},
+ // First valid code point after surrogate range, should be printed as
+ // text,
+ // too.
+ {"\xEE\x80\x80", "\"\\xEE\\x80\\x80\"\n As Text: \"\""}};
+
+ for (int i = 0; i < int(sizeof(kTestdata) / sizeof(kTestdata[0])); ++i) {
EXPECT_PRINT_TO_STRING_(kTestdata[i][0], kTestdata[i][1]);
}
}
@@ -1772,7 +1841,8 @@ TEST(UniversalPrintTest, SmartPointers) {
std::shared_ptr<int> p3(new int(1979));
EXPECT_EQ("(ptr = " + PrintPointer(p3.get()) + ", value = 1979)",
PrintToString(p3));
-#if __cpp_lib_shared_ptr_arrays >= 201611L
+#if defined(__cpp_lib_shared_ptr_arrays) && \
+ (__cpp_lib_shared_ptr_arrays >= 201611L)
std::shared_ptr<int[]> p4(new int[2]);
EXPECT_EQ("(" + PrintPointer(p4.get()) + ")", PrintToString(p4));
#endif
@@ -1791,7 +1861,8 @@ TEST(UniversalPrintTest, SmartPointers) {
EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<const int>()));
EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<volatile int>()));
EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<volatile const int>()));
-#if __cpp_lib_shared_ptr_arrays >= 201611L
+#if defined(__cpp_lib_shared_ptr_arrays) && \
+ (__cpp_lib_shared_ptr_arrays >= 201611L)
EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<int[]>()));
EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<const int[]>()));
EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<volatile int[]>()));
@@ -1816,15 +1887,15 @@ TEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsEmptyTuple) {
}
TEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsOneTuple) {
- Strings result = UniversalTersePrintTupleFieldsToStrings(
- ::std::make_tuple(1));
+ Strings result =
+ UniversalTersePrintTupleFieldsToStrings(::std::make_tuple(1));
ASSERT_EQ(1u, result.size());
EXPECT_EQ("1", result[0]);
}
TEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsTwoTuple) {
- Strings result = UniversalTersePrintTupleFieldsToStrings(
- ::std::make_tuple(1, 'a'));
+ Strings result =
+ UniversalTersePrintTupleFieldsToStrings(::std::make_tuple(1, 'a'));
ASSERT_EQ(2u, result.size());
EXPECT_EQ("1", result[0]);
EXPECT_EQ("'a' (97, 0x61)", result[1]);
@@ -1873,6 +1944,7 @@ TEST_F(PrintAnyTest, NonEmpty) {
#if GTEST_INTERNAL_HAS_OPTIONAL
TEST(PrintOptionalTest, Basic) {
+ EXPECT_EQ("(nullopt)", PrintToString(internal::Nullopt()));
internal::Optional<int> value;
EXPECT_EQ("(nullopt)", PrintToString(value));
value = {7};
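
The printers-test hunks above exercise two different extension points of
testing::PrintToString(), and the reference_wrapper tests additionally check
that the printer unwraps std::reference_wrapper instead of treating it as an
opaque object. The test types themselves (foo::StreamableInLocal,
StreamableInGlobal, Point) are defined elsewhere in the test sources, so the
following is a hypothetical reconstruction, not part of the patch: a type
becomes printable either through an ADL-visible operator<< or, under
GTEST_HAS_ABSL, through an AbslStringify friend.

    #include <ostream>

    #include "absl/strings/str_format.h"
    #include "gtest/gtest-printers.h"

    namespace foo {
    class StreamableInLocal {};
    // PrintToString() finds this operator<< via argument-dependent lookup.
    inline std::ostream& operator<<(std::ostream& os, const StreamableInLocal&) {
      return os << "StreamableInLocal";
    }
    }  // namespace foo

    // Sketch of a Point matching the "(10, 20)" expectation above.
    struct Point {
      int x = 10;
      int y = 20;
      template <typename Sink>
      friend void AbslStringify(Sink& sink, const Point& p) {
        absl::Format(&sink, "(%d, %d)", p.x, p.y);
      }
    };
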
diff --git a/googletest/test/googletest-setuptestsuite-test.py b/googletest/test/googletest-setuptestsuite-test.py
index c82162fc..899531f3 100755
--- a/googletest/test/googletest-setuptestsuite-test.py
+++ b/googletest/test/googletest-setuptestsuite-test.py
@@ -31,10 +31,11 @@
"""Verifies that SetUpTestSuite and TearDownTestSuite errors are noticed."""
-import gtest_test_utils
+from googletest.test import gtest_test_utils
COMMAND = gtest_test_utils.GetTestExecutablePath(
- 'googletest-setuptestsuite-test_')
+ 'googletest-setuptestsuite-test_'
+)
class GTestSetUpTestSuiteTest(gtest_test_utils.TestCase):
@@ -44,11 +45,14 @@ class GTestSetUpTestSuiteTest(gtest_test_utils.TestCase):
self.assertNotEqual(p.exit_code, 0, msg=p.output)
self.assertIn(
- '[ FAILED ] SetupFailTest: SetUpTestSuite or TearDownTestSuite\n'
- '[ FAILED ] TearDownFailTest: SetUpTestSuite or TearDownTestSuite\n'
- '\n'
- ' 2 FAILED TEST SUITES\n',
- p.output)
+ (
+ '[ FAILED ] SetupFailTest: SetUpTestSuite or TearDownTestSuite\n['
+ ' FAILED ] TearDownFailTest: SetUpTestSuite or'
+ ' TearDownTestSuite\n\n 2 FAILED TEST SUITES\n'
+ ),
+ p.output,
+ )
+
if __name__ == '__main__':
gtest_test_utils.Main()
diff --git a/googletest/test/googletest-setuptestsuite-test_.cc b/googletest/test/googletest-setuptestsuite-test_.cc
index a4bc4ef4..d20899f5 100644
--- a/googletest/test/googletest-setuptestsuite-test_.cc
+++ b/googletest/test/googletest-setuptestsuite-test_.cc
@@ -27,23 +27,18 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
#include "gtest/gtest.h"
class SetupFailTest : public ::testing::Test {
protected:
- static void SetUpTestSuite() {
- ASSERT_EQ("", "SET_UP_FAIL");
- }
+ static void SetUpTestSuite() { ASSERT_EQ("", "SET_UP_FAIL"); }
};
TEST_F(SetupFailTest, NoopPassingTest) {}
class TearDownFailTest : public ::testing::Test {
protected:
- static void TearDownTestSuite() {
- ASSERT_EQ("", "TEAR_DOWN_FAIL");
- }
+ static void TearDownTestSuite() { ASSERT_EQ("", "TEAR_DOWN_FAIL"); }
};
TEST_F(TearDownFailTest, NoopPassingTest) {}
diff --git a/googletest/test/googletest-shuffle-test.py b/googletest/test/googletest-shuffle-test.py
index 573cc5ec..61e3a15e 100755
--- a/googletest/test/googletest-shuffle-test.py
+++ b/googletest/test/googletest-shuffle-test.py
@@ -31,7 +31,7 @@
"""Verifies that test shuffling works."""
import os
-import gtest_test_utils
+from googletest.test import gtest_test_utils
# Command to run the googletest-shuffle-test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-shuffle-test_')
@@ -130,38 +130,50 @@ def CalculateTestLists():
if not ALL_TESTS:
ALL_TESTS.extend(
- GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
+ GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0]
+ )
if not ACTIVE_TESTS:
ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
if not FILTERED_TESTS:
FILTERED_TESTS.extend(
- GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
+ GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0]
+ )
if not SHARDED_TESTS:
SHARDED_TESTS.extend(
- GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
- SHARD_INDEX_ENV_VAR: '1'},
- [])[0])
+ GetTestsForAllIterations(
+ {TOTAL_SHARDS_ENV_VAR: '3', SHARD_INDEX_ENV_VAR: '1'}, []
+ )[0]
+ )
if not SHUFFLED_ALL_TESTS:
- SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
- {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
+ SHUFFLED_ALL_TESTS.extend(
+ GetTestsForAllIterations(
+ {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)]
+ )[0]
+ )
if not SHUFFLED_ACTIVE_TESTS:
- SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
- {}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
+ SHUFFLED_ACTIVE_TESTS.extend(
+ GetTestsForAllIterations({}, [ShuffleFlag(), RandomSeedFlag(1)])[0]
+ )
if not SHUFFLED_FILTERED_TESTS:
- SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
- {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
+ SHUFFLED_FILTERED_TESTS.extend(
+ GetTestsForAllIterations(
+ {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)]
+ )[0]
+ )
if not SHUFFLED_SHARDED_TESTS:
SHUFFLED_SHARDED_TESTS.extend(
- GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
- SHARD_INDEX_ENV_VAR: '1'},
- [ShuffleFlag(), RandomSeedFlag(1)])[0])
+ GetTestsForAllIterations(
+ {TOTAL_SHARDS_ENV_VAR: '3', SHARD_INDEX_ENV_VAR: '1'},
+ [ShuffleFlag(), RandomSeedFlag(1)],
+ )[0]
+ )
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
@@ -177,66 +189,91 @@ class GTestShuffleUnitTest(gtest_test_utils.TestCase):
self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
def testShuffleChangesTestOrder(self):
- self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
- self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
- self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
- SHUFFLED_FILTERED_TESTS)
- self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
- SHUFFLED_SHARDED_TESTS)
+ self.assertTrue(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
+ self.assertTrue(
+ SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS
+ )
+ self.assertTrue(
+ SHUFFLED_FILTERED_TESTS != FILTERED_TESTS, SHUFFLED_FILTERED_TESTS
+ )
+ self.assertTrue(
+ SHUFFLED_SHARDED_TESTS != SHARDED_TESTS, SHUFFLED_SHARDED_TESTS
+ )
def testShuffleChangesTestCaseOrder(self):
- self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
- GetTestCases(SHUFFLED_ALL_TESTS))
- self.assert_(
+ self.assertTrue(
+ GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
+ GetTestCases(SHUFFLED_ALL_TESTS),
+ )
+ self.assertTrue(
GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
- GetTestCases(SHUFFLED_ACTIVE_TESTS))
- self.assert_(
+ GetTestCases(SHUFFLED_ACTIVE_TESTS),
+ )
+ self.assertTrue(
GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
- GetTestCases(SHUFFLED_FILTERED_TESTS))
- self.assert_(
+ GetTestCases(SHUFFLED_FILTERED_TESTS),
+ )
+ self.assertTrue(
GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
- GetTestCases(SHUFFLED_SHARDED_TESTS))
+ GetTestCases(SHUFFLED_SHARDED_TESTS),
+ )
def testShuffleDoesNotRepeatTest(self):
for test in SHUFFLED_ALL_TESTS:
- self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
- '%s appears more than once' % (test,))
+ self.assertEqual(
+ 1,
+ SHUFFLED_ALL_TESTS.count(test),
+ '%s appears more than once' % (test,),
+ )
for test in SHUFFLED_ACTIVE_TESTS:
- self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
- '%s appears more than once' % (test,))
+ self.assertEqual(
+ 1,
+ SHUFFLED_ACTIVE_TESTS.count(test),
+ '%s appears more than once' % (test,),
+ )
for test in SHUFFLED_FILTERED_TESTS:
- self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
- '%s appears more than once' % (test,))
+ self.assertEqual(
+ 1,
+ SHUFFLED_FILTERED_TESTS.count(test),
+ '%s appears more than once' % (test,),
+ )
for test in SHUFFLED_SHARDED_TESTS:
- self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
- '%s appears more than once' % (test,))
+ self.assertEqual(
+ 1,
+ SHUFFLED_SHARDED_TESTS.count(test),
+ '%s appears more than once' % (test,),
+ )
def testShuffleDoesNotCreateNewTest(self):
for test in SHUFFLED_ALL_TESTS:
- self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
+ self.assertTrue(test in ALL_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
- self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
+ self.assertTrue(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
- self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
+ self.assertTrue(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
- self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
+ self.assertTrue(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
def testShuffleIncludesAllTests(self):
for test in ALL_TESTS:
- self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
+ self.assertTrue(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
for test in ACTIVE_TESTS:
- self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
+ self.assertTrue(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
for test in FILTERED_TESTS:
- self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
+ self.assertTrue(
+ test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,)
+ )
for test in SHARDED_TESTS:
- self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
+ self.assertTrue(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
def testShuffleLeavesDeathTestsAtFront(self):
non_death_test_found = False
for test in SHUFFLED_ACTIVE_TESTS:
if 'DeathTest.' in test:
- self.assert_(not non_death_test_found,
- '%s appears after a non-death test' % (test,))
+ self.assertTrue(
+ not non_death_test_found,
+ '%s appears after a non-death test' % (test,),
+ )
else:
non_death_test_found = True
@@ -246,9 +283,11 @@ class GTestShuffleUnitTest(gtest_test_utils.TestCase):
[test_case, _] = test.split('.')
if test_cases and test_cases[-1] != test_case:
test_cases.append(test_case)
- self.assertEqual(1, test_cases.count(test_case),
- 'Test case %s is not grouped together in %s' %
- (test_case, tests))
+ self.assertEqual(
+ 1,
+ test_cases.count(test_case),
+ 'Test case %s is not grouped together in %s' % (test_case, tests),
+ )
def testShuffleDoesNotInterleaveTestCases(self):
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
@@ -262,56 +301,71 @@ class GTestShuffleUnitTest(gtest_test_utils.TestCase):
# iteration, and this test depends on the current implementation
# picking successive numbers. This dependency is not ideal, but
# makes the test much easier to write.
+ # pylint: disable-next=unbalanced-tuple-unpacking
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
- {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
+ {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]
+ )
+ )
# Make sure running the tests with random seed 1 gets the same
# order as in iteration 1 above.
- [tests_with_seed1] = GetTestsForAllIterations(
- {}, [ShuffleFlag(), RandomSeedFlag(1)])
+ tests_with_seed1 = GetTestsForAllIterations(
+ {}, [ShuffleFlag(), RandomSeedFlag(1)]
+ )[0]
self.assertEqual(tests_in_iteration1, tests_with_seed1)
# Make sure running the tests with random seed 2 gets the same
# order as in iteration 2 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 2.
- [tests_with_seed2] = GetTestsForAllIterations(
- {}, [ShuffleFlag(), RandomSeedFlag(2)])
+ tests_with_seed2 = GetTestsForAllIterations(
+ {}, [ShuffleFlag(), RandomSeedFlag(2)]
+ )[0]
self.assertEqual(tests_in_iteration2, tests_with_seed2)
# Make sure running the tests with random seed 3 gets the same
# order as in iteration 3 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 3.
- [tests_with_seed3] = GetTestsForAllIterations(
- {}, [ShuffleFlag(), RandomSeedFlag(3)])
+ tests_with_seed3 = GetTestsForAllIterations(
+ {}, [ShuffleFlag(), RandomSeedFlag(3)]
+ )[0]
self.assertEqual(tests_in_iteration3, tests_with_seed3)
def testShuffleGeneratesNewOrderInEachIteration(self):
+ # pylint: disable-next=unbalanced-tuple-unpacking
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
- {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
-
- self.assert_(tests_in_iteration1 != tests_in_iteration2,
- tests_in_iteration1)
- self.assert_(tests_in_iteration1 != tests_in_iteration3,
- tests_in_iteration1)
- self.assert_(tests_in_iteration2 != tests_in_iteration3,
- tests_in_iteration2)
+ {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]
+ )
+ )
+
+ self.assertTrue(
+ tests_in_iteration1 != tests_in_iteration2, tests_in_iteration1
+ )
+ self.assertTrue(
+ tests_in_iteration1 != tests_in_iteration3, tests_in_iteration1
+ )
+ self.assertTrue(
+ tests_in_iteration2 != tests_in_iteration3, tests_in_iteration2
+ )
def testShuffleShardedTestsPreservesPartition(self):
# If we run M tests on N shards, the same M tests should be run in
# total, regardless of the random seeds used by the shards.
- [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
- SHARD_INDEX_ENV_VAR: '0'},
- [ShuffleFlag(), RandomSeedFlag(1)])
- [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
- SHARD_INDEX_ENV_VAR: '1'},
- [ShuffleFlag(), RandomSeedFlag(20)])
- [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
- SHARD_INDEX_ENV_VAR: '2'},
- [ShuffleFlag(), RandomSeedFlag(25)])
+ tests1 = GetTestsForAllIterations(
+ {TOTAL_SHARDS_ENV_VAR: '3', SHARD_INDEX_ENV_VAR: '0'},
+ [ShuffleFlag(), RandomSeedFlag(1)],
+ )[0]
+ tests2 = GetTestsForAllIterations(
+ {TOTAL_SHARDS_ENV_VAR: '3', SHARD_INDEX_ENV_VAR: '1'},
+ [ShuffleFlag(), RandomSeedFlag(20)],
+ )[0]
+ tests3 = GetTestsForAllIterations(
+ {TOTAL_SHARDS_ENV_VAR: '3', SHARD_INDEX_ENV_VAR: '2'},
+ [ShuffleFlag(), RandomSeedFlag(25)],
+ )[0]
sorted_sharded_tests = tests1 + tests2 + tests3
sorted_sharded_tests.sort()
sorted_active_tests = []
@@ -319,5 +373,6 @@ class GTestShuffleUnitTest(gtest_test_utils.TestCase):
sorted_active_tests.sort()
self.assertEqual(sorted_active_tests, sorted_sharded_tests)
+
if __name__ == '__main__':
gtest_test_utils.Main()
diff --git a/googletest/test/googletest-shuffle-test_.cc b/googletest/test/googletest-shuffle-test_.cc
index 4505663a..b570c488 100644
--- a/googletest/test/googletest-shuffle-test_.cc
+++ b/googletest/test/googletest-shuffle-test_.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// Verifies that test shuffling works.
#include "gtest/gtest.h"
@@ -36,7 +35,6 @@ namespace {
using ::testing::EmptyTestEventListener;
using ::testing::InitGoogleTest;
-using ::testing::Message;
using ::testing::Test;
using ::testing::TestEventListeners;
using ::testing::TestInfo;
@@ -88,7 +86,7 @@ class TestNamePrinter : public EmptyTestEventListener {
} // namespace
-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
InitGoogleTest(&argc, argv);
// Replaces the default printer with TestNamePrinter, which prints
diff --git a/googletest/test/googletest-test-part-test.cc b/googletest/test/googletest-test-part-test.cc
index 44cf7ca0..076e5be2 100644
--- a/googletest/test/googletest-test-part-test.cc
+++ b/googletest/test/googletest-test-part-test.cc
@@ -28,7 +28,6 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "gtest/gtest-test-part.h"
-
#include "gtest/gtest.h"
using testing::Message;
@@ -52,17 +51,14 @@ class TestPartResultTest : public Test {
TestPartResult r1_, r2_, r3_, r4_;
};
-
TEST_F(TestPartResultTest, ConstructorWorks) {
Message message;
message << "something is terribly wrong";
message << static_cast<const char*>(testing::internal::kStackTraceMarker);
message << "some unimportant stack trace";
- const TestPartResult result(TestPartResult::kNonFatalFailure,
- "some_file.cc",
- 42,
- message.GetString().c_str());
+ const TestPartResult result(TestPartResult::kNonFatalFailure, "some_file.cc",
+ 42, message.GetString().c_str());
EXPECT_EQ(TestPartResult::kNonFatalFailure, result.type());
EXPECT_STREQ("some_file.cc", result.file_name());
@@ -72,9 +68,7 @@ TEST_F(TestPartResultTest, ConstructorWorks) {
}
TEST_F(TestPartResultTest, ResultAccessorsWork) {
- const TestPartResult success(TestPartResult::kSuccess,
- "file.cc",
- 42,
+ const TestPartResult success(TestPartResult::kSuccess, "file.cc", 42,
"message");
EXPECT_TRUE(success.passed());
EXPECT_FALSE(success.failed());
@@ -83,19 +77,15 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
EXPECT_FALSE(success.skipped());
const TestPartResult nonfatal_failure(TestPartResult::kNonFatalFailure,
- "file.cc",
- 42,
- "message");
+ "file.cc", 42, "message");
EXPECT_FALSE(nonfatal_failure.passed());
EXPECT_TRUE(nonfatal_failure.failed());
EXPECT_TRUE(nonfatal_failure.nonfatally_failed());
EXPECT_FALSE(nonfatal_failure.fatally_failed());
EXPECT_FALSE(nonfatal_failure.skipped());
- const TestPartResult fatal_failure(TestPartResult::kFatalFailure,
- "file.cc",
- 42,
- "message");
+ const TestPartResult fatal_failure(TestPartResult::kFatalFailure, "file.cc",
+ 42, "message");
EXPECT_FALSE(fatal_failure.passed());
EXPECT_TRUE(fatal_failure.failed());
EXPECT_FALSE(fatal_failure.nonfatally_failed());
diff --git a/googletest/test/googletest-throw-on-failure-test.py b/googletest/test/googletest-throw-on-failure-test.py
index ea627c47..106b0044 100755
--- a/googletest/test/googletest-throw-on-failure-test.py
+++ b/googletest/test/googletest-throw-on-failure-test.py
@@ -36,7 +36,7 @@ Google Test) with different environments and command line flags.
"""
import os
-import gtest_test_utils
+from googletest.test import gtest_test_utils
# Constants.
@@ -47,15 +47,22 @@ THROW_ON_FAILURE = 'gtest_throw_on_failure'
# Path to the googletest-throw-on-failure-test_ program, compiled with
# exceptions disabled.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
- 'googletest-throw-on-failure-test_')
+ 'googletest-throw-on-failure-test_'
+)
# Utilities.
def SetEnvVar(env_var, value):
- """Sets an environment variable to a given value; unsets it when the
+ """Sets an environment variable.
+
+ Sets an environment variable to a given value; unsets it when the
given value is None.
+
+ Args:
+ env_var: environment variable.
+ value: value to set.
"""
env_var = env_var.upper()
@@ -78,14 +85,16 @@ class ThrowOnFailureTest(gtest_test_utils.TestCase):
"""Tests the throw-on-failure mode."""
def RunAndVerify(self, env_var_value, flag_value, should_fail):
- """Runs googletest-throw-on-failure-test_ and verifies that it does
+ """Runs googletest-throw-on-failure-test_ and verifies its behavior.
+
+ Runs googletest-throw-on-failure-test_ and verifies that it does
(or does not) exit with a non-zero code.
Args:
       env_var_value: value of the GTEST_THROW_ON_FAILURE environment
- variable; None if the variable should be unset.
- flag_value: value of the --gtest_break_on_failure flag;
- None if the flag should not be present.
+ variable; None if the variable should be unset.
+      flag_value: value of the --gtest_throw_on_failure flag; None if the
+ flag should not be present.
should_fail: True if and only if the program is expected to fail.
"""
@@ -116,11 +125,16 @@ class ThrowOnFailureTest(gtest_test_utils.TestCase):
SetEnvVar(THROW_ON_FAILURE, None)
- msg = ('when %s%s, an assertion failure in "%s" %s cause a non-zero '
- 'exit code.' %
- (THROW_ON_FAILURE, env_var_value_msg, ' '.join(command),
- should_or_not))
- self.assert_(failed == should_fail, msg)
+ msg = (
+ 'when %s%s, an assertion failure in "%s" %s cause a non-zero exit code.'
+ % (
+ THROW_ON_FAILURE,
+ env_var_value_msg,
+ ' '.join(command),
+ should_or_not,
+ )
+ )
+ self.assertTrue(failed == should_fail, msg)
def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
@@ -130,38 +144,22 @@ class ThrowOnFailureTest(gtest_test_utils.TestCase):
def testThrowOnFailureEnvVar(self):
"""Tests using the GTEST_THROW_ON_FAILURE environment variable."""
- self.RunAndVerify(env_var_value='0',
- flag_value=None,
- should_fail=False)
- self.RunAndVerify(env_var_value='1',
- flag_value=None,
- should_fail=True)
+ self.RunAndVerify(env_var_value='0', flag_value=None, should_fail=False)
+ self.RunAndVerify(env_var_value='1', flag_value=None, should_fail=True)
def testThrowOnFailureFlag(self):
"""Tests using the --gtest_throw_on_failure flag."""
- self.RunAndVerify(env_var_value=None,
- flag_value='0',
- should_fail=False)
- self.RunAndVerify(env_var_value=None,
- flag_value='1',
- should_fail=True)
+ self.RunAndVerify(env_var_value=None, flag_value='0', should_fail=False)
+ self.RunAndVerify(env_var_value=None, flag_value='1', should_fail=True)
def testThrowOnFailureFlagOverridesEnvVar(self):
"""Tests that --gtest_throw_on_failure overrides GTEST_THROW_ON_FAILURE."""
- self.RunAndVerify(env_var_value='0',
- flag_value='0',
- should_fail=False)
- self.RunAndVerify(env_var_value='0',
- flag_value='1',
- should_fail=True)
- self.RunAndVerify(env_var_value='1',
- flag_value='0',
- should_fail=False)
- self.RunAndVerify(env_var_value='1',
- flag_value='1',
- should_fail=True)
+ self.RunAndVerify(env_var_value='0', flag_value='0', should_fail=False)
+ self.RunAndVerify(env_var_value='0', flag_value='1', should_fail=True)
+ self.RunAndVerify(env_var_value='1', flag_value='0', should_fail=False)
+ self.RunAndVerify(env_var_value='1', flag_value='1', should_fail=True)
if __name__ == '__main__':
diff --git a/googletest/test/googletest-throw-on-failure-test_.cc b/googletest/test/googletest-throw-on-failure-test_.cc
index 83bb914c..3b81a5a1 100644
--- a/googletest/test/googletest-throw-on-failure-test_.cc
+++ b/googletest/test/googletest-throw-on-failure-test_.cc
@@ -27,18 +27,18 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// Tests Google Test's throw-on-failure mode with exceptions disabled.
//
// This program must be compiled with exceptions disabled. It will be
// invoked by googletest-throw-on-failure-test.py, and is expected to exit
// with non-zero in the throw-on-failure mode or 0 otherwise.
-#include "gtest/gtest.h"
+#include <stdio.h> // for fflush, fprintf, NULL, etc.
+#include <stdlib.h> // for exit
-#include <stdio.h> // for fflush, fprintf, NULL, etc.
-#include <stdlib.h> // for exit
-#include <exception> // for set_terminate
+#include <exception> // for set_terminate
+
+#include "gtest/gtest.h"
// This terminate handler aborts the program using exit() rather than abort().
// This avoids showing pop-ups on Windows systems and core dumps on Unix-like
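
A terminate handler like the one the comment above describes can be installed
with std::set_terminate(). A minimal sketch of the idea, with illustrative
names rather than the patch's actual code:

    #include <cstdio>
    #include <cstdlib>
    #include <exception>

    void ExitOnTerminate() {
      std::fprintf(stderr, "terminate() called; exiting.\n");
      std::fflush(nullptr);  // flush every open output stream
      std::exit(1);          // exit() rather than abort(): no Windows pop-up,
                             // no Unix core dump
    }

    int main() {
      std::set_terminate(ExitOnTerminate);
      // ... run the tests ...
    }
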
diff --git a/googletest/test/googletest-uninitialized-test.py b/googletest/test/googletest-uninitialized-test.py
index 69595a0d..e5af7c84 100755
--- a/googletest/test/googletest-uninitialized-test.py
+++ b/googletest/test/googletest-uninitialized-test.py
@@ -31,9 +31,11 @@
"""Verifies that Google Test warns the user when not initialized properly."""
-import gtest_test_utils
+from googletest.test import gtest_test_utils
-COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-uninitialized-test_')
+COMMAND = gtest_test_utils.GetTestExecutablePath(
+ 'googletest-uninitialized-test_'
+)
def Assert(condition):
@@ -54,11 +56,12 @@ def TestExitCodeAndOutput(command):
   # If 'command' exited cleanly, it must have printed the important notice.
p = gtest_test_utils.Subprocess(command)
if p.exited and p.exit_code == 0:
- Assert('IMPORTANT NOTICE' in p.output);
+ Assert('IMPORTANT NOTICE' in p.output)
Assert('InitGoogleTest' in p.output)
class GTestUninitializedTest(gtest_test_utils.TestCase):
+
def testExitCodeAndOutput(self):
TestExitCodeAndOutput(COMMAND)
diff --git a/googletest/test/googletest-uninitialized-test_.cc b/googletest/test/googletest-uninitialized-test_.cc
index b4434d51..88b61fce 100644
--- a/googletest/test/googletest-uninitialized-test_.cc
+++ b/googletest/test/googletest-uninitialized-test_.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
#include "gtest/gtest.h"
TEST(DummyTest, Dummy) {
@@ -37,6 +36,4 @@ TEST(DummyTest, Dummy) {
// testing::InitGoogleTest() being called first.
}
-int main() {
- return RUN_ALL_TESTS();
-}
+int main() { return RUN_ALL_TESTS(); }
diff --git a/googletest/test/gtest-typed-test2_test.cc b/googletest/test/gtest-typed-test2_test.cc
index e83ca2e1..f2eae120 100644
--- a/googletest/test/gtest-typed-test2_test.cc
+++ b/googletest/test/gtest-typed-test2_test.cc
@@ -27,11 +27,10 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
#include <vector>
-#include "test/gtest-typed-test_test.h"
#include "gtest/gtest.h"
+#include "test/gtest-typed-test_test.h"
// Tests that the same type-parameterized test case can be
// instantiated in different translation units linked together.
diff --git a/googletest/test/gtest-typed-test_test.cc b/googletest/test/gtest-typed-test_test.cc
index 5fc678cb..0cc8b211 100644
--- a/googletest/test/gtest-typed-test_test.cc
+++ b/googletest/test/gtest-typed-test_test.cc
@@ -27,18 +27,16 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
#include "test/gtest-typed-test_test.h"
#include <set>
+#include <string>
#include <type_traits>
#include <vector>
#include "gtest/gtest.h"
-#if _MSC_VER
GTEST_DISABLE_MSC_WARNINGS_PUSH_(4127 /* conditional expression is constant */)
-#endif // _MSC_VER
using testing::Test;
@@ -50,9 +48,7 @@ class CommonTest : public Test {
// For some technical reason, SetUpTestSuite() and TearDownTestSuite()
// must be public.
public:
- static void SetUpTestSuite() {
- shared_ = new T(5);
- }
+ static void SetUpTestSuite() { shared_ = new T(5); }
static void TearDownTestSuite() {
delete shared_;
@@ -130,8 +126,7 @@ TYPED_TEST(CommonTest, ValuesAreStillCorrect) {
// translation unit.
template <typename T>
-class TypedTest1 : public Test {
-};
+class TypedTest1 : public Test {};
// Verifies that the second argument of TYPED_TEST_SUITE can be a
// single type.
@@ -139,8 +134,7 @@ TYPED_TEST_SUITE(TypedTest1, int);
TYPED_TEST(TypedTest1, A) {}
template <typename T>
-class TypedTest2 : public Test {
-};
+class TypedTest2 : public Test {};
// Verifies that the second argument of TYPED_TEST_SUITE can be a
// Types<...> type list.
@@ -155,15 +149,12 @@ TYPED_TEST(TypedTest2, A) {}
namespace library1 {
template <typename T>
-class NumericTest : public Test {
-};
+class NumericTest : public Test {};
typedef Types<int, long> NumericTypes;
TYPED_TEST_SUITE(NumericTest, NumericTypes);
-TYPED_TEST(NumericTest, DefaultIsZero) {
- EXPECT_EQ(0, TypeParam());
-}
+TYPED_TEST(NumericTest, DefaultIsZero) { EXPECT_EQ(0, TypeParam()); }
} // namespace library1
@@ -265,8 +256,7 @@ TEST_F(TypedTestSuitePStateDeathTest, DetectsTestAfterRegistration) {
// and SetUp()/TearDown() work correctly in type-parameterized tests.
template <typename T>
-class DerivedTest : public CommonTest<T> {
-};
+class DerivedTest : public CommonTest<T> {};
TYPED_TEST_SUITE_P(DerivedTest);
@@ -290,8 +280,8 @@ TYPED_TEST_P(DerivedTest, ValuesAreStillCorrect) {
EXPECT_EQ(2, this->value_);
}
-REGISTER_TYPED_TEST_SUITE_P(DerivedTest,
- ValuesAreCorrect, ValuesAreStillCorrect);
+REGISTER_TYPED_TEST_SUITE_P(DerivedTest, ValuesAreCorrect,
+ ValuesAreStillCorrect);
typedef Types<short, long> MyTwoTypes;
INSTANTIATE_TYPED_TEST_SUITE_P(My, DerivedTest, MyTwoTypes);
@@ -334,14 +324,13 @@ class TypeParametrizedTestNames {
};
INSTANTIATE_TYPED_TEST_SUITE_P(CustomName, TypeParametrizedTestWithNames,
- TwoTypes, TypeParametrizedTestNames);
+ TwoTypes, TypeParametrizedTestNames);
// Tests that multiple TYPED_TEST_SUITE_P's can be defined in the same
// translation unit.
template <typename T>
-class TypedTestP1 : public Test {
-};
+class TypedTestP1 : public Test {};
TYPED_TEST_SUITE_P(TypedTestP1);
@@ -359,8 +348,7 @@ using IntBeforeRegisterTypedTestSuiteP = int;
REGISTER_TYPED_TEST_SUITE_P(TypedTestP1, A, B);
template <typename T>
-class TypedTestP2 : public Test {
-};
+class TypedTestP2 : public Test {};
TYPED_TEST_SUITE_P(TypedTestP2);
@@ -396,21 +384,17 @@ INSTANTIATE_TYPED_TEST_SUITE_P(My, ContainerTest, MyContainers);
namespace library2 {
template <typename T>
-class NumericTest : public Test {
-};
+class NumericTest : public Test {};
TYPED_TEST_SUITE_P(NumericTest);
-TYPED_TEST_P(NumericTest, DefaultIsZero) {
- EXPECT_EQ(0, TypeParam());
-}
+TYPED_TEST_P(NumericTest, DefaultIsZero) { EXPECT_EQ(0, TypeParam()); }
TYPED_TEST_P(NumericTest, ZeroIsLessThanOne) {
EXPECT_LT(TypeParam(0), TypeParam(1));
}
-REGISTER_TYPED_TEST_SUITE_P(NumericTest,
- DefaultIsZero, ZeroIsLessThanOne);
+REGISTER_TYPED_TEST_SUITE_P(NumericTest, DefaultIsZero, ZeroIsLessThanOne);
typedef Types<int, double> NumericTypes;
INSTANTIATE_TYPED_TEST_SUITE_P(My, NumericTest, NumericTypes);
@@ -418,20 +402,22 @@ static const char* GetTestName() {
return testing::UnitTest::GetInstance()->current_test_info()->name();
}
// Test the stripping of space from test names
-template <typename T> class TrimmedTest : public Test { };
+template <typename T>
+class TrimmedTest : public Test {};
TYPED_TEST_SUITE_P(TrimmedTest);
TYPED_TEST_P(TrimmedTest, Test1) { EXPECT_STREQ("Test1", GetTestName()); }
TYPED_TEST_P(TrimmedTest, Test2) { EXPECT_STREQ("Test2", GetTestName()); }
TYPED_TEST_P(TrimmedTest, Test3) { EXPECT_STREQ("Test3", GetTestName()); }
TYPED_TEST_P(TrimmedTest, Test4) { EXPECT_STREQ("Test4", GetTestName()); }
TYPED_TEST_P(TrimmedTest, Test5) { EXPECT_STREQ("Test5", GetTestName()); }
-REGISTER_TYPED_TEST_SUITE_P(
- TrimmedTest,
- Test1, Test2,Test3 , Test4 ,Test5 ); // NOLINT
-template <typename T1, typename T2> struct MyPair {};
+REGISTER_TYPED_TEST_SUITE_P(TrimmedTest, Test1, Test2, Test3, Test4,
+ Test5); // NOLINT
+template <typename T1, typename T2>
+struct MyPair {};
// Be sure to try a type with a comma in its name just in case it matters.
typedef Types<int, double, MyPair<int, int> > TrimTypes;
INSTANTIATE_TYPED_TEST_SUITE_P(My, TrimmedTest, TrimTypes);
} // namespace library2
+GTEST_DISABLE_MSC_WARNINGS_POP_() // 4127
diff --git a/googletest/test/gtest-typed-test_test.h b/googletest/test/gtest-typed-test_test.h
index 8ce559c9..f3ef0a59 100644
--- a/googletest/test/gtest-typed-test_test.h
+++ b/googletest/test/gtest-typed-test_test.h
@@ -40,21 +40,18 @@ using testing::Test;
// and gtest-typed-test2_test.cc.
template <typename T>
-class ContainerTest : public Test {
-};
+class ContainerTest : public Test {};
TYPED_TEST_SUITE_P(ContainerTest);
-TYPED_TEST_P(ContainerTest, CanBeDefaultConstructed) {
- TypeParam container;
-}
+TYPED_TEST_P(ContainerTest, CanBeDefaultConstructed) { TypeParam container; }
TYPED_TEST_P(ContainerTest, InitialSizeIsZero) {
TypeParam container;
EXPECT_EQ(0U, container.size());
}
-REGISTER_TYPED_TEST_SUITE_P(ContainerTest,
- CanBeDefaultConstructed, InitialSizeIsZero);
+REGISTER_TYPED_TEST_SUITE_P(ContainerTest, CanBeDefaultConstructed,
+ InitialSizeIsZero);
#endif // GOOGLETEST_TEST_GTEST_TYPED_TEST_TEST_H_
diff --git a/googletest/test/gtest-unittest-api_test.cc b/googletest/test/gtest-unittest-api_test.cc
index 8ef50583..2ea69273 100644
--- a/googletest/test/gtest-unittest-api_test.cc
+++ b/googletest/test/gtest-unittest-api_test.cc
@@ -32,11 +32,12 @@
// This file contains tests verifying correctness of data provided via
// UnitTest's public methods.
-#include "gtest/gtest.h"
-
#include <string.h> // For strcmp.
+
#include <algorithm>
+#include "gtest/gtest.h"
+
using ::testing::InitGoogleTest;
namespace testing {
@@ -56,13 +57,12 @@ class UnitTestHelper {
static TestSuite const** GetSortedTestSuites() {
UnitTest& unit_test = *UnitTest::GetInstance();
auto const** const test_suites = new const TestSuite*[static_cast<size_t>(
- unit_test.total_test_suite_count())];
+ unit_test.total_test_suite_count())];
for (int i = 0; i < unit_test.total_test_suite_count(); ++i)
test_suites[i] = unit_test.GetTestSuite(i);
- std::sort(test_suites,
- test_suites + unit_test.total_test_suite_count(),
+ std::sort(test_suites, test_suites + unit_test.total_test_suite_count(),
LessByName<TestSuite>());
return test_suites;
}
@@ -73,8 +73,7 @@ class UnitTestHelper {
UnitTest& unit_test = *UnitTest::GetInstance();
for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
const TestSuite* test_suite = unit_test.GetTestSuite(i);
- if (0 == strcmp(test_suite->name(), name))
- return test_suite;
+ if (0 == strcmp(test_suite->name(), name)) return test_suite;
}
return nullptr;
}
@@ -84,7 +83,7 @@ class UnitTestHelper {
// array.
static TestInfo const** GetSortedTests(const TestSuite* test_suite) {
TestInfo const** const tests = new const TestInfo*[static_cast<size_t>(
- test_suite->total_test_count())];
+ test_suite->total_test_count())];
for (int i = 0; i < test_suite->total_test_count(); ++i)
tests[i] = test_suite->GetTestInfo(i);
@@ -95,7 +94,8 @@ class UnitTestHelper {
}
};
-template <typename T> class TestSuiteWithCommentTest : public Test {};
+template <typename T>
+class TestSuiteWithCommentTest : public Test {};
TYPED_TEST_SUITE(TestSuiteWithCommentTest, Types<int>);
TYPED_TEST(TestSuiteWithCommentTest, Dummy) {}
@@ -106,7 +106,7 @@ const int kTypedTests = 1;
// Since tests can be run in any order, the values the accessors that track
// test execution (such as failed_test_count) can not be predicted.
TEST(ApiTest, UnitTestImmutableAccessorsWork) {
- UnitTest* unit_test = UnitTest::GetInstance();
+ const auto& unit_test = UnitTest::GetInstance();
ASSERT_EQ(2 + kTypedTestSuites, unit_test->total_test_suite_count());
EXPECT_EQ(1 + kTypedTestSuites, unit_test->test_suite_to_run_count());
@@ -224,7 +224,7 @@ TEST(DISABLED_Test, Dummy2) {}
class FinalSuccessChecker : public Environment {
protected:
void TearDown() override {
- UnitTest* unit_test = UnitTest::GetInstance();
+ const auto& unit_test = UnitTest::GetInstance();
EXPECT_EQ(1 + kTypedTestSuites, unit_test->successful_test_suite_count());
EXPECT_EQ(3 + kTypedTests, unit_test->successful_test_count());
@@ -319,7 +319,7 @@ class FinalSuccessChecker : public Environment {
} // namespace internal
} // namespace testing
-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
InitGoogleTest(&argc, argv);
AddGlobalTestEnvironment(new testing::internal::FinalSuccessChecker());
diff --git a/googletest/test/gtest_assert_by_exception_test.cc b/googletest/test/gtest_assert_by_exception_test.cc
index ada4cb30..f507eac4 100644
--- a/googletest/test/gtest_assert_by_exception_test.cc
+++ b/googletest/test/gtest_assert_by_exception_test.cc
@@ -27,16 +27,16 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// Tests Google Test's assert-by-exception mode with exceptions enabled.
-#include "gtest/gtest.h"
-
-#include <stdlib.h>
#include <stdio.h>
+#include <stdlib.h>
#include <string.h>
+
#include <stdexcept>
+#include "gtest/gtest.h"
+
class ThrowListener : public testing::EmptyTestEventListener {
void OnTestPartResult(const testing::TestPartResult& result) override {
if (result.type() == testing::TestPartResult::kFatalFailure) {
@@ -55,9 +55,7 @@ void Fail(const char* msg) {
exit(1);
}
-static void AssertFalse() {
- ASSERT_EQ(2, 3) << "Expected failure";
-}
+static void AssertFalse() { ASSERT_EQ(2, 3) << "Expected failure"; }
// Tests that an assertion failure throws a subclass of
// std::runtime_error.
@@ -65,21 +63,21 @@ TEST(Test, Test) {
// A successful assertion shouldn't throw.
try {
EXPECT_EQ(3, 3);
- } catch(...) {
+ } catch (...) {
Fail("A successful assertion wrongfully threw.");
}
   // A failed non-fatal assertion shouldn't throw either.
try {
EXPECT_EQ(3, 4);
- } catch(...) {
+ } catch (...) {
Fail("A failed non-fatal assertion wrongfully threw.");
}
// A failed assertion should throw.
try {
AssertFalse();
- } catch(const testing::AssertionException& e) {
+ } catch (const testing::AssertionException& e) {
if (strstr(e.what(), "Expected failure") != nullptr) throw;
printf("%s",
@@ -87,7 +85,7 @@ TEST(Test, Test) {
"but the message is incorrect. Instead of containing \"Expected "
"failure\", it is:\n");
Fail(e.what());
- } catch(...) {
+ } catch (...) {
Fail("A failed assertion threw the wrong type of exception.");
}
Fail("A failed assertion should've thrown but didn't.");
@@ -95,9 +93,7 @@ TEST(Test, Test) {
int kTestForContinuingTest = 0;
-TEST(Test, Test2) {
- kTestForContinuingTest = 1;
-}
+TEST(Test, Test2) { kTestForContinuingTest = 1; }
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
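
The ThrowListener at the top of this file follows the documented
assert-by-exception pattern: each fatal failure is converted into an
exception, so statements after a failed ASSERT_* never execute. A minimal
self-contained sketch of that pattern (registration shown with illustrative
names; it requires exceptions to be enabled):

    #include "gtest/gtest.h"

    class ThrowListener : public testing::EmptyTestEventListener {
      void OnTestPartResult(const testing::TestPartResult& result) override {
        if (result.type() == testing::TestPartResult::kFatalFailure) {
          // AssertionException derives from std::runtime_error, which is
          // what the try/catch blocks above rely on.
          throw testing::AssertionException(result);
        }
      }
    };

    int main(int argc, char** argv) {
      testing::InitGoogleTest(&argc, argv);
      testing::UnitTest::GetInstance()->listeners().Append(new ThrowListener);
      return RUN_ALL_TESTS();
    }
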
diff --git a/googletest/test/gtest_dirs_test.cc b/googletest/test/gtest_dirs_test.cc
new file mode 100644
index 00000000..1a5b63d3
--- /dev/null
+++ b/googletest/test/gtest_dirs_test.cc
@@ -0,0 +1,101 @@
+#include <sys/stat.h>
+
+#include <cstdlib>
+#include <cstring>
+#include <string>
+
+#include "gtest/gtest.h"
+#include "gtest/internal/gtest-port.h"
+
+#if GTEST_HAS_FILE_SYSTEM
+
+namespace {
+
+class SetEnv {
+ public:
+ // Sets the environment value with name `name` to `value`, unless `value` is
+ // nullptr, in which case it unsets it. Restores the original value on
+ // destruction.
+ SetEnv(const char* name, const char* value) : name_(name) {
+ const char* old_value = getenv(name);
+ if (old_value != nullptr) {
+ saved_value_ = old_value;
+ have_saved_value_ = true;
+ }
+ if (value == nullptr) {
+ GTEST_CHECK_POSIX_SUCCESS_(unsetenv(name));
+ } else {
+ GTEST_CHECK_POSIX_SUCCESS_(setenv(name, value, 1 /*overwrite*/));
+ }
+ }
+
+ ~SetEnv() {
+ if (have_saved_value_) {
+ GTEST_CHECK_POSIX_SUCCESS_(
+ setenv(name_.c_str(), saved_value_.c_str(), 1 /*overwrite*/));
+ } else {
+ GTEST_CHECK_POSIX_SUCCESS_(unsetenv(name_.c_str()));
+ }
+ }
+
+ private:
+ std::string name_;
+ bool have_saved_value_ = false;
+ std::string saved_value_;
+};
+
+class MakeTempDir {
+ public:
+ // Creates a directory with a unique name including `testname`.
+ // The destructor removes it.
+ explicit MakeTempDir(const std::string& testname) {
+ // mkdtemp requires that the last 6 characters of the input pattern
+ // are Xs, and the string is modified by replacing those characters.
+ std::string pattern = "/tmp/" + testname + "_XXXXXX";
+ GTEST_CHECK_(mkdtemp(pattern.data()) != nullptr);
+ dirname_ = pattern;
+ }
+
+ ~MakeTempDir() { GTEST_CHECK_POSIX_SUCCESS_(rmdir(dirname_.c_str())); }
+
+ const char* DirName() const { return dirname_.c_str(); }
+
+ private:
+ std::string dirname_;
+};
+
+bool StartsWith(const std::string& str, const std::string& prefix) {
+ return str.substr(0, prefix.size()) == prefix;
+}
+
+TEST(TempDirTest, InEnvironment) {
+ // Since the test infrastructure might be verifying directory existence or
+ // even creating subdirectories, we need to be careful that the directories we
+ // specify are actually valid.
+ MakeTempDir temp_dir("TempDirTest_InEnvironment");
+ SetEnv set_env("TEST_TMPDIR", temp_dir.DirName());
+ EXPECT_TRUE(StartsWith(testing::TempDir(), temp_dir.DirName()));
+}
+
+TEST(TempDirTest, NotInEnvironment) {
+ SetEnv set_env("TEST_TMPDIR", nullptr);
+ EXPECT_NE(testing::TempDir(), "");
+}
+
+TEST(SrcDirTest, InEnvironment) {
+ // Since the test infrastructure might be verifying directory existence or
+ // even creating subdirectories, we need to be careful that the directories we
+ // specify are actually valid.
+ MakeTempDir temp_dir("SrcDirTest_InEnvironment");
+ SetEnv set_env("TEST_SRCDIR", temp_dir.DirName());
+ EXPECT_TRUE(StartsWith(testing::SrcDir(), temp_dir.DirName()));
+}
+
+TEST(SrcDirTest, NotInEnvironment) {
+ SetEnv set_env("TEST_SRCDIR", nullptr);
+ EXPECT_NE(testing::SrcDir(), "");
+}
+
+#endif // GTEST_HAS_FILE_SYSTEM
+
+} // namespace
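
Two helpers in this new file wrap POSIX APIs whose contracts are easy to get
wrong. As the MakeTempDir comment notes, mkdtemp() requires the template to
end in exactly six 'X' characters and rewrites them in place. A standalone
sketch of that contract (illustrative, and relying on C++17's non-const
std::string::data()):

    #include <stdlib.h>  // mkdtemp (POSIX)
    #include <unistd.h>  // rmdir

    #include <cstdio>
    #include <string>

    int main() {
      std::string pattern = "/tmp/demo_XXXXXX";
      if (mkdtemp(pattern.data()) == nullptr) return 1;
      // pattern now holds the actual directory name, e.g. /tmp/demo_a1B2c3.
      std::printf("created %s\n", pattern.c_str());
      return rmdir(pattern.c_str());
    }
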
diff --git a/googletest/test/gtest_environment_test.cc b/googletest/test/gtest_environment_test.cc
index 064bfc50..122eaf3c 100644
--- a/googletest/test/gtest_environment_test.cc
+++ b/googletest/test/gtest_environment_test.cc
@@ -30,20 +30,15 @@
//
// Tests using global test environments.
-#include <stdlib.h>
#include <stdio.h>
+#include <stdlib.h>
+
#include "gtest/gtest.h"
#include "src/gtest-internal-inl.h"
-namespace testing {
-GTEST_DECLARE_string_(filter);
-}
-
namespace {
-enum FailureType {
- NO_FAILURE, NON_FATAL_FAILURE, FATAL_FAILURE
-};
+enum FailureType { NO_FAILURE, NON_FATAL_FAILURE, FATAL_FAILURE };
// For testing using global test environments.
class MyEnvironment : public testing::Environment {
@@ -83,9 +78,7 @@ class MyEnvironment : public testing::Environment {
// We call this function to set the type of failure SetUp() should
// generate.
- void set_failure_in_set_up(FailureType type) {
- failure_in_set_up_ = type;
- }
+ void set_failure_in_set_up(FailureType type) { failure_in_set_up_ = type; }
// Was SetUp() run?
bool set_up_was_run() const { return set_up_was_run_; }
@@ -104,9 +97,7 @@ bool test_was_run;
// The sole purpose of this TEST is to enable us to check whether it
// was run.
-TEST(FooTest, Bar) {
- test_was_run = true;
-}
+TEST(FooTest, Bar) { test_was_run = true; }
// Prints the message and aborts the program if condition is false.
void Check(bool condition, const char* msg) {
@@ -130,7 +121,7 @@ int RunAllTests(MyEnvironment* env, FailureType failure) {
} // namespace
-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
// Registers a global test environment, and verifies that the
@@ -174,7 +165,7 @@ int main(int argc, char **argv) {
// Verifies that RUN_ALL_TESTS() doesn't do global set-up or
// tear-down when there is no test to run.
- testing::GTEST_FLAG(filter) = "-*";
+ GTEST_FLAG_SET(filter, "-*");
Check(RunAllTests(env, NO_FAILURE) == 0,
"RUN_ALL_TESTS() should return zero, as there is no test to run.");
Check(!env->set_up_was_run(),
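
For context on the API this test drives: a global environment is registered
once before RUN_ALL_TESTS(), Google Test takes ownership of the pointer, and
SetUp()/TearDown() bracket the entire test program rather than any single
test. A minimal sketch with an illustrative environment class:

    #include "gtest/gtest.h"

    class MyEnv : public testing::Environment {
     public:
      void SetUp() override { /* acquire process-wide resources */ }
      void TearDown() override { /* release them */ }
    };

    int main(int argc, char** argv) {
      testing::InitGoogleTest(&argc, argv);
      // Google Test deletes the environment object after all tests finish.
      testing::AddGlobalTestEnvironment(new MyEnv);
      return RUN_ALL_TESTS();
    }
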
diff --git a/googletest/test/gtest_help_test.py b/googletest/test/gtest_help_test.py
index 8d953bbd..38fc90ff 100755
--- a/googletest/test/gtest_help_test.py
+++ b/googletest/test/gtest_help_test.py
@@ -39,46 +39,75 @@ SYNOPSIS
import os
import re
-import gtest_test_utils
+import sys
+from googletest.test import gtest_test_utils
+FREEBSD = ('FreeBSD', 'GNU/kFreeBSD')
+NETBSD = ('NetBSD',)
+OPENBSD = ('OpenBSD',)
+
+
+def is_bsd_based_os() -> bool:
+ """Determine whether or not the OS is BSD-based."""
+ if os.name != 'posix':
+ return False
+
+ return os.uname()[0] in (FREEBSD + NETBSD + OPENBSD)
+
+
+IS_DARWIN = os.name == 'posix' and os.uname()[0] == 'Darwin'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
-IS_GNUKFREEBSD = os.name == 'posix' and os.uname()[0] == 'GNU/kFreeBSD'
+IS_GNUHURD = os.name == 'posix' and os.uname()[0] == 'GNU'
IS_WINDOWS = os.name == 'nt'
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_')
FLAG_PREFIX = '--gtest_'
DEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style'
STREAM_RESULT_TO_FLAG = FLAG_PREFIX + 'stream_result_to'
-UNKNOWN_FLAG = FLAG_PREFIX + 'unknown_flag_for_testing'
LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
-INCORRECT_FLAG_VARIANTS = [re.sub('^--', '-', LIST_TESTS_FLAG),
- re.sub('^--', '/', LIST_TESTS_FLAG),
- re.sub('_', '-', LIST_TESTS_FLAG)]
INTERNAL_FLAG_FOR_TESTING = FLAG_PREFIX + 'internal_flag_for_testing'
-SUPPORTS_DEATH_TESTS = "DeathTest" in gtest_test_utils.Subprocess(
- [PROGRAM_PATH, LIST_TESTS_FLAG]).output
+SUPPORTS_DEATH_TESTS = (
+ 'DeathTest'
+ in gtest_test_utils.Subprocess([PROGRAM_PATH, LIST_TESTS_FLAG]).output
+)
+
+HAS_ABSL_FLAGS = '--has_absl_flags' in sys.argv
# The help message must match this regex.
HELP_REGEX = re.compile(
- FLAG_PREFIX + r'list_tests.*' +
- FLAG_PREFIX + r'filter=.*' +
- FLAG_PREFIX + r'also_run_disabled_tests.*' +
- FLAG_PREFIX + r'repeat=.*' +
- FLAG_PREFIX + r'shuffle.*' +
- FLAG_PREFIX + r'random_seed=.*' +
- FLAG_PREFIX + r'color=.*' +
- FLAG_PREFIX + r'brief.*' +
- FLAG_PREFIX + r'print_time.*' +
- FLAG_PREFIX + r'output=.*' +
- FLAG_PREFIX + r'break_on_failure.*' +
- FLAG_PREFIX + r'throw_on_failure.*' +
- FLAG_PREFIX + r'catch_exceptions=0.*',
- re.DOTALL)
-
-
-def RunWithFlag(flag):
+ FLAG_PREFIX
+ + r'list_tests.*'
+ + FLAG_PREFIX
+ + r'filter=.*'
+ + FLAG_PREFIX
+ + r'also_run_disabled_tests.*'
+ + FLAG_PREFIX
+ + r'repeat=.*'
+ + FLAG_PREFIX
+ + r'shuffle.*'
+ + FLAG_PREFIX
+ + r'random_seed=.*'
+ + FLAG_PREFIX
+ + r'color=.*'
+ + FLAG_PREFIX
+ + r'brief.*'
+ + FLAG_PREFIX
+ + r'print_time.*'
+ + FLAG_PREFIX
+ + r'output=.*'
+ + FLAG_PREFIX
+ + r'break_on_failure.*'
+ + FLAG_PREFIX
+ + r'throw_on_failure.*'
+ + FLAG_PREFIX
+ + r'catch_exceptions=0.*',
+ re.DOTALL,
+)
+
+
+def run_with_flag(flag):
"""Runs gtest_help_test_ with the given flag.
Returns:
@@ -98,75 +127,57 @@ def RunWithFlag(flag):
class GTestHelpTest(gtest_test_utils.TestCase):
"""Tests the --help flag and its equivalent forms."""
- def TestHelpFlag(self, flag):
+ def test_prints_help_with_full_flag(self):
"""Verifies correct behavior when help flag is specified.
The right message must be printed and the tests must
     be skipped when the given flag is specified.
-
- Args:
- flag: A flag to pass to the binary or None.
"""
- exit_code, output = RunWithFlag(flag)
- self.assertEquals(0, exit_code)
- self.assert_(HELP_REGEX.search(output), output)
+ exit_code, output = run_with_flag('--help')
+ if HAS_ABSL_FLAGS:
+ # The Abseil flags library prints the ProgramUsageMessage() with
+ # --help and returns 1.
+ self.assertEqual(1, exit_code)
+ else:
+ self.assertEqual(0, exit_code)
+
+ self.assertTrue(HELP_REGEX.search(output), output)
- if IS_LINUX or IS_GNUKFREEBSD:
- self.assert_(STREAM_RESULT_TO_FLAG in output, output)
+ if IS_DARWIN or IS_LINUX or IS_GNUHURD or is_bsd_based_os():
+ self.assertIn(STREAM_RESULT_TO_FLAG, output)
else:
- self.assert_(STREAM_RESULT_TO_FLAG not in output, output)
+ self.assertNotIn(STREAM_RESULT_TO_FLAG, output)
if SUPPORTS_DEATH_TESTS and not IS_WINDOWS:
- self.assert_(DEATH_TEST_STYLE_FLAG in output, output)
+ self.assertIn(DEATH_TEST_STYLE_FLAG, output)
else:
- self.assert_(DEATH_TEST_STYLE_FLAG not in output, output)
+ self.assertNotIn(DEATH_TEST_STYLE_FLAG, output)
- def TestNonHelpFlag(self, flag):
+ def test_runs_tests_without_help_flag(self):
"""Verifies correct behavior when no help flag is specified.
Verifies that when no help flag is specified, the tests are run
and the help message is not printed.
-
- Args:
- flag: A flag to pass to the binary or None.
"""
- exit_code, output = RunWithFlag(flag)
- self.assert_(exit_code != 0)
- self.assert_(not HELP_REGEX.search(output), output)
-
- def testPrintsHelpWithFullFlag(self):
- self.TestHelpFlag('--help')
-
- def testPrintsHelpWithShortFlag(self):
- self.TestHelpFlag('-h')
-
- def testPrintsHelpWithQuestionFlag(self):
- self.TestHelpFlag('-?')
-
- def testPrintsHelpWithWindowsStyleQuestionFlag(self):
- self.TestHelpFlag('/?')
+ exit_code, output = run_with_flag(None)
+ self.assertNotEqual(exit_code, 0)
+ self.assertFalse(HELP_REGEX.search(output), output)
- def testPrintsHelpWithUnrecognizedGoogleTestFlag(self):
- self.TestHelpFlag(UNKNOWN_FLAG)
+ def test_runs_tests_with_gtest_internal_flag(self):
+ """Verifies correct behavior when internal testing flag is specified.
- def testPrintsHelpWithIncorrectFlagStyle(self):
- for incorrect_flag in INCORRECT_FLAG_VARIANTS:
- self.TestHelpFlag(incorrect_flag)
-
- def testRunsTestsWithoutHelpFlag(self):
- """Verifies that when no help flag is specified, the tests are run
- and the help message is not printed."""
-
- self.TestNonHelpFlag(None)
-
- def testRunsTestsWithGtestInternalFlag(self):
- """Verifies that the tests are run and no help message is printed when
- a flag starting with Google Test prefix and 'internal_' is supplied."""
+ Verifies that the tests are run and no help message is printed when
+ a flag starting with Google Test prefix and 'internal_' is supplied.
+ """
- self.TestNonHelpFlag(INTERNAL_FLAG_FOR_TESTING)
+ exit_code, output = run_with_flag(INTERNAL_FLAG_FOR_TESTING)
+ self.assertNotEqual(exit_code, 0)
+ self.assertFalse(HELP_REGEX.search(output), output)
if __name__ == '__main__':
+ if '--has_absl_flags' in sys.argv:
+ sys.argv.remove('--has_absl_flags')
gtest_test_utils.Main()
diff --git a/googletest/test/gtest_help_test_.cc b/googletest/test/gtest_help_test_.cc
index 750ae6ce..18b5f3cd 100644
--- a/googletest/test/gtest_help_test_.cc
+++ b/googletest/test/gtest_help_test_.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// This program is meant to be run by gtest_help_test.py. Do not run
// it directly.
@@ -40,6 +39,6 @@ TEST(HelpFlagTest, ShouldNotBeRun) {
ASSERT_TRUE(false) << "Tests shouldn't be run when --help is specified.";
}
-#if GTEST_HAS_DEATH_TEST
+#ifdef GTEST_HAS_DEATH_TEST
TEST(DeathTest, UsedByPythonScriptToDetectSupportForDeathTestsInThisBinary) {}
#endif
diff --git a/googletest/test/gtest_json_test_utils.py b/googletest/test/gtest_json_test_utils.py
index 62bbfc28..86a5925b 100644
--- a/googletest/test/gtest_json_test_utils.py
+++ b/googletest/test/gtest_json_test_utils.py
@@ -42,6 +42,7 @@ def normalize(obj):
Normalized output without any references to transient information that may
change from run to run.
"""
+
def _normalize(key, value):
if key == 'time':
return re.sub(r'^\d+(\.\d+)?s$', '*', value)
@@ -50,8 +51,11 @@ def normalize(obj):
elif key == 'failure':
value = re.sub(r'^.*[/\\](.*:)\d+\n', '\\1*\n', value)
return re.sub(r'Stack trace:\n(.|\n)*', 'Stack trace:\n*', value)
+ elif key == 'file':
+ return re.sub(r'^.*[/\\](.*)', '\\1', value)
else:
return normalize(value)
+
if isinstance(obj, dict):
return {k: _normalize(k, v) for k, v in obj.items()}
if isinstance(obj, list):
diff --git a/googletest/test/gtest_list_output_unittest.py b/googletest/test/gtest_list_output_unittest.py
index a442fc16..afd521d2 100644
--- a/googletest/test/gtest_list_output_unittest.py
+++ b/googletest/test/gtest_list_output_unittest.py
@@ -40,7 +40,7 @@ This script tests such functionality by invoking gtest_list_output_unittest_
import os
import re
-import gtest_test_utils
+from googletest.test import gtest_test_utils
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
GTEST_OUTPUT_FLAG = '--gtest_output'
@@ -224,8 +224,7 @@ EXPECTED_JSON = """{
class GTestListTestsOutputUnitTest(gtest_test_utils.TestCase):
- """Unit test for Google Test's list tests with output to file functionality.
- """
+ """Unit test for Google Test's list tests with output to file functionality."""
def testXml(self):
"""Verifies XML output for listing tests in a Google Test binary.
@@ -244,19 +243,22 @@ class GTestListTestsOutputUnitTest(gtest_test_utils.TestCase):
self._TestOutput('json', EXPECTED_JSON)
def _GetOutput(self, out_format):
- file_path = os.path.join(gtest_test_utils.GetTempDir(),
- 'test_out.' + out_format)
+ file_path = os.path.join(
+ gtest_test_utils.GetTempDir(), 'test_out.' + out_format
+ )
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
- 'gtest_list_output_unittest_')
+ 'gtest_list_output_unittest_'
+ )
- command = ([
+ command = [
gtest_prog_path,
'%s=%s:%s' % (GTEST_OUTPUT_FLAG, out_format, file_path),
- '--gtest_list_tests'
- ])
+ '--gtest_list_tests',
+ ]
environ_copy = os.environ.copy()
p = gtest_test_utils.Subprocess(
- command, env=environ_copy, working_dir=gtest_test_utils.GetTempDir())
+ command, env=environ_copy, working_dir=gtest_test_utils.GetTempDir()
+ )
self.assertTrue(p.exited)
self.assertEqual(0, p.exit_code)
@@ -275,9 +277,10 @@ class GTestListTestsOutputUnitTest(gtest_test_utils.TestCase):
expected_line_re = re.compile(expected_line.strip())
self.assertTrue(
expected_line_re.match(actual_line.strip()),
- ('actual output of "%s",\n'
- 'which does not match expected regex of "%s"\n'
- 'on line %d' % (actual, expected_output, line_count)))
+ 'actual output of "%s",\n'
+ 'which does not match expected regex of "%s"\n'
+ 'on line %d' % (actual, expected_output, line_count),
+ )
line_count = line_count + 1
diff --git a/googletest/test/gtest_main_unittest.cc b/googletest/test/gtest_main_unittest.cc
index eddedeab..29cd5510 100644
--- a/googletest/test/gtest_main_unittest.cc
+++ b/googletest/test/gtest_main_unittest.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
#include "gtest/gtest.h"
// Tests that we don't have to define main() when we link to
@@ -35,8 +34,7 @@
namespace {
-TEST(GTestMainTest, ShouldSucceed) {
-}
+TEST(GTestMainTest, ShouldSucceed) {}
} // namespace
diff --git a/googletest/test/gtest_pred_impl_unittest.cc b/googletest/test/gtest_pred_impl_unittest.cc
index bbef9947..033e2d9f 100644
--- a/googletest/test/gtest_pred_impl_unittest.cc
+++ b/googletest/test/gtest_pred_impl_unittest.cc
@@ -27,9 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// This file is AUTOMATICALLY GENERATED on 11/05/2019 by command
-// 'gen_gtest_pred_impl.py 5'. DO NOT EDIT BY HAND!
-
// Regression test for gtest_pred_impl.h
//
// This file is generated by a script and quite long. If you intend to
@@ -48,9 +45,10 @@
// stand-alone regression test.
#include <iostream>
+#include <ostream>
-#include "gtest/gtest.h"
#include "gtest/gtest-spi.h"
+#include "gtest/gtest.h"
// A user-defined data type.
struct Bool {
@@ -80,12 +78,8 @@ bool PredFunction1(T1 v1) {
// The following two functions are needed because a compiler doesn't have
// a context yet to know which template function must be instantiated.
-bool PredFunction1Int(int v1) {
- return v1 > 0;
-}
-bool PredFunction1Bool(Bool v1) {
- return v1 > 0;
-}
+bool PredFunction1Int(int v1) { return v1 > 0; }
+bool PredFunction1Bool(Bool v1) { return v1 > 0; }
// A unary predicate functor.
struct PredFunctor1 {
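(Aside on the wrapper functions above: a function template name by itself does not denote a unique function, so it cannot be handed to EXPECT_PRED1 for the macro to deduce; taking a concrete instantiation or a function pointer resolves the ambiguity. A hypothetical sketch, not part of this change:

    #include "gtest/gtest.h"

    template <typename T>
    bool IsPositive(T v) { return v > 0; }

    TEST(WrapperSketch, WhyWrappersExist) {
      // EXPECT_PRED1(IsPositive, 5);    // would not compile: which IsPositive?
      EXPECT_PRED1(IsPositive<int>, 5);  // an explicit instantiation is concrete
      bool (*pred)(int) = IsPositive;    // or force a choice via a pointer
      EXPECT_PRED1(pred, 5);
    }
)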
@@ -97,22 +91,17 @@ struct PredFunctor1 {
// A unary predicate-formatter function.
template <typename T1>
-testing::AssertionResult PredFormatFunction1(const char* e1,
- const T1& v1) {
- if (PredFunction1(v1))
- return testing::AssertionSuccess();
+testing::AssertionResult PredFormatFunction1(const char* e1, const T1& v1) {
+ if (PredFunction1(v1)) return testing::AssertionSuccess();
return testing::AssertionFailure()
- << e1
- << " is expected to be positive, but evaluates to "
- << v1 << ".";
+ << e1 << " is expected to be positive, but evaluates to " << v1 << ".";
}
// A unary predicate-formatter functor.
struct PredFormatFunctor1 {
template <typename T1>
- testing::AssertionResult operator()(const char* e1,
- const T1& v1) const {
+ testing::AssertionResult operator()(const char* e1, const T1& v1) const {
return PredFormatFunction1(e1, v1);
}
};
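(How the formatter is consumed, as a hypothetical sketch: EXPECT_PRED_FORMAT1 passes the argument's source text as e1 alongside its value, which is what makes the failure message self-describing.

    TEST(FormatterSketch, MessageShape) {
      int balance = -3;
      // On failure, e1 receives the literal expression text "balance", so the
      // formatter above reports:
      //   balance is expected to be positive, but evaluates to -3.
      EXPECT_PRED_FORMAT1(PredFormatFunction1, balance);
    }
)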
@@ -130,13 +119,12 @@ class Predicate1Test : public testing::Test {
void TearDown() override {
// Verifies that each of the predicate's arguments was evaluated
// exactly once.
- EXPECT_EQ(1, n1_) <<
- "The predicate assertion didn't evaluate argument 2 "
- "exactly once.";
+ EXPECT_EQ(1, n1_) << "The predicate assertion didn't evaluate argument 2 "
+ "exactly once.";
// Verifies that the control flow in the test function is expected.
if (expected_to_finish_ && !finished_) {
- FAIL() << "The predicate assertion unexpactedly aborted the test.";
+ FAIL() << "The predicate assertion unexpectedly aborted the test.";
} else if (!expected_to_finish_ && finished_) {
FAIL() << "The failed predicate assertion didn't abort the test "
"as expected.";
@@ -164,104 +152,100 @@ typedef Predicate1Test ASSERT_PRED1Test;
// Tests a successful EXPECT_PRED1 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED1Test, FunctionOnBuiltInTypeSuccess) {
- EXPECT_PRED1(PredFunction1Int,
- ++n1_);
+ EXPECT_PRED1(PredFunction1Int, ++n1_);
finished_ = true;
}
// Tests a successful EXPECT_PRED1 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED1Test, FunctionOnUserTypeSuccess) {
- EXPECT_PRED1(PredFunction1Bool,
- Bool(++n1_));
+ EXPECT_PRED1(PredFunction1Bool, Bool(++n1_));
finished_ = true;
}
// Tests a successful EXPECT_PRED1 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED1Test, FunctorOnBuiltInTypeSuccess) {
- EXPECT_PRED1(PredFunctor1(),
- ++n1_);
+ EXPECT_PRED1(PredFunctor1(), ++n1_);
finished_ = true;
}
// Tests a successful EXPECT_PRED1 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED1Test, FunctorOnUserTypeSuccess) {
- EXPECT_PRED1(PredFunctor1(),
- Bool(++n1_));
+ EXPECT_PRED1(PredFunctor1(), Bool(++n1_));
finished_ = true;
}
// Tests a failed EXPECT_PRED1 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED1Test, FunctionOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED1(PredFunction1Int,
- n1_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED1(PredFunction1Int, n1_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED1 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED1Test, FunctionOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED1(PredFunction1Bool,
- Bool(n1_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED1(PredFunction1Bool, Bool(n1_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED1 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED1Test, FunctorOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED1(PredFunctor1(),
- n1_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED1(PredFunctor1(), n1_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED1 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED1Test, FunctorOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED1(PredFunctor1(),
- Bool(n1_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED1(PredFunctor1(), Bool(n1_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful ASSERT_PRED1 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED1Test, FunctionOnBuiltInTypeSuccess) {
- ASSERT_PRED1(PredFunction1Int,
- ++n1_);
+ ASSERT_PRED1(PredFunction1Int, ++n1_);
finished_ = true;
}
// Tests a successful ASSERT_PRED1 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED1Test, FunctionOnUserTypeSuccess) {
- ASSERT_PRED1(PredFunction1Bool,
- Bool(++n1_));
+ ASSERT_PRED1(PredFunction1Bool, Bool(++n1_));
finished_ = true;
}
// Tests a successful ASSERT_PRED1 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED1Test, FunctorOnBuiltInTypeSuccess) {
- ASSERT_PRED1(PredFunctor1(),
- ++n1_);
+ ASSERT_PRED1(PredFunctor1(), ++n1_);
finished_ = true;
}
// Tests a successful ASSERT_PRED1 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED1Test, FunctorOnUserTypeSuccess) {
- ASSERT_PRED1(PredFunctor1(),
- Bool(++n1_));
+ ASSERT_PRED1(PredFunctor1(), Bool(++n1_));
finished_ = true;
}
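(The expected_to_finish_ = false lines in the failure cases below encode the fatal/non-fatal split: a failed ASSERT_* returns from the current function immediately, while a failed EXPECT_* lets execution continue. A minimal hypothetical sketch of that control flow:

    // ASSERT_* may only appear in functions returning void.
    void AssertPositiveOrBail(int v, bool* reached_end) {
      ASSERT_PRED1(PredFunction1Int, v);  // fatal on failure: returns here
      *reached_end = true;                // skipped whenever v <= 0
    }
)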
@@ -269,147 +253,147 @@ TEST_F(ASSERT_PRED1Test, FunctorOnUserTypeSuccess) {
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED1Test, FunctionOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED1(PredFunction1Int,
- n1_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED1(PredFunction1Int, n1_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED1 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED1Test, FunctionOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED1(PredFunction1Bool,
- Bool(n1_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED1(PredFunction1Bool, Bool(n1_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED1 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED1Test, FunctorOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED1(PredFunctor1(),
- n1_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED1(PredFunctor1(), n1_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED1 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED1Test, FunctorOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED1(PredFunctor1(),
- Bool(n1_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED1(PredFunctor1(), Bool(n1_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful EXPECT_PRED_FORMAT1 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnBuiltInTypeSuccess) {
- EXPECT_PRED_FORMAT1(PredFormatFunction1,
- ++n1_);
+ EXPECT_PRED_FORMAT1(PredFormatFunction1, ++n1_);
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT1 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnUserTypeSuccess) {
- EXPECT_PRED_FORMAT1(PredFormatFunction1,
- Bool(++n1_));
+ EXPECT_PRED_FORMAT1(PredFormatFunction1, Bool(++n1_));
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT1 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnBuiltInTypeSuccess) {
- EXPECT_PRED_FORMAT1(PredFormatFunctor1(),
- ++n1_);
+ EXPECT_PRED_FORMAT1(PredFormatFunctor1(), ++n1_);
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT1 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnUserTypeSuccess) {
- EXPECT_PRED_FORMAT1(PredFormatFunctor1(),
- Bool(++n1_));
+ EXPECT_PRED_FORMAT1(PredFormatFunctor1(), Bool(++n1_));
finished_ = true;
}
// Tests a failed EXPECT_PRED_FORMAT1 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT1(PredFormatFunction1,
- n1_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT1(PredFormatFunction1, n1_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT1 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT1(PredFormatFunction1,
- Bool(n1_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT1(PredFormatFunction1, Bool(n1_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT1 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT1(PredFormatFunctor1(),
- n1_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT1(PredFormatFunctor1(), n1_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT1 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT1(PredFormatFunctor1(),
- Bool(n1_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT1(PredFormatFunctor1(), Bool(n1_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful ASSERT_PRED_FORMAT1 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnBuiltInTypeSuccess) {
- ASSERT_PRED_FORMAT1(PredFormatFunction1,
- ++n1_);
+ ASSERT_PRED_FORMAT1(PredFormatFunction1, ++n1_);
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT1 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnUserTypeSuccess) {
- ASSERT_PRED_FORMAT1(PredFormatFunction1,
- Bool(++n1_));
+ ASSERT_PRED_FORMAT1(PredFormatFunction1, Bool(++n1_));
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT1 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnBuiltInTypeSuccess) {
- ASSERT_PRED_FORMAT1(PredFormatFunctor1(),
- ++n1_);
+ ASSERT_PRED_FORMAT1(PredFormatFunctor1(), ++n1_);
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT1 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnUserTypeSuccess) {
- ASSERT_PRED_FORMAT1(PredFormatFunctor1(),
- Bool(++n1_));
+ ASSERT_PRED_FORMAT1(PredFormatFunctor1(), Bool(++n1_));
finished_ = true;
}
@@ -417,44 +401,48 @@ TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnUserTypeSuccess) {
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT1(PredFormatFunction1,
- n1_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT1(PredFormatFunction1, n1_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT1 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT1(PredFormatFunction1,
- Bool(n1_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT1(PredFormatFunction1, Bool(n1_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT1 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT1(PredFormatFunctor1(),
- n1_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT1(PredFormatFunctor1(), n1_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT1 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT1(PredFormatFunctor1(),
- Bool(n1_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT1(PredFormatFunctor1(), Bool(n1_++));
+ finished_ = true;
+ },
+ "");
}
// Sample functions/functors for testing binary predicate assertions.
@@ -466,44 +454,33 @@ bool PredFunction2(T1 v1, T2 v2) {
// The following two functions are needed because a compiler doesn't have
// a context yet to know which template function must be instantiated.
-bool PredFunction2Int(int v1, int v2) {
- return v1 + v2 > 0;
-}
-bool PredFunction2Bool(Bool v1, Bool v2) {
- return v1 + v2 > 0;
-}
+bool PredFunction2Int(int v1, int v2) { return v1 + v2 > 0; }
+bool PredFunction2Bool(Bool v1, Bool v2) { return v1 + v2 > 0; }
// A binary predicate functor.
struct PredFunctor2 {
template <typename T1, typename T2>
- bool operator()(const T1& v1,
- const T2& v2) {
+ bool operator()(const T1& v1, const T2& v2) {
return v1 + v2 > 0;
}
};
// A binary predicate-formatter function.
template <typename T1, typename T2>
-testing::AssertionResult PredFormatFunction2(const char* e1,
- const char* e2,
- const T1& v1,
- const T2& v2) {
- if (PredFunction2(v1, v2))
- return testing::AssertionSuccess();
+testing::AssertionResult PredFormatFunction2(const char* e1, const char* e2,
+ const T1& v1, const T2& v2) {
+ if (PredFunction2(v1, v2)) return testing::AssertionSuccess();
return testing::AssertionFailure()
- << e1 << " + " << e2
- << " is expected to be positive, but evaluates to "
- << v1 + v2 << ".";
+ << e1 << " + " << e2
+ << " is expected to be positive, but evaluates to " << v1 + v2 << ".";
}
// A binary predicate-formatter functor.
struct PredFormatFunctor2 {
template <typename T1, typename T2>
- testing::AssertionResult operator()(const char* e1,
- const char* e2,
- const T1& v1,
- const T2& v2) const {
+ testing::AssertionResult operator()(const char* e1, const char* e2,
+ const T1& v1, const T2& v2) const {
return PredFormatFunction2(e1, e2, v1, v2);
}
};
@@ -521,16 +498,14 @@ class Predicate2Test : public testing::Test {
void TearDown() override {
// Verifies that each of the predicate's arguments was evaluated
// exactly once.
- EXPECT_EQ(1, n1_) <<
- "The predicate assertion didn't evaluate argument 2 "
- "exactly once.";
- EXPECT_EQ(1, n2_) <<
- "The predicate assertion didn't evaluate argument 3 "
- "exactly once.";
+ EXPECT_EQ(1, n1_) << "The predicate assertion didn't evaluate argument 2 "
+ "exactly once.";
+ EXPECT_EQ(1, n2_) << "The predicate assertion didn't evaluate argument 3 "
+ "exactly once.";
// Verifies that the control flow in the test function is expected.
if (expected_to_finish_ && !finished_) {
- FAIL() << "The predicate assertion unexpactedly aborted the test.";
+ FAIL() << "The predicate assertion unexpectedly aborted the test.";
} else if (!expected_to_finish_ && finished_) {
FAIL() << "The failed predicate assertion didn't abort the test "
"as expected.";
@@ -560,116 +535,100 @@ typedef Predicate2Test ASSERT_PRED2Test;
// Tests a successful EXPECT_PRED2 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED2Test, FunctionOnBuiltInTypeSuccess) {
- EXPECT_PRED2(PredFunction2Int,
- ++n1_,
- ++n2_);
+ EXPECT_PRED2(PredFunction2Int, ++n1_, ++n2_);
finished_ = true;
}
// Tests a successful EXPECT_PRED2 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED2Test, FunctionOnUserTypeSuccess) {
- EXPECT_PRED2(PredFunction2Bool,
- Bool(++n1_),
- Bool(++n2_));
+ EXPECT_PRED2(PredFunction2Bool, Bool(++n1_), Bool(++n2_));
finished_ = true;
}
// Tests a successful EXPECT_PRED2 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED2Test, FunctorOnBuiltInTypeSuccess) {
- EXPECT_PRED2(PredFunctor2(),
- ++n1_,
- ++n2_);
+ EXPECT_PRED2(PredFunctor2(), ++n1_, ++n2_);
finished_ = true;
}
// Tests a successful EXPECT_PRED2 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED2Test, FunctorOnUserTypeSuccess) {
- EXPECT_PRED2(PredFunctor2(),
- Bool(++n1_),
- Bool(++n2_));
+ EXPECT_PRED2(PredFunctor2(), Bool(++n1_), Bool(++n2_));
finished_ = true;
}
// Tests a failed EXPECT_PRED2 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED2Test, FunctionOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED2(PredFunction2Int,
- n1_++,
- n2_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED2(PredFunction2Int, n1_++, n2_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED2 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED2Test, FunctionOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED2(PredFunction2Bool,
- Bool(n1_++),
- Bool(n2_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED2(PredFunction2Bool, Bool(n1_++), Bool(n2_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED2 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED2Test, FunctorOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED2(PredFunctor2(),
- n1_++,
- n2_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED2(PredFunctor2(), n1_++, n2_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED2 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED2Test, FunctorOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED2(PredFunctor2(),
- Bool(n1_++),
- Bool(n2_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED2(PredFunctor2(), Bool(n1_++), Bool(n2_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful ASSERT_PRED2 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED2Test, FunctionOnBuiltInTypeSuccess) {
- ASSERT_PRED2(PredFunction2Int,
- ++n1_,
- ++n2_);
+ ASSERT_PRED2(PredFunction2Int, ++n1_, ++n2_);
finished_ = true;
}
// Tests a successful ASSERT_PRED2 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED2Test, FunctionOnUserTypeSuccess) {
- ASSERT_PRED2(PredFunction2Bool,
- Bool(++n1_),
- Bool(++n2_));
+ ASSERT_PRED2(PredFunction2Bool, Bool(++n1_), Bool(++n2_));
finished_ = true;
}
// Tests a successful ASSERT_PRED2 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED2Test, FunctorOnBuiltInTypeSuccess) {
- ASSERT_PRED2(PredFunctor2(),
- ++n1_,
- ++n2_);
+ ASSERT_PRED2(PredFunctor2(), ++n1_, ++n2_);
finished_ = true;
}
// Tests a successful ASSERT_PRED2 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED2Test, FunctorOnUserTypeSuccess) {
- ASSERT_PRED2(PredFunctor2(),
- Bool(++n1_),
- Bool(++n2_));
+ ASSERT_PRED2(PredFunctor2(), Bool(++n1_), Bool(++n2_));
finished_ = true;
}
@@ -677,163 +636,147 @@ TEST_F(ASSERT_PRED2Test, FunctorOnUserTypeSuccess) {
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED2Test, FunctionOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED2(PredFunction2Int,
- n1_++,
- n2_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED2(PredFunction2Int, n1_++, n2_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED2 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED2Test, FunctionOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED2(PredFunction2Bool,
- Bool(n1_++),
- Bool(n2_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED2(PredFunction2Bool, Bool(n1_++), Bool(n2_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED2 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED2Test, FunctorOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED2(PredFunctor2(),
- n1_++,
- n2_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED2(PredFunctor2(), n1_++, n2_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED2 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED2Test, FunctorOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED2(PredFunctor2(),
- Bool(n1_++),
- Bool(n2_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED2(PredFunctor2(), Bool(n1_++), Bool(n2_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful EXPECT_PRED_FORMAT2 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnBuiltInTypeSuccess) {
- EXPECT_PRED_FORMAT2(PredFormatFunction2,
- ++n1_,
- ++n2_);
+ EXPECT_PRED_FORMAT2(PredFormatFunction2, ++n1_, ++n2_);
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT2 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnUserTypeSuccess) {
- EXPECT_PRED_FORMAT2(PredFormatFunction2,
- Bool(++n1_),
- Bool(++n2_));
+ EXPECT_PRED_FORMAT2(PredFormatFunction2, Bool(++n1_), Bool(++n2_));
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT2 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnBuiltInTypeSuccess) {
- EXPECT_PRED_FORMAT2(PredFormatFunctor2(),
- ++n1_,
- ++n2_);
+ EXPECT_PRED_FORMAT2(PredFormatFunctor2(), ++n1_, ++n2_);
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT2 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnUserTypeSuccess) {
- EXPECT_PRED_FORMAT2(PredFormatFunctor2(),
- Bool(++n1_),
- Bool(++n2_));
+ EXPECT_PRED_FORMAT2(PredFormatFunctor2(), Bool(++n1_), Bool(++n2_));
finished_ = true;
}
// Tests a failed EXPECT_PRED_FORMAT2 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT2(PredFormatFunction2,
- n1_++,
- n2_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT2(PredFormatFunction2, n1_++, n2_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT2 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT2(PredFormatFunction2,
- Bool(n1_++),
- Bool(n2_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT2(PredFormatFunction2, Bool(n1_++), Bool(n2_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT2 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT2(PredFormatFunctor2(),
- n1_++,
- n2_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT2(PredFormatFunctor2(), n1_++, n2_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT2 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT2(PredFormatFunctor2(),
- Bool(n1_++),
- Bool(n2_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT2(PredFormatFunctor2(), Bool(n1_++), Bool(n2_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful ASSERT_PRED_FORMAT2 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnBuiltInTypeSuccess) {
- ASSERT_PRED_FORMAT2(PredFormatFunction2,
- ++n1_,
- ++n2_);
+ ASSERT_PRED_FORMAT2(PredFormatFunction2, ++n1_, ++n2_);
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT2 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnUserTypeSuccess) {
- ASSERT_PRED_FORMAT2(PredFormatFunction2,
- Bool(++n1_),
- Bool(++n2_));
+ ASSERT_PRED_FORMAT2(PredFormatFunction2, Bool(++n1_), Bool(++n2_));
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT2 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnBuiltInTypeSuccess) {
- ASSERT_PRED_FORMAT2(PredFormatFunctor2(),
- ++n1_,
- ++n2_);
+ ASSERT_PRED_FORMAT2(PredFormatFunctor2(), ++n1_, ++n2_);
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT2 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnUserTypeSuccess) {
- ASSERT_PRED_FORMAT2(PredFormatFunctor2(),
- Bool(++n1_),
- Bool(++n2_));
+ ASSERT_PRED_FORMAT2(PredFormatFunctor2(), Bool(++n1_), Bool(++n2_));
finished_ = true;
}
@@ -841,48 +784,48 @@ TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnUserTypeSuccess) {
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT2(PredFormatFunction2,
- n1_++,
- n2_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT2(PredFormatFunction2, n1_++, n2_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT2 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT2(PredFormatFunction2,
- Bool(n1_++),
- Bool(n2_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT2(PredFormatFunction2, Bool(n1_++), Bool(n2_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT2 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT2(PredFormatFunctor2(),
- n1_++,
- n2_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT2(PredFormatFunctor2(), n1_++, n2_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT2 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT2(PredFormatFunctor2(),
- Bool(n1_++),
- Bool(n2_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT2(PredFormatFunctor2(), Bool(n1_++), Bool(n2_++));
+ finished_ = true;
+ },
+ "");
}
// Sample functions/functors for testing ternary predicate assertions.
@@ -894,49 +837,36 @@ bool PredFunction3(T1 v1, T2 v2, T3 v3) {
// The following two functions are needed because a compiler doesn't have
// a context yet to know which template function must be instantiated.
-bool PredFunction3Int(int v1, int v2, int v3) {
- return v1 + v2 + v3 > 0;
-}
-bool PredFunction3Bool(Bool v1, Bool v2, Bool v3) {
- return v1 + v2 + v3 > 0;
-}
+bool PredFunction3Int(int v1, int v2, int v3) { return v1 + v2 + v3 > 0; }
+bool PredFunction3Bool(Bool v1, Bool v2, Bool v3) { return v1 + v2 + v3 > 0; }
// A ternary predicate functor.
struct PredFunctor3 {
template <typename T1, typename T2, typename T3>
- bool operator()(const T1& v1,
- const T2& v2,
- const T3& v3) {
+ bool operator()(const T1& v1, const T2& v2, const T3& v3) {
return v1 + v2 + v3 > 0;
}
};
// A ternary predicate-formatter function.
template <typename T1, typename T2, typename T3>
-testing::AssertionResult PredFormatFunction3(const char* e1,
- const char* e2,
- const char* e3,
- const T1& v1,
- const T2& v2,
- const T3& v3) {
- if (PredFunction3(v1, v2, v3))
- return testing::AssertionSuccess();
+testing::AssertionResult PredFormatFunction3(const char* e1, const char* e2,
+ const char* e3, const T1& v1,
+ const T2& v2, const T3& v3) {
+ if (PredFunction3(v1, v2, v3)) return testing::AssertionSuccess();
return testing::AssertionFailure()
- << e1 << " + " << e2 << " + " << e3
- << " is expected to be positive, but evaluates to "
- << v1 + v2 + v3 << ".";
+ << e1 << " + " << e2 << " + " << e3
+ << " is expected to be positive, but evaluates to " << v1 + v2 + v3
+ << ".";
}
// A ternary predicate-formatter functor.
struct PredFormatFunctor3 {
template <typename T1, typename T2, typename T3>
- testing::AssertionResult operator()(const char* e1,
- const char* e2,
- const char* e3,
- const T1& v1,
- const T2& v2,
- const T3& v3) const {
+ testing::AssertionResult operator()(const char* e1, const char* e2,
+ const char* e3, const T1& v1,
+ const T2& v2, const T3& v3) const {
return PredFormatFunction3(e1, e2, e3, v1, v2, v3);
}
};
@@ -954,19 +884,16 @@ class Predicate3Test : public testing::Test {
void TearDown() override {
// Verifies that each of the predicate's arguments was evaluated
// exactly once.
- EXPECT_EQ(1, n1_) <<
- "The predicate assertion didn't evaluate argument 2 "
- "exactly once.";
- EXPECT_EQ(1, n2_) <<
- "The predicate assertion didn't evaluate argument 3 "
- "exactly once.";
- EXPECT_EQ(1, n3_) <<
- "The predicate assertion didn't evaluate argument 4 "
- "exactly once.";
+ EXPECT_EQ(1, n1_) << "The predicate assertion didn't evaluate argument 2 "
+ "exactly once.";
+ EXPECT_EQ(1, n2_) << "The predicate assertion didn't evaluate argument 3 "
+ "exactly once.";
+ EXPECT_EQ(1, n3_) << "The predicate assertion didn't evaluate argument 4 "
+ "exactly once.";
// Verifies that the control flow in the test function is expected.
if (expected_to_finish_ && !finished_) {
- FAIL() << "The predicate assertion unexpactedly aborted the test.";
+ FAIL() << "The predicate assertion unexpectedly aborted the test.";
} else if (!expected_to_finish_ && finished_) {
FAIL() << "The failed predicate assertion didn't abort the test "
"as expected.";
@@ -998,128 +925,100 @@ typedef Predicate3Test ASSERT_PRED3Test;
// Tests a successful EXPECT_PRED3 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED3Test, FunctionOnBuiltInTypeSuccess) {
- EXPECT_PRED3(PredFunction3Int,
- ++n1_,
- ++n2_,
- ++n3_);
+ EXPECT_PRED3(PredFunction3Int, ++n1_, ++n2_, ++n3_);
finished_ = true;
}
// Tests a successful EXPECT_PRED3 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED3Test, FunctionOnUserTypeSuccess) {
- EXPECT_PRED3(PredFunction3Bool,
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_));
+ EXPECT_PRED3(PredFunction3Bool, Bool(++n1_), Bool(++n2_), Bool(++n3_));
finished_ = true;
}
// Tests a successful EXPECT_PRED3 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED3Test, FunctorOnBuiltInTypeSuccess) {
- EXPECT_PRED3(PredFunctor3(),
- ++n1_,
- ++n2_,
- ++n3_);
+ EXPECT_PRED3(PredFunctor3(), ++n1_, ++n2_, ++n3_);
finished_ = true;
}
// Tests a successful EXPECT_PRED3 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED3Test, FunctorOnUserTypeSuccess) {
- EXPECT_PRED3(PredFunctor3(),
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_));
+ EXPECT_PRED3(PredFunctor3(), Bool(++n1_), Bool(++n2_), Bool(++n3_));
finished_ = true;
}
// Tests a failed EXPECT_PRED3 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED3Test, FunctionOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED3(PredFunction3Int,
- n1_++,
- n2_++,
- n3_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED3(PredFunction3Int, n1_++, n2_++, n3_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED3 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED3Test, FunctionOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED3(PredFunction3Bool,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED3(PredFunction3Bool, Bool(n1_++), Bool(n2_++), Bool(n3_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED3 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED3Test, FunctorOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED3(PredFunctor3(),
- n1_++,
- n2_++,
- n3_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED3(PredFunctor3(), n1_++, n2_++, n3_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED3 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED3Test, FunctorOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED3(PredFunctor3(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED3(PredFunctor3(), Bool(n1_++), Bool(n2_++), Bool(n3_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful ASSERT_PRED3 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED3Test, FunctionOnBuiltInTypeSuccess) {
- ASSERT_PRED3(PredFunction3Int,
- ++n1_,
- ++n2_,
- ++n3_);
+ ASSERT_PRED3(PredFunction3Int, ++n1_, ++n2_, ++n3_);
finished_ = true;
}
// Tests a successful ASSERT_PRED3 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED3Test, FunctionOnUserTypeSuccess) {
- ASSERT_PRED3(PredFunction3Bool,
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_));
+ ASSERT_PRED3(PredFunction3Bool, Bool(++n1_), Bool(++n2_), Bool(++n3_));
finished_ = true;
}
// Tests a successful ASSERT_PRED3 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED3Test, FunctorOnBuiltInTypeSuccess) {
- ASSERT_PRED3(PredFunctor3(),
- ++n1_,
- ++n2_,
- ++n3_);
+ ASSERT_PRED3(PredFunctor3(), ++n1_, ++n2_, ++n3_);
finished_ = true;
}
// Tests a successful ASSERT_PRED3 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED3Test, FunctorOnUserTypeSuccess) {
- ASSERT_PRED3(PredFunctor3(),
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_));
+ ASSERT_PRED3(PredFunctor3(), Bool(++n1_), Bool(++n2_), Bool(++n3_));
finished_ = true;
}
@@ -1127,70 +1026,61 @@ TEST_F(ASSERT_PRED3Test, FunctorOnUserTypeSuccess) {
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED3Test, FunctionOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED3(PredFunction3Int,
- n1_++,
- n2_++,
- n3_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED3(PredFunction3Int, n1_++, n2_++, n3_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED3 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED3Test, FunctionOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED3(PredFunction3Bool,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED3(PredFunction3Bool, Bool(n1_++), Bool(n2_++), Bool(n3_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED3 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED3Test, FunctorOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED3(PredFunctor3(),
- n1_++,
- n2_++,
- n3_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED3(PredFunctor3(), n1_++, n2_++, n3_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED3 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED3Test, FunctorOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED3(PredFunctor3(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED3(PredFunctor3(), Bool(n1_++), Bool(n2_++), Bool(n3_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful EXPECT_PRED_FORMAT3 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnBuiltInTypeSuccess) {
- EXPECT_PRED_FORMAT3(PredFormatFunction3,
- ++n1_,
- ++n2_,
- ++n3_);
+ EXPECT_PRED_FORMAT3(PredFormatFunction3, ++n1_, ++n2_, ++n3_);
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT3 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnUserTypeSuccess) {
- EXPECT_PRED_FORMAT3(PredFormatFunction3,
- Bool(++n1_),
- Bool(++n2_),
+ EXPECT_PRED_FORMAT3(PredFormatFunction3, Bool(++n1_), Bool(++n2_),
Bool(++n3_));
finished_ = true;
}
@@ -1198,19 +1088,14 @@ TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnUserTypeSuccess) {
// Tests a successful EXPECT_PRED_FORMAT3 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnBuiltInTypeSuccess) {
- EXPECT_PRED_FORMAT3(PredFormatFunctor3(),
- ++n1_,
- ++n2_,
- ++n3_);
+ EXPECT_PRED_FORMAT3(PredFormatFunctor3(), ++n1_, ++n2_, ++n3_);
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT3 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnUserTypeSuccess) {
- EXPECT_PRED_FORMAT3(PredFormatFunctor3(),
- Bool(++n1_),
- Bool(++n2_),
+ EXPECT_PRED_FORMAT3(PredFormatFunctor3(), Bool(++n1_), Bool(++n2_),
Bool(++n3_));
finished_ = true;
}
@@ -1218,67 +1103,60 @@ TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnUserTypeSuccess) {
// Tests a failed EXPECT_PRED_FORMAT3 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT3(PredFormatFunction3,
- n1_++,
- n2_++,
- n3_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT3(PredFormatFunction3, n1_++, n2_++, n3_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT3 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT3(PredFormatFunction3,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT3(PredFormatFunction3, Bool(n1_++), Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT3 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT3(PredFormatFunctor3(),
- n1_++,
- n2_++,
- n3_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT3(PredFormatFunctor3(), n1_++, n2_++, n3_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT3 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT3(PredFormatFunctor3(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT3(PredFormatFunctor3(), Bool(n1_++), Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful ASSERT_PRED_FORMAT3 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnBuiltInTypeSuccess) {
- ASSERT_PRED_FORMAT3(PredFormatFunction3,
- ++n1_,
- ++n2_,
- ++n3_);
+ ASSERT_PRED_FORMAT3(PredFormatFunction3, ++n1_, ++n2_, ++n3_);
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT3 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnUserTypeSuccess) {
- ASSERT_PRED_FORMAT3(PredFormatFunction3,
- Bool(++n1_),
- Bool(++n2_),
+ ASSERT_PRED_FORMAT3(PredFormatFunction3, Bool(++n1_), Bool(++n2_),
Bool(++n3_));
finished_ = true;
}
@@ -1286,19 +1164,14 @@ TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnUserTypeSuccess) {
// Tests a successful ASSERT_PRED_FORMAT3 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnBuiltInTypeSuccess) {
- ASSERT_PRED_FORMAT3(PredFormatFunctor3(),
- ++n1_,
- ++n2_,
- ++n3_);
+ ASSERT_PRED_FORMAT3(PredFormatFunctor3(), ++n1_, ++n2_, ++n3_);
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT3 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnUserTypeSuccess) {
- ASSERT_PRED_FORMAT3(PredFormatFunctor3(),
- Bool(++n1_),
- Bool(++n2_),
+ ASSERT_PRED_FORMAT3(PredFormatFunctor3(), Bool(++n1_), Bool(++n2_),
Bool(++n3_));
finished_ = true;
}
@@ -1307,52 +1180,50 @@ TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnUserTypeSuccess) {
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT3(PredFormatFunction3,
- n1_++,
- n2_++,
- n3_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT3(PredFormatFunction3, n1_++, n2_++, n3_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT3 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT3(PredFormatFunction3,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT3(PredFormatFunction3, Bool(n1_++), Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT3 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT3(PredFormatFunctor3(),
- n1_++,
- n2_++,
- n3_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT3(PredFormatFunctor3(), n1_++, n2_++, n3_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT3 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT3(PredFormatFunctor3(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT3(PredFormatFunctor3(), Bool(n1_++), Bool(n2_++),
+ Bool(n3_++));
+ finished_ = true;
+ },
+ "");
}
// Sample functions/functors for testing 4-ary predicate assertions.
@@ -1374,43 +1245,31 @@ bool PredFunction4Bool(Bool v1, Bool v2, Bool v3, Bool v4) {
// A 4-ary predicate functor.
struct PredFunctor4 {
template <typename T1, typename T2, typename T3, typename T4>
- bool operator()(const T1& v1,
- const T2& v2,
- const T3& v3,
- const T4& v4) {
+ bool operator()(const T1& v1, const T2& v2, const T3& v3, const T4& v4) {
return v1 + v2 + v3 + v4 > 0;
}
};
// A 4-ary predicate-formatter function.
template <typename T1, typename T2, typename T3, typename T4>
-testing::AssertionResult PredFormatFunction4(const char* e1,
- const char* e2,
- const char* e3,
- const char* e4,
- const T1& v1,
- const T2& v2,
- const T3& v3,
- const T4& v4) {
- if (PredFunction4(v1, v2, v3, v4))
- return testing::AssertionSuccess();
+testing::AssertionResult PredFormatFunction4(const char* e1, const char* e2,
+ const char* e3, const char* e4,
+ const T1& v1, const T2& v2,
+ const T3& v3, const T4& v4) {
+ if (PredFunction4(v1, v2, v3, v4)) return testing::AssertionSuccess();
return testing::AssertionFailure()
- << e1 << " + " << e2 << " + " << e3 << " + " << e4
- << " is expected to be positive, but evaluates to "
- << v1 + v2 + v3 + v4 << ".";
+ << e1 << " + " << e2 << " + " << e3 << " + " << e4
+ << " is expected to be positive, but evaluates to "
+ << v1 + v2 + v3 + v4 << ".";
}
// A 4-ary predicate-formatter functor.
struct PredFormatFunctor4 {
template <typename T1, typename T2, typename T3, typename T4>
- testing::AssertionResult operator()(const char* e1,
- const char* e2,
- const char* e3,
- const char* e4,
- const T1& v1,
- const T2& v2,
- const T3& v3,
+ testing::AssertionResult operator()(const char* e1, const char* e2,
+ const char* e3, const char* e4,
+ const T1& v1, const T2& v2, const T3& v3,
const T4& v4) const {
return PredFormatFunction4(e1, e2, e3, e4, v1, v2, v3, v4);
}
@@ -1429,22 +1288,18 @@ class Predicate4Test : public testing::Test {
void TearDown() override {
// Verifies that each of the predicate's arguments was evaluated
// exactly once.
- EXPECT_EQ(1, n1_) <<
- "The predicate assertion didn't evaluate argument 2 "
- "exactly once.";
- EXPECT_EQ(1, n2_) <<
- "The predicate assertion didn't evaluate argument 3 "
- "exactly once.";
- EXPECT_EQ(1, n3_) <<
- "The predicate assertion didn't evaluate argument 4 "
- "exactly once.";
- EXPECT_EQ(1, n4_) <<
- "The predicate assertion didn't evaluate argument 5 "
- "exactly once.";
+ EXPECT_EQ(1, n1_) << "The predicate assertion didn't evaluate argument 2 "
+ "exactly once.";
+ EXPECT_EQ(1, n2_) << "The predicate assertion didn't evaluate argument 3 "
+ "exactly once.";
+ EXPECT_EQ(1, n3_) << "The predicate assertion didn't evaluate argument 4 "
+ "exactly once.";
+ EXPECT_EQ(1, n4_) << "The predicate assertion didn't evaluate argument 5 "
+ "exactly once.";
// Verifies that the control flow in the test function is expected.
if (expected_to_finish_ && !finished_) {
- FAIL() << "The predicate assertion unexpactedly aborted the test.";
+ FAIL() << "The predicate assertion unexpectedly aborted the test.";
} else if (!expected_to_finish_ && finished_) {
FAIL() << "The failed predicate assertion didn't abort the test "
"as expected.";
@@ -1478,21 +1333,14 @@ typedef Predicate4Test ASSERT_PRED4Test;
// Tests a successful EXPECT_PRED4 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED4Test, FunctionOnBuiltInTypeSuccess) {
- EXPECT_PRED4(PredFunction4Int,
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_);
+ EXPECT_PRED4(PredFunction4Int, ++n1_, ++n2_, ++n3_, ++n4_);
finished_ = true;
}
// Tests a successful EXPECT_PRED4 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED4Test, FunctionOnUserTypeSuccess) {
- EXPECT_PRED4(PredFunction4Bool,
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
+ EXPECT_PRED4(PredFunction4Bool, Bool(++n1_), Bool(++n2_), Bool(++n3_),
Bool(++n4_));
finished_ = true;
}
@@ -1500,21 +1348,14 @@ TEST_F(EXPECT_PRED4Test, FunctionOnUserTypeSuccess) {
// Tests a successful EXPECT_PRED4 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED4Test, FunctorOnBuiltInTypeSuccess) {
- EXPECT_PRED4(PredFunctor4(),
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_);
+ EXPECT_PRED4(PredFunctor4(), ++n1_, ++n2_, ++n3_, ++n4_);
finished_ = true;
}
// Tests a successful EXPECT_PRED4 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED4Test, FunctorOnUserTypeSuccess) {
- EXPECT_PRED4(PredFunctor4(),
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
+ EXPECT_PRED4(PredFunctor4(), Bool(++n1_), Bool(++n2_), Bool(++n3_),
Bool(++n4_));
finished_ = true;
}
@@ -1522,73 +1363,60 @@ TEST_F(EXPECT_PRED4Test, FunctorOnUserTypeSuccess) {
// Tests a failed EXPECT_PRED4 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED4Test, FunctionOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED4(PredFunction4Int,
- n1_++,
- n2_++,
- n3_++,
- n4_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED4(PredFunction4Int, n1_++, n2_++, n3_++, n4_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED4 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED4Test, FunctionOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED4(PredFunction4Bool,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED4(PredFunction4Bool, Bool(n1_++), Bool(n2_++), Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED4 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED4Test, FunctorOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED4(PredFunctor4(),
- n1_++,
- n2_++,
- n3_++,
- n4_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED4(PredFunctor4(), n1_++, n2_++, n3_++, n4_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED4 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED4Test, FunctorOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED4(PredFunctor4(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED4(PredFunctor4(), Bool(n1_++), Bool(n2_++), Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful ASSERT_PRED4 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED4Test, FunctionOnBuiltInTypeSuccess) {
- ASSERT_PRED4(PredFunction4Int,
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_);
+ ASSERT_PRED4(PredFunction4Int, ++n1_, ++n2_, ++n3_, ++n4_);
finished_ = true;
}
// Tests a successful ASSERT_PRED4 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED4Test, FunctionOnUserTypeSuccess) {
- ASSERT_PRED4(PredFunction4Bool,
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
+ ASSERT_PRED4(PredFunction4Bool, Bool(++n1_), Bool(++n2_), Bool(++n3_),
Bool(++n4_));
finished_ = true;
}
@@ -1596,21 +1424,14 @@ TEST_F(ASSERT_PRED4Test, FunctionOnUserTypeSuccess) {
// Tests a successful ASSERT_PRED4 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED4Test, FunctorOnBuiltInTypeSuccess) {
- ASSERT_PRED4(PredFunctor4(),
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_);
+ ASSERT_PRED4(PredFunctor4(), ++n1_, ++n2_, ++n3_, ++n4_);
finished_ = true;
}
// Tests a successful ASSERT_PRED4 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED4Test, FunctorOnUserTypeSuccess) {
- ASSERT_PRED4(PredFunctor4(),
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
+ ASSERT_PRED4(PredFunctor4(), Bool(++n1_), Bool(++n2_), Bool(++n3_),
Bool(++n4_));
finished_ = true;
}
@@ -1619,195 +1440,155 @@ TEST_F(ASSERT_PRED4Test, FunctorOnUserTypeSuccess) {
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED4Test, FunctionOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED4(PredFunction4Int,
- n1_++,
- n2_++,
- n3_++,
- n4_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED4(PredFunction4Int, n1_++, n2_++, n3_++, n4_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED4 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED4Test, FunctionOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED4(PredFunction4Bool,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED4(PredFunction4Bool, Bool(n1_++), Bool(n2_++), Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED4 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED4Test, FunctorOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED4(PredFunctor4(),
- n1_++,
- n2_++,
- n3_++,
- n4_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED4(PredFunctor4(), n1_++, n2_++, n3_++, n4_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED4 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED4Test, FunctorOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED4(PredFunctor4(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED4(PredFunctor4(), Bool(n1_++), Bool(n2_++), Bool(n3_++),
+ Bool(n4_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful EXPECT_PRED_FORMAT4 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnBuiltInTypeSuccess) {
- EXPECT_PRED_FORMAT4(PredFormatFunction4,
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_);
+ EXPECT_PRED_FORMAT4(PredFormatFunction4, ++n1_, ++n2_, ++n3_, ++n4_);
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT4 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnUserTypeSuccess) {
- EXPECT_PRED_FORMAT4(PredFormatFunction4,
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_));
+ EXPECT_PRED_FORMAT4(PredFormatFunction4, Bool(++n1_), Bool(++n2_),
+ Bool(++n3_), Bool(++n4_));
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT4 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnBuiltInTypeSuccess) {
- EXPECT_PRED_FORMAT4(PredFormatFunctor4(),
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_);
+ EXPECT_PRED_FORMAT4(PredFormatFunctor4(), ++n1_, ++n2_, ++n3_, ++n4_);
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT4 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnUserTypeSuccess) {
- EXPECT_PRED_FORMAT4(PredFormatFunctor4(),
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_));
+ EXPECT_PRED_FORMAT4(PredFormatFunctor4(), Bool(++n1_), Bool(++n2_),
+ Bool(++n3_), Bool(++n4_));
finished_ = true;
}
// Tests a failed EXPECT_PRED_FORMAT4 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT4(PredFormatFunction4,
- n1_++,
- n2_++,
- n3_++,
- n4_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT4(PredFormatFunction4, n1_++, n2_++, n3_++, n4_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT4 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT4(PredFormatFunction4,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT4(PredFormatFunction4, Bool(n1_++), Bool(n2_++),
+ Bool(n3_++), Bool(n4_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT4 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT4(PredFormatFunctor4(),
- n1_++,
- n2_++,
- n3_++,
- n4_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT4(PredFormatFunctor4(), n1_++, n2_++, n3_++, n4_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT4 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT4(PredFormatFunctor4(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT4(PredFormatFunctor4(), Bool(n1_++), Bool(n2_++),
+ Bool(n3_++), Bool(n4_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful ASSERT_PRED_FORMAT4 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnBuiltInTypeSuccess) {
- ASSERT_PRED_FORMAT4(PredFormatFunction4,
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_);
+ ASSERT_PRED_FORMAT4(PredFormatFunction4, ++n1_, ++n2_, ++n3_, ++n4_);
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT4 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnUserTypeSuccess) {
- ASSERT_PRED_FORMAT4(PredFormatFunction4,
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_));
+ ASSERT_PRED_FORMAT4(PredFormatFunction4, Bool(++n1_), Bool(++n2_),
+ Bool(++n3_), Bool(++n4_));
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT4 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnBuiltInTypeSuccess) {
- ASSERT_PRED_FORMAT4(PredFormatFunctor4(),
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_);
+ ASSERT_PRED_FORMAT4(PredFormatFunctor4(), ++n1_, ++n2_, ++n3_, ++n4_);
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT4 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnUserTypeSuccess) {
- ASSERT_PRED_FORMAT4(PredFormatFunctor4(),
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_));
+ ASSERT_PRED_FORMAT4(PredFormatFunctor4(), Bool(++n1_), Bool(++n2_),
+ Bool(++n3_), Bool(++n4_));
finished_ = true;
}
@@ -1815,56 +1596,50 @@ TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnUserTypeSuccess) {
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT4(PredFormatFunction4,
- n1_++,
- n2_++,
- n3_++,
- n4_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT4(PredFormatFunction4, n1_++, n2_++, n3_++, n4_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT4 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT4(PredFormatFunction4,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT4(PredFormatFunction4, Bool(n1_++), Bool(n2_++),
+ Bool(n3_++), Bool(n4_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT4 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT4(PredFormatFunctor4(),
- n1_++,
- n2_++,
- n3_++,
- n4_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT4(PredFormatFunctor4(), n1_++, n2_++, n3_++, n4_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT4 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT4(PredFormatFunctor4(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT4(PredFormatFunctor4(), Bool(n1_++), Bool(n2_++),
+ Bool(n3_++), Bool(n4_++));
+ finished_ = true;
+ },
+ "");
}
// Sample functions/functors for testing 5-ary predicate assertions.
@@ -1886,10 +1661,7 @@ bool PredFunction5Bool(Bool v1, Bool v2, Bool v3, Bool v4, Bool v5) {
// A 5-ary predicate functor.
struct PredFunctor5 {
template <typename T1, typename T2, typename T3, typename T4, typename T5>
- bool operator()(const T1& v1,
- const T2& v2,
- const T3& v3,
- const T4& v4,
+ bool operator()(const T1& v1, const T2& v2, const T3& v3, const T4& v4,
const T5& v5) {
return v1 + v2 + v3 + v4 + v5 > 0;
}
@@ -1897,37 +1669,26 @@ struct PredFunctor5 {
// A 5-ary predicate-formatter function.
template <typename T1, typename T2, typename T3, typename T4, typename T5>
-testing::AssertionResult PredFormatFunction5(const char* e1,
- const char* e2,
- const char* e3,
- const char* e4,
- const char* e5,
- const T1& v1,
- const T2& v2,
- const T3& v3,
- const T4& v4,
- const T5& v5) {
- if (PredFunction5(v1, v2, v3, v4, v5))
- return testing::AssertionSuccess();
+testing::AssertionResult PredFormatFunction5(const char* e1, const char* e2,
+ const char* e3, const char* e4,
+ const char* e5, const T1& v1,
+ const T2& v2, const T3& v3,
+ const T4& v4, const T5& v5) {
+ if (PredFunction5(v1, v2, v3, v4, v5)) return testing::AssertionSuccess();
return testing::AssertionFailure()
- << e1 << " + " << e2 << " + " << e3 << " + " << e4 << " + " << e5
- << " is expected to be positive, but evaluates to "
- << v1 + v2 + v3 + v4 + v5 << ".";
+ << e1 << " + " << e2 << " + " << e3 << " + " << e4 << " + " << e5
+ << " is expected to be positive, but evaluates to "
+ << v1 + v2 + v3 + v4 + v5 << ".";
}
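// A predicate-formatter such as PredFormatFunction5 is consumed by the
// *_PRED_FORMAT* macros, which pass the argument expressions as the e*
// strings. A minimal sketch of a call site (the variable names here are
// illustrative only):
//
//   int a = -2, b = -3, c = 1, d = 1, e = 1;
//   // On failure, prints the formatter's message, e.g.
//   // "a + b + c + d + e is expected to be positive, but evaluates to -2."
//   EXPECT_PRED_FORMAT5(PredFormatFunction5, a, b, c, d, e);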
// A 5-ary predicate-formatter functor.
struct PredFormatFunctor5 {
template <typename T1, typename T2, typename T3, typename T4, typename T5>
- testing::AssertionResult operator()(const char* e1,
- const char* e2,
- const char* e3,
- const char* e4,
- const char* e5,
- const T1& v1,
- const T2& v2,
- const T3& v3,
- const T4& v4,
+ testing::AssertionResult operator()(const char* e1, const char* e2,
+ const char* e3, const char* e4,
+ const char* e5, const T1& v1,
+ const T2& v2, const T3& v3, const T4& v4,
const T5& v5) const {
return PredFormatFunction5(e1, e2, e3, e4, e5, v1, v2, v3, v4, v5);
}
@@ -1946,25 +1707,20 @@ class Predicate5Test : public testing::Test {
void TearDown() override {
// Verifies that each of the predicate's arguments was evaluated
// exactly once.
- EXPECT_EQ(1, n1_) <<
- "The predicate assertion didn't evaluate argument 2 "
- "exactly once.";
- EXPECT_EQ(1, n2_) <<
- "The predicate assertion didn't evaluate argument 3 "
- "exactly once.";
- EXPECT_EQ(1, n3_) <<
- "The predicate assertion didn't evaluate argument 4 "
- "exactly once.";
- EXPECT_EQ(1, n4_) <<
- "The predicate assertion didn't evaluate argument 5 "
- "exactly once.";
- EXPECT_EQ(1, n5_) <<
- "The predicate assertion didn't evaluate argument 6 "
- "exactly once.";
+ EXPECT_EQ(1, n1_) << "The predicate assertion didn't evaluate argument 2 "
+ "exactly once.";
+ EXPECT_EQ(1, n2_) << "The predicate assertion didn't evaluate argument 3 "
+ "exactly once.";
+ EXPECT_EQ(1, n3_) << "The predicate assertion didn't evaluate argument 4 "
+ "exactly once.";
+ EXPECT_EQ(1, n4_) << "The predicate assertion didn't evaluate argument 5 "
+ "exactly once.";
+ EXPECT_EQ(1, n5_) << "The predicate assertion didn't evaluate argument 6 "
+ "exactly once.";
// Verifies that the control flow in the test function is expected.
if (expected_to_finish_ && !finished_) {
- FAIL() << "The predicate assertion unexpactedly aborted the test.";
+ FAIL() << "The predicate assertion unexpectedly aborted the test.";
} else if (!expected_to_finish_ && finished_) {
FAIL() << "The failed predicate assertion didn't abort the test "
"as expected.";
@@ -2000,152 +1756,106 @@ typedef Predicate5Test ASSERT_PRED5Test;
// Tests a successful EXPECT_PRED5 where the
// predicate is a function on a built-in type (int).
TEST_F(EXPECT_PRED5Test, FunctionOnBuiltInTypeSuccess) {
- EXPECT_PRED5(PredFunction5Int,
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_,
- ++n5_);
+ EXPECT_PRED5(PredFunction5Int, ++n1_, ++n2_, ++n3_, ++n4_, ++n5_);
finished_ = true;
}
// Tests a successful EXPECT_PRED5 where the
// predicate is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED5Test, FunctionOnUserTypeSuccess) {
- EXPECT_PRED5(PredFunction5Bool,
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_),
- Bool(++n5_));
+ EXPECT_PRED5(PredFunction5Bool, Bool(++n1_), Bool(++n2_), Bool(++n3_),
+ Bool(++n4_), Bool(++n5_));
finished_ = true;
}
// Tests a successful EXPECT_PRED5 where the
// predicate is a functor on a built-in type (int).
TEST_F(EXPECT_PRED5Test, FunctorOnBuiltInTypeSuccess) {
- EXPECT_PRED5(PredFunctor5(),
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_,
- ++n5_);
+ EXPECT_PRED5(PredFunctor5(), ++n1_, ++n2_, ++n3_, ++n4_, ++n5_);
finished_ = true;
}
// Tests a successful EXPECT_PRED5 where the
// predicate is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED5Test, FunctorOnUserTypeSuccess) {
- EXPECT_PRED5(PredFunctor5(),
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_),
- Bool(++n5_));
+ EXPECT_PRED5(PredFunctor5(), Bool(++n1_), Bool(++n2_), Bool(++n3_),
+ Bool(++n4_), Bool(++n5_));
finished_ = true;
}
// Tests a failed EXPECT_PRED5 where the
// predicate is a function on a built-in type (int).
TEST_F(EXPECT_PRED5Test, FunctionOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED5(PredFunction5Int,
- n1_++,
- n2_++,
- n3_++,
- n4_++,
- n5_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED5(PredFunction5Int, n1_++, n2_++, n3_++, n4_++, n5_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED5 where the
// predicate is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED5Test, FunctionOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED5(PredFunction5Bool,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++),
- Bool(n5_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED5(PredFunction5Bool, Bool(n1_++), Bool(n2_++), Bool(n3_++),
+ Bool(n4_++), Bool(n5_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED5 where the
// predicate is a functor on a built-in type (int).
TEST_F(EXPECT_PRED5Test, FunctorOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED5(PredFunctor5(),
- n1_++,
- n2_++,
- n3_++,
- n4_++,
- n5_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED5(PredFunctor5(), n1_++, n2_++, n3_++, n4_++, n5_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED5 where the
// predicate is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED5Test, FunctorOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED5(PredFunctor5(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++),
- Bool(n5_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED5(PredFunctor5(), Bool(n1_++), Bool(n2_++), Bool(n3_++),
+ Bool(n4_++), Bool(n5_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful ASSERT_PRED5 where the
// predicate is a function on a built-in type (int).
TEST_F(ASSERT_PRED5Test, FunctionOnBuiltInTypeSuccess) {
- ASSERT_PRED5(PredFunction5Int,
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_,
- ++n5_);
+ ASSERT_PRED5(PredFunction5Int, ++n1_, ++n2_, ++n3_, ++n4_, ++n5_);
finished_ = true;
}
// Tests a successful ASSERT_PRED5 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED5Test, FunctionOnUserTypeSuccess) {
- ASSERT_PRED5(PredFunction5Bool,
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_),
- Bool(++n5_));
+ ASSERT_PRED5(PredFunction5Bool, Bool(++n1_), Bool(++n2_), Bool(++n3_),
+ Bool(++n4_), Bool(++n5_));
finished_ = true;
}
// Tests a successful ASSERT_PRED5 where the
// predicate is a functor on a built-in type (int).
TEST_F(ASSERT_PRED5Test, FunctorOnBuiltInTypeSuccess) {
- ASSERT_PRED5(PredFunctor5(),
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_,
- ++n5_);
+ ASSERT_PRED5(PredFunctor5(), ++n1_, ++n2_, ++n3_, ++n4_, ++n5_);
finished_ = true;
}
// Tests a successful ASSERT_PRED5 where the
// predicate is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED5Test, FunctorOnUserTypeSuccess) {
- ASSERT_PRED5(PredFunctor5(),
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_),
- Bool(++n5_));
+ ASSERT_PRED5(PredFunctor5(), Bool(++n1_), Bool(++n2_), Bool(++n3_),
+ Bool(++n4_), Bool(++n5_));
finished_ = true;
}
@@ -2153,211 +1863,157 @@ TEST_F(ASSERT_PRED5Test, FunctorOnUserTypeSuccess) {
// predicate is a function on a built-in type (int).
TEST_F(ASSERT_PRED5Test, FunctionOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED5(PredFunction5Int,
- n1_++,
- n2_++,
- n3_++,
- n4_++,
- n5_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED5(PredFunction5Int, n1_++, n2_++, n3_++, n4_++, n5_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED5 where the
// predicate is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED5Test, FunctionOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED5(PredFunction5Bool,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++),
- Bool(n5_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED5(PredFunction5Bool, Bool(n1_++), Bool(n2_++), Bool(n3_++),
+ Bool(n4_++), Bool(n5_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED5 where the
// predicate is a functor on a built-in type (int).
TEST_F(ASSERT_PRED5Test, FunctorOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED5(PredFunctor5(),
- n1_++,
- n2_++,
- n3_++,
- n4_++,
- n5_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED5(PredFunctor5(), n1_++, n2_++, n3_++, n4_++, n5_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED5 where the
// predicate is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED5Test, FunctorOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED5(PredFunctor5(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++),
- Bool(n5_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED5(PredFunctor5(), Bool(n1_++), Bool(n2_++), Bool(n3_++),
+ Bool(n4_++), Bool(n5_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful EXPECT_PRED_FORMAT5 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnBuiltInTypeSuccess) {
- EXPECT_PRED_FORMAT5(PredFormatFunction5,
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_,
- ++n5_);
+ EXPECT_PRED_FORMAT5(PredFormatFunction5, ++n1_, ++n2_, ++n3_, ++n4_, ++n5_);
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT5 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnUserTypeSuccess) {
- EXPECT_PRED_FORMAT5(PredFormatFunction5,
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_),
- Bool(++n5_));
+ EXPECT_PRED_FORMAT5(PredFormatFunction5, Bool(++n1_), Bool(++n2_),
+ Bool(++n3_), Bool(++n4_), Bool(++n5_));
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT5 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnBuiltInTypeSuccess) {
- EXPECT_PRED_FORMAT5(PredFormatFunctor5(),
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_,
- ++n5_);
+ EXPECT_PRED_FORMAT5(PredFormatFunctor5(), ++n1_, ++n2_, ++n3_, ++n4_, ++n5_);
finished_ = true;
}
// Tests a successful EXPECT_PRED_FORMAT5 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnUserTypeSuccess) {
- EXPECT_PRED_FORMAT5(PredFormatFunctor5(),
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_),
- Bool(++n5_));
+ EXPECT_PRED_FORMAT5(PredFormatFunctor5(), Bool(++n1_), Bool(++n2_),
+ Bool(++n3_), Bool(++n4_), Bool(++n5_));
finished_ = true;
}
// Tests a failed EXPECT_PRED_FORMAT5 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT5(PredFormatFunction5,
- n1_++,
- n2_++,
- n3_++,
- n4_++,
- n5_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT5(PredFormatFunction5, n1_++, n2_++, n3_++, n4_++,
+ n5_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT5 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT5(PredFormatFunction5,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++),
- Bool(n5_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT5(PredFormatFunction5, Bool(n1_++), Bool(n2_++),
+ Bool(n3_++), Bool(n4_++), Bool(n5_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT5 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnBuiltInTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT5(PredFormatFunctor5(),
- n1_++,
- n2_++,
- n3_++,
- n4_++,
- n5_++);
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT5(PredFormatFunctor5(), n1_++, n2_++, n3_++, n4_++,
+ n5_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed EXPECT_PRED_FORMAT5 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnUserTypeFailure) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT5(PredFormatFunctor5(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++),
- Bool(n5_++));
- finished_ = true;
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT5(PredFormatFunctor5(), Bool(n1_++), Bool(n2_++),
+ Bool(n3_++), Bool(n4_++), Bool(n5_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a successful ASSERT_PRED_FORMAT5 where the
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnBuiltInTypeSuccess) {
- ASSERT_PRED_FORMAT5(PredFormatFunction5,
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_,
- ++n5_);
+ ASSERT_PRED_FORMAT5(PredFormatFunction5, ++n1_, ++n2_, ++n3_, ++n4_, ++n5_);
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT5 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnUserTypeSuccess) {
- ASSERT_PRED_FORMAT5(PredFormatFunction5,
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_),
- Bool(++n5_));
+ ASSERT_PRED_FORMAT5(PredFormatFunction5, Bool(++n1_), Bool(++n2_),
+ Bool(++n3_), Bool(++n4_), Bool(++n5_));
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT5 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnBuiltInTypeSuccess) {
- ASSERT_PRED_FORMAT5(PredFormatFunctor5(),
- ++n1_,
- ++n2_,
- ++n3_,
- ++n4_,
- ++n5_);
+ ASSERT_PRED_FORMAT5(PredFormatFunctor5(), ++n1_, ++n2_, ++n3_, ++n4_, ++n5_);
finished_ = true;
}
// Tests a successful ASSERT_PRED_FORMAT5 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnUserTypeSuccess) {
- ASSERT_PRED_FORMAT5(PredFormatFunctor5(),
- Bool(++n1_),
- Bool(++n2_),
- Bool(++n3_),
- Bool(++n4_),
- Bool(++n5_));
+ ASSERT_PRED_FORMAT5(PredFormatFunctor5(), Bool(++n1_), Bool(++n2_),
+ Bool(++n3_), Bool(++n4_), Bool(++n5_));
finished_ = true;
}
@@ -2365,58 +2021,50 @@ TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnUserTypeSuccess) {
// predicate-formatter is a function on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT5(PredFormatFunction5,
- n1_++,
- n2_++,
- n3_++,
- n4_++,
- n5_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT5(PredFormatFunction5, n1_++, n2_++, n3_++, n4_++,
+ n5_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT5 where the
// predicate-formatter is a function on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT5(PredFormatFunction5,
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++),
- Bool(n5_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT5(PredFormatFunction5, Bool(n1_++), Bool(n2_++),
+ Bool(n3_++), Bool(n4_++), Bool(n5_++));
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT5 where the
// predicate-formatter is a functor on a built-in type (int).
TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnBuiltInTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT5(PredFormatFunctor5(),
- n1_++,
- n2_++,
- n3_++,
- n4_++,
- n5_++);
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT5(PredFormatFunctor5(), n1_++, n2_++, n3_++, n4_++,
+ n5_++);
+ finished_ = true;
+ },
+ "");
}
// Tests a failed ASSERT_PRED_FORMAT5 where the
// predicate-formatter is a functor on a user-defined type (Bool).
TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnUserTypeFailure) {
expected_to_finish_ = false;
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT5(PredFormatFunctor5(),
- Bool(n1_++),
- Bool(n2_++),
- Bool(n3_++),
- Bool(n4_++),
- Bool(n5_++));
- finished_ = true;
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT5(PredFormatFunctor5(), Bool(n1_++), Bool(n2_++),
+ Bool(n3_++), Bool(n4_++), Bool(n5_++));
+ finished_ = true;
+ },
+ "");
}
diff --git a/googletest/test/gtest_premature_exit_test.cc b/googletest/test/gtest_premature_exit_test.cc
index 1d1187ef..1a0c5ea4 100644
--- a/googletest/test/gtest_premature_exit_test.cc
+++ b/googletest/test/gtest_premature_exit_test.cc
@@ -81,15 +81,17 @@ TEST_F(PrematureExitDeathTest, FileExistsDuringExecutionOfDeathTest) {
return;
}
- EXPECT_DEATH_IF_SUPPORTED({
- // If the file exists, crash the process such that the main test
- // process will catch the (expected) crash and report a success;
- // otherwise don't crash, which will cause the main test process
- // to report that the death test has failed.
- if (PrematureExitFileExists()) {
- exit(1);
- }
- }, "");
+ EXPECT_DEATH_IF_SUPPORTED(
+ {
+ // If the file exists, crash the process such that the main test
+ // process will catch the (expected) crash and report a success;
+ // otherwise don't crash, which will cause the main test process
+ // to report that the death test has failed.
+ if (PrematureExitFileExists()) {
+ exit(1);
+ }
+ },
+ "");
}
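// EXPECT_DEATH_IF_SUPPORTED(statement, matcher) runs `statement` in a child
// process where death tests are supported and passes only if the child dies
// with stderr matching `matcher` (the empty regex "" matches any output); on
// platforms without death-test support it merely logs. A minimal sketch:
//
//   EXPECT_DEATH_IF_SUPPORTED(exit(1), "");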
// Tests that the premature-exit file exists during the execution of a
@@ -106,7 +108,7 @@ TEST_F(PrematureExitTest, PrematureExitFileExistsDuringTestExecution) {
} // namespace
-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
InitGoogleTest(&argc, argv);
const int exit_code = RUN_ALL_TESTS();
diff --git a/googletest/test/gtest_repeat_test.cc b/googletest/test/gtest_repeat_test.cc
index 7da4a15e..f67b7886 100644
--- a/googletest/test/gtest_repeat_test.cc
+++ b/googletest/test/gtest_repeat_test.cc
@@ -27,44 +27,32 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// Tests the --gtest_repeat=number flag.
#include <stdlib.h>
+
#include <iostream>
+
#include "gtest/gtest.h"
#include "src/gtest-internal-inl.h"
-namespace testing {
-
-GTEST_DECLARE_string_(death_test_style);
-GTEST_DECLARE_string_(filter);
-GTEST_DECLARE_int32_(repeat);
-
-} // namespace testing
-
-using testing::GTEST_FLAG(death_test_style);
-using testing::GTEST_FLAG(filter);
-using testing::GTEST_FLAG(repeat);
-
namespace {
// We need this when we are testing Google Test itself and therefore
// cannot use Google Test assertions.
-#define GTEST_CHECK_INT_EQ_(expected, actual) \
- do {\
- const int expected_val = (expected);\
- const int actual_val = (actual);\
- if (::testing::internal::IsTrue(expected_val != actual_val)) {\
- ::std::cout << "Value of: " #actual "\n"\
- << " Actual: " << actual_val << "\n"\
- << "Expected: " #expected "\n"\
- << "Which is: " << expected_val << "\n";\
- ::testing::internal::posix::Abort();\
- }\
+#define GTEST_CHECK_INT_EQ_(expected, actual) \
+ do { \
+ const int expected_val = (expected); \
+ const int actual_val = (actual); \
+ if (::testing::internal::IsTrue(expected_val != actual_val)) { \
+ ::std::cout << "Value of: " #actual "\n" \
+ << " Actual: " << actual_val << "\n" \
+ << "Expected: " #expected "\n" \
+ << "Which is: " << expected_val << "\n"; \
+ ::testing::internal::posix::Abort(); \
+ } \
} while (::testing::internal::AlwaysFalse())
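// Typical use, as in the TestRepeat* helpers below: check values computed
// while Google Test itself is under test, aborting the whole process on a
// mismatch instead of recording a test failure:
//
//   GTEST_CHECK_INT_EQ_(0, RUN_ALL_TESTS());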
-
// Used for verifying that global environment set-up and tear-down are
// inside the --gtest_repeat loop.
@@ -73,7 +61,7 @@ int g_environment_tear_down_count = 0;
class MyEnvironment : public testing::Environment {
public:
- MyEnvironment() {}
+ MyEnvironment() = default;
void SetUp() override { g_environment_set_up_count++; }
void TearDown() override { g_environment_tear_down_count++; }
};
@@ -91,9 +79,7 @@ TEST(FooTest, ShouldFail) {
int g_should_pass_count = 0;
-TEST(FooTest, ShouldPass) {
- g_should_pass_count++;
-}
+TEST(FooTest, ShouldPass) { g_should_pass_count++; }
// A test that contains a thread-safe death test and a fast death
// test. It should pass.
@@ -103,10 +89,10 @@ int g_death_test_count = 0;
TEST(BarDeathTest, ThreadSafeAndFast) {
g_death_test_count++;
- GTEST_FLAG(death_test_style) = "threadsafe";
+ GTEST_FLAG_SET(death_test_style, "threadsafe");
EXPECT_DEATH_IF_SUPPORTED(::testing::internal::posix::Abort(), "");
- GTEST_FLAG(death_test_style) = "fast";
+ GTEST_FLAG_SET(death_test_style, "fast");
EXPECT_DEATH_IF_SUPPORTED(::testing::internal::posix::Abort(), "");
}
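// GTEST_FLAG_SET(name, value) is the supported setter that replaces direct
// assignment to testing::GTEST_FLAG(name); the matching getter is
// GTEST_FLAG_GET(name). For example:
//
//   GTEST_FLAG_SET(death_test_style, "threadsafe");
//   const std::string style = GTEST_FLAG_GET(death_test_style);  // "threadsafe"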
@@ -120,8 +106,7 @@ TEST_P(MyParamTest, ShouldPass) {
GTEST_CHECK_INT_EQ_(g_param_test_count % kNumberOfParamTests, GetParam());
g_param_test_count++;
}
-INSTANTIATE_TEST_SUITE_P(MyParamSequence,
- MyParamTest,
+INSTANTIATE_TEST_SUITE_P(MyParamSequence, MyParamTest,
testing::Range(0, kNumberOfParamTests));
// Resets the count for each test.
@@ -153,7 +138,8 @@ void TestRepeatUnspecified() {
// Tests the behavior of Google Test when --gtest_repeat has the given value.
void TestRepeat(int repeat) {
- GTEST_FLAG(repeat) = repeat;
+ GTEST_FLAG_SET(repeat, repeat);
+ GTEST_FLAG_SET(recreate_environments_when_repeating, true);
ResetCounts();
GTEST_CHECK_INT_EQ_(repeat > 0 ? 1 : 0, RUN_ALL_TESTS());
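// With recreate_environments_when_repeating set to true, each of the `repeat`
// iterations is expected to tear down and re-create the global environments,
// which is what the g_environment_set_up_count / g_environment_tear_down_count
// checks in this file rely on.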
@@ -163,8 +149,9 @@ void TestRepeat(int repeat) {
// Tests using --gtest_repeat when --gtest_filter specifies an empty
// set of tests.
void TestRepeatWithEmptyFilter(int repeat) {
- GTEST_FLAG(repeat) = repeat;
- GTEST_FLAG(filter) = "None";
+ GTEST_FLAG_SET(repeat, repeat);
+ GTEST_FLAG_SET(recreate_environments_when_repeating, true);
+ GTEST_FLAG_SET(filter, "None");
ResetCounts();
GTEST_CHECK_INT_EQ_(0, RUN_ALL_TESTS());
@@ -174,8 +161,9 @@ void TestRepeatWithEmptyFilter(int repeat) {
// Tests using --gtest_repeat when --gtest_filter specifies a set of
// successful tests.
void TestRepeatWithFilterForSuccessfulTests(int repeat) {
- GTEST_FLAG(repeat) = repeat;
- GTEST_FLAG(filter) = "*-*ShouldFail";
+ GTEST_FLAG_SET(repeat, repeat);
+ GTEST_FLAG_SET(recreate_environments_when_repeating, true);
+ GTEST_FLAG_SET(filter, "*-*ShouldFail");
ResetCounts();
GTEST_CHECK_INT_EQ_(0, RUN_ALL_TESTS());
@@ -190,8 +178,9 @@ void TestRepeatWithFilterForSuccessfulTests(int repeat) {
// Tests using --gtest_repeat when --gtest_filter specifies a set of
// failed tests.
void TestRepeatWithFilterForFailedTests(int repeat) {
- GTEST_FLAG(repeat) = repeat;
- GTEST_FLAG(filter) = "*ShouldFail";
+ GTEST_FLAG_SET(repeat, repeat);
+ GTEST_FLAG_SET(recreate_environments_when_repeating, true);
+ GTEST_FLAG_SET(filter, "*ShouldFail");
ResetCounts();
GTEST_CHECK_INT_EQ_(1, RUN_ALL_TESTS());
diff --git a/googletest/test/gtest_skip_check_output_test.py b/googletest/test/gtest_skip_check_output_test.py
index 14e63ab8..b30a1650 100755
--- a/googletest/test/gtest_skip_check_output_test.py
+++ b/googletest/test/gtest_skip_check_output_test.py
@@ -35,7 +35,7 @@ output.
import re
-import gtest_test_utils
+from googletest.test import gtest_test_utils
# Path to the gtest_skip_in_environment_setup_test binary
EXE_PATH = gtest_test_utils.GetTestExecutablePath('gtest_skip_test')
@@ -51,7 +51,8 @@ class SkipEntireEnvironmentTest(gtest_test_utils.TestCase):
skip_fixture = 'Skipped\nskipping all tests for this fixture\n'
self.assertIsNotNone(
re.search(skip_fixture + '.*' + skip_fixture, OUTPUT, flags=re.DOTALL),
- repr(OUTPUT))
+ repr(OUTPUT),
+ )
self.assertNotIn('FAILED', OUTPUT)
diff --git a/googletest/test/gtest_skip_environment_check_output_test.py b/googletest/test/gtest_skip_environment_check_output_test.py
index 6e791556..388a4e95 100755
--- a/googletest/test/gtest_skip_environment_check_output_test.py
+++ b/googletest/test/gtest_skip_environment_check_output_test.py
@@ -33,11 +33,12 @@ This script invokes gtest_skip_in_environment_setup_test_ and verifies its
output.
"""
-import gtest_test_utils
+from googletest.test import gtest_test_utils
# Path to the gtest_skip_in_environment_setup_test binary
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
- 'gtest_skip_in_environment_setup_test')
+ 'gtest_skip_in_environment_setup_test'
+)
OUTPUT = gtest_test_utils.Subprocess([EXE_PATH]).output
diff --git a/googletest/test/gtest_skip_in_environment_setup_test.cc b/googletest/test/gtest_skip_in_environment_setup_test.cc
index 93723106..5f21c27d 100644
--- a/googletest/test/gtest_skip_in_environment_setup_test.cc
+++ b/googletest/test/gtest_skip_in_environment_setup_test.cc
@@ -31,6 +31,7 @@
// testcases being skipped.
#include <iostream>
+
#include "gtest/gtest.h"
class SetupEnvironment : public testing::Environment {
diff --git a/googletest/test/gtest_skip_test.cc b/googletest/test/gtest_skip_test.cc
index 4a23004c..e1b8d655 100644
--- a/googletest/test/gtest_skip_test.cc
+++ b/googletest/test/gtest_skip_test.cc
@@ -46,10 +46,6 @@ class Fixture : public Test {
}
};
-TEST_F(Fixture, SkipsOneTest) {
- EXPECT_EQ(5, 7);
-}
+TEST_F(Fixture, SkipsOneTest) { EXPECT_EQ(5, 7); }
-TEST_F(Fixture, SkipsAnotherTest) {
- EXPECT_EQ(99, 100);
-}
+TEST_F(Fixture, SkipsAnotherTest) { EXPECT_EQ(99, 100); }
diff --git a/googletest/test/gtest_sole_header_test.cc b/googletest/test/gtest_sole_header_test.cc
index 1d94ac6b..e8e22a83 100644
--- a/googletest/test/gtest_sole_header_test.cc
+++ b/googletest/test/gtest_sole_header_test.cc
@@ -35,9 +35,7 @@
namespace {
-void Subroutine() {
- EXPECT_EQ(42, 42);
-}
+void Subroutine() { EXPECT_EQ(42, 42); }
TEST(NoFatalFailureTest, ExpectNoFatalFailure) {
EXPECT_NO_FATAL_FAILURE(;);
diff --git a/googletest/test/gtest_stress_test.cc b/googletest/test/gtest_stress_test.cc
index 84348191..af8e757d 100644
--- a/googletest/test/gtest_stress_test.cc
+++ b/googletest/test/gtest_stress_test.cc
@@ -27,17 +27,18 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// Tests that SCOPED_TRACE() and various Google Test assertions can be
// used in a large number of threads concurrently.
-#include "gtest/gtest.h"
-
+#include <algorithm>
+#include <memory>
+#include <string>
#include <vector>
+#include "gtest/gtest.h"
#include "src/gtest-internal-inl.h"
-#if GTEST_IS_THREADSAFE
+#ifdef GTEST_IS_THREADSAFE
namespace testing {
namespace {
@@ -66,8 +67,7 @@ std::string IdToString(int id) {
}
void ExpectKeyAndValueWereRecordedForId(
- const std::vector<TestProperty>& properties,
- int id, const char* suffix) {
+ const std::vector<TestProperty>& properties, int id, const char* suffix) {
TestPropertyKeyIs matches_key(IdToKey(id, suffix).c_str());
const std::vector<TestProperty>::const_iterator property =
std::find_if(properties.begin(), properties.end(), matches_key);
@@ -121,15 +121,13 @@ TEST(StressTest, CanUseScopedTraceAndAssertionsInManyThreads) {
std::unique_ptr<ThreadWithParam<int> > threads[kThreadCount];
Notification threads_can_start;
for (int i = 0; i != kThreadCount; i++)
- threads[i].reset(new ThreadWithParam<int>(&ManyAsserts,
- i,
- &threads_can_start));
+ threads[i] = std::make_unique<ThreadWithParam<int>>(&ManyAsserts, i,
+ &threads_can_start);
threads_can_start.Notify();
// Blocks until all the threads are done.
- for (int i = 0; i != kThreadCount; i++)
- threads[i]->Join();
+ for (int i = 0; i != kThreadCount; i++) threads[i]->Join();
}
// Ensures that kThreadCount*kThreadCount failures have been reported.
@@ -149,7 +147,7 @@ TEST(StressTest, CanUseScopedTraceAndAssertionsInManyThreads) {
ExpectKeyAndValueWereRecordedForId(properties, i, "string");
ExpectKeyAndValueWereRecordedForId(properties, i, "int");
}
- CheckTestFailureCount(kThreadCount*kThreadCount);
+ CheckTestFailureCount(kThreadCount * kThreadCount);
}
void FailingThread(bool is_fatal) {
@@ -196,8 +194,8 @@ TEST(FatalFailureTest, ExpectFatalFailureIgnoresFailuresInOtherThreads) {
TEST(FatalFailureOnAllThreadsTest, ExpectFatalFailureOnAllThreads) {
// This statement should succeed, because failures in all threads are
// considered.
- EXPECT_FATAL_FAILURE_ON_ALL_THREADS(
- GenerateFatalFailureInAnotherThread(true), "expected");
+ EXPECT_FATAL_FAILURE_ON_ALL_THREADS(GenerateFatalFailureInAnotherThread(true),
+ "expected");
CheckTestFailureCount(0);
// We need to add a failure, because main() checks that there are failures.
// But when only this test is run, we shouldn't have any failures.
@@ -226,7 +224,7 @@ TEST(NonFatalFailureOnAllThreadsTest, ExpectNonFatalFailureOnAllThreads) {
} // namespace
} // namespace testing
-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
const int result = RUN_ALL_TESTS(); // Expected to fail.
@@ -238,8 +236,7 @@ int main(int argc, char **argv) {
#else
TEST(StressTest,
- DISABLED_ThreadSafetyTestsAreSkippedWhenGoogleTestIsNotThreadSafe) {
-}
+ DISABLED_ThreadSafetyTestsAreSkippedWhenGoogleTestIsNotThreadSafe) {}
int main(int argc, char **argv) {
testing::InitGoogleTest(&argc, argv);
diff --git a/googletest/test/gtest_test_macro_stack_footprint_test.cc b/googletest/test/gtest_test_macro_stack_footprint_test.cc
index a48db050..45f368b3 100644
--- a/googletest/test/gtest_test_macro_stack_footprint_test.cc
+++ b/googletest/test/gtest_test_macro_stack_footprint_test.cc
@@ -39,42 +39,42 @@
// This macro defines 10 dummy tests.
#define TEN_TESTS_(test_case_name) \
- TEST(test_case_name, T0) {} \
- TEST(test_case_name, T1) {} \
- TEST(test_case_name, T2) {} \
- TEST(test_case_name, T3) {} \
- TEST(test_case_name, T4) {} \
- TEST(test_case_name, T5) {} \
- TEST(test_case_name, T6) {} \
- TEST(test_case_name, T7) {} \
- TEST(test_case_name, T8) {} \
+ TEST(test_case_name, T0) {} \
+ TEST(test_case_name, T1) {} \
+ TEST(test_case_name, T2) {} \
+ TEST(test_case_name, T3) {} \
+ TEST(test_case_name, T4) {} \
+ TEST(test_case_name, T5) {} \
+ TEST(test_case_name, T6) {} \
+ TEST(test_case_name, T7) {} \
+ TEST(test_case_name, T8) {} \
TEST(test_case_name, T9) {}
// This macro defines 100 dummy tests.
#define HUNDRED_TESTS_(test_case_name_prefix) \
- TEN_TESTS_(test_case_name_prefix ## 0) \
- TEN_TESTS_(test_case_name_prefix ## 1) \
- TEN_TESTS_(test_case_name_prefix ## 2) \
- TEN_TESTS_(test_case_name_prefix ## 3) \
- TEN_TESTS_(test_case_name_prefix ## 4) \
- TEN_TESTS_(test_case_name_prefix ## 5) \
- TEN_TESTS_(test_case_name_prefix ## 6) \
- TEN_TESTS_(test_case_name_prefix ## 7) \
- TEN_TESTS_(test_case_name_prefix ## 8) \
- TEN_TESTS_(test_case_name_prefix ## 9)
+ TEN_TESTS_(test_case_name_prefix##0) \
+ TEN_TESTS_(test_case_name_prefix##1) \
+ TEN_TESTS_(test_case_name_prefix##2) \
+ TEN_TESTS_(test_case_name_prefix##3) \
+ TEN_TESTS_(test_case_name_prefix##4) \
+ TEN_TESTS_(test_case_name_prefix##5) \
+ TEN_TESTS_(test_case_name_prefix##6) \
+ TEN_TESTS_(test_case_name_prefix##7) \
+ TEN_TESTS_(test_case_name_prefix##8) \
+ TEN_TESTS_(test_case_name_prefix##9)
// This macro defines 1000 dummy tests.
#define THOUSAND_TESTS_(test_case_name_prefix) \
- HUNDRED_TESTS_(test_case_name_prefix ## 0) \
- HUNDRED_TESTS_(test_case_name_prefix ## 1) \
- HUNDRED_TESTS_(test_case_name_prefix ## 2) \
- HUNDRED_TESTS_(test_case_name_prefix ## 3) \
- HUNDRED_TESTS_(test_case_name_prefix ## 4) \
- HUNDRED_TESTS_(test_case_name_prefix ## 5) \
- HUNDRED_TESTS_(test_case_name_prefix ## 6) \
- HUNDRED_TESTS_(test_case_name_prefix ## 7) \
- HUNDRED_TESTS_(test_case_name_prefix ## 8) \
- HUNDRED_TESTS_(test_case_name_prefix ## 9)
+ HUNDRED_TESTS_(test_case_name_prefix##0) \
+ HUNDRED_TESTS_(test_case_name_prefix##1) \
+ HUNDRED_TESTS_(test_case_name_prefix##2) \
+ HUNDRED_TESTS_(test_case_name_prefix##3) \
+ HUNDRED_TESTS_(test_case_name_prefix##4) \
+ HUNDRED_TESTS_(test_case_name_prefix##5) \
+ HUNDRED_TESTS_(test_case_name_prefix##6) \
+ HUNDRED_TESTS_(test_case_name_prefix##7) \
+ HUNDRED_TESTS_(test_case_name_prefix##8) \
+ HUNDRED_TESTS_(test_case_name_prefix##9)
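// The ## operator pastes tokens, so a hypothetical THOUSAND_TESTS_(Foo)
// defines TEST(Foo00, T0) through TEST(Foo99, T9), 1000 tests in all;
// clang-format only removed the spaces around ##, the expansion is identical.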
// Ensures that we can define 1000 TEST()s in the same translation
// unit.
diff --git a/googletest/test/gtest_test_utils.py b/googletest/test/gtest_test_utils.py
index d0c24466..964fa9fa 100755
--- a/googletest/test/gtest_test_utils.py
+++ b/googletest/test/gtest_test_utils.py
@@ -29,9 +29,10 @@
"""Unit test utilities for Google C++ Testing and Mocking Framework."""
# Suppresses the 'Import not at the top of the file' lint complaint.
-# pylint: disable-msg=C6204
+# pylint: disable=g-import-not-at-top
import os
+import subprocess
import sys
IS_WINDOWS = os.name == 'nt'
@@ -42,14 +43,7 @@ import atexit
import shutil
import tempfile
import unittest as _test_module
-
-try:
- import subprocess
- _SUBPROCESS_MODULE_AVAILABLE = True
-except:
- import popen2
- _SUBPROCESS_MODULE_AVAILABLE = False
-# pylint: enable-msg=C6204
+# pylint: enable=g-import-not-at-top
GTEST_OUTPUT_VAR_NAME = 'GTEST_OUTPUT'
@@ -69,23 +63,22 @@ def SetEnvVar(env_var, value):
# Here we expose a class from a particular module, depending on the
-# environment. The comment suppresses the 'Invalid variable name' lint
-# complaint.
-TestCase = _test_module.TestCase # pylint: disable=C6409
+# environment.
+TestCase = _test_module.TestCase
# Initially maps a flag to its default value. After
# _ParseAndStripGTestFlags() is called, maps a flag to its actual value.
-_flag_map = {'source_dir': os.path.dirname(sys.argv[0]),
- 'build_dir': os.path.dirname(sys.argv[0])}
+_flag_map = {
+ 'source_dir': os.path.dirname(sys.argv[0]),
+ 'build_dir': os.path.dirname(sys.argv[0]),
+}
_gtest_flags_are_parsed = False
def _ParseAndStripGTestFlags(argv):
"""Parses and strips Google Test flags from argv. This is idempotent."""
- # Suppresses the lint complaint about a global variable since we need it
- # here to maintain module-wide state.
- global _gtest_flags_are_parsed # pylint: disable=W0603
+ global _gtest_flags_are_parsed
if _gtest_flags_are_parsed:
return
@@ -100,7 +93,7 @@ def _ParseAndStripGTestFlags(argv):
while i < len(argv):
prefix = '--' + flag + '='
if argv[i].startswith(prefix):
- _flag_map[flag] = argv[i][len(prefix):]
+ _flag_map[flag] = argv[i][len(prefix) :]
del argv[i]
break
else:
@@ -156,15 +149,16 @@ def GetTestExecutablePath(executable_name, build_dir=None):
Args:
executable_name: name of the test binary that the test script runs.
- build_dir: directory where to look for executables, by default
- the result of GetBuildDir().
+ build_dir: directory where to look for executables, by default the
+ result of GetBuildDir().
Returns:
The absolute path of the test binary.
"""
- path = os.path.abspath(os.path.join(build_dir or GetBuildDir(),
- executable_name))
+ path = os.path.abspath(
+ os.path.join(build_dir or GetBuildDir(), executable_name)
+ )
if (IS_WINDOWS or IS_CYGWIN or IS_OS2) and not path.endswith('.exe'):
path += '.exe'
@@ -172,8 +166,9 @@ def GetTestExecutablePath(executable_name, build_dir=None):
message = (
'Unable to find the test binary "%s". Please make sure to provide\n'
'a path to the binary via the --build_dir flag or the BUILD_DIR\n'
- 'environment variable.' % path)
- print >> sys.stderr, message
+ 'environment variable.' % path
+ )
+ print(message, file=sys.stderr)
sys.exit(1)
return path
@@ -200,6 +195,7 @@ def GetExitStatus(exit_code):
class Subprocess:
+
def __init__(self, command, working_dir=None, capture_stderr=True, env=None):
"""Changes into a specified directory, if provided, and executes a command.
@@ -209,7 +205,7 @@ class Subprocess:
command: The command to run, in the form of sys.argv.
working_dir: The directory to change into.
capture_stderr: Determines whether to capture stderr in the output member
- or to discard it.
+ or to discard it.
env: Dictionary with environment to pass to the subprocess.
Returns:
@@ -224,69 +220,23 @@ class Subprocess:
combined in a string.
"""
- # The subprocess module is the preferrable way of running programs
- # since it is available and behaves consistently on all platforms,
- # including Windows. But it is only available starting in python 2.4.
- # In earlier python versions, we revert to the popen2 module, which is
- # available in python 2.0 and later but doesn't provide required
- # functionality (Popen4) under Windows. This allows us to support Mac
- # OS X 10.4 Tiger, which has python 2.3 installed.
- if _SUBPROCESS_MODULE_AVAILABLE:
- if capture_stderr:
- stderr = subprocess.STDOUT
- else:
- stderr = subprocess.PIPE
-
- p = subprocess.Popen(command,
- stdout=subprocess.PIPE, stderr=stderr,
- cwd=working_dir, universal_newlines=True, env=env)
- # communicate returns a tuple with the file object for the child's
- # output.
- self.output = p.communicate()[0]
- self._return_code = p.returncode
+ if capture_stderr:
+ stderr = subprocess.STDOUT
else:
- old_dir = os.getcwd()
-
- def _ReplaceEnvDict(dest, src):
- # Changes made by os.environ.clear are not inheritable by child
- # processes until Python 2.6. To produce inheritable changes we have
- # to delete environment items with the del statement.
- for key in dest.keys():
- del dest[key]
- dest.update(src)
-
- # When 'env' is not None, backup the environment variables and replace
- # them with the passed 'env'. When 'env' is None, we simply use the
- # current 'os.environ' for compatibility with the subprocess.Popen
- # semantics used above.
- if env is not None:
- old_environ = os.environ.copy()
- _ReplaceEnvDict(os.environ, env)
-
- try:
- if working_dir is not None:
- os.chdir(working_dir)
- if capture_stderr:
- p = popen2.Popen4(command)
- else:
- p = popen2.Popen3(command)
- p.tochild.close()
- self.output = p.fromchild.read()
- ret_code = p.wait()
- finally:
- os.chdir(old_dir)
-
- # Restore the old environment variables
- # if they were replaced.
- if env is not None:
- _ReplaceEnvDict(os.environ, old_environ)
-
- # Converts ret_code to match the semantics of
- # subprocess.Popen.returncode.
- if os.WIFSIGNALED(ret_code):
- self._return_code = -os.WTERMSIG(ret_code)
- else: # os.WIFEXITED(ret_code) should return True here.
- self._return_code = os.WEXITSTATUS(ret_code)
+ stderr = subprocess.PIPE
+
+ p = subprocess.Popen(
+ command,
+ stdout=subprocess.PIPE,
+ stderr=stderr,
+ cwd=working_dir,
+ universal_newlines=True,
+ env=env,
+ )
+ # communicate returns a tuple with the file object for the child's
+ # output.
+ self.output = p.communicate()[0]
+ self._return_code = p.returncode
if bool(self._return_code & 0x80000000):
self.terminated_by_signal = True
diff --git a/googletest/test/gtest_testbridge_test.py b/googletest/test/gtest_testbridge_test.py
index 87ffad73..0d58758b 100755
--- a/googletest/test/gtest_testbridge_test.py
+++ b/googletest/test/gtest_testbridge_test.py
@@ -31,7 +31,7 @@
import os
-import gtest_test_utils
+from googletest.test import gtest_test_utils
binary_name = 'gtest_testbridge_test_'
COMMAND = gtest_test_utils.GetTestExecutablePath(binary_name)
@@ -52,7 +52,7 @@ class GTestTestFilterTest(gtest_test_utils.TestCase):
subprocess_env[TESTBRIDGE_NAME] = '*.TestThatSucceeds'
p = gtest_test_utils.Subprocess(COMMAND, env=subprocess_env)
- self.assertEquals(0, p.exit_code)
+ self.assertEqual(0, p.exit_code)
Assert('filter = *.TestThatSucceeds' in p.output)
Assert('[ OK ] TestFilterTest.TestThatSucceeds' in p.output)
diff --git a/googletest/test/gtest_testbridge_test_.cc b/googletest/test/gtest_testbridge_test_.cc
index 24617b20..c2c000dc 100644
--- a/googletest/test/gtest_testbridge_test_.cc
+++ b/googletest/test/gtest_testbridge_test_.cc
@@ -27,7 +27,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// This program is meant to be run by gtest_testbridge_test.py. Do not run
// it directly.
diff --git a/googletest/test/gtest_throw_on_failure_ex_test.cc b/googletest/test/gtest_throw_on_failure_ex_test.cc
index 1d95adbf..25d7c797 100644
--- a/googletest/test/gtest_throw_on_failure_ex_test.cc
+++ b/googletest/test/gtest_throw_on_failure_ex_test.cc
@@ -27,16 +27,16 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// Tests Google Test's throw-on-failure mode with exceptions enabled.
-#include "gtest/gtest.h"
-
-#include <stdlib.h>
#include <stdio.h>
+#include <stdlib.h>
#include <string.h>
+
#include <stdexcept>
+#include "gtest/gtest.h"
+
// Prints the given failure message and exits the program with
// non-zero. We use this instead of a Google Test assertion to
// indicate a failure, as the latter is being tested and cannot be
@@ -50,19 +50,19 @@ void Fail(const char* msg) {
// Tests that an assertion failure throws a subclass of
// std::runtime_error.
void TestFailureThrowsRuntimeError() {
- testing::GTEST_FLAG(throw_on_failure) = true;
+ GTEST_FLAG_SET(throw_on_failure, true);
// A successful assertion shouldn't throw.
try {
EXPECT_EQ(3, 3);
- } catch(...) {
+ } catch (...) {
Fail("A successful assertion wrongfully threw.");
}
// A failed assertion should throw a subclass of std::runtime_error.
try {
EXPECT_EQ(2, 3) << "Expected failure";
- } catch(const std::runtime_error& e) {
+ } catch (const std::runtime_error& e) {
if (strstr(e.what(), "Expected failure") != nullptr) return;
printf("%s",
@@ -70,7 +70,7 @@ void TestFailureThrowsRuntimeError() {
"but the message is incorrect. Instead of containing \"Expected "
"failure\", it is:\n");
Fail(e.what());
- } catch(...) {
+ } catch (...) {
Fail("A failed assertion threw the wrong type of exception.");
}
Fail("A failed assertion should've thrown but didn't.");
diff --git a/googletest/test/gtest_unittest.cc b/googletest/test/gtest_unittest.cc
index 1730e8b8..67d776ed 100644
--- a/googletest/test/gtest_unittest.cc
+++ b/googletest/test/gtest_unittest.cc
@@ -37,22 +37,19 @@
// code once "gtest.h" has been #included.
// Do not move it after other gtest #includes.
TEST(CommandLineFlagsTest, CanBeAccessedInCodeOnceGTestHIsIncluded) {
- bool dummy = testing::GTEST_FLAG(also_run_disabled_tests) ||
- testing::GTEST_FLAG(break_on_failure) ||
- testing::GTEST_FLAG(catch_exceptions) ||
- testing::GTEST_FLAG(color) != "unknown" ||
- testing::GTEST_FLAG(fail_fast) ||
- testing::GTEST_FLAG(filter) != "unknown" ||
- testing::GTEST_FLAG(list_tests) ||
- testing::GTEST_FLAG(output) != "unknown" ||
- testing::GTEST_FLAG(brief) || testing::GTEST_FLAG(print_time) ||
- testing::GTEST_FLAG(random_seed) ||
- testing::GTEST_FLAG(repeat) > 0 ||
- testing::GTEST_FLAG(show_internal_stack_frames) ||
- testing::GTEST_FLAG(shuffle) ||
- testing::GTEST_FLAG(stack_trace_depth) > 0 ||
- testing::GTEST_FLAG(stream_result_to) != "unknown" ||
- testing::GTEST_FLAG(throw_on_failure);
+ bool dummy =
+ GTEST_FLAG_GET(also_run_disabled_tests) ||
+ GTEST_FLAG_GET(break_on_failure) || GTEST_FLAG_GET(catch_exceptions) ||
+ GTEST_FLAG_GET(color) != "unknown" || GTEST_FLAG_GET(fail_fast) ||
+ GTEST_FLAG_GET(filter) != "unknown" || GTEST_FLAG_GET(list_tests) ||
+ GTEST_FLAG_GET(output) != "unknown" || GTEST_FLAG_GET(brief) ||
+ GTEST_FLAG_GET(print_time) || GTEST_FLAG_GET(random_seed) ||
+ GTEST_FLAG_GET(repeat) > 0 ||
+ GTEST_FLAG_GET(recreate_environments_when_repeating) ||
+ GTEST_FLAG_GET(show_internal_stack_frames) || GTEST_FLAG_GET(shuffle) ||
+ GTEST_FLAG_GET(stack_trace_depth) > 0 ||
+ GTEST_FLAG_GET(stream_result_to) != "unknown" ||
+ GTEST_FLAG_GET(throw_on_failure);
EXPECT_TRUE(dummy || !dummy); // Suppresses warning that dummy is unused.
}
@@ -63,15 +60,34 @@ TEST(CommandLineFlagsTest, CanBeAccessedInCodeOnceGTestHIsIncluded) {
#include <cstdint>
#include <map>
+#include <memory>
#include <ostream>
+#include <set>
+#include <stdexcept>
#include <string>
#include <type_traits>
#include <unordered_set>
+#include <utility>
#include <vector>
#include "gtest/gtest-spi.h"
#include "src/gtest-internal-inl.h"
+struct ConvertibleGlobalType {
+ // The inner enable_if is to ensure invoking is_constructible doesn't fail.
+ // The outer enable_if is to ensure the overload resolution doesn't encounter
+ // an ambiguity.
+ template <
+ class T,
+ std::enable_if_t<
+ false, std::enable_if_t<std::is_constructible<T>::value, int>> = 0>
+ operator T() const; // NOLINT(google-explicit-constructor)
+};
+void operator<<(ConvertibleGlobalType&, int);
+static_assert(sizeof(decltype(std::declval<ConvertibleGlobalType&>()
+ << 1)(*)()) > 0,
+ "error in operator<< overload resolution");
+
namespace testing {
namespace internal {
@@ -114,15 +130,15 @@ TEST_F(StreamingListenerTest, OnTestIterationEnd) {
EXPECT_EQ("event=TestIterationEnd&passed=1&elapsed_time=0ms\n", *output());
}
-TEST_F(StreamingListenerTest, OnTestCaseStart) {
+TEST_F(StreamingListenerTest, OnTestSuiteStart) {
*output() = "";
- streamer_.OnTestCaseStart(TestCase("FooTest", "Bar", nullptr, nullptr));
+ streamer_.OnTestSuiteStart(TestSuite("FooTest", "Bar", nullptr, nullptr));
EXPECT_EQ("event=TestCaseStart&name=FooTest\n", *output());
}
-TEST_F(StreamingListenerTest, OnTestCaseEnd) {
+TEST_F(StreamingListenerTest, OnTestSuiteEnd) {
*output() = "";
- streamer_.OnTestCaseEnd(TestCase("FooTest", "Bar", nullptr, nullptr));
+ streamer_.OnTestSuiteEnd(TestSuite("FooTest", "Bar", nullptr, nullptr));
EXPECT_EQ("event=TestCaseEnd&passed=1&elapsed_time=0ms\n", *output());
}
@@ -140,8 +156,8 @@ TEST_F(StreamingListenerTest, OnTestEnd) {
TEST_F(StreamingListenerTest, OnTestPartResult) {
*output() = "";
- streamer_.OnTestPartResult(TestPartResult(
- TestPartResult::kFatalFailure, "foo.cc", 42, "failed=\n&%"));
+ streamer_.OnTestPartResult(TestPartResult(TestPartResult::kFatalFailure,
+ "foo.cc", 42, "failed=\n&%"));
// Meta characters in the failure message should be properly escaped.
EXPECT_EQ(
@@ -173,7 +189,7 @@ class TestEventListenersAccessor {
}
static void SuppressEventForwarding(TestEventListeners* listeners) {
- listeners->SuppressEventForwarding();
+ listeners->SuppressEventForwarding(true);
}
};
@@ -199,24 +215,6 @@ using testing::DoubleLE;
using testing::EmptyTestEventListener;
using testing::Environment;
using testing::FloatLE;
-using testing::GTEST_FLAG(also_run_disabled_tests);
-using testing::GTEST_FLAG(break_on_failure);
-using testing::GTEST_FLAG(catch_exceptions);
-using testing::GTEST_FLAG(color);
-using testing::GTEST_FLAG(death_test_use_fork);
-using testing::GTEST_FLAG(fail_fast);
-using testing::GTEST_FLAG(filter);
-using testing::GTEST_FLAG(list_tests);
-using testing::GTEST_FLAG(output);
-using testing::GTEST_FLAG(brief);
-using testing::GTEST_FLAG(print_time);
-using testing::GTEST_FLAG(random_seed);
-using testing::GTEST_FLAG(repeat);
-using testing::GTEST_FLAG(show_internal_stack_frames);
-using testing::GTEST_FLAG(shuffle);
-using testing::GTEST_FLAG(stack_trace_depth);
-using testing::GTEST_FLAG(stream_result_to);
-using testing::GTEST_FLAG(throw_on_failure);
using testing::IsNotSubstring;
using testing::IsSubstring;
using testing::kMaxStackTraceDepth;
@@ -230,7 +228,6 @@ using testing::TestPartResult;
using testing::TestPartResultArray;
using testing::TestProperty;
using testing::TestResult;
-using testing::TestSuite;
using testing::TimeInMillis;
using testing::UnitTest;
using testing::internal::AlwaysFalse;
@@ -246,7 +243,6 @@ using testing::internal::FloatingPoint;
using testing::internal::ForEach;
using testing::internal::FormatEpochTimeInMillisAsIso8601;
using testing::internal::FormatTimeInMillisAsSeconds;
-using testing::internal::GetCurrentOsStackTraceExceptTop;
using testing::internal::GetElementOr;
using testing::internal::GetNextRandomSeed;
using testing::internal::GetRandomSeedFromFlag;
@@ -263,9 +259,7 @@ using testing::internal::IsNotContainer;
using testing::internal::kMaxRandomSeed;
using testing::internal::kTestTypeIdInGoogleTest;
using testing::internal::NativeArray;
-using testing::internal::OsStackTraceGetter;
-using testing::internal::OsStackTraceGetterInterface;
-using testing::internal::ParseInt32Flag;
+using testing::internal::ParseFlag;
using testing::internal::RelationToSourceCopy;
using testing::internal::RelationToSourceReference;
using testing::internal::ShouldRunTestOnShard;
@@ -278,7 +272,6 @@ using testing::internal::StreamableToString;
using testing::internal::String;
using testing::internal::TestEventListenersAccessor;
using testing::internal::TestResultAccessor;
-using testing::internal::UnitTestImpl;
using testing::internal::WideStringToUtf8;
using testing::internal::edit_distance::CalculateOptimalEdits;
using testing::internal::edit_distance::CreateUnifiedDiff;
@@ -289,15 +282,13 @@ using testing::internal::CaptureStdout;
using testing::internal::GetCapturedStdout;
#endif
-#if GTEST_IS_THREADSAFE
+#ifdef GTEST_IS_THREADSAFE
using testing::internal::ThreadWithParam;
#endif
-class TestingVector : public std::vector<int> {
-};
+class TestingVector : public std::vector<int> {};
-::std::ostream& operator<<(::std::ostream& os,
- const TestingVector& vector) {
+::std::ostream& operator<<(::std::ostream& os, const TestingVector& vector) {
os << "{ ";
for (size_t i = 0; i < vector.size(); i++) {
os << vector[i] << " ";
@@ -404,7 +395,7 @@ TEST(CanonicalizeForStdLibVersioning, ElidesDoubleUnderNames) {
// Tests FormatTimeInMillisAsSeconds().
TEST(FormatTimeInMillisAsSecondsTest, FormatsZero) {
- EXPECT_EQ("0", FormatTimeInMillisAsSeconds(0));
+ EXPECT_EQ("0.", FormatTimeInMillisAsSeconds(0));
}
TEST(FormatTimeInMillisAsSecondsTest, FormatsPositiveNumber) {
@@ -412,7 +403,11 @@ TEST(FormatTimeInMillisAsSecondsTest, FormatsPositiveNumber) {
EXPECT_EQ("0.01", FormatTimeInMillisAsSeconds(10));
EXPECT_EQ("0.2", FormatTimeInMillisAsSeconds(200));
EXPECT_EQ("1.2", FormatTimeInMillisAsSeconds(1200));
- EXPECT_EQ("3", FormatTimeInMillisAsSeconds(3000));
+ EXPECT_EQ("3.", FormatTimeInMillisAsSeconds(3000));
+ EXPECT_EQ("10.", FormatTimeInMillisAsSeconds(10000));
+ EXPECT_EQ("100.", FormatTimeInMillisAsSeconds(100000));
+ EXPECT_EQ("123.456", FormatTimeInMillisAsSeconds(123456));
+ EXPECT_EQ("1234567.89", FormatTimeInMillisAsSeconds(1234567890));
}
TEST(FormatTimeInMillisAsSecondsTest, FormatsNegativeNumber) {
@@ -420,12 +415,16 @@ TEST(FormatTimeInMillisAsSecondsTest, FormatsNegativeNumber) {
EXPECT_EQ("-0.01", FormatTimeInMillisAsSeconds(-10));
EXPECT_EQ("-0.2", FormatTimeInMillisAsSeconds(-200));
EXPECT_EQ("-1.2", FormatTimeInMillisAsSeconds(-1200));
- EXPECT_EQ("-3", FormatTimeInMillisAsSeconds(-3000));
+ EXPECT_EQ("-3.", FormatTimeInMillisAsSeconds(-3000));
+ EXPECT_EQ("-10.", FormatTimeInMillisAsSeconds(-10000));
+ EXPECT_EQ("-100.", FormatTimeInMillisAsSeconds(-100000));
+ EXPECT_EQ("-123.456", FormatTimeInMillisAsSeconds(-123456));
+ EXPECT_EQ("-1234567.89", FormatTimeInMillisAsSeconds(-1234567890));
}
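
// A minimal sketch (not GoogleTest's actual implementation; the name is
// illustrative) of a formatter that reproduces the expectations above:
// whole seconds keep a trailing dot, and trailing zeros in the fractional
// part are dropped.
#include <string>

static std::string FormatMillisAsSecondsSketch(long long ms) {
  const bool negative = ms < 0;
  const unsigned long long v =
      negative ? 0ULL - static_cast<unsigned long long>(ms)
               : static_cast<unsigned long long>(ms);
  std::string frac = std::to_string(v % 1000);  // 0..999 milliseconds
  frac.insert(0, 3 - frac.size(), '0');         // pad to three digits
  while (!frac.empty() && frac.back() == '0') frac.pop_back();
  return (negative ? "-" : "") + std::to_string(v / 1000) + "." + frac;
}
// e.g. FormatMillisAsSecondsSketch(3000) == "3." and
//      FormatMillisAsSecondsSketch(123456) == "123.456".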
// Tests FormatEpochTimeInMillisAsIso8601(). The correctness of conversion
// for particular dates below was verified in Python using
-// datetime.datetime.fromutctimestamp(<timetamp>/1000).
+// datetime.datetime.utcfromtimestamp(<timestamp>/1000).
// FormatEpochTimeInMillisAsIso8601 depends on the current timezone, so we
// have to set up a particular timezone to obtain predictable results.
@@ -438,11 +437,12 @@ class FormatEpochTimeInMillisAsIso8601Test : public Test {
private:
void SetUp() override {
- saved_tz_ = nullptr;
+ saved_tz_.reset();
- GTEST_DISABLE_MSC_DEPRECATED_PUSH_(/* getenv, strdup: deprecated */)
- if (getenv("TZ"))
- saved_tz_ = strdup(getenv("TZ"));
+ GTEST_DISABLE_MSC_DEPRECATED_PUSH_(/* getenv: deprecated */)
+ if (const char* tz = getenv("TZ")) {
+ saved_tz_ = std::make_unique<std::string>(tz);
+ }
GTEST_DISABLE_MSC_DEPRECATED_POP_()
// Set up the time zone for FormatEpochTimeInMillisAsIso8601 to use. We
@@ -452,16 +452,15 @@ class FormatEpochTimeInMillisAsIso8601Test : public Test {
}
void TearDown() override {
- SetTimeZone(saved_tz_);
- free(const_cast<char*>(saved_tz_));
- saved_tz_ = nullptr;
+ SetTimeZone(saved_tz_ != nullptr ? saved_tz_->c_str() : nullptr);
+ saved_tz_.reset();
}
static void SetTimeZone(const char* time_zone) {
// tzset() distinguishes between the TZ variable being present but empty
// and not being present at all, so we have to consider the case of
// time_zone being NULL.
-#if _MSC_VER || GTEST_OS_WINDOWS_MINGW
+#if defined(_MSC_VER) || defined(GTEST_OS_WINDOWS_MINGW)
// ...Unless it's MSVC, whose standard library's _putenv doesn't
// distinguish between an empty and a missing variable.
const std::string env_var =
@@ -471,6 +470,12 @@ class FormatEpochTimeInMillisAsIso8601Test : public Test {
tzset();
GTEST_DISABLE_MSC_WARNINGS_POP_()
#else
+#if defined(GTEST_OS_LINUX_ANDROID) && __ANDROID_API__ < 21
+  // Work around a KitKat bug in tzset() by setting "UTC" before setting "UTC+00".
+ // See https://github.com/android/ndk/issues/1604.
+ setenv("TZ", "UTC", 1);
+ tzset();
+#endif
if (time_zone) {
setenv(("TZ"), time_zone, 1);
} else {
@@ -480,7 +485,7 @@ class FormatEpochTimeInMillisAsIso8601Test : public Test {
#endif
}
- const char* saved_tz_;
+ std::unique_ptr<std::string> saved_tz_; // Empty and null are different here
};
const TimeInMillis FormatEpochTimeInMillisAsIso8601Test::kMillisPerSec;
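
// Why the fixture treats an empty TZ differently from an unset one: per
// POSIX, tzset() interprets TZ="" as UTC, while an absent TZ means "use the
// system default zone". A POSIX-only sketch of the difference (the function
// name is illustrative):
#include <cstdio>
#include <cstdlib>
#include <ctime>

static void DemonstrateTzSemanticsSketch() {
  setenv("TZ", "", 1);  // present but empty: tzset() selects UTC
  tzset();
  std::printf("empty TZ -> %s\n", tzname[0]);
  unsetenv("TZ");       // absent: tzset() falls back to the system zone
  tzset();
  std::printf("no TZ    -> %s\n", tzname[0]);
}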
@@ -491,9 +496,8 @@ TEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsTwoDigitSegments) {
}
TEST_F(FormatEpochTimeInMillisAsIso8601Test, IncludesMillisecondsAfterDot) {
- EXPECT_EQ(
- "2011-10-31T18:52:42.234",
- FormatEpochTimeInMillisAsIso8601(1320087162 * kMillisPerSec + 234));
+ EXPECT_EQ("2011-10-31T18:52:42.234",
+ FormatEpochTimeInMillisAsIso8601(1320087162 * kMillisPerSec + 234));
}
TEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsLeadingZeroes) {
@@ -510,10 +514,10 @@ TEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsEpochStart) {
EXPECT_EQ("1970-01-01T00:00:00.000", FormatEpochTimeInMillisAsIso8601(0));
}
-# ifdef __BORLANDC__
+#ifdef __BORLANDC__
// Silences warnings: "Condition is always true", "Unreachable code"
-# pragma option push -w-ccc -w-rch
-# endif
+#pragma option push -w-ccc -w-rch
+#endif
// Tests that the LHS of EXPECT_EQ or ASSERT_EQ can be used as a null literal
// when the RHS is a pointer type.
@@ -587,10 +591,10 @@ TEST(NullLiteralTest, NoConversionNoWarning) {
#pragma clang diagnostic pop
#endif
-# ifdef __BORLANDC__
+#ifdef __BORLANDC__
// Restores warnings after previous "#pragma option push" suppressed them.
-# pragma option pop
-# endif
+#pragma option pop
+#endif
//
// Tests CodePointToUtf8().
@@ -618,20 +622,17 @@ TEST(CodePointToUtf8Test, CanEncode8To11Bits) {
// Some compilers (e.g., GCC on MinGW) cannot handle non-ASCII codepoints
// in wide strings and wide chars. In order to accommodate them, we have to
// introduce such character constants as integers.
- EXPECT_EQ("\xD5\xB6",
- CodePointToUtf8(static_cast<wchar_t>(0x576)));
+ EXPECT_EQ("\xD5\xB6", CodePointToUtf8(static_cast<wchar_t>(0x576)));
}
// Tests that Unicode code-points that have 12 to 16 bits are encoded
// as 1110xxxx 10xxxxxx 10xxxxxx.
TEST(CodePointToUtf8Test, CanEncode12To16Bits) {
// 0000 1000 1101 0011 => 1110-0000 10-100011 10-010011
- EXPECT_EQ("\xE0\xA3\x93",
- CodePointToUtf8(static_cast<wchar_t>(0x8D3)));
+ EXPECT_EQ("\xE0\xA3\x93", CodePointToUtf8(static_cast<wchar_t>(0x8D3)));
// 1100 0111 0100 1101 => 1110-1100 10-011101 10-001101
- EXPECT_EQ("\xEC\x9D\x8D",
- CodePointToUtf8(static_cast<wchar_t>(0xC74D)));
+ EXPECT_EQ("\xEC\x9D\x8D", CodePointToUtf8(static_cast<wchar_t>(0xC74D)));
}
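
// A minimal sketch of the three-byte UTF-8 rule the comments above describe
// (1110xxxx 10xxxxxx 10xxxxxx); not GoogleTest's CodePointToUtf8, and the
// function name is illustrative.
#include <string>

static std::string EncodeUtf8ThreeByteSketch(unsigned int cp) {
  // Valid for code points in [0x800, 0xFFFF].
  std::string out;
  out += static_cast<char>(0xE0 | (cp >> 12));          // 1110xxxx
  out += static_cast<char>(0x80 | ((cp >> 6) & 0x3F));  // 10xxxxxx
  out += static_cast<char>(0x80 | (cp & 0x3F));         // 10xxxxxx
  return out;  // EncodeUtf8ThreeByteSketch(0x8D3) == "\xE0\xA3\x93"
}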
#if !GTEST_WIDE_STRING_USES_UTF16_
@@ -683,7 +684,7 @@ TEST(WideStringToUtf8Test, CanEncode8To11Bits) {
EXPECT_STREQ("\xC3\x93", WideStringToUtf8(L"\xD3", -1).c_str());
// 101 0111 0110 => 110-10101 10-110110
- const wchar_t s[] = { 0x576, '\0' };
+ const wchar_t s[] = {0x576, '\0'};
EXPECT_STREQ("\xD5\xB6", WideStringToUtf8(s, 1).c_str());
EXPECT_STREQ("\xD5\xB6", WideStringToUtf8(s, -1).c_str());
}
@@ -692,12 +693,12 @@ TEST(WideStringToUtf8Test, CanEncode8To11Bits) {
// as 1110xxxx 10xxxxxx 10xxxxxx.
TEST(WideStringToUtf8Test, CanEncode12To16Bits) {
// 0000 1000 1101 0011 => 1110-0000 10-100011 10-010011
- const wchar_t s1[] = { 0x8D3, '\0' };
+ const wchar_t s1[] = {0x8D3, '\0'};
EXPECT_STREQ("\xE0\xA3\x93", WideStringToUtf8(s1, 1).c_str());
EXPECT_STREQ("\xE0\xA3\x93", WideStringToUtf8(s1, -1).c_str());
// 1100 0111 0100 1101 => 1110-1100 10-011101 10-001101
- const wchar_t s2[] = { 0xC74D, '\0' };
+ const wchar_t s2[] = {0xC74D, '\0'};
EXPECT_STREQ("\xEC\x9D\x8D", WideStringToUtf8(s2, 1).c_str());
EXPECT_STREQ("\xEC\x9D\x8D", WideStringToUtf8(s2, -1).c_str());
}
@@ -732,11 +733,11 @@ TEST(WideStringToUtf8Test, CanEncodeInvalidCodePoint) {
EXPECT_STREQ("(Invalid Unicode 0xABCDFF)",
WideStringToUtf8(L"\xABCDFF", -1).c_str());
}
-#else // !GTEST_WIDE_STRING_USES_UTF16_
+#else // !GTEST_WIDE_STRING_USES_UTF16_
// Tests that surrogate pairs are encoded correctly on systems that use
// UTF-16 encoding for wide strings.
TEST(WideStringToUtf8Test, CanEncodeValidUtf16SUrrogatePairs) {
- const wchar_t s[] = { 0xD801, 0xDC00, '\0' };
+ const wchar_t s[] = {0xD801, 0xDC00, '\0'};
EXPECT_STREQ("\xF0\x90\x90\x80", WideStringToUtf8(s, -1).c_str());
}
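
// How a UTF-16 surrogate pair maps to a code point, matching the test above:
// 0xD801 0xDC00 -> U+10400, which is F0 90 90 80 in UTF-8. A sketch of the
// standard decoding rule (the function name is illustrative):
static unsigned int CombineSurrogatesSketch(wchar_t lead, wchar_t trail) {
  return 0x10000 + ((static_cast<unsigned int>(lead) - 0xD800) << 10) +
         (static_cast<unsigned int>(trail) - 0xDC00);
}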
@@ -744,13 +745,13 @@ TEST(WideStringToUtf8Test, CanEncodeValidUtf16SUrrogatePairs) {
// generates the expected result.
TEST(WideStringToUtf8Test, CanEncodeInvalidUtf16SurrogatePair) {
// Leading surrogate is at the end of the string.
- const wchar_t s1[] = { 0xD800, '\0' };
+ const wchar_t s1[] = {0xD800, '\0'};
EXPECT_STREQ("\xED\xA0\x80", WideStringToUtf8(s1, -1).c_str());
// Leading surrogate is not followed by the trailing surrogate.
- const wchar_t s2[] = { 0xD800, 'M', '\0' };
+ const wchar_t s2[] = {0xD800, 'M', '\0'};
EXPECT_STREQ("\xED\xA0\x80M", WideStringToUtf8(s2, -1).c_str());
// Trailing surrogate appears without a leading surrogate.
- const wchar_t s3[] = { 0xDC00, 'P', 'Q', 'R', '\0' };
+ const wchar_t s3[] = {0xDC00, 'P', 'Q', 'R', '\0'};
EXPECT_STREQ("\xED\xB0\x80PQR", WideStringToUtf8(s3, -1).c_str());
}
#endif // !GTEST_WIDE_STRING_USES_UTF16_
@@ -758,21 +759,24 @@ TEST(WideStringToUtf8Test, CanEncodeInvalidUtf16SurrogatePair) {
// Tests that codepoint concatenation works correctly.
#if !GTEST_WIDE_STRING_USES_UTF16_
TEST(WideStringToUtf8Test, ConcatenatesCodepointsCorrectly) {
- const wchar_t s[] = { 0x108634, 0xC74D, '\n', 0x576, 0x8D3, 0x108634, '\0'};
+ const wchar_t s[] = {0x108634, 0xC74D, '\n', 0x576, 0x8D3, 0x108634, '\0'};
EXPECT_STREQ(
"\xF4\x88\x98\xB4"
- "\xEC\x9D\x8D"
- "\n"
- "\xD5\xB6"
- "\xE0\xA3\x93"
- "\xF4\x88\x98\xB4",
+ "\xEC\x9D\x8D"
+ "\n"
+ "\xD5\xB6"
+ "\xE0\xA3\x93"
+ "\xF4\x88\x98\xB4",
WideStringToUtf8(s, -1).c_str());
}
#else
TEST(WideStringToUtf8Test, ConcatenatesCodepointsCorrectly) {
- const wchar_t s[] = { 0xC74D, '\n', 0x576, 0x8D3, '\0'};
+ const wchar_t s[] = {0xC74D, '\n', 0x576, 0x8D3, '\0'};
EXPECT_STREQ(
- "\xEC\x9D\x8D" "\n" "\xD5\xB6" "\xE0\xA3\x93",
+ "\xEC\x9D\x8D"
+ "\n"
+ "\xD5\xB6"
+ "\xE0\xA3\x93",
WideStringToUtf8(s, -1).c_str());
}
#endif // !GTEST_WIDE_STRING_USES_UTF16_
@@ -781,9 +785,8 @@ TEST(WideStringToUtf8Test, ConcatenatesCodepointsCorrectly) {
TEST(RandomDeathTest, GeneratesCrashesOnInvalidRange) {
testing::internal::Random random(42);
- EXPECT_DEATH_IF_SUPPORTED(
- random.Generate(0),
- "Cannot generate a number in the range \\[0, 0\\)");
+ EXPECT_DEATH_IF_SUPPORTED(random.Generate(0),
+ "Cannot generate a number in the range \\[0, 0\\)");
EXPECT_DEATH_IF_SUPPORTED(
random.Generate(testing::internal::Random::kMaxRange + 1),
"Generation of a number in \\[0, 2147483649\\) was requested, "
@@ -912,7 +915,7 @@ class VectorShuffleTest : public Test {
return true;
}
- bool found_in_vector[kVectorSize] = { false };
+ bool found_in_vector[kVectorSize] = {false};
for (size_t i = 0; i < vector.size(); i++) {
const int e = vector[i];
if (e < 0 || e >= static_cast<int>(kVectorSize) || found_in_vector[e]) {
@@ -939,8 +942,8 @@ class VectorShuffleTest : public Test {
return false;
}
- static bool RangeIsUnshuffled(
- const TestingVector& vector, int begin, int end) {
+ static bool RangeIsUnshuffled(const TestingVector& vector, int begin,
+ int end) {
return !RangeIsShuffled(vector, begin, end);
}
@@ -965,7 +968,7 @@ TEST_F(VectorShuffleTest, HandlesEmptyRange) {
ASSERT_PRED1(VectorIsUnshuffled, vector_);
// ...in the middle...
- ShuffleRange(&random_, kVectorSize/2, kVectorSize/2, &vector_);
+ ShuffleRange(&random_, kVectorSize / 2, kVectorSize / 2, &vector_);
ASSERT_PRED1(VectorIsNotCorrupt, vector_);
ASSERT_PRED1(VectorIsUnshuffled, vector_);
@@ -987,7 +990,7 @@ TEST_F(VectorShuffleTest, HandlesRangeOfSizeOne) {
ASSERT_PRED1(VectorIsUnshuffled, vector_);
// ...in the middle...
- ShuffleRange(&random_, kVectorSize/2, kVectorSize/2 + 1, &vector_);
+ ShuffleRange(&random_, kVectorSize / 2, kVectorSize / 2 + 1, &vector_);
ASSERT_PRED1(VectorIsNotCorrupt, vector_);
ASSERT_PRED1(VectorIsUnshuffled, vector_);
@@ -1012,7 +1015,7 @@ TEST_F(VectorShuffleTest, ShufflesEntireVector) {
}
TEST_F(VectorShuffleTest, ShufflesStartOfVector) {
- const int kRangeSize = kVectorSize/2;
+ const int kRangeSize = kVectorSize / 2;
ShuffleRange(&random_, 0, kRangeSize, &vector_);
@@ -1034,11 +1037,11 @@ TEST_F(VectorShuffleTest, ShufflesEndOfVector) {
TEST_F(VectorShuffleTest, ShufflesMiddleOfVector) {
const int kRangeSize = static_cast<int>(kVectorSize) / 3;
- ShuffleRange(&random_, kRangeSize, 2*kRangeSize, &vector_);
+ ShuffleRange(&random_, kRangeSize, 2 * kRangeSize, &vector_);
ASSERT_PRED1(VectorIsNotCorrupt, vector_);
EXPECT_PRED3(RangeIsUnshuffled, vector_, 0, kRangeSize);
- EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize, 2*kRangeSize);
+ EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize, 2 * kRangeSize);
EXPECT_PRED3(RangeIsUnshuffled, vector_, 2 * kRangeSize,
static_cast<int>(kVectorSize));
}
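
// A hedged Fisher-Yates sketch of shuffling only [begin, end) within a
// vector, which is the contract these tests exercise; not GoogleTest's
// ShuffleRange. It assumes an RNG with Generate(range) returning a value
// in [0, range), like testing::internal::Random.
#include <utility>
#include <vector>

template <typename Rng>
static void ShuffleRangeSketch(Rng* rng, int begin, int end,
                               std::vector<int>* v) {
  for (int i = end - 1; i > begin; --i) {
    // Pick j uniformly from [begin, i] and swap.
    const int j = begin + static_cast<int>(rng->Generate(
                              static_cast<unsigned int>(i - begin + 1)));
    std::swap((*v)[i], (*v)[j]);
  }
}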
@@ -1099,17 +1102,16 @@ TEST(StringTest, CaseInsensitiveWideCStringEquals) {
EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L"FOOBAR", L"foobar"));
}
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
// Tests String::ShowWideCString().
TEST(StringTest, ShowWideCString) {
- EXPECT_STREQ("(null)",
- String::ShowWideCString(NULL).c_str());
+ EXPECT_STREQ("(null)", String::ShowWideCString(NULL).c_str());
EXPECT_STREQ("", String::ShowWideCString(L"").c_str());
EXPECT_STREQ("foo", String::ShowWideCString(L"foo").c_str());
}
-# if GTEST_OS_WINDOWS_MOBILE
+#ifdef GTEST_OS_WINDOWS_MOBILE
TEST(StringTest, AnsiAndUtf16Null) {
EXPECT_EQ(NULL, String::AnsiToUtf16(NULL));
EXPECT_EQ(NULL, String::Utf16ToAnsi(NULL));
@@ -1118,21 +1120,21 @@ TEST(StringTest, AnsiAndUtf16Null) {
TEST(StringTest, AnsiAndUtf16ConvertBasic) {
const char* ansi = String::Utf16ToAnsi(L"str");
EXPECT_STREQ("str", ansi);
- delete [] ansi;
+ delete[] ansi;
const WCHAR* utf16 = String::AnsiToUtf16("str");
EXPECT_EQ(0, wcsncmp(L"str", utf16, 3));
- delete [] utf16;
+ delete[] utf16;
}
TEST(StringTest, AnsiAndUtf16ConvertPathChars) {
const char* ansi = String::Utf16ToAnsi(L".:\\ \"*?");
EXPECT_STREQ(".:\\ \"*?", ansi);
- delete [] ansi;
+ delete[] ansi;
const WCHAR* utf16 = String::AnsiToUtf16(".:\\ \"*?");
EXPECT_EQ(0, wcsncmp(L".:\\ \"*?", utf16, 3));
- delete [] utf16;
+ delete[] utf16;
}
-# endif // GTEST_OS_WINDOWS_MOBILE
+#endif // GTEST_OS_WINDOWS_MOBILE
#endif // GTEST_OS_WINDOWS
@@ -1154,9 +1156,7 @@ TEST(TestPropertyTest, ReplaceStringValue) {
// AddFatalFailure() and AddNonfatalFailure() must be stand-alone
// functions (i.e. their definitions cannot be inlined at the call
// sites), or C++Builder won't compile the code.
-static void AddFatalFailure() {
- FAIL() << "Expected fatal failure.";
-}
+static void AddFatalFailure() { FAIL() << "Expected fatal failure."; }
static void AddNonfatalFailure() {
ADD_FAILURE() << "Expected non-fatal failure.";
@@ -1164,10 +1164,7 @@ static void AddNonfatalFailure() {
class ScopedFakeTestPartResultReporterTest : public Test {
public: // Must be public and not protected due to a bug in g++ 3.4.2.
- enum FailureMode {
- FATAL_FAILURE,
- NONFATAL_FAILURE
- };
+ enum FailureMode { FATAL_FAILURE, NONFATAL_FAILURE };
static void AddFailure(FailureMode failure) {
if (failure == FATAL_FAILURE) {
AddFatalFailure();
@@ -1204,10 +1201,10 @@ TEST_F(ScopedFakeTestPartResultReporterTest, DeprecatedConstructor) {
EXPECT_EQ(1, results.size());
}
-#if GTEST_IS_THREADSAFE
+#ifdef GTEST_IS_THREADSAFE
class ScopedFakeTestPartResultReporterWithThreadsTest
- : public ScopedFakeTestPartResultReporterTest {
+ : public ScopedFakeTestPartResultReporterTest {
protected:
static void AddFailureInOtherThread(FailureMode failure) {
ThreadWithParam<FailureMode> thread(&AddFailure, failure, nullptr);
@@ -1260,7 +1257,7 @@ TEST_F(ExpectFatalFailureTest, CatchesFatalFailureOnAllThreads) {
#ifdef __BORLANDC__
// Silences warnings: "Condition is always true"
-# pragma option push -w-ccc
+#pragma option push -w-ccc
#endif
// Tests that EXPECT_FATAL_FAILURE() can be used in a non-void
@@ -1288,7 +1285,7 @@ void DoesNotAbortHelper(bool* aborted) {
#ifdef __BORLANDC__
// Restores warnings after previous "#pragma option push" suppressed them.
-# pragma option pop
+#pragma option pop
#endif
TEST_F(ExpectFatalFailureTest, DoesNotAbort) {
@@ -1307,16 +1304,20 @@ static int global_var = 0;
TEST_F(ExpectFatalFailureTest, AcceptsMacroThatExpandsToUnprotectedComma) {
#ifndef __BORLANDC__
// ICE's in C++Builder.
- EXPECT_FATAL_FAILURE({
- GTEST_USE_UNPROTECTED_COMMA_;
- AddFatalFailure();
- }, "");
+ EXPECT_FATAL_FAILURE(
+ {
+ GTEST_USE_UNPROTECTED_COMMA_;
+ AddFatalFailure();
+ },
+ "");
#endif
- EXPECT_FATAL_FAILURE_ON_ALL_THREADS({
- GTEST_USE_UNPROTECTED_COMMA_;
- AddFatalFailure();
- }, "");
+ EXPECT_FATAL_FAILURE_ON_ALL_THREADS(
+ {
+ GTEST_USE_UNPROTECTED_COMMA_;
+ AddFatalFailure();
+ },
+ "");
}
// Tests EXPECT_NONFATAL_FAILURE{,ON_ALL_THREADS}.
@@ -1324,8 +1325,7 @@ TEST_F(ExpectFatalFailureTest, AcceptsMacroThatExpandsToUnprotectedComma) {
typedef ScopedFakeTestPartResultReporterTest ExpectNonfatalFailureTest;
TEST_F(ExpectNonfatalFailureTest, CatchesNonfatalFailure) {
- EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(),
- "Expected non-fatal failure.");
+ EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(), "Expected non-fatal failure.");
}
TEST_F(ExpectNonfatalFailureTest, AcceptsStdStringObject) {
@@ -1344,18 +1344,22 @@ TEST_F(ExpectNonfatalFailureTest, CatchesNonfatalFailureOnAllThreads) {
// statement that contains a macro which expands to code containing an
// unprotected comma.
TEST_F(ExpectNonfatalFailureTest, AcceptsMacroThatExpandsToUnprotectedComma) {
- EXPECT_NONFATAL_FAILURE({
- GTEST_USE_UNPROTECTED_COMMA_;
- AddNonfatalFailure();
- }, "");
+ EXPECT_NONFATAL_FAILURE(
+ {
+ GTEST_USE_UNPROTECTED_COMMA_;
+ AddNonfatalFailure();
+ },
+ "");
- EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS({
- GTEST_USE_UNPROTECTED_COMMA_;
- AddNonfatalFailure();
- }, "");
+ EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(
+ {
+ GTEST_USE_UNPROTECTED_COMMA_;
+ AddNonfatalFailure();
+ },
+ "");
}
-#if GTEST_IS_THREADSAFE
+#ifdef GTEST_IS_THREADSAFE
typedef ScopedFakeTestPartResultReporterWithThreadsTest
ExpectFailureWithThreadsTest;
@@ -1396,21 +1400,18 @@ class TestResultTest : public Test {
typedef std::vector<TestPartResult> TPRVector;
// We make use of 2 TestPartResult objects,
- TestPartResult * pr1, * pr2;
+ TestPartResult *pr1, *pr2;
// ... and 3 TestResult objects.
- TestResult * r0, * r1, * r2;
+ TestResult *r0, *r1, *r2;
void SetUp() override {
// pr1 is for success.
- pr1 = new TestPartResult(TestPartResult::kSuccess,
- "foo/bar.cc",
- 10,
+ pr1 = new TestPartResult(TestPartResult::kSuccess, "foo/bar.cc", 10,
"Success!");
// pr2 is for fatal failure.
- pr2 = new TestPartResult(TestPartResult::kFatalFailure,
- "foo/bar.cc",
+ pr2 = new TestPartResult(TestPartResult::kFatalFailure, "foo/bar.cc",
-1, // This line number means "unknown"
"Failure!");
@@ -1423,10 +1424,10 @@ class TestResultTest : public Test {
// state, in particular the TestPartResult vector it holds.
// test_part_results() returns a const reference to this vector.
// We cast it to a non-const object so that it can be modified.
- TPRVector* results1 = const_cast<TPRVector*>(
- &TestResultAccessor::test_part_results(*r1));
- TPRVector* results2 = const_cast<TPRVector*>(
- &TestResultAccessor::test_part_results(*r2));
+ TPRVector* results1 =
+ const_cast<TPRVector*>(&TestResultAccessor::test_part_results(*r1));
+ TPRVector* results2 =
+ const_cast<TPRVector*>(&TestResultAccessor::test_part_results(*r2));
// r0 is an empty TestResult.
@@ -1597,23 +1598,24 @@ class GTestFlagSaverTest : public Test {
static void SetUpTestSuite() {
saver_ = new GTestFlagSaver;
- GTEST_FLAG(also_run_disabled_tests) = false;
- GTEST_FLAG(break_on_failure) = false;
- GTEST_FLAG(catch_exceptions) = false;
- GTEST_FLAG(death_test_use_fork) = false;
- GTEST_FLAG(color) = "auto";
- GTEST_FLAG(fail_fast) = false;
- GTEST_FLAG(filter) = "";
- GTEST_FLAG(list_tests) = false;
- GTEST_FLAG(output) = "";
- GTEST_FLAG(brief) = false;
- GTEST_FLAG(print_time) = true;
- GTEST_FLAG(random_seed) = 0;
- GTEST_FLAG(repeat) = 1;
- GTEST_FLAG(shuffle) = false;
- GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth;
- GTEST_FLAG(stream_result_to) = "";
- GTEST_FLAG(throw_on_failure) = false;
+ GTEST_FLAG_SET(also_run_disabled_tests, false);
+ GTEST_FLAG_SET(break_on_failure, false);
+ GTEST_FLAG_SET(catch_exceptions, false);
+ GTEST_FLAG_SET(death_test_use_fork, false);
+ GTEST_FLAG_SET(color, "auto");
+ GTEST_FLAG_SET(fail_fast, false);
+ GTEST_FLAG_SET(filter, "");
+ GTEST_FLAG_SET(list_tests, false);
+ GTEST_FLAG_SET(output, "");
+ GTEST_FLAG_SET(brief, false);
+ GTEST_FLAG_SET(print_time, true);
+ GTEST_FLAG_SET(random_seed, 0);
+ GTEST_FLAG_SET(repeat, 1);
+ GTEST_FLAG_SET(recreate_environments_when_repeating, true);
+ GTEST_FLAG_SET(shuffle, false);
+ GTEST_FLAG_SET(stack_trace_depth, kMaxStackTraceDepth);
+ GTEST_FLAG_SET(stream_result_to, "");
+ GTEST_FLAG_SET(throw_on_failure, false);
}
// Restores the Google Test flags that the tests have modified. This will
@@ -1626,41 +1628,43 @@ class GTestFlagSaverTest : public Test {
// Verifies that the Google Test flags have their default values, and then
// modifies each of them.
void VerifyAndModifyFlags() {
- EXPECT_FALSE(GTEST_FLAG(also_run_disabled_tests));
- EXPECT_FALSE(GTEST_FLAG(break_on_failure));
- EXPECT_FALSE(GTEST_FLAG(catch_exceptions));
- EXPECT_STREQ("auto", GTEST_FLAG(color).c_str());
- EXPECT_FALSE(GTEST_FLAG(death_test_use_fork));
- EXPECT_FALSE(GTEST_FLAG(fail_fast));
- EXPECT_STREQ("", GTEST_FLAG(filter).c_str());
- EXPECT_FALSE(GTEST_FLAG(list_tests));
- EXPECT_STREQ("", GTEST_FLAG(output).c_str());
- EXPECT_FALSE(GTEST_FLAG(brief));
- EXPECT_TRUE(GTEST_FLAG(print_time));
- EXPECT_EQ(0, GTEST_FLAG(random_seed));
- EXPECT_EQ(1, GTEST_FLAG(repeat));
- EXPECT_FALSE(GTEST_FLAG(shuffle));
- EXPECT_EQ(kMaxStackTraceDepth, GTEST_FLAG(stack_trace_depth));
- EXPECT_STREQ("", GTEST_FLAG(stream_result_to).c_str());
- EXPECT_FALSE(GTEST_FLAG(throw_on_failure));
-
- GTEST_FLAG(also_run_disabled_tests) = true;
- GTEST_FLAG(break_on_failure) = true;
- GTEST_FLAG(catch_exceptions) = true;
- GTEST_FLAG(color) = "no";
- GTEST_FLAG(death_test_use_fork) = true;
- GTEST_FLAG(fail_fast) = true;
- GTEST_FLAG(filter) = "abc";
- GTEST_FLAG(list_tests) = true;
- GTEST_FLAG(output) = "xml:foo.xml";
- GTEST_FLAG(brief) = true;
- GTEST_FLAG(print_time) = false;
- GTEST_FLAG(random_seed) = 1;
- GTEST_FLAG(repeat) = 100;
- GTEST_FLAG(shuffle) = true;
- GTEST_FLAG(stack_trace_depth) = 1;
- GTEST_FLAG(stream_result_to) = "localhost:1234";
- GTEST_FLAG(throw_on_failure) = true;
+ EXPECT_FALSE(GTEST_FLAG_GET(also_run_disabled_tests));
+ EXPECT_FALSE(GTEST_FLAG_GET(break_on_failure));
+ EXPECT_FALSE(GTEST_FLAG_GET(catch_exceptions));
+ EXPECT_STREQ("auto", GTEST_FLAG_GET(color).c_str());
+ EXPECT_FALSE(GTEST_FLAG_GET(death_test_use_fork));
+ EXPECT_FALSE(GTEST_FLAG_GET(fail_fast));
+ EXPECT_STREQ("", GTEST_FLAG_GET(filter).c_str());
+ EXPECT_FALSE(GTEST_FLAG_GET(list_tests));
+ EXPECT_STREQ("", GTEST_FLAG_GET(output).c_str());
+ EXPECT_FALSE(GTEST_FLAG_GET(brief));
+ EXPECT_TRUE(GTEST_FLAG_GET(print_time));
+ EXPECT_EQ(0, GTEST_FLAG_GET(random_seed));
+ EXPECT_EQ(1, GTEST_FLAG_GET(repeat));
+ EXPECT_TRUE(GTEST_FLAG_GET(recreate_environments_when_repeating));
+ EXPECT_FALSE(GTEST_FLAG_GET(shuffle));
+ EXPECT_EQ(kMaxStackTraceDepth, GTEST_FLAG_GET(stack_trace_depth));
+ EXPECT_STREQ("", GTEST_FLAG_GET(stream_result_to).c_str());
+ EXPECT_FALSE(GTEST_FLAG_GET(throw_on_failure));
+
+ GTEST_FLAG_SET(also_run_disabled_tests, true);
+ GTEST_FLAG_SET(break_on_failure, true);
+ GTEST_FLAG_SET(catch_exceptions, true);
+ GTEST_FLAG_SET(color, "no");
+ GTEST_FLAG_SET(death_test_use_fork, true);
+ GTEST_FLAG_SET(fail_fast, true);
+ GTEST_FLAG_SET(filter, "abc");
+ GTEST_FLAG_SET(list_tests, true);
+ GTEST_FLAG_SET(output, "xml:foo.xml");
+ GTEST_FLAG_SET(brief, true);
+ GTEST_FLAG_SET(print_time, false);
+ GTEST_FLAG_SET(random_seed, 1);
+ GTEST_FLAG_SET(repeat, 100);
+ GTEST_FLAG_SET(recreate_environments_when_repeating, false);
+ GTEST_FLAG_SET(shuffle, true);
+ GTEST_FLAG_SET(stack_trace_depth, 1);
+ GTEST_FLAG_SET(stream_result_to, "localhost:1234");
+ GTEST_FLAG_SET(throw_on_failure, true);
}
private:
@@ -1674,21 +1678,17 @@ GTestFlagSaver* GTestFlagSaverTest::saver_ = nullptr;
// tests are designed to work regardless of their order.
// Modifies the Google Test flags in the test body.
-TEST_F(GTestFlagSaverTest, ModifyGTestFlags) {
- VerifyAndModifyFlags();
-}
+TEST_F(GTestFlagSaverTest, ModifyGTestFlags) { VerifyAndModifyFlags(); }
// Verifies that the Google Test flags in the body of the previous test were
// restored to their original values.
-TEST_F(GTestFlagSaverTest, VerifyGTestFlags) {
- VerifyAndModifyFlags();
-}
+TEST_F(GTestFlagSaverTest, VerifyGTestFlags) { VerifyAndModifyFlags(); }
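
// Usage example of the accessor-macro API used above: read a flag with
// GTEST_FLAG_GET and override it with GTEST_FLAG_SET, both public GoogleTest
// macros. The test and suite names here are illustrative only.
TEST(FlagAccessExample, OverridesAndRestoresRepeat) {
  const auto old_repeat = GTEST_FLAG_GET(repeat);
  GTEST_FLAG_SET(repeat, 1);
  EXPECT_EQ(1, GTEST_FLAG_GET(repeat));
  GTEST_FLAG_SET(repeat, old_repeat);  // restore for later tests
}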
// Sets an environment variable with the given name to the given
// value. If the value argument is "", unsets the environment
// variable. The caller must ensure that both arguments are not NULL.
static void SetEnv(const char* name, const char* value) {
-#if GTEST_OS_WINDOWS_MOBILE
+#ifdef GTEST_OS_WINDOWS_MOBILE
// Environment variables are not supported on Windows CE.
return;
#elif defined(__BORLANDC__) || defined(__SunOS_5_8) || defined(__SunOS_5_9)
@@ -1699,19 +1699,19 @@ static void SetEnv(const char* name, const char* value) {
// Because putenv stores a pointer to the string buffer, we can't delete the
// previous string (if present) until after it's replaced.
- std::string *prev_env = NULL;
+ std::string* prev_env = NULL;
if (added_env.find(name) != added_env.end()) {
prev_env = added_env[name];
}
- added_env[name] = new std::string(
- (Message() << name << "=" << value).GetString());
+ added_env[name] =
+ new std::string((Message() << name << "=" << value).GetString());
// The standard signature of putenv accepts a 'char*' argument. Other
// implementations, like C++Builder's, accept a 'const char*'.
// We cast away the 'const' since that would work for both variants.
putenv(const_cast<char*>(added_env[name]->c_str()));
delete prev_env;
-#elif GTEST_OS_WINDOWS // If we are on Windows proper.
+#elif defined(GTEST_OS_WINDOWS) // If we are on Windows proper.
_putenv((Message() << name << "=" << value).GetString().c_str());
#else
if (*value == '\0') {
@@ -1722,7 +1722,7 @@ static void SetEnv(const char* name, const char* value) {
#endif // GTEST_OS_WINDOWS_MOBILE
}
-#if !GTEST_OS_WINDOWS_MOBILE
+#ifndef GTEST_OS_WINDOWS_MOBILE
// Environment variables are not supported on Windows CE.
using testing::internal::Int32FromGTestEnv;
@@ -1736,7 +1736,7 @@ TEST(Int32FromGTestEnvTest, ReturnsDefaultWhenVariableIsNotSet) {
EXPECT_EQ(10, Int32FromGTestEnv("temp", 10));
}
-# if !defined(GTEST_GET_INT32_FROM_ENV_)
+#if !defined(GTEST_GET_INT32_FROM_ENV_)
// Tests that Int32FromGTestEnv() returns the default value when the
// environment variable overflows as an Int32.
@@ -1762,7 +1762,7 @@ TEST(Int32FromGTestEnvTest, ReturnsDefaultWhenValueIsInvalid) {
EXPECT_EQ(50, Int32FromGTestEnv("temp", 50));
}
-# endif // !defined(GTEST_GET_INT32_FROM_ENV_)
+#endif // !defined(GTEST_GET_INT32_FROM_ENV_)
// Tests that Int32FromGTestEnv() parses and returns the value of the
// environment variable when it represents a valid decimal integer in
@@ -1776,29 +1776,29 @@ TEST(Int32FromGTestEnvTest, ParsesAndReturnsValidValue) {
}
#endif // !GTEST_OS_WINDOWS_MOBILE
-// Tests ParseInt32Flag().
+// Tests ParseFlag().
// Tests that ParseFlag() returns false and doesn't change the
// output value when the flag has the wrong format.
TEST(ParseInt32FlagTest, ReturnsFalseForInvalidFlag) {
int32_t value = 123;
- EXPECT_FALSE(ParseInt32Flag("--a=100", "b", &value));
+ EXPECT_FALSE(ParseFlag("--a=100", "b", &value));
EXPECT_EQ(123, value);
- EXPECT_FALSE(ParseInt32Flag("a=100", "a", &value));
+ EXPECT_FALSE(ParseFlag("a=100", "a", &value));
EXPECT_EQ(123, value);
}
-// Tests that ParseInt32Flag() returns false and doesn't change the
+// Tests that ParseFlag() returns false and doesn't change the
// output value when the flag overflows as an Int32.
TEST(ParseInt32FlagTest, ReturnsDefaultWhenValueOverflows) {
printf("(expecting 2 warnings)\n");
int32_t value = 123;
- EXPECT_FALSE(ParseInt32Flag("--abc=12345678987654321", "abc", &value));
+ EXPECT_FALSE(ParseFlag("--abc=12345678987654321", "abc", &value));
EXPECT_EQ(123, value);
- EXPECT_FALSE(ParseInt32Flag("--abc=-12345678987654321", "abc", &value));
+ EXPECT_FALSE(ParseFlag("--abc=-12345678987654321", "abc", &value));
EXPECT_EQ(123, value);
}
@@ -1809,10 +1809,10 @@ TEST(ParseInt32FlagTest, ReturnsDefaultWhenValueIsInvalid) {
printf("(expecting 2 warnings)\n");
int32_t value = 123;
- EXPECT_FALSE(ParseInt32Flag("--abc=A1", "abc", &value));
+ EXPECT_FALSE(ParseFlag("--abc=A1", "abc", &value));
EXPECT_EQ(123, value);
- EXPECT_FALSE(ParseInt32Flag("--abc=12X", "abc", &value));
+ EXPECT_FALSE(ParseFlag("--abc=12X", "abc", &value));
EXPECT_EQ(123, value);
}
@@ -1821,18 +1821,17 @@ TEST(ParseInt32FlagTest, ReturnsDefaultWhenValueIsInvalid) {
// the range of an Int32.
TEST(ParseInt32FlagTest, ParsesAndReturnsValidValue) {
int32_t value = 123;
- EXPECT_TRUE(ParseInt32Flag("--" GTEST_FLAG_PREFIX_ "abc=456", "abc", &value));
+ EXPECT_TRUE(ParseFlag("--" GTEST_FLAG_PREFIX_ "abc=456", "abc", &value));
EXPECT_EQ(456, value);
- EXPECT_TRUE(ParseInt32Flag("--" GTEST_FLAG_PREFIX_ "abc=-789",
- "abc", &value));
+ EXPECT_TRUE(ParseFlag("--" GTEST_FLAG_PREFIX_ "abc=-789", "abc", &value));
EXPECT_EQ(-789, value);
}
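
// A minimal sketch (not GoogleTest's ParseFlag) of the "--name=value"
// parsing contract the tests above rely on: return false and leave the
// output untouched unless the name matches and the value is a valid,
// in-range int32_t. Real GoogleTest flag names also carry GTEST_FLAG_PREFIX_.
#include <cerrno>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <string>

static bool ParseInt32FlagSketch(const char* arg, const char* name,
                                 int32_t* out) {
  const std::string prefix = std::string("--") + name + "=";
  if (std::strncmp(arg, prefix.c_str(), prefix.size()) != 0) return false;
  const char* value = arg + prefix.size();
  char* end = nullptr;
  errno = 0;
  const long long v = std::strtoll(value, &end, 10);
  if (end == value || *end != '\0' || errno == ERANGE || v < INT32_MIN ||
      v > INT32_MAX) {
    return false;  // malformed or overflowing: keep *out unchanged
  }
  *out = static_cast<int32_t>(v);
  return true;
}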
// Tests that Int32FromEnvOrDie() parses the value of the var or
// returns the correct default.
// Environment variables are not supported on Windows CE.
-#if !GTEST_OS_WINDOWS_MOBILE
+#ifndef GTEST_OS_WINDOWS_MOBILE
TEST(Int32FromEnvOrDieTest, ParsesAndReturnsValidValue) {
EXPECT_EQ(333, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", 333));
SetEnv(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", "123");
@@ -1847,8 +1846,7 @@ TEST(Int32FromEnvOrDieTest, ParsesAndReturnsValidValue) {
TEST(Int32FromEnvOrDieDeathTest, AbortsOnFailure) {
SetEnv(GTEST_FLAG_PREFIX_UPPER_ "VAR", "xxx");
EXPECT_DEATH_IF_SUPPORTED(
- Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "VAR", 123),
- ".*");
+ Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "VAR", 123), ".*");
}
// Tests that Int32FromEnvOrDie() aborts with an error message
@@ -1856,8 +1854,7 @@ TEST(Int32FromEnvOrDieDeathTest, AbortsOnFailure) {
TEST(Int32FromEnvOrDieDeathTest, AbortsOnInt32Overflow) {
SetEnv(GTEST_FLAG_PREFIX_UPPER_ "VAR", "1234567891234567891234");
EXPECT_DEATH_IF_SUPPORTED(
- Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "VAR", 123),
- ".*");
+ Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "VAR", 123), ".*");
}
// Tests that ShouldRunTestOnShard() selects all tests
@@ -1907,7 +1904,7 @@ TEST_F(ShouldShardTest, ReturnsFalseWhenTotalShardIsOne) {
// Tests that sharding is enabled if total_shards > 1 and
// we are not in a death test subprocess.
// Environment variables are not supported on Windows CE.
-#if !GTEST_OS_WINDOWS_MOBILE
+#ifndef GTEST_OS_WINDOWS_MOBILE
TEST_F(ShouldShardTest, WorksWhenShardEnvVarsAreValid) {
SetEnv(index_var_, "4");
SetEnv(total_var_, "22");
@@ -1964,7 +1961,8 @@ TEST(ShouldRunTestOnShardTest, IsPartitionWhenThereAreFiveShards) {
prev_selected_shard_index = shard_index;
} else {
ADD_FAILURE() << "Shard " << prev_selected_shard_index << " and "
- << shard_index << " are both selected to run test " << test_id;
+ << shard_index << " are both selected to run test "
+ << test_id;
}
}
}
@@ -1976,7 +1974,7 @@ TEST(ShouldRunTestOnShardTest, IsPartitionWhenThereAreFiveShards) {
int num_tests_on_shard = 0;
for (int test_id = 0; test_id < num_tests; test_id++) {
num_tests_on_shard +=
- ShouldRunTestOnShard(num_shards, shard_index, test_id);
+ ShouldRunTestOnShard(num_shards, shard_index, test_id);
}
EXPECT_GE(num_tests_on_shard, num_tests / num_shards);
}
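
// The partition property checked above follows from a simple round-robin
// rule; a plausible sketch of such a predicate (an assumption, not quoted
// from GoogleTest's sources):
static bool ShouldRunTestOnShardSketch(int total_shards, int shard_index,
                                       int test_id) {
  return (test_id % total_shards) == shard_index;
}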
@@ -2008,8 +2006,8 @@ TEST(UnitTestTest, ReturnsPlausibleTimestamp) {
void ExpectNonFatalFailureRecordingPropertyWithReservedKey(
const TestResult& test_result, const char* key) {
EXPECT_NONFATAL_FAILURE(Test::RecordProperty(key, "1"), "Reserved key");
- ASSERT_EQ(0, test_result.test_property_count()) << "Property for key '" << key
- << "' recorded unexpectedly.";
+ ASSERT_EQ(0, test_result.test_property_count())
+ << "Property for key '" << key << "' recorded unexpectedly.";
}
void ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
@@ -2036,10 +2034,10 @@ void ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(
}
// Tests that property recording functions in UnitTest outside of tests
-// functions correcly. Creating a separate instance of UnitTest ensures it
+// function correctly. Creating a separate instance of UnitTest ensures it
// is in a state similar to the UnitTest's singleton's between tests.
-class UnitTestRecordPropertyTest :
- public testing::internal::UnitTestRecordPropertyTestHelper {
+class UnitTestRecordPropertyTest
+ : public testing::internal::UnitTestRecordPropertyTestHelper {
public:
static void SetUpTestSuite() {
ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(
@@ -2078,8 +2076,7 @@ TEST_F(UnitTestRecordPropertyTest, OnePropertyFoundWhenAdded) {
EXPECT_STREQ("key_1",
unit_test_.ad_hoc_test_result().GetTestProperty(0).key());
- EXPECT_STREQ("1",
- unit_test_.ad_hoc_test_result().GetTestProperty(0).value());
+ EXPECT_STREQ("1", unit_test_.ad_hoc_test_result().GetTestProperty(0).value());
}
// Tests TestResult has multiple properties when added.
@@ -2120,16 +2117,13 @@ TEST_F(UnitTestRecordPropertyTest, OverridesValuesForDuplicateKeys) {
TEST_F(UnitTestRecordPropertyTest,
AddFailureInsideTestsWhenUsingTestSuiteReservedKeys) {
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
- "name");
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest("name");
ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
"value_param");
ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
"type_param");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
- "status");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
- "time");
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest("status");
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest("time");
ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
"classname");
}
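
// For contrast, a hedged usage example with a non-reserved key, which is
// accepted and attached to the current test's XML/JSON report ("build_id"
// is an illustrative key, not one GoogleTest defines):
TEST(RecordPropertyExample, AcceptsNonReservedKey) {
  testing::Test::RecordProperty("build_id", 42);
}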
@@ -2177,9 +2171,7 @@ static Environment* record_property_env GTEST_ATTRIBUTE_UNUSED_ =
// First, some predicates and predicate-formatters needed by the tests.
// Returns true if and only if the argument is an even number.
-bool IsEven(int n) {
- return (n % 2) == 0;
-}
+bool IsEven(int n) { return (n % 2) == 0; }
// A functor that returns true if and only if the argument is an even number.
struct IsEvenFunctor {
@@ -2226,41 +2218,37 @@ struct AssertIsEvenFunctor {
};
// Returns true if and only if the sum of the arguments is an even number.
-bool SumIsEven2(int n1, int n2) {
- return IsEven(n1 + n2);
-}
+bool SumIsEven2(int n1, int n2) { return IsEven(n1 + n2); }
// A functor that returns true if and only if the sum of the arguments is an
// even number.
struct SumIsEven3Functor {
- bool operator()(int n1, int n2, int n3) {
- return IsEven(n1 + n2 + n3);
- }
+ bool operator()(int n1, int n2, int n3) { return IsEven(n1 + n2 + n3); }
};
// A predicate-formatter function that asserts the sum of the
// arguments is an even number.
-AssertionResult AssertSumIsEven4(
- const char* e1, const char* e2, const char* e3, const char* e4,
- int n1, int n2, int n3, int n4) {
+AssertionResult AssertSumIsEven4(const char* e1, const char* e2, const char* e3,
+ const char* e4, int n1, int n2, int n3,
+ int n4) {
const int sum = n1 + n2 + n3 + n4;
if (IsEven(sum)) {
return AssertionSuccess();
}
Message msg;
- msg << e1 << " + " << e2 << " + " << e3 << " + " << e4
- << " (" << n1 << " + " << n2 << " + " << n3 << " + " << n4
- << ") evaluates to " << sum << ", which is not even.";
+ msg << e1 << " + " << e2 << " + " << e3 << " + " << e4 << " (" << n1 << " + "
+ << n2 << " + " << n3 << " + " << n4 << ") evaluates to " << sum
+ << ", which is not even.";
return AssertionFailure(msg);
}
// A predicate-formatter functor that asserts the sum of the arguments
// is an even number.
struct AssertSumIsEven5Functor {
- AssertionResult operator()(
- const char* e1, const char* e2, const char* e3, const char* e4,
- const char* e5, int n1, int n2, int n3, int n4, int n5) {
+ AssertionResult operator()(const char* e1, const char* e2, const char* e3,
+ const char* e4, const char* e5, int n1, int n2,
+ int n3, int n4, int n5) {
const int sum = n1 + n2 + n3 + n4 + n5;
if (IsEven(sum)) {
return AssertionSuccess();
@@ -2268,14 +2256,12 @@ struct AssertSumIsEven5Functor {
Message msg;
msg << e1 << " + " << e2 << " + " << e3 << " + " << e4 << " + " << e5
- << " ("
- << n1 << " + " << n2 << " + " << n3 << " + " << n4 << " + " << n5
- << ") evaluates to " << sum << ", which is not even.";
+ << " (" << n1 << " + " << n2 << " + " << n3 << " + " << n4 << " + "
+ << n5 << ") evaluates to " << sum << ", which is not even.";
return AssertionFailure(msg);
}
};
-
// Tests unary predicate assertions.
// Tests unary predicate assertions that don't use a custom formatter.
@@ -2285,11 +2271,12 @@ TEST(Pred1Test, WithoutFormat) {
ASSERT_PRED1(IsEven, 4);
// Failure cases.
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED1(IsEven, 5) << "This failure is expected.";
- }, "This failure is expected.");
- EXPECT_FATAL_FAILURE(ASSERT_PRED1(IsEvenFunctor(), 5),
- "evaluates to false");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED1(IsEven, 5) << "This failure is expected.";
+ },
+ "This failure is expected.");
+ EXPECT_FATAL_FAILURE(ASSERT_PRED1(IsEvenFunctor(), 5), "evaluates to false");
}
// Tests unary predicate assertions that use a custom formatter.
@@ -2297,15 +2284,17 @@ TEST(Pred1Test, WithFormat) {
// Success cases.
EXPECT_PRED_FORMAT1(AssertIsEven, 2);
ASSERT_PRED_FORMAT1(AssertIsEvenFunctor(), 4)
- << "This failure is UNEXPECTED!";
+ << "This failure is UNEXPECTED!";
// Failure cases.
const int n = 5;
EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT1(AssertIsEvenFunctor(), n),
"n evaluates to 5, which is not even.");
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT1(AssertIsEven, 5) << "This failure is expected.";
- }, "This failure is expected.");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT1(AssertIsEven, 5) << "This failure is expected.";
+ },
+ "This failure is expected.");
}
// Tests that unary predicate assertions evaluate their arguments
@@ -2317,14 +2306,15 @@ TEST(Pred1Test, SingleEvaluationOnFailure) {
EXPECT_EQ(1, n) << "The argument is not evaluated exactly once.";
// A failure case.
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT1(AssertIsEvenFunctor(), n++)
- << "This failure is expected.";
- }, "This failure is expected.");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT1(AssertIsEvenFunctor(), n++)
+ << "This failure is expected.";
+ },
+ "This failure is expected.");
EXPECT_EQ(2, n) << "The argument is not evaluated exactly once.";
}
-
// Tests predicate assertions whose arity is >= 2.
// Tests predicate assertions that don't use a custom formatter.
@@ -2336,19 +2326,23 @@ TEST(PredTest, WithoutFormat) {
// Failure cases.
const int n1 = 1;
const int n2 = 2;
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED2(SumIsEven2, n1, n2) << "This failure is expected.";
- }, "This failure is expected.");
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED3(SumIsEven3Functor(), 1, 2, 4);
- }, "evaluates to false");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED2(SumIsEven2, n1, n2) << "This failure is expected.";
+ },
+ "This failure is expected.");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED3(SumIsEven3Functor(), 1, 2, 4);
+ },
+ "evaluates to false");
}
// Tests predicate assertions that use a custom formatter.
TEST(PredTest, WithFormat) {
// Success cases.
- ASSERT_PRED_FORMAT4(AssertSumIsEven4, 4, 6, 8, 10) <<
- "This failure is UNEXPECTED!";
+ ASSERT_PRED_FORMAT4(AssertSumIsEven4, 4, 6, 8, 10)
+ << "This failure is UNEXPECTED!";
EXPECT_PRED_FORMAT5(AssertSumIsEven5Functor(), 2, 4, 6, 8, 10);
// Failure cases.
@@ -2356,13 +2350,17 @@ TEST(PredTest, WithFormat) {
const int n2 = 2;
const int n3 = 4;
const int n4 = 6;
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT4(AssertSumIsEven4, n1, n2, n3, n4);
- }, "evaluates to 13, which is not even.");
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(), 1, 2, 4, 6, 8)
- << "This failure is expected.";
- }, "This failure is expected.");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT4(AssertSumIsEven4, n1, n2, n3, n4);
+ },
+ "evaluates to 13, which is not even.");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(), 1, 2, 4, 6, 8)
+ << "This failure is expected.";
+ },
+ "This failure is expected.");
}
// Tests that predicate assertions evaluate their arguments
@@ -2380,9 +2378,8 @@ TEST(PredTest, SingleEvaluationOnFailure) {
int n3 = 0;
int n4 = 0;
int n5 = 0;
- ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(),
- n1++, n2++, n3++, n4++, n5++)
- << "This failure is UNEXPECTED!";
+ ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(), n1++, n2++, n3++, n4++, n5++)
+ << "This failure is UNEXPECTED!";
EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once.";
EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once.";
EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once.";
@@ -2391,19 +2388,23 @@ TEST(PredTest, SingleEvaluationOnFailure) {
// A failure case.
n1 = n2 = n3 = 0;
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED3(SumIsEven3Functor(), ++n1, n2++, n3++)
- << "This failure is expected.";
- }, "This failure is expected.");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED3(SumIsEven3Functor(), ++n1, n2++, n3++)
+ << "This failure is expected.";
+ },
+ "This failure is expected.");
EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once.";
EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once.";
EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once.";
// Another failure case.
n1 = n2 = n3 = n4 = 0;
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT4(AssertSumIsEven4, ++n1, n2++, n3++, n4++);
- }, "evaluates to 1, which is not even.");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT4(AssertSumIsEven4, ++n1, n2++, n3++, n4++);
+ },
+ "evaluates to 1, which is not even.");
EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once.";
EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once.";
EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once.";
@@ -2414,7 +2415,7 @@ TEST(PredTest, SingleEvaluationOnFailure) {
TEST(PredTest, ExpectPredEvalFailure) {
std::set<int> set_a = {2, 1, 3, 4, 5};
std::set<int> set_b = {0, 4, 8};
- const auto compare_sets = [] (std::set<int>, std::set<int>) { return false; };
+ const auto compare_sets = [](std::set<int>, std::set<int>) { return false; };
EXPECT_NONFATAL_FAILURE(
EXPECT_PRED2(compare_sets, set_a, set_b),
"compare_sets(set_a, set_b) evaluates to false, where\nset_a evaluates "
@@ -2424,9 +2425,7 @@ TEST(PredTest, ExpectPredEvalFailure) {
// Some helper functions for testing the use of overloaded/template
// functions with ASSERT_PREDn and EXPECT_PREDn.
-bool IsPositive(double x) {
- return x > 0;
-}
+bool IsPositive(double x) { return x > 0; }
template <typename T>
bool IsNegative(T x) {
@@ -2442,7 +2441,7 @@ bool GreaterThan(T1 x1, T2 x2) {
// their types are explicitly specified.
TEST(PredicateAssertionTest, AcceptsOverloadedFunction) {
// C++Builder requires C-style casts rather than static_cast.
- EXPECT_PRED1((bool (*)(int))(IsPositive), 5); // NOLINT
+ EXPECT_PRED1((bool (*)(int))(IsPositive), 5); // NOLINT
ASSERT_PRED1((bool (*)(double))(IsPositive), 6.0); // NOLINT
}
@@ -2455,31 +2454,27 @@ TEST(PredicateAssertionTest, AcceptsTemplateFunction) {
ASSERT_PRED2((GreaterThan<int, int>), 5, 0);
}
-
// Some helper functions for testing the use of overloaded/template
// functions with ASSERT_PRED_FORMATn and EXPECT_PRED_FORMATn.
AssertionResult IsPositiveFormat(const char* /* expr */, int n) {
- return n > 0 ? AssertionSuccess() :
- AssertionFailure(Message() << "Failure");
+ return n > 0 ? AssertionSuccess() : AssertionFailure(Message() << "Failure");
}
AssertionResult IsPositiveFormat(const char* /* expr */, double x) {
- return x > 0 ? AssertionSuccess() :
- AssertionFailure(Message() << "Failure");
+ return x > 0 ? AssertionSuccess() : AssertionFailure(Message() << "Failure");
}
template <typename T>
AssertionResult IsNegativeFormat(const char* /* expr */, T x) {
- return x < 0 ? AssertionSuccess() :
- AssertionFailure(Message() << "Failure");
+ return x < 0 ? AssertionSuccess() : AssertionFailure(Message() << "Failure");
}
template <typename T1, typename T2>
AssertionResult EqualsFormat(const char* /* expr1 */, const char* /* expr2 */,
const T1& x1, const T2& x2) {
- return x1 == x2 ? AssertionSuccess() :
- AssertionFailure(Message() << "Failure");
+ return x1 == x2 ? AssertionSuccess()
+ : AssertionFailure(Message() << "Failure");
}
// Tests that overloaded functions can be used in *_PRED_FORMAT*
@@ -2496,20 +2491,18 @@ TEST(PredicateFormatAssertionTest, AcceptsTemplateFunction) {
ASSERT_PRED_FORMAT2(EqualsFormat, 3, 3);
}
-
// Tests string assertions.
// Tests ASSERT_STREQ with non-NULL arguments.
TEST(StringAssertionTest, ASSERT_STREQ) {
- const char * const p1 = "good";
+ const char* const p1 = "good";
ASSERT_STREQ(p1, p1);
// Let p2 have the same content as p1, but be at a different address.
const char p2[] = "good";
ASSERT_STREQ(p1, p2);
- EXPECT_FATAL_FAILURE(ASSERT_STREQ("bad", "good"),
- " \"bad\"\n \"good\"");
+ EXPECT_FATAL_FAILURE(ASSERT_STREQ("bad", "good"), " \"bad\"\n \"good\"");
}
// Tests ASSERT_STREQ with NULL arguments.
@@ -2532,8 +2525,7 @@ TEST(StringAssertionTest, ASSERT_STRNE) {
ASSERT_STRNE(nullptr, "");
ASSERT_STRNE("", "Hi");
ASSERT_STRNE("Hi", "");
- EXPECT_FATAL_FAILURE(ASSERT_STRNE("Hi", "Hi"),
- "\"Hi\" vs \"Hi\"");
+ EXPECT_FATAL_FAILURE(ASSERT_STRNE("Hi", "Hi"), "\"Hi\" vs \"Hi\"");
}
// Tests ASSERT_STRCASEEQ.
@@ -2542,8 +2534,7 @@ TEST(StringAssertionTest, ASSERT_STRCASEEQ) {
ASSERT_STRCASEEQ(static_cast<const char*>(nullptr), nullptr);
ASSERT_STRCASEEQ("", "");
- EXPECT_FATAL_FAILURE(ASSERT_STRCASEEQ("Hi", "hi2"),
- "Ignoring case");
+ EXPECT_FATAL_FAILURE(ASSERT_STRCASEEQ("Hi", "hi2"), "Ignoring case");
}
// Tests ASSERT_STRCASENE.
@@ -2555,8 +2546,7 @@ TEST(StringAssertionTest, ASSERT_STRCASENE) {
ASSERT_STRCASENE(nullptr, "");
ASSERT_STRCASENE("", "Hi");
ASSERT_STRCASENE("Hi", "");
- EXPECT_FATAL_FAILURE(ASSERT_STRCASENE("Hi", "hi"),
- "(ignoring case)");
+ EXPECT_FATAL_FAILURE(ASSERT_STRCASENE("Hi", "hi"), "(ignoring case)");
}
// Tests *_STREQ on wide strings.
@@ -2574,17 +2564,17 @@ TEST(StringAssertionTest, STREQ_Wide) {
EXPECT_STREQ(L"Hi", L"Hi");
// Unequal strings.
- EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"abc", L"Abc"),
- "Abc");
+ EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"abc", L"Abc"), "Abc");
// Strings containing wide characters.
- EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"abc\x8119", L"abc\x8120"),
- "abc");
+ EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"abc\x8119", L"abc\x8120"), "abc");
// The streaming variation.
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_STREQ(L"abc\x8119", L"abc\x8121") << "Expected failure";
- }, "Expected failure");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_STREQ(L"abc\x8119", L"abc\x8121") << "Expected failure";
+ },
+ "Expected failure");
}
// Tests *_STRNE on wide strings.
@@ -2597,22 +2587,19 @@ TEST(StringAssertionTest, STRNE_Wide) {
"");
// Empty strings.
- EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"", L""),
- "L\"\"");
+ EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"", L""), "L\"\"");
// Non-null vs NULL.
ASSERT_STRNE(L"non-null", nullptr);
// Equal strings.
- EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"Hi", L"Hi"),
- "L\"Hi\"");
+ EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"Hi", L"Hi"), "L\"Hi\"");
// Unequal strings.
EXPECT_STRNE(L"abc", L"Abc");
// Strings containing wide characters.
- EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"abc\x8119", L"abc\x8119"),
- "abc");
+ EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"abc\x8119", L"abc\x8119"), "abc");
// The streaming variation.
ASSERT_STRNE(L"abc\x8119", L"abc\x8120") << "This shouldn't happen";
@@ -2646,12 +2633,13 @@ TEST(IsSubstringTest, ReturnsCorrectResultForWideCString) {
// Tests that IsSubstring() generates the correct message when the input
// argument type is const char*.
TEST(IsSubstringTest, GeneratesCorrectMessageForCString) {
- EXPECT_STREQ("Value of: needle_expr\n"
- " Actual: \"needle\"\n"
- "Expected: a substring of haystack_expr\n"
- "Which is: \"haystack\"",
- IsSubstring("needle_expr", "haystack_expr",
- "needle", "haystack").failure_message());
+ EXPECT_STREQ(
+ "Value of: needle_expr\n"
+ " Actual: \"needle\"\n"
+ "Expected: a substring of haystack_expr\n"
+ "Which is: \"haystack\"",
+ IsSubstring("needle_expr", "haystack_expr", "needle", "haystack")
+ .failure_message());
}
// Tests that IsSubstring returns the correct result when the input
@@ -2672,13 +2660,14 @@ TEST(IsSubstringTest, ReturnsCorrectResultForStdWstring) {
// Tests that IsSubstring() generates the correct message when the input
// argument type is ::std::wstring.
TEST(IsSubstringTest, GeneratesCorrectMessageForWstring) {
- EXPECT_STREQ("Value of: needle_expr\n"
- " Actual: L\"needle\"\n"
- "Expected: a substring of haystack_expr\n"
- "Which is: L\"haystack\"",
- IsSubstring(
- "needle_expr", "haystack_expr",
- ::std::wstring(L"needle"), L"haystack").failure_message());
+ EXPECT_STREQ(
+ "Value of: needle_expr\n"
+ " Actual: L\"needle\"\n"
+ "Expected: a substring of haystack_expr\n"
+ "Which is: L\"haystack\"",
+ IsSubstring("needle_expr", "haystack_expr", ::std::wstring(L"needle"),
+ L"haystack")
+ .failure_message());
}
#endif // GTEST_HAS_STD_WSTRING
@@ -2702,13 +2691,13 @@ TEST(IsNotSubstringTest, ReturnsCorrectResultForWideCString) {
// Tests that IsNotSubstring() generates the correct message when the input
// argument type is const wchar_t*.
TEST(IsNotSubstringTest, GeneratesCorrectMessageForWideCString) {
- EXPECT_STREQ("Value of: needle_expr\n"
- " Actual: L\"needle\"\n"
- "Expected: not a substring of haystack_expr\n"
- "Which is: L\"two needles\"",
- IsNotSubstring(
- "needle_expr", "haystack_expr",
- L"needle", L"two needles").failure_message());
+ EXPECT_STREQ(
+ "Value of: needle_expr\n"
+ " Actual: L\"needle\"\n"
+ "Expected: not a substring of haystack_expr\n"
+ "Which is: L\"two needles\"",
+ IsNotSubstring("needle_expr", "haystack_expr", L"needle", L"two needles")
+ .failure_message());
}
// Tests that IsNotSubstring returns the correct result when the input
@@ -2721,13 +2710,14 @@ TEST(IsNotSubstringTest, ReturnsCorrectResultsForStdString) {
// Tests that IsNotSubstring() generates the correct message when the input
// argument type is ::std::string.
TEST(IsNotSubstringTest, GeneratesCorrectMessageForStdString) {
- EXPECT_STREQ("Value of: needle_expr\n"
- " Actual: \"needle\"\n"
- "Expected: not a substring of haystack_expr\n"
- "Which is: \"two needles\"",
- IsNotSubstring(
- "needle_expr", "haystack_expr",
- ::std::string("needle"), "two needles").failure_message());
+ EXPECT_STREQ(
+ "Value of: needle_expr\n"
+ " Actual: \"needle\"\n"
+ "Expected: not a substring of haystack_expr\n"
+ "Which is: \"two needles\"",
+ IsNotSubstring("needle_expr", "haystack_expr", ::std::string("needle"),
+ "two needles")
+ .failure_message());
}
#if GTEST_HAS_STD_WSTRING
@@ -2774,20 +2764,20 @@ class FloatingPointTest : public Test {
const Bits zero_bits = Floating(0).bits();
// Makes some numbers close to 0.0.
- values_.close_to_positive_zero = Floating::ReinterpretBits(
- zero_bits + max_ulps/2);
- values_.close_to_negative_zero = -Floating::ReinterpretBits(
- zero_bits + max_ulps - max_ulps/2);
- values_.further_from_negative_zero = -Floating::ReinterpretBits(
- zero_bits + max_ulps + 1 - max_ulps/2);
+ values_.close_to_positive_zero =
+ Floating::ReinterpretBits(zero_bits + max_ulps / 2);
+ values_.close_to_negative_zero =
+ -Floating::ReinterpretBits(zero_bits + max_ulps - max_ulps / 2);
+ values_.further_from_negative_zero =
+ -Floating::ReinterpretBits(zero_bits + max_ulps + 1 - max_ulps / 2);
// The bits that represent 1.0.
const Bits one_bits = Floating(1).bits();
// Makes some numbers close to 1.0.
values_.close_to_one = Floating::ReinterpretBits(one_bits + max_ulps);
- values_.further_from_one = Floating::ReinterpretBits(
- one_bits + max_ulps + 1);
+ values_.further_from_one =
+ Floating::ReinterpretBits(one_bits + max_ulps + 1);
// +infinity.
values_.infinity = Floating::Infinity();
@@ -2796,23 +2786,23 @@ class FloatingPointTest : public Test {
const Bits infinity_bits = Floating(values_.infinity).bits();
// Makes some numbers close to infinity.
- values_.close_to_infinity = Floating::ReinterpretBits(
- infinity_bits - max_ulps);
- values_.further_from_infinity = Floating::ReinterpretBits(
- infinity_bits - max_ulps - 1);
+ values_.close_to_infinity =
+ Floating::ReinterpretBits(infinity_bits - max_ulps);
+ values_.further_from_infinity =
+ Floating::ReinterpretBits(infinity_bits - max_ulps - 1);
    // Makes some NaNs. Sets the most significant bit of the fraction so that
    // our NaNs are quiet; trying to process a signaling NaN would raise an
    // exception if our environment enables floating point exceptions.
- values_.nan1 = Floating::ReinterpretBits(Floating::kExponentBitMask
- | (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 1);
- values_.nan2 = Floating::ReinterpretBits(Floating::kExponentBitMask
- | (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 200);
+ values_.nan1 = Floating::ReinterpretBits(
+ Floating::kExponentBitMask |
+ (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 1);
+ values_.nan2 = Floating::ReinterpretBits(
+ Floating::kExponentBitMask |
+ (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 200);
}
- void TestSize() {
- EXPECT_EQ(sizeof(RawType), sizeof(Bits));
- }
+ void TestSize() { EXPECT_EQ(sizeof(RawType), sizeof(Bits)); }
static TestValues values_;
};
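
// The ULP-based comparison idea behind EXPECT_FLOAT_EQ, as a hedged sketch:
// map the float's bits so that consecutive representable values become
// consecutive integers, then compare integer distance. NaNs (tested below)
// would need an explicit rejection in a real implementation; the names here
// are illustrative, not GoogleTest internals.
#include <cstdint>
#include <cstring>

static uint32_t BiasedBitsSketch(float f) {
  uint32_t b;
  std::memcpy(&b, &f, sizeof b);  // type-pun safely via memcpy
  // Order negative values below positive ones on one integer axis.
  return (b & 0x80000000u) ? ~b + 1 : b | 0x80000000u;
}

static bool AlmostEqualUlpsSketch(float a, float b, uint32_t max_ulps = 4) {
  const uint32_t ba = BiasedBitsSketch(a);
  const uint32_t bb = BiasedBitsSketch(b);
  return (ba >= bb ? ba - bb : bb - ba) <= max_ulps;
}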
@@ -2825,17 +2815,13 @@ typename FloatingPointTest<RawType>::TestValues
typedef FloatingPointTest<float> FloatTest;
// Tests that the size of Float::Bits matches the size of float.
-TEST_F(FloatTest, Size) {
- TestSize();
-}
+TEST_F(FloatTest, Size) { TestSize(); }
// Tests comparing with +0 and -0.
TEST_F(FloatTest, Zeros) {
EXPECT_FLOAT_EQ(0.0, -0.0);
- EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(-0.0, 1.0),
- "1.0");
- EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(0.0, 1.5),
- "1.5");
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(-0.0, 1.0), "1.0");
+ EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(0.0, 1.5), "1.5");
}
// Tests comparing numbers close to 0.
@@ -2856,10 +2842,11 @@ TEST_F(FloatTest, AlmostZeros) {
EXPECT_FLOAT_EQ(-0.0, v.close_to_negative_zero);
EXPECT_FLOAT_EQ(v.close_to_positive_zero, v.close_to_negative_zero);
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_FLOAT_EQ(v.close_to_positive_zero,
- v.further_from_negative_zero);
- }, "v.further_from_negative_zero");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_FLOAT_EQ(v.close_to_positive_zero, v.further_from_negative_zero);
+ },
+ "v.further_from_negative_zero");
}
// Tests comparing numbers close to each other.
@@ -2871,8 +2858,7 @@ TEST_F(FloatTest, SmallDiff) {
// Tests comparing numbers far apart.
TEST_F(FloatTest, LargeDiff) {
- EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(2.5, 3.0),
- "3.0");
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(2.5, 3.0), "3.0");
}
// Tests comparing with infinity.
@@ -2901,15 +2887,11 @@ TEST_F(FloatTest, NaN) {
// (parentheses).
static const FloatTest::TestValues& v = this->values_;
- EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan1),
- "v.nan1");
- EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan2),
- "v.nan2");
- EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(1.0, v.nan1),
- "v.nan1");
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan1), "v.nan1");
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan2), "v.nan2");
+ EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(1.0, v.nan1), "v.nan1");
- EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(v.nan1, v.infinity),
- "v.infinity");
+ EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(v.nan1, v.infinity), "v.infinity");
}
// Tests that *_FLOAT_EQ are reflexive.
@@ -2963,36 +2945,40 @@ TEST_F(FloatTest, FloatLEFails) {
"(2.0f) <= (1.0f)");
// or by a small yet non-negligible margin,
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT2(FloatLE, values_.further_from_one, 1.0f);
- }, "(values_.further_from_one) <= (1.0f)");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT2(FloatLE, values_.further_from_one, 1.0f);
+ },
+ "(values_.further_from_one) <= (1.0f)");
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT2(FloatLE, values_.nan1, values_.infinity);
- }, "(values_.nan1) <= (values_.infinity)");
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT2(FloatLE, -values_.infinity, values_.nan1);
- }, "(-values_.infinity) <= (values_.nan1)");
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT2(FloatLE, values_.nan1, values_.nan1);
- }, "(values_.nan1) <= (values_.nan1)");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT2(FloatLE, values_.nan1, values_.infinity);
+ },
+ "(values_.nan1) <= (values_.infinity)");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT2(FloatLE, -values_.infinity, values_.nan1);
+ },
+ "(-values_.infinity) <= (values_.nan1)");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT2(FloatLE, values_.nan1, values_.nan1);
+ },
+ "(values_.nan1) <= (values_.nan1)");
}
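// Hedged restatement of the predicate contract exercised above: FloatLE
// succeeds when the first value is less than the second or when the two are
// almost equal in the *_FLOAT_EQ sense. A rough equivalent built on gtest's
// internal FloatingPoint helper (a sketch, not the library's definition):
static bool FloatLESketch(float lhs, float rhs) {
  return lhs < rhs ||
         testing::internal::FloatingPoint<float>(lhs).AlmostEquals(
             testing::internal::FloatingPoint<float>(rhs));
}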
// Instantiates FloatingPointTest for testing *_DOUBLE_EQ.
typedef FloatingPointTest<double> DoubleTest;
// Tests that the size of Double::Bits matches the size of double.
-TEST_F(DoubleTest, Size) {
- TestSize();
-}
+TEST_F(DoubleTest, Size) { TestSize(); }
// Tests comparing with +0 and -0.
TEST_F(DoubleTest, Zeros) {
EXPECT_DOUBLE_EQ(0.0, -0.0);
- EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(-0.0, 1.0),
- "1.0");
- EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(0.0, 1.0),
- "1.0");
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(-0.0, 1.0), "1.0");
+ EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(0.0, 1.0), "1.0");
}
// Tests comparing numbers close to 0.
@@ -3013,10 +2999,12 @@ TEST_F(DoubleTest, AlmostZeros) {
EXPECT_DOUBLE_EQ(-0.0, v.close_to_negative_zero);
EXPECT_DOUBLE_EQ(v.close_to_positive_zero, v.close_to_negative_zero);
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_DOUBLE_EQ(v.close_to_positive_zero,
- v.further_from_negative_zero);
- }, "v.further_from_negative_zero");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_DOUBLE_EQ(v.close_to_positive_zero,
+ v.further_from_negative_zero);
+ },
+ "v.further_from_negative_zero");
}
// Tests comparing numbers close to each other.
@@ -3028,8 +3016,7 @@ TEST_F(DoubleTest, SmallDiff) {
// Tests comparing numbers far apart.
TEST_F(DoubleTest, LargeDiff) {
- EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(2.0, 3.0),
- "3.0");
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(2.0, 3.0), "3.0");
}
// Tests comparing with infinity.
@@ -3053,12 +3040,10 @@ TEST_F(DoubleTest, NaN) {
static const DoubleTest::TestValues& v = this->values_;
// Nokia's STLport crashes if we try to output infinity or NaN.
- EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan1),
- "v.nan1");
+ EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan1), "v.nan1");
EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan2), "v.nan2");
EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1.0, v.nan1), "v.nan1");
- EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(v.nan1, v.infinity),
- "v.infinity");
+ EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(v.nan1, v.infinity), "v.infinity");
}
// Tests that *_DOUBLE_EQ are reflexive.
@@ -3119,22 +3104,29 @@ TEST_F(DoubleTest, DoubleLEFails) {
"(2.0) <= (1.0)");
// or by a small yet non-negligible margin,
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT2(DoubleLE, values_.further_from_one, 1.0);
- }, "(values_.further_from_one) <= (1.0)");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT2(DoubleLE, values_.further_from_one, 1.0);
+ },
+ "(values_.further_from_one) <= (1.0)");
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.infinity);
- }, "(values_.nan1) <= (values_.infinity)");
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_PRED_FORMAT2(DoubleLE, -values_.infinity, values_.nan1);
- }, " (-values_.infinity) <= (values_.nan1)");
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.nan1);
- }, "(values_.nan1) <= (values_.nan1)");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.infinity);
+ },
+ "(values_.nan1) <= (values_.infinity)");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_PRED_FORMAT2(DoubleLE, -values_.infinity, values_.nan1);
+ },
+ " (-values_.infinity) <= (values_.nan1)");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.nan1);
+ },
+ "(values_.nan1) <= (values_.nan1)");
}
-
// Verifies that a test or test case whose name starts with DISABLED_ is
// not run.
@@ -3146,9 +3138,7 @@ TEST(DisabledTest, DISABLED_TestShouldNotRun) {
// A test whose name does not start with DISABLED_.
// Should run.
-TEST(DisabledTest, NotDISABLED_TestShouldRun) {
- EXPECT_EQ(1, 1);
-}
+TEST(DisabledTest, NotDISABLED_TestShouldRun) { EXPECT_EQ(1, 1); }
// A test case whose name starts with DISABLED_.
// Should not run.
@@ -3188,8 +3178,7 @@ TEST_F(DisabledTestsTest, DISABLED_TestShouldNotRun_2) {
// Tests that disabled typed tests aren't run.
template <typename T>
-class TypedTest : public Test {
-};
+class TypedTest : public Test {};
typedef testing::Types<int, double> NumericTypes;
TYPED_TEST_SUITE(TypedTest, NumericTypes);
@@ -3199,8 +3188,7 @@ TYPED_TEST(TypedTest, DISABLED_ShouldNotRun) {
}
template <typename T>
-class DISABLED_TypedTest : public Test {
-};
+class DISABLED_TypedTest : public Test {};
TYPED_TEST_SUITE(DISABLED_TypedTest, NumericTypes);
@@ -3211,8 +3199,7 @@ TYPED_TEST(DISABLED_TypedTest, ShouldNotRun) {
// Tests that disabled type-parameterized tests aren't run.
template <typename T>
-class TypedTestP : public Test {
-};
+class TypedTestP : public Test {};
TYPED_TEST_SUITE_P(TypedTestP);
@@ -3226,8 +3213,7 @@ REGISTER_TYPED_TEST_SUITE_P(TypedTestP, DISABLED_ShouldNotRun);
INSTANTIATE_TYPED_TEST_SUITE_P(My, TypedTestP, NumericTypes);
template <typename T>
-class DISABLED_TypedTestP : public Test {
-};
+class DISABLED_TypedTestP : public Test {};
TYPED_TEST_SUITE_P(DISABLED_TypedTestP);
@@ -3247,15 +3233,11 @@ class SingleEvaluationTest : public Test {
// This helper function is needed by the FailedASSERT_STREQ test
// below. It's public to work around C++Builder's bug with scoping local
// classes.
- static void CompareAndIncrementCharPtrs() {
- ASSERT_STREQ(p1_++, p2_++);
- }
+ static void CompareAndIncrementCharPtrs() { ASSERT_STREQ(p1_++, p2_++); }
// This helper function is needed by the FailedASSERT_NE test below. It's
// public to work around C++Builder's bug with scoping local classes.
- static void CompareAndIncrementInts() {
- ASSERT_NE(a_++, b_++);
- }
+ static void CompareAndIncrementInts() { ASSERT_NE(a_++, b_++); }
protected:
SingleEvaluationTest() {
@@ -3298,8 +3280,7 @@ TEST_F(SingleEvaluationTest, ASSERT_STR) {
EXPECT_EQ(s2_ + 1, p2_);
// failed EXPECT_STRCASEEQ
- EXPECT_NONFATAL_FAILURE(EXPECT_STRCASEEQ(p1_++, p2_++),
- "Ignoring case");
+ EXPECT_NONFATAL_FAILURE(EXPECT_STRCASEEQ(p1_++, p2_++), "Ignoring case");
EXPECT_EQ(s1_ + 2, p1_);
EXPECT_EQ(s2_ + 2, p2_);
}
@@ -3347,11 +3328,7 @@ TEST_F(SingleEvaluationTest, OtherCases) {
#if GTEST_HAS_RTTI
-#ifdef _MSC_VER
-#define ERROR_DESC "class std::runtime_error"
-#else
#define ERROR_DESC "std::runtime_error"
-#endif
#else // GTEST_HAS_RTTI
@@ -3359,34 +3336,39 @@ TEST_F(SingleEvaluationTest, OtherCases) {
#endif // GTEST_HAS_RTTI
-void ThrowAnInteger() {
- throw 1;
-}
-void ThrowRuntimeError(const char* what) {
- throw std::runtime_error(what);
-}
+void ThrowAnInteger() { throw 1; }
+void ThrowRuntimeError(const char* what) { throw std::runtime_error(what); }
// Tests that assertion arguments are evaluated exactly once.
TEST_F(SingleEvaluationTest, ExceptionTests) {
// successful EXPECT_THROW
- EXPECT_THROW({ // NOLINT
- a_++;
- ThrowAnInteger();
- }, int);
+ EXPECT_THROW(
+ { // NOLINT
+ a_++;
+ ThrowAnInteger();
+ },
+ int);
EXPECT_EQ(1, a_);
// failed EXPECT_THROW, throws different
- EXPECT_NONFATAL_FAILURE(EXPECT_THROW({ // NOLINT
- a_++;
- ThrowAnInteger();
- }, bool), "throws a different type");
+ EXPECT_NONFATAL_FAILURE(EXPECT_THROW(
+ { // NOLINT
+ a_++;
+ ThrowAnInteger();
+ },
+ bool),
+ "throws a different type");
EXPECT_EQ(2, a_);
// failed EXPECT_THROW, throws runtime error
- EXPECT_NONFATAL_FAILURE(EXPECT_THROW({ // NOLINT
- a_++;
- ThrowRuntimeError("A description");
- }, bool), "throws " ERROR_DESC " with description \"A description\"");
+ EXPECT_NONFATAL_FAILURE(EXPECT_THROW(
+ { // NOLINT
+ a_++;
+ ThrowRuntimeError("A description");
+ },
+ bool),
+ "throws " ERROR_DESC
+ " with description \"A description\"");
EXPECT_EQ(3, a_);
// failed EXPECT_THROW, throws nothing
@@ -3399,9 +3381,10 @@ TEST_F(SingleEvaluationTest, ExceptionTests) {
// failed EXPECT_NO_THROW
EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW({ // NOLINT
- a_++;
- ThrowAnInteger();
- }), "it throws");
+ a_++;
+ ThrowAnInteger();
+ }),
+ "it throws");
EXPECT_EQ(6, a_);
// successful EXPECT_ANY_THROW
@@ -3422,12 +3405,8 @@ TEST_F(SingleEvaluationTest, ExceptionTests) {
class NoFatalFailureTest : public Test {
protected:
void Succeeds() {}
- void FailsNonFatal() {
- ADD_FAILURE() << "some non-fatal failure";
- }
- void Fails() {
- FAIL() << "some fatal failure";
- }
+ void FailsNonFatal() { ADD_FAILURE() << "some non-fatal failure"; }
+ void Fails() { FAIL() << "some fatal failure"; }
void DoAssertNoFatalFailureOnFails() {
ASSERT_NO_FATAL_FAILURE(Fails());
@@ -3446,12 +3425,10 @@ TEST_F(NoFatalFailureTest, NoFailure) {
}
TEST_F(NoFatalFailureTest, NonFatalIsNoFailure) {
- EXPECT_NONFATAL_FAILURE(
- EXPECT_NO_FATAL_FAILURE(FailsNonFatal()),
- "some non-fatal failure");
- EXPECT_NONFATAL_FAILURE(
- ASSERT_NO_FATAL_FAILURE(FailsNonFatal()),
- "some non-fatal failure");
+ EXPECT_NONFATAL_FAILURE(EXPECT_NO_FATAL_FAILURE(FailsNonFatal()),
+ "some non-fatal failure");
+ EXPECT_NONFATAL_FAILURE(ASSERT_NO_FATAL_FAILURE(FailsNonFatal()),
+ "some non-fatal failure");
}
TEST_F(NoFatalFailureTest, AssertNoFatalFailureOnFatalFailure) {
@@ -3496,10 +3473,10 @@ TEST_F(NoFatalFailureTest, MessageIsStreamable) {
TestPartResultArray gtest_failures;
{
ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);
- EXPECT_NO_FATAL_FAILURE(FAIL() << "foo") << "my message";
+ EXPECT_NO_FATAL_FAILURE([] { FAIL() << "foo"; }()) << "my message";
}
ASSERT_EQ(2, gtest_failures.size());
- EXPECT_EQ(TestPartResult::kNonFatalFailure,
+ EXPECT_EQ(TestPartResult::kFatalFailure,
gtest_failures.GetTestPartResult(0).type());
EXPECT_EQ(TestPartResult::kNonFatalFailure,
gtest_failures.GetTestPartResult(1).type());
@@ -3574,8 +3551,9 @@ TEST(EditDistance, TestSuites) {
EditsToString(CalculateOptimalEdits(CharsToIndices(c->left),
CharsToIndices(c->right))))
<< "Left <" << c->left << "> Right <" << c->right << "> Edits <"
- << EditsToString(CalculateOptimalEdits(
- CharsToIndices(c->left), CharsToIndices(c->right))) << ">";
+ << EditsToString(CalculateOptimalEdits(CharsToIndices(c->left),
+ CharsToIndices(c->right)))
+ << ">";
EXPECT_TRUE(c->expected_diff == CreateUnifiedDiff(CharsToLines(c->left),
CharsToLines(c->right)))
<< "Left <" << c->left << "> Right <" << c->right << "> Diff <"
@@ -3588,8 +3566,7 @@ TEST(EditDistance, TestSuites) {
TEST(AssertionTest, EqFailure) {
const std::string foo_val("5"), bar_val("6");
const std::string msg1(
- EqFailure("foo", "bar", foo_val, bar_val, false)
- .failure_message());
+ EqFailure("foo", "bar", foo_val, bar_val, false).failure_message());
EXPECT_STREQ(
"Expected equality of these values:\n"
" foo\n"
@@ -3599,8 +3576,7 @@ TEST(AssertionTest, EqFailure) {
msg1.c_str());
const std::string msg2(
- EqFailure("foo", "6", foo_val, bar_val, false)
- .failure_message());
+ EqFailure("foo", "6", foo_val, bar_val, false).failure_message());
EXPECT_STREQ(
"Expected equality of these values:\n"
" foo\n"
@@ -3609,8 +3585,7 @@ TEST(AssertionTest, EqFailure) {
msg2.c_str());
const std::string msg3(
- EqFailure("5", "bar", foo_val, bar_val, false)
- .failure_message());
+ EqFailure("5", "bar", foo_val, bar_val, false).failure_message());
EXPECT_STREQ(
"Expected equality of these values:\n"
" 5\n"
@@ -3627,9 +3602,8 @@ TEST(AssertionTest, EqFailure) {
msg4.c_str());
const std::string msg5(
- EqFailure("foo", "bar",
- std::string("\"x\""), std::string("\"y\""),
- true).failure_message());
+ EqFailure("foo", "bar", std::string("\"x\""), std::string("\"y\""), true)
+ .failure_message());
EXPECT_STREQ(
"Expected equality of these values:\n"
" foo\n"
@@ -3664,24 +3638,21 @@ TEST(AssertionTest, AppendUserMessage) {
const std::string foo("foo");
Message msg;
- EXPECT_STREQ("foo",
- AppendUserMessage(foo, msg).c_str());
+ EXPECT_STREQ("foo", AppendUserMessage(foo, msg).c_str());
msg << "bar";
- EXPECT_STREQ("foo\nbar",
- AppendUserMessage(foo, msg).c_str());
+ EXPECT_STREQ("foo\nbar", AppendUserMessage(foo, msg).c_str());
}
#ifdef __BORLANDC__
// Silences warnings: "Condition is always true", "Unreachable code"
-# pragma option push -w-ccc -w-rch
+#pragma option push -w-ccc -w-rch
#endif
// Tests ASSERT_TRUE.
TEST(AssertionTest, ASSERT_TRUE) {
ASSERT_TRUE(2 > 1); // NOLINT
- EXPECT_FATAL_FAILURE(ASSERT_TRUE(2 < 1),
- "2 < 1");
+ EXPECT_FATAL_FAILURE(ASSERT_TRUE(2 < 1), "2 < 1");
}
// Tests ASSERT_TRUE(predicate) for predicates returning AssertionResult.
@@ -3729,7 +3700,7 @@ TEST(AssertionTest, AssertFalseWithAssertionResult) {
#ifdef __BORLANDC__
// Restores warnings after previous "#pragma option push" suppressed them
-# pragma option pop
+#pragma option pop
#endif
// Tests using ASSERT_EQ on double values. The purpose is to make
@@ -3740,18 +3711,19 @@ TEST(ExpectTest, ASSERT_EQ_Double) {
ASSERT_EQ(5.6, 5.6);
// A failure.
- EXPECT_FATAL_FAILURE(ASSERT_EQ(5.1, 5.2),
- "5.1");
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(5.1, 5.2), "5.1");
}
// Tests ASSERT_EQ.
TEST(AssertionTest, ASSERT_EQ) {
ASSERT_EQ(5, 2 + 3);
+ // clang-format off
EXPECT_FATAL_FAILURE(ASSERT_EQ(5, 2*3),
"Expected equality of these values:\n"
" 5\n"
" 2*3\n"
" Which is: 6");
+ // clang-format on
}
// Tests ASSERT_EQ(NULL, pointer).
@@ -3776,8 +3748,7 @@ TEST(ExpectTest, ASSERT_EQ_0) {
ASSERT_EQ(0, n);
// A failure.
- EXPECT_FATAL_FAILURE(ASSERT_EQ(0, 5.6),
- " 0\n 5.6");
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(0, 5.6), " 0\n 5.6");
}
// Tests ASSERT_NE.
@@ -3792,30 +3763,26 @@ TEST(AssertionTest, ASSERT_NE) {
TEST(AssertionTest, ASSERT_LE) {
ASSERT_LE(2, 3);
ASSERT_LE(2, 2);
- EXPECT_FATAL_FAILURE(ASSERT_LE(2, 0),
- "Expected: (2) <= (0), actual: 2 vs 0");
+ EXPECT_FATAL_FAILURE(ASSERT_LE(2, 0), "Expected: (2) <= (0), actual: 2 vs 0");
}
// Tests ASSERT_LT.
TEST(AssertionTest, ASSERT_LT) {
ASSERT_LT(2, 3);
- EXPECT_FATAL_FAILURE(ASSERT_LT(2, 2),
- "Expected: (2) < (2), actual: 2 vs 2");
+ EXPECT_FATAL_FAILURE(ASSERT_LT(2, 2), "Expected: (2) < (2), actual: 2 vs 2");
}
// Tests ASSERT_GE.
TEST(AssertionTest, ASSERT_GE) {
ASSERT_GE(2, 1);
ASSERT_GE(2, 2);
- EXPECT_FATAL_FAILURE(ASSERT_GE(2, 3),
- "Expected: (2) >= (3), actual: 2 vs 3");
+ EXPECT_FATAL_FAILURE(ASSERT_GE(2, 3), "Expected: (2) >= (3), actual: 2 vs 3");
}
// Tests ASSERT_GT.
TEST(AssertionTest, ASSERT_GT) {
ASSERT_GT(2, 1);
- EXPECT_FATAL_FAILURE(ASSERT_GT(2, 2),
- "Expected: (2) > (2), actual: 2 vs 2");
+ EXPECT_FATAL_FAILURE(ASSERT_GT(2, 2), "Expected: (2) > (2), actual: 2 vs 2");
}
#if GTEST_HAS_EXCEPTIONS
@@ -3826,7 +3793,7 @@ void ThrowNothing() {}
TEST(AssertionTest, ASSERT_THROW) {
ASSERT_THROW(ThrowAnInteger(), int);
-# ifndef __BORLANDC__
+#ifndef __BORLANDC__
// ICEs in C++Builder 2007 and 2009.
EXPECT_FATAL_FAILURE(
@@ -3837,9 +3804,10 @@ TEST(AssertionTest, ASSERT_THROW) {
ASSERT_THROW(ThrowRuntimeError("A description"), std::logic_error),
"Expected: ThrowRuntimeError(\"A description\") "
"throws an exception of type std::logic_error.\n "
- "Actual: it throws " ERROR_DESC " "
+ "Actual: it throws " ERROR_DESC
+ " "
"with description \"A description\".");
-# endif
+#endif
EXPECT_FATAL_FAILURE(
ASSERT_THROW(ThrowNothing(), bool),
@@ -3856,17 +3824,17 @@ TEST(AssertionTest, ASSERT_NO_THROW) {
EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowRuntimeError("A description")),
"Expected: ThrowRuntimeError(\"A description\") "
"doesn't throw an exception.\n "
- "Actual: it throws " ERROR_DESC " "
+ "Actual: it throws " ERROR_DESC
+ " "
"with description \"A description\".");
}
// Tests ASSERT_ANY_THROW.
TEST(AssertionTest, ASSERT_ANY_THROW) {
ASSERT_ANY_THROW(ThrowAnInteger());
- EXPECT_FATAL_FAILURE(
- ASSERT_ANY_THROW(ThrowNothing()),
- "Expected: ThrowNothing() throws an exception.\n"
- " Actual: it doesn't.");
+ EXPECT_FATAL_FAILURE(ASSERT_ANY_THROW(ThrowNothing()),
+ "Expected: ThrowNothing() throws an exception.\n"
+ " Actual: it doesn't.");
}
#endif // GTEST_HAS_EXCEPTIONS
@@ -3880,14 +3848,11 @@ TEST(AssertionTest, AssertPrecedence) {
}
// A subroutine used by the following test.
-void TestEq1(int x) {
- ASSERT_EQ(1, x);
-}
+void TestEq1(int x) { ASSERT_EQ(1, x); }
// Tests calling a test subroutine that's not part of a fixture.
TEST(AssertionTest, NonFixtureSubroutine) {
- EXPECT_FATAL_FAILURE(TestEq1(2),
- " x\n Which is: 2");
+ EXPECT_FATAL_FAILURE(TestEq1(2), " x\n Which is: 2");
}
// An uncopyable class.
@@ -3899,6 +3864,7 @@ class Uncopyable {
bool operator==(const Uncopyable& rhs) const {
return value() == rhs.value();
}
+
private:
// This constructor deliberately has no implementation, as we don't
// want this class to be copyable.
@@ -3911,10 +3877,7 @@ class Uncopyable {
return os << value.value();
}
-
-bool IsPositiveUncopyable(const Uncopyable& x) {
- return x.value() > 0;
-}
+bool IsPositiveUncopyable(const Uncopyable& x) { return x.value() > 0; }
// A subroutine used by the following test.
void TestAssertNonPositive() {
@@ -3933,8 +3896,9 @@ TEST(AssertionTest, AssertWorksWithUncopyableObject) {
Uncopyable x(5);
ASSERT_PRED1(IsPositiveUncopyable, x);
ASSERT_EQ(x, x);
- EXPECT_FATAL_FAILURE(TestAssertNonPositive(),
- "IsPositiveUncopyable(y) evaluates to false, where\ny evaluates to -1");
+ EXPECT_FATAL_FAILURE(
+ TestAssertNonPositive(),
+ "IsPositiveUncopyable(y) evaluates to false, where\ny evaluates to -1");
EXPECT_FATAL_FAILURE(TestAssertEqualsUncopyable(),
"Expected equality of these values:\n"
" x\n Which is: 5\n y\n Which is: -1");
@@ -3945,18 +3909,16 @@ TEST(AssertionTest, ExpectWorksWithUncopyableObject) {
Uncopyable x(5);
EXPECT_PRED1(IsPositiveUncopyable, x);
Uncopyable y(-1);
- EXPECT_NONFATAL_FAILURE(EXPECT_PRED1(IsPositiveUncopyable, y),
- "IsPositiveUncopyable(y) evaluates to false, where\ny evaluates to -1");
+ EXPECT_NONFATAL_FAILURE(
+ EXPECT_PRED1(IsPositiveUncopyable, y),
+ "IsPositiveUncopyable(y) evaluates to false, where\ny evaluates to -1");
EXPECT_EQ(x, x);
EXPECT_NONFATAL_FAILURE(EXPECT_EQ(x, y),
"Expected equality of these values:\n"
" x\n Which is: 5\n y\n Which is: -1");
}
-enum NamedEnum {
- kE1 = 0,
- kE2 = 1
-};
+enum NamedEnum { kE1 = 0, kE2 = 1 };
TEST(AssertionTest, NamedEnum) {
EXPECT_EQ(kE1, kE1);
@@ -3972,7 +3934,7 @@ TEST(AssertionTest, NamedEnum) {
enum {
kCaseA = -1,
-# if GTEST_OS_LINUX
+#ifdef GTEST_OS_LINUX
// We want to test the case where the size of the anonymous enum is
// larger than sizeof(int), to make sure our implementation of the
@@ -3985,21 +3947,21 @@ enum {
// assertions.
kCaseB = testing::internal::kMaxBiggestInt,
-# else
+#else
kCaseB = INT_MAX,
-# endif // GTEST_OS_LINUX
+#endif // GTEST_OS_LINUX
kCaseC = 42
};
TEST(AssertionTest, AnonymousEnum) {
-# if GTEST_OS_LINUX
+#ifdef GTEST_OS_LINUX
EXPECT_EQ(static_cast<int>(kCaseA), static_cast<int>(kCaseB));
-# endif // GTEST_OS_LINUX
+#endif // GTEST_OS_LINUX
EXPECT_EQ(kCaseA, kCaseA);
EXPECT_NE(kCaseA, kCaseB);
@@ -4007,10 +3969,8 @@ TEST(AssertionTest, AnonymousEnum) {
EXPECT_LE(kCaseA, kCaseB);
EXPECT_GT(kCaseB, kCaseA);
EXPECT_GE(kCaseA, kCaseA);
- EXPECT_NONFATAL_FAILURE(EXPECT_GE(kCaseA, kCaseB),
- "(kCaseA) >= (kCaseB)");
- EXPECT_NONFATAL_FAILURE(EXPECT_GE(kCaseA, kCaseC),
- "-1 vs 42");
+ EXPECT_NONFATAL_FAILURE(EXPECT_GE(kCaseA, kCaseB), "(kCaseA) >= (kCaseB)");
+ EXPECT_NONFATAL_FAILURE(EXPECT_GE(kCaseA, kCaseC), "-1 vs 42");
ASSERT_EQ(kCaseA, kCaseA);
ASSERT_NE(kCaseA, kCaseB);
@@ -4019,34 +3979,25 @@ TEST(AssertionTest, AnonymousEnum) {
ASSERT_GT(kCaseB, kCaseA);
ASSERT_GE(kCaseA, kCaseA);
-# ifndef __BORLANDC__
+#ifndef __BORLANDC__
// ICEs in C++Builder.
- EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseB),
- " kCaseB\n Which is: ");
- EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseC),
- "\n Which is: 42");
-# endif
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseB), " kCaseB\n Which is: ");
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseC), "\n Which is: 42");
+#endif
- EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseC),
- "\n Which is: -1");
+ EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseC), "\n Which is: -1");
}
#endif // !GTEST_OS_MAC && !defined(__SUNPRO_CC)
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
-static HRESULT UnexpectedHRESULTFailure() {
- return E_UNEXPECTED;
-}
+static HRESULT UnexpectedHRESULTFailure() { return E_UNEXPECTED; }
-static HRESULT OkHRESULTSuccess() {
- return S_OK;
-}
+static HRESULT OkHRESULTSuccess() { return S_OK; }
-static HRESULT FalseHRESULTSuccess() {
- return S_FALSE;
-}
+static HRESULT FalseHRESULTSuccess() { return S_FALSE; }
// HRESULT assertion tests cover both zero and non-zero
// success codes, as well as the failure message for each.
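// (For reference: SUCCEEDED(hr) is essentially hr >= 0, so both S_OK (0x0)
// and S_FALSE (0x1) count as success, while E_UNEXPECTED (0x8000FFFF) has
// the sign bit set and fails, matching the literals asserted below.)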
@@ -4057,8 +4008,8 @@ TEST(HRESULTAssertionTest, EXPECT_HRESULT_SUCCEEDED) {
EXPECT_HRESULT_SUCCEEDED(S_FALSE);
EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_SUCCEEDED(UnexpectedHRESULTFailure()),
- "Expected: (UnexpectedHRESULTFailure()) succeeds.\n"
- " Actual: 0x8000FFFF");
+ "Expected: (UnexpectedHRESULTFailure()) succeeds.\n"
+ " Actual: 0x8000FFFF");
}
TEST(HRESULTAssertionTest, ASSERT_HRESULT_SUCCEEDED) {
@@ -4066,35 +4017,35 @@ TEST(HRESULTAssertionTest, ASSERT_HRESULT_SUCCEEDED) {
ASSERT_HRESULT_SUCCEEDED(S_FALSE);
EXPECT_FATAL_FAILURE(ASSERT_HRESULT_SUCCEEDED(UnexpectedHRESULTFailure()),
- "Expected: (UnexpectedHRESULTFailure()) succeeds.\n"
- " Actual: 0x8000FFFF");
+ "Expected: (UnexpectedHRESULTFailure()) succeeds.\n"
+ " Actual: 0x8000FFFF");
}
TEST(HRESULTAssertionTest, EXPECT_HRESULT_FAILED) {
EXPECT_HRESULT_FAILED(E_UNEXPECTED);
EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_FAILED(OkHRESULTSuccess()),
- "Expected: (OkHRESULTSuccess()) fails.\n"
- " Actual: 0x0");
+ "Expected: (OkHRESULTSuccess()) fails.\n"
+ " Actual: 0x0");
EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_FAILED(FalseHRESULTSuccess()),
- "Expected: (FalseHRESULTSuccess()) fails.\n"
- " Actual: 0x1");
+ "Expected: (FalseHRESULTSuccess()) fails.\n"
+ " Actual: 0x1");
}
TEST(HRESULTAssertionTest, ASSERT_HRESULT_FAILED) {
ASSERT_HRESULT_FAILED(E_UNEXPECTED);
-# ifndef __BORLANDC__
+#ifndef __BORLANDC__
// ICEs in C++Builder 2007 and 2009.
EXPECT_FATAL_FAILURE(ASSERT_HRESULT_FAILED(OkHRESULTSuccess()),
- "Expected: (OkHRESULTSuccess()) fails.\n"
- " Actual: 0x0");
-# endif
+ "Expected: (OkHRESULTSuccess()) fails.\n"
+ " Actual: 0x0");
+#endif
EXPECT_FATAL_FAILURE(ASSERT_HRESULT_FAILED(FalseHRESULTSuccess()),
- "Expected: (FalseHRESULTSuccess()) fails.\n"
- " Actual: 0x1");
+ "Expected: (FalseHRESULTSuccess()) fails.\n"
+ " Actual: 0x1");
}
// Tests that streaming to the HRESULT macros works.
@@ -4104,25 +4055,23 @@ TEST(HRESULTAssertionTest, Streaming) {
EXPECT_HRESULT_FAILED(E_UNEXPECTED) << "unexpected failure";
ASSERT_HRESULT_FAILED(E_UNEXPECTED) << "unexpected failure";
- EXPECT_NONFATAL_FAILURE(
- EXPECT_HRESULT_SUCCEEDED(E_UNEXPECTED) << "expected failure",
- "expected failure");
+ EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_SUCCEEDED(E_UNEXPECTED)
+ << "expected failure",
+ "expected failure");
-# ifndef __BORLANDC__
+#ifndef __BORLANDC__
// ICEs in C++Builder 2007 and 2009.
- EXPECT_FATAL_FAILURE(
- ASSERT_HRESULT_SUCCEEDED(E_UNEXPECTED) << "expected failure",
- "expected failure");
-# endif
+ EXPECT_FATAL_FAILURE(ASSERT_HRESULT_SUCCEEDED(E_UNEXPECTED)
+ << "expected failure",
+ "expected failure");
+#endif
- EXPECT_NONFATAL_FAILURE(
- EXPECT_HRESULT_FAILED(S_OK) << "expected failure",
- "expected failure");
+ EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_FAILED(S_OK) << "expected failure",
+ "expected failure");
- EXPECT_FATAL_FAILURE(
- ASSERT_HRESULT_FAILED(S_OK) << "expected failure",
- "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_HRESULT_FAILED(S_OK) << "expected failure",
+ "expected failure");
}
#endif // GTEST_OS_WINDOWS
@@ -4145,8 +4094,7 @@ TEST(AssertionSyntaxTest, BasicAssertionsBehavesLikeSingleStatement) {
else
; // NOLINT
- if (AlwaysFalse())
- ASSERT_LT(1, 3);
+ if (AlwaysFalse()) ASSERT_LT(1, 3);
if (AlwaysFalse())
; // NOLINT
@@ -4165,7 +4113,7 @@ TEST(ExpectThrowTest, DoesNotGenerateUnreachableCodeWarning) {
EXPECT_THROW(throw 1, int);
EXPECT_NONFATAL_FAILURE(EXPECT_THROW(n++, int), "");
- EXPECT_NONFATAL_FAILURE(EXPECT_THROW(throw 1, const char*), "");
+ EXPECT_NONFATAL_FAILURE(EXPECT_THROW(throw n, const char*), "");
EXPECT_NO_THROW(n++);
EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(throw 1), "");
EXPECT_ANY_THROW(throw 1);
@@ -4184,24 +4132,21 @@ TEST(ExpectThrowTest, DoesNotGenerateDuplicateCatchClauseWarning) {
#pragma GCC diagnostic ignored "-Wpragmas"
#endif
TEST(AssertionSyntaxTest, ExceptionAssertionsBehavesLikeSingleStatement) {
- if (AlwaysFalse())
- EXPECT_THROW(ThrowNothing(), bool);
+ if (AlwaysFalse()) EXPECT_THROW(ThrowNothing(), bool);
if (AlwaysTrue())
EXPECT_THROW(ThrowAnInteger(), int);
else
; // NOLINT
- if (AlwaysFalse())
- EXPECT_NO_THROW(ThrowAnInteger());
+ if (AlwaysFalse()) EXPECT_NO_THROW(ThrowAnInteger());
if (AlwaysTrue())
EXPECT_NO_THROW(ThrowNothing());
else
; // NOLINT
- if (AlwaysFalse())
- EXPECT_ANY_THROW(ThrowNothing());
+ if (AlwaysFalse()) EXPECT_ANY_THROW(ThrowNothing());
if (AlwaysTrue())
EXPECT_ANY_THROW(ThrowAnInteger());
@@ -4257,8 +4202,8 @@ TEST(AssertionSyntaxTest, WorksWithSwitch) {
}
switch (0)
- case 0:
- EXPECT_FALSE(false) << "EXPECT_FALSE failed in switch case";
+ case 0:
+ EXPECT_FALSE(false) << "EXPECT_FALSE failed in switch case";
// Binary assertions are implemented using a different code path
// than the Boolean assertions. Hence we test them separately.
@@ -4269,22 +4214,20 @@ TEST(AssertionSyntaxTest, WorksWithSwitch) {
}
switch (0)
- case 0:
- EXPECT_NE(1, 2);
+ case 0:
+ EXPECT_NE(1, 2);
}
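// Hedged sketch of why the brace-less if/else and switch forms above
// compile: each assertion expands to a single statement guarded in the
// style of gtest's ambiguous-else blocker. A simplified, hypothetical
// expansion (the real macros report a test failure instead of aborting):
#include <cstdlib>
#define SKETCH_EXPECT_TRUE(condition) \
  switch (0)                          \
  case 0:                             \
  default:                            \
    if (condition)                    \
      ;                               \
    else                              \
      std::abort()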
#if GTEST_HAS_EXCEPTIONS
-void ThrowAString() {
- throw "std::string";
-}
+void ThrowAString() { throw "std::string"; }
// Test that the exception assertion macros compile and work with const
// type qualifier.
TEST(AssertionSyntaxTest, WorksWithConst) {
- ASSERT_THROW(ThrowAString(), const char*);
+ ASSERT_THROW(ThrowAString(), const char*);
- EXPECT_THROW(ThrowAString(), const char*);
+ EXPECT_THROW(ThrowAString(), const char*);
}
#endif // GTEST_HAS_EXCEPTIONS
@@ -4382,22 +4325,19 @@ TEST(AssertionWithMessageTest, ASSERT_FLOATING) {
// Tests using ASSERT_FALSE with a streamed message.
TEST(AssertionWithMessageTest, ASSERT_FALSE) {
ASSERT_FALSE(false) << "This shouldn't fail.";
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_FALSE(true) << "Expected failure: " << 2 << " > " << 1
- << " evaluates to " << true;
- }, "Expected failure");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_FALSE(true) << "Expected failure: " << 2 << " > " << 1
+ << " evaluates to " << true;
+ },
+ "Expected failure");
}
// Tests using FAIL with a streamed message.
-TEST(AssertionWithMessageTest, FAIL) {
- EXPECT_FATAL_FAILURE(FAIL() << 0,
- "0");
-}
+TEST(AssertionWithMessageTest, FAIL) { EXPECT_FATAL_FAILURE(FAIL() << 0, "0"); }
// Tests using SUCCEED with a streamed message.
-TEST(AssertionWithMessageTest, SUCCEED) {
- SUCCEED() << "Success == " << 1;
-}
+TEST(AssertionWithMessageTest, SUCCEED) { SUCCEED() << "Success == " << 1; }
// Tests using ASSERT_TRUE with a streamed message.
TEST(AssertionWithMessageTest, ASSERT_TRUE) {
@@ -4411,16 +4351,19 @@ TEST(AssertionWithMessageTest, ASSERT_TRUE) {
"(null)(null)");
}
-#if GTEST_OS_WINDOWS
+#ifdef GTEST_OS_WINDOWS
// Tests using wide strings in assertion messages.
TEST(AssertionWithMessageTest, WideStringMessage) {
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_TRUE(false) << L"This failure is expected.\x8119";
- }, "This failure is expected.");
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_EQ(1, 2) << "This failure is "
- << L"expected too.\x8120";
- }, "This failure is expected too.");
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_TRUE(false) << L"This failure is expected.\x8119";
+ },
+ "This failure is expected.");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_EQ(1, 2) << "This failure is " << L"expected too.\x8120";
+ },
+ "This failure is expected too.");
}
#endif // GTEST_OS_WINDOWS
@@ -4436,8 +4379,7 @@ TEST(ExpectTest, EXPECT_TRUE) {
"Value of: 2 < 1\n"
" Actual: false\n"
"Expected: true");
- EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(2 > 3),
- "2 > 3");
+ EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(2 > 3), "2 > 3");
}
// Tests EXPECT_TRUE(predicate) for predicates returning AssertionResult.
@@ -4466,8 +4408,7 @@ TEST(ExpectTest, EXPECT_FALSE) {
"Value of: 2 > 1\n"
" Actual: true\n"
"Expected: false");
- EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(2 < 3),
- "2 < 3");
+ EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(2 < 3), "2 < 3");
}
// Tests EXPECT_FALSE(predicate) for predicates returning AssertionResult.
@@ -4486,19 +4427,20 @@ TEST(ExpectTest, ExpectFalseWithAssertionResult) {
#ifdef __BORLANDC__
// Restores warnings after previous "#pragma option push" suppressed them
-# pragma option pop
+#pragma option pop
#endif
// Tests EXPECT_EQ.
TEST(ExpectTest, EXPECT_EQ) {
EXPECT_EQ(5, 2 + 3);
+ // clang-format off
EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5, 2*3),
"Expected equality of these values:\n"
" 5\n"
" 2*3\n"
" Which is: 6");
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5, 2 - 3),
- "2 - 3");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5, 2 - 3), "2 - 3");
+ // clang-format on
}
// Tests using EXPECT_EQ on double values. The purpose is to make
@@ -4509,8 +4451,7 @@ TEST(ExpectTest, EXPECT_EQ_Double) {
EXPECT_EQ(5.6, 5.6);
// A failure.
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5.1, 5.2),
- "5.1");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5.1, 5.2), "5.1");
}
// Tests EXPECT_EQ(NULL, pointer).
@@ -4535,8 +4476,7 @@ TEST(ExpectTest, EXPECT_EQ_0) {
EXPECT_EQ(0, n);
// A failure.
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(0, 5.6),
- " 0\n 5.6");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(0, 5.6), " 0\n 5.6");
}
// Tests EXPECT_NE.
@@ -4546,19 +4486,16 @@ TEST(ExpectTest, EXPECT_NE) {
EXPECT_NONFATAL_FAILURE(EXPECT_NE('a', 'a'),
"Expected: ('a') != ('a'), "
"actual: 'a' (97, 0x61) vs 'a' (97, 0x61)");
- EXPECT_NONFATAL_FAILURE(EXPECT_NE(2, 2),
- "2");
+ EXPECT_NONFATAL_FAILURE(EXPECT_NE(2, 2), "2");
char* const p0 = nullptr;
- EXPECT_NONFATAL_FAILURE(EXPECT_NE(p0, p0),
- "p0");
+ EXPECT_NONFATAL_FAILURE(EXPECT_NE(p0, p0), "p0");
// The only way to get the Nokia compiler to compile the cast
// is to have a separate void* variable first. Putting
// the two casts on the same line doesn't work, nor does
// a direct C-style cast to char*.
void* pv1 = (void*)0x1234; // NOLINT
char* const p1 = reinterpret_cast<char*>(pv1);
- EXPECT_NONFATAL_FAILURE(EXPECT_NE(p1, p1),
- "p1");
+ EXPECT_NONFATAL_FAILURE(EXPECT_NE(p1, p1), "p1");
}
// Tests EXPECT_LE.
@@ -4567,8 +4504,7 @@ TEST(ExpectTest, EXPECT_LE) {
EXPECT_LE(2, 2);
EXPECT_NONFATAL_FAILURE(EXPECT_LE(2, 0),
"Expected: (2) <= (0), actual: 2 vs 0");
- EXPECT_NONFATAL_FAILURE(EXPECT_LE(1.1, 0.9),
- "(1.1) <= (0.9)");
+ EXPECT_NONFATAL_FAILURE(EXPECT_LE(1.1, 0.9), "(1.1) <= (0.9)");
}
// Tests EXPECT_LT.
@@ -4576,8 +4512,7 @@ TEST(ExpectTest, EXPECT_LT) {
EXPECT_LT(2, 3);
EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 2),
"Expected: (2) < (2), actual: 2 vs 2");
- EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 1),
- "(2) < (1)");
+ EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 1), "(2) < (1)");
}
// Tests EXPECT_GE.
@@ -4586,8 +4521,7 @@ TEST(ExpectTest, EXPECT_GE) {
EXPECT_GE(2, 2);
EXPECT_NONFATAL_FAILURE(EXPECT_GE(2, 3),
"Expected: (2) >= (3), actual: 2 vs 3");
- EXPECT_NONFATAL_FAILURE(EXPECT_GE(0.9, 1.1),
- "(0.9) >= (1.1)");
+ EXPECT_NONFATAL_FAILURE(EXPECT_GE(0.9, 1.1), "(0.9) >= (1.1)");
}
// Tests EXPECT_GT.
@@ -4595,8 +4529,7 @@ TEST(ExpectTest, EXPECT_GT) {
EXPECT_GT(2, 1);
EXPECT_NONFATAL_FAILURE(EXPECT_GT(2, 2),
"Expected: (2) > (2), actual: 2 vs 2");
- EXPECT_NONFATAL_FAILURE(EXPECT_GT(2, 3),
- "(2) > (3)");
+ EXPECT_NONFATAL_FAILURE(EXPECT_GT(2, 3), "(2) > (3)");
}
#if GTEST_HAS_EXCEPTIONS
@@ -4607,12 +4540,13 @@ TEST(ExpectTest, EXPECT_THROW) {
EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowAnInteger(), bool),
"Expected: ThrowAnInteger() throws an exception of "
"type bool.\n Actual: it throws a different type.");
- EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowRuntimeError("A description"),
- std::logic_error),
- "Expected: ThrowRuntimeError(\"A description\") "
- "throws an exception of type std::logic_error.\n "
- "Actual: it throws " ERROR_DESC " "
- "with description \"A description\".");
+ EXPECT_NONFATAL_FAILURE(
+ EXPECT_THROW(ThrowRuntimeError("A description"), std::logic_error),
+ "Expected: ThrowRuntimeError(\"A description\") "
+ "throws an exception of type std::logic_error.\n "
+ "Actual: it throws " ERROR_DESC
+ " "
+ "with description \"A description\".");
EXPECT_NONFATAL_FAILURE(
EXPECT_THROW(ThrowNothing(), bool),
"Expected: ThrowNothing() throws an exception of type bool.\n"
@@ -4628,17 +4562,17 @@ TEST(ExpectTest, EXPECT_NO_THROW) {
EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowRuntimeError("A description")),
"Expected: ThrowRuntimeError(\"A description\") "
"doesn't throw an exception.\n "
- "Actual: it throws " ERROR_DESC " "
+ "Actual: it throws " ERROR_DESC
+ " "
"with description \"A description\".");
}
// Tests EXPECT_ANY_THROW.
TEST(ExpectTest, EXPECT_ANY_THROW) {
EXPECT_ANY_THROW(ThrowAnInteger());
- EXPECT_NONFATAL_FAILURE(
- EXPECT_ANY_THROW(ThrowNothing()),
- "Expected: ThrowNothing() throws an exception.\n"
- " Actual: it doesn't.");
+ EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(ThrowNothing()),
+ "Expected: ThrowNothing() throws an exception.\n"
+ " Actual: it doesn't.");
}
#endif // GTEST_HAS_EXCEPTIONS
@@ -4650,7 +4584,6 @@ TEST(ExpectTest, ExpectPrecedence) {
" true && false\n Which is: false");
}
-
// Tests the StreamableToString() function.
// Tests using StreamableToString() on a scalar.
@@ -4688,8 +4621,7 @@ TEST(StreamableToStringTest, NullCString) {
TEST(StreamableTest, string) {
static const std::string str(
"This failure message is a std::string, and is expected.");
- EXPECT_FATAL_FAILURE(FAIL() << str,
- str.c_str());
+ EXPECT_FATAL_FAILURE(FAIL() << str, str.c_str());
}
// Tests that we can output strings containing embedded NULs.
@@ -4697,25 +4629,24 @@ TEST(StreamableTest, string) {
TEST(StreamableTest, stringWithEmbeddedNUL) {
static const char char_array_with_nul[] =
"Here's a NUL\0 and some more string";
- static const std::string string_with_nul(char_array_with_nul,
- sizeof(char_array_with_nul)
- - 1); // drops the trailing NUL
+ static const std::string string_with_nul(
+ char_array_with_nul,
+ sizeof(char_array_with_nul) - 1); // drops the trailing NUL
EXPECT_FATAL_FAILURE(FAIL() << string_with_nul,
"Here's a NUL\\0 and some more string");
}
// Tests that we can output a NUL char.
TEST(StreamableTest, NULChar) {
- EXPECT_FATAL_FAILURE({ // NOLINT
- FAIL() << "A NUL" << '\0' << " and some more string";
- }, "A NUL\\0 and some more string");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ FAIL() << "A NUL" << '\0' << " and some more string";
+ },
+ "A NUL\\0 and some more string");
}
// Tests using int as an assertion message.
-TEST(StreamableTest, int) {
- EXPECT_FATAL_FAILURE(FAIL() << 900913,
- "900913");
-}
+TEST(StreamableTest, int) { EXPECT_FATAL_FAILURE(FAIL() << 900913, "900913"); }
// Tests using NULL char pointer as an assertion message.
//
@@ -4729,10 +4660,12 @@ TEST(StreamableTest, NullCharPtr) {
// Tests that basic IO manipulators (endl, ends, and flush) can be
// streamed to testing::Message.
TEST(StreamableTest, BasicIoManip) {
- EXPECT_FATAL_FAILURE({ // NOLINT
- FAIL() << "Line 1." << std::endl
- << "A NUL char " << std::ends << std::flush << " in line 2.";
- }, "Line 1.\nA NUL char \\0 in line 2.");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ FAIL() << "Line 1." << std::endl
+ << "A NUL char " << std::ends << std::flush << " in line 2.";
+ },
+ "Line 1.\nA NUL char \\0 in line 2.");
}
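// (std::ends streams a literal '\0' character, which the failure text
// escapes as "\\0"; that is why the expected substring above reads
// "Line 1.\nA NUL char \\0 in line 2.".)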
// Tests the macros that haven't been covered so far.
@@ -4746,8 +4679,7 @@ void AddFailureHelper(bool* aborted) {
// Tests ADD_FAILURE.
TEST(MacroTest, ADD_FAILURE) {
bool aborted = true;
- EXPECT_NONFATAL_FAILURE(AddFailureHelper(&aborted),
- "Intentional failure.");
+ EXPECT_NONFATAL_FAILURE(AddFailureHelper(&aborted), "Intentional failure.");
EXPECT_FALSE(aborted);
}
@@ -4768,8 +4700,7 @@ TEST(MacroTest, ADD_FAILURE_AT) {
// Tests FAIL.
TEST(MacroTest, FAIL) {
- EXPECT_FATAL_FAILURE(FAIL(),
- "Failed");
+ EXPECT_FATAL_FAILURE(FAIL(), "Failed");
EXPECT_FATAL_FAILURE(FAIL() << "Intentional failure.",
"Intentional failure.");
}
@@ -4802,37 +4733,34 @@ TEST(MacroTest, SUCCEED) {
// Tests using bool values in {EXPECT|ASSERT}_EQ.
TEST(EqAssertionTest, Bool) {
- EXPECT_EQ(true, true);
- EXPECT_FATAL_FAILURE({
- bool false_value = false;
- ASSERT_EQ(false_value, true);
- }, " false_value\n Which is: false\n true");
+ EXPECT_EQ(true, true);
+ EXPECT_FATAL_FAILURE(
+ {
+ bool false_value = false;
+ ASSERT_EQ(false_value, true);
+ },
+ " false_value\n Which is: false\n true");
}
// Tests using int values in {EXPECT|ASSERT}_EQ.
TEST(EqAssertionTest, Int) {
ASSERT_EQ(32, 32);
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(32, 33),
- " 32\n 33");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(32, 33), " 32\n 33");
}
// Tests using time_t values in {EXPECT|ASSERT}_EQ.
TEST(EqAssertionTest, Time_T) {
- EXPECT_EQ(static_cast<time_t>(0),
- static_cast<time_t>(0));
- EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<time_t>(0),
- static_cast<time_t>(1234)),
- "1234");
+ EXPECT_EQ(static_cast<time_t>(0), static_cast<time_t>(0));
+ EXPECT_FATAL_FAILURE(
+ ASSERT_EQ(static_cast<time_t>(0), static_cast<time_t>(1234)), "1234");
}
// Tests using char values in {EXPECT|ASSERT}_EQ.
TEST(EqAssertionTest, Char) {
ASSERT_EQ('z', 'z');
const char ch = 'b';
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ('\0', ch),
- " ch\n Which is: 'b'");
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ('a', ch),
- " ch\n Which is: 'b'");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ('\0', ch), " ch\n Which is: 'b'");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ('a', ch), " ch\n Which is: 'b'");
}
// Tests using wchar_t values in {EXPECT|ASSERT}_EQ.
@@ -4848,8 +4776,7 @@ TEST(EqAssertionTest, WideChar) {
static wchar_t wchar;
wchar = L'b';
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(L'a', wchar),
- "wchar");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(L'a', wchar), "wchar");
wchar = 0x8119;
EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<wchar_t>(0x8120), wchar),
" wchar\n Which is: L'");
@@ -4868,13 +4795,11 @@ TEST(EqAssertionTest, StdString) {
// Compares a const char* to an std::string that has different
// content
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ("Test", ::std::string("test")),
- "\"test\"");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ("Test", ::std::string("test")), "\"test\"");
// Compares an std::string to a char* that has different content.
char* const p1 = const_cast<char*>("foo");
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(::std::string("bar"), p1),
- "p1");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(::std::string("bar"), p1), "p1");
// Compares two std::strings that have different contents, one of
// which has a NUL character in the middle. This should fail.
@@ -4895,28 +4820,31 @@ TEST(EqAssertionTest, StdWideString) {
// Compares an std::wstring to a const wchar_t* that has identical
// content.
- const wchar_t kTestX8119[] = { 'T', 'e', 's', 't', 0x8119, '\0' };
+ const wchar_t kTestX8119[] = {'T', 'e', 's', 't', 0x8119, '\0'};
EXPECT_EQ(::std::wstring(kTestX8119), kTestX8119);
// Compares an std::wstring to a const wchar_t* that has different
// content.
- const wchar_t kTestX8120[] = { 'T', 'e', 's', 't', 0x8120, '\0' };
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_EQ(::std::wstring(kTestX8119), kTestX8120);
- }, "kTestX8120");
+ const wchar_t kTestX8120[] = {'T', 'e', 's', 't', 0x8120, '\0'};
+ EXPECT_NONFATAL_FAILURE(
+ { // NOLINT
+ EXPECT_EQ(::std::wstring(kTestX8119), kTestX8120);
+ },
+ "kTestX8120");
// Compares two std::wstrings that have different contents, one of
// which has a NUL character in the middle.
::std::wstring wstr3(wstr1);
wstr3.at(2) = L'\0';
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(wstr1, wstr3),
- "wstr3");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(wstr1, wstr3), "wstr3");
// Compares a wchar_t* to an std::wstring that has different
// content.
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_EQ(const_cast<wchar_t*>(L"foo"), ::std::wstring(L"bar"));
- }, "");
+ EXPECT_FATAL_FAILURE(
+ { // NOLINT
+ ASSERT_EQ(const_cast<wchar_t*>(L"foo"), ::std::wstring(L"bar"));
+ },
+ "");
}
#endif // GTEST_HAS_STD_WSTRING
@@ -4934,10 +4862,8 @@ TEST(EqAssertionTest, CharPointer) {
char* const p2 = reinterpret_cast<char*>(pv2);
ASSERT_EQ(p1, p1);
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2),
- " p2\n Which is:");
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2),
- " p2\n Which is:");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2), " p2\n Which is:");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2), " p2\n Which is:");
EXPECT_FATAL_FAILURE(ASSERT_EQ(reinterpret_cast<char*>(0x1234),
reinterpret_cast<char*>(0xABC0)),
"ABC0");
@@ -4956,16 +4882,13 @@ TEST(EqAssertionTest, WideCharPointer) {
wchar_t* const p2 = reinterpret_cast<wchar_t*>(pv2);
EXPECT_EQ(p0, p0);
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2),
- " p2\n Which is:");
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2),
- " p2\n Which is:");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2), " p2\n Which is:");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2), " p2\n Which is:");
void* pv3 = (void*)0x1234; // NOLINT
void* pv4 = (void*)0xABC0; // NOLINT
const wchar_t* p3 = reinterpret_cast<const wchar_t*>(pv3);
const wchar_t* p4 = reinterpret_cast<const wchar_t*>(pv4);
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p3, p4),
- "p4");
+ EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p3, p4), "p4");
}
// Tests using other types of pointers in {EXPECT|ASSERT}_EQ.
@@ -4987,15 +4910,11 @@ class UnprintableChar {
bool operator!=(const UnprintableChar& rhs) const {
return char_ != rhs.char_;
}
- bool operator<(const UnprintableChar& rhs) const {
- return char_ < rhs.char_;
- }
+ bool operator<(const UnprintableChar& rhs) const { return char_ < rhs.char_; }
bool operator<=(const UnprintableChar& rhs) const {
return char_ <= rhs.char_;
}
- bool operator>(const UnprintableChar& rhs) const {
- return char_ > rhs.char_;
- }
+ bool operator>(const UnprintableChar& rhs) const { return char_ > rhs.char_; }
bool operator>=(const UnprintableChar& rhs) const {
return char_ >= rhs.char_;
}
@@ -5044,7 +4963,7 @@ TEST(ComparisonAssertionTest, AcceptsUnprintableArgs) {
// both in a TEST and in a TEST_F.
class Foo {
public:
- Foo() {}
+ Foo() = default;
private:
int Bar() const { return 1; }
@@ -5057,9 +4976,7 @@ class Foo {
// Tests that the FRIEND_TEST declaration allows a TEST to access a
// class's private members. This should compile.
-TEST(FRIEND_TEST_Test, TEST) {
- ASSERT_EQ(1, Foo().Bar());
-}
+TEST(FRIEND_TEST_Test, TEST) { ASSERT_EQ(1, Foo().Bar()); }
// The fixture needed to test using FRIEND_TEST with TEST_F.
class FRIEND_TEST_Test2 : public Test {
@@ -5069,9 +4986,7 @@ class FRIEND_TEST_Test2 : public Test {
// Tests that the FRIEND_TEST declaration allows a TEST_F to access a
// class's private members. This should compile.
-TEST_F(FRIEND_TEST_Test2, TEST_F) {
- ASSERT_EQ(1, foo.Bar());
-}
+TEST_F(FRIEND_TEST_Test2, TEST_F) { ASSERT_EQ(1, foo.Bar()); }
// Tests the life cycle of Test objects.
@@ -5206,15 +5121,14 @@ class Base {
public:
explicit Base(int an_x) : x_(an_x) {}
int x() const { return x_; }
+
private:
int x_;
};
-std::ostream& operator<<(std::ostream& os,
- const Base& val) {
+std::ostream& operator<<(std::ostream& os, const Base& val) {
return os << val.x();
}
-std::ostream& operator<<(std::ostream& os,
- const Base* pointer) {
+std::ostream& operator<<(std::ostream& os, const Base* pointer) {
return os << "(" << pointer->x() << ")";
}
@@ -5231,7 +5145,7 @@ TEST(MessageTest, CanStreamUserTypeInGlobalNameSpace) {
namespace {
class MyTypeInUnnamedNameSpace : public Base {
public:
- explicit MyTypeInUnnamedNameSpace(int an_x): Base(an_x) {}
+ explicit MyTypeInUnnamedNameSpace(int an_x) : Base(an_x) {}
};
std::ostream& operator<<(std::ostream& os,
const MyTypeInUnnamedNameSpace& val) {
@@ -5256,14 +5170,12 @@ TEST(MessageTest, CanStreamUserTypeInUnnamedNameSpace) {
namespace namespace1 {
class MyTypeInNameSpace1 : public Base {
public:
- explicit MyTypeInNameSpace1(int an_x): Base(an_x) {}
+ explicit MyTypeInNameSpace1(int an_x) : Base(an_x) {}
};
-std::ostream& operator<<(std::ostream& os,
- const MyTypeInNameSpace1& val) {
+std::ostream& operator<<(std::ostream& os, const MyTypeInNameSpace1& val) {
return os << val.x();
}
-std::ostream& operator<<(std::ostream& os,
- const MyTypeInNameSpace1* pointer) {
+std::ostream& operator<<(std::ostream& os, const MyTypeInNameSpace1* pointer) {
return os << "(" << pointer->x() << ")";
}
} // namespace namespace1
@@ -5281,7 +5193,7 @@ TEST(MessageTest, CanStreamUserTypeInUserNameSpace) {
namespace namespace2 {
class MyTypeInNameSpace2 : public ::Base {
public:
- explicit MyTypeInNameSpace2(int an_x): Base(an_x) {}
+ explicit MyTypeInNameSpace2(int an_x) : Base(an_x) {}
};
} // namespace namespace2
std::ostream& operator<<(std::ostream& os,
@@ -5312,21 +5224,18 @@ TEST(MessageTest, NullPointers) {
Message* p6 = nullptr;
msg << p1 << p2 << p3 << p4 << p5 << p6;
- ASSERT_STREQ("(null)(null)(null)(null)(null)(null)",
- msg.GetString().c_str());
+ ASSERT_STREQ("(null)(null)(null)(null)(null)(null)", msg.GetString().c_str());
}
// Tests streaming wide strings to testing::Message.
TEST(MessageTest, WideStrings) {
// Streams a NULL of type const wchar_t*.
const wchar_t* const_wstr = nullptr;
- EXPECT_STREQ("(null)",
- (Message() << const_wstr).GetString().c_str());
+ EXPECT_STREQ("(null)", (Message() << const_wstr).GetString().c_str());
// Streams a NULL of type wchar_t*.
wchar_t* wstr = nullptr;
- EXPECT_STREQ("(null)",
- (Message() << wstr).GetString().c_str());
+ EXPECT_STREQ("(null)", (Message() << wstr).GetString().c_str());
// Streams a non-NULL of type const wchar_t*.
const_wstr = L"abc\x8119";
@@ -5335,11 +5244,9 @@ TEST(MessageTest, WideStrings) {
// Streams a non-NULL of type wchar_t*.
wstr = const_cast<wchar_t*>(const_wstr);
- EXPECT_STREQ("abc\xe8\x84\x99",
- (Message() << wstr).GetString().c_str());
+ EXPECT_STREQ("abc\xe8\x84\x99", (Message() << wstr).GetString().c_str());
}
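// Worked check of the expected bytes above: U+8119 is 1000 0001 0001 1001
// in binary; packed into the three-byte UTF-8 template
// 1110xxxx 10xxxxxx 10xxxxxx it becomes 0xE8 0x84 0x99, exactly the
// "abc\xe8\x84\x99" literal these assertions compare against.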
-
// This line tests that we can define tests in the testing namespace.
namespace testing {
@@ -5353,14 +5260,12 @@ class TestInfoTest : public Test {
for (int i = 0; i < test_suite->total_test_count(); ++i) {
const TestInfo* const test_info = test_suite->GetTestInfo(i);
- if (strcmp(test_name, test_info->name()) == 0)
- return test_info;
+ if (strcmp(test_name, test_info->name()) == 0) return test_info;
}
return nullptr;
}
- static const TestResult* GetTestResult(
- const TestInfo* test_info) {
+ static const TestResult* GetTestResult(const TestInfo* test_info) {
return test_info->result();
}
};
@@ -5384,26 +5289,25 @@ TEST_F(TestInfoTest, result) {
ASSERT_EQ(0, GetTestResult(test_info)->total_part_count());
}
-#define VERIFY_CODE_LOCATION \
- const int expected_line = __LINE__ - 1; \
+#define VERIFY_CODE_LOCATION \
+ const int expected_line = __LINE__ - 1; \
const TestInfo* const test_info = GetUnitTestImpl()->current_test_info(); \
- ASSERT_TRUE(test_info); \
- EXPECT_STREQ(__FILE__, test_info->file()); \
+ ASSERT_TRUE(test_info); \
+ EXPECT_STREQ(__FILE__, test_info->file()); \
EXPECT_EQ(expected_line, test_info->line())
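// Note: VERIFY_CODE_LOCATION only measures correctly when it sits on the
// line directly below a TEST/TEST_F/TEST_P/TYPED_TEST header, so that
// __LINE__ - 1 names the line the framework recorded for the test; the
// clang-format off/on pair around the uses below preserves that layout.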
+// clang-format off
TEST(CodeLocationForTEST, Verify) {
VERIFY_CODE_LOCATION;
}
-class CodeLocationForTESTF : public Test {
-};
+class CodeLocationForTESTF : public Test {};
TEST_F(CodeLocationForTESTF, Verify) {
VERIFY_CODE_LOCATION;
}
-class CodeLocationForTESTP : public TestWithParam<int> {
-};
+class CodeLocationForTESTP : public TestWithParam<int> {};
TEST_P(CodeLocationForTESTP, Verify) {
VERIFY_CODE_LOCATION;
@@ -5412,8 +5316,7 @@ TEST_P(CodeLocationForTESTP, Verify) {
INSTANTIATE_TEST_SUITE_P(, CodeLocationForTESTP, Values(0));
template <typename T>
-class CodeLocationForTYPEDTEST : public Test {
-};
+class CodeLocationForTYPEDTEST : public Test {};
TYPED_TEST_SUITE(CodeLocationForTYPEDTEST, int);
@@ -5422,8 +5325,7 @@ TYPED_TEST(CodeLocationForTYPEDTEST, Verify) {
}
template <typename T>
-class CodeLocationForTYPEDTESTP : public Test {
-};
+class CodeLocationForTYPEDTESTP : public Test {};
TYPED_TEST_SUITE_P(CodeLocationForTYPEDTESTP);
@@ -5436,6 +5338,7 @@ REGISTER_TYPED_TEST_SUITE_P(CodeLocationForTYPEDTESTP, Verify);
INSTANTIATE_TYPED_TEST_SUITE_P(My, CodeLocationForTYPEDTESTP, int);
#undef VERIFY_CODE_LOCATION
+// clang-format on
// Tests setting up and tearing down a test case.
// Legacy API is deprecated but still available
@@ -5495,9 +5398,7 @@ const char* SetUpTestCaseTest::shared_resource_ = nullptr;
TEST_F(SetUpTestCaseTest, Test1) { EXPECT_STRNE(nullptr, shared_resource_); }
// Another test that uses the shared resource.
-TEST_F(SetUpTestCaseTest, Test2) {
- EXPECT_STREQ("123", shared_resource_);
-}
+TEST_F(SetUpTestCaseTest, Test2) { EXPECT_STREQ("123", shared_resource_); }
#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
// Tests SetUpTestSuite/TearDownTestSuite.
@@ -5580,6 +5481,7 @@ struct Flags {
print_time(true),
random_seed(0),
repeat(1),
+ recreate_environments_when_repeating(true),
shuffle(false),
stack_trace_depth(kMaxStackTraceDepth),
stream_result_to(""),
@@ -5683,6 +5585,16 @@ struct Flags {
return flags;
}
+ // Creates a Flags struct where the gtest_recreate_environments_when_repeating
+ // flag has the given value.
+ static Flags RecreateEnvironmentsWhenRepeating(
+ bool recreate_environments_when_repeating) {
+ Flags flags;
+ flags.recreate_environments_when_repeating =
+ recreate_environments_when_repeating;
+ return flags;
+ }
+
// Creates a Flags struct where the gtest_shuffle flag has the given
// value.
static Flags Shuffle(bool shuffle) {
@@ -5728,6 +5640,7 @@ struct Flags {
bool print_time;
int32_t random_seed;
int32_t repeat;
+ bool recreate_environments_when_repeating;
bool shuffle;
int32_t stack_trace_depth;
const char* stream_result_to;
@@ -5739,22 +5652,23 @@ class ParseFlagsTest : public Test {
protected:
// Clears the flags before each test.
void SetUp() override {
- GTEST_FLAG(also_run_disabled_tests) = false;
- GTEST_FLAG(break_on_failure) = false;
- GTEST_FLAG(catch_exceptions) = false;
- GTEST_FLAG(death_test_use_fork) = false;
- GTEST_FLAG(fail_fast) = false;
- GTEST_FLAG(filter) = "";
- GTEST_FLAG(list_tests) = false;
- GTEST_FLAG(output) = "";
- GTEST_FLAG(brief) = false;
- GTEST_FLAG(print_time) = true;
- GTEST_FLAG(random_seed) = 0;
- GTEST_FLAG(repeat) = 1;
- GTEST_FLAG(shuffle) = false;
- GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth;
- GTEST_FLAG(stream_result_to) = "";
- GTEST_FLAG(throw_on_failure) = false;
+ GTEST_FLAG_SET(also_run_disabled_tests, false);
+ GTEST_FLAG_SET(break_on_failure, false);
+ GTEST_FLAG_SET(catch_exceptions, false);
+ GTEST_FLAG_SET(death_test_use_fork, false);
+ GTEST_FLAG_SET(fail_fast, false);
+ GTEST_FLAG_SET(filter, "");
+ GTEST_FLAG_SET(list_tests, false);
+ GTEST_FLAG_SET(output, "");
+ GTEST_FLAG_SET(brief, false);
+ GTEST_FLAG_SET(print_time, true);
+ GTEST_FLAG_SET(random_seed, 0);
+ GTEST_FLAG_SET(repeat, 1);
+ GTEST_FLAG_SET(recreate_environments_when_repeating, true);
+ GTEST_FLAG_SET(shuffle, false);
+ GTEST_FLAG_SET(stack_trace_depth, kMaxStackTraceDepth);
+ GTEST_FLAG_SET(stream_result_to, "");
+ GTEST_FLAG_SET(throw_on_failure, false);
}
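// (GTEST_FLAG_SET here and GTEST_FLAG_GET in CheckFlags below are the
// accessor macros that replace direct GTEST_FLAG(name) writes and reads,
// so the fixture keeps working whether the flags are backed by the
// built-in implementation or by Abseil flags.)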
// Asserts that two narrow or wide string arrays are equal.
@@ -5771,45 +5685,48 @@ class ParseFlagsTest : public Test {
// Verifies that the flag values match the expected values.
static void CheckFlags(const Flags& expected) {
EXPECT_EQ(expected.also_run_disabled_tests,
- GTEST_FLAG(also_run_disabled_tests));
- EXPECT_EQ(expected.break_on_failure, GTEST_FLAG(break_on_failure));
- EXPECT_EQ(expected.catch_exceptions, GTEST_FLAG(catch_exceptions));
- EXPECT_EQ(expected.death_test_use_fork, GTEST_FLAG(death_test_use_fork));
- EXPECT_EQ(expected.fail_fast, GTEST_FLAG(fail_fast));
- EXPECT_STREQ(expected.filter, GTEST_FLAG(filter).c_str());
- EXPECT_EQ(expected.list_tests, GTEST_FLAG(list_tests));
- EXPECT_STREQ(expected.output, GTEST_FLAG(output).c_str());
- EXPECT_EQ(expected.brief, GTEST_FLAG(brief));
- EXPECT_EQ(expected.print_time, GTEST_FLAG(print_time));
- EXPECT_EQ(expected.random_seed, GTEST_FLAG(random_seed));
- EXPECT_EQ(expected.repeat, GTEST_FLAG(repeat));
- EXPECT_EQ(expected.shuffle, GTEST_FLAG(shuffle));
- EXPECT_EQ(expected.stack_trace_depth, GTEST_FLAG(stack_trace_depth));
+ GTEST_FLAG_GET(also_run_disabled_tests));
+ EXPECT_EQ(expected.break_on_failure, GTEST_FLAG_GET(break_on_failure));
+ EXPECT_EQ(expected.catch_exceptions, GTEST_FLAG_GET(catch_exceptions));
+ EXPECT_EQ(expected.death_test_use_fork,
+ GTEST_FLAG_GET(death_test_use_fork));
+ EXPECT_EQ(expected.fail_fast, GTEST_FLAG_GET(fail_fast));
+ EXPECT_STREQ(expected.filter, GTEST_FLAG_GET(filter).c_str());
+ EXPECT_EQ(expected.list_tests, GTEST_FLAG_GET(list_tests));
+ EXPECT_STREQ(expected.output, GTEST_FLAG_GET(output).c_str());
+ EXPECT_EQ(expected.brief, GTEST_FLAG_GET(brief));
+ EXPECT_EQ(expected.print_time, GTEST_FLAG_GET(print_time));
+ EXPECT_EQ(expected.random_seed, GTEST_FLAG_GET(random_seed));
+ EXPECT_EQ(expected.repeat, GTEST_FLAG_GET(repeat));
+ EXPECT_EQ(expected.recreate_environments_when_repeating,
+ GTEST_FLAG_GET(recreate_environments_when_repeating));
+ EXPECT_EQ(expected.shuffle, GTEST_FLAG_GET(shuffle));
+ EXPECT_EQ(expected.stack_trace_depth, GTEST_FLAG_GET(stack_trace_depth));
EXPECT_STREQ(expected.stream_result_to,
- GTEST_FLAG(stream_result_to).c_str());
- EXPECT_EQ(expected.throw_on_failure, GTEST_FLAG(throw_on_failure));
+ GTEST_FLAG_GET(stream_result_to).c_str());
+ EXPECT_EQ(expected.throw_on_failure, GTEST_FLAG_GET(throw_on_failure));
}
// Parses a command line (specified by argc1 and argv1), then
// verifies that the flag values are expected and that the
// recognized flags are removed from the command line.
template <typename CharType>
- static void TestParsingFlags(int argc1, const CharType** argv1,
- int argc2, const CharType** argv2,
- const Flags& expected, bool should_print_help) {
+ static void TestParsingFlags(int argc1, const CharType** argv1, int argc2,
+ const CharType** argv2, const Flags& expected,
+ bool should_print_help) {
const bool saved_help_flag = ::testing::internal::g_help_flag;
::testing::internal::g_help_flag = false;
-# if GTEST_HAS_STREAM_REDIRECTION
+#if GTEST_HAS_STREAM_REDIRECTION
CaptureStdout();
-# endif
+#endif
// Parses the command line.
internal::ParseGoogleTestFlagsOnly(&argc1, const_cast<CharType**>(argv1));
-# if GTEST_HAS_STREAM_REDIRECTION
+#if GTEST_HAS_STREAM_REDIRECTION
const std::string captured_stdout = GetCapturedStdout();
-# endif
+#endif
// Verifies the flag values.
CheckFlags(expected);
@@ -5822,16 +5739,16 @@ class ParseFlagsTest : public Test {
// help message for the flags it recognizes.
EXPECT_EQ(should_print_help, ::testing::internal::g_help_flag);
-# if GTEST_HAS_STREAM_REDIRECTION
+#if GTEST_HAS_STREAM_REDIRECTION
const char* const expected_help_fragment =
"This program contains tests written using";
if (should_print_help) {
EXPECT_PRED_FORMAT2(IsSubstring, expected_help_fragment, captured_stdout);
} else {
- EXPECT_PRED_FORMAT2(IsNotSubstring,
- expected_help_fragment, captured_stdout);
+ EXPECT_PRED_FORMAT2(IsNotSubstring, expected_help_fragment,
+ captured_stdout);
}
-# endif // GTEST_HAS_STREAM_REDIRECTION
+#endif // GTEST_HAS_STREAM_REDIRECTION
::testing::internal::g_help_flag = saved_help_flag;
}
@@ -5839,10 +5756,10 @@ class ParseFlagsTest : public Test {
// This macro wraps TestParsingFlags so that the user doesn't need to
// specify the array sizes.
-# define GTEST_TEST_PARSING_FLAGS_(argv1, argv2, expected, should_print_help) \
- TestParsingFlags(sizeof(argv1)/sizeof(*argv1) - 1, argv1, \
- sizeof(argv2)/sizeof(*argv2) - 1, argv2, \
- expected, should_print_help)
+#define GTEST_TEST_PARSING_FLAGS_(argv1, argv2, expected, should_print_help) \
+ TestParsingFlags(sizeof(argv1) / sizeof(*argv1) - 1, argv1, \
+ sizeof(argv2) / sizeof(*argv2) - 1, argv2, expected, \
+ should_print_help)
};
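// (In the macro above, every argv array used by these tests ends with a
// trailing nullptr, so sizeof(argv1) / sizeof(*argv1) - 1 yields argc
// without counting the terminator.)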
// Tests parsing an empty command line.
@@ -5872,15 +5789,6 @@ TEST_F(ParseFlagsTest, FailFast) {
GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::FailFast(true), false);
}
-// Tests parsing a bad --gtest_filter flag.
-TEST_F(ParseFlagsTest, FilterBad) {
- const char* argv[] = {"foo.exe", "--gtest_filter", nullptr};
-
- const char* argv2[] = {"foo.exe", "--gtest_filter", nullptr};
-
- GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(""), true);
-}
-
// Tests parsing an empty --gtest_filter flag.
TEST_F(ParseFlagsTest, FilterEmpty) {
const char* argv[] = {"foo.exe", "--gtest_filter=", nullptr};
@@ -6033,15 +5941,6 @@ TEST_F(ParseFlagsTest, ListTestsFalse_F) {
GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false);
}
-// Tests parsing --gtest_output (invalid).
-TEST_F(ParseFlagsTest, OutputEmpty) {
- const char* argv[] = {"foo.exe", "--gtest_output", nullptr};
-
- const char* argv2[] = {"foo.exe", "--gtest_output", nullptr};
-
- GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), true);
-}
-
// Tests parsing --gtest_output=xml
TEST_F(ParseFlagsTest, OutputXml) {
const char* argv[] = {"foo.exe", "--gtest_output=xml", nullptr};
@@ -6067,8 +5966,8 @@ TEST_F(ParseFlagsTest, OutputXmlDirectory) {
const char* argv2[] = {"foo.exe", nullptr};
- GTEST_TEST_PARSING_FLAGS_(argv, argv2,
- Flags::Output("xml:directory/path/"), false);
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Output("xml:directory/path/"),
+ false);
}
// Tests having a --gtest_brief flag
@@ -6161,6 +6060,20 @@ TEST_F(ParseFlagsTest, Repeat) {
GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Repeat(1000), false);
}
+// Tests parsing --gtest_recreate_environments_when_repeating
+TEST_F(ParseFlagsTest, RecreateEnvironmentsWhenRepeating) {
+ const char* argv[] = {
+ "foo.exe",
+ "--gtest_recreate_environments_when_repeating=0",
+ nullptr,
+ };
+
+ const char* argv2[] = {"foo.exe", nullptr};
+
+ GTEST_TEST_PARSING_FLAGS_(
+ argv, argv2, Flags::RecreateEnvironmentsWhenRepeating(false), false);
+}
+
// Tests having a --gtest_also_run_disabled_tests flag
TEST_F(ParseFlagsTest, AlsoRunDisabledTestsFlag) {
const char* argv[] = {"foo.exe", "--gtest_also_run_disabled_tests", nullptr};
@@ -6235,8 +6148,8 @@ TEST_F(ParseFlagsTest, StreamResultTo) {
const char* argv2[] = {"foo.exe", nullptr};
- GTEST_TEST_PARSING_FLAGS_(
- argv, argv2, Flags::StreamResultTo("localhost:1234"), false);
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2,
+ Flags::StreamResultTo("localhost:1234"), false);
}
// Tests parsing --gtest_throw_on_failure.
@@ -6267,23 +6180,78 @@ TEST_F(ParseFlagsTest, ThrowOnFailureTrue) {
GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(true), false);
}
-# if GTEST_OS_WINDOWS
+// Tests parsing a bad --gtest_filter flag.
+TEST_F(ParseFlagsTest, FilterBad) {
+ const char* argv[] = {"foo.exe", "--gtest_filter", nullptr};
+
+ const char* argv2[] = {"foo.exe", "--gtest_filter", nullptr};
+
+#if defined(GTEST_HAS_ABSL) && defined(GTEST_HAS_DEATH_TEST)
+  // Invalid flag arguments are a fatal error when using Abseil Flags.
+ EXPECT_EXIT(GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(""), true),
+ testing::ExitedWithCode(1),
+ "ERROR: Missing the value for the flag 'gtest_filter'");
+#elif !defined(GTEST_HAS_ABSL)
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(""), true);
+#else
+ static_cast<void>(argv);
+ static_cast<void>(argv2);
+#endif
+}
+
+// Tests parsing --gtest_output (invalid).
+TEST_F(ParseFlagsTest, OutputEmpty) {
+ const char* argv[] = {"foo.exe", "--gtest_output", nullptr};
+
+ const char* argv2[] = {"foo.exe", "--gtest_output", nullptr};
+
+#if defined(GTEST_HAS_ABSL) && defined(GTEST_HAS_DEATH_TEST)
+  // Invalid flag arguments are a fatal error when using Abseil Flags.
+ EXPECT_EXIT(GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), true),
+ testing::ExitedWithCode(1),
+ "ERROR: Missing the value for the flag 'gtest_output'");
+#elif !defined(GTEST_HAS_ABSL)
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), true);
+#else
+ static_cast<void>(argv);
+ static_cast<void>(argv2);
+#endif
+}
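The two tests above rely on EXPECT_EXIT, which runs its statement in a death-test subprocess and passes only if the child satisfies the exit predicate and its stderr matches the given regular expression. A minimal standalone sketch with a hypothetical test and message:

    #include <cstdio>
    #include <cstdlib>

    #include "gtest/gtest.h"

    // Runs in the death-test child process: writes to stderr, then exits
    // with code 1, mimicking a fatal flag-parsing error.
    static void DieWithMessage() {
      std::fprintf(stderr, "ERROR: Missing the value for the flag 'demo'\n");
      std::exit(1);
    }

    TEST(DeathDemoTest, ReportsMissingValue) {
      // Passes if the child exits with code 1 and its stderr output
      // matches the regular expression.
      EXPECT_EXIT(DieWithMessage(), testing::ExitedWithCode(1),
                  "Missing the value");
    }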
+
+#ifdef GTEST_HAS_ABSL
+TEST_F(ParseFlagsTest, AbseilPositionalFlags) {
+ const char* argv[] = {"foo.exe", "--gtest_throw_on_failure=1", "--",
+ "--other_flag", nullptr};
+
+  // When using Abseil flags, unrecognized flags can be passed after a "--"
+  // delimiter, which marks the start of positional arguments. Such flags are
+  // returned through argv.
+ const char* argv2[] = {"foo.exe", "--other_flag", nullptr};
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(true), false);
+}
+#endif
+
+TEST_F(ParseFlagsTest, UnrecognizedFlags) {
+ const char* argv[] = {"foo.exe", "--gtest_filter=abcd", "--other_flag",
+ nullptr};
+
+ const char* argv2[] = {"foo.exe", "--other_flag", nullptr};
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter("abcd"), false);
+}
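UnrecognizedFlags exercises the contract that parsing consumes the --gtest_* flags it recognizes and compacts argv, leaving everything else for the host program. The public entry point behaves the same way; a short sketch:

    #include <cstdio>

    #include "gtest/gtest.h"

    int main(int argc, char** argv) {
      // Removes recognized --gtest_* flags from argc/argv in place;
      // unrecognized flags such as --other_flag survive for the program.
      testing::InitGoogleTest(&argc, argv);
      for (int i = 1; i < argc; ++i) {
        std::printf("left over: %s\n", argv[i]);
      }
      return RUN_ALL_TESTS();
    }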
+
+#ifdef GTEST_OS_WINDOWS
// Tests parsing wide strings.
TEST_F(ParseFlagsTest, WideStrings) {
- const wchar_t* argv[] = {
- L"foo.exe",
- L"--gtest_filter=Foo*",
- L"--gtest_list_tests=1",
- L"--gtest_break_on_failure",
- L"--non_gtest_flag",
- NULL
- };
+ const wchar_t* argv[] = {L"foo.exe",
+ L"--gtest_filter=Foo*",
+ L"--gtest_list_tests=1",
+ L"--gtest_break_on_failure",
+ L"--non_gtest_flag",
+ NULL};
- const wchar_t* argv2[] = {
- L"foo.exe",
- L"--non_gtest_flag",
- NULL
- };
+ const wchar_t* argv2[] = {L"foo.exe", L"--non_gtest_flag", NULL};
Flags expected_flags;
expected_flags.break_on_failure = true;
@@ -6292,7 +6260,7 @@ TEST_F(ParseFlagsTest, WideStrings) {
GTEST_TEST_PARSING_FLAGS_(argv, argv2, expected_flags, false);
}
-# endif // GTEST_OS_WINDOWS
+#endif // GTEST_OS_WINDOWS
#if GTEST_USE_OWN_FLAGFILE_FLAG_
class FlagfileTest : public ParseFlagsTest {
@@ -6340,8 +6308,8 @@ TEST_F(FlagfileTest, Empty) {
// Tests passing a non-empty --gtest_filter flag via --gtest_flagfile.
TEST_F(FlagfileTest, FilterNonEmpty) {
- internal::FilePath flagfile_path(CreateFlagfile(
- "--" GTEST_FLAG_PREFIX_ "filter=abc"));
+ internal::FilePath flagfile_path(
+ CreateFlagfile("--" GTEST_FLAG_PREFIX_ "filter=abc"));
std::string flagfile_flag =
std::string("--" GTEST_FLAG_PREFIX_ "flagfile=") + flagfile_path.c_str();
@@ -6354,10 +6322,10 @@ TEST_F(FlagfileTest, FilterNonEmpty) {
// Tests passing several flags via --gtest_flagfile.
TEST_F(FlagfileTest, SeveralFlags) {
- internal::FilePath flagfile_path(CreateFlagfile(
- "--" GTEST_FLAG_PREFIX_ "filter=abc\n"
- "--" GTEST_FLAG_PREFIX_ "break_on_failure\n"
- "--" GTEST_FLAG_PREFIX_ "list_tests"));
+ internal::FilePath flagfile_path(
+ CreateFlagfile("--" GTEST_FLAG_PREFIX_ "filter=abc\n"
+ "--" GTEST_FLAG_PREFIX_ "break_on_failure\n"
+ "--" GTEST_FLAG_PREFIX_ "list_tests"));
std::string flagfile_flag =
std::string("--" GTEST_FLAG_PREFIX_ "flagfile=") + flagfile_path.c_str();
@@ -6381,8 +6349,7 @@ class CurrentTestInfoTest : public Test {
// the test case is run.
static void SetUpTestSuite() {
// There should be no tests running at this point.
- const TestInfo* test_info =
- UnitTest::GetInstance()->current_test_info();
+ const TestInfo* test_info = UnitTest::GetInstance()->current_test_info();
EXPECT_TRUE(test_info == nullptr)
<< "There should be no tests running at this point.";
}
@@ -6390,8 +6357,7 @@ class CurrentTestInfoTest : public Test {
// Tests that current_test_info() returns NULL after the last test in
// the test case has run.
static void TearDownTestSuite() {
- const TestInfo* test_info =
- UnitTest::GetInstance()->current_test_info();
+ const TestInfo* test_info = UnitTest::GetInstance()->current_test_info();
EXPECT_TRUE(test_info == nullptr)
<< "There should be no tests running at this point.";
}
@@ -6400,8 +6366,7 @@ class CurrentTestInfoTest : public Test {
// Tests that current_test_info() returns TestInfo for currently running
// test by checking the expected test name against the actual one.
TEST_F(CurrentTestInfoTest, WorksForFirstTestInATestSuite) {
- const TestInfo* test_info =
- UnitTest::GetInstance()->current_test_info();
+ const TestInfo* test_info = UnitTest::GetInstance()->current_test_info();
ASSERT_TRUE(nullptr != test_info)
<< "There is a test running so we should have a valid TestInfo.";
EXPECT_STREQ("CurrentTestInfoTest", test_info->test_suite_name())
@@ -6415,8 +6380,7 @@ TEST_F(CurrentTestInfoTest, WorksForFirstTestInATestSuite) {
// use this test to see that the TestInfo object actually changed from
// the previous invocation.
TEST_F(CurrentTestInfoTest, WorksForSecondTestInATestSuite) {
- const TestInfo* test_info =
- UnitTest::GetInstance()->current_test_info();
+ const TestInfo* test_info = UnitTest::GetInstance()->current_test_info();
ASSERT_TRUE(nullptr != test_info)
<< "There is a test running so we should have a valid TestInfo.";
EXPECT_STREQ("CurrentTestInfoTest", test_info->test_suite_name())
@@ -6427,7 +6391,6 @@ TEST_F(CurrentTestInfoTest, WorksForSecondTestInATestSuite) {
} // namespace testing
-
// These two lines test that we can define tests in a namespace that
// has the name "testing" and is nested in another namespace.
namespace my_namespace {
@@ -6476,13 +6439,12 @@ TEST(StreamingAssertionsTest, Unconditional) {
SUCCEED() << "expected success";
EXPECT_NONFATAL_FAILURE(ADD_FAILURE() << "expected failure",
"expected failure");
- EXPECT_FATAL_FAILURE(FAIL() << "expected failure",
- "expected failure");
+ EXPECT_FATAL_FAILURE(FAIL() << "expected failure", "expected failure");
}
#ifdef __BORLANDC__
// Silences warnings: "Condition is always true", "Unreachable code"
-# pragma option push -w-ccc -w-rch
+#pragma option push -w-ccc -w-rch
#endif
TEST(StreamingAssertionsTest, Truth) {
@@ -6505,7 +6467,7 @@ TEST(StreamingAssertionsTest, Truth2) {
#ifdef __BORLANDC__
// Restores warnings after previous "#pragma option push" suppressed them
-# pragma option pop
+#pragma option pop
#endif
TEST(StreamingAssertionsTest, IntegerEquals) {
@@ -6576,28 +6538,32 @@ TEST(StreamingAssertionsTest, FloatingPointEquals) {
TEST(StreamingAssertionsTest, Throw) {
EXPECT_THROW(ThrowAnInteger(), int) << "unexpected failure";
ASSERT_THROW(ThrowAnInteger(), int) << "unexpected failure";
- EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowAnInteger(), bool) <<
- "expected failure", "expected failure");
- EXPECT_FATAL_FAILURE(ASSERT_THROW(ThrowAnInteger(), bool) <<
- "expected failure", "expected failure");
+ EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowAnInteger(), bool)
+ << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_THROW(ThrowAnInteger(), bool)
+ << "expected failure",
+ "expected failure");
}
TEST(StreamingAssertionsTest, NoThrow) {
EXPECT_NO_THROW(ThrowNothing()) << "unexpected failure";
ASSERT_NO_THROW(ThrowNothing()) << "unexpected failure";
- EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowAnInteger()) <<
- "expected failure", "expected failure");
- EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowAnInteger()) <<
- "expected failure", "expected failure");
+ EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowAnInteger())
+ << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowAnInteger()) << "expected failure",
+ "expected failure");
}
TEST(StreamingAssertionsTest, AnyThrow) {
EXPECT_ANY_THROW(ThrowAnInteger()) << "unexpected failure";
ASSERT_ANY_THROW(ThrowAnInteger()) << "unexpected failure";
- EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(ThrowNothing()) <<
- "expected failure", "expected failure");
- EXPECT_FATAL_FAILURE(ASSERT_ANY_THROW(ThrowNothing()) <<
- "expected failure", "expected failure");
+ EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(ThrowNothing())
+ << "expected failure",
+ "expected failure");
+ EXPECT_FATAL_FAILURE(ASSERT_ANY_THROW(ThrowNothing()) << "expected failure",
+ "expected failure");
}
#endif // GTEST_HAS_EXCEPTIONS
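The streaming-assertion tests above lean on EXPECT_NONFATAL_FAILURE and EXPECT_FATAL_FAILURE from gtest-spi.h, which run a statement with a failure interceptor installed and pass only if exactly one failure of the matching severity occurred and its message contains the given substring. A minimal standalone sketch:

    #include "gtest/gtest-spi.h"
    #include "gtest/gtest.h"

    TEST(FailureSpiDemo, CatchesExpectedFailures) {
      // The intercepted EXPECT_EQ produces one nonfatal failure whose
      // message contains "expected failure", so the outer macro passes.
      EXPECT_NONFATAL_FAILURE(EXPECT_EQ(1, 2) << "expected failure",
                              "expected failure");
      // Same idea for a fatal ASSERT_* failure.
      EXPECT_FATAL_FAILURE(ASSERT_TRUE(false) << "expected failure",
                           "expected failure");
    }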
@@ -6605,67 +6571,67 @@ TEST(StreamingAssertionsTest, AnyThrow) {
// Tests that Google Test correctly decides whether to use colors in the output.
TEST(ColoredOutputTest, UsesColorsWhenGTestColorFlagIsYes) {
- GTEST_FLAG(color) = "yes";
+ GTEST_FLAG_SET(color, "yes");
- SetEnv("TERM", "xterm"); // TERM supports colors.
- EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+ SetEnv("TERM", "xterm"); // TERM supports colors.
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY.
- SetEnv("TERM", "dumb"); // TERM doesn't support colors.
- EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+ SetEnv("TERM", "dumb"); // TERM doesn't support colors.
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY.
}
TEST(ColoredOutputTest, UsesColorsWhenGTestColorFlagIsAliasOfYes) {
SetEnv("TERM", "dumb"); // TERM doesn't support colors.
- GTEST_FLAG(color) = "True";
+ GTEST_FLAG_SET(color, "True");
EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY.
- GTEST_FLAG(color) = "t";
+ GTEST_FLAG_SET(color, "t");
EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY.
- GTEST_FLAG(color) = "1";
+ GTEST_FLAG_SET(color, "1");
EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY.
}
TEST(ColoredOutputTest, UsesNoColorWhenGTestColorFlagIsNo) {
- GTEST_FLAG(color) = "no";
+ GTEST_FLAG_SET(color, "no");
- SetEnv("TERM", "xterm"); // TERM supports colors.
- EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
+ SetEnv("TERM", "xterm"); // TERM supports colors.
+ EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
EXPECT_FALSE(ShouldUseColor(false)); // Stdout is not a TTY.
- SetEnv("TERM", "dumb"); // TERM doesn't support colors.
- EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
+ SetEnv("TERM", "dumb"); // TERM doesn't support colors.
+ EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
EXPECT_FALSE(ShouldUseColor(false)); // Stdout is not a TTY.
}
TEST(ColoredOutputTest, UsesNoColorWhenGTestColorFlagIsInvalid) {
SetEnv("TERM", "xterm"); // TERM supports colors.
- GTEST_FLAG(color) = "F";
+ GTEST_FLAG_SET(color, "F");
EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
- GTEST_FLAG(color) = "0";
+ GTEST_FLAG_SET(color, "0");
EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
- GTEST_FLAG(color) = "unknown";
+ GTEST_FLAG_SET(color, "unknown");
EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
}
TEST(ColoredOutputTest, UsesColorsWhenStdoutIsTty) {
- GTEST_FLAG(color) = "auto";
+ GTEST_FLAG_SET(color, "auto");
- SetEnv("TERM", "xterm"); // TERM supports colors.
+ SetEnv("TERM", "xterm"); // TERM supports colors.
EXPECT_FALSE(ShouldUseColor(false)); // Stdout is not a TTY.
EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
}
TEST(ColoredOutputTest, UsesColorsWhenTermSupportsColors) {
- GTEST_FLAG(color) = "auto";
+ GTEST_FLAG_SET(color, "auto");
-#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MINGW
+#if defined(GTEST_OS_WINDOWS) && !defined(GTEST_OS_WINDOWS_MINGW)
// On Windows, we ignore the TERM variable as it's usually not set.
SetEnv("TERM", "dumb");
@@ -6680,46 +6646,49 @@ TEST(ColoredOutputTest, UsesColorsWhenTermSupportsColors) {
// On non-Windows platforms, we rely on TERM to determine if the
// terminal supports colors.
- SetEnv("TERM", "dumb"); // TERM doesn't support colors.
+ SetEnv("TERM", "dumb"); // TERM doesn't support colors.
EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
- SetEnv("TERM", "emacs"); // TERM doesn't support colors.
+ SetEnv("TERM", "emacs"); // TERM doesn't support colors.
EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
- SetEnv("TERM", "vt100"); // TERM doesn't support colors.
+ SetEnv("TERM", "vt100"); // TERM doesn't support colors.
EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
- SetEnv("TERM", "xterm-mono"); // TERM doesn't support colors.
+ SetEnv("TERM", "xterm-mono"); // TERM doesn't support colors.
EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY.
- SetEnv("TERM", "xterm"); // TERM supports colors.
+ SetEnv("TERM", "xterm"); // TERM supports colors.
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+
+ SetEnv("TERM", "xterm-color"); // TERM supports colors.
EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
- SetEnv("TERM", "xterm-color"); // TERM supports colors.
+ SetEnv("TERM", "xterm-kitty"); // TERM supports colors.
EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
- SetEnv("TERM", "xterm-256color"); // TERM supports colors.
+ SetEnv("TERM", "xterm-256color"); // TERM supports colors.
EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
- SetEnv("TERM", "screen"); // TERM supports colors.
+ SetEnv("TERM", "screen"); // TERM supports colors.
EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
SetEnv("TERM", "screen-256color"); // TERM supports colors.
EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
- SetEnv("TERM", "tmux"); // TERM supports colors.
+ SetEnv("TERM", "tmux"); // TERM supports colors.
EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
- SetEnv("TERM", "tmux-256color"); // TERM supports colors.
+ SetEnv("TERM", "tmux-256color"); // TERM supports colors.
EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
- SetEnv("TERM", "rxvt-unicode"); // TERM supports colors.
+ SetEnv("TERM", "rxvt-unicode"); // TERM supports colors.
EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
SetEnv("TERM", "rxvt-unicode-256color"); // TERM supports colors.
- EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
+ EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
- SetEnv("TERM", "linux"); // TERM supports colors.
+ SetEnv("TERM", "linux"); // TERM supports colors.
EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY.
SetEnv("TERM", "cygwin"); // TERM supports colors.
@@ -6842,12 +6811,10 @@ class TestListener : public EmptyTestEventListener {
public:
TestListener() : on_start_counter_(nullptr), is_destroyed_(nullptr) {}
TestListener(int* on_start_counter, bool* is_destroyed)
- : on_start_counter_(on_start_counter),
- is_destroyed_(is_destroyed) {}
+ : on_start_counter_(on_start_counter), is_destroyed_(is_destroyed) {}
~TestListener() override {
- if (is_destroyed_)
- *is_destroyed_ = true;
+ if (is_destroyed_) *is_destroyed_ = true;
}
protected:
@@ -6904,8 +6871,8 @@ TEST(TestEventListenersTest, Append) {
{
TestEventListeners listeners;
listeners.Append(listener);
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
- *UnitTest::GetInstance());
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestProgramStart(*UnitTest::GetInstance());
EXPECT_EQ(1, on_start_counter);
}
EXPECT_TRUE(is_destroyed);
@@ -6948,7 +6915,8 @@ class SequenceTestingListener : public EmptyTestEventListener {
std::vector<std::string>* vector_;
const char* const id_;
- GTEST_DISALLOW_COPY_AND_ASSIGN_(SequenceTestingListener);
+ SequenceTestingListener(const SequenceTestingListener&) = delete;
+ SequenceTestingListener& operator=(const SequenceTestingListener&) = delete;
};
TEST(EventListenerTest, AppendKeepsOrder) {
@@ -6958,32 +6926,32 @@ TEST(EventListenerTest, AppendKeepsOrder) {
listeners.Append(new SequenceTestingListener(&vec, "2nd"));
listeners.Append(new SequenceTestingListener(&vec, "3rd"));
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
- *UnitTest::GetInstance());
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestProgramStart(*UnitTest::GetInstance());
ASSERT_EQ(3U, vec.size());
EXPECT_STREQ("1st.OnTestProgramStart", vec[0].c_str());
EXPECT_STREQ("2nd.OnTestProgramStart", vec[1].c_str());
EXPECT_STREQ("3rd.OnTestProgramStart", vec[2].c_str());
vec.clear();
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramEnd(
- *UnitTest::GetInstance());
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestProgramEnd(*UnitTest::GetInstance());
ASSERT_EQ(3U, vec.size());
EXPECT_STREQ("3rd.OnTestProgramEnd", vec[0].c_str());
EXPECT_STREQ("2nd.OnTestProgramEnd", vec[1].c_str());
EXPECT_STREQ("1st.OnTestProgramEnd", vec[2].c_str());
vec.clear();
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestIterationStart(
- *UnitTest::GetInstance(), 0);
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestIterationStart(*UnitTest::GetInstance(), 0);
ASSERT_EQ(3U, vec.size());
EXPECT_STREQ("1st.OnTestIterationStart", vec[0].c_str());
EXPECT_STREQ("2nd.OnTestIterationStart", vec[1].c_str());
EXPECT_STREQ("3rd.OnTestIterationStart", vec[2].c_str());
vec.clear();
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestIterationEnd(
- *UnitTest::GetInstance(), 0);
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestIterationEnd(*UnitTest::GetInstance(), 0);
ASSERT_EQ(3U, vec.size());
EXPECT_STREQ("3rd.OnTestIterationEnd", vec[0].c_str());
EXPECT_STREQ("2nd.OnTestIterationEnd", vec[1].c_str());
@@ -7003,8 +6971,8 @@ TEST(TestEventListenersTest, Release) {
TestEventListeners listeners;
listeners.Append(listener);
EXPECT_EQ(listener, listeners.Release(listener));
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
- *UnitTest::GetInstance());
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestProgramStart(*UnitTest::GetInstance());
EXPECT_TRUE(listeners.Release(listener) == nullptr);
}
EXPECT_EQ(0, on_start_counter);
@@ -7022,17 +6990,20 @@ TEST(EventListenerTest, SuppressEventForwarding) {
ASSERT_TRUE(TestEventListenersAccessor::EventForwardingEnabled(listeners));
TestEventListenersAccessor::SuppressEventForwarding(&listeners);
ASSERT_FALSE(TestEventListenersAccessor::EventForwardingEnabled(listeners));
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
- *UnitTest::GetInstance());
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestProgramStart(*UnitTest::GetInstance());
EXPECT_EQ(0, on_start_counter);
}
// Tests that events generated by Google Test are not forwarded in
// death test subprocesses.
-TEST(EventListenerDeathTest, EventsNotForwardedInDeathTestSubprecesses) {
- EXPECT_DEATH_IF_SUPPORTED({
- GTEST_CHECK_(TestEventListenersAccessor::EventForwardingEnabled(
- *GetUnitTestImpl()->listeners())) << "expected failure";},
+TEST(EventListenerDeathTest, EventsNotForwardedInDeathTestSubprocesses) {
+ EXPECT_DEATH_IF_SUPPORTED(
+ {
+ GTEST_CHECK_(TestEventListenersAccessor::EventForwardingEnabled(
+ *GetUnitTestImpl()->listeners()))
+ << "expected failure";
+ },
"expected failure");
}
@@ -7049,8 +7020,8 @@ TEST(EventListenerTest, default_result_printer) {
EXPECT_EQ(listener, listeners.default_result_printer());
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
- *UnitTest::GetInstance());
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestProgramStart(*UnitTest::GetInstance());
EXPECT_EQ(1, on_start_counter);
@@ -7063,8 +7034,8 @@ TEST(EventListenerTest, default_result_printer) {
// After broadcasting an event the counter is still the same, indicating
// the listener is not in the list anymore.
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
- *UnitTest::GetInstance());
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestProgramStart(*UnitTest::GetInstance());
EXPECT_EQ(1, on_start_counter);
}
@@ -7086,8 +7057,8 @@ TEST(EventListenerTest, RemovingDefaultResultPrinterWorks) {
EXPECT_FALSE(is_destroyed);
// Broadcasting events now should not affect default_result_printer.
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
- *UnitTest::GetInstance());
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestProgramStart(*UnitTest::GetInstance());
EXPECT_EQ(0, on_start_counter);
}
// Destroying the list should not affect the listener now, too.
@@ -7108,8 +7079,8 @@ TEST(EventListenerTest, default_xml_generator) {
EXPECT_EQ(listener, listeners.default_xml_generator());
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
- *UnitTest::GetInstance());
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestProgramStart(*UnitTest::GetInstance());
EXPECT_EQ(1, on_start_counter);
@@ -7122,8 +7093,8 @@ TEST(EventListenerTest, default_xml_generator) {
// After broadcasting an event the counter is still the same, indicating
// the listener is not in the list anymore.
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
- *UnitTest::GetInstance());
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestProgramStart(*UnitTest::GetInstance());
EXPECT_EQ(1, on_start_counter);
}
@@ -7145,8 +7116,8 @@ TEST(EventListenerTest, RemovingDefaultXmlGeneratorWorks) {
EXPECT_FALSE(is_destroyed);
// Broadcasting events now should not affect default_xml_generator.
- TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
- *UnitTest::GetInstance());
+ TestEventListenersAccessor::GetRepeater(&listeners)
+ ->OnTestProgramStart(*UnitTest::GetInstance());
EXPECT_EQ(0, on_start_counter);
}
// Destroying the list should not affect the listener now, too.
@@ -7154,7 +7125,7 @@ TEST(EventListenerTest, RemovingDefaultXmlGeneratorWorks) {
delete listener;
}
-// Sanity tests to ensure that the alternative, verbose spellings of
+// Tests to ensure that the alternative, verbose spellings of
// some of the macros work. We don't test them thoroughly as that
// would be quite involved. Since their implementations are
// straightforward, and they are rarely used, we'll just rely on the
@@ -7234,28 +7205,26 @@ struct IncompleteType;
// Tests that HasDebugStringAndShortDebugString<T>::value is a compile-time
// constant.
TEST(HasDebugStringAndShortDebugStringTest, ValueIsCompileTimeConstant) {
- GTEST_COMPILE_ASSERT_(
- HasDebugStringAndShortDebugString<HasDebugStringMethods>::value,
- const_true);
- GTEST_COMPILE_ASSERT_(
+ static_assert(HasDebugStringAndShortDebugString<HasDebugStringMethods>::value,
+ "const_true");
+ static_assert(
HasDebugStringAndShortDebugString<InheritsDebugStringMethods>::value,
- const_true);
- GTEST_COMPILE_ASSERT_(HasDebugStringAndShortDebugString<
- const InheritsDebugStringMethods>::value,
- const_true);
- GTEST_COMPILE_ASSERT_(
+ "const_true");
+ static_assert(HasDebugStringAndShortDebugString<
+ const InheritsDebugStringMethods>::value,
+ "const_true");
+ static_assert(
!HasDebugStringAndShortDebugString<WrongTypeDebugStringMethod>::value,
- const_false);
- GTEST_COMPILE_ASSERT_(
+ "const_false");
+ static_assert(
!HasDebugStringAndShortDebugString<NotConstDebugStringMethod>::value,
- const_false);
- GTEST_COMPILE_ASSERT_(
+ "const_false");
+ static_assert(
!HasDebugStringAndShortDebugString<MissingDebugStringMethod>::value,
- const_false);
- GTEST_COMPILE_ASSERT_(
- !HasDebugStringAndShortDebugString<IncompleteType>::value, const_false);
- GTEST_COMPILE_ASSERT_(!HasDebugStringAndShortDebugString<int>::value,
- const_false);
+ "const_false");
+ static_assert(!HasDebugStringAndShortDebugString<IncompleteType>::value,
+ "const_false");
+ static_assert(!HasDebugStringAndShortDebugString<int>::value, "const_false");
}
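HasDebugStringAndShortDebugString<T>::value being usable inside static_assert confirms it is a genuine compile-time constant. A hedged re-creation of how such a member-detection trait can be built with expression SFINAE (illustrative names, not GoogleTest's actual implementation):

    #include <string>
    #include <type_traits>
    #include <utility>

    // Primary template: used when the expression below is ill-formed.
    template <typename T, typename = void>
    struct HasDebugStringSketch : std::false_type {};

    // Specialization: chosen only if a const T has a DebugString() member
    // returning std::string.
    template <typename T>
    struct HasDebugStringSketch<
        T, typename std::enable_if<std::is_same<
               decltype(std::declval<const T&>().DebugString()),
               std::string>::value>::type> : std::true_type {};

    struct WithDebugString {
      std::string DebugString() const { return "ok"; }
    };

    static_assert(HasDebugStringSketch<WithDebugString>::value, "detected");
    static_assert(!HasDebugStringSketch<int>::value, "not detected");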
// Tests that HasDebugStringAndShortDebugString<T>::value is true when T has
@@ -7306,7 +7275,6 @@ TEST(GTestReferenceToConstTest, Works) {
TestGTestReferenceToConst<const std::string&, const std::string&>();
}
-
// Tests IsContainerTest.
class NonContainer {};
@@ -7318,10 +7286,9 @@ TEST(IsContainerTestTest, WorksForNonContainer) {
}
TEST(IsContainerTestTest, WorksForContainer) {
+ EXPECT_EQ(sizeof(IsContainer), sizeof(IsContainerTest<std::vector<bool>>(0)));
EXPECT_EQ(sizeof(IsContainer),
- sizeof(IsContainerTest<std::vector<bool> >(0)));
- EXPECT_EQ(sizeof(IsContainer),
- sizeof(IsContainerTest<std::map<int, double> >(0)));
+ sizeof(IsContainerTest<std::map<int, double>>(0)));
}
struct ConstOnlyContainerWithPointerIterator {
@@ -7370,8 +7337,8 @@ TEST(ArrayEqTest, WorksForDegeneratedArrays) {
TEST(ArrayEqTest, WorksForOneDimensionalArrays) {
// Note that a and b are distinct but compatible types.
- const int a[] = { 0, 1 };
- long b[] = { 0, 1 };
+ const int a[] = {0, 1};
+ long b[] = {0, 1};
EXPECT_TRUE(ArrayEq(a, b));
EXPECT_TRUE(ArrayEq(a, 2, b));
@@ -7381,9 +7348,9 @@ TEST(ArrayEqTest, WorksForOneDimensionalArrays) {
}
TEST(ArrayEqTest, WorksForTwoDimensionalArrays) {
- const char a[][3] = { "hi", "lo" };
- const char b[][3] = { "hi", "lo" };
- const char c[][3] = { "hi", "li" };
+ const char a[][3] = {"hi", "lo"};
+ const char b[][3] = {"hi", "lo"};
+ const char c[][3] = {"hi", "li"};
EXPECT_TRUE(ArrayEq(a, b));
EXPECT_TRUE(ArrayEq(a, 2, b));
@@ -7401,11 +7368,11 @@ TEST(ArrayAwareFindTest, WorksForOneDimensionalArray) {
}
TEST(ArrayAwareFindTest, WorksForTwoDimensionalArray) {
- int a[][2] = { { 0, 1 }, { 2, 3 }, { 4, 5 } };
- const int b[2] = { 2, 3 };
+ int a[][2] = {{0, 1}, {2, 3}, {4, 5}};
+ const int b[2] = {2, 3};
EXPECT_EQ(a + 1, ArrayAwareFind(a, a + 3, b));
- const int c[2] = { 6, 7 };
+ const int c[2] = {6, 7};
EXPECT_EQ(a + 3, ArrayAwareFind(a, a + 3, c));
}
@@ -7431,7 +7398,7 @@ TEST(CopyArrayTest, WorksForOneDimensionalArrays) {
}
TEST(CopyArrayTest, WorksForTwoDimensionalArrays) {
- const int a[2][3] = { { 0, 1, 2 }, { 3, 4, 5 } };
+ const int a[2][3] = {{0, 1, 2}, {3, 4, 5}};
int b[2][3];
#ifndef __BORLANDC__ // C++Builder cannot compile some array size deductions.
CopyArray(a, &b);
@@ -7446,7 +7413,7 @@ TEST(CopyArrayTest, WorksForTwoDimensionalArrays) {
// Tests NativeArray.
TEST(NativeArrayTest, ConstructorFromArrayWorks) {
- const int a[3] = { 0, 1, 2 };
+ const int a[3] = {0, 1, 2};
NativeArray<int> na(a, 3, RelationToSourceReference());
EXPECT_EQ(3U, na.size());
EXPECT_EQ(a, na.begin());
@@ -7476,7 +7443,7 @@ TEST(NativeArrayTest, TypeMembersAreCorrect) {
}
TEST(NativeArrayTest, MethodsWork) {
- const int a[3] = { 0, 1, 2 };
+ const int a[3] = {0, 1, 2};
NativeArray<int> na(a, 3, RelationToSourceCopy());
ASSERT_EQ(3U, na.size());
EXPECT_EQ(3, na.end() - na.begin());
@@ -7495,14 +7462,14 @@ TEST(NativeArrayTest, MethodsWork) {
NativeArray<int> na2(a, 3, RelationToSourceReference());
EXPECT_TRUE(na == na2);
- const int b1[3] = { 0, 1, 1 };
- const int b2[4] = { 0, 1, 2, 3 };
+ const int b1[3] = {0, 1, 1};
+ const int b2[4] = {0, 1, 2, 3};
EXPECT_FALSE(na == NativeArray<int>(b1, 3, RelationToSourceReference()));
EXPECT_FALSE(na == NativeArray<int>(b2, 4, RelationToSourceCopy()));
}
TEST(NativeArrayTest, WorksForTwoDimensionalArray) {
- const char a[2][3] = { "hi", "lo" };
+ const char a[2][3] = {"hi", "lo"};
NativeArray<char[3]> na(a, 2, RelationToSourceReference());
ASSERT_EQ(2U, na.size());
EXPECT_EQ(a, na.begin());
@@ -7767,7 +7734,7 @@ auto* dynamic_test = testing::RegisterTest(
__LINE__, []() -> DynamicUnitTestFixture* { return new DynamicTest; });
TEST(RegisterTest, WasRegistered) {
- auto* unittest = testing::UnitTest::GetInstance();
+ const auto& unittest = testing::UnitTest::GetInstance();
for (int i = 0; i < unittest->total_test_suite_count(); ++i) {
auto* tests = unittest->GetTestSuite(i);
if (tests->name() != std::string("DynamicUnitTestFixture")) continue;
@@ -7782,3 +7749,35 @@ TEST(RegisterTest, WasRegistered) {
FAIL() << "Didn't find the test!";
}
+
+// Tests that the pattern globbing algorithm runs in linear time. If it does
+// not, this test should time out.
+TEST(PatternGlobbingTest, MatchesFilterLinearRuntime) {
+ std::string name(100, 'a'); // Construct the string (a^100)b
+ name.push_back('b');
+
+ std::string pattern; // Construct the string ((a*)^100)b
+ for (int i = 0; i < 100; ++i) {
+ pattern.append("a*");
+ }
+ pattern.push_back('b');
+
+ EXPECT_TRUE(
+ testing::internal::UnitTestOptions::MatchesFilter(name, pattern.c_str()));
+}
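The (a*)^100 pattern against a^100 b is the classic input that makes naive recursive glob matching exponential. A standard way to stay fast (a sketch, not GoogleTest's actual implementation) is the greedy two-pointer matcher: on a mismatch it backtracks only to the most recent '*', so each name position is reconsidered at most once per star and the worst case is O(|name| * |pattern|) rather than exponential:

    #include <string>

    bool GlobMatches(const std::string& name, const std::string& pattern) {
      size_t n = 0, p = 0;
      size_t star = std::string::npos;  // Index of the last '*' seen.
      size_t saved = 0;                 // Name position that '*' covers so far.
      while (n < name.size()) {
        if (p < pattern.size() &&
            (pattern[p] == '?' || pattern[p] == name[n])) {
          ++n;  // Direct match: advance both cursors.
          ++p;
        } else if (p < pattern.size() && pattern[p] == '*') {
          star = p++;  // Remember the star and first try matching it empty.
          saved = n;
        } else if (star != std::string::npos) {
          p = star + 1;  // Backtrack: let the last '*' absorb one more char.
          n = ++saved;
        } else {
          return false;  // Mismatch with no star to fall back on.
        }
      }
      while (p < pattern.size() && pattern[p] == '*') ++p;  // Trailing stars.
      return p == pattern.size();
    }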
+
+TEST(PatternGlobbingTest, MatchesFilterWithMultiplePatterns) {
+ const std::string name = "aaaa";
+ EXPECT_TRUE(testing::internal::UnitTestOptions::MatchesFilter(name, "a*"));
+ EXPECT_TRUE(testing::internal::UnitTestOptions::MatchesFilter(name, "a*:"));
+ EXPECT_FALSE(testing::internal::UnitTestOptions::MatchesFilter(name, "ab"));
+ EXPECT_FALSE(testing::internal::UnitTestOptions::MatchesFilter(name, "ab:"));
+ EXPECT_TRUE(testing::internal::UnitTestOptions::MatchesFilter(name, "ab:a*"));
+}
+
+TEST(PatternGlobbingTest, MatchesFilterEdgeCases) {
+ EXPECT_FALSE(testing::internal::UnitTestOptions::MatchesFilter("", "*a"));
+ EXPECT_TRUE(testing::internal::UnitTestOptions::MatchesFilter("", "*"));
+ EXPECT_FALSE(testing::internal::UnitTestOptions::MatchesFilter("a", ""));
+ EXPECT_TRUE(testing::internal::UnitTestOptions::MatchesFilter("", ""));
+}
diff --git a/googletest/test/gtest_xml_outfile2_test_.cc b/googletest/test/gtest_xml_outfile2_test_.cc
index f9a2a6e9..4a76429c 100644
--- a/googletest/test/gtest_xml_outfile2_test_.cc
+++ b/googletest/test/gtest_xml_outfile2_test_.cc
@@ -30,6 +30,8 @@
// gtest_xml_outfile2_test_ writes some xml via TestProperty used by
// gtest_xml_outfiles_test.py
+#include <atomic>
+
#include "gtest/gtest.h"
class PropertyTwo : public testing::Test {
@@ -38,6 +40,38 @@ class PropertyTwo : public testing::Test {
void TearDown() override { RecordProperty("TearDownProp", 2); }
};
-TEST_F(PropertyTwo, TestSomeProperties) {
- RecordProperty("TestSomeProperty", 2);
+TEST_F(PropertyTwo, TestInt64ConvertibleProperties) {
+ float float_prop = 3.25;
+ RecordProperty("TestFloatProperty", float_prop);
+
+ double double_prop = 4.75;
+ RecordProperty("TestDoubleProperty", double_prop);
+
+  // Validate that we can write an unsigned type such as size_t as a property
+ size_t size_t_prop = 5;
+ RecordProperty("TestSizetProperty", size_t_prop);
+
+ bool bool_prop = true;
+ RecordProperty("TestBoolProperty", bool_prop);
+
+ char char_prop = 'A';
+ RecordProperty("TestCharProperty", char_prop);
+
+ int16_t int16_prop = 6;
+ RecordProperty("TestInt16Property", int16_prop);
+
+ int32_t int32_prop = 7;
+ RecordProperty("TestInt32Property", int32_prop);
+
+ int64_t int64_prop = 8;
+ RecordProperty("TestInt64Property", int64_prop);
+
+ enum Foo {
+ NINE = 9,
+ };
+ Foo enum_prop = NINE;
+ RecordProperty("TestEnumProperty", enum_prop);
+
+ std::atomic<int> atomic_int_prop(10);
+ RecordProperty("TestAtomicIntProperty", atomic_int_prop);
}
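The test above exercises the overload of RecordProperty that accepts any value implicitly convertible to int64_t. Note that the expected output below records 3.25 and true verbatim, so the value is evidently stringified by streaming rather than by an integer cast. A hedged sketch of such an overload (illustrative names, not GoogleTest's actual code):

    #include <cstdint>
    #include <ios>
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <type_traits>

    // Stand-in for the library's internal key/value property storage.
    void RecordPropertyString(const std::string& key, const std::string& value) {
      std::cout << key << "=" << value << "\n";
    }

    // Participates in overload resolution only for types implicitly
    // convertible to int64_t: bool, char, int16_t, size_t, floats, unscoped
    // enums, and std::atomic<int> (via its conversion operator) all qualify.
    template <typename T,
              typename std::enable_if<std::is_convertible<T, int64_t>::value,
                                      int>::type = 0>
    void RecordPropertySketch(const std::string& key, const T& value) {
      std::ostringstream os;
      os << std::boolalpha << value;  // Streams 3.25 as "3.25", true as "true".
      RecordPropertyString(key, os.str());
    }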
diff --git a/googletest/test/gtest_xml_outfiles_test.py b/googletest/test/gtest_xml_outfiles_test.py
index ac66feb6..d17cc0c9 100755
--- a/googletest/test/gtest_xml_outfiles_test.py
+++ b/googletest/test/gtest_xml_outfiles_test.py
@@ -33,8 +33,8 @@
import os
from xml.dom import minidom, Node
-import gtest_test_utils
-import gtest_xml_test_utils
+from googletest.test import gtest_test_utils
+from googletest.test import gtest_xml_test_utils
GTEST_OUTPUT_SUBDIR = "xml_outfiles"
GTEST_OUTPUT_1_TEST = "gtest_xml_outfile1_test_"
@@ -43,7 +43,7 @@ GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_"
EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyOne" tests="1" failures="0" skipped="0" disabled="0" errors="0" time="*" timestamp="*">
- <testcase name="TestSomeProperties" status="run" result="completed" time="*" timestamp="*" classname="PropertyOne">
+ <testcase name="TestSomeProperties" file="gtest_xml_outfile1_test_.cc" line="41" status="run" result="completed" time="*" timestamp="*" classname="PropertyOne">
<properties>
<property name="SetUpProp" value="1"/>
<property name="TestSomeProperty" value="1"/>
@@ -57,10 +57,19 @@ EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyTwo" tests="1" failures="0" skipped="0" disabled="0" errors="0" time="*" timestamp="*">
- <testcase name="TestSomeProperties" status="run" result="completed" time="*" timestamp="*" classname="PropertyTwo">
+ <testcase name="TestInt64ConvertibleProperties" file="gtest_xml_outfile2_test_.cc" line="43" status="run" result="completed" time="*" timestamp="*" classname="PropertyTwo">
<properties>
<property name="SetUpProp" value="2"/>
- <property name="TestSomeProperty" value="2"/>
+ <property name="TestFloatProperty" value="3.25"/>
+ <property name="TestDoubleProperty" value="4.75"/>
+ <property name="TestSizetProperty" value="5"/>
+ <property name="TestBoolProperty" value="true"/>
+ <property name="TestCharProperty" value="A"/>
+ <property name="TestInt16Property" value="6"/>
+ <property name="TestInt32Property" value="7"/>
+ <property name="TestInt64Property" value="8"/>
+ <property name="TestEnumProperty" value="9"/>
+ <property name="TestAtomicIntProperty" value="10"/>
<property name="TearDownProp" value="2"/>
</properties>
</testcase>
@@ -76,8 +85,9 @@ class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
# We want the trailing '/' that the last "" provides in os.path.join, for
# telling Google Test to create an output directory instead of a single file
# for xml output.
- self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),
- GTEST_OUTPUT_SUBDIR, "")
+ self.output_dir_ = os.path.join(
+ gtest_test_utils.GetTempDir(), GTEST_OUTPUT_SUBDIR, ""
+ )
self.DeleteFilesAndDir()
def tearDown(self):
@@ -106,17 +116,20 @@ class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
def _TestOutFile(self, test_name, expected_xml):
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)
command = [gtest_prog_path, "--gtest_output=xml:%s" % self.output_dir_]
- p = gtest_test_utils.Subprocess(command,
- working_dir=gtest_test_utils.GetTempDir())
- self.assert_(p.exited)
- self.assertEquals(0, p.exit_code)
+ p = gtest_test_utils.Subprocess(
+ command, working_dir=gtest_test_utils.GetTempDir()
+ )
+ self.assertTrue(p.exited)
+ self.assertEqual(0, p.exit_code)
output_file_name1 = test_name + ".xml"
output_file1 = os.path.join(self.output_dir_, output_file_name1)
- output_file_name2 = 'lt-' + output_file_name1
+ output_file_name2 = "lt-" + output_file_name1
output_file2 = os.path.join(self.output_dir_, output_file_name2)
- self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
- output_file1)
+ self.assertTrue(
+ os.path.isfile(output_file1) or os.path.isfile(output_file2),
+ output_file1,
+ )
expected = minidom.parseString(expected_xml)
if os.path.isfile(output_file1):
@@ -124,8 +137,7 @@ class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
else:
actual = minidom.parse(output_file2)
self.NormalizeXml(actual.documentElement)
- self.AssertEquivalentNodes(expected.documentElement,
- actual.documentElement)
+ self.AssertEquivalentNodes(expected.documentElement, actual.documentElement)
expected.unlink()
actual.unlink()
diff --git a/googletest/test/gtest_xml_output_unittest.py b/googletest/test/gtest_xml_output_unittest.py
index eade7aac..422569e4 100755
--- a/googletest/test/gtest_xml_output_unittest.py
+++ b/googletest/test/gtest_xml_output_unittest.py
@@ -38,8 +38,8 @@ import re
import sys
from xml.dom import minidom, Node
-import gtest_test_utils
-import gtest_xml_test_utils
+from googletest.test import gtest_test_utils
+from googletest.test import gtest_xml_test_utils
GTEST_FILTER_FLAG = '--gtest_filter'
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
@@ -59,135 +59,138 @@ SUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv
if SUPPORTS_STACK_TRACES:
STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
+ STACK_TRACE_ENTITY_TEMPLATE = ''
else:
- STACK_TRACE_TEMPLATE = ''
+ STACK_TRACE_TEMPLATE = '\n'
+ STACK_TRACE_ENTITY_TEMPLATE = '&#x0A;'
# unittest.main() can't handle unknown flags
sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG)
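The new %(stack_entity)s placeholder reflects how newlines survive in the two places a failure message appears: inside an XML attribute value a literal newline would be normalized away by parsers, so it must be written as the character reference &#x0A;, while the CDATA section can carry raw newlines. A hedged C++ sketch of the attribute escaping this implies (illustrative, not the XML printer's actual code):

    #include <string>

    std::string EscapeXmlAttribute(const std::string& value) {
      std::string out;
      for (char c : value) {
        switch (c) {
          case '<':  out += "&lt;";   break;
          case '>':  out += "&gt;";   break;
          case '&':  out += "&amp;";  break;
          case '"':  out += "&quot;"; break;
          case '\n': out += "&#x0A;"; break;  // Keep newlines in attributes.
          default:   out += c;        break;
        }
      }
      return out;
    }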
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="26" failures="5" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
+ <testcase name="Succeeds" file="gtest_xml_output_unittest_.cc" line="53" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="Fails" status="run" result="completed" time="*" timestamp="*" classname="FailedTest">
- <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A; 1&#x0A; 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+ <testcase name="Fails" file="gtest_xml_output_unittest_.cc" line="61" status="run" result="completed" time="*" timestamp="*" classname="FailedTest">
+ <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A; 1&#x0A; 2%(stack_entity)s" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Expected equality of these values:
1
2%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="MixedResultTest"/>
- <testcase name="Fails" status="run" result="completed" time="*" timestamp="*" classname="MixedResultTest">
- <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A; 1&#x0A; 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+ <testcase name="Succeeds" file="gtest_xml_output_unittest_.cc" line="88" status="run" result="completed" time="*" timestamp="*" classname="MixedResultTest"/>
+ <testcase name="Fails" file="gtest_xml_output_unittest_.cc" line="93" status="run" result="completed" time="*" timestamp="*" classname="MixedResultTest">
+ <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A; 1&#x0A; 2%(stack_entity)s" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Expected equality of these values:
1
2%(stack)s]]></failure>
- <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A; 2&#x0A; 3" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+ <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A; 2&#x0A; 3%(stack_entity)s" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Expected equality of these values:
2
3%(stack)s]]></failure>
</testcase>
- <testcase name="DISABLED_test" status="notrun" result="suppressed" time="*" timestamp="*" classname="MixedResultTest"/>
+ <testcase name="DISABLED_test" file="gtest_xml_output_unittest_.cc" line="98" status="notrun" result="suppressed" time="*" timestamp="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="OutputsCData" status="run" result="completed" time="*" timestamp="*" classname="XmlQuotingTest">
- <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Failed&#x0A;XML output: &lt;?xml encoding=&quot;utf-8&quot;&gt;&lt;top&gt;&lt;![CDATA[cdata text]]&gt;&lt;/top&gt;" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+ <testcase name="OutputsCData" file="gtest_xml_output_unittest_.cc" line="102" status="run" result="completed" time="*" timestamp="*" classname="XmlQuotingTest">
+ <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Failed&#x0A;XML output: &lt;?xml encoding=&quot;utf-8&quot;&gt;&lt;top&gt;&lt;![CDATA[cdata text]]&gt;&lt;/top&gt;%(stack_entity)s" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]&gt;<![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="InvalidCharactersInMessage" status="run" result="completed" time="*" timestamp="*" classname="InvalidCharactersTest">
- <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Failed&#x0A;Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+ <testcase name="InvalidCharactersInMessage" file="gtest_xml_output_unittest_.cc" line="109" status="run" result="completed" time="*" timestamp="*" classname="InvalidCharactersTest">
+ <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Failed&#x0A;Invalid characters in brackets []%(stack_entity)s" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="DISABLED_test_not_run" status="notrun" result="suppressed" time="*" timestamp="*" classname="DisabledTest"/>
+ <testcase name="DISABLED_test_not_run" file="gtest_xml_output_unittest_.cc" line="68" status="notrun" result="suppressed" time="*" timestamp="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="SkippedTest" tests="3" failures="1" disabled="0" skipped="2" errors="0" time="*" timestamp="*">
- <testcase name="Skipped" status="run" result="skipped" time="*" timestamp="*" classname="SkippedTest">
- <skipped message="gtest_xml_output_unittest_.cc:*&#x0A;"><![CDATA[gtest_xml_output_unittest_.cc:*
+ <testcase name="Skipped" status="run" file="gtest_xml_output_unittest_.cc" line="75" result="skipped" time="*" timestamp="*" classname="SkippedTest">
+ <skipped message="gtest_xml_output_unittest_.cc:*&#x0A;%(stack_entity)s"><![CDATA[gtest_xml_output_unittest_.cc:*
%(stack)s]]></skipped>
</testcase>
- <testcase name="SkippedWithMessage" status="run" result="skipped" time="*" timestamp="*" classname="SkippedTest">
- <skipped message="gtest_xml_output_unittest_.cc:*&#x0A;It is good practice to tell why you skip a test."><![CDATA[gtest_xml_output_unittest_.cc:*
+ <testcase name="SkippedWithMessage" file="gtest_xml_output_unittest_.cc" line="79" status="run" result="skipped" time="*" timestamp="*" classname="SkippedTest">
+ <skipped message="gtest_xml_output_unittest_.cc:*&#x0A;It is good practice to tell why you skip a test.%(stack_entity)s"><![CDATA[gtest_xml_output_unittest_.cc:*
It is good practice to tell why you skip a test.%(stack)s]]></skipped>
</testcase>
- <testcase name="SkippedAfterFailure" status="run" result="completed" time="*" timestamp="*" classname="SkippedTest">
- <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A; 1&#x0A; 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+ <testcase name="SkippedAfterFailure" file="gtest_xml_output_unittest_.cc" line="83" status="run" result="completed" time="*" timestamp="*" classname="SkippedTest">
+ <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A; 1&#x0A; 2%(stack_entity)s" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Expected equality of these values:
1
2%(stack)s]]></failure>
- <skipped message="gtest_xml_output_unittest_.cc:*&#x0A;It is good practice to tell why you skip a test."><![CDATA[gtest_xml_output_unittest_.cc:*
+ <skipped message="gtest_xml_output_unittest_.cc:*&#x0A;It is good practice to tell why you skip a test.%(stack_entity)s"><![CDATA[gtest_xml_output_unittest_.cc:*
It is good practice to tell why you skip a test.%(stack)s]]></skipped>
</testcase>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*" SetUpTestSuite="yes" TearDownTestSuite="aye">
- <testcase name="OneProperty" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
+ <testcase name="OneProperty" file="gtest_xml_output_unittest_.cc" line="121" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
<properties>
<property name="key_1" value="1"/>
</properties>
</testcase>
- <testcase name="IntValuedProperty" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
+ <testcase name="IntValuedProperty" file="gtest_xml_output_unittest_.cc" line="125" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
<properties>
<property name="key_int" value="1"/>
</properties>
</testcase>
- <testcase name="ThreeProperties" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
+ <testcase name="ThreeProperties" file="gtest_xml_output_unittest_.cc" line="129" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
<properties>
<property name="key_1" value="1"/>
<property name="key_2" value="2"/>
<property name="key_3" value="3"/>
</properties>
</testcase>
- <testcase name="TwoValuesForOneKeyUsesLastValue" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
+ <testcase name="TwoValuesForOneKeyUsesLastValue" file="gtest_xml_output_unittest_.cc" line="135" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
<properties>
<property name="key_1" value="2"/>
</properties>
</testcase>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="RecordProperty" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
+ <testcase name="RecordProperty" file="gtest_xml_output_unittest_.cc" line="140" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
<properties>
<property name="key" value="1"/>
</properties>
</testcase>
- <testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
+ <testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" file="gtest_xml_output_unittest_.cc" line="153" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
<properties>
<property name="key_for_utility_int" value="1"/>
</properties>
</testcase>
- <testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
+ <testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" file="gtest_xml_output_unittest_.cc" line="157" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
<properties>
<property name="key_for_utility_string" value="1"/>
</properties>
</testcase>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="HasValueParamAttribute/0" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
- <testcase name="HasValueParamAttribute/1" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
- <testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
- <testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+ <testcase name="HasValueParamAttribute/0" file="gtest_xml_output_unittest_.cc" line="164" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+ <testcase name="HasValueParamAttribute/1" file="gtest_xml_output_unittest_.cc" line="164" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+ <testcase name="AnotherTestThatHasValueParamAttribute/0" file="gtest_xml_output_unittest_.cc" line="165" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+ <testcase name="AnotherTestThatHasValueParamAttribute/1" file="gtest_xml_output_unittest_.cc" line="165" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/0" />
+ <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="173" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/1" />
+ <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="173" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestSuite/0" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/0" />
+ <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="180" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestSuite/1" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/1" />
+ <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="180" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/1" />
</testsuite>
</testsuites>""" % {
- 'stack': STACK_TRACE_TEMPLATE
+ 'stack': STACK_TRACE_TEMPLATE,
+ 'stack_entity': STACK_TRACE_ENTITY_TEMPLATE,
}
EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
@@ -195,24 +198,24 @@ EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" skipped="0"
errors="0" time="*" timestamp="*">
- <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
+ <testcase name="Succeeds" file="gtest_xml_output_unittest_.cc" line="53" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
</testsuite>
</testsuites>"""
EXPECTED_SHARDED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="3" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
+ <testcase name="Succeeds" file="gtest_xml_output_unittest_.cc" line="53" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*" SetUpTestSuite="yes" TearDownTestSuite="aye">
- <testcase name="IntValuedProperty" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
+ <testcase name="IntValuedProperty" file="gtest_xml_output_unittest_.cc" line="125" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
<properties>
<property name="key_int" value="1"/>
</properties>
</testcase>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
- <testcase name="HasValueParamAttribute/0" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+ <testcase name="HasValueParamAttribute/0" file="gtest_xml_output_unittest_.cc" line="164" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
</testsuite>
</testsuites>"""
@@ -221,32 +224,37 @@ EXPECTED_NO_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
timestamp="*" name="AllTests">
<testsuite name="NonTestSuiteFailure" tests="1" failures="1" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
<testcase name="" status="run" result="completed" time="*" timestamp="*" classname="">
- <failure message="gtest_no_test_unittest.cc:*&#x0A;Expected equality of these values:&#x0A; 1&#x0A; 2" type=""><![CDATA[gtest_no_test_unittest.cc:*
+ <failure message="gtest_no_test_unittest.cc:*&#x0A;Expected equality of these values:&#x0A; 1&#x0A; 2%(stack_entity)s" type=""><![CDATA[gtest_no_test_unittest.cc:*
Expected equality of these values:
1
2%(stack)s]]></failure>
</testcase>
</testsuite>
</testsuites>""" % {
- 'stack': STACK_TRACE_TEMPLATE
+ 'stack': STACK_TRACE_TEMPLATE,
+ 'stack_entity': STACK_TRACE_ENTITY_TEMPLATE,
}
GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
-SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
- [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
+SUPPORTS_TYPED_TESTS = (
+ 'TypedTest'
+ in gtest_test_utils.Subprocess(
+ [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False
+ ).output
+)
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
- """
- Unit test for Google Test's XML output functionality.
- """
+ """Unit test for Google Test's XML output functionality."""
# This test currently breaks on platforms that do not support typed and
# type-parameterized tests, so we don't run it under them.
if SUPPORTS_TYPED_TESTS:
+
def testNonEmptyXmlOutput(self):
- """
+ """Generates non-empty XML and verifies it matches the expected output.
+
Runs a test program that generates a non-empty XML output, and
tests that the XML output is expected.
"""
@@ -273,28 +281,34 @@ class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
# parse the expected datetime manually.
match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
self.assertTrue(
- re.match,
- 'XML datettime string %s has incorrect format' % date_time_str)
+ match, 'XML datetime string %s has incorrect format' % date_time_str
+ )
date_time_from_xml = datetime.datetime(
- year=int(match.group(1)), month=int(match.group(2)),
- day=int(match.group(3)), hour=int(match.group(4)),
- minute=int(match.group(5)), second=int(match.group(6)))
+ year=int(match.group(1)),
+ month=int(match.group(2)),
+ day=int(match.group(3)),
+ hour=int(match.group(4)),
+ minute=int(match.group(5)),
+ second=int(match.group(6)),
+ )
time_delta = abs(datetime.datetime.now() - date_time_from_xml)
# timestamp value should be near the current local time
- self.assertTrue(time_delta < datetime.timedelta(seconds=600),
- 'time_delta is %s' % time_delta)
+ self.assertLess(time_delta, datetime.timedelta(seconds=600))
actual.unlink()
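On a modern Python the manual regex parse and the recency assertion could be collapsed into strptime, e.g. (a sketch, not what the patch does):

import datetime

def assert_recent(date_time_str, max_skew=datetime.timedelta(seconds=600)):
    # Parse the 'YYYY-MM-DDTHH:MM:SS' prefix, ignoring fractional seconds.
    parsed = datetime.datetime.strptime(date_time_str[:19], '%Y-%m-%dT%H:%M:%S')
    skew = abs(datetime.datetime.now() - parsed)
    assert skew < max_skew, 'time_delta is %s' % skew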
def testDefaultOutputFile(self):
- """
+ """Tests XML file with default name is created when name is not specified.
+
Confirms that Google Test produces an XML output file with the expected
default name if no name is explicitly specified.
"""
- output_file = os.path.join(gtest_test_utils.GetTempDir(),
- GTEST_DEFAULT_OUTPUT_FILE)
+ output_file = os.path.join(
+ gtest_test_utils.GetTempDir(), GTEST_DEFAULT_OUTPUT_FILE
+ )
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
- 'gtest_no_test_unittest')
+ 'gtest_no_test_unittest'
+ )
try:
os.remove(output_file)
except OSError:
@@ -304,39 +318,47 @@ class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
p = gtest_test_utils.Subprocess(
[gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG],
- working_dir=gtest_test_utils.GetTempDir())
- self.assert_(p.exited)
- self.assertEquals(0, p.exit_code)
- self.assert_(os.path.isfile(output_file))
+ working_dir=gtest_test_utils.GetTempDir(),
+ )
+ self.assertTrue(p.exited)
+ self.assertEqual(0, p.exit_code)
+ self.assertTrue(os.path.isfile(output_file))
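For reference, the --gtest_output value decides the destination; the forms below are illustrative (prog stands for any Google Test binary path):

# No path: writes test_detail.xml into the current working directory.
cmd_default = [prog, '--gtest_output=xml']
# Explicit file: writes exactly to the given path.
cmd_file = [prog, '--gtest_output=xml:/tmp/report.xml']
# Trailing slash: treats the value as a directory; the report file is named
# after the test binary.
cmd_dir = [prog, '--gtest_output=xml:/tmp/reports/']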
def testSuppressedXmlOutput(self):
- """
+ """Verifies XML output is suppressed if default listener is shut down.
+
Tests that no XML file is generated if the default XML listener is
shut down before RUN_ALL_TESTS is invoked.
"""
- xml_path = os.path.join(gtest_test_utils.GetTempDir(),
- GTEST_PROGRAM_NAME + 'out.xml')
+ xml_path = os.path.join(
+ gtest_test_utils.GetTempDir(), GTEST_PROGRAM_NAME + 'out.xml'
+ )
if os.path.isfile(xml_path):
os.remove(xml_path)
- command = [GTEST_PROGRAM_PATH,
- '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
- '--shut_down_xml']
+ command = [
+ GTEST_PROGRAM_PATH,
+ '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
+ '--shut_down_xml',
+ ]
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
# p.signal is available only if p.terminated_by_signal is True.
self.assertFalse(
p.terminated_by_signal,
- '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
+ '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal),
+ )
else:
- self.assert_(p.exited)
- self.assertEquals(1, p.exit_code,
- "'%s' exited with code %s, which doesn't match "
- 'the expected exit code %s.'
- % (command, p.exit_code, 1))
+ self.assertTrue(p.exited)
+ self.assertEqual(
+ 1,
+ p.exit_code,
+ "'%s' exited with code %s, which doesn't match "
+ 'the expected exit code %s.' % (command, p.exit_code, 1),
+ )
- self.assert_(not os.path.isfile(xml_path))
+ self.assertFalse(os.path.isfile(xml_path))
def testFilteredTestXmlOutput(self):
"""Verifies XML output when a filter is applied.
@@ -345,8 +367,12 @@ class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
non-selected tests do not show up in the XML output.
"""
- self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED_TEST_XML, 0,
- extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])
+ self._TestXmlOutput(
+ GTEST_PROGRAM_NAME,
+ EXPECTED_FILTERED_TEST_XML,
+ 0,
+ extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG],
+ )
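--gtest_filter matches the full 'Suite.Test' name against ':'-separated wildcard patterns; for the positive-only case used here the semantics are close to fnmatch. A hedged emulation:

import fnmatch

def matches_positive_filter(full_test_name, gtest_filter):
    # Real gtest also honors a '-'-separated negative section, omitted here.
    return any(fnmatch.fnmatchcase(full_test_name, pattern)
               for pattern in gtest_filter.split(':'))

assert matches_positive_filter('SuccessfulTest.Succeeds', 'SuccessfulTest.*')
assert not matches_positive_filter('FailedTest.Fails', 'SuccessfulTest.*')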
def testShardedTestXmlOutput(self):
"""Verifies XML output when run using multiple shards.
@@ -359,53 +385,81 @@ class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
GTEST_PROGRAM_NAME,
EXPECTED_SHARDED_TEST_XML,
0,
- extra_env={SHARD_INDEX_ENV_VAR: '0',
- TOTAL_SHARDS_ENV_VAR: '10'})
+ extra_env={SHARD_INDEX_ENV_VAR: '0', TOTAL_SHARDS_ENV_VAR: '10'},
+ )
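With GTEST_SHARD_INDEX=0 and GTEST_TOTAL_SHARDS=10, the binary deals tests out round-robin and runs only every tenth one, which is why the sharded golden file above lists just three test cases. A sketch of the selection rule (gtest implements it in ShouldRunTestOnShard):

def should_run_on_shard(test_index, shard_index, total_shards):
    # Tests are numbered in declaration order and dealt out round-robin.
    return test_index % total_shards == shard_index

print([i for i in range(25) if should_run_on_shard(i, 0, 10)])  # [0, 10, 20]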
+
+ def _GetXmlOutput(
+ self, gtest_prog_name, extra_args, extra_env, expected_exit_code
+ ):
+ """Returns the XML output generated by running the program gtest_prog_name.
- def _GetXmlOutput(self, gtest_prog_name, extra_args, extra_env,
- expected_exit_code):
- """
- Returns the xml output generated by running the program gtest_prog_name.
Furthermore, the program's exit code must be expected_exit_code.
+
+ Args:
+ gtest_prog_name: Program to run.
+ extra_args: Optional arguments to pass to program.
+ extra_env: Optional environment variables to set.
+ expected_exit_code: Expected exit code from running gtest_prog_name.
"""
- xml_path = os.path.join(gtest_test_utils.GetTempDir(),
- gtest_prog_name + 'out.xml')
+ xml_path = os.path.join(
+ gtest_test_utils.GetTempDir(), gtest_prog_name + 'out.xml'
+ )
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
- command = ([gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)] +
- extra_args)
+ command = [
+ gtest_prog_path,
+ '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
+ ] + extra_args
environ_copy = os.environ.copy()
if extra_env:
environ_copy.update(extra_env)
p = gtest_test_utils.Subprocess(command, env=environ_copy)
if p.terminated_by_signal:
- self.assert_(False,
- '%s was killed by signal %d' % (gtest_prog_name, p.signal))
+ self.fail('%s was killed by signal %d' % (gtest_prog_name, p.signal))
else:
- self.assert_(p.exited)
- self.assertEquals(expected_exit_code, p.exit_code,
- "'%s' exited with code %s, which doesn't match "
- 'the expected exit code %s.'
- % (command, p.exit_code, expected_exit_code))
+ self.assertTrue(p.exited)
+ self.assertEqual(
+ expected_exit_code,
+ p.exit_code,
+ "'%s' exited with code %s, which doesn't match "
+ 'the expected exit code %s.'
+ % (command, p.exit_code, expected_exit_code),
+ )
actual = minidom.parse(xml_path)
return actual
- def _TestXmlOutput(self, gtest_prog_name, expected_xml,
- expected_exit_code, extra_args=None, extra_env=None):
- """
+ def _TestXmlOutput(
+ self,
+ gtest_prog_name,
+ expected_xml,
+ expected_exit_code,
+ extra_args=None,
+ extra_env=None,
+ ):
+ """Asserts that the XML document matches.
+
Asserts that the XML document generated by running the program
gtest_prog_name matches expected_xml, a string containing another
XML document. Furthermore, the program's exit code must be
expected_exit_code.
+
+ Args:
+ gtest_prog_name: Program to run.
+ expected_xml: String containing the expected XML document.
+ expected_exit_code: Expected exit code from running gtest_prog_name.
+ extra_args: Optional arguments to pass to program.
+ extra_env: Optional environment variables to set.
"""
- actual = self._GetXmlOutput(gtest_prog_name, extra_args or [],
- extra_env or {}, expected_exit_code)
+ actual = self._GetXmlOutput(
+ gtest_prog_name, extra_args or [], extra_env or {}, expected_exit_code
+ )
expected = minidom.parseString(expected_xml)
self.NormalizeXml(actual.documentElement)
- self.AssertEquivalentNodes(expected.documentElement,
- actual.documentElement)
+ self.AssertEquivalentNodes(expected.documentElement, actual.documentElement)
expected.unlink()
actual.unlink()
diff --git a/googletest/test/gtest_xml_output_unittest_.cc b/googletest/test/gtest_xml_output_unittest_.cc
index c0036aae..0ab33022 100644
--- a/googletest/test/gtest_xml_output_unittest_.cc
+++ b/googletest/test/gtest_xml_output_unittest_.cc
@@ -35,18 +35,20 @@
//
// This program will be invoked from a Python unit test. Don't run it
// directly.
+// clang-format off
+
+#include <string>
#include "gtest/gtest.h"
using ::testing::InitGoogleTest;
+using ::testing::Test;
using ::testing::TestEventListeners;
using ::testing::TestWithParam;
using ::testing::UnitTest;
-using ::testing::Test;
using ::testing::Values;
-class SuccessfulTest : public Test {
-};
+class SuccessfulTest : public Test {};
TEST_F(SuccessfulTest, Succeeds) {
SUCCEED() << "This is a success.";
@@ -191,3 +193,5 @@ int main(int argc, char** argv) {
testing::Test::RecordProperty("ad_hoc_property", "42");
return RUN_ALL_TESTS();
}
+
+// clang-format on
diff --git a/googletest/test/gtest_xml_test_utils.py b/googletest/test/gtest_xml_test_utils.py
index ec42c62c..74e0f4a0 100755
--- a/googletest/test/gtest_xml_test_utils.py
+++ b/googletest/test/gtest_xml_test_utils.py
@@ -31,18 +31,17 @@
import re
from xml.dom import minidom, Node
-import gtest_test_utils
+from googletest.test import gtest_test_utils
GTEST_DEFAULT_OUTPUT_FILE = 'test_detail.xml'
-class GTestXMLTestCase(gtest_test_utils.TestCase):
- """
- Base class for tests of Google Test's XML output functionality.
- """
+class GTestXMLTestCase(gtest_test_utils.TestCase):
+ """Base class for tests of Google Test's XML output functionality."""
def AssertEquivalentNodes(self, expected_node, actual_node):
- """
+ """Asserts that actual_node is equivalent to expected_node.
+
Asserts that actual_node (a DOM node object) is equivalent to
expected_node (another DOM node object), in that either both of
them are CDATA nodes and have the same value, or both are DOM
@@ -58,46 +57,66 @@ class GTestXMLTestCase(gtest_test_utils.TestCase):
CDATA sections) as expected_node. Note that we ignore the
order of the children as they are not guaranteed to be in any
particular order.
+
+ Args:
+ expected_node: expected DOM node object
+ actual_node: actual DOM node object
"""
if expected_node.nodeType == Node.CDATA_SECTION_NODE:
- self.assertEquals(Node.CDATA_SECTION_NODE, actual_node.nodeType)
- self.assertEquals(expected_node.nodeValue, actual_node.nodeValue)
+ self.assertEqual(Node.CDATA_SECTION_NODE, actual_node.nodeType)
+ self.assertEqual(expected_node.nodeValue, actual_node.nodeValue)
return
- self.assertEquals(Node.ELEMENT_NODE, actual_node.nodeType)
- self.assertEquals(Node.ELEMENT_NODE, expected_node.nodeType)
- self.assertEquals(expected_node.tagName, actual_node.tagName)
+ self.assertEqual(Node.ELEMENT_NODE, actual_node.nodeType)
+ self.assertEqual(Node.ELEMENT_NODE, expected_node.nodeType)
+ self.assertEqual(expected_node.tagName, actual_node.tagName)
expected_attributes = expected_node.attributes
actual_attributes = actual_node.attributes
- self.assertEquals(
- expected_attributes.length, actual_attributes.length,
- 'attribute numbers differ in element %s:\nExpected: %r\nActual: %r' % (
- actual_node.tagName, expected_attributes.keys(),
- actual_attributes.keys()))
+ self.assertEqual(
+ expected_attributes.length,
+ actual_attributes.length,
+ 'attribute numbers differ in element %s:\nExpected: %r\nActual: %r'
+ % (
+ actual_node.tagName,
+ expected_attributes.keys(),
+ actual_attributes.keys(),
+ ),
+ )
for i in range(expected_attributes.length):
expected_attr = expected_attributes.item(i)
actual_attr = actual_attributes.get(expected_attr.name)
- self.assert_(
+ self.assertTrue(
actual_attr is not None,
- 'expected attribute %s not found in element %s' %
- (expected_attr.name, actual_node.tagName))
- self.assertEquals(
- expected_attr.value, actual_attr.value,
- ' values of attribute %s in element %s differ: %s vs %s' %
- (expected_attr.name, actual_node.tagName,
- expected_attr.value, actual_attr.value))
+ 'expected attribute %s not found in element %s'
+ % (expected_attr.name, actual_node.tagName),
+ )
+ self.assertEqual(
+ expected_attr.value,
+ actual_attr.value,
+ ' values of attribute %s in element %s differ: %s vs %s'
+ % (
+ expected_attr.name,
+ actual_node.tagName,
+ expected_attr.value,
+ actual_attr.value,
+ ),
+ )
expected_children = self._GetChildren(expected_node)
actual_children = self._GetChildren(actual_node)
- self.assertEquals(
- len(expected_children), len(actual_children),
- 'number of child elements differ in element ' + actual_node.tagName)
+ self.assertEqual(
+ len(expected_children),
+ len(actual_children),
+ 'number of child elements differ in element ' + actual_node.tagName,
+ )
for child_id, child in expected_children.items():
- self.assert_(child_id in actual_children,
- '<%s> is not in <%s> (in element %s)' %
- (child_id, actual_children, actual_node.tagName))
+ self.assertTrue(
+ child_id in actual_children,
+ '<%s> is not in <%s> (in element %s)'
+ % (child_id, actual_children, actual_node.tagName),
+ )
self.AssertEquivalentNodes(child, actual_children[child_id])
identifying_attribute = {
@@ -110,40 +129,54 @@ class GTestXMLTestCase(gtest_test_utils.TestCase):
}
def _GetChildren(self, element):
- """
- Fetches all of the child nodes of element, a DOM Element object.
- Returns them as the values of a dictionary keyed by the IDs of the
- children. For <testsuites>, <testsuite>, <testcase>, and <property>
- elements, the ID is the value of their "name" attribute; for <failure>
- elements, it is the value of the "message" attribute; for <properties>
- elements, it is the value of their parent's "name" attribute plus the
- literal string "properties"; CDATA sections and non-whitespace
- text nodes are concatenated into a single CDATA section with ID
- "detail". An exception is raised if any element other than the above
- four is encountered, if two child elements with the same identifying
- attributes are encountered, or if any other type of node is encountered.
+ """Fetches all of the child nodes of element, a DOM Element object.
+
+ Returns them as the values of a dictionary keyed by the IDs of the children.
+ For <testsuites>, <testsuite>, <testcase>, and <property> elements, the ID
+ is the value of their "name" attribute; for <failure> elements, it is the
+ value of the "message" attribute; for <properties> elements, it is the value
+ of their parent's "name" attribute plus the literal string "properties";
+ CDATA sections and non-whitespace text nodes are concatenated into a single
+ CDATA section with ID "detail". An exception is raised if any element other
+ than the above four is encountered, if two child elements with the same
+ identifying attributes are encountered, or if any other type of node is
+ encountered.
+
+ Args:
+ element: DOM Element object
+
+ Returns:
+ Dictionary where keys are the IDs of the children.
"""
children = {}
for child in element.childNodes:
if child.nodeType == Node.ELEMENT_NODE:
if child.tagName == 'properties':
- self.assert_(child.parentNode is not None,
- 'Encountered <properties> element without a parent')
+ self.assertTrue(
+ child.parentNode is not None,
+ 'Encountered <properties> element without a parent',
+ )
child_id = child.parentNode.getAttribute('name') + '-properties'
else:
- self.assert_(child.tagName in self.identifying_attribute,
- 'Encountered unknown element <%s>' % child.tagName)
+ self.assertTrue(
+ child.tagName in self.identifying_attribute,
+ 'Encountered unknown element <%s>' % child.tagName,
+ )
child_id = child.getAttribute(
- self.identifying_attribute[child.tagName])
- self.assert_(child_id not in children)
+ self.identifying_attribute[child.tagName]
+ )
+ self.assertNotIn(child_id, children)
children[child_id] = child
elif child.nodeType in [Node.TEXT_NODE, Node.CDATA_SECTION_NODE]:
if 'detail' not in children:
- if (child.nodeType == Node.CDATA_SECTION_NODE or
- not child.nodeValue.isspace()):
+ if (
+ child.nodeType == Node.CDATA_SECTION_NODE
+ or not child.nodeValue.isspace()
+ ):
children['detail'] = child.ownerDocument.createCDATASection(
- child.nodeValue)
+ child.nodeValue
+ )
else:
children['detail'].nodeValue += child.nodeValue
else:
@@ -151,7 +184,8 @@ class GTestXMLTestCase(gtest_test_utils.TestCase):
return children
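The order-insensitive comparison rests on this keying step. A stripped-down version for <testcase> children only (assuming minidom nodes):

from xml.dom import Node

def key_testcases(suite_element):
    # Mirrors identifying_attribute: a <testcase> is identified by 'name'.
    children = {}
    for child in suite_element.childNodes:
        if (child.nodeType == Node.ELEMENT_NODE
            and child.tagName == 'testcase'):
            name = child.getAttribute('name')
            assert name not in children, 'duplicate testcase %s' % name
            children[name] = child
    return children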
def NormalizeXml(self, element):
- """
+ """Normalizes XML that may change from run to run.
+
Normalizes Google Test's XML output to eliminate references to transient
information that may change from run to run.
@@ -168,15 +202,25 @@ class GTestXMLTestCase(gtest_test_utils.TestCase):
file's basename and a single asterisk for the line number.
* The directory names in file paths are removed.
* The stack traces are removed.
+
+ Args:
+ element: DOM element to normalize
"""
+ if element.tagName == 'testcase':
+ source_file = element.getAttributeNode('file')
+ if source_file:
+ source_file.value = re.sub(r'^.*[/\\](.*)', '\\1', source_file.value)
if element.tagName in ('testsuites', 'testsuite', 'testcase'):
timestamp = element.getAttributeNode('timestamp')
- timestamp.value = re.sub(r'^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d\.\d\d\d$',
- '*', timestamp.value)
+ timestamp.value = re.sub(
+ r'^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d\.\d\d\d$', '*', timestamp.value
+ )
if element.tagName in ('testsuites', 'testsuite', 'testcase'):
time = element.getAttributeNode('time')
- time.value = re.sub(r'^\d+(\.\d+)?$', '*', time.value)
+ # The value for exact N seconds has a trailing decimal point (e.g., "10."
+ # instead of "10")
+ time.value = re.sub(r'^\d+\.(\d+)?$', '*', time.value)
type_param = element.getAttributeNode('type_param')
if type_param and type_param.value:
type_param.value = '*'
@@ -190,8 +234,9 @@ class GTestXMLTestCase(gtest_test_utils.TestCase):
# Replaces the source line information with a normalized form.
cdata = re.sub(source_line_pat, '\\1*\n', child.nodeValue)
# Removes the actual stack trace.
- child.nodeValue = re.sub(r'Stack trace:\n(.|\n)*',
- 'Stack trace:\n*', cdata)
+ child.nodeValue = re.sub(
+ r'Stack trace:\n(.|\n)*', 'Stack trace:\n*', cdata
+ )
for child in element.childNodes:
if child.nodeType == Node.ELEMENT_NODE:
self.NormalizeXml(child)
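Normalization is what lets the golden files spell volatile fields as a literal '*'. A condensed sketch of the same idea for a single <testcase> element:

import re

def normalize_testcase(testcase):
    # Collapse run-dependent values so they compare equal to the golden '*'.
    ts = testcase.getAttributeNode('timestamp')
    if ts:
        ts.value = re.sub(r'^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d\.\d\d\d$', '*',
                          ts.value)
    t = testcase.getAttributeNode('time')
    if t:
        t.value = re.sub(r'^\d+\.(\d+)?$', '*', t.value)
    f = testcase.getAttributeNode('file')
    if f:  # strip directories, keeping only the basename
        f.value = re.sub(r'^.*[/\\](.*)', r'\1', f.value)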
diff --git a/googletest/test/production.h b/googletest/test/production.h
index 41a54722..4dec8d46 100644
--- a/googletest/test/production.h
+++ b/googletest/test/production.h
@@ -46,6 +46,7 @@ class PrivateCode {
PrivateCode();
int x() const { return x_; }
+
private:
void set_x(int an_x) { x_ = an_x; }
int x_;