Adds GTEST_SKIP() macro to skip tests at run time

parent 7888184f28
commit a62e586979
googletest/CMakeLists.txt
@@ -198,6 +198,7 @@ if (gtest_build_tests)
     test/gtest-typed-test2_test.cc)
   cxx_test(gtest_unittest gtest_main)
   cxx_test(gtest-unittest-api_test gtest)
+  cxx_test(gtest_skip_test gtest_main)

   ############################################################
   # C++ tests built with non-standard compiler flags.
googletest/include/gtest/gtest-test-part.h
@@ -51,7 +51,8 @@ class GTEST_API_ TestPartResult {
   enum Type {
     kSuccess,          // Succeeded.
     kNonFatalFailure,  // Failed but the test can continue.
-    kFatalFailure      // Failed and the test should be terminated.
+    kFatalFailure,     // Failed and the test should be terminated.
+    kSkip              // Skipped.
   };

   // C'tor. TestPartResult does NOT have a default constructor.
@@ -87,18 +88,21 @@ class GTEST_API_ TestPartResult {
   // Gets the message associated with the test part.
   const char* message() const { return message_.c_str(); }

+  // Returns true iff the test part was skipped.
+  bool skipped() const { return type_ == kSkip; }
+
   // Returns true iff the test part passed.
   bool passed() const { return type_ == kSuccess; }

-  // Returns true iff the test part failed.
-  bool failed() const { return type_ != kSuccess; }
-
   // Returns true iff the test part non-fatally failed.
   bool nonfatally_failed() const { return type_ == kNonFatalFailure; }

   // Returns true iff the test part fatally failed.
   bool fatally_failed() const { return type_ == kFatalFailure; }

+  // Returns true iff the test part failed.
+  bool failed() const { return fatally_failed() || nonfatally_failed(); }
+
  private:
   Type type_;
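With the accessors above, a kSkip part is deliberately neither passed nor failed. A minimal sketch of the resulting three-way classification (my illustration, not part of the commit):

// Classifies a TestPartResult using the accessors declared above. Since
// failed() is now defined via the two failure kinds, a kSkip part returns
// false for passed(), failed(), and both *_failed() checks.
const char* Classify(const ::testing::TestPartResult& part) {
  if (part.skipped()) return "skipped";  // type_ == kSkip
  if (part.passed())  return "passed";   // type_ == kSuccess
  return "failed";                       // fatal or non-fatal failure
}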
googletest/include/gtest/gtest.h
@@ -418,6 +418,9 @@ class GTEST_API_ Test {
   // Returns true iff the current test has a non-fatal failure.
   static bool HasNonfatalFailure();

+  // Returns true iff the current test was skipped.
+  static bool IsSkipped();
+
   // Returns true iff the current test has a (either fatal or
   // non-fatal) failure.
   static bool HasFailure() { return HasFatalFailure() || HasNonfatalFailure(); }
@@ -552,7 +555,10 @@ class GTEST_API_ TestResult {
   int test_property_count() const;

   // Returns true iff the test passed (i.e. no test part failed).
-  bool Passed() const { return !Failed(); }
+  bool Passed() const { return !Skipped() && !Failed(); }
+
+  // Returns true iff the test was skipped.
+  bool Skipped() const;

   // Returns true iff the test failed.
   bool Failed() const;
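TestResult now distinguishes three mutually exclusive outcomes. A hypothetical helper (names mine, not from the commit) showing how the declarations above compose; Skipped() is implemented later in gtest.cc and requires !Failed():

// Sketch: every TestResult falls into exactly one bucket, since
// Skipped() implies !Failed() and Passed() == !Skipped() && !Failed().
const char* OutcomeOf(const ::testing::TestResult& result) {
  if (result.Failed())  return "FAILED";
  if (result.Skipped()) return "SKIPPED";
  return "OK";  // result.Passed() is true here
}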
@@ -831,6 +837,9 @@ class GTEST_API_ TestCase {
   // Gets the number of successful tests in this test case.
   int successful_test_count() const;

+  // Gets the number of skipped tests in this test case.
+  int skipped_test_count() const;
+
   // Gets the number of failed tests in this test case.
   int failed_test_count() const;
@@ -913,6 +922,11 @@ class GTEST_API_ TestCase {
     return test_info->should_run() && test_info->result()->Passed();
   }

+  // Returns true iff test skipped.
+  static bool TestSkipped(const TestInfo* test_info) {
+    return test_info->should_run() && test_info->result()->Skipped();
+  }
+
   // Returns true iff test failed.
   static bool TestFailed(const TestInfo* test_info) {
     return test_info->should_run() && test_info->result()->Failed();
@@ -1235,6 +1249,9 @@ class GTEST_API_ UnitTest {
   // Gets the number of successful tests.
   int successful_test_count() const;

+  // Gets the number of skipped tests.
+  int skipped_test_count() const;
+
   // Gets the number of failed tests.
   int failed_test_count() const;
@@ -1812,6 +1829,17 @@ class TestWithParam : public Test, public WithParamInterface<T> {

 // Macros for indicating success/failure in test code.

+// Skips the test at run time.
+// Skipping a test aborts the current function.
+// Skipped tests are neither successful nor failed.
+#define GTEST_SKIP() GTEST_SKIP_("Skipped")
+
+// Define this macro to 1 to omit the definition of SKIP(), which is a
+// generic name and may clash with some other libraries.
+#if !GTEST_DONT_DEFINE_SKIP
+# define SKIP() GTEST_SKIP()
+#endif
+
 // ADD_FAILURE unconditionally adds a failure to the current test.
 // SUCCEED generates a success - it doesn't automatically make the
 // current test successful, as a test is only successful when it has
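Typical use of the new macro, modeled on the gtest_skip_test.cc added at the end of this commit; the precondition helper and the call under test are invented for illustration:

TEST(DatabaseTest, ReadsRow) {
  if (!ExternalDbAvailable())  // hypothetical environment check
    GTEST_SKIP();              // aborts this test body; the test reports as skipped
  // Assertions below run only when the precondition holds.
  EXPECT_TRUE(ReadRow(42));    // hypothetical call under test
}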
googletest/include/gtest/internal/gtest-internal.h
@@ -1155,6 +1155,9 @@ class NativeArray {
 #define GTEST_SUCCESS_(message) \
   GTEST_MESSAGE_(message, ::testing::TestPartResult::kSuccess)

+#define GTEST_SKIP_(message) \
+  return GTEST_MESSAGE_(message, ::testing::TestPartResult::kSkip)
+
 // Suppresses MSVC warnings 4072 (unreachable code) for the code following
 // statement if it returns or throws (or doesn't return or throw in some
 // situations).
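The leading return is what lets GTEST_SKIP() abort the calling function; it also means the macro can only appear in functions returning void, such as the test body itself or SetUp(). Assuming the usual definition of GTEST_MESSAGE_ in this header, a GTEST_SKIP(); inside a test body expands to roughly:

// Approximate expansion (a sketch, not verbatim preprocessor output):
// AssertHelper's operator= records a test part result of type kSkip,
// then the return exits the test body.
return ::testing::internal::AssertHelper(
           ::testing::TestPartResult::kSkip, __FILE__, __LINE__, "Skipped")
       = ::testing::Message();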
googletest/src/gtest-internal-inl.h
@@ -534,6 +534,9 @@ class GTEST_API_ UnitTestImpl {
   // Gets the number of successful tests.
   int successful_test_count() const;

+  // Gets the number of skipped tests.
+  int skipped_test_count() const;
+
   // Gets the number of failed tests.
   int failed_test_count() const;
googletest/src/gtest-test-part.cc
@@ -51,6 +51,7 @@ std::ostream& operator<<(std::ostream& os, const TestPartResult& result) {
   return os
       << result.file_name() << ":" << result.line_number() << ": "
       << (result.type() == TestPartResult::kSuccess ? "Success" :
+          result.type() == TestPartResult::kSkip ? "Skipped" :
           result.type() == TestPartResult::kFatalFailure ? "Fatal failure" :
           "Non-fatal failure") << ":\n"
       << result.message() << std::endl;
googletest/src/gtest.cc
@@ -765,6 +765,11 @@ int UnitTestImpl::successful_test_count() const {
   return SumOverTestCaseList(test_cases_, &TestCase::successful_test_count);
 }

+// Gets the number of skipped tests.
+int UnitTestImpl::skipped_test_count() const {
+  return SumOverTestCaseList(test_cases_, &TestCase::skipped_test_count);
+}
+
 // Gets the number of failed tests.
 int UnitTestImpl::failed_test_count() const {
   return SumOverTestCaseList(test_cases_, &TestCase::failed_test_count);
@@ -2181,6 +2186,16 @@ void TestResult::Clear() {
   elapsed_time_ = 0;
 }

+// Returns true iff the test part was skipped.
+static bool TestPartSkipped(const TestPartResult& result) {
+  return result.skipped();
+}
+
+// Returns true iff the test was skipped.
+bool TestResult::Skipped() const {
+  return !Failed() && CountIf(test_part_results_, TestPartSkipped) > 0;
+}
+
 // Returns true iff the test failed.
 bool TestResult::Failed() const {
   for (int i = 0; i < total_part_count(); ++i) {
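Note the !Failed() guard in Skipped(): a skip mark does not override an earlier failure. For example (invented, not from the commit):

TEST(SkipSemantics, FailureBeforeSkipStillFails) {
  EXPECT_EQ(1, 2);  // records a non-fatal failure part first
  GTEST_SKIP();     // adds a kSkip part, but Skipped() stays false
}                   // the test is reported as FAILED, not SKIPPED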
@@ -2511,6 +2526,11 @@ bool Test::HasNonfatalFailure() {
       HasNonfatalFailure();
 }

+// Returns true iff the current test was skipped.
+bool Test::IsSkipped() {
+  return internal::GetUnitTestImpl()->current_test_result()->Skipped();
+}
+
 // class TestInfo

 // Constructs a TestInfo object. It assumes ownership of the test factory
@@ -2689,6 +2709,11 @@ int TestCase::successful_test_count() const {
   return CountIf(test_info_list_, TestPassed);
 }

+// Gets the number of skipped tests in this test case.
+int TestCase::skipped_test_count() const {
+  return CountIf(test_info_list_, TestSkipped);
+}
+
 // Gets the number of failed tests in this test case.
 int TestCase::failed_test_count() const {
   return CountIf(test_info_list_, TestFailed);
@@ -2840,6 +2865,8 @@ static std::string FormatTestCaseCount(int test_case_count) {
 // between the two when viewing the test result.
 static const char * TestPartResultTypeToString(TestPartResult::Type type) {
   switch (type) {
+    case TestPartResult::kSkip:
+      return "Skipped";
     case TestPartResult::kSuccess:
       return "Success";
@@ -3093,6 +3120,7 @@ class PrettyUnitTestResultPrinter : public TestEventListener {

  private:
   static void PrintFailedTests(const UnitTest& unit_test);
+  static void PrintSkippedTests(const UnitTest& unit_test);
 };

 // Fired before each iteration of tests starts.
@@ -3160,18 +3188,25 @@ void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) {
 // Called after an assertion failure.
 void PrettyUnitTestResultPrinter::OnTestPartResult(
     const TestPartResult& result) {
-  // If the test part succeeded, we don't need to do anything.
-  if (result.type() == TestPartResult::kSuccess)
-    return;
-
-  // Print failure message from the assertion (e.g. expected this and got that).
-  PrintTestPartResult(result);
-  fflush(stdout);
+  switch (result.type()) {
+    // If the test part succeeded, or was skipped,
+    // we don't need to do anything.
+    case TestPartResult::kSkip:
+    case TestPartResult::kSuccess:
+      return;
+    default:
+      // Print failure message from the assertion
+      // (e.g. expected this and got that).
+      PrintTestPartResult(result);
+      fflush(stdout);
+  }
 }

 void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {
   if (test_info.result()->Passed()) {
     ColoredPrintf(COLOR_GREEN, "[       OK ] ");
+  } else if (test_info.result()->Skipped()) {
+    ColoredPrintf(COLOR_GREEN, "[  SKIPPED ] ");
   } else {
     ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
   }
@@ -3221,7 +3256,7 @@ void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
     }
     for (int j = 0; j < test_case.total_test_count(); ++j) {
       const TestInfo& test_info = *test_case.GetTestInfo(j);
-      if (!test_info.should_run() || test_info.result()->Passed()) {
+      if (!test_info.should_run() || !test_info.result()->Failed()) {
         continue;
       }
       ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
@@ -3232,6 +3267,30 @@ void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
   }
 }

+// Internal helper for printing the list of skipped tests.
+void PrettyUnitTestResultPrinter::PrintSkippedTests(const UnitTest& unit_test) {
+  const int skipped_test_count = unit_test.skipped_test_count();
+  if (skipped_test_count == 0) {
+    return;
+  }
+
+  for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
+    const TestCase& test_case = *unit_test.GetTestCase(i);
+    if (!test_case.should_run() || (test_case.skipped_test_count() == 0)) {
+      continue;
+    }
+    for (int j = 0; j < test_case.total_test_count(); ++j) {
+      const TestInfo& test_info = *test_case.GetTestInfo(j);
+      if (!test_info.should_run() || !test_info.result()->Skipped()) {
+        continue;
+      }
+      ColoredPrintf(COLOR_GREEN, "[  SKIPPED ] ");
+      printf("%s.%s", test_case.name(), test_info.name());
+      printf("\n");
+    }
+  }
+}
+
 void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
                                                      int /*iteration*/) {
   ColoredPrintf(COLOR_GREEN, "[==========] ");
@@ -3246,6 +3305,13 @@ void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
   ColoredPrintf(COLOR_GREEN, "[  PASSED  ] ");
   printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());

+  const int skipped_test_count = unit_test.skipped_test_count();
+  if (skipped_test_count > 0) {
+    ColoredPrintf(COLOR_GREEN, "[  SKIPPED ] ");
+    printf("%s, listed below:\n", FormatTestCount(skipped_test_count).c_str());
+    PrintSkippedTests(unit_test);
+  }
+
   int num_failures = unit_test.failed_test_count();
   if (!unit_test.Passed()) {
     const int failed_test_count = unit_test.failed_test_count();
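Taken together, the printer changes yield an end-of-run summary along these lines for one passing and one skipped test (reconstructed from the format strings above; counts and spacing approximate):

[==========] 2 tests from 1 test case ran. (0 ms total)
[  PASSED  ] 1 test.
[  SKIPPED ] 1 test, listed below:
[  SKIPPED ] SkipTest.DoesSkip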
@@ -4417,6 +4483,11 @@ int UnitTest::successful_test_count() const {
   return impl()->successful_test_count();
 }

+// Gets the number of skipped tests.
+int UnitTest::skipped_test_count() const {
+  return impl()->skipped_test_count();
+}
+
 // Gets the number of failed tests.
 int UnitTest::failed_test_count() const { return impl()->failed_test_count(); }
@@ -4537,7 +4608,8 @@ void UnitTest::AddTestPartResult(
   impl_->GetTestPartResultReporterForCurrentThread()->
       ReportTestPartResult(result);

-  if (result_type != TestPartResult::kSuccess) {
+  if (result_type != TestPartResult::kSuccess &&
+      result_type != TestPartResult::kSkip) {
     // gtest_break_on_failure takes precedence over
     // gtest_throw_on_failure. This allows a user to set the latter
     // in the code (perhaps in order to use Google Test assertions
googletest/test/gtest-test-part_test.cc
@@ -49,9 +49,10 @@ class TestPartResultTest : public Test {
   TestPartResultTest()
       : r1_(TestPartResult::kSuccess, "foo/bar.cc", 10, "Success!"),
         r2_(TestPartResult::kNonFatalFailure, "foo/bar.cc", -1, "Failure!"),
-        r3_(TestPartResult::kFatalFailure, NULL, -1, "Failure!") {}
+        r3_(TestPartResult::kFatalFailure, NULL, -1, "Failure!"),
+        r4_(TestPartResult::kSkip, "foo/bar.cc", 2, "Skipped!") {}

-  TestPartResult r1_, r2_, r3_;
+  TestPartResult r1_, r2_, r3_, r4_;
 };
@@ -82,6 +83,7 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
   EXPECT_FALSE(success.failed());
   EXPECT_FALSE(success.nonfatally_failed());
   EXPECT_FALSE(success.fatally_failed());
+  EXPECT_FALSE(success.skipped());

   const TestPartResult nonfatal_failure(TestPartResult::kNonFatalFailure,
                                         "file.cc",
@@ -91,6 +93,7 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
   EXPECT_TRUE(nonfatal_failure.failed());
   EXPECT_TRUE(nonfatal_failure.nonfatally_failed());
   EXPECT_FALSE(nonfatal_failure.fatally_failed());
+  EXPECT_FALSE(nonfatal_failure.skipped());

   const TestPartResult fatal_failure(TestPartResult::kFatalFailure,
                                      "file.cc",
@@ -100,6 +103,17 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
   EXPECT_TRUE(fatal_failure.failed());
   EXPECT_FALSE(fatal_failure.nonfatally_failed());
   EXPECT_TRUE(fatal_failure.fatally_failed());
+  EXPECT_FALSE(fatal_failure.skipped());
+
+  const TestPartResult skip(TestPartResult::kSkip,
+                            "file.cc",
+                            42,
+                            "message");
+  EXPECT_FALSE(skip.passed());
+  EXPECT_FALSE(skip.failed());
+  EXPECT_FALSE(skip.nonfatally_failed());
+  EXPECT_FALSE(skip.fatally_failed());
+  EXPECT_TRUE(skip.skipped());
 }

 // Tests TestPartResult::type().
@@ -107,23 +121,27 @@ TEST_F(TestPartResultTest, type) {
   EXPECT_EQ(TestPartResult::kSuccess, r1_.type());
   EXPECT_EQ(TestPartResult::kNonFatalFailure, r2_.type());
   EXPECT_EQ(TestPartResult::kFatalFailure, r3_.type());
+  EXPECT_EQ(TestPartResult::kSkip, r4_.type());
 }

 // Tests TestPartResult::file_name().
 TEST_F(TestPartResultTest, file_name) {
   EXPECT_STREQ("foo/bar.cc", r1_.file_name());
   EXPECT_STREQ(NULL, r3_.file_name());
+  EXPECT_STREQ("foo/bar.cc", r4_.file_name());
 }

 // Tests TestPartResult::line_number().
 TEST_F(TestPartResultTest, line_number) {
   EXPECT_EQ(10, r1_.line_number());
   EXPECT_EQ(-1, r2_.line_number());
+  EXPECT_EQ(2, r4_.line_number());
 }

 // Tests TestPartResult::message().
 TEST_F(TestPartResultTest, message) {
   EXPECT_STREQ("Success!", r1_.message());
+  EXPECT_STREQ("Skipped!", r4_.message());
 }

 // Tests TestPartResult::passed().
@@ -131,6 +149,7 @@ TEST_F(TestPartResultTest, Passed) {
   EXPECT_TRUE(r1_.passed());
   EXPECT_FALSE(r2_.passed());
   EXPECT_FALSE(r3_.passed());
+  EXPECT_FALSE(r4_.passed());
 }

 // Tests TestPartResult::failed().
@@ -138,6 +157,15 @@ TEST_F(TestPartResultTest, Failed) {
   EXPECT_FALSE(r1_.failed());
   EXPECT_TRUE(r2_.failed());
   EXPECT_TRUE(r3_.failed());
+  EXPECT_FALSE(r4_.failed());
+}
+
+// Tests TestPartResult::skipped().
+TEST_F(TestPartResultTest, Skipped) {
+  EXPECT_FALSE(r1_.skipped());
+  EXPECT_FALSE(r2_.skipped());
+  EXPECT_FALSE(r3_.skipped());
+  EXPECT_TRUE(r4_.skipped());
 }

 // Tests TestPartResult::fatally_failed().
@@ -145,6 +173,7 @@ TEST_F(TestPartResultTest, FatallyFailed) {
   EXPECT_FALSE(r1_.fatally_failed());
   EXPECT_FALSE(r2_.fatally_failed());
   EXPECT_TRUE(r3_.fatally_failed());
+  EXPECT_FALSE(r4_.fatally_failed());
 }

 // Tests TestPartResult::nonfatally_failed().
@@ -152,6 +181,7 @@ TEST_F(TestPartResultTest, NonfatallyFailed) {
   EXPECT_FALSE(r1_.nonfatally_failed());
   EXPECT_TRUE(r2_.nonfatally_failed());
   EXPECT_FALSE(r3_.nonfatally_failed());
+  EXPECT_FALSE(r4_.nonfatally_failed());
 }

 // Tests the TestPartResultArray class.
googletest/test/gtest_all_test.cc
@@ -44,4 +44,5 @@
 #include "gtest-typed-test_test.cc"
 #include "gtest-typed-test2_test.cc"
 #include "gtest_unittest.cc"
+#include "gtest_skip_test.cc"
 #include "production.cc"
googletest/test/gtest_skip_test.cc (new file, 38 lines)
@@ -0,0 +1,38 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: arseny.aprelev@gmail.com (Arseny Aprelev)
+//
+
+#include <gtest/gtest.h>
+
+TEST(SkipTest, DoesSkip) {
+  SKIP();
+  EXPECT_EQ(0, 1);
+}
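The EXPECT_EQ(0, 1) after SKIP() is the point of the test: it would fail if executed, so the binary can only come out clean if SKIP() really aborts the test body. The test links against gtest_main (see the CMake change above), so no custom runner is needed; a hypothetical one, shown here only to illustrate the new counters, might look like:

#include <cstdio>
#include <gtest/gtest.h>

// Hypothetical runner (not part of the commit): after RUN_ALL_TESTS(),
// the skipped test shows up in the new counter, not as a pass or failure.
int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  const int exit_code = RUN_ALL_TESTS();
  const ::testing::UnitTest* unit_test = ::testing::UnitTest::GetInstance();
  std::printf("skipped: %d, failed: %d\n",
              unit_test->skipped_test_count(), unit_test->failed_test_count());
  return exit_code;
}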