mirror of https://github.com/catchorg/Catch2.git synced 2025-01-16 15:18:00 +00:00

Compare commits


5 Commits

Author SHA1 Message Date
Martin Hořeňovský
250d9b9c72
Fix how testRandomOrder.py builds tag arguments 2020-07-26 10:51:55 +02:00
Martin Hořeňovský
90d6fd849e
Increase tolerances in --min-duration tests
The underpowered and oversubscribed CI servers are hell.
2020-07-26 10:48:03 +02:00
Martin Hořeňovský
13917c44b4
--min-duration is overridden by -d no 2020-07-26 10:48:01 +02:00
Martin Hořeňovský
e6d947f6d4
Refactor tests for duration reporting threshold 2020-07-26 10:47:58 +02:00
John Bytheway
80b0d6975c
Add --min-duration option
A test runner already has a --durations option to print durations.
However, this isn't entirely satisfactory.

When there are many tests, this produces output spam which makes it hard
to find the test failure output.  Nevertheless, it is helpful to be
informed of tests which are unusually slow.

Therefore, introduce a new option --min-duration that causes all
durations above a certain threshold to be printed.  This allows slow
tests to be visible without mentioning every test.
2020-07-26 10:47:53 +02:00
13 changed files with 91 additions and 6 deletions

View File

@@ -222,6 +222,10 @@ available warnings
When set to ```yes``` Catch will report the duration of each test case, in milliseconds. Note that it does this regardless of whether a test case passes or fails. Note, also, that certain reporters (e.g. Junit) always report test case durations regardless of this option being set or not.
<pre>-D, --min-duration &lt;value></pre>
When set, Catch will report the duration of each test case that took more than &lt;value> seconds, in milliseconds.
<a id="input-file"></a>
## Load test names to run from a file
<pre>-f, --input-file &lt;filename></pre>
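
As an illustration (not part of the diff, and the binary name is hypothetical), a run such as SelfTest --min-duration 0.25 prints a duration line only for test cases that take at least 0.25 seconds, while an explicit -d yes or -d no still turns duration reporting fully on or off regardless of the threshold; the CTest cases added further down exercise exactly this behaviour.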

View File

@@ -69,6 +69,7 @@ namespace Catch {
bool Config::warnAboutMissingAssertions() const { return !!(m_data.warnings & WarnAbout::NoAssertions); }
bool Config::warnAboutNoTests() const { return !!(m_data.warnings & WarnAbout::NoTests); }
ShowDurations::OrNot Config::showDurations() const { return m_data.showDurations; }
double Config::minDuration() const { return m_data.minDuration; }
RunTests::InWhatOrder Config::runOrder() const { return m_data.runOrder; }
unsigned int Config::rngSeed() const { return m_data.rngSeed; }
UseColour::YesOrNo Config::useColour() const { return m_data.useColour; }

View File

@@ -48,6 +48,7 @@ namespace Catch {
Verbosity verbosity = Verbosity::Normal;
WarnAbout::What warnings = WarnAbout::Nothing;
ShowDurations::OrNot showDurations = ShowDurations::DefaultForReporter;
double minDuration = -1;
RunTests::InWhatOrder runOrder = RunTests::InDeclarationOrder;
UseColour::YesOrNo useColour = UseColour::Auto;
WaitForKeypress::When waitForKeypress = WaitForKeypress::Never;
@@ -98,6 +99,7 @@ namespace Catch {
bool warnAboutMissingAssertions() const override;
bool warnAboutNoTests() const override;
ShowDurations::OrNot showDurations() const override;
double minDuration() const override;
RunTests::InWhatOrder runOrder() const override;
unsigned int rngSeed() const override;
UseColour::YesOrNo useColour() const override;

View File

@@ -67,6 +67,7 @@ namespace Catch {
virtual int abortAfter() const = 0;
virtual bool showInvisibles() const = 0;
virtual ShowDurations::OrNot showDurations() const = 0;
virtual double minDuration() const = 0;
virtual TestSpec const& testSpec() const = 0;
virtual bool hasTestFilters() const = 0;
virtual std::vector<std::string> const& getTestsOrTags() const = 0;

View File

@@ -170,6 +170,9 @@ namespace Catch {
| Opt( [&]( bool flag ) { config.showDurations = flag ? ShowDurations::Always : ShowDurations::Never; }, "yes|no" )
["-d"]["--durations"]
( "show test durations" )
| Opt( config.minDuration, "seconds" )
["-D"]["--min-duration"]
( "show test durations for tests taking at least the given number of seconds" )
| Opt( loadTestNamesFromFile, "filename" )
["-f"]["--input-file"]
( "load test names to run from a file" )

View File

@@ -43,6 +43,17 @@ namespace Catch {
return std::string(buffer);
}
bool shouldShowDuration( IConfig const& config, double duration ) {
if ( config.showDurations() == ShowDurations::Always ) {
return true;
}
if ( config.showDurations() == ShowDurations::Never ) {
return false;
}
const double min = config.minDuration();
return min >= 0 && duration >= min;
}
std::string serializeFilters( std::vector<std::string> const& container ) {
ReusableStringStream oss;
bool first = true;
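
To make the interaction between the two flags concrete, here is a minimal sketch of how the new helper is expected to behave. It is illustrative only, not part of the diff, and the include paths (in particular the header declaring shouldShowDuration) are assumed.

// Illustrative sketch, not part of the diff.
// Assumption: shouldShowDuration is reachable via the reporter bases header.
#include <catch2/catch_config.hpp>
#include <catch2/catch_test_macros.hpp>
#include <catch2/reporters/catch_reporter_bases.hpp>

TEST_CASE( "shouldShowDuration precedence (sketch)", "[.illustration]" ) {
    Catch::ConfigData data;

    // Only --min-duration 0.5 set: durations at or above the threshold are shown
    data.minDuration = 0.5;
    CHECK( Catch::shouldShowDuration( Catch::Config( data ), 1.0 ) );
    CHECK_FALSE( Catch::shouldShowDuration( Catch::Config( data ), 0.1 ) );

    // -d no overrides the threshold: nothing is shown
    data.showDurations = Catch::ShowDurations::Never;
    CHECK_FALSE( Catch::shouldShowDuration( Catch::Config( data ), 1.0 ) );

    // -d yes overrides the other way: everything is shown
    data.showDurations = Catch::ShowDurations::Always;
    CHECK( Catch::shouldShowDuration( Catch::Config( data ), 0.1 ) );
}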

View File

@@ -24,6 +24,9 @@ namespace Catch {
// Returns double formatted as %.3f (format expected on output)
std::string getFormattedDuration( double duration );
//! Should the reporter show the duration of the test, given the current configuration?
bool shouldShowDuration( IConfig const& config, double duration );
std::string serializeFilters( std::vector<std::string> const& container );
struct StreamingReporterBase : IStreamingReporter {

View File

@@ -286,8 +286,9 @@ private:
}
void CompactReporter::sectionEnded(SectionStats const& _sectionStats) {
if (m_config->showDurations() == ShowDurations::Always) {
stream << getFormattedDuration(_sectionStats.durationInSeconds) << " s: " << _sectionStats.sectionInfo.name << std::endl;
double dur = _sectionStats.durationInSeconds;
if ( shouldShowDuration( *m_config, dur ) ) {
stream << getFormattedDuration( dur ) << " s: " << _sectionStats.sectionInfo.name << std::endl;
}
}

View File

@@ -420,8 +420,9 @@ void ConsoleReporter::sectionEnded(SectionStats const& _sectionStats) {
stream << "\nNo assertions in test case";
stream << " '" << _sectionStats.sectionInfo.name << "'\n" << std::endl;
}
if (m_config->showDurations() == ShowDurations::Always) {
stream << getFormattedDuration(_sectionStats.durationInSeconds) << " s: " << _sectionStats.sectionInfo.name << std::endl;
double dur = _sectionStats.durationInSeconds;
if (shouldShowDuration(*m_config, dur)) {
stream << getFormattedDuration(dur) << " s: " << _sectionStats.sectionInfo.name << std::endl;
}
if (m_headerPrinted) {
m_headerPrinted = false;

View File

@@ -28,6 +28,7 @@ set(TEST_SOURCES
${SELF_TEST_DIR}/IntrospectiveTests/Xml.tests.cpp
${SELF_TEST_DIR}/IntrospectiveTests/ToString.tests.cpp
${SELF_TEST_DIR}/IntrospectiveTests/UniquePtr.tests.cpp
${SELF_TEST_DIR}/TimingTests/Sleep.tests.cpp
${SELF_TEST_DIR}/UsageTests/Approx.tests.cpp
${SELF_TEST_DIR}/UsageTests/BDD.tests.cpp
${SELF_TEST_DIR}/UsageTests/Benchmark.tests.cpp

View File

@@ -8,6 +8,40 @@ project( Catch2ExtraTests LANGUAGES CXX )
message( STATUS "Extra tests included" )
# The MinDuration reporting tests do not need separate compilation, but
# they have non-trivial execution time, so they are categorized as
# extra tests, so that they are run less.
add_test(NAME MinDuration::SimpleThreshold COMMAND $<TARGET_FILE:SelfTest> --min-duration 0.22 [min_duration_test])
set_tests_properties(
MinDuration::SimpleThreshold
PROPERTIES
PASS_REGULAR_EXPRESSION "s: sleep_for_250ms"
FAIL_REGULAR_EXPRESSION "sleep_for_100ms"
RUN_SERIAL ON # The test is timing sensitive, so we want to run it
# serially to avoid false positives on oversubscribed machines
)
# -d yes overrides the threshold, so we should see the faster test even
# with a ridiculously high min duration threshold
add_test(NAME MinDuration::DurationOverrideYes COMMAND $<TARGET_FILE:SelfTest> --min-duration 1.0 -d yes [min_duration_test])
set_tests_properties(
MinDuration::DurationOverrideYes
PROPERTIES
PASS_REGULAR_EXPRESSION "s: sleep_for_100ms"
)
# -d no overrides the threshold, so we should never see any tests even
# with ridiculously low min duration threshold
add_test(NAME MinDuration::DurationOverrideNo COMMAND $<TARGET_FILE:SelfTest> --min-duration 0.0001 -d no [min_duration_test])
set_tests_properties(
MinDuration::DurationOverrideNo
PROPERTIES
FAIL_REGULAR_EXPRESSION "sleep_for_250ms"
)
# ------------ end of duration reporting tests
# define folders used:
set( TESTS_DIR ${CATCH_DIR}/tests/ExtraTests )

View File

@@ -0,0 +1,23 @@
/*
* Copyright 2011 Two Blue Cubes Ltd. All rights reserved.
*
* Distributed under the Boost Software License, Version 1.0. (See accompanying
* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
*/
#include <catch2/catch_test_macros.hpp>
#include <chrono>
#include <thread>
TEST_CASE( "sleep_for_100ms", "[.min_duration_test][approvals]" )
{
std::this_thread::sleep_for( std::chrono::milliseconds( 100 ) );
CHECK( true );
}
TEST_CASE( "sleep_for_250ms", "[.min_duration_test][approvals]" )
{
std::this_thread::sleep_for( std::chrono::milliseconds( 250 ) );
CHECK( true );
}

View File

@@ -16,9 +16,9 @@ import xml.etree.ElementTree as ET
def list_tests(self_test_exe, tags, rng_seed):
cmd = [self_test_exe, '--reporter', 'xml', '--list-tests', '--order', 'rand',
'--rng-seed', str(rng_seed)]
tags_arg = ','.join('[{}]'.format(t) for t in tags)
tags_arg = ','.join('[{}]~[.]'.format(t) for t in tags)
if tags_arg:
cmd.append(tags_arg + '~[.]')
cmd.append(tags_arg)
process = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
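
A note on this fix: in a Catch2 test spec a comma separates alternatives, so the previous code's single trailing '~[.]' excluded hidden tests only from the last tag in the joined list; appending '~[.]' to every tag keeps hidden tests excluded from each alternative.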