fix some false positive warnings emitted by msvc (#399)

* fix some false positive warnings emitted by msvc
* make find_package for Python3 more specific
Hans Dembinski 2024-04-25 12:51:47 +02:00 committed by GitHub
parent 90867e24a4
commit 66842660c0
11 changed files with 100 additions and 95 deletions
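
Most of the per-file changes below share one theme: with a float value_type, plain int and double literals push the arithmetic through int/double and back, and comparisons of accumulator values against int literals mix types, which MSVC flags (typically its C4244/C4305 conversion and truncation warnings) even though the values are exact. The fix is typed constants, f-suffixed literals, and explicit static_casts. A minimal before/after sketch of that pattern, with invented function names, not taken from the library:

    // Hypothetical sketch of the literal-vs-value_type pattern that the
    // interval headers below switch to.
    #include <cmath>

    template <class value_type>
    value_type upper_before(value_type alpha_half, value_type total) {
      // plain int literals mixed into the expression
      return 1 - std::pow(alpha_half, 1 / total);
    }

    template <class value_type>
    value_type upper_after(value_type alpha_half, value_type total) {
      // a typed constant keeps every operand in value_type
      const value_type one{1.0};
      return one - std::pow(alpha_half, one / total);
    }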

View File

@@ -112,7 +112,7 @@ public:
   /// Returns index and shift (if axis has grown) for the passed argument.
   auto update(value_type x) noexcept {
     auto impl = [this](long x) -> std::pair<index_type, index_type> {
-      const auto i = x - min_;
+      const auto i = static_cast<value_type>(x) - min_;
       if (i >= 0) {
         const auto k = static_cast<axis::index_type>(i);
         if (k < size()) return {k, 0};
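
For context, update() is the hook that lets a growing integer axis extend itself when a fill value lies outside the current range. A small usage sketch, assuming the integer-axis header and not part of the diff:

    #include <boost/histogram/axis/integer.hpp>
    #include <iostream>

    int main() {
      namespace bh = boost::histogram;
      // growing integer axis, initially covering {0, 1, 2}
      bh::axis::integer<int, bh::use_default, bh::axis::option::growth_t> a(0, 3);
      const auto index_and_shift = a.update(5);  // grows the axis so that 5 is covered
      std::cout << a.size() << "\n";             // bins now span {0, ..., 5}
      (void)index_and_shift;
    }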

View File

@@ -54,13 +54,13 @@ public:
     @param successes Number of successful trials.
     @param failures Number of failed trials.
   */
-  interval_type operator()(value_type successes, value_type failures) const noexcept {
+  interval_type operator()(value_type successes, value_type failures) const noexcept override {
     // analytical solution when successes or failures are zero
     // T. Mans (2014), Electronic Journal of Statistics. 8 (1): 817-840.
     // arXiv:1303.1288. doi:10.1214/14-EJS909.
-    const value_type total = successes + failures;
-    if (successes == 0) return {0, 1 - std::pow(alpha_half_, 1 / total)};
-    if (failures == 0) return {std::pow(alpha_half_, 1 / total), 1};
+    const value_type one{1.0}, zero{0.0}, total{successes + failures};
+    if (successes == 0) return {zero, one - std::pow(alpha_half_, one / total)};
+    if (failures == 0) return {std::pow(alpha_half_, one / total), one};
     // Source:
     // https://en.wikipedia.org/wiki/
@@ -68,7 +68,7 @@ public:
     math::beta_distribution<value_type> beta_a(successes, failures + 1);
     const value_type a = math::quantile(beta_a, alpha_half_);
     math::beta_distribution<value_type> beta_b(successes + 1, failures);
-    const value_type b = math::quantile(beta_b, 1 - alpha_half_);
+    const value_type b = math::quantile(beta_b, one - alpha_half_);
     return {a, b};
   }
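
For reference, the interval computed above is the exact Clopper-Pearson interval: with s successes, f failures, n = s + f and half-tail probability alpha/2 (alpha_half_ in the code),

    \text{lower} = B_{\alpha/2}(s,\, f + 1), \qquad
    \text{upper} = B_{1-\alpha/2}(s + 1,\, f),

where B_p(a, b) is the p-quantile of the Beta(a, b) distribution. The two early returns are the closed-form edge cases [0, 1 - (alpha/2)^{1/n}] for s = 0 and [(alpha/2)^{1/n}, 1] for f = 0 (Mans, 2014).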

View File

@@ -51,19 +51,20 @@ public:
     @param successes Number of successful trials.
     @param failures Number of failed trials.
   */
-  interval_type operator()(value_type successes, value_type failures) const noexcept {
+  interval_type operator()(value_type successes,
+                           value_type failures) const noexcept override {
     // See L.D. Brown, T.T. Cai, A. DasGupta, Statistical Science 16 (2001) 101-133,
     // doi:10.1214/ss/1009213286, section 4.1.2.
-    const value_type half{0.5};
+    const value_type half{0.5}, one{1.0}, zero{0.0};
     const value_type total = successes + failures;
     // if successes or failures are 0, modified interval is equal to Clopper-Pearson
-    if (successes == 0) return {0, 1 - std::pow(alpha_half_, 1 / total)};
-    if (failures == 0) return {std::pow(alpha_half_, 1 / total), 1};
+    if (successes == 0) return {zero, one - std::pow(alpha_half_, one / total)};
+    if (failures == 0) return {std::pow(alpha_half_, one / total), one};
     math::beta_distribution<value_type> beta(successes + half, failures + half);
-    const value_type a = successes == 1 ? 0 : math::quantile(beta, alpha_half_);
-    const value_type b = failures == 1 ? 1 : math::quantile(beta, 1 - alpha_half_);
+    const value_type a = successes == 1 ? zero : math::quantile(beta, alpha_half_);
+    const value_type b = failures == 1 ? one : math::quantile(beta, one - alpha_half_);
     return {a, b};
   }
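
The Jeffreys interval above is the equal-tailed interval of the Beta(s + 1/2, f + 1/2) posterior,

    \left[\, B_{\alpha/2}\bigl(s + \tfrac12,\, f + \tfrac12\bigr),\;
             B_{1-\alpha/2}\bigl(s + \tfrac12,\, f + \tfrac12\bigr) \,\right],

with the boundary modifications of Brown, Cai and DasGupta that are visible in the code: the Clopper-Pearson closed forms when s = 0 or f = 0, a lower limit of zero when s = 1, and an upper limit of one when f = 1.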

View File

@@ -59,7 +59,8 @@ public:
     @param successes Number of successful trials.
     @param failures Number of failed trials.
   */
-  interval_type operator()(value_type successes, value_type failures) const noexcept {
+  interval_type operator()(value_type successes,
+                           value_type failures) const noexcept override {
     // See https://en.wikipedia.org/wiki/
     // Binomial_proportion_confidence_interval
     // #Normal_approximation_interval_or_Wald_interval
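
For reference, the Wald (normal approximation) interval that this comment points to is, with p-hat = s/n, n = s + f, and z the 1 - alpha/2 quantile of the standard normal,

    \hat p \;\pm\; z \sqrt{\frac{\hat p\,(1 - \hat p)}{n}}.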

View File

@@ -58,7 +58,7 @@ public:
     @param successes Number of successful trials.
     @param failures Number of failed trials.
   */
-  interval_type operator()(value_type successes, value_type failures) const noexcept {
+  interval_type operator()(value_type successes,
+                           value_type failures) const noexcept override {
     // See https://en.wikipedia.org/wiki/
     // Binomial_proportion_confidence_interval
     // #Wilson_score_interval
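
The Wilson score interval referenced here is, in the same notation,

    \frac{1}{1 + z^2/n}\left( \hat p + \frac{z^2}{2n}
      \;\pm\; z \sqrt{\frac{\hat p\,(1 - \hat p)}{n} + \frac{z^2}{4 n^2}} \right).

Unlike the Wald interval it always stays inside [0, 1] and behaves well for small n.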

View File

@@ -2,13 +2,15 @@
 # Distributed under the Boost Software License, Version 1.0.
 # See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt
-find_package(Python3)
+find_package(Python3 COMPONENTS Interpreter)
 if (Python3_FOUND)
   # checks that b2 and cmake are in sync
   add_test(NAME runpy-${PROJECT_NAME}_check_build_system COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/check_build_system.py)
   # checks that all headers are included in odr test
   add_test(NAME runpy-${PROJECT_NAME}_check_odr_test COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/check_odr_test.py)
+else()
+  MESSAGE(WARNING "Python interpreter not found, cannot check for odr violations and build system consistency")
 endif()
 include(BoostTest OPTIONAL RESULT_VARIABLE HAVE_BOOST_TEST)

View File

@@ -20,7 +20,7 @@ void run_tests() {
   {
     c_t c;
     ++c;
-    BOOST_TEST_EQ(c.value(), 1);
+    BOOST_TEST_EQ(c.value(), static_cast<T>(1));
     BOOST_TEST_EQ(str(c), "1"s);
     BOOST_TEST_EQ(str(c, 2, false), " 1"s);
     BOOST_TEST_EQ(str(c, 2, true), "1 "s);
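
The static_cast above matters because the accumulator's value() returns T, so comparing it against a plain int literal is presumably the mixed-type comparison MSVC complained about. A small standalone sketch, assuming the count accumulator header and not part of the diff:

    #include <boost/histogram/accumulators/count.hpp>
    #include <cassert>

    int main() {
      boost::histogram::accumulators::count<float> c;  // T = float
      ++c;
      // compare in T instead of against the int literal 1
      assert(c.value() == static_cast<float>(1));
    }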

View File

@@ -25,8 +25,8 @@ void run_tests() {
   {
     f_t f;
-    BOOST_TEST_EQ(f.successes(), 0);
-    BOOST_TEST_EQ(f.failures(), 0);
+    BOOST_TEST_EQ(f.successes(), static_cast<T>(0));
+    BOOST_TEST_EQ(f.failures(), static_cast<T>(0));
     BOOST_TEST(std::isnan(f.value()));
     BOOST_TEST(std::isnan(f.variance()));
@@ -38,22 +38,22 @@ void run_tests() {
   {
     f_t f;
     f(true);
-    BOOST_TEST_EQ(f.successes(), 1);
-    BOOST_TEST_EQ(f.failures(), 0);
+    BOOST_TEST_EQ(f.successes(), static_cast<T>(1));
+    BOOST_TEST_EQ(f.failures(), static_cast<T>(0));
     BOOST_TEST_EQ(str(f), "fraction(1, 0)"s);
     f(false);
-    BOOST_TEST_EQ(f.successes(), 1);
-    BOOST_TEST_EQ(f.failures(), 1);
+    BOOST_TEST_EQ(f.successes(), static_cast<T>(1));
+    BOOST_TEST_EQ(f.failures(), static_cast<T>(1));
     BOOST_TEST_EQ(str(f), "fraction(1, 1)"s);
     BOOST_TEST_EQ(str(f, 20), "fraction(1, 1) "s);
   }
   {
     f_t f(3, 1);
-    BOOST_TEST_EQ(f.successes(), 3);
-    BOOST_TEST_EQ(f.failures(), 1);
-    BOOST_TEST_EQ(f.value(), 0.75);
-    BOOST_TEST_IS_CLOSE(f.variance(), 0.75 * (1 - 0.75) / 4, eps);
+    BOOST_TEST_EQ(f.successes(), static_cast<T>(3));
+    BOOST_TEST_EQ(f.failures(), static_cast<T>(1));
+    BOOST_TEST_EQ(f.value(), 0.75f);
+    BOOST_TEST_IS_CLOSE(f.variance(), 0.75f * (1.0f - 0.75f) / 4.f, eps);
     const auto ci = f.confidence_interval();
     const auto expected = utility::wilson_interval<double>()(3, 1);
@@ -63,10 +63,10 @@ void run_tests() {
   {
     f_t f(0, 1);
-    BOOST_TEST_EQ(f.successes(), 0);
-    BOOST_TEST_EQ(f.failures(), 1);
-    BOOST_TEST_EQ(f.value(), 0);
-    BOOST_TEST_EQ(f.variance(), 0);
+    BOOST_TEST_EQ(f.successes(), static_cast<T>(0));
+    BOOST_TEST_EQ(f.failures(), static_cast<T>(1));
+    BOOST_TEST_EQ(f.value(), 0.f);
+    BOOST_TEST_EQ(f.variance(), 0.f);
     const auto ci = f.confidence_interval();
     const auto expected = utility::wilson_interval<double>()(0, 1);
@@ -76,10 +76,10 @@ void run_tests() {
   {
     f_t f(1, 0);
-    BOOST_TEST_EQ(f.successes(), 1);
-    BOOST_TEST_EQ(f.failures(), 0);
-    BOOST_TEST_EQ(f.value(), 1);
-    BOOST_TEST_EQ(f.variance(), 0);
+    BOOST_TEST_EQ(f.successes(), static_cast<T>(1));
+    BOOST_TEST_EQ(f.failures(), static_cast<T>(0));
+    BOOST_TEST_EQ(f.value(), 1.f);
+    BOOST_TEST_EQ(f.variance(), 0.f);
     const auto ci = f.confidence_interval();
     const auto expected = utility::wilson_interval<double>()(1, 0);
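
The expected values in these blocks follow directly from the accumulator's definition: for s successes and f failures with n = s + f,

    \hat p = \frac{s}{n}, \qquad
    \operatorname{Var}(\hat p) = \frac{\hat p\,(1 - \hat p)}{n},

so f_t f(3, 1) yields value 0.75 and variance 0.75 * 0.25 / 4, as checked above.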

View File

@@ -75,39 +75,39 @@ void run_tests() {
   // - lower edge of shrink: pick bin which contains edge, lower <= x < upper
   // - upper edge of shrink: pick bin which contains edge + 1, lower < x <= upper
   {
-    auto h = make(Tag(), ID(0, 3));
+    auto h = make(Tag(), ID(0.0, 3.0));
     const auto& ax = h.axis();
-    BOOST_TEST_EQ(ax.value(0), 0);
-    BOOST_TEST_EQ(ax.value(3), 3);
-    BOOST_TEST_EQ(ax.index(-1), -1);
-    BOOST_TEST_EQ(ax.index(3), 3);
+    BOOST_TEST_EQ(ax.value(0), 0.0);
+    BOOST_TEST_EQ(ax.value(3), 3.0);
+    BOOST_TEST_EQ(ax.index(-1.0), -1);
+    BOOST_TEST_EQ(ax.index(3.0), 3);
-    BOOST_TEST_EQ(reduce(h, shrink(-1, 5)).axis(), ID(0, 3));
-    BOOST_TEST_EQ(reduce(h, shrink(0, 3)).axis(), ID(0, 3));
-    BOOST_TEST_EQ(reduce(h, shrink(1, 3)).axis(), ID(1, 3));
-    BOOST_TEST_EQ(reduce(h, shrink(1.001, 3)).axis(), ID(1, 3));
-    BOOST_TEST_EQ(reduce(h, shrink(1.999, 3)).axis(), ID(1, 3));
-    BOOST_TEST_EQ(reduce(h, shrink(2, 3)).axis(), ID(2, 3));
-    BOOST_TEST_EQ(reduce(h, shrink(0, 2.999)).axis(), ID(0, 3));
-    BOOST_TEST_EQ(reduce(h, shrink(0, 2.001)).axis(), ID(0, 3));
-    BOOST_TEST_EQ(reduce(h, shrink(0, 2)).axis(), ID(0, 2));
-    BOOST_TEST_EQ(reduce(h, shrink(0, 1.999)).axis(), ID(0, 2));
+    BOOST_TEST_EQ(reduce(h, shrink(-1, 5)).axis(), ID(0.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, shrink(0, 3)).axis(), ID(0.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, shrink(1, 3)).axis(), ID(1.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, shrink(1.001, 3)).axis(), ID(1.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, shrink(1.999, 3)).axis(), ID(1.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, shrink(2, 3)).axis(), ID(2.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, shrink(0, 2.999)).axis(), ID(0.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, shrink(0, 2.001)).axis(), ID(0.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, shrink(0, 2)).axis(), ID(0.0, 2.0));
+    BOOST_TEST_EQ(reduce(h, shrink(0, 1.999)).axis(), ID(0.0, 2.0));
-    BOOST_TEST_EQ(reduce(h, crop(-1, 5)).axis(), ID(0, 3));
-    BOOST_TEST_EQ(reduce(h, crop(0, 3)).axis(), ID(0, 3));
-    BOOST_TEST_EQ(reduce(h, crop(1, 3)).axis(), ID(1, 3));
-    BOOST_TEST_EQ(reduce(h, crop(1.001, 3)).axis(), ID(1, 3));
-    BOOST_TEST_EQ(reduce(h, crop(1.999, 3)).axis(), ID(1, 3));
-    BOOST_TEST_EQ(reduce(h, crop(2, 3)).axis(), ID(2, 3));
-    BOOST_TEST_EQ(reduce(h, crop(0, 2.999)).axis(), ID(0, 3));
-    BOOST_TEST_EQ(reduce(h, crop(0, 2.001)).axis(), ID(0, 3));
-    BOOST_TEST_EQ(reduce(h, crop(0, 2)).axis(), ID(0, 2));
-    BOOST_TEST_EQ(reduce(h, crop(0, 1.999)).axis(), ID(0, 2));
+    BOOST_TEST_EQ(reduce(h, crop(-1, 5)).axis(), ID(0.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, crop(0, 3)).axis(), ID(0.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, crop(1, 3)).axis(), ID(1.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, crop(1.001, 3)).axis(), ID(1.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, crop(1.999, 3)).axis(), ID(1.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, crop(2, 3)).axis(), ID(2.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, crop(0, 2.999)).axis(), ID(0.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, crop(0, 2.001)).axis(), ID(0.0, 3.0));
+    BOOST_TEST_EQ(reduce(h, crop(0, 2)).axis(), ID(0.0, 2.0));
+    BOOST_TEST_EQ(reduce(h, crop(0, 1.999)).axis(), ID(0.0, 2.0));
   }
   // shrink and rebin
   {
-    auto h = make_s(Tag(), std::vector<int>(), R(4, 1, 5, "1"), R(3, -1, 2, "2"));
+    auto h = make_s(Tag(), std::vector<int>(), R(4, 1.0, 5.0, "1"), R(3, -1.0, 2.0, "2"));
     /*
       matrix layout:
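
The shrink/crop semantics exercised above can also be seen in a tiny standalone example, assuming the usual histogram and reduce headers and not part of the diff: shrink keeps the selected range and moves the removed counts into the flow bins, while crop discards them.

    #include <boost/histogram.hpp>
    #include <boost/histogram/algorithm/reduce.hpp>

    int main() {
      using namespace boost::histogram;
      auto h = make_histogram(axis::integer<>(0, 3));  // bins {0, 1, 2}
      h(0); h(1); h(2);
      // keep only [1, 3); the count at 0 ends up in the underflow bin
      auto hs = algorithm::reduce(h, algorithm::shrink(1, 3));
      // same range, but the count at 0 is dropped entirely
      auto hc = algorithm::reduce(h, algorithm::crop(1, 3));
      (void)hs; (void)hc;
    }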

View File

@@ -18,7 +18,7 @@ template <class T>
 void test() {
   const T atol = 0.001;
-  clopper_pearson_interval<T> iv(deviation{1.f});
+  clopper_pearson_interval<T> iv(deviation{1});
   {
     const auto x = iv(0.f, 1.f);

View File

@@ -25,78 +25,78 @@ void test() {
   jeffreys_interval<T> iv(confidence_level{0.95});
   {
-    auto p = iv(0, 7);
-    BOOST_TEST_IS_CLOSE(p.first, 0, atol);
-    BOOST_TEST_IS_CLOSE(p.second, 0.41, atol);
+    auto p = iv(0.f, 7.f);
+    BOOST_TEST_IS_CLOSE(p.first, 0.f, atol);
+    BOOST_TEST_IS_CLOSE(p.second, 0.41f, atol);
   }
   {
-    auto p = iv(1, 6);
-    BOOST_TEST_IS_CLOSE(p.first, 0, atol);
-    BOOST_TEST_IS_CLOSE(p.second, 0.501, atol);
+    auto p = iv(1.f, 6.f);
+    BOOST_TEST_IS_CLOSE(p.first, 0.f, atol);
+    BOOST_TEST_IS_CLOSE(p.second, 0.501f, atol);
   }
   {
-    auto p = iv(2, 5);
-    BOOST_TEST_IS_CLOSE(p.first, 0.065, atol);
-    BOOST_TEST_IS_CLOSE(p.second, 0.648, atol);
+    auto p = iv(2.f, 5.f);
+    BOOST_TEST_IS_CLOSE(p.first, 0.065f, atol);
+    BOOST_TEST_IS_CLOSE(p.second, 0.648f, atol);
   }
   {
-    auto p = iv(3, 4);
-    BOOST_TEST_IS_CLOSE(p.first, 0.139, atol);
-    BOOST_TEST_IS_CLOSE(p.second, 0.766, atol);
+    auto p = iv(3.f, 4.f);
+    BOOST_TEST_IS_CLOSE(p.first, 0.139f, atol);
+    BOOST_TEST_IS_CLOSE(p.second, 0.766f, atol);
   }
   {
-    auto p = iv(4, 7 - 4);
-    BOOST_TEST_IS_CLOSE(p.first, 0.234, atol);
-    BOOST_TEST_IS_CLOSE(p.second, 0.861, atol);
+    auto p = iv(4.f, 7.f - 4.f);
+    BOOST_TEST_IS_CLOSE(p.first, 0.234f, atol);
+    BOOST_TEST_IS_CLOSE(p.second, 0.861f, atol);
   }
   // extrapolated from table
   {
-    auto p = iv(5, 2);
-    BOOST_TEST_IS_CLOSE(p.first, 1 - 0.648, atol);
-    BOOST_TEST_IS_CLOSE(p.second, 1 - 0.065, atol);
+    auto p = iv(5.f, 2.f);
+    BOOST_TEST_IS_CLOSE(p.first, 1.f - 0.648f, atol);
+    BOOST_TEST_IS_CLOSE(p.second, 1.f - 0.065f, atol);
   }
   // extrapolated from table
   {
-    auto p = iv(6, 1);
-    BOOST_TEST_IS_CLOSE(p.first, 1 - 0.501, atol);
-    BOOST_TEST_IS_CLOSE(p.second, 1, atol);
+    auto p = iv(6.f, 1.f);
+    BOOST_TEST_IS_CLOSE(p.first, 1.f - 0.501f, atol);
+    BOOST_TEST_IS_CLOSE(p.second, 1.f, atol);
   }
   // extrapolated from table
   {
-    auto p = iv(7, 0);
-    BOOST_TEST_IS_CLOSE(p.first, 1 - 0.41, atol);
-    BOOST_TEST_IS_CLOSE(p.second, 1, atol);
+    auto p = iv(7.f, 0.f);
+    BOOST_TEST_IS_CLOSE(p.first, 1.f - 0.41f, atol);
+    BOOST_TEST_IS_CLOSE(p.second, 1.f, atol);
   }
   // not in table
   {
-    auto p = iv(0, 1);
-    BOOST_TEST_IS_CLOSE(p.first, 0, atol);
-    BOOST_TEST_IS_CLOSE(p.second, 0.975, atol);
+    auto p = iv(0.f, 1.f);
+    BOOST_TEST_IS_CLOSE(p.first, 0.f, atol);
+    BOOST_TEST_IS_CLOSE(p.second, 0.975f, atol);
-    fraction<T> f(0, 1);
+    fraction<T> f(0.f, 1.f);
     const auto y = iv(f);
-    BOOST_TEST_IS_CLOSE(y.first, 0.0, atol);
-    BOOST_TEST_IS_CLOSE(y.second, 0.975, atol);
+    BOOST_TEST_IS_CLOSE(y.first, 0.f, atol);
+    BOOST_TEST_IS_CLOSE(y.second, 0.975f, atol);
   }
   // not in table
   {
-    auto p = iv(1, 0);
+    auto p = iv(1.f, 0.f);
     BOOST_TEST_IS_CLOSE(p.first, 0.025, atol);
     BOOST_TEST_IS_CLOSE(p.second, 1, atol);
-    fraction<T> f(1, 0);
+    fraction<T> f(1.f, 0.f);
     const auto y = iv(f);
-    BOOST_TEST_IS_CLOSE(y.first, 0.025, atol);
-    BOOST_TEST_IS_CLOSE(y.second, 1, atol);
+    BOOST_TEST_IS_CLOSE(y.first, 0.025f, atol);
+    BOOST_TEST_IS_CLOSE(y.second, 1.f, atol);
   }
 }