updated output of test

cyborg1811m 2024-06-05 19:44:22 +02:00
parent 2e3f1dc43d
commit 8a1d6385ef
4 changed files with 56 additions and 25 deletions

View File

@@ -7,6 +7,8 @@
 #include <cstddef>
 #include <iterator>
+#include <stdexcept>
+#include <string>
 
 namespace cc::helper {

View File

@@ -11,11 +11,14 @@
 #include <iostream>
 #include <ranges>
 #include <tuple>
+#include <numeric>
 
 #include "test_ret_val.h"
 
 #define TEST_FAIL(msg) ret_val_s { "", ReturnCode::FAILED, msg }
-#define TEST_PASS(msg) ret_val_s { "", ReturnCode::PASSED, msg }
+#define TEST_PASS() ret_val_s { "", ReturnCode::PASSED, nullptr }
+#define TEST_PASS_MSG(msg) ret_val_s { "", ReturnCode::PASSED, msg }
+#define TEST_SKIP() ret_val_s { "", ReturnCode::SKIPPED, nullptr }
 
 template<typename Suite>
 struct quick_test_def;
@@ -79,7 +82,7 @@ class test_suite {
     auto test_arr = expand_test_tuple(_tests, std::make_index_sequence<TEST_NR>());
     int num_failed = 0;
-    std::cout << "--------------\n";
+    std::array<std::array<ReturnCode, 2>, TEST_NR> ret_vals = { { ReturnCode::NOT_EVALUATED } };
     for (auto [i, test_ref] : std::ranges::views::enumerate(test_arr)) {
         const auto& test = test_ref.get();
@@ -96,26 +99,50 @@ class test_suite {
                 ret = ret_val_s(test.name(), ReturnCode::FAILED, ret_exc_str.c_str());
             }
-            std::cout << "Result of Runtime Evaluation of Test \"" << ret.test_name << "\": " << ret.val << "\n"
-                << "\t" << ret.msg << std::endl;
-            if (ret.val != ReturnCode::PASSED) {
-                --num_failed;
+            std::cout << "Result of Runtime Evaluation of Test \"" << ret.test_name << "\": " << ret.val;
+            if (ret.msg != nullptr) {
+                std::cout << "\n\t" << ret.msg;
             }
+            std::cout << std::endl;
+            ret_vals[i][0] = ret.val;
         }
         if (test.evalFlag() == EvalFlag::CONSTEVAL || test.evalFlag() == EvalFlag::RUNTIME_CONSTEVAL) {
             const ret_val_s &ret = test.c_res();
-            std::cout << "Result of Consteval Evaluation of Test \"" << ret.test_name << "\": " << ret.val << "\n"
-                << "\t" << ret.msg << std::endl;
-            if (ret.val != ReturnCode::PASSED) {
-                --num_failed;
-            }
+            std::cout << "Result of Consteval Evaluation of Test \"" << ret.test_name << "\": " << ret.val;
+            if (ret.msg != nullptr) {
+                std::cout << "\n\t" << ret.msg;
+            }
+            std::cout << std::endl;
+            ret_vals[i][1] = ret.val;
         }
     }
-    return num_failed;
+    std::cout << "--------------\n";
+    auto ret_vals_j = ret_vals | std::ranges::views::join;
+    auto correct = std::ranges::count_if(ret_vals, [](auto&& e) {
+        return std::ranges::none_of(e, [](auto&& c) { return c == ReturnCode::FAILED; })
+            && std::ranges::any_of(e, [](auto&& c) { return c == ReturnCode::PASSED; });
+    });
+    auto failed = std::ranges::count_if(ret_vals, [](auto&& e) {
+        return std::ranges::any_of(e, [](auto&& c) { return c == ReturnCode::FAILED; }); });
+    auto full_skipped = std::ranges::count_if(ret_vals, [](auto&& e) {
+        return std::ranges::all_of(e, [](auto&& c) { return c == ReturnCode::SKIPPED || c == ReturnCode::NOT_EVALUATED; }); });
+    auto part_skipped = std::ranges::count_if(ret_vals, [](auto&& e) {
+        return std::ranges::any_of(e, [](auto&& c) { return c == ReturnCode::SKIPPED; }); });
+    std::size_t num_tests = ret_vals.size();
+    std::cout << "Final Result: " << "\n"
+        << correct << "/" << num_tests << " tests evaluated correctly" << "\n"
+        << failed << "/" << num_tests << " tests failed" << "\n"
+        << full_skipped << "/" << num_tests << " tests skipped" << "\n"
+        << part_skipped << "/" << num_tests << " tests have been partially skipped" << "\n";
+    return -failed;
 }
 private:

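Side note on the reworked run() above: each test now records a pair of return codes (slot 0 = runtime evaluation, slot 1 = consteval evaluation), and the summary block classifies tests from those pairs. Below is a minimal, self-contained sketch of that counting logic; the ReturnCode values mirror test_ret_val.h, while summarize() and the sample data are illustrative assumptions, not part of the framework.

// Sketch only: aggregate per-test [runtime, consteval] result pairs into the
// summary counts using the same std::ranges predicates as the diff above.
#include <algorithm>
#include <array>
#include <cstddef>
#include <iostream>

enum ReturnCode { FAILED = -1, PASSED = 0, SKIPPED = 1, NOT_EVALUATED = 2 };

template<std::size_t N>
void summarize(const std::array<std::array<ReturnCode, 2>, N>& ret_vals) {
    // Correct: no slot failed and at least one slot actually passed.
    auto correct = std::ranges::count_if(ret_vals, [](auto&& e) {
        return std::ranges::none_of(e, [](auto c) { return c == ReturnCode::FAILED; })
            && std::ranges::any_of(e, [](auto c) { return c == ReturnCode::PASSED; });
    });
    // Failed: any slot failed.
    auto failed = std::ranges::count_if(ret_vals, [](auto&& e) {
        return std::ranges::any_of(e, [](auto c) { return c == ReturnCode::FAILED; });
    });
    // Fully skipped: every slot was skipped or never evaluated.
    auto full_skipped = std::ranges::count_if(ret_vals, [](auto&& e) {
        return std::ranges::all_of(e, [](auto c) {
            return c == ReturnCode::SKIPPED || c == ReturnCode::NOT_EVALUATED; });
    });
    // Partially skipped: at least one slot was skipped.
    auto part_skipped = std::ranges::count_if(ret_vals, [](auto&& e) {
        return std::ranges::any_of(e, [](auto c) { return c == ReturnCode::SKIPPED; });
    });
    std::cout << correct << "/" << N << " correct, " << failed << "/" << N
              << " failed, " << full_skipped << "/" << N << " skipped, "
              << part_skipped << "/" << N << " partially skipped\n";
}

int main() {
    // Sample data: slot 0 = runtime result, slot 1 = consteval result.
    std::array<std::array<ReturnCode, 2>, 3> ret_vals{{
        { PASSED,        NOT_EVALUATED },   // runtime-only test that passed
        { NOT_EVALUATED, FAILED },          // consteval-only test that failed
        { PASSED,        SKIPPED }          // runtime passed, consteval skipped
    }};
    summarize(ret_vals);                    // prints 2/3 correct, 1/3 failed, 0/3 skipped, 1/3 partially skipped
}

One detail worth noting: the diff initializes ret_vals with = { { ReturnCode::NOT_EVALUATED } }, which appears to set only the very first slot explicitly; the remaining elements are value-initialized to 0, i.e. PASSED. Filling every slot with NOT_EVALUATED up front (as the sample data above does) would keep the skipped and partially-skipped tallies exact.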
View File

@@ -6,7 +6,7 @@
 #include <const_list.h>
 
-enum ReturnCode { FAILED = -1, PASSED = 0 };
+enum ReturnCode { FAILED = -1, PASSED = 0, SKIPPED = 1, NOT_EVALUATED = 2 };
 
 struct ret_val_s {
     const char *test_name = "";
@@ -31,6 +31,10 @@ std::ostream& operator<<(std::ostream& os, const ReturnCode &rc) {
         return os << "FAILED";
     case PASSED:
         return os << "PASSED";
+    case SKIPPED:
+        return os << "SKIPPED";
+    case NOT_EVALUATED:
+        return os << "NOT EVALUATED";
     default:
         return os;
     }

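For context, a hedged sketch of how the ret_val_s aggregate above works together with the TEST_* macros from test_define_test.hpp, and why run() now guards the message line with ret.msg != nullptr. The ret_val_s layout here is a simplified assumption (three members in the macros' initializer order); the real struct lives in test_ret_val.h.

#include <initializer_list>
#include <iostream>
#include <ostream>

enum ReturnCode { FAILED = -1, PASSED = 0, SKIPPED = 1, NOT_EVALUATED = 2 };

// Mirrors the operator<< in the diff: unknown values pass the stream through untouched.
std::ostream& operator<<(std::ostream& os, const ReturnCode &rc) {
    switch (rc) {
    case FAILED:        return os << "FAILED";
    case PASSED:        return os << "PASSED";
    case SKIPPED:       return os << "SKIPPED";
    case NOT_EVALUATED: return os << "NOT EVALUATED";
    default:            return os;
    }
}

struct ret_val_s {                 // simplified stand-in for test_ret_val.h
    const char *test_name = "";
    ReturnCode  val       = NOT_EVALUATED;
    const char *msg       = nullptr;
};

#define TEST_FAIL(msg)     ret_val_s { "", ReturnCode::FAILED, msg }
#define TEST_PASS()        ret_val_s { "", ReturnCode::PASSED, nullptr }
#define TEST_PASS_MSG(msg) ret_val_s { "", ReturnCode::PASSED, msg }
#define TEST_SKIP()        ret_val_s { "", ReturnCode::SKIPPED, nullptr }

int main() {
    for (ret_val_s ret : { TEST_PASS(), TEST_SKIP(), TEST_FAIL("boom") }) {
        std::cout << ret.val;
        if (ret.msg != nullptr) {   // message line only when a message was supplied
            std::cout << "\n\t" << ret.msg;
        }
        std::cout << "\n";          // prints: PASSED, SKIPPED, FAILED + "boom"
    }
}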
View File

@@ -1,34 +1,32 @@
 #include <const_vector.hpp>
 #include "test_define_test.hpp"
-#include "test_util.hpp"
 #include "test_ret_val.h"
 
 constexpr test_suite tests = define_tests("Tests")
     ("Test Runtime", [](int = 1) constexpr{
-        return TEST_PASS("PASS");
+        return TEST_PASS();
     }, EvalFlag::RUNTIME)
     ("Test Consteval 1", [](char = 2) constexpr {
-        return TEST_FAIL("ups");
         if (std::is_constant_evaluated()) {
-            return TEST_PASS("PASS");
+            return TEST_PASS();
         } else {
            return TEST_FAIL("FAIL");
        }
    }, EvalFlag::CONSTEVAL)
    ("Test Consteval", [](char = 2) constexpr {
        if (std::is_constant_evaluated()) {
-            return TEST_PASS( "PASS");
+            return TEST_SKIP();
        } else {
            return TEST_FAIL("FAIL");
        }
    }, EvalFlag::CONSTEVAL)
    ("Test Runtime Consteval", [](short = 3) constexpr{
        if (std::is_constant_evaluated()) {
-            return TEST_PASS("PASS Consteval");
+            return TEST_SKIP();
        } else {
-            return TEST_PASS("PASS Runtime");
+            return TEST_PASS();
        }
    }, EvalFlag::RUNTIME_CONSTEVAL);
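The test lambdas above lean on std::is_constant_evaluated() to report different results for the compile-time and the run-time pass of a test, and with this commit the constant-evaluated branch can return TEST_SKIP() instead of a dummy pass. A small stand-alone illustration of that pattern; check() is an invented name and not part of the framework.

#include <iostream>
#include <type_traits>

constexpr int check() {
    if (std::is_constant_evaluated()) {
        return 1;   // compile-time path (what a CONSTEVAL evaluation sees)
    } else {
        return 2;   // run-time path (what a RUNTIME evaluation sees)
    }
}

int main() {
    constexpr int at_compile_time = check();   // forced constant evaluation -> 1
    int at_run_time = check();                 // ordinary call at run time   -> 2
    std::cout << at_compile_time << " " << at_run_time << "\n";   // prints "1 2"
}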