Instead of tracking passed as 0/1, rename the field to exit_code and invert the values so that they match the KSFT_* exit codes. This will allow us to fold SKIP / XFAIL into the same value.
Signed-off-by: Jakub Kicinski <kuba@kernel.org>
---
 tools/testing/selftests/kselftest_harness.h | 52 ++++++++++++---------
 tools/testing/selftests/net/tls.c           |  2 +-
 2 files changed, 30 insertions(+), 24 deletions(-)
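Note (editor's illustration, not part of the patch): the new __test_passed() helper treats any KSFT_* result other than KSFT_FAIL (and anything above KSFT_SKIP) as "passed", which is what makes folding SKIP / XFAIL into exit_code possible. Below is a minimal standalone sketch, assuming the usual KSFT_* values from tools/testing/selftests/kselftest.h; the struct and function names are hypothetical stand-ins, not the harness's own.

/*
 * Standalone illustration only -- not part of the patch. KSFT_* values
 * mirror tools/testing/selftests/kselftest.h; the predicate mirrors the
 * __test_passed() helper added by this patch.
 */
#include <assert.h>
#include <stdbool.h>

#define KSFT_PASS  0
#define KSFT_FAIL  1
#define KSFT_XFAIL 2
#define KSFT_XPASS 3
#define KSFT_SKIP  4

struct test_metadata_sketch {
        int exit_code;  /* replaces the old passed = 0/1 field */
};

/* "Passed" now means any KSFT_* result other than FAIL. */
static inline bool test_passed_sketch(struct test_metadata_sketch *t)
{
        return t->exit_code != KSFT_FAIL && t->exit_code <= KSFT_SKIP;
}

int main(void)
{
        struct test_metadata_sketch t;

        t.exit_code = KSFT_PASS;
        assert(test_passed_sketch(&t));         /* plain pass */
        t.exit_code = KSFT_SKIP;
        assert(test_passed_sketch(&t));         /* SKIP folds into "passed" */
        t.exit_code = KSFT_XFAIL;
        assert(test_passed_sketch(&t));         /* so does XFAIL */
        t.exit_code = KSFT_FAIL;
        assert(!test_passed_sketch(&t));        /* only FAIL (or >SKIP) fails */
        return 0;
}

Compiling and running the sketch exits cleanly, confirming that only KSFT_FAIL (or an out-of-range value above KSFT_SKIP) is treated as a failure.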
diff --git a/tools/testing/selftests/kselftest_harness.h b/tools/testing/selftests/kselftest_harness.h
index b271cb721b81..70366864ffd9 100644
--- a/tools/testing/selftests/kselftest_harness.h
+++ b/tools/testing/selftests/kselftest_harness.h
@@ -136,7 +136,7 @@
                 fprintf(TH_LOG_STREAM, "# SKIP %s\n", \
                         _metadata->results->reason); \
         } \
-        _metadata->passed = 1; \
+        _metadata->exit_code = KSFT_PASS; \
         _metadata->skip = 1; \
         _metadata->trigger = 0; \
         statement; \
@@ -163,7 +163,7 @@
                 fprintf(TH_LOG_STREAM, "# XFAIL %s\n", \
                         _metadata->results->reason); \
         } \
-        _metadata->passed = 1; \
+        _metadata->exit_code = KSFT_PASS; \
         _metadata->xfail = 1; \
         _metadata->trigger = 0; \
         statement; \
@@ -416,7 +416,7 @@
                 if (setjmp(_metadata->env) == 0) { \
                         fixture_name##_setup(_metadata, &self, variant->data); \
                         /* Let setup failure terminate early. */ \
-                        if (!_metadata->passed || _metadata->skip) \
+                        if (!__test_passed(_metadata) || _metadata->skip) \
                                 return; \
                         _metadata->setup_completed = true; \
                         fixture_name##_##test_name(_metadata, &self, variant->data); \
@@ -723,7 +723,7 @@
                         __bail(_assert, _metadata))

 #define __INC_STEP(_metadata) \
-        if (_metadata->passed) \
+        if (__test_passed(_metadata)) \
                 _metadata->results->step++;

 #define is_signed_type(var) (!!(((__typeof__(var))(-1)) < (__typeof__(var))1))
@@ -769,7 +769,7 @@
                         break; \
                 } \
                 } \
-                _metadata->passed = 0; \
+                _metadata->exit_code = KSFT_FAIL; \
                 /* Ensure the optional handler is triggered */ \
                 _metadata->trigger = 1; \
         } \
@@ -781,7 +781,7 @@
         if (_assert) __INC_STEP(_metadata); \
         if (!(strcmp(__exp, __seen) _t 0)) { \
                 __TH_LOG("Expected '%s' %s '%s'.", __exp, #_t, __seen); \
-                _metadata->passed = 0; \
+                _metadata->exit_code = KSFT_FAIL; \
                 _metadata->trigger = 1; \
         } \
 } while (0); OPTIONAL_HANDLER(_assert)
@@ -860,7 +860,7 @@ struct __test_metadata {
        pid_t pid;      /* pid of test when being run */
        struct __fixture_metadata *fixture;
        int termsig;
-       int passed;
+       int exit_code;
        int skip;       /* did SKIP get used? */
        int xfail;      /* did XFAIL get used? */
        int trigger;    /* extra handler after the evaluation */
@@ -874,6 +874,12 @@ struct __test_metadata {
        struct __test_metadata *prev, *next;
 };

+static inline bool __test_passed(struct __test_metadata *metadata)
+{
+       return metadata->exit_code != KSFT_FAIL &&
+              metadata->exit_code <= KSFT_SKIP;
+}
+
 /*
  * Since constructors are called in reverse order, reverse the test
  * list so tests are run in source declaration order.
@@ -941,7 +947,7 @@ void __wait_for_test(struct __test_metadata *t)
        int status;

        if (sigaction(SIGALRM, &action, &saved_action)) {
-               t->passed = 0;
+               t->exit_code = KSFT_FAIL;
                fprintf(TH_LOG_STREAM,
                        "# %s: unable to install SIGALRM handler\n",
                        t->name);
@@ -953,7 +959,7 @@ void __wait_for_test(struct __test_metadata *t)
        waitpid(t->pid, &status, 0);
        alarm(0);
        if (sigaction(SIGALRM, &saved_action, NULL)) {
-               t->passed = 0;
+               t->exit_code = KSFT_FAIL;
                fprintf(TH_LOG_STREAM,
                        "# %s: unable to uninstall SIGALRM handler\n",
                        t->name);
@@ -962,19 +968,19 @@ void __wait_for_test(struct __test_metadata *t)
        __active_test = NULL;

        if (t->timed_out) {
-               t->passed = 0;
+               t->exit_code = KSFT_FAIL;
                fprintf(TH_LOG_STREAM,
                        "# %s: Test terminated by timeout\n", t->name);
        } else if (WIFEXITED(status)) {
                if (WEXITSTATUS(status) == KSFT_SKIP) {
                        /* SKIP */
-                       t->passed = 1;
+                       t->exit_code = KSFT_PASS;
                        t->skip = 1;
                } else if (WEXITSTATUS(status) == KSFT_XFAIL) {
-                       t->passed = 1;
+                       t->exit_code = KSFT_PASS;
                        t->xfail = 1;
                } else if (t->termsig != -1) {
-                       t->passed = 0;
+                       t->exit_code = KSFT_FAIL;
                        fprintf(TH_LOG_STREAM,
                                "# %s: Test exited normally instead of by signal (code: %d)\n",
                                t->name,
@@ -983,11 +989,11 @@
                        switch (WEXITSTATUS(status)) {
                        /* Success */
                        case KSFT_PASS:
-                               t->passed = 1;
+                               t->exit_code = KSFT_PASS;
                                break;
                        /* Other failure, assume step report. */
                        default:
-                               t->passed = 0;
+                               t->exit_code = KSFT_FAIL;
                                fprintf(TH_LOG_STREAM,
                                        "# %s: Test failed at step #%d\n",
                                        t->name,
@@ -995,13 +1001,13 @@
                        }
                }
        } else if (WIFSIGNALED(status)) {
-               t->passed = 0;
+               t->exit_code = KSFT_FAIL;
                if (WTERMSIG(status) == SIGABRT) {
                        fprintf(TH_LOG_STREAM,
                                "# %s: Test terminated by assertion\n",
                                t->name);
                } else if (WTERMSIG(status) == t->termsig) {
-                       t->passed = 1;
+                       t->exit_code = KSFT_PASS;
                } else {
                        fprintf(TH_LOG_STREAM,
                                "# %s: Test terminated unexpectedly by signal %d\n",
@@ -1144,7 +1150,7 @@ void __run_test(struct __fixture_metadata *f,
        char test_name[LINE_MAX];

        /* reset test struct */
-       t->passed = 1;
+       t->exit_code = KSFT_PASS;
        t->skip = 0;
        t->xfail = 0;
        t->trigger = 0;
@@ -1164,7 +1170,7 @@ void __run_test(struct __fixture_metadata *f,
        t->pid = fork();
        if (t->pid < 0) {
                ksft_print_msg("ERROR SPAWNING TEST CHILD\n");
-               t->passed = 0;
+               t->exit_code = KSFT_FAIL;
        } else if (t->pid == 0) {
                setpgrp();
                t->fn(t, variant);
@@ -1172,7 +1178,7 @@
                        _exit(KSFT_SKIP);
                if (t->xfail)
                        _exit(KSFT_XFAIL);
-               if (t->passed)
+               if (__test_passed(t))
                        _exit(KSFT_PASS);
                /* Something else happened. */
                _exit(KSFT_FAIL);
@@ -1180,7 +1186,7 @@
                __wait_for_test(t);
        }
        ksft_print_msg(" %4s %s\n",
-                      t->passed ? "OK" : "FAIL", test_name);
+                      __test_passed(t) ? "OK" : "FAIL", test_name);

        if (t->skip)
                ksft_test_result_skip("%s\n", t->results->reason[0] ?
@@ -1189,7 +1195,7 @@
                ksft_test_result_xfail("%s\n", t->results->reason[0] ?
                                        t->results->reason : "unknown");
        else
-               ksft_test_result(t->passed, "%s\n", test_name);
+               ksft_test_result(__test_passed(t), "%s\n", test_name);
 }

 static int test_harness_run(int argc, char **argv)
@@ -1237,7 +1243,7 @@ static int test_harness_run(int argc, char **argv)
                                t->results = results;
                                __run_test(f, v, t);
                                t->results = NULL;
-                               if (t->passed)
+                               if (__test_passed(t))
                                        pass_count++;
                                else
                                        ret = 1;
diff --git a/tools/testing/selftests/net/tls.c b/tools/testing/selftests/net/tls.c
index 49c84602707f..046d1ccedcf3 100644
--- a/tools/testing/selftests/net/tls.c
+++ b/tools/testing/selftests/net/tls.c
@@ -1882,7 +1882,7 @@ TEST_F(tls_err, poll_partial_rec_async)
                pfd.events = POLLIN;
                EXPECT_EQ(poll(&pfd, 1, 20), 1);

-               exit(!_metadata->passed);
+               exit(!__test_passed(_metadata));
        }
 }
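Note (editor's illustration, not part of the patch): on the parent side, __wait_for_test() now folds the child's KSFT_* exit status into exit_code instead of the old passed flag; in this patch SKIP and XFAIL still map to KSFT_PASS, with the separate skip/xfail fields recording which case it was. Below is a minimal fork/waitpid sketch of that classification, using assumed KSFT_* values; the variable names are illustrative, not the harness's own.

/* Standalone illustration only -- not part of the patch. */
#include <stdio.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

#define KSFT_PASS  0
#define KSFT_FAIL  1
#define KSFT_XFAIL 2
#define KSFT_SKIP  4

int main(void)
{
        int status, exit_code = KSFT_FAIL;
        pid_t pid = fork();

        if (pid < 0)
                return 1;
        if (pid == 0)
                _exit(KSFT_SKIP);       /* child: pretend the test hit SKIP() */

        waitpid(pid, &status, 0);
        if (WIFEXITED(status)) {
                switch (WEXITSTATUS(status)) {
                case KSFT_PASS:
                case KSFT_SKIP:         /* the patch records these as passing; */
                case KSFT_XFAIL:        /* skip/xfail flags note which one */
                        exit_code = KSFT_PASS;
                        break;
                default:
                        exit_code = KSFT_FAIL;
                }
        }
        printf("folded child status into exit_code = %d\n", exit_code);
        return 0;
}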