aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorStefan Boberg <[email protected]>2026-02-27 19:36:22 +0100
committerGitHub Enterprise <[email protected]>2026-02-27 19:36:22 +0100
commit1ed3139e577f6c8aa6d07f7e76afa3a80d9d4852 (patch)
tree8a44a56f18643db156f8b925be3d8edc25909dcc
parentadd full WebSocket (RFC 6455) client/server support for zenhttp (#792) (diff)
downloadzen-1ed3139e577f6c8aa6d07f7e76afa3a80d9d4852.tar.xz
zen-1ed3139e577f6c8aa6d07f7e76afa3a80d9d4852.zip
Add test summary table and failure reporting to xmake test (#794)
- Add a summary table printed after all test suites complete, showing per-suite test case counts, assertion counts, timings and pass/fail status.
- Add failure reporting: individual failing test cases are listed at the end with their file path and line number for easy navigation.
- Make zenserver instances spawned by a hub not create new console windows, for a better background testing experience.
- The TestListener in testing.cpp now writes a machine-readable summary file (via `ZEN_TEST_SUMMARY_FILE` env var) containing aggregate counts and per-test-case failure details. This runs as a doctest listener alongside any active reporter, so it works with both console and JUnit modes.
- Tests now run in a deterministic order defined by a single ordered list that also serves as the test name/target mapping, replacing the previous unordered table + separate order list.
- The `--run` option now accepts comma-separated values (e.g. `--run=core,http,util`) and validates each name, reporting unknown test names early.
- Fix platform detection in `xmake test`: the config command now passes `-p` explicitly, fixing "mingw" misdetection when running from Git Bash on Windows.
- Add missing "util" entry to the help text for `--run`.
-rw-r--r--src/zencore/testing.cpp56
-rw-r--r--src/zenutil/include/zenutil/zenserverprocess.h1
-rw-r--r--src/zenutil/zenserverprocess.cpp3
-rw-r--r--xmake.lua304
4 files changed, 310 insertions, 54 deletions
diff --git a/src/zencore/testing.cpp b/src/zencore/testing.cpp
index 936424e0f..ef8fb0480 100644
--- a/src/zencore/testing.cpp
+++ b/src/zencore/testing.cpp
@@ -5,6 +5,12 @@
#if ZEN_WITH_TESTS
+# include <chrono>
+# include <cstdlib>
+# include <cstdio>
+# include <string>
+# include <vector>
+
# include <doctest/doctest.h>
namespace zen::testing {
@@ -21,9 +27,35 @@ struct TestListener : public doctest::IReporter
void report_query(const doctest::QueryData& /*in*/) override {}
- void test_run_start() override {}
+ void test_run_start() override { RunStart = std::chrono::steady_clock::now(); }
+
+ void test_run_end(const doctest::TestRunStats& in) override
+ {
+ auto elapsed = std::chrono::steady_clock::now() - RunStart;
+ double elapsedSeconds = std::chrono::duration_cast<std::chrono::milliseconds>(elapsed).count() / 1000.0;
- void test_run_end(const doctest::TestRunStats& /*in*/) override {}
+ // Write machine-readable summary to file if requested (used by xmake test summary table)
+ const char* summaryFile = std::getenv("ZEN_TEST_SUMMARY_FILE");
+ if (summaryFile && summaryFile[0] != '\0')
+ {
+ if (FILE* f = std::fopen(summaryFile, "w"))
+ {
+ std::fprintf(f,
+ "cases_total=%u\ncases_passed=%u\nassertions_total=%d\nassertions_passed=%d\n"
+ "elapsed_seconds=%.3f\n",
+ in.numTestCasesPassingFilters,
+ in.numTestCasesPassingFilters - in.numTestCasesFailed,
+ in.numAsserts,
+ in.numAsserts - in.numAssertsFailed,
+ elapsedSeconds);
+ for (const auto& failure : FailedTests)
+ {
+ std::fprintf(f, "failed=%s|%s|%u\n", failure.Name.c_str(), failure.File.c_str(), failure.Line);
+ }
+ std::fclose(f);
+ }
+ }
+ }
void test_case_start(const doctest::TestCaseData& in) override
{
@@ -37,7 +69,14 @@ struct TestListener : public doctest::IReporter
ZEN_CONSOLE("{}-------------------------------------------------------------------------------{}", ColorYellow, ColorNone);
}
- void test_case_end(const doctest::CurrentTestCaseStats& /*in*/) override { Current = nullptr; }
+ void test_case_end(const doctest::CurrentTestCaseStats& in) override
+ {
+ if (!in.testCaseSuccess && Current)
+ {
+ FailedTests.push_back({Current->m_name, Current->m_file.c_str(), Current->m_line});
+ }
+ Current = nullptr;
+ }
void test_case_exception(const doctest::TestCaseException& /*in*/) override {}
@@ -57,7 +96,16 @@ struct TestListener : public doctest::IReporter
void test_case_skipped(const doctest::TestCaseData& /*in*/) override {}
- const doctest::TestCaseData* Current = nullptr;
+ const doctest::TestCaseData* Current = nullptr;
+ std::chrono::steady_clock::time_point RunStart = {};
+
+ struct FailedTestInfo
+ {
+ std::string Name;
+ std::string File;
+ unsigned Line;
+ };
+ std::vector<FailedTestInfo> FailedTests;
};
struct TestRunner::Impl
diff --git a/src/zenutil/include/zenutil/zenserverprocess.h b/src/zenutil/include/zenutil/zenserverprocess.h
index d0402640b..b781a03a9 100644
--- a/src/zenutil/include/zenutil/zenserverprocess.h
+++ b/src/zenutil/include/zenutil/zenserverprocess.h
@@ -42,6 +42,7 @@ public:
std::filesystem::path GetTestRootDir(std::string_view Path);
inline bool IsInitialized() const { return m_IsInitialized; }
inline bool IsTestEnvironment() const { return m_IsTestInstance; }
+ inline bool IsHubEnvironment() const { return m_IsHubInstance; }
inline std::string_view GetServerClass() const { return m_ServerClass; }
inline uint16_t GetNewPortNumber() { return m_NextPortNumber.fetch_add(1); }
diff --git a/src/zenutil/zenserverprocess.cpp b/src/zenutil/zenserverprocess.cpp
index ef2a4fda5..579ba450a 100644
--- a/src/zenutil/zenserverprocess.cpp
+++ b/src/zenutil/zenserverprocess.cpp
@@ -934,7 +934,8 @@ ZenServerInstance::SpawnServer(int BasePort, std::string_view AdditionalServerAr
CommandLine << " " << AdditionalServerArgs;
}
- SpawnServerInternal(ChildId, CommandLine, !IsTest, WaitTimeoutMs);
+ const bool OpenConsole = !IsTest && !m_Env.IsHubEnvironment();
+ SpawnServerInternal(ChildId, CommandLine, OpenConsole, WaitTimeoutMs);
}
void
diff --git a/xmake.lua b/xmake.lua
index d49743cb2..d7a905981 100644
--- a/xmake.lua
+++ b/xmake.lua
@@ -344,10 +344,10 @@ task("sln")
task("test")
set_menu {
- usage = "xmake test --run=[core|store|http|server|integration|all]",
+ usage = "xmake test --run=[core|store|http|server|integration|util|remotestore|all] (comma-separated)",
description = "Run Zen tests",
options = {
- {'r', "run", "kv", "all", "Run test(s)", " - all", " - core ", " - remotestore", " - store", " - http", " - server", " - integration"},
+ {'r', "run", "kv", "all", "Run test(s) - comma-separated", " - all", " - core", " - http", " - util", " - store", " - remotestore", " - server", " - integration"},
{'j', "junit", "k", nil, "Enable junit report output"}
}
}
@@ -359,39 +359,61 @@ task("test")
config.load()
local testname = option.get("run")
+
+ -- Ordered list of available tests (order defines execution order)
local available_tests = {
- core = "zencore-test",
- http = "zenhttp-test",
- util = "zenutil-test",
- store = "zenstore-test",
- remotestore = "zenremotestore-test",
- server = "zenserver",
- integration = "zenserver-test"
+ {"core", "zencore-test"},
+ {"http", "zenhttp-test"},
+ {"util", "zenutil-test"},
+ {"store", "zenstore-test"},
+ {"remotestore", "zenremotestore-test"},
+ {"server", "zenserver"},
+ {"integration", "zenserver-test"},
}
- local arch
+ local plat, arch
if is_host("windows") then
+ plat = "windows"
arch = "x64"
- elseif is_arch("arm64") then
- arch = "arm64"
+ elseif is_host("macosx") then
+ plat = "macosx"
+ arch = is_arch("arm64") and "arm64" or "x86_64"
else
+ plat = "linux"
arch = "x86_64"
end
- print(os.exec("xmake config -c -m debug -a "..arch))
+ print(os.exec("xmake config -c -m debug -p "..plat.." -a "..arch))
print(os.exec("xmake"))
+ -- Parse comma-separated test names into a set
+ local requested = {}
+ for token in testname:gmatch("[^,]+") do
+ requested[token:match("^%s*(.-)%s*$")] = true
+ end
+
+ -- Filter to requested test(s)
local tests = {}
- local found_match = false
+ local matched = {}
- for name, test in pairs(available_tests) do
- if name == testname or testname == "all" then
- tests[name] = test
- found_match = true
+ for _, entry in ipairs(available_tests) do
+ local name, target = entry[1], entry[2]
+ if requested["all"] or requested[name] then
+ table.insert(tests, {name = name, target = target})
+ matched[name] = true
+ end
+ end
+
+ -- Check for unknown test names
+ if not requested["all"] then
+ for name, _ in pairs(requested) do
+ if not matched[name] then
+ raise("no tests match specification: '%s'", name)
+ end
end
end
- if not found_match then
+ if #tests == 0 then
raise("no tests match specification: '%s'", testname)
end
@@ -404,39 +426,223 @@ task("test")
os.mkdir(junit_report_dir)
end
- try
- {
- function()
- for name, test in pairs(tests) do
- printf("=== %s ===\n", test)
- local cmd = string.format("xmake run %s", test)
- if name == "server" then
- cmd = string.format("xmake run %s test", test)
- end
- cmd = string.format("%s --duration=true", cmd)
- if use_junit_reporting then
- local target = project.target(test)
- local junit_report_file = path.join(junit_report_dir, string.format("junit-%s-%s-%s.xml", config.plat(), arch, test))
- junit_report_files[test] = junit_report_file
- cmd = string.format("%s --reporters=junit --out=%s", cmd, junit_report_file)
- end
+ -- Results collection for summary table
+ local results = {}
+ local any_failed = false
+
+ -- Format a number with thousands separators (e.g. 31103 -> "31,103")
+ local function format_number(n)
+ local s = tostring(n)
+ local pos = #s % 3
+ if pos == 0 then pos = 3 end
+ local result = s:sub(1, pos)
+ for i = pos + 1, #s, 3 do
+ result = result .. "," .. s:sub(i, i + 2)
+ end
+ return result
+ end
- os.exec(cmd)
+ -- Center a string within a given width
+ local function center_str(s, width)
+ local pad = width - #s
+ local lpad = math.floor(pad / 2)
+ local rpad = pad - lpad
+ return string.rep(" ", lpad) .. s .. string.rep(" ", rpad)
+ end
+
+ -- Left-align a string within a given width (with 1-space left margin)
+ local function left_pad_str(s, width)
+ return " " .. s .. string.rep(" ", width - #s - 1)
+ end
+
+ -- Format elapsed seconds as a human-readable string
+ local function format_time(seconds)
+ if seconds >= 60 then
+ local mins = math.floor(seconds / 60)
+ local secs = seconds - mins * 60
+ return string.format("%dm %04.1fs", mins, secs)
+ else
+ return string.format("%.1fs", seconds)
+ end
+ end
+
+ -- Parse test summary file written by TestListener
+ local function parse_summary_file(filepath)
+ if not os.isfile(filepath) then return nil end
+ local content = io.readfile(filepath)
+ if not content then return nil end
+ local ct = content:match("cases_total=(%d+)")
+ local cp = content:match("cases_passed=(%d+)")
+ local at = content:match("assertions_total=(%d+)")
+ local ap = content:match("assertions_passed=(%d+)")
+ if ct then
+ local failures = {}
+ for name, file, line in content:gmatch("failed=([^|\n]+)|([^|\n]+)|(%d+)") do
+ table.insert(failures, {name = name, file = file, line = tonumber(line)})
end
- end,
-
- finally
- {
- function (ok, errors)
- for test, junit_report_file in pairs(junit_report_files) do
- printf("=== report - %s ===\n", test)
- local data = io.readfile(junit_report_file)
- print(data)
+ local es = content:match("elapsed_seconds=([%d%.]+)")
+ return {
+ cases_total = tonumber(ct),
+ cases_passed = tonumber(cp) or 0,
+ asserts_total = tonumber(at) or 0,
+ asserts_passed = tonumber(ap) or 0,
+ elapsed_seconds = tonumber(es) or 0,
+ failures = failures
+ }
+ end
+ return nil
+ end
+
+ -- Temp directory for summary files
+ local summary_dir = path.join(os.tmpdir(), "zen-test-summary")
+ os.mkdir(summary_dir)
+
+ -- Run each test suite and collect results
+ for _, entry in ipairs(tests) do
+ local name, target = entry.name, entry.target
+ printf("=== %s ===\n", target)
+
+ local suite_name = target
+ if name == "server" then
+ suite_name = "zenserver (test)"
+ end
+
+ local cmd = string.format("xmake run %s", target)
+ if name == "server" then
+ cmd = string.format("xmake run %s test", target)
+ end
+ cmd = string.format("%s --duration=true", cmd)
+
+ if use_junit_reporting then
+ local junit_report_file = path.join(junit_report_dir, string.format("junit-%s-%s-%s.xml", config.plat(), arch, target))
+ junit_report_files[target] = junit_report_file
+ cmd = string.format("%s --reporters=junit --out=%s", cmd, junit_report_file)
+ end
+
+ -- Tell TestListener where to write the summary
+ local summary_file = path.join(summary_dir, target .. ".txt")
+ os.setenv("ZEN_TEST_SUMMARY_FILE", summary_file)
+
+ -- Run test with real-time streaming output
+ local test_ok = true
+ try {
+ function()
+ os.exec(cmd)
+ end,
+ catch {
+ function(errors)
+ test_ok = false
end
- if (errors) then
- raise(errors)
+ }
+ }
+
+ -- Read summary written by TestListener
+ local summary = parse_summary_file(summary_file)
+ os.tryrm(summary_file)
+
+ if not test_ok then
+ any_failed = true
+ end
+
+ table.insert(results, {
+ suite = suite_name,
+ cases_passed = summary and summary.cases_passed or 0,
+ cases_total = summary and summary.cases_total or 0,
+ asserts_passed = summary and summary.asserts_passed or 0,
+ asserts_total = summary and summary.asserts_total or 0,
+ elapsed_seconds = summary and summary.elapsed_seconds or 0,
+ failures = summary and summary.failures or {},
+ passed = test_ok
+ })
+ end
+
+ -- Clean up
+ os.setenv("ZEN_TEST_SUMMARY_FILE", "")
+ os.tryrm(summary_dir)
+
+ -- Print JUnit reports if requested
+ for test, junit_report_file in pairs(junit_report_files) do
+ printf("=== report - %s ===\n", test)
+ if os.isfile(junit_report_file) then
+ local data = io.readfile(junit_report_file)
+ if data then
+ print(data)
+ end
+ end
+ end
+
+ -- Print summary table
+ if #results > 0 then
+ -- Calculate column widths based on content
+ local col_suite = #("Suite")
+ local col_cases = #("Cases")
+ local col_asserts = #("Assertions")
+ local col_time = #("Time")
+ local col_status = #("Status")
+
+ for _, r in ipairs(results) do
+ col_suite = math.max(col_suite, #r.suite)
+ local cases_str = format_number(r.cases_passed) .. "/" .. format_number(r.cases_total)
+ col_cases = math.max(col_cases, #cases_str)
+ local asserts_str = format_number(r.asserts_passed) .. "/" .. format_number(r.asserts_total)
+ col_asserts = math.max(col_asserts, #asserts_str)
+ col_time = math.max(col_time, #format_time(r.elapsed_seconds))
+ local status_str = r.passed and "SUCCESS" or "FAILED"
+ col_status = math.max(col_status, #status_str)
+ end
+
+ -- Add padding (1 space each side)
+ col_suite = col_suite + 2
+ col_cases = col_cases + 2
+ col_asserts = col_asserts + 2
+ col_time = col_time + 2
+ col_status = col_status + 2
+
+ -- Build horizontal border segments
+ local h_suite = string.rep("-", col_suite)
+ local h_cases = string.rep("-", col_cases)
+ local h_asserts = string.rep("-", col_asserts)
+ local h_time = string.rep("-", col_time)
+ local h_status = string.rep("-", col_status)
+
+ local top = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+"
+ local mid = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+"
+ local bottom = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+"
+ local vbar = "|"
+
+ local header_msg = any_failed and "Some tests failed:" or "All tests passed:"
+ printf("\n* %s\n", header_msg)
+ printf(" %s\n", top)
+ printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar, center_str("Suite", col_suite), vbar, center_str("Cases", col_cases), vbar, center_str("Assertions", col_asserts), vbar, center_str("Time", col_time), vbar, center_str("Status", col_status), vbar)
+
+ for _, r in ipairs(results) do
+ printf(" %s\n", mid)
+ local cases_str = format_number(r.cases_passed) .. "/" .. format_number(r.cases_total)
+ local asserts_str = format_number(r.asserts_passed) .. "/" .. format_number(r.asserts_total)
+ local time_str = format_time(r.elapsed_seconds)
+ local status_str = r.passed and "SUCCESS" or "FAILED"
+ printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar, left_pad_str(r.suite, col_suite), vbar, left_pad_str(cases_str, col_cases), vbar, left_pad_str(asserts_str, col_asserts), vbar, left_pad_str(time_str, col_time), vbar, left_pad_str(status_str, col_status), vbar)
+ end
+ printf(" %s\n", bottom)
+ end
+
+ -- Print list of individual failing tests
+ if any_failed then
+ printf("\n Failures:\n")
+ for _, r in ipairs(results) do
+ if #r.failures > 0 then
+ printf(" -- %s --\n", r.suite)
+ for _, f in ipairs(r.failures) do
+ printf(" FAILED: %s (%s:%d)\n", f.name, f.file, f.line)
end
+ elseif not r.passed then
+ printf(" -- %s --\n", r.suite)
+ printf(" (test binary exited with error, no failure details available)\n")
end
- }
- }
+ end
+ end
+
+ if any_failed then
+ raise("one or more test suites failed")
+ end
end)