about summary refs log tree commit diff
diff options
context:
space:
mode:
authorStefan Boberg <[email protected]>2026-02-28 15:36:13 +0100
committerGitHub Enterprise <[email protected]>2026-02-28 15:36:13 +0100
commitc7e0efb9c12f4607d4bc6a844a3e5bd3272bd839 (patch)
tree47c7edc37dcd54f06be04b40f96e6edc24d7f4ab
parentadd multirange requests to blob store (#795) (diff)
downloadzen-c7e0efb9c12f4607d4bc6a844a3e5bd3272bd839.tar.xz
zen-c7e0efb9c12f4607d4bc6a844a3e5bd3272bd839.zip
test running / reporting improvements (#797)
**CI/CD improvements (validate.yml):**
- Add test reporter (`ue-foundation/test-reporter@v2`) for all three platforms, rendering JUnit test results directly in PR check runs
- Add "Trust workspace" step on Windows to fix git safe.directory ownership issue with self-hosted runners
- Clean stale report files before each test run to prevent false failures from leftover XML
- Broaden `paths-ignore` to skip builds for non-code changes (`*.md`, `LICENSE`, `.gitignore`, `docs/**`)

**Test improvements:**
- Convert `CHECK` to `REQUIRE` in several test suites (projectstore, integration, http) for fail-fast behavior
- Mark some tests with `doctest::skip()` for selective execution
- Skip httpclient transport tests pending investigation
- Add `--noskip` option to `xmake test` task
- Add `--repeat=<N>` option to `xmake test` task, to run tests repeatedly N times or until there is a failure

**xmake test output improvements:**
- Add totals row to test summary table
- Right-justify numeric columns in summary table
-rw-r--r--.github/workflows/create_release.yml8
-rw-r--r--.github/workflows/validate.yml85
-rw-r--r--src/zenhttp/httpclient_test.cpp4
-rw-r--r--src/zenserver-test/buildstore-tests.cpp16
-rw-r--r--src/zenserver-test/cache-tests.cpp10
-rw-r--r--src/zenserver-test/hub-tests.cpp2
-rw-r--r--src/zenserver-test/projectstore-tests.cpp34
-rw-r--r--src/zenserver-test/workspace-tests.cpp4
-rw-r--r--src/zenstore/cache/structuredcachestore.cpp2
-rw-r--r--xmake.lua162
10 files changed, 211 insertions, 116 deletions
diff --git a/.github/workflows/create_release.yml b/.github/workflows/create_release.yml
index f095f3d21..203588d24 100644
--- a/.github/workflows/create_release.yml
+++ b/.github/workflows/create_release.yml
@@ -15,7 +15,7 @@ jobs:
timeout-minutes: 25
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Setup xmake
uses: ue-foundation/[email protected]
@@ -46,7 +46,7 @@ jobs:
timeout-minutes: 25
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Install UE Toolchain
run: |
@@ -89,7 +89,7 @@ jobs:
timeout-minutes: 25
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Setup xmake
uses: ue-foundation/[email protected]
@@ -133,7 +133,7 @@ jobs:
needs: [bundle-linux, bundle-macos, bundle-windows]
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Read VERSION.txt
id: read_version
diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml
index 17e031ba0..bf7b1c853 100644
--- a/.github/workflows/validate.yml
+++ b/.github/workflows/validate.yml
@@ -4,6 +4,10 @@ env:
WINDOWS_SDK_VERSION: 22621
XMAKE_GLOBALDIR: ${{ github.workspace }}/../.xmake_shared
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: ${{ github.ref_name != 'main' }}
+
on:
pull_request:
types:
@@ -13,8 +17,11 @@ on:
branches:
- 'main'
paths-ignore:
+ - '*.md'
+ - 'LICENSE'
+ - '.gitignore'
+ - 'docs/**'
- 'VERSION.txt'
- - 'CHANGELOG.md'
- '.github/workflows/create_release.yml'
- '.github/workflows/mirror_releases.yml'
- '.github/workflows/mirror.yml'
@@ -22,31 +29,22 @@ on:
branches:
- 'main'
paths-ignore:
+ - '*.md'
+ - 'LICENSE'
+ - '.gitignore'
+ - 'docs/**'
- 'VERSION.txt'
- - 'CHANGELOG.md'
- '.github/workflows/create_release.yml'
- '.github/workflows/mirror_releases.yml'
- '.github/workflows/mirror.yml'
jobs:
- cancel-old-build:
- name: Cancel previous builds
- runs-on: [linux, x64, zen]
-
- steps:
- - name: Cancel Previous Runs
- if: ${{ github.ref_name != 'main'}}
- uses: ue-foundation/[email protected]
- with:
- access_token: ${{ github.token }}
-
clang-format:
- needs: cancel-old-build
name: Check clang-format
runs-on: [linux, x64, zen]
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: clang-format
uses: ue-foundation/clang-format-action@epic-batching-v1
@@ -55,7 +53,6 @@ jobs:
check-path: 'src'
windows-build:
- needs: cancel-old-build
name: Build & Test Windows
runs-on: [windows, x64, zen]
timeout-minutes: 25
@@ -69,7 +66,10 @@ jobs:
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
+
+ - name: Trust workspace
+ run: git config --global --add safe.directory "${{ github.workspace }}"
- name: Setup xmake
uses: ue-foundation/[email protected]
@@ -80,6 +80,10 @@ jobs:
run: |
xmake config -vD -y -m ${{ matrix.config }} --arch=${{ matrix.arch }} --zensentry=yes
+ - name: Clean reports
+ if: ${{ matrix.config == 'debug' }}
+ run: if (Test-Path build/reports) { Remove-Item -Recurse -Force build/reports }
+
- name: Build & Test
if: ${{ matrix.config == 'debug' }}
run: |
@@ -92,9 +96,18 @@ jobs:
name: reports-win64
path: build/reports/*.xml
+ - name: Test Report
+ if: ${{ (failure() || success()) && (matrix.config == 'debug') }}
+ uses: ue-foundation/test-reporter@v2
+ with:
+ name: Test Results (win64)
+ path: build/reports/*.xml
+ reporter: java-junit
+ list-tests: 'failed'
+
- name: Bundle
if: ${{ matrix.config == 'release' }}
- run: |
+ run: |
xmake bundle -v -y --codesignidentity="Epic Games"
- name: Upload zenserver-win64
@@ -105,7 +118,6 @@ jobs:
path: build/zenserver-win64.zip
linux-build:
- needs: cancel-old-build
name: Build & Test Linux
runs-on: [linux, x64, zen]
timeout-minutes: 25
@@ -119,7 +131,7 @@ jobs:
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install UE Toolchain
run: |
@@ -135,6 +147,11 @@ jobs:
run: |
./scripts/ue_build_linux/ue_build.sh ./.tmp-ue-toolchain xmake config -v -y -m ${{ matrix.config }} --arch=${{ matrix.arch }} --zensentry=yes
+ - name: Clean reports
+ if: ${{ matrix.config == 'debug' }}
+ shell: bash
+ run: rm -rf build/reports
+
- name: Build & Test
if: ${{ matrix.config == 'debug' }}
run: |
@@ -147,6 +164,15 @@ jobs:
name: reports-linux
path: build/reports/*.xml
+ - name: Test Report
+ if: ${{ (failure() || success()) && (matrix.config == 'debug') }}
+ uses: ue-foundation/test-reporter@v2
+ with:
+ name: Test Results (linux)
+ path: build/reports/*.xml
+ reporter: java-junit
+ list-tests: 'failed'
+
- name: Bundle
if: ${{ matrix.config == 'release' }}
run: |
@@ -160,7 +186,6 @@ jobs:
path: build/zenserver-linux.zip
macos-build:
- needs: cancel-old-build
name: Build & Test MacOS
runs-on: [macos, x64, zen]
timeout-minutes: 25
@@ -174,7 +199,7 @@ jobs:
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup xmake
uses: ue-foundation/[email protected]
@@ -185,6 +210,11 @@ jobs:
run: |
xmake config -v -y -m ${{ matrix.config }} --arch=${{ matrix.arch }} --zensentry=yes
+ - name: Clean reports
+ if: ${{ matrix.config == 'debug' }}
+ shell: bash
+ run: rm -rf build/reports
+
- name: Build & Test
if: ${{ matrix.config == 'debug' }}
run: |
@@ -197,9 +227,18 @@ jobs:
name: reports-macos
path: build/reports/*.xml
+ - name: Test Report
+ if: ${{ (failure() || success()) && (matrix.config == 'debug') }}
+ uses: ue-foundation/test-reporter@v2
+ with:
+ name: Test Results (macos)
+ path: build/reports/*.xml
+ reporter: java-junit
+ list-tests: 'failed'
+
- name: Bundle
if: ${{ matrix.config == 'release' }}
- run: |
+ run: |
xmake bundle -v -y --codesignidentity="Developer ID Application"
- name: Upload zenserver-macos
diff --git a/src/zenhttp/httpclient_test.cpp b/src/zenhttp/httpclient_test.cpp
index 509b56371..91b1a3414 100644
--- a/src/zenhttp/httpclient_test.cpp
+++ b/src/zenhttp/httpclient_test.cpp
@@ -1079,7 +1079,7 @@ struct FaultTcpServer
}
};
-TEST_CASE("httpclient.transport-faults")
+TEST_CASE("httpclient.transport-faults" * doctest::skip())
{
SUBCASE("connection reset before response")
{
@@ -1217,7 +1217,7 @@ TEST_CASE("httpclient.transport-faults")
}
}
-TEST_CASE("httpclient.transport-faults-post")
+TEST_CASE("httpclient.transport-faults-post" * doctest::skip())
{
constexpr size_t kPostBodySize = 256 * 1024;
diff --git a/src/zenserver-test/buildstore-tests.cpp b/src/zenserver-test/buildstore-tests.cpp
index ef48b2362..7cd31db06 100644
--- a/src/zenserver-test/buildstore-tests.cpp
+++ b/src/zenserver-test/buildstore-tests.cpp
@@ -389,7 +389,7 @@ TEST_CASE("buildstore.metadata")
HttpClient::Response Result = Client.Post(fmt::format("{}/{}/{}/blobs/getBlobMetadata", Namespace, Bucket, BuildId),
Payload,
HttpClient::Accept(ZenContentType::kCbObject));
- CHECK(Result);
+ REQUIRE(Result);
std::vector<CbObject> ResultMetadatas;
@@ -570,7 +570,7 @@ TEST_CASE("buildstore.cache")
{
std::vector<BuildStorageCache::BlobExistsResult> Exists = Cache->BlobsExists(BuildId, BlobHashes);
- CHECK(Exists.size() == BlobHashes.size());
+ REQUIRE(Exists.size() == BlobHashes.size());
for (size_t I = 0; I < BlobCount; I++)
{
CHECK(Exists[I].HasBody);
@@ -609,7 +609,7 @@ TEST_CASE("buildstore.cache")
{
std::vector<BuildStorageCache::BlobExistsResult> Exists = Cache->BlobsExists(BuildId, BlobHashes);
- CHECK(Exists.size() == BlobHashes.size());
+ REQUIRE(Exists.size() == BlobHashes.size());
for (size_t I = 0; I < BlobCount; I++)
{
CHECK(Exists[I].HasBody);
@@ -617,7 +617,7 @@ TEST_CASE("buildstore.cache")
}
std::vector<CbObject> FetchedMetadatas = Cache->GetBlobMetadatas(BuildId, BlobHashes);
- CHECK_EQ(BlobCount, FetchedMetadatas.size());
+ REQUIRE_EQ(BlobCount, FetchedMetadatas.size());
for (size_t I = 0; I < BlobCount; I++)
{
@@ -638,7 +638,7 @@ TEST_CASE("buildstore.cache")
{
std::vector<BuildStorageCache::BlobExistsResult> Exists = Cache->BlobsExists(BuildId, BlobHashes);
- CHECK(Exists.size() == BlobHashes.size());
+ REQUIRE(Exists.size() == BlobHashes.size());
for (size_t I = 0; I < BlobCount * 2; I++)
{
CHECK(Exists[I].HasBody);
@@ -649,7 +649,7 @@ TEST_CASE("buildstore.cache")
CHECK_EQ(BlobCount, MetaDatas.size());
std::vector<CbObject> FetchedMetadatas = Cache->GetBlobMetadatas(BuildId, BlobHashes);
- CHECK_EQ(BlobCount, FetchedMetadatas.size());
+ REQUIRE_EQ(BlobCount, FetchedMetadatas.size());
for (size_t I = 0; I < BlobCount; I++)
{
@@ -672,7 +672,7 @@ TEST_CASE("buildstore.cache")
CreateZenBuildStorageCache(Client, Stats, Namespace, Bucket, TempDir, GetTinyWorkerPool(EWorkloadType::Background)));
std::vector<BuildStorageCache::BlobExistsResult> Exists = Cache->BlobsExists(BuildId, BlobHashes);
- CHECK(Exists.size() == BlobHashes.size());
+ REQUIRE(Exists.size() == BlobHashes.size());
for (size_t I = 0; I < BlobCount * 2; I++)
{
CHECK(Exists[I].HasBody);
@@ -691,7 +691,7 @@ TEST_CASE("buildstore.cache")
CHECK_EQ(BlobCount, MetaDatas.size());
std::vector<CbObject> FetchedMetadatas = Cache->GetBlobMetadatas(BuildId, BlobHashes);
- CHECK_EQ(BlobCount, FetchedMetadatas.size());
+ REQUIRE_EQ(BlobCount, FetchedMetadatas.size());
for (size_t I = 0; I < BlobCount; I++)
{
diff --git a/src/zenserver-test/cache-tests.cpp b/src/zenserver-test/cache-tests.cpp
index 0272d3797..745a89253 100644
--- a/src/zenserver-test/cache-tests.cpp
+++ b/src/zenserver-test/cache-tests.cpp
@@ -145,7 +145,7 @@ TEST_CASE("zcache.cbpackage")
for (const zen::CbAttachment& LhsAttachment : LhsAttachments)
{
const zen::CbAttachment* RhsAttachment = Rhs.FindAttachment(LhsAttachment.GetHash());
- CHECK(RhsAttachment);
+ REQUIRE(RhsAttachment);
zen::SharedBuffer LhsBuffer = LhsAttachment.AsCompressedBinary().Decompress();
CHECK(!LhsBuffer.IsNull());
@@ -1373,14 +1373,8 @@ TEST_CASE("zcache.rpc")
}
}
-TEST_CASE("zcache.failing.upstream")
+TEST_CASE("zcache.failing.upstream" * doctest::skip())
{
- // This is an exploratory test that takes a long time to run, so lets skip it by default
- if (true)
- {
- return;
- }
-
using namespace std::literals;
using namespace utils;
diff --git a/src/zenserver-test/hub-tests.cpp b/src/zenserver-test/hub-tests.cpp
index 42a5dcae4..bd85a5020 100644
--- a/src/zenserver-test/hub-tests.cpp
+++ b/src/zenserver-test/hub-tests.cpp
@@ -232,7 +232,7 @@ TEST_CASE("hub.lifecycle.children")
TEST_SUITE_END();
-TEST_CASE("hub.consul.lifecycle")
+TEST_CASE("hub.consul.lifecycle" * doctest::skip())
{
zen::consul::ConsulProcess ConsulProc;
ConsulProc.SpawnConsulAgent();
diff --git a/src/zenserver-test/projectstore-tests.cpp b/src/zenserver-test/projectstore-tests.cpp
index 735aef159..487832405 100644
--- a/src/zenserver-test/projectstore-tests.cpp
+++ b/src/zenserver-test/projectstore-tests.cpp
@@ -71,7 +71,7 @@ TEST_CASE("project.basic")
{
auto Response = Http.Get("/prj/test"sv);
- CHECK(Response.StatusCode == HttpResponseCode::OK);
+ REQUIRE(Response.StatusCode == HttpResponseCode::OK);
CbObject ResponseObject = Response.AsObject();
@@ -92,7 +92,7 @@ TEST_CASE("project.basic")
{
auto Response = Http.Get(""sv);
- CHECK(Response.StatusCode == HttpResponseCode::OK);
+ REQUIRE(Response.StatusCode == HttpResponseCode::OK);
CbObject ResponseObject = Response.AsObject();
@@ -213,7 +213,7 @@ TEST_CASE("project.basic")
auto Response = Http.Get(ChunkGetUri);
REQUIRE(Response);
- CHECK(Response.StatusCode == HttpResponseCode::OK);
+ REQUIRE(Response.StatusCode == HttpResponseCode::OK);
IoBuffer Data = Response.ResponsePayload;
IoBuffer ReferenceData = IoBufferBuilder::MakeFromFile(RootPath / BinPath);
@@ -235,13 +235,13 @@ TEST_CASE("project.basic")
auto Response = Http.Get(ChunkGetUri, {{"Accept-Type", "application/x-ue-comp"}});
REQUIRE(Response);
- CHECK(Response.StatusCode == HttpResponseCode::OK);
+ REQUIRE(Response.StatusCode == HttpResponseCode::OK);
IoBuffer Data = Response.ResponsePayload;
IoHash RawHash;
uint64_t RawSize;
CompressedBuffer Compressed = CompressedBuffer::FromCompressed(SharedBuffer(Data), RawHash, RawSize);
- CHECK(Compressed);
+ REQUIRE(Compressed);
IoBuffer DataDecompressed = Compressed.Decompress().AsIoBuffer();
IoBuffer ReferenceData = IoBufferBuilder::MakeFromFile(RootPath / BinPath);
CHECK(RawSize == ReferenceData.GetSize());
@@ -436,13 +436,13 @@ TEST_CASE("project.remote")
HttpClient Http{UrlBase};
HttpClient::Response Response = Http.Post(fmt::format("/prj/{}", ProjectName), ProjectPayload);
- CHECK(Response);
+ REQUIRE(Response);
};
auto MakeOplog = [](std::string_view UrlBase, std::string_view ProjectName, std::string_view OplogName) {
HttpClient Http{UrlBase};
HttpClient::Response Response = Http.Post(fmt::format("/prj/{}/oplog/{}", ProjectName, OplogName), IoBuffer{});
- CHECK(Response);
+ REQUIRE(Response);
};
auto MakeOp = [](std::string_view UrlBase, std::string_view ProjectName, std::string_view OplogName, const CbPackage& OpPackage) {
@@ -453,7 +453,7 @@ TEST_CASE("project.remote")
HttpClient Http{UrlBase};
HttpClient::Response Response = Http.Post(fmt::format("/prj/{}/oplog/{}/new", ProjectName, OplogName), Body);
- CHECK(Response);
+ REQUIRE(Response);
};
MakeProject(Servers.GetInstance(0).GetBaseUri(), "proj0");
@@ -504,7 +504,7 @@ TEST_CASE("project.remote")
HttpClient::Response Response =
Http.Post(fmt::format("/prj/{}/oplog/{}/rpc", Project, Oplog), Payload, {{"Accept", "application/x-ue-cbpkg"}});
- CHECK(Response);
+ REQUIRE(Response);
CbPackage ResponsePackage = ParsePackageMessage(Response.ResponsePayload);
CHECK(ResponsePackage.GetAttachments().size() == AttachmentHashes.size());
for (auto A : ResponsePackage.GetAttachments())
@@ -519,7 +519,7 @@ TEST_CASE("project.remote")
HttpClient Http{Servers.GetInstance(ServerIndex).GetBaseUri()};
HttpClient::Response Response = Http.Get(fmt::format("/prj/{}/oplog/{}/entries", Project, Oplog));
- CHECK(Response);
+ REQUIRE(Response);
IoBuffer Payload(Response.ResponsePayload);
CbObject OplogResonse = LoadCompactBinaryObject(Payload);
@@ -541,7 +541,7 @@ TEST_CASE("project.remote")
auto HttpWaitForCompletion = [](ZenServerInstance& Server, const HttpClient::Response& Response) {
REQUIRE(Response);
const uint64_t JobId = ParseInt<uint64_t>(Response.AsText()).value_or(0);
- CHECK(JobId != 0);
+ REQUIRE(JobId != 0);
HttpClient Http{Server.GetBaseUri()};
@@ -549,10 +549,10 @@ TEST_CASE("project.remote")
{
HttpClient::Response StatusResponse =
Http.Get(fmt::format("/admin/jobs/{}", JobId), {{"Accept", ToString(ZenContentType::kCbObject)}});
- CHECK(StatusResponse);
+ REQUIRE(StatusResponse);
CbObject ResponseObject = StatusResponse.AsObject();
std::string_view Status = ResponseObject["Status"sv].AsString();
- CHECK(Status != "Aborted"sv);
+ REQUIRE(Status != "Aborted"sv);
if (Status == "Complete"sv)
{
return;
@@ -887,16 +887,16 @@ TEST_CASE("project.rpcappendop")
Project.AddString("project"sv, ""sv);
Project.AddString("projectfile"sv, ""sv);
HttpClient::Response Response = Client.Post(fmt::format("/prj/{}", ProjectName), Project.Save());
- CHECK_MESSAGE(Response.IsSuccess(), Response.ErrorMessage(""));
+ REQUIRE_MESSAGE(Response.IsSuccess(), Response.ErrorMessage(""));
};
auto MakeOplog = [](HttpClient& Client, std::string_view ProjectName, std::string_view OplogName) {
HttpClient::Response Response = Client.Post(fmt::format("/prj/{}/oplog/{}", ProjectName, OplogName));
- CHECK_MESSAGE(Response.IsSuccess(), Response.ErrorMessage(""));
+ REQUIRE_MESSAGE(Response.IsSuccess(), Response.ErrorMessage(""));
};
auto GetOplog = [](HttpClient& Client, std::string_view ProjectName, std::string_view OplogName) {
HttpClient::Response Response = Client.Get(fmt::format("/prj/{}/oplog/{}", ProjectName, OplogName));
- CHECK_MESSAGE(Response.IsSuccess(), Response.ErrorMessage(""));
+ REQUIRE_MESSAGE(Response.IsSuccess(), Response.ErrorMessage(""));
return Response.AsObject();
};
@@ -910,7 +910,7 @@ TEST_CASE("project.rpcappendop")
}
Request.EndArray(); // "ops"
HttpClient::Response Response = Client.Post(fmt::format("/prj/{}/oplog/{}/rpc", ProjectName, OplogName), Request.Save());
- CHECK_MESSAGE(Response.IsSuccess(), Response.ErrorMessage(""));
+ REQUIRE_MESSAGE(Response.IsSuccess(), Response.ErrorMessage(""));
CbObjectView ResponsePayload = Response.AsPackage().GetObject();
CbArrayView NeedArray = ResponsePayload["need"sv].AsArrayView();
diff --git a/src/zenserver-test/workspace-tests.cpp b/src/zenserver-test/workspace-tests.cpp
index 7595d790a..aedadf0c3 100644
--- a/src/zenserver-test/workspace-tests.cpp
+++ b/src/zenserver-test/workspace-tests.cpp
@@ -514,9 +514,9 @@ TEST_CASE("workspaces.share")
}
IoBuffer BatchResponse =
Client.Post(fmt::format("/ws/{}/{}/batch", WorkspaceId, ShareId), BuildChunkBatchRequest(BatchEntries)).ResponsePayload;
- CHECK(BatchResponse);
+ REQUIRE(BatchResponse);
std::vector<IoBuffer> BatchResult = ParseChunkBatchResponse(BatchResponse);
- CHECK(BatchResult.size() == Files.size());
+ REQUIRE(BatchResult.size() == Files.size());
for (const RequestChunkEntry& Request : BatchEntries)
{
IoBuffer Result = BatchResult[Request.CorrelationId];
diff --git a/src/zenstore/cache/structuredcachestore.cpp b/src/zenstore/cache/structuredcachestore.cpp
index 4e8475293..d8a5755c5 100644
--- a/src/zenstore/cache/structuredcachestore.cpp
+++ b/src/zenstore/cache/structuredcachestore.cpp
@@ -1551,7 +1551,7 @@ TEST_CASE("cachestore.size")
}
}
-TEST_CASE("cachestore.threadedinsert") // * doctest::skip(true))
+TEST_CASE("cachestore.threadedinsert" * doctest::skip())
{
// for (uint32_t i = 0; i < 100; ++i)
{
diff --git a/xmake.lua b/xmake.lua
index d7a905981..1416fbb6a 100644
--- a/xmake.lua
+++ b/xmake.lua
@@ -348,7 +348,9 @@ task("test")
description = "Run Zen tests",
options = {
{'r', "run", "kv", "all", "Run test(s) - comma-separated", " - all", " - core", " - http", " - util", " - store", " - remotestore", " - server", " - integration"},
- {'j', "junit", "k", nil, "Enable junit report output"}
+ {'j', "junit", "k", nil, "Enable junit report output"},
+ {'n', "noskip", "k", nil, "Run skipped tests (passes --no-skip to doctest)"},
+ {nil, "repeat", "kv", nil, "Repeat tests N times (stops on first failure)"}
}
}
on_run(function()
@@ -418,6 +420,8 @@ task("test")
end
local use_junit_reporting = option.get("junit")
+ local use_noskip = option.get("noskip")
+ local repeat_count = tonumber(option.get("repeat")) or 1
local junit_report_files = {}
local junit_report_dir
@@ -451,10 +455,15 @@ task("test")
end
-- Left-align a string within a given width (with 1-space left margin)
- local function left_pad_str(s, width)
+ local function left_align_str(s, width)
return " " .. s .. string.rep(" ", width - #s - 1)
end
+ -- Right-align a string within a given width (with 1-space right margin)
+ local function right_align_str(s, width)
+ return string.rep(" ", width - #s - 1) .. s .. " "
+ end
+
-- Format elapsed seconds as a human-readable string
local function format_time(seconds)
if seconds >= 60 then
@@ -498,62 +507,78 @@ task("test")
os.mkdir(summary_dir)
-- Run each test suite and collect results
- for _, entry in ipairs(tests) do
- local name, target = entry.name, entry.target
- printf("=== %s ===\n", target)
-
- local suite_name = target
- if name == "server" then
- suite_name = "zenserver (test)"
+ for iteration = 1, repeat_count do
+ if repeat_count > 1 then
+ printf("\n*** Iteration %d/%d ***\n", iteration, repeat_count)
end
- local cmd = string.format("xmake run %s", target)
- if name == "server" then
- cmd = string.format("xmake run %s test", target)
- end
- cmd = string.format("%s --duration=true", cmd)
+ for _, entry in ipairs(tests) do
+ local name, target = entry.name, entry.target
+ printf("=== %s ===\n", target)
- if use_junit_reporting then
- local junit_report_file = path.join(junit_report_dir, string.format("junit-%s-%s-%s.xml", config.plat(), arch, target))
- junit_report_files[target] = junit_report_file
- cmd = string.format("%s --reporters=junit --out=%s", cmd, junit_report_file)
- end
+ local suite_name = target
+ if name == "server" then
+ suite_name = "zenserver (test)"
+ end
- -- Tell TestListener where to write the summary
- local summary_file = path.join(summary_dir, target .. ".txt")
- os.setenv("ZEN_TEST_SUMMARY_FILE", summary_file)
-
- -- Run test with real-time streaming output
- local test_ok = true
- try {
- function()
- os.exec(cmd)
- end,
- catch {
- function(errors)
- test_ok = false
- end
+ local cmd = string.format("xmake run %s", target)
+ if name == "server" then
+ cmd = string.format("xmake run %s test", target)
+ end
+ cmd = string.format("%s --duration=true", cmd)
+
+ if use_junit_reporting then
+ local junit_report_file = path.join(junit_report_dir, string.format("junit-%s-%s-%s.xml", config.plat(), arch, target))
+ junit_report_files[target] = junit_report_file
+ cmd = string.format("%s --reporters=junit --out=%s", cmd, junit_report_file)
+ end
+ if use_noskip then
+ cmd = string.format("%s --no-skip", cmd)
+ end
+
+ -- Tell TestListener where to write the summary
+ local summary_file = path.join(summary_dir, target .. ".txt")
+ os.setenv("ZEN_TEST_SUMMARY_FILE", summary_file)
+
+ -- Run test with real-time streaming output
+ local test_ok = true
+ try {
+ function()
+ os.exec(cmd)
+ end,
+ catch {
+ function(errors)
+ test_ok = false
+ end
+ }
}
- }
- -- Read summary written by TestListener
- local summary = parse_summary_file(summary_file)
- os.tryrm(summary_file)
+ -- Read summary written by TestListener
+ local summary = parse_summary_file(summary_file)
+ os.tryrm(summary_file)
+
+ if not test_ok then
+ any_failed = true
+ end
- if not test_ok then
- any_failed = true
+ table.insert(results, {
+ suite = suite_name,
+ cases_passed = summary and summary.cases_passed or 0,
+ cases_total = summary and summary.cases_total or 0,
+ asserts_passed = summary and summary.asserts_passed or 0,
+ asserts_total = summary and summary.asserts_total or 0,
+ elapsed_seconds = summary and summary.elapsed_seconds or 0,
+ failures = summary and summary.failures or {},
+ passed = test_ok
+ })
end
- table.insert(results, {
- suite = suite_name,
- cases_passed = summary and summary.cases_passed or 0,
- cases_total = summary and summary.cases_total or 0,
- asserts_passed = summary and summary.asserts_passed or 0,
- asserts_total = summary and summary.asserts_total or 0,
- elapsed_seconds = summary and summary.elapsed_seconds or 0,
- failures = summary and summary.failures or {},
- passed = test_ok
- })
+ if any_failed then
+ if repeat_count > 1 then
+ printf("\n*** Failure detected on iteration %d, stopping ***\n", iteration)
+ end
+ break
+ end
end
-- Clean up
@@ -580,6 +605,13 @@ task("test")
local col_time = #("Time")
local col_status = #("Status")
+ -- Compute totals
+ local total_cases_passed = 0
+ local total_cases_total = 0
+ local total_asserts_passed = 0
+ local total_asserts_total = 0
+ local total_elapsed = 0
+
for _, r in ipairs(results) do
col_suite = math.max(col_suite, #r.suite)
local cases_str = format_number(r.cases_passed) .. "/" .. format_number(r.cases_total)
@@ -589,8 +621,20 @@ task("test")
col_time = math.max(col_time, #format_time(r.elapsed_seconds))
local status_str = r.passed and "SUCCESS" or "FAILED"
col_status = math.max(col_status, #status_str)
+
+ total_cases_passed = total_cases_passed + r.cases_passed
+ total_cases_total = total_cases_total + r.cases_total
+ total_asserts_passed = total_asserts_passed + r.asserts_passed
+ total_asserts_total = total_asserts_total + r.asserts_total
+ total_elapsed = total_elapsed + r.elapsed_seconds
end
+ -- Account for totals row in column widths
+ col_suite = math.max(col_suite, #("Total"))
+ col_cases = math.max(col_cases, #(format_number(total_cases_passed) .. "/" .. format_number(total_cases_total)))
+ col_asserts = math.max(col_asserts, #(format_number(total_asserts_passed) .. "/" .. format_number(total_asserts_total)))
+ col_time = math.max(col_time, #format_time(total_elapsed))
+
-- Add padding (1 space each side)
col_suite = col_suite + 2
col_cases = col_cases + 2
@@ -621,8 +665,26 @@ task("test")
local asserts_str = format_number(r.asserts_passed) .. "/" .. format_number(r.asserts_total)
local time_str = format_time(r.elapsed_seconds)
local status_str = r.passed and "SUCCESS" or "FAILED"
- printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar, left_pad_str(r.suite, col_suite), vbar, left_pad_str(cases_str, col_cases), vbar, left_pad_str(asserts_str, col_asserts), vbar, left_pad_str(time_str, col_time), vbar, left_pad_str(status_str, col_status), vbar)
+ printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar, left_align_str(r.suite, col_suite), vbar, right_align_str(cases_str, col_cases), vbar, right_align_str(asserts_str, col_asserts), vbar, right_align_str(time_str, col_time), vbar, right_align_str(status_str, col_status), vbar)
end
+
+ -- Totals row
+ if #results > 1 then
+ local h_suite_eq = string.rep("=", col_suite)
+ local h_cases_eq = string.rep("=", col_cases)
+ local h_asserts_eq = string.rep("=", col_asserts)
+ local h_time_eq = string.rep("=", col_time)
+ local h_status_eq = string.rep("=", col_status)
+ local totals_sep = "+" .. h_suite_eq .. "+" .. h_cases_eq .. "+" .. h_asserts_eq .. "+" .. h_time_eq .. "+" .. h_status_eq .. "+"
+ printf(" %s\n", totals_sep)
+
+ local total_cases_str = format_number(total_cases_passed) .. "/" .. format_number(total_cases_total)
+ local total_asserts_str = format_number(total_asserts_passed) .. "/" .. format_number(total_asserts_total)
+ local total_time_str = format_time(total_elapsed)
+ local total_status_str = any_failed and "FAILED" or "SUCCESS"
+ printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar, left_align_str("Total", col_suite), vbar, right_align_str(total_cases_str, col_cases), vbar, right_align_str(total_asserts_str, col_asserts), vbar, right_align_str(total_time_str, col_time), vbar, right_align_str(total_status_str, col_status), vbar)
+ end
+
printf(" %s\n", bottom)
end