about summary refs log tree commit diff
path: root/xmake.lua
diff options
context:
space:
mode:
Diffstat (limited to 'xmake.lua')
-rw-r--r--  xmake.lua  534
1 file changed, 460 insertions, 74 deletions
diff --git a/xmake.lua b/xmake.lua
index 18429de61..dfe383c1a 100644
--- a/xmake.lua
+++ b/xmake.lua
@@ -74,6 +74,8 @@ add_defines("EASTL_STD_ITERATOR_CATEGORY_ENABLED", "EASTL_DEPRECATIONS_FOR_2024_
add_requires("eastl", {system = false})
add_requires("consul", {system = false}) -- for hub tests
+add_requires("nomad", {system = false}) -- for nomad provisioner tests
+add_requires("oidctoken", {system = false})
if has_config("zenmimalloc") and not use_asan then
add_requires("mimalloc", {system = false})
@@ -101,25 +103,29 @@ if is_plat("windows") then
add_requires("7z")
end
--- If we're using the UE cross-compile toolchain, we need to ensure we link statically
--- against the toolchain libc++ and libc++abi, as the system ones can differ in ABI
--- leading to nasty crashes
-
-if is_plat("linux") and os.getenv("UE_TOOLCHAIN_DIR") then
- add_ldflags("-static-libstdc++")
- add_ldflags("$(projectdir)/thirdparty/ue-libcxx/lib64/libc++.a")
- add_ldflags("$(projectdir)/thirdparty/ue-libcxx/lib64/libc++abi.a")
- set_toolset("objcopy", "$(env UE_TOOLCHAIN_DIR)/bin/llvm-objcopy")
+-- When using the UE clang toolchain, statically link the toolchain's libc++ and
+-- libc++abi to avoid ABI mismatches with system libraries at runtime.
+-- These are project-level flags (not in the toolchain definition) so they don't
+-- propagate into cmake package builds.
+if is_plat("linux") and get_config("toolchain") == "ue-clang" then
+ add_ldflags("-static-libstdc++", {force = true})
+ add_ldflags("$(projectdir)/thirdparty/ue-libcxx/lib64/libc++.a", {force = true})
+ add_ldflags("$(projectdir)/thirdparty/ue-libcxx/lib64/libc++abi.a", {force = true})
+ add_ldflags("-lpthread", {force = true})
end
+
if has_config("zensentry") and not use_asan then
if is_plat("linux") then
- add_requires("sentry-native 0.7.6")
+ add_requires("sentry-native 0.12.1", {configs = {backend = "crashpad"}})
elseif is_plat("windows") then
- add_requires("sentry-native 0.7.6", {debug = is_mode("debug"), configs = {backend = "crashpad"}})
+ add_requires("sentry-native 0.12.1", {debug = is_mode("debug"), configs = {backend = "crashpad"}})
else
- add_requires("sentry-native 0.7.6", {configs = {backend = "crashpad"}})
+ add_requires("sentry-native 0.12.1", {configs = {backend = "crashpad"}})
end
end
+
+enable_unity = false
+
--add_rules("c++.unity_build")
if is_mode("release") then
@@ -209,7 +215,7 @@ function add_define_by_config(define, config_name)
end
option("zensentry")
- set_default(false)
+ set_default(true)
set_showmenu(true)
set_description("Enables Sentry support")
option_end()
@@ -240,6 +246,30 @@ else
add_defines("ZEN_WITH_HTTPSYS=0")
end
+local compute_default = false
+
+option("zencompute")
+ set_default(compute_default)
+ set_showmenu(true)
+ set_description("Enable compute services endpoint")
+option_end()
+add_define_by_config("ZEN_WITH_COMPUTE_SERVICES", "zencompute")
+
+option("zenhorde")
+ set_default(compute_default)
+ set_showmenu(true)
+ set_description("Enable Horde worker provisioning")
+option_end()
+add_define_by_config("ZEN_WITH_HORDE", "zenhorde")
+
+option("zennomad")
+ set_default(compute_default)
+ set_showmenu(true)
+ set_description("Enable Nomad worker provisioning")
+option_end()
+add_define_by_config("ZEN_WITH_NOMAD", "zennomad")
+
+
if is_os("windows") then
add_defines("UE_MEMORY_TRACE_AVAILABLE=1")
option("zenmemtrack")
@@ -265,6 +295,25 @@ set_languages("cxx20")
-- always generate debug information
set_symbols("debug")
+includes("toolchains")
+
+-- Auto-select the UE clang toolchain on Linux when the SDK is available
+if is_plat("linux") and not get_config("toolchain") then
+ local ue_sdk = os.getenv("UE_TOOLCHAIN_DIR")
+ if not ue_sdk or ue_sdk == "" then
+ local home = os.getenv("HOME")
+ if home then
+ local default_path = path.join(home, ".ue-toolchain")
+ if os.isdir(default_path) then
+ ue_sdk = default_path
+ end
+ end
+ end
+ if ue_sdk and ue_sdk ~= "" and os.isdir(ue_sdk) then
+ set_toolchains("ue-clang")
+ end
+end
+
includes("thirdparty")
includes("src/transports")
includes("src/zenbase")
@@ -272,6 +321,13 @@ includes("src/zencore", "src/zencore-test")
includes("src/zenhttp", "src/zenhttp-test")
includes("src/zennet", "src/zennet-test")
includes("src/zenremotestore", "src/zenremotestore-test")
+includes("src/zencompute", "src/zencompute-test")
+if has_config("zenhorde") then
+ includes("src/zenhorde")
+end
+if has_config("zennomad") then
+ includes("src/zennomad")
+end
includes("src/zenstore", "src/zenstore-test")
includes("src/zentelemetry", "src/zentelemetry-test")
includes("src/zenutil", "src/zenutil-test")
@@ -317,26 +373,46 @@ task("precommit")
task("sln")
set_menu {
- usage = "xmake sln",
+ usage = "xmake sln [--open]",
description = "Generate IDE project files",
+ options = {
+ {'o', "open", "k", nil, "Open the generated project in the IDE after generation"},
+ }
}
if is_os("windows") then
on_run(function ()
- print(os.exec("xmake project --yes --kind=vsxmake2022 -m release,debug -a x64"))
+ import("core.base.option")
+ os.exec("xmake project --yes --kind=vsxmake2022 -m release,debug -a x64")
+ if option.get("open") then
+ local sln = path.join(os.projectdir(), "vsxmake2022", path.filename(os.projectdir()) .. ".sln")
+ printf("opening %s\n", sln)
+ try { function() os.execv("explorer", {sln}) end, catch { function() end } }
+ end
end)
elseif is_os("macosx") then
on_run(function ()
- print(os.exec("xmake project --yes --kind=xcode -m release,debug -a x64,arm64"))
+ import("core.base.option")
+ os.exec("xmake project --yes --kind=xcode -m release,debug -a x64,arm64")
+ if option.get("open") then
+ local xcproj = path.join(os.projectdir(), path.filename(os.projectdir()) .. ".xcodeproj")
+ printf("opening %s\n", xcproj)
+ os.exec("open \"%s\"", xcproj)
+ end
end)
end
task("test")
set_menu {
- usage = "xmake test --run=[core|store|server|integration|all]",
+ usage = "xmake test --run=[name|all] [-- extra-args...] (use --list to see available tests)",
description = "Run Zen tests",
options = {
- {'r', "run", "kv", "all", "Run test(s)", " - all", " - core ", " - remotestore", " - store", " - server", " - integration"},
- {'j', "junit", "k", nil, "Enable junit report output"}
+ {'r', "run", "kv", "all", "Run test(s) - comma-separated"},
+ {'l', "list", "k", nil, "List available test names"},
+ {'j', "junit", "k", nil, "Enable junit report output"},
+ {'n', "noskip", "k", nil, "Run skipped tests (passes --no-skip to doctest)"},
+ {nil, "repeat", "kv", nil, "Repeat tests N times (stops on first failure)"},
+ {'v', "verbose", "k", nil, "Route child process output to stdout (zenserver-test)"},
+ {nil, "arguments", "vs", nil, "Extra arguments passed to test runners (after --)"}
}
}
on_run(function()
@@ -346,44 +422,106 @@ task("test")
config.load()
- local testname = option.get("run")
- local available_tests = {
- core = "zencore-test",
- http = "zenhttp-test",
- util = "zenutil-test",
- store = "zenstore-test",
- remotestore = "zenremotestore-test",
- server = "zenserver",
- integration = "zenserver-test"
+ -- Override table: target name -> short name (for targets that don't follow convention)
+ local short_name_overrides = {
+ ["zenserver-test"] = "integration",
}
- local arch
- if is_host("windows") then
- arch = "x64"
- elseif is_arch("arm64") then
- arch = "arm64"
- else
- arch = "x86_64"
+ -- Build test list from targets in the "tests" group
+ local available_tests = {}
+ for name, target in pairs(project.targets()) do
+ if target:get("group") == "tests" and name:endswith("-test") then
+ local short = short_name_overrides[name]
+ if not short then
+ -- Derive short name: "zencore-test" -> "core"
+ short = name
+ if short:startswith("zen") then short = short:sub(4) end
+ if short:endswith("-test") then short = short:sub(1, -6) end
+ end
+ table.insert(available_tests, {short, name})
+ end
+ end
+
+ -- Add non-test-group entries that have a test subcommand
+ table.insert(available_tests, {"server", "zenserver"})
+
+ table.sort(available_tests, function(a, b) return a[1] < b[1] end)
+
+ -- Handle --list: print discovered test names and exit
+ if option.get("list") then
+ printf("Available tests:\n")
+ for _, entry in ipairs(available_tests) do
+ printf(" %-16s -> %s\n", entry[1], entry[2])
+ end
+ return
end
- print(os.exec("xmake config -c -m debug -a "..arch))
- print(os.exec("xmake"))
+ local testname = option.get("run")
+
+ -- Parse comma-separated test names into a set
+ local requested = {}
+ for token in testname:gmatch("[^,]+") do
+ requested[token:match("^%s*(.-)%s*$")] = true
+ end
+ -- Filter to requested test(s)
local tests = {}
- local found_match = false
+ local matched = {}
- for name, test in pairs(available_tests) do
- if name == testname or testname == "all" then
- tests[name] = test
- found_match = true
+ for _, entry in ipairs(available_tests) do
+ local name, target = entry[1], entry[2]
+ if requested["all"] or requested[name] then
+ table.insert(tests, {name = name, target = target})
+ matched[name] = true
end
end
- if not found_match then
+ -- Check for unknown test names
+ if not requested["all"] then
+ for name, _ in pairs(requested) do
+ if not matched[name] then
+ raise("no tests match specification: '%s'", name)
+ end
+ end
+ end
+
+ if #tests == 0 then
raise("no tests match specification: '%s'", testname)
end
+ local plat, arch
+ if is_host("windows") then
+ plat = "windows"
+ arch = "x64"
+ elseif is_host("macosx") then
+ plat = "macosx"
+ arch = is_arch("arm64") and "arm64" or "x86_64"
+ else
+ plat = "linux"
+ arch = "x86_64"
+ end
+
+ -- Only reconfigure if current config doesn't already match
+ if config.get("mode") ~= "debug" or config.get("plat") ~= plat or config.get("arch") ~= arch then
+ local toolchain_flag = config.get("toolchain") and ("--toolchain=" .. config.get("toolchain")) or ""
+ local sdk_flag = config.get("sdk") and ("--sdk=" .. config.get("sdk")) or ""
+ os.exec("xmake config -c -m debug -p %s -a %s %s %s", plat, arch, toolchain_flag, sdk_flag)
+ end
+
+ -- Build targets we're going to run
+ if requested["all"] then
+ os.exec("xmake build -y")
+ else
+ for _, entry in ipairs(tests) do
+ os.exec("xmake build -y %s", entry.target)
+ end
+ end
+
local use_junit_reporting = option.get("junit")
+ local use_noskip = option.get("noskip")
+ local use_verbose = option.get("verbose")
+ local repeat_count = tonumber(option.get("repeat")) or 1
+ local extra_args = option.get("arguments") or {}
local junit_report_files = {}
local junit_report_dir
@@ -392,39 +530,287 @@ task("test")
os.mkdir(junit_report_dir)
end
- try
- {
- function()
- for name, test in pairs(tests) do
- printf("=== %s ===\n", test)
- local cmd = string.format("xmake run %s", test)
- if name == "server" then
- cmd = string.format("xmake run %s test", test)
- end
- cmd = string.format("%s --duration=true", cmd)
- if use_junit_reporting then
- local target = project.target(test)
- local junit_report_file = path.join(junit_report_dir, string.format("junit-%s-%s-%s.xml", config.plat(), arch, test))
- junit_report_files[test] = junit_report_file
- cmd = string.format("%s --reporters=junit --out=%s", cmd, junit_report_file)
- end
+ -- Results collection for summary table
+ local results = {}
+ local any_failed = false
+
+ -- Format a number with thousands separators (e.g. 31103 -> "31,103")
+ local function format_number(n)
+ local s = tostring(n)
+ local pos = #s % 3
+ if pos == 0 then pos = 3 end
+ local result = s:sub(1, pos)
+ for i = pos + 1, #s, 3 do
+ result = result .. "," .. s:sub(i, i + 2)
+ end
+ return result
+ end
+
+ -- Center a string within a given width
+ local function center_str(s, width)
+ local pad = width - #s
+ local lpad = math.floor(pad / 2)
+ local rpad = pad - lpad
+ return string.rep(" ", lpad) .. s .. string.rep(" ", rpad)
+ end
+
+ -- Left-align a string within a given width (with 1-space left margin)
+ local function left_align_str(s, width)
+ return " " .. s .. string.rep(" ", width - #s - 1)
+ end
+
+ -- Right-align a string within a given width (with 1-space right margin)
+ local function right_align_str(s, width)
+ return string.rep(" ", width - #s - 1) .. s .. " "
+ end
+
+ -- Format elapsed seconds as a human-readable string
+ local function format_time(seconds)
+ if seconds >= 60 then
+ local mins = math.floor(seconds / 60)
+ local secs = seconds - mins * 60
+ return string.format("%dm %04.1fs", mins, secs)
+ else
+ return string.format("%.1fs", seconds)
+ end
+ end
- os.exec(cmd)
+ -- Parse test summary file written by TestListener
+ local function parse_summary_file(filepath)
+ if not os.isfile(filepath) then return nil end
+ local content = io.readfile(filepath)
+ if not content then return nil end
+ local ct = content:match("cases_total=(%d+)")
+ local cp = content:match("cases_passed=(%d+)")
+ local at = content:match("assertions_total=(%d+)")
+ local ap = content:match("assertions_passed=(%d+)")
+ if ct then
+ local failures = {}
+ for name, file, line in content:gmatch("failed=([^|\n]+)|([^|\n]+)|(%d+)") do
+ table.insert(failures, {name = name, file = file, line = tonumber(line)})
end
- end,
-
- finally
- {
- function (ok, errors)
- for test, junit_report_file in pairs(junit_report_files) do
- printf("=== report - %s ===\n", test)
- local data = io.readfile(junit_report_file)
- print(data)
- end
- if (errors) then
- raise(errors)
+ local es = content:match("elapsed_seconds=([%d%.]+)")
+ return {
+ cases_total = tonumber(ct),
+ cases_passed = tonumber(cp) or 0,
+ asserts_total = tonumber(at) or 0,
+ asserts_passed = tonumber(ap) or 0,
+ elapsed_seconds = tonumber(es) or 0,
+ failures = failures
+ }
+ end
+ return nil
+ end
+
+ -- Temp directory for summary files
+ local summary_dir = path.join(os.tmpdir(), "zen-test-summary")
+ os.mkdir(summary_dir)
+
+ -- Run each test suite and collect results
+ for iteration = 1, repeat_count do
+ if repeat_count > 1 then
+ printf("\n*** Iteration %d/%d ***\n", iteration, repeat_count)
+ end
+
+ for _, entry in ipairs(tests) do
+ local name, target = entry.name, entry.target
+ printf("=== %s ===\n", target)
+
+ local suite_name = target
+ if name == "server" then
+ suite_name = "zenserver (test)"
+ end
+
+ local cmd = string.format("xmake run %s", target)
+ if name == "server" then
+ cmd = string.format("xmake run %s test", target)
+ end
+ cmd = string.format("%s --duration=true", cmd)
+
+ if use_junit_reporting then
+ local junit_report_file = path.join(junit_report_dir, string.format("junit-%s-%s-%s.xml", config.plat(), arch, target))
+ junit_report_files[target] = junit_report_file
+ cmd = string.format("%s --reporters=junit --out=%s", cmd, junit_report_file)
+ end
+ if use_noskip then
+ cmd = string.format("%s --no-skip", cmd)
+ end
+ if use_verbose and name == "integration" then
+ cmd = string.format("%s --verbose", cmd)
+ end
+ for _, arg in ipairs(extra_args) do
+ cmd = string.format("%s %s", cmd, arg)
+ end
+
+ -- Tell TestListener where to write the summary
+ local summary_file = path.join(summary_dir, target .. ".txt")
+ os.setenv("ZEN_TEST_SUMMARY_FILE", summary_file)
+
+ -- Run test with real-time streaming output
+ local test_ok = true
+ try {
+ function()
+ os.exec(cmd)
+ end,
+ catch {
+ function(errors)
+ test_ok = false
+ end
+ }
+ }
+
+ -- Read summary written by TestListener
+ local summary = parse_summary_file(summary_file)
+ os.tryrm(summary_file)
+
+ if not test_ok then
+ any_failed = true
+ end
+
+ table.insert(results, {
+ suite = suite_name,
+ cases_passed = summary and summary.cases_passed or 0,
+ cases_total = summary and summary.cases_total or 0,
+ asserts_passed = summary and summary.asserts_passed or 0,
+ asserts_total = summary and summary.asserts_total or 0,
+ elapsed_seconds = summary and summary.elapsed_seconds or 0,
+ failures = summary and summary.failures or {},
+ passed = test_ok
+ })
+ end
+
+ if any_failed then
+ if repeat_count > 1 then
+ printf("\n*** Failure detected on iteration %d, stopping ***\n", iteration)
+ end
+ break
+ end
+ end
+
+ -- Clean up
+ os.setenv("ZEN_TEST_SUMMARY_FILE", "")
+ os.tryrm(summary_dir)
+
+ -- Print JUnit reports if requested
+ for test, junit_report_file in pairs(junit_report_files) do
+ printf("=== report - %s ===\n", test)
+ if os.isfile(junit_report_file) then
+ local data = io.readfile(junit_report_file)
+ if data then
+ print(data)
+ end
+ end
+ end
+
+ -- Print summary table
+ if #results > 0 then
+ -- Calculate column widths based on content
+ local col_suite = #("Suite")
+ local col_cases = #("Cases")
+ local col_asserts = #("Assertions")
+ local col_time = #("Time")
+ local col_status = #("Status")
+
+ -- Compute totals
+ local total_cases_passed = 0
+ local total_cases_total = 0
+ local total_asserts_passed = 0
+ local total_asserts_total = 0
+ local total_elapsed = 0
+
+ for _, r in ipairs(results) do
+ col_suite = math.max(col_suite, #r.suite)
+ local cases_str = format_number(r.cases_passed) .. "/" .. format_number(r.cases_total)
+ col_cases = math.max(col_cases, #cases_str)
+ local asserts_str = format_number(r.asserts_passed) .. "/" .. format_number(r.asserts_total)
+ col_asserts = math.max(col_asserts, #asserts_str)
+ col_time = math.max(col_time, #format_time(r.elapsed_seconds))
+ local status_str = r.passed and "SUCCESS" or "FAILED"
+ col_status = math.max(col_status, #status_str)
+
+ total_cases_passed = total_cases_passed + r.cases_passed
+ total_cases_total = total_cases_total + r.cases_total
+ total_asserts_passed = total_asserts_passed + r.asserts_passed
+ total_asserts_total = total_asserts_total + r.asserts_total
+ total_elapsed = total_elapsed + r.elapsed_seconds
+ end
+
+ -- Account for totals row in column widths
+ col_suite = math.max(col_suite, #("Total"))
+ col_cases = math.max(col_cases, #(format_number(total_cases_passed) .. "/" .. format_number(total_cases_total)))
+ col_asserts = math.max(col_asserts, #(format_number(total_asserts_passed) .. "/" .. format_number(total_asserts_total)))
+ col_time = math.max(col_time, #format_time(total_elapsed))
+
+ -- Add padding (1 space each side)
+ col_suite = col_suite + 2
+ col_cases = col_cases + 2
+ col_asserts = col_asserts + 2
+ col_time = col_time + 2
+ col_status = col_status + 2
+
+ -- Build horizontal border segments
+ local h_suite = string.rep("-", col_suite)
+ local h_cases = string.rep("-", col_cases)
+ local h_asserts = string.rep("-", col_asserts)
+ local h_time = string.rep("-", col_time)
+ local h_status = string.rep("-", col_status)
+
+ local top = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+"
+ local mid = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+"
+ local bottom = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+"
+ local vbar = "|"
+
+ local header_msg = any_failed and "Some tests failed:" or "All tests passed:"
+ printf("\n* %s\n", header_msg)
+ printf(" %s\n", top)
+ printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar, center_str("Suite", col_suite), vbar, center_str("Cases", col_cases), vbar, center_str("Assertions", col_asserts), vbar, center_str("Time", col_time), vbar, center_str("Status", col_status), vbar)
+
+ for _, r in ipairs(results) do
+ printf(" %s\n", mid)
+ local cases_str = format_number(r.cases_passed) .. "/" .. format_number(r.cases_total)
+ local asserts_str = format_number(r.asserts_passed) .. "/" .. format_number(r.asserts_total)
+ local time_str = format_time(r.elapsed_seconds)
+ local status_str = r.passed and "SUCCESS" or "FAILED"
+ printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar, left_align_str(r.suite, col_suite), vbar, right_align_str(cases_str, col_cases), vbar, right_align_str(asserts_str, col_asserts), vbar, right_align_str(time_str, col_time), vbar, right_align_str(status_str, col_status), vbar)
+ end
+
+ -- Totals row
+ if #results > 1 then
+ local h_suite_eq = string.rep("=", col_suite)
+ local h_cases_eq = string.rep("=", col_cases)
+ local h_asserts_eq = string.rep("=", col_asserts)
+ local h_time_eq = string.rep("=", col_time)
+ local h_status_eq = string.rep("=", col_status)
+ local totals_sep = "+" .. h_suite_eq .. "+" .. h_cases_eq .. "+" .. h_asserts_eq .. "+" .. h_time_eq .. "+" .. h_status_eq .. "+"
+ printf(" %s\n", totals_sep)
+
+ local total_cases_str = format_number(total_cases_passed) .. "/" .. format_number(total_cases_total)
+ local total_asserts_str = format_number(total_asserts_passed) .. "/" .. format_number(total_asserts_total)
+ local total_time_str = format_time(total_elapsed)
+ local total_status_str = any_failed and "FAILED" or "SUCCESS"
+ printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar, left_align_str("Total", col_suite), vbar, right_align_str(total_cases_str, col_cases), vbar, right_align_str(total_asserts_str, col_asserts), vbar, right_align_str(total_time_str, col_time), vbar, right_align_str(total_status_str, col_status), vbar)
+ end
+
+ printf(" %s\n", bottom)
+ end
+
+ -- Print list of individual failing tests
+ if any_failed then
+ printf("\n Failures:\n")
+ for _, r in ipairs(results) do
+ if #r.failures > 0 then
+ printf(" -- %s --\n", r.suite)
+ for _, f in ipairs(r.failures) do
+ printf(" FAILED: %s (%s:%d)\n", f.name, f.file, f.line)
end
+ elseif not r.passed then
+ printf(" -- %s --\n", r.suite)
+ printf(" (test binary exited with error, no failure details available)\n")
end
- }
- }
+ end
+ end
+
+ if any_failed then
+ raise("one or more test suites failed")
+ end
end)