-- Copyright Epic Games, Inc. All Rights Reserved.
--
-- Root xmake build description for the Zen server project.
-- NOTE: xmake directives are order-sensitive (they apply to the current
-- description scope), so the statement order below is significant.

set_configvar("ZEN_SCHEMA_VERSION", 5)

-- force state wipe after 0.2.31 causing bad data (dan.engelbrecht)
set_configvar("ZEN_DATA_FORCE_SCRUB_VERSION", 0)

-- Supported target platforms and architectures.
set_allowedplats("windows", "linux", "macosx")
set_allowedarchs("windows|x64", "linux|x86_64", "macosx|x86_64", "macosx|arm64")

--------------------------------------------------------------------------
-- We support debug and release modes. On Windows we use static CRT to
-- minimize dependencies.

set_allowedmodes("debug", "release")
add_rules("mode.debug", "mode.release")

if is_plat("windows") then
    -- Manual toggle between the DLL CRT (MD/MDd) and the static CRT (MT/MTd).
    -- The `if false` branch is kept as a developer convenience for switching;
    -- the static runtime is the one actually in effect.
    if false then
        -- DLL runtime
        if is_mode("debug") then
            set_runtimes("MDd")
        else
            set_runtimes("MD")
        end
    else
        -- static runtime
        if is_mode("debug") then
            set_runtimes("MTd")
        else
            set_runtimes("MT")
        end
    end
end

--------------------------------------------------------------------------
-- Sanitizers
--
-- https://xmake.io/api/description/builtin-policies.html#build-sanitizer-address
--
-- When using sanitizers, it may be necessary to change some configuration
-- options. In particular, you may want to use `--zensentry=no` to disable
-- Sentry support as it may not be compatible with some sanitizers. Also,
-- it may be necessary to disable mimalloc by using `--zenmimalloc=no`.

-- AddressSanitizer is supported on Windows (MSVC 2019+), Linux, and MacOS
-- NOTE: `use_asan` is consulted further down in this file to gate the
-- mimalloc and sentry-native package requirements and option defaults.
local use_asan = false -- Automatically disables Sentry when set to true
set_policy("build.sanitizer.address", use_asan)

-- ThreadSanitizer, MemorySanitizer, LeakSanitizer, and UndefinedBehaviorSanitizer
-- are supported on Linux and MacOS only.
--set_policy("build.sanitizer.thread", true)
--set_policy("build.sanitizer.memory", true)
--set_policy("build.sanitizer.leak", true)
--set_policy("build.sanitizer.undefined", true)

--------------------------------------------------------------------------
-- Dependencies

-- Project-local package repository (directory "repo" registered as "zen-repo").
add_repositories("zen-repo repo")

set_policy("build.ccache", false)
set_policy("package.precompiled", false)

-- All third-party packages are built from the repo (system = false) so builds
-- do not depend on whatever happens to be installed on the host.
add_defines("gsl_FEATURE_GSL_COMPATIBILITY_MODE=1")
add_requires("gsl-lite", {system = false})
add_requires("http_parser", {system = false})
add_requires("json11", {system = false})
add_requires("lua", {system = false})
add_requires("lz4", {system = false})
add_requires("xxhash", {system = false})
add_requires("zlib", {system = false})
add_defines("EASTL_STD_ITERATOR_CATEGORY_ENABLED", "EASTL_DEPRECATIONS_FOR_2024_APRIL=EA_DISABLED")
add_requires("eastl", {system = false})
add_requires("consul", {system = false}) -- for hub tests

-- mimalloc is incompatible with ASan builds, so it is only required when
-- the zenmimalloc option is on and ASan is off.
if has_config("zenmimalloc") and not use_asan then
    add_requires("mimalloc", {system = false})
end

--------------------------------------------------------------------------
-- Crypto configuration.
-- For reasons unknown each platform needs a different package

if is_plat("windows") then
    -- we use schannel on Windows
    add_defines("ZEN_USE_OPENSSL=0")
    add_requires("libcurl", {system = false})
elseif is_plat("linux", "macosx") then
    add_requires("openssl3", {system = false})
    add_defines("ZEN_USE_OPENSSL=1")
    add_requires("libcurl", {system = false, configs = {openssl3 = true}})
end

--------------------------------------------------------------------------

if is_plat("windows") then
    -- for bundling, Linux tries to compile from source which fails with UE toolchain,
    -- fallback is regular zip
    add_requires("7z")
end

-- If we're using the UE cross-compile toolchain, we need to ensure we link statically
-- against the toolchain libc++ and libc++abi, as the system ones can differ in ABI
-- leading to nasty crashes
if is_plat("linux") and os.getenv("UE_TOOLCHAIN_DIR") then
    add_ldflags("-static-libstdc++")
    add_ldflags("$(projectdir)/thirdparty/ue-libcxx/lib64/libc++.a")
    add_ldflags("$(projectdir)/thirdparty/ue-libcxx/lib64/libc++abi.a")
    set_toolset("objcopy", "$(env UE_TOOLCHAIN_DIR)/bin/llvm-objcopy")
end

-- Sentry crash reporting: skipped entirely under ASan (see use_asan above).
-- Windows needs the debug flag propagated so the package CRT matches ours.
if has_config("zensentry") and not use_asan then
    if is_plat("linux") then
        add_requires("sentry-native 0.12.1", {configs = {backend = "crashpad"}})
    elseif is_plat("windows") then
        add_requires("sentry-native 0.12.1", {debug = is_mode("debug"), configs = {backend = "crashpad"}})
    else
        add_requires("sentry-native 0.12.1", {configs = {backend = "crashpad"}})
    end
end

-- NOTE(review): intentionally global (not `local`) — presumably read by the
-- included sub-project scripts; confirm before localizing. Pairs with the
-- commented-out unity build rule below.
enable_unity = false
--add_rules("c++.unity_build")

if is_mode("release") then
    -- LTO does not appear to work with the current Linux UE toolchain
    -- It's currently also temporarily disabled on Windows to investigate
    -- build issues due to git apply not applying the necessary patches correctly
    -- in CI for some reason.
    -- Also, disabled LTO on Mac to reduce time spent building openssl tests
    -- (with all three platforms excluded this branch is currently never taken;
    -- kept so LTO can be re-enabled per platform by editing the list).
    if not is_plat("linux", "windows", "macosx") then
        set_policy("build.optimization.lto", true)
    end
    set_optimize("fastest")
end

if is_mode("debug") then
    add_defines("DEBUG")
end

-- Tests are compiled in for debug builds only.
if is_mode("debug") then
    add_defines("ZEN_WITH_TESTS=1")
else
    add_defines("ZEN_WITH_TESTS=0")
end

-- fmt 11+ requires utf-8 when using unicode
if is_os("windows") then
    set_encodings("utf-8")
else
    set_encodings("source:utf-8", "target:utf-8")
end

if is_os("windows") then
    add_defines(
        "_CRT_SECURE_NO_WARNINGS",
        "_UNICODE",
        "UNICODE",
        "_CONSOLE",
        "NOMINMAX",            -- stop Windows SDK defining 'min' and 'max'
        "NOGDI",               -- otherwise Windows.h defines 'GetObject'
        "WIN32_LEAN_AND_MEAN", -- cut down Windows.h
        "_WIN32_WINNT=0x0A00",
        "_WINSOCK_DEPRECATED_NO_WARNINGS" -- let us use the ANSI functions
    )

    -- Make builds more deterministic and portable
    add_cxxflags("/d1trimfile:$(curdir)\\")     -- eliminates the base path from __FILE__ paths
    add_cxxflags("/experimental:deterministic") -- (more) deterministic compiler output
    add_ldflags("/PDBALTPATH:%_PDB%")           -- deterministic pdb reference in exe

    add_cxxflags("/Zc:preprocessor")     -- Enable preprocessor conformance mode
    add_cxxflags("/Zc:u8EscapeEncoding") -- Enable UTF-8 encoding for u8 string literals
    add_cxxflags("/Zc:inline")           -- Enforce inline semantics

    -- add_ldflags("/MAP")
end

-- Warning suppressions shared by the clang/gcc toolchains.
if is_os("linux") or is_os("macosx") then
    add_cxxflags("-Wno-implicit-fallthrough")
    add_cxxflags("-Wno-missing-field-initializers")
    add_cxxflags("-Wno-strict-aliasing")
    add_cxxflags("-Wno-switch")
    add_cxxflags("-Wno-unused-lambda-capture")
    add_cxxflags("-Wno-unused-private-field")
    add_cxxflags("-Wno-unused-value")
    add_cxxflags("-Wno-unused-variable")
    add_cxxflags("-Wno-vla-cxx-extension")
end

if is_os("macosx") then
    -- silence warnings about -Wno-vla-cxx-extension since to my knowledge we can't
    -- detect the clang version used in Xcode and only recent versions contain this flag
    add_cxxflags("-Wno-unknown-warning-option")
end

if is_os("linux") then
    add_defines("_GNU_SOURCE")
end

-- Turn use of undefined cpp macros into errors
if is_os("windows") then
    add_cxxflags("/we4668")
else
    add_cxxflags("-Wundef")
end

-- Emits `<define>=1` when the named option is enabled, `<define>=0` otherwise.
-- @param define      the preprocessor macro name to emit
-- @param config_name the xmake option to test with has_config()
function add_define_by_config(define, config_name)
    local value = has_config(config_name) and 1 or 0
    add_defines(define.."="..value)
end

--------------------------------------------------------------------------
-- Feature options. Each option is mirrored into a ZEN_* preprocessor macro
-- via add_define_by_config() right after its declaration.

option("zensentry")
    set_default(true)
    set_showmenu(true)
    set_description("Enables Sentry support")
option_end()
add_define_by_config("ZEN_USE_SENTRY", "zensentry")

option("zenmimalloc")
    set_default(not use_asan) -- mimalloc defaults off when ASan is enabled
    set_showmenu(true)
    set_description("Use MiMalloc for faster memory management")
option_end()
add_define_by_config("ZEN_USE_MIMALLOC", "zenmimalloc")

option("zenrpmalloc")
    set_default(true)
    set_showmenu(true)
    set_description("Use rpmalloc for faster memory management")
option_end()
add_define_by_config("ZEN_USE_RPMALLOC", "zenrpmalloc")

-- http.sys is a Windows-only kernel HTTP server; hard-disabled elsewhere.
if is_os("windows") then
    option("httpsys")
        set_default(true)
        set_showmenu(true)
        set_description("Enable http.sys server")
    option_end()
    add_define_by_config("ZEN_WITH_HTTPSYS", "httpsys")
else
    add_defines("ZEN_WITH_HTTPSYS=0")
end

option("zencompute")
    set_default(false)
    set_showmenu(true)
    set_description("Enable compute services endpoint")
option_end()
add_define_by_config("ZEN_WITH_COMPUTE_SERVICES", "zencompute")

-- UE memory tracing is only wired up on Windows; hard-disabled elsewhere.
if is_os("windows") then
    add_defines("UE_MEMORY_TRACE_AVAILABLE=1")
    option("zenmemtrack")
        set_default(true)
        set_showmenu(true)
        set_description("Enable UE's Memory Trace support")
    option_end()
    add_define_by_config("ZEN_WITH_MEMTRACK", "zenmemtrack")
else
    add_defines("ZEN_WITH_MEMTRACK=0")
end

option("zentrace")
    set_default(true)
    set_showmenu(true)
    set_description("Enable UE's Trace support")
option_end()
add_define_by_config("ZEN_WITH_TRACE", "zentrace")

set_warnings("allextra", "error")
set_languages("cxx20")

-- always generate debug information
set_symbols("debug")

--------------------------------------------------------------------------
-- Sub-project includes (each directory provides its own xmake.lua).

includes("thirdparty")
includes("src/transports")
includes("src/zenbase")
includes("src/zencore", "src/zencore-test")
includes("src/zenhttp", "src/zenhttp-test")
includes("src/zennet", "src/zennet-test")
includes("src/zenremotestore", "src/zenremotestore-test")
includes("src/zencompute", "src/zencompute-test")
includes("src/zenstore", "src/zenstore-test")
includes("src/zentelemetry", "src/zentelemetry-test")
includes("src/zenutil", "src/zenutil-test")
includes("src/zenvfs")
includes("src/zenserver", "src/zenserver-test")
includes("src/zen")
includes("src/zentest-appstub")

--------------------------------------------------------------------------
-- Custom xmake tasks.

-- Packages built binaries into a zip bundle (implementation in scripts/bundle.lua).
task("bundle")
    set_menu {
        usage = "xmake bundle",
        description = "Create Zip bundle from binaries",
        options = {
            {nil, "withtrace", "k", nil, "Compiles with trace support"},
            {nil, "codesignidentity", "v", nil, "Code signing identity"},
        }
    }
    on_run(function ()
        import("scripts.bundle")
        bundle()
    end)

-- Re-zips the frontend/html folder for embedding in the zenserver executable.
task("updatefrontend")
    set_menu {
        usage = "xmake updatefrontend",
        description = "Create Zip of the frontend/html folder for bundling with zenserver executable",
    }
    on_run(function()
        import("scripts.updatefrontend")
        updatefrontend()
    end)

-- Runs the repository's pre-commit hooks over every file.
task("precommit")
    set_menu {
        usage = "xmake precommit",
        description = "Run required pre-commit steps (clang-format, etc)",
    }
    on_run(function ()
        print(os.exec("pre-commit run --all-files"))
    end)

-- Generates native IDE projects; only Windows (Visual Studio) and macOS
-- (Xcode) hosts get an on_run handler — the task is a no-op elsewhere.
task("sln")
    set_menu {
        usage = "xmake sln",
        description = "Generate IDE project files",
    }
    if is_os("windows") then
        on_run(function ()
            print(os.exec("xmake project --yes --kind=vsxmake2022 -m release,debug -a x64"))
        end)
    elseif is_os("macosx") then
        on_run(function ()
            print(os.exec("xmake project --yes --kind=xcode -m release,debug -a x64,arm64"))
        end)
    end

-- Builds (debug) and runs the selected doctest-based test suites, then prints
-- an ASCII summary table and a list of individual failures.
task("test")
    set_menu {
        usage = "xmake test --run=[core|store|http|server|integration|util|remotestore|all] (comma-separated)",
        description = "Run Zen tests",
        options = {
            {'r', "run", "kv", "all", "Run test(s) - comma-separated",
                " - all", " - core", " - http", " - util", " - store",
                " - remotestore", " - server", " - integration"},
            {'j', "junit", "k", nil, "Enable junit report output"},
            {'n', "noskip", "k", nil, "Run skipped tests (passes --no-skip to doctest)"},
            {nil, "repeat", "kv", nil, "Repeat tests N times (stops on first failure)"}
        }
    }
    on_run(function()
        import("core.base.option")
        import("core.project.config")
        import("core.project.project")
        config.load()

        local testname = option.get("run")

        -- Ordered list of available tests (order defines execution order)
        -- Each entry is {cli name, xmake run target}.
        local available_tests = {
            {"core", "zencore-test"},
            {"http", "zenhttp-test"},
            {"util", "zenutil-test"},
            {"store", "zenstore-test"},
            {"remotestore", "zenremotestore-test"},
            {"server", "zenserver"},
            {"integration", "zenserver-test"},
        }

        -- Pick the build platform/arch from the host (Linux arm is not handled;
        -- non-windows/macosx hosts are assumed to be linux x86_64).
        local plat, arch
        if is_host("windows") then
            plat = "windows"
            arch = "x64"
        elseif is_host("macosx") then
            plat = "macosx"
            arch = is_arch("arm64") and "arm64" or "x86_64"
        else
            plat = "linux"
            arch = "x86_64"
        end

        -- Force a clean debug configure (tests are only compiled in debug,
        -- see ZEN_WITH_TESTS above) and build everything before running.
        print(os.exec("xmake config -c -m debug -p "..plat.." -a "..arch))
        print(os.exec("xmake"))

        -- Parse comma-separated test names into a set
        for token in testname:gmatch("[^,]+") do
        end
        local requested = {}
        for token in testname:gmatch("[^,]+") do
            requested[token:match("^%s*(.-)%s*$")] = true -- trim surrounding whitespace
        end

        -- Filter to requested test(s)
        local tests = {}
        local matched = {}
        for _, entry in ipairs(available_tests) do
            local name, target = entry[1], entry[2]
            if requested["all"] or requested[name] then
                table.insert(tests, {name = name, target = target})
                matched[name] = true
            end
        end

        -- Check for unknown test names
        if not requested["all"] then
            for name, _ in pairs(requested) do
                if not matched[name] then
                    raise("no tests match specification: '%s'", name)
                end
            end
        end

        if #tests == 0 then
            raise("no tests match specification: '%s'", testname)
        end

        local use_junit_reporting = option.get("junit")
        local use_noskip = option.get("noskip")
        local repeat_count = tonumber(option.get("repeat")) or 1
        local junit_report_files = {}
        local junit_report_dir
        if use_junit_reporting then
            junit_report_dir = path.join(os.projectdir(), config.get("buildir"), "reports")
            os.mkdir(junit_report_dir)
        end

        -- Results collection for summary table
        local results = {}
        local any_failed = false

        -- Format a number with thousands separators (e.g. 31103 -> "31,103")
        local function format_number(n)
            local s = tostring(n)
            local pos = #s % 3
            if pos == 0 then pos = 3 end
            local result = s:sub(1, pos)
            for i = pos + 1, #s, 3 do
                result = result .. "," .. s:sub(i, i + 2)
            end
            return result
        end

        -- Center a string within a given width
        local function center_str(s, width)
            local pad = width - #s
            local lpad = math.floor(pad / 2)
            local rpad = pad - lpad
            return string.rep(" ", lpad) .. s .. string.rep(" ", rpad)
        end

        -- Left-align a string within a given width (with 1-space left margin)
        local function left_align_str(s, width)
            return " " .. s .. string.rep(" ", width - #s - 1)
        end

        -- Right-align a string within a given width (with 1-space right margin)
        local function right_align_str(s, width)
            return string.rep(" ", width - #s - 1) .. s .. " "
        end

        -- Format elapsed seconds as a human-readable string
        local function format_time(seconds)
            if seconds >= 60 then
                local mins = math.floor(seconds / 60)
                local secs = seconds - mins * 60
                return string.format("%dm %04.1fs", mins, secs)
            else
                return string.format("%.1fs", seconds)
            end
        end

        -- Parse test summary file written by TestListener
        -- Expected key=value lines: cases_total, cases_passed, assertions_total,
        -- assertions_passed, elapsed_seconds, plus "failed=name|file|line" per
        -- failing test. Returns a table of counts/failures, or nil when the
        -- file is missing/unreadable or lacks cases_total.
        local function parse_summary_file(filepath)
            if not os.isfile(filepath) then
                return nil
            end
            local content = io.readfile(filepath)
            if not content then
                return nil
            end
            local ct = content:match("cases_total=(%d+)")
            local cp = content:match("cases_passed=(%d+)")
            local at = content:match("assertions_total=(%d+)")
            local ap = content:match("assertions_passed=(%d+)")
            if ct then
                local failures = {}
                for name, file, line in content:gmatch("failed=([^|\n]+)|([^|\n]+)|(%d+)") do
                    table.insert(failures, {name = name, file = file, line = tonumber(line)})
                end
                local es = content:match("elapsed_seconds=([%d%.]+)")
                return {
                    cases_total = tonumber(ct),
                    cases_passed = tonumber(cp) or 0,
                    asserts_total = tonumber(at) or 0,
                    asserts_passed = tonumber(ap) or 0,
                    elapsed_seconds = tonumber(es) or 0,
                    failures = failures
                }
            end
            return nil
        end

        -- Temp directory for summary files
        local summary_dir = path.join(os.tmpdir(), "zen-test-summary")
        os.mkdir(summary_dir)

        -- Run each test suite and collect results
        for iteration = 1, repeat_count do
            if repeat_count > 1 then
                printf("\n*** Iteration %d/%d ***\n", iteration, repeat_count)
            end
            for _, entry in ipairs(tests) do
                local name, target = entry.name, entry.target
                printf("=== %s ===\n", target)
                local suite_name = target
                if name == "server" then
                    suite_name = "zenserver (test)"
                end
                -- The "server" suite is the zenserver binary run in its
                -- self-test mode ("xmake run zenserver test"); everything
                -- else is a standalone doctest binary.
                local cmd = string.format("xmake run %s", target)
                if name == "server" then
                    cmd = string.format("xmake run %s test", target)
                end
                cmd = string.format("%s --duration=true", cmd)
                if use_junit_reporting then
                    local junit_report_file = path.join(junit_report_dir, string.format("junit-%s-%s-%s.xml", config.plat(), arch, target))
                    junit_report_files[target] = junit_report_file
                    cmd = string.format("%s --reporters=junit --out=%s", cmd, junit_report_file)
                end
                if use_noskip then
                    cmd = string.format("%s --no-skip", cmd)
                end

                -- Tell TestListener where to write the summary
                local summary_file = path.join(summary_dir, target .. ".txt")
                os.setenv("ZEN_TEST_SUMMARY_FILE", summary_file)

                -- Run test with real-time streaming output
                local test_ok = true
                try {
                    function()
                        os.exec(cmd)
                    end,
                    catch {
                        function(errors)
                            test_ok = false
                        end
                    }
                }

                -- Read summary written by TestListener
                local summary = parse_summary_file(summary_file)
                os.tryrm(summary_file)

                if not test_ok then
                    any_failed = true
                end

                -- Missing summary (e.g. crashed binary) degrades to zero
                -- counts rather than aborting the run.
                table.insert(results, {
                    suite = suite_name,
                    cases_passed = summary and summary.cases_passed or 0,
                    cases_total = summary and summary.cases_total or 0,
                    asserts_passed = summary and summary.asserts_passed or 0,
                    asserts_total = summary and summary.asserts_total or 0,
                    elapsed_seconds = summary and summary.elapsed_seconds or 0,
                    failures = summary and summary.failures or {},
                    passed = test_ok
                })
            end
            -- Stop repeating as soon as any suite fails.
            if any_failed then
                if repeat_count > 1 then
                    printf("\n*** Failure detected on iteration %d, stopping ***\n", iteration)
                end
                break
            end
        end

        -- Clean up
        os.setenv("ZEN_TEST_SUMMARY_FILE", "")
        os.tryrm(summary_dir)

        -- Print JUnit reports if requested
        for test, junit_report_file in pairs(junit_report_files) do
            printf("=== report - %s ===\n", test)
            if os.isfile(junit_report_file) then
                local data = io.readfile(junit_report_file)
                if data then
                    print(data)
                end
            end
        end

        -- Print summary table
        if #results > 0 then
            -- Calculate column widths based on content
            local col_suite = #("Suite")
            local col_cases = #("Cases")
            local col_asserts = #("Assertions")
            local col_time = #("Time")
            local col_status = #("Status")

            -- Compute totals
            local total_cases_passed = 0
            local total_cases_total = 0
            local total_asserts_passed = 0
            local total_asserts_total = 0
            local total_elapsed = 0

            for _, r in ipairs(results) do
                col_suite = math.max(col_suite, #r.suite)
                local cases_str = format_number(r.cases_passed) .. "/" .. format_number(r.cases_total)
                col_cases = math.max(col_cases, #cases_str)
                local asserts_str = format_number(r.asserts_passed) .. "/" .. format_number(r.asserts_total)
                col_asserts = math.max(col_asserts, #asserts_str)
                col_time = math.max(col_time, #format_time(r.elapsed_seconds))
                local status_str = r.passed and "SUCCESS" or "FAILED"
                col_status = math.max(col_status, #status_str)
                total_cases_passed = total_cases_passed + r.cases_passed
                total_cases_total = total_cases_total + r.cases_total
                total_asserts_passed = total_asserts_passed + r.asserts_passed
                total_asserts_total = total_asserts_total + r.asserts_total
                total_elapsed = total_elapsed + r.elapsed_seconds
            end

            -- Account for totals row in column widths
            col_suite = math.max(col_suite, #("Total"))
            col_cases = math.max(col_cases, #(format_number(total_cases_passed) .. "/" .. format_number(total_cases_total)))
            col_asserts = math.max(col_asserts, #(format_number(total_asserts_passed) .. "/" .. format_number(total_asserts_total)))
            col_time = math.max(col_time, #format_time(total_elapsed))

            -- Add padding (1 space each side)
            col_suite = col_suite + 2
            col_cases = col_cases + 2
            col_asserts = col_asserts + 2
            col_time = col_time + 2
            col_status = col_status + 2

            -- Build horizontal border segments
            local h_suite = string.rep("-", col_suite)
            local h_cases = string.rep("-", col_cases)
            local h_asserts = string.rep("-", col_asserts)
            local h_time = string.rep("-", col_time)
            local h_status = string.rep("-", col_status)
            local top = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+"
            local mid = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+"
            local bottom = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+"
            local vbar = "|"

            local header_msg = any_failed and "Some tests failed:" or "All tests passed:"
            printf("\n* %s\n", header_msg)
            printf(" %s\n", top)
            printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar,
                center_str("Suite", col_suite), vbar,
                center_str("Cases", col_cases), vbar,
                center_str("Assertions", col_asserts), vbar,
                center_str("Time", col_time), vbar,
                center_str("Status", col_status), vbar)
            for _, r in ipairs(results) do
                printf(" %s\n", mid)
                local cases_str = format_number(r.cases_passed) .. "/" .. format_number(r.cases_total)
                local asserts_str = format_number(r.asserts_passed) .. "/" .. format_number(r.asserts_total)
                local time_str = format_time(r.elapsed_seconds)
                local status_str = r.passed and "SUCCESS" or "FAILED"
                printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar,
                    left_align_str(r.suite, col_suite), vbar,
                    right_align_str(cases_str, col_cases), vbar,
                    right_align_str(asserts_str, col_asserts), vbar,
                    right_align_str(time_str, col_time), vbar,
                    right_align_str(status_str, col_status), vbar)
            end

            -- Totals row
            if #results > 1 then
                local h_suite_eq = string.rep("=", col_suite)
                local h_cases_eq = string.rep("=", col_cases)
                local h_asserts_eq = string.rep("=", col_asserts)
                local h_time_eq = string.rep("=", col_time)
                local h_status_eq = string.rep("=", col_status)
                local totals_sep = "+" .. h_suite_eq .. "+" .. h_cases_eq .. "+" .. h_asserts_eq .. "+" .. h_time_eq .. "+" .. h_status_eq .. "+"
                printf(" %s\n", totals_sep)
                local total_cases_str = format_number(total_cases_passed) .. "/" .. format_number(total_cases_total)
                local total_asserts_str = format_number(total_asserts_passed) .. "/" .. format_number(total_asserts_total)
                local total_time_str = format_time(total_elapsed)
                local total_status_str = any_failed and "FAILED" or "SUCCESS"
                printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar,
                    left_align_str("Total", col_suite), vbar,
                    right_align_str(total_cases_str, col_cases), vbar,
                    right_align_str(total_asserts_str, col_asserts), vbar,
                    right_align_str(total_time_str, col_time), vbar,
                    right_align_str(total_status_str, col_status), vbar)
            end
            printf(" %s\n", bottom)
        end

        -- Print list of individual failing tests
        if any_failed then
            printf("\n Failures:\n")
            for _, r in ipairs(results) do
                if #r.failures > 0 then
                    printf(" -- %s --\n", r.suite)
                    for _, f in ipairs(r.failures) do
                        printf(" FAILED: %s (%s:%d)\n", f.name, f.file, f.line)
                    end
                elseif not r.passed then
                    printf(" -- %s --\n", r.suite)
                    printf(" (test binary exited with error, no failure details available)\n")
                end
            end
        end

        if any_failed then
            raise("one or more test suites failed")
        end
    end)