diff options
Diffstat (limited to 'scripts')
| -rw-r--r-- | scripts/docker.lua | 88 | ||||
| -rw-r--r-- | scripts/test.lua | 401 | ||||
| -rwxr-xr-x | scripts/test_scripts/block-clone-test-mac.sh | 43 | ||||
| -rw-r--r-- | scripts/test_scripts/block-clone-test-windows.ps1 | 145 | ||||
| -rwxr-xr-x | scripts/test_scripts/block-clone-test.sh | 143 | ||||
| -rw-r--r-- | scripts/test_scripts/builds-download-upload-test.py | 74 | ||||
| -rw-r--r-- | scripts/test_scripts/builds-download-upload-update-build-ids.py | 150 | ||||
| -rw-r--r-- | scripts/test_scripts/oplog-import-export-test.py | 177 | ||||
| -rw-r--r-- | scripts/test_scripts/oplog-update-build-ids.py | 151 | ||||
| -rwxr-xr-x | scripts/ue_build_linux/verify_linux_toolchains.sh | 121 | ||||
| -rw-r--r-- | scripts/updatefrontend.lua | 111 | ||||
| -rwxr-xr-x | scripts/win_cross/get_win_sdk.sh | 305 |
12 files changed, 1724 insertions(+), 185 deletions(-)
diff --git a/scripts/docker.lua b/scripts/docker.lua new file mode 100644 index 000000000..f66f8db86 --- /dev/null +++ b/scripts/docker.lua @@ -0,0 +1,88 @@ +-- Copyright Epic Games, Inc. All Rights Reserved. + +import("core.base.option") + +-------------------------------------------------------------------------------- +local function _get_version() + local version_file = path.join(os.projectdir(), "VERSION.txt") + local version = io.readfile(version_file) + if version then + version = version:trim() + end + if not version or version == "" then + raise("Failed to read version from VERSION.txt") + end + return version +end + +-------------------------------------------------------------------------------- +function main() + local registry = option.get("registry") + local tag = option.get("tag") + local push = option.get("push") + local no_wine = option.get("no-wine") + local win_binary = option.get("win-binary") + + if not tag then + tag = _get_version() + end + + local image_name = no_wine and "zenserver-compute-linux" or "zenserver-compute" + if registry then + image_name = registry .. "/" .. image_name + end + + local full_tag = image_name .. ":" .. 
tag + + -- Verify the zenserver binary exists + local binary_path = path.join(os.projectdir(), "build/linux/x86_64/release/zenserver") + if not os.isfile(binary_path) then + raise("zenserver binary not found at %s\nBuild it first with: xmake config -y -m release -a x64 && xmake build -y zenserver", binary_path) + end + + -- Stage Windows binary if provided + local win_staging_dir = nil + if win_binary then + if not os.isfile(win_binary) then + raise("Windows binary not found at %s", win_binary) + end + win_staging_dir = path.join(os.projectdir(), "build/win-binary-staging") + os.mkdir(win_staging_dir) + os.cp(win_binary, path.join(win_staging_dir, "zenserver.exe")) + print("-- Including Windows binary: %s", win_binary) + end + + -- Build the Docker image + local dockerfile = path.join(os.projectdir(), "docker/Dockerfile") + print("-- Building Docker image: %s", full_tag) + local args = {"build", "-t", full_tag, "-f", dockerfile} + if no_wine then + table.insert(args, "--build-arg") + table.insert(args, "INSTALL_WINE=false") + end + if win_staging_dir then + table.insert(args, "--build-arg") + table.insert(args, "WIN_BINARY_DIR=build/win-binary-staging") + end + table.insert(args, os.projectdir()) + local ret = os.execv("docker", args) + if ret > 0 then + raise("Docker build failed") + end + + -- Clean up staging directory + if win_staging_dir then + os.rmdir(win_staging_dir) + end + + print("-- Built image: %s", full_tag) + + if push then + print("-- Pushing image: %s", full_tag) + ret = os.execv("docker", {"push", full_tag}) + if ret > 0 then + raise("Docker push failed") + end + print("-- Pushed image: %s", full_tag) + end +end diff --git a/scripts/test.lua b/scripts/test.lua new file mode 100644 index 000000000..df1218ce8 --- /dev/null +++ b/scripts/test.lua @@ -0,0 +1,401 @@ +-- Copyright Epic Games, Inc. All Rights Reserved. 
+ +function main() + import("core.base.option") + import("core.project.config") + import("core.project.project") + + config.load() + + -- Override table: target name -> short name (for targets that don't follow convention) + local short_name_overrides = { + ["zenserver-test"] = "integration", + } + + -- Build test list from targets in the "tests" group + local available_tests = {} + for name, target in pairs(project.targets()) do + if target:get("group") == "tests" and name:endswith("-test") then + local short = short_name_overrides[name] + if not short then + -- Derive short name: "zencore-test" -> "core" + short = name + if short:startswith("zen") then short = short:sub(4) end + if short:endswith("-test") then short = short:sub(1, -6) end + end + table.insert(available_tests, {short, name}) + end + end + + -- Add non-test-group entries that have a test subcommand + table.insert(available_tests, {"server", "zenserver"}) + + table.sort(available_tests, function(a, b) return a[1] < b[1] end) + + -- Handle --list: print discovered test names and exit + if option.get("list") then + printf("Available tests:\n") + for _, entry in ipairs(available_tests) do + printf(" %-16s -> %s\n", entry[1], entry[2]) + end + return + end + + local testname = option.get("run") + + -- Parse comma-separated test names into a set + local requested = {} + for token in testname:gmatch("[^,]+") do + requested[token:match("^%s*(.-)%s*$")] = true + end + + -- Filter to requested test(s) + local tests = {} + local matched = {} + + for _, entry in ipairs(available_tests) do + local name, target = entry[1], entry[2] + if requested["all"] or requested[name] then + table.insert(tests, {name = name, target = target}) + matched[name] = true + end + end + + -- Check for unknown test names + if not requested["all"] then + for name, _ in pairs(requested) do + if not matched[name] then + raise("no tests match specification: '%s'", name) + end + end + end + + if #tests == 0 then + raise("no tests match 
specification: '%s'", testname) + end + + local plat, arch + if is_host("windows") then + plat = "windows" + arch = "x64" + elseif is_host("macosx") then + plat = "macosx" + arch = is_arch("arm64") and "arm64" or "x86_64" + else + plat = "linux" + arch = "x86_64" + end + + -- Only reconfigure if current config doesn't already match + if config.get("mode") ~= "debug" or config.get("plat") ~= plat or config.get("arch") ~= arch then + local toolchain_flag = config.get("toolchain") and ("--toolchain=" .. config.get("toolchain")) or "" + local sdk_flag = config.get("sdk") and ("--sdk=" .. config.get("sdk")) or "" + os.exec("xmake config -y -c -m debug -p %s -a %s %s %s", plat, arch, toolchain_flag, sdk_flag) + end + + -- Build targets we're going to run + if requested["all"] then + os.exec("xmake build -y") + else + for _, entry in ipairs(tests) do + os.exec("xmake build -y %s", entry.target) + end + end + + local use_junit_reporting = option.get("junit") + local use_noskip = option.get("noskip") + local use_verbose = option.get("verbose") + local repeat_count = tonumber(option.get("repeat")) or 1 + local extra_args = option.get("arguments") or {} + local junit_report_files = {} + + local junit_report_dir + if use_junit_reporting then + junit_report_dir = path.join(os.projectdir(), config.get("buildir"), "reports") + os.mkdir(junit_report_dir) + end + + -- Results collection for summary table + local results = {} + local any_failed = false + + -- Format a number with thousands separators (e.g. 31103 -> "31,103") + local function format_number(n) + local s = tostring(n) + local pos = #s % 3 + if pos == 0 then pos = 3 end + local result = s:sub(1, pos) + for i = pos + 1, #s, 3 do + result = result .. "," .. s:sub(i, i + 2) + end + return result + end + + -- Center a string within a given width + local function center_str(s, width) + local pad = width - #s + local lpad = math.floor(pad / 2) + local rpad = pad - lpad + return string.rep(" ", lpad) .. s .. 
string.rep(" ", rpad) + end + + -- Left-align a string within a given width (with 1-space left margin) + local function left_align_str(s, width) + return " " .. s .. string.rep(" ", width - #s - 1) + end + + -- Right-align a string within a given width (with 1-space right margin) + local function right_align_str(s, width) + return string.rep(" ", width - #s - 1) .. s .. " " + end + + -- Format elapsed seconds as a human-readable string + local function format_time(seconds) + if seconds >= 60 then + local mins = math.floor(seconds / 60) + local secs = seconds - mins * 60 + return string.format("%dm %04.1fs", mins, secs) + else + return string.format("%.1fs", seconds) + end + end + + -- Parse test summary file written by TestListener + local function parse_summary_file(filepath) + if not os.isfile(filepath) then return nil end + local content = io.readfile(filepath) + if not content then return nil end + local ct = content:match("cases_total=(%d+)") + local cp = content:match("cases_passed=(%d+)") + local at = content:match("assertions_total=(%d+)") + local ap = content:match("assertions_passed=(%d+)") + if ct then + local failures = {} + for name, file, line in content:gmatch("failed=([^|\n]+)|([^|\n]+)|(%d+)") do + table.insert(failures, {name = name, file = file, line = tonumber(line)}) + end + local es = content:match("elapsed_seconds=([%d%.]+)") + return { + cases_total = tonumber(ct), + cases_passed = tonumber(cp) or 0, + asserts_total = tonumber(at) or 0, + asserts_passed = tonumber(ap) or 0, + elapsed_seconds = tonumber(es) or 0, + failures = failures + } + end + return nil + end + + -- Temp directory for summary files + local summary_dir = path.join(os.tmpdir(), "zen-test-summary") + os.mkdir(summary_dir) + + -- Run each test suite and collect results + for iteration = 1, repeat_count do + if repeat_count > 1 then + printf("\n*** Iteration %d/%d ***\n", iteration, repeat_count) + end + + for _, entry in ipairs(tests) do + local name, target = entry.name, 
entry.target + printf("=== %s ===\n", target) + + local suite_name = target + if name == "server" then + suite_name = "zenserver (test)" + end + + local cmd = string.format("xmake run %s", target) + if name == "server" then + cmd = string.format("xmake run %s test", target) + end + cmd = string.format("%s --duration=true", cmd) + + if use_junit_reporting then + local junit_report_file = path.join(junit_report_dir, string.format("junit-%s-%s-%s.xml", config.plat(), arch, target)) + junit_report_files[target] = junit_report_file + cmd = string.format("%s --reporters=junit --out=%s", cmd, junit_report_file) + end + if use_noskip then + cmd = string.format("%s --no-skip", cmd) + end + if use_verbose and name == "integration" then + cmd = string.format("%s --verbose", cmd) + end + for _, arg in ipairs(extra_args) do + cmd = string.format("%s %s", cmd, arg) + end + + -- Tell TestListener where to write the summary + local summary_file = path.join(summary_dir, target .. ".txt") + os.setenv("ZEN_TEST_SUMMARY_FILE", summary_file) + + -- Run test with real-time streaming output + local test_ok = true + try { + function() + os.exec(cmd) + end, + catch { + function(errors) + test_ok = false + end + } + } + + -- Read summary written by TestListener + local summary = parse_summary_file(summary_file) + os.tryrm(summary_file) + + if not test_ok then + any_failed = true + end + + table.insert(results, { + suite = suite_name, + cases_passed = summary and summary.cases_passed or 0, + cases_total = summary and summary.cases_total or 0, + asserts_passed = summary and summary.asserts_passed or 0, + asserts_total = summary and summary.asserts_total or 0, + elapsed_seconds = summary and summary.elapsed_seconds or 0, + failures = summary and summary.failures or {}, + passed = test_ok + }) + end + + if any_failed then + if repeat_count > 1 then + printf("\n*** Failure detected on iteration %d, stopping ***\n", iteration) + end + break + end + end + + -- Clean up + 
os.setenv("ZEN_TEST_SUMMARY_FILE", "") + os.tryrm(summary_dir) + + -- Print JUnit reports if requested + for test, junit_report_file in pairs(junit_report_files) do + printf("=== report - %s ===\n", test) + if os.isfile(junit_report_file) then + local data = io.readfile(junit_report_file) + if data then + print(data) + end + end + end + + -- Print summary table + if #results > 0 then + -- Calculate column widths based on content + local col_suite = #("Suite") + local col_cases = #("Cases") + local col_asserts = #("Assertions") + local col_time = #("Time") + local col_status = #("Status") + + -- Compute totals + local total_cases_passed = 0 + local total_cases_total = 0 + local total_asserts_passed = 0 + local total_asserts_total = 0 + local total_elapsed = 0 + + for _, r in ipairs(results) do + col_suite = math.max(col_suite, #r.suite) + local cases_str = format_number(r.cases_passed) .. "/" .. format_number(r.cases_total) + col_cases = math.max(col_cases, #cases_str) + local asserts_str = format_number(r.asserts_passed) .. "/" .. format_number(r.asserts_total) + col_asserts = math.max(col_asserts, #asserts_str) + col_time = math.max(col_time, #format_time(r.elapsed_seconds)) + local status_str = r.passed and "SUCCESS" or "FAILED" + col_status = math.max(col_status, #status_str) + + total_cases_passed = total_cases_passed + r.cases_passed + total_cases_total = total_cases_total + r.cases_total + total_asserts_passed = total_asserts_passed + r.asserts_passed + total_asserts_total = total_asserts_total + r.asserts_total + total_elapsed = total_elapsed + r.elapsed_seconds + end + + -- Account for totals row in column widths + col_suite = math.max(col_suite, #("Total")) + col_cases = math.max(col_cases, #(format_number(total_cases_passed) .. "/" .. format_number(total_cases_total))) + col_asserts = math.max(col_asserts, #(format_number(total_asserts_passed) .. "/" .. 
format_number(total_asserts_total))) + col_time = math.max(col_time, #format_time(total_elapsed)) + + -- Add padding (1 space each side) + col_suite = col_suite + 2 + col_cases = col_cases + 2 + col_asserts = col_asserts + 2 + col_time = col_time + 2 + col_status = col_status + 2 + + -- Build horizontal border segments + local h_suite = string.rep("-", col_suite) + local h_cases = string.rep("-", col_cases) + local h_asserts = string.rep("-", col_asserts) + local h_time = string.rep("-", col_time) + local h_status = string.rep("-", col_status) + + local top = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+" + local mid = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+" + local bottom = "+" .. h_suite .. "+" .. h_cases .. "+" .. h_asserts .. "+" .. h_time .. "+" .. h_status .. "+" + local vbar = "|" + + local header_msg = any_failed and "Some tests failed:" or "All tests passed:" + printf("\n* %s\n", header_msg) + printf(" %s\n", top) + printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar, center_str("Suite", col_suite), vbar, center_str("Cases", col_cases), vbar, center_str("Assertions", col_asserts), vbar, center_str("Time", col_time), vbar, center_str("Status", col_status), vbar) + + for _, r in ipairs(results) do + printf(" %s\n", mid) + local cases_str = format_number(r.cases_passed) .. "/" .. format_number(r.cases_total) + local asserts_str = format_number(r.asserts_passed) .. "/" .. 
format_number(r.asserts_total) + local time_str = format_time(r.elapsed_seconds) + local status_str = r.passed and "SUCCESS" or "FAILED" + printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar, left_align_str(r.suite, col_suite), vbar, right_align_str(cases_str, col_cases), vbar, right_align_str(asserts_str, col_asserts), vbar, right_align_str(time_str, col_time), vbar, right_align_str(status_str, col_status), vbar) + end + + -- Totals row + if #results > 1 then + local h_suite_eq = string.rep("=", col_suite) + local h_cases_eq = string.rep("=", col_cases) + local h_asserts_eq = string.rep("=", col_asserts) + local h_time_eq = string.rep("=", col_time) + local h_status_eq = string.rep("=", col_status) + local totals_sep = "+" .. h_suite_eq .. "+" .. h_cases_eq .. "+" .. h_asserts_eq .. "+" .. h_time_eq .. "+" .. h_status_eq .. "+" + printf(" %s\n", totals_sep) + + local total_cases_str = format_number(total_cases_passed) .. "/" .. format_number(total_cases_total) + local total_asserts_str = format_number(total_asserts_passed) .. "/" .. 
format_number(total_asserts_total) + local total_time_str = format_time(total_elapsed) + local total_status_str = any_failed and "FAILED" or "SUCCESS" + printf(" %s%s%s%s%s%s%s%s%s%s%s\n", vbar, left_align_str("Total", col_suite), vbar, right_align_str(total_cases_str, col_cases), vbar, right_align_str(total_asserts_str, col_asserts), vbar, right_align_str(total_time_str, col_time), vbar, right_align_str(total_status_str, col_status), vbar) + end + + printf(" %s\n", bottom) + end + + -- Print list of individual failing tests + if any_failed then + printf("\n Failures:\n") + for _, r in ipairs(results) do + if #r.failures > 0 then + printf(" -- %s --\n", r.suite) + for _, f in ipairs(r.failures) do + printf(" FAILED: %s (%s:%d)\n", f.name, f.file, f.line) + end + elseif not r.passed then + printf(" -- %s --\n", r.suite) + printf(" (test binary exited with error, no failure details available)\n") + end + end + end + + if any_failed then + raise("one or more test suites failed") + end +end diff --git a/scripts/test_scripts/block-clone-test-mac.sh b/scripts/test_scripts/block-clone-test-mac.sh new file mode 100755 index 000000000..a3d3ca4d3 --- /dev/null +++ b/scripts/test_scripts/block-clone-test-mac.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash +# Test block-clone functionality on macOS (APFS). +# +# APFS is the default filesystem on modern Macs and natively supports +# clonefile(), so no special setup is needed — just run the tests. +# +# Usage: +# ./scripts/test_scripts/block-clone-test-mac.sh [path-to-zencore-test] +# +# If no path is given, defaults to build/macosx/<arch>/debug/zencore-test +# relative to the repository root. + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" + +ARCH="$(uname -m)" +TEST_BINARY="${1:-$REPO_ROOT/build/macosx/$ARCH/debug/zencore-test}" + +if [ ! 
-x "$TEST_BINARY" ]; then + echo "error: test binary not found or not executable: $TEST_BINARY" >&2 + echo "hint: build with 'xmake config -m debug && xmake build zencore-test'" >&2 + exit 1 +fi + +# Verify we're on APFS +BINARY_DIR="$(dirname "$TEST_BINARY")" +FS_TYPE="$(diskutil info "$(df "$BINARY_DIR" | tail -1 | awk '{print $1}')" 2>/dev/null | grep "Type (Bundle)" | awk '{print $NF}' || true)" + +if [ "$FS_TYPE" != "apfs" ]; then + echo "warning: filesystem does not appear to be APFS (got: ${FS_TYPE:-unknown}), clone tests may skip" >&2 +fi + +TEST_CASES="TryCloneFile,CopyFile.Clone,SupportsBlockRefCounting,CloneQueryInterface" + +echo "Running block-clone tests ..." +echo "---" +"$TEST_BINARY" \ + --test-suite="core.filesystem" \ + --test-case="$TEST_CASES" +echo "---" +echo "All block-clone tests passed." diff --git a/scripts/test_scripts/block-clone-test-windows.ps1 b/scripts/test_scripts/block-clone-test-windows.ps1 new file mode 100644 index 000000000..df24831a4 --- /dev/null +++ b/scripts/test_scripts/block-clone-test-windows.ps1 @@ -0,0 +1,145 @@ +# Test block-clone functionality on a temporary ReFS VHD. +# +# Requires: +# - Administrator privileges +# - Windows Server, or Windows 10/11 Pro for Workstations (ReFS support) +# - Hyper-V PowerShell module (for New-VHD), or diskpart fallback +# +# Usage: +# # From an elevated PowerShell prompt: +# .\scripts\test_scripts\block-clone-test-windows.ps1 [-TestBinary <path>] +# +# If -TestBinary is not given, defaults to build\windows\x64\debug\zencore-test.exe +# relative to the repository root. 
+ +param( + [string]$TestBinary = "" +) + +$ErrorActionPreference = "Stop" + +$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition +$RepoRoot = (Resolve-Path "$ScriptDir\..\..").Path + +if (-not $TestBinary) { + $TestBinary = Join-Path $RepoRoot "build\windows\x64\debug\zencore-test.exe" +} + +$ImageSizeMB = 2048 +$TestCases = "TryCloneFile,CopyFile.Clone,SupportsBlockRefCounting,CloneQueryInterface" + +$VhdPath = "" +$MountLetter = "" + +function Cleanup { + $ErrorActionPreference = "SilentlyContinue" + + if ($MountLetter) { + Write-Host "Dismounting VHD ..." + Dismount-VHD -Path $VhdPath -ErrorAction SilentlyContinue + } + if ($VhdPath -and (Test-Path $VhdPath)) { + Remove-Item -Force $VhdPath -ErrorAction SilentlyContinue + } +} + +trap { + Cleanup + throw $_ +} + +# --- Preflight checks --- + +$IsAdmin = ([Security.Principal.WindowsPrincipal] [Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole( + [Security.Principal.WindowsBuiltInRole]::Administrator) +if (-not $IsAdmin) { + Write-Error "This script must be run as Administrator (for VHD mount/format)." + exit 1 +} + +if (-not (Test-Path $TestBinary)) { + Write-Error "Test binary not found: $TestBinary`nHint: build with 'xmake config -m debug && xmake build zencore-test'" + exit 1 +} + +# Check that ReFS formatting is available +$RefsAvailable = $true +try { + # A quick check: on non-Server/Workstation SKUs, Format-Volume -FileSystem ReFS will fail + $OsCaption = (Get-CimInstance Win32_OperatingSystem).Caption + if ($OsCaption -notmatch "Server|Workstation|Enterprise") { + Write-Warning "ReFS may not be available on this Windows edition: $OsCaption" + Write-Warning "Continuing anyway — format step will fail if unsupported." + } +} catch { + # Non-fatal, just proceed +} + +# --- Create and mount ReFS VHD --- + +$VhdPath = Join-Path $env:TEMP "refs-clone-test-$([guid]::NewGuid().ToString('N').Substring(0,8)).vhdx" + +Write-Host "Creating ${ImageSizeMB}MB VHDX at $VhdPath ..." 
+ +try { + # Prefer Hyper-V cmdlet if available + New-VHD -Path $VhdPath -SizeBytes ($ImageSizeMB * 1MB) -Fixed | Out-Null +} catch { + # Fallback to diskpart + Write-Host "New-VHD not available, falling back to diskpart ..." + $DiskpartScript = @" +create vdisk file="$VhdPath" maximum=$ImageSizeMB type=fixed +"@ + $DiskpartScript | diskpart | Out-Null +} + +Write-Host "Mounting and initializing VHD ..." + +Mount-VHD -Path $VhdPath +$Disk = Get-VHD -Path $VhdPath | Get-Disk + +# Suppress Explorer's auto-open / "format disk?" prompts for the raw partition +Stop-Service ShellHWDetection -ErrorAction SilentlyContinue + +try { + Initialize-Disk -Number $Disk.Number -PartitionStyle GPT -ErrorAction SilentlyContinue + $Partition = New-Partition -DiskNumber $Disk.Number -UseMaximumSize -AssignDriveLetter + $MountLetter = $Partition.DriveLetter + + Write-Host "Formatting ${MountLetter}: as ReFS with integrity disabled ..." + Format-Volume -DriveLetter $MountLetter -FileSystem ReFS -NewFileSystemLabel "CloneTest" -Confirm:$false | Out-Null + + # Disable integrity streams (required for block cloning to work on ReFS) + Set-FileIntegrity "${MountLetter}:\" -Enable $false -ErrorAction SilentlyContinue +} finally { + Start-Service ShellHWDetection -ErrorAction SilentlyContinue +} + +$MountRoot = "${MountLetter}:\" + +# --- Copy test binary and run --- + +Write-Host "Copying test binary to ReFS volume ..." +Copy-Item $TestBinary "$MountRoot\zencore-test.exe" + +Write-Host "Running block-clone tests ..." +Write-Host "---" + +$proc = Start-Process -FilePath "$MountRoot\zencore-test.exe" ` + -ArgumentList "--test-suite=core.filesystem", "--test-case=$TestCases" ` + -NoNewWindow -Wait -PassThru + +Write-Host "---" + +if ($proc.ExitCode -ne 0) { + Write-Error "Tests failed with exit code $($proc.ExitCode)" + Cleanup + exit $proc.ExitCode +} + +Write-Host "ReFS: all block-clone tests passed." + +# --- Cleanup --- + +Cleanup +Write-Host "Done." 
diff --git a/scripts/test_scripts/block-clone-test.sh b/scripts/test_scripts/block-clone-test.sh new file mode 100755 index 000000000..7c6bf5605 --- /dev/null +++ b/scripts/test_scripts/block-clone-test.sh @@ -0,0 +1,143 @@ +#!/usr/bin/env bash +# Test block-clone functionality on temporary Btrfs and XFS loopback filesystems. +# +# Requires: root/sudo, btrfs-progs (mkfs.btrfs), xfsprogs (mkfs.xfs) +# +# Usage: +# sudo ./scripts/test_scripts/block-clone-test.sh [path-to-zencore-test] +# +# If no path is given, defaults to build/linux/x86_64/debug/zencore-test +# relative to the repository root. +# +# Options: +# --btrfs-only Only test Btrfs +# --xfs-only Only test XFS + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" + +TEST_BINARY="" +RUN_BTRFS=true +RUN_XFS=true + +for arg in "$@"; do + case "$arg" in + --btrfs-only) RUN_XFS=false ;; + --xfs-only) RUN_BTRFS=false ;; + *) TEST_BINARY="$arg" ;; + esac +done + +TEST_BINARY="${TEST_BINARY:-$REPO_ROOT/build/linux/x86_64/debug/zencore-test}" +IMAGE_SIZE="512M" +TEST_CASES="TryCloneFile,CopyFile.Clone,SupportsBlockRefCounting,CloneQueryInterface" + +# Track all temp files for cleanup +CLEANUP_MOUNTS=() +CLEANUP_DIRS=() +CLEANUP_FILES=() + +cleanup() { + local exit_code=$? + set +e + + for mnt in "${CLEANUP_MOUNTS[@]}"; do + if mountpoint -q "$mnt" 2>/dev/null; then + umount "$mnt" + fi + done + for dir in "${CLEANUP_DIRS[@]}"; do + [ -d "$dir" ] && rmdir "$dir" + done + for f in "${CLEANUP_FILES[@]}"; do + [ -f "$f" ] && rm -f "$f" + done + + if [ $exit_code -ne 0 ]; then + echo "FAILED (exit code $exit_code)" + fi + exit $exit_code +} +trap cleanup EXIT + +# --- Preflight checks --- + +if [ "$(id -u)" -ne 0 ]; then + echo "error: this script must be run as root (for mount/umount)" >&2 + exit 1 +fi + +if [ ! 
-x "$TEST_BINARY" ]; then + echo "error: test binary not found or not executable: $TEST_BINARY" >&2 + echo "hint: build with 'xmake config -m debug && xmake build zencore-test'" >&2 + exit 1 +fi + +if $RUN_BTRFS && ! command -v mkfs.btrfs &>/dev/null; then + echo "warning: mkfs.btrfs not found — install btrfs-progs to test Btrfs, skipping" >&2 + RUN_BTRFS=false +fi + +if $RUN_XFS && ! command -v mkfs.xfs &>/dev/null; then + echo "warning: mkfs.xfs not found — install xfsprogs to test XFS, skipping" >&2 + RUN_XFS=false +fi + +if ! $RUN_BTRFS && ! $RUN_XFS; then + echo "error: no filesystems to test" >&2 + exit 1 +fi + +# --- Helper to create, mount, and run tests on a loopback filesystem --- + +run_tests_on_fs() { + local fs_type="$1" + local mkfs_cmd="$2" + + echo "" + echo "========================================" + echo " Testing block-clone on $fs_type" + echo "========================================" + + local image_path mount_path + image_path="$(mktemp "/tmp/${fs_type}-clone-test-XXXXXX.img")" + mount_path="$(mktemp -d "/tmp/${fs_type}-clone-mount-XXXXXX")" + CLEANUP_FILES+=("$image_path") + CLEANUP_DIRS+=("$mount_path") + CLEANUP_MOUNTS+=("$mount_path") + + echo "Creating ${IMAGE_SIZE} ${fs_type} image at ${image_path} ..." + truncate -s "$IMAGE_SIZE" "$image_path" + $mkfs_cmd "$image_path" + + echo "Mounting at ${mount_path} ..." + mount -o loop "$image_path" "$mount_path" + chmod 777 "$mount_path" + + echo "Copying test binary ..." + cp "$TEST_BINARY" "$mount_path/zencore-test" + chmod +x "$mount_path/zencore-test" + + echo "Running tests ..." + echo "---" + "$mount_path/zencore-test" \ + --test-suite="core.filesystem" \ + --test-case="$TEST_CASES" + echo "---" + echo "$fs_type: all block-clone tests passed." +} + +# --- Run --- + +if $RUN_BTRFS; then + run_tests_on_fs "btrfs" "mkfs.btrfs -q" +fi + +if $RUN_XFS; then + run_tests_on_fs "xfs" "mkfs.xfs -q -m reflink=1" +fi + +echo "" +echo "All block-clone tests passed." 
diff --git a/scripts/test_scripts/builds-download-upload-test.py b/scripts/test_scripts/builds-download-upload-test.py index e4fee7cb8..8ff5245c1 100644 --- a/scripts/test_scripts/builds-download-upload-test.py +++ b/scripts/test_scripts/builds-download-upload-test.py @@ -4,6 +4,8 @@ from __future__ import annotations import argparse +import json +import os import platform import subprocess import sys @@ -15,22 +17,51 @@ _ARCH = "x64" if sys.platform == "win32" else platform.machine().lower() _EXE_SUFFIX = ".exe" if sys.platform == "win32" else "" +def _cache_dir() -> Path: + if sys.platform == "win32": + base = Path(os.environ.get("LOCALAPPDATA", Path.home() / "AppData" / "Local")) + return base / "Temp" / "zen" + elif sys.platform == "darwin": + return Path.home() / "Library" / "Caches" / "zen" + else: + base = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache")) + return base / "zen" + + +_BUILD_IDS_PATH = _cache_dir() / "builds-download-upload-build-ids.json" + + class Build(NamedTuple): name: str bucket: str id: str -BUILDS = [ - Build("XB1Client", "fortnitegame.staged-build.fortnite-main.xb1-client", "09a7616c1a388dfe6056aa57"), - Build("WindowsClient", "fortnitegame.staged-build.fortnite-main.windows-client", "09a762c81e2cf213142d0ce5"), - Build("SwitchClient", "fortnitegame.staged-build.fortnite-main.switch-client", "09a75bf9c3ce75bce09f644f"), - Build("LinuxServer", "fortnitegame.staged-build.fortnite-main.linux-server", "09a750ac155eb3e3b62e87e0"), - Build("Switch2Client", "fortnitegame.staged-build.fortnite-main.switch2-client", "09a78f3df07b289691ec5710"), - Build("PS4Client", "fortnitegame.staged-build.fortnite-main.ps4-client", "09a76ea92ad301d4724fafad"), - Build("IOSClient", "fortnitegame.staged-build.fortnite-main.ios-client", "09a7816fa26c23362fef0c5d"), - Build("AndroidClient", "fortnitegame.staged-build.fortnite-main.android-client", "09a76725f1620d62c6be06e4"), -] +def load_builds() -> tuple[str, list[Build]]: + if not 
_BUILD_IDS_PATH.exists(): + print(f"Build IDs file not found: {_BUILD_IDS_PATH}") + answer = input("Run builds-download-upload-update-build-ids.py now to populate it? [y/N] ").strip().lower() + if answer == "y": + update_script = Path(__file__).parent / "builds-download-upload-update-build-ids.py" + subprocess.run([sys.executable, str(update_script)], check=True) + else: + sys.exit("Aborted. Run scripts/test_scripts/builds-download-upload-update-build-ids.py to populate it.") + with _BUILD_IDS_PATH.open() as f: + data: dict = json.load(f) + namespace = data.get("namespace", "") + if not namespace: + sys.exit(f"error: {_BUILD_IDS_PATH} is missing 'namespace'") + builds = [] + for name, entry in data.get("builds", {}).items(): + bucket = entry.get("bucket", "") + build_id = entry.get("buildId", "") + if not bucket or not build_id: + sys.exit(f"error: entry '{name}' in {_BUILD_IDS_PATH} is missing 'bucket' or 'buildId'") + builds.append(Build(name, bucket, build_id)) + if not builds: + sys.exit(f"error: {_BUILD_IDS_PATH} contains no builds") + return namespace, builds + ZEN_EXE: Path = Path(f"./build/{_PLATFORM}/{_ARCH}/release/zen{_EXE_SUFFIX}") ZEN_METADATA_DIR: Path = Path(__file__).resolve().parent / "metadatas" @@ -99,12 +130,12 @@ def wipe_or_create(label: str, path: Path, extra_zen_args: list[str] | None = No print() -def check_prerequisites() -> None: +def check_prerequisites(builds: list[Build]) -> None: if not ZEN_EXE.is_file(): sys.exit(f"error: zen executable not found: {ZEN_EXE}") if not ZEN_METADATA_DIR.is_dir(): sys.exit(f"error: metadata directory not found: {ZEN_METADATA_DIR}") - for build in BUILDS: + for build in builds: metadata = ZEN_METADATA_DIR / f"{build.name}.json" if not metadata.is_file(): sys.exit(f"error: metadata file not found: {metadata}") @@ -145,10 +176,10 @@ def main() -> None: ) parser.add_argument( "--data-path", - default=Path(Path(__file__).stem + "_datadir"), + default=None, type=Path, metavar="PATH", - help=f"root path for all 
data directories (default: {Path(__file__).stem}_datadir)", + help="root path for all data directories", ) parser.add_argument( "--zen-exe-path", @@ -162,17 +193,24 @@ def main() -> None: data_path = args.positional_path if data_path is None: data_path = args.data_path + if data_path is None: + print("WARNING: This script may require up to 1TB of free disk space.") + raw = input("Enter root path for all data directories: ").strip() + if not raw: + sys.exit("error: data path is required") + data_path = Path(raw) ZEN_EXE = args.zen_exe_positional if ZEN_EXE is None: ZEN_EXE = args.zen_exe_path + namespace, builds = load_builds() zen_system_dir = data_path / "system" zen_download_dir = data_path / "Download" zen_cache_data_dir = data_path / "ZenBuildsCache" zen_upload_dir = data_path / "Upload" zen_chunk_cache_path = data_path / "ChunkCache" - check_prerequisites() + check_prerequisites(builds) start_server("cache zenserver", zen_cache_data_dir, ZEN_CACHE_PORT, extra_zen_args=extra_zen_args, extra_server_args=["--buildstore-enabled"]) @@ -180,12 +218,12 @@ def main() -> None: wipe_or_create("download folder", zen_download_dir, extra_zen_args) wipe_or_create("system folder", zen_system_dir, extra_zen_args) - for build in BUILDS: + for build in builds: print(f"--------- importing {build.name} build") run(zen_cmd( "builds", "download", "--host", "https://jupiter.devtools.epicgames.com", - "--namespace", "fortnite.oplog", + "--namespace", namespace, "--bucket", build.bucket, "--build-id", build.id, "--local-path", zen_download_dir / build.name, @@ -199,7 +237,7 @@ def main() -> None: wipe_or_create("upload folder", zen_upload_dir, extra_zen_args) - for build in BUILDS: + for build in builds: print(f"--------- exporting {build.name} build") run(zen_cmd( "builds", "upload", diff --git a/scripts/test_scripts/builds-download-upload-update-build-ids.py b/scripts/test_scripts/builds-download-upload-update-build-ids.py new file mode 100644 index 000000000..2a63aa44d --- 
/dev/null +++ b/scripts/test_scripts/builds-download-upload-update-build-ids.py @@ -0,0 +1,150 @@ +#!/usr/bin/env python3 +"""Update builds-download-upload-build-ids.json with build IDs at the highest common changelist across all buckets.""" + +from __future__ import annotations + +import argparse +import json +import os +import platform +import subprocess +import sys +import tempfile +from pathlib import Path + +_PLATFORM = "windows" if sys.platform == "win32" else "macosx" if sys.platform == "darwin" else "linux" +_ARCH = "x64" if sys.platform == "win32" else platform.machine().lower() +_EXE_SUFFIX = ".exe" if sys.platform == "win32" else "" +_DEFAULT_ZEN = Path(f"build/{_PLATFORM}/{_ARCH}/release/zen{_EXE_SUFFIX}") + + +def _cache_dir() -> Path: + if sys.platform == "win32": + base = Path(os.environ.get("LOCALAPPDATA", Path.home() / "AppData" / "Local")) + return base / "Temp" / "zen" + elif sys.platform == "darwin": + return Path.home() / "Library" / "Caches" / "zen" + else: + base = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache")) + return base / "zen" + + +_OUTPUT_PATH = _cache_dir() / "builds-download-upload-build-ids.json" + +# Maps build name -> Jupiter bucket +_BUILDS: list[tuple[str, str]] = [ + ("XB1Client", "fortnitegame.staged-build.fortnite-main.xb1-client"), + ("WindowsClient", "fortnitegame.staged-build.fortnite-main.windows-client"), + ("SwitchClient", "fortnitegame.staged-build.fortnite-main.switch-client"), + ("LinuxServer", "fortnitegame.staged-build.fortnite-main.linux-server"), + ("Switch2Client", "fortnitegame.staged-build.fortnite-main.switch2-client"), + ("PS4Client", "fortnitegame.staged-build.fortnite-main.ps4-client"), + ("PS5Client", "fortnitegame.staged-build.fortnite-main.ps5-client"), + ("IOSClient", "fortnitegame.staged-build.fortnite-main.ios-client"), + ("AndroidClient", "fortnitegame.staged-build.fortnite-main.android-client"), +] + + +def list_builds_for_bucket(zen: str, host: str, namespace: str, bucket: str) -> 
list[dict]: + """Run zen builds list for a single bucket and return the results array.""" + with tempfile.NamedTemporaryFile(suffix=".json", delete=False) as tmp: + result_path = Path(tmp.name) + + cmd = [ + zen, "builds", "list", + "--namespace", namespace, + "--bucket", bucket, + "--host", host, + "--result-path", str(result_path), + ] + + try: + subprocess.run(cmd, check=True, capture_output=True) + except FileNotFoundError: + sys.exit(f"error: zen binary not found: {zen}") + except subprocess.CalledProcessError as e: + sys.exit( + f"error: zen builds list failed for bucket '{bucket}' with exit code {e.returncode}\n" + f"stderr: {e.stderr.decode(errors='replace')}" + ) + + with result_path.open() as f: + data = json.load(f) + result_path.unlink(missing_ok=True) + + return data.get("results", []) + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Refresh builds-download-upload-build-ids.json with build IDs at the highest changelist present in all buckets." + ) + parser.add_argument("--host", default="https://jupiter.devtools.epicgames.com", help="Jupiter host URL") + parser.add_argument("--zen", default=str(_DEFAULT_ZEN), help="Path to the zen binary") + parser.add_argument("--namespace", default="fortnite.oplog", help="Builds storage namespace") + args = parser.parse_args() + + # For each bucket, fetch results and build a changelist -> buildId map. + # bucket_cl_map[bucket] = { changelist_int: buildId_str, ... 
} + bucket_cl_map: dict[str, dict[int, str]] = {} + + for name, bucket in _BUILDS: + print(f"Querying {name} ({bucket}) ...") + results = list_builds_for_bucket(args.zen, args.host, args.namespace, bucket) + if not results: + sys.exit(f"error: no results for bucket '{bucket}' (build '{name}')") + + cl_map: dict[int, str] = {} + for entry in results: + build_id = entry.get("buildId", "") + metadata = entry.get("metadata") or {} + cl = metadata.get("commit") + if build_id and cl is not None: + # Keep first occurrence (most recent) per changelist + if cl not in cl_map: + cl_map[int(cl)] = build_id + + if not cl_map: + sys.exit( + f"error: bucket '{bucket}' (build '{name}') returned {len(results)} entries " + "but none had both buildId and changelist in metadata" + ) + + print(f" {len(cl_map)} distinct changelists, latest CL {max(cl_map)}") + bucket_cl_map[bucket] = cl_map + + # Find the highest changelist present in every bucket's result set. + common_cls = set(next(iter(bucket_cl_map.values())).keys()) + for bucket, cl_map in bucket_cl_map.items(): + common_cls &= set(cl_map.keys()) + + if not common_cls: + sys.exit( + "error: no changelist is present in all buckets.\n" + "Per-bucket CL ranges:\n" + + "\n".join( + f" {name} ({bucket}): {min(bucket_cl_map[bucket])} – {max(bucket_cl_map[bucket])}" + for name, bucket in _BUILDS + ) + ) + + best_cl = max(common_cls) + print(f"\nHighest common changelist: {best_cl}") + + build_ids: dict[str, dict[str, str]] = {} + for name, bucket in _BUILDS: + build_id = bucket_cl_map[bucket][best_cl] + build_ids[name] = {"bucket": bucket, "buildId": build_id} + print(f" {name}: {build_id}") + + output = {"namespace": args.namespace, "builds": build_ids} + _OUTPUT_PATH.parent.mkdir(parents=True, exist_ok=True) + with _OUTPUT_PATH.open("w") as f: + json.dump(output, f, indent=2) + f.write("\n") + + print(f"\nWrote {_OUTPUT_PATH}") + + +if __name__ == "__main__": + main() diff --git a/scripts/test_scripts/oplog-import-export-test.py 
b/scripts/test_scripts/oplog-import-export-test.py index b2a5ece6c..f913a7351 100644 --- a/scripts/test_scripts/oplog-import-export-test.py +++ b/scripts/test_scripts/oplog-import-export-test.py @@ -4,6 +4,8 @@ from __future__ import annotations import argparse +import json +import os import platform import subprocess import sys @@ -15,23 +17,51 @@ _ARCH = "x64" if sys.platform == "win32" else platform.machine().lower() _EXE_SUFFIX = ".exe" if sys.platform == "win32" else "" +def _cache_dir() -> Path: + if sys.platform == "win32": + base = Path(os.environ.get("LOCALAPPDATA", Path.home() / "AppData" / "Local")) + return base / "Temp" / "zen" + elif sys.platform == "darwin": + return Path.home() / "Library" / "Caches" / "zen" + else: + base = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache")) + return base / "zen" + + +_BUILD_IDS_PATH = _cache_dir() / "oplog-import-export-build-ids.json" + + class Build(NamedTuple): name: str bucket: str id: str -BUILDS = [ - Build("XB1Client", "fortnitegame.oplog.fortnite-main.xb1client", "09a75f7f3b7517653dcdaaa4"), - Build("WindowsClient", "fortnitegame.oplog.fortnite-main.windowsclient", "09a75d977ef944ecfd0eddfd"), - Build("SwitchClient", "fortnitegame.oplog.fortnite-main.switchclient", "09a74d03b3598ec94cfd2644"), - Build("XSXClient", "fortnitegame.oplog.fortnite-main.xsxclient", "09a76c2bbd6cd78f4d40d9ea"), - Build("Switch2Client", "fortnitegame.oplog.fortnite-main.switch2client", "09a7686b3d9faa78fb24a38f"), - Build("PS4Client", "fortnitegame.oplog.fortnite-main.ps4client", "09a75b72d1c260ed26020140"), - Build("LinuxServer", "fortnitegame.oplog.fortnite-main.linuxserver", "09a747f5e0ee83a04be013e6"), - Build("IOSClient", "fortnitegame.oplog.fortnite-main.iosclient", "09a75f677e883325a209148c"), - Build("Android_ASTCClient", "fortnitegame.oplog.fortnite-main.android_astcclient", "09a7422c08c6f37becc7d37f"), -] +def load_builds() -> tuple[str, list[Build]]: + if not _BUILD_IDS_PATH.exists(): + print(f"Build IDs file 
not found: {_BUILD_IDS_PATH}") + answer = input("Run oplog-update-build-ids.py now to populate it? [y/N] ").strip().lower() + if answer == "y": + update_script = Path(__file__).parent / "oplog-update-build-ids.py" + subprocess.run([sys.executable, str(update_script)], check=True) + else: + sys.exit("Aborted. Run scripts/test_scripts/oplog-update-build-ids.py to populate it.") + with _BUILD_IDS_PATH.open() as f: + data: dict = json.load(f) + namespace = data.get("namespace", "") + if not namespace: + sys.exit(f"error: {_BUILD_IDS_PATH} is missing 'namespace'") + builds = [] + for name, entry in data.get("builds", {}).items(): + bucket = entry.get("bucket", "") + build_id = entry.get("buildId", "") + if not bucket or not build_id: + sys.exit(f"error: entry '{name}' in {_BUILD_IDS_PATH} is missing 'bucket' or 'buildId'") + builds.append(Build(name, bucket, build_id)) + if not builds: + sys.exit(f"error: {_BUILD_IDS_PATH} contains no builds") + return namespace, builds + ZEN_EXE: Path = Path(f"./build/{_PLATFORM}/{_ARCH}/release/zen{_EXE_SUFFIX}") @@ -50,6 +80,11 @@ SERVER_ARGS: tuple[str, ...] = ( ) +def zen_cmd(*args: str | Path, extra_zen_args: list[str] | None = None) -> list[str | Path]: + """Build a zen CLI command list, inserting extra_zen_args before subcommands.""" + return [ZEN_EXE, *(extra_zen_args or []), *args] + + def run(cmd: list[str | Path]) -> None: try: subprocess.run(cmd, check=True) @@ -59,31 +94,33 @@ def run(cmd: list[str | Path]) -> None: sys.exit(f"error: command failed with exit code {e.returncode}:\n {' '.join(str(x) for x in e.cmd)}") -def stop_server(label: str, port: int) -> None: +def stop_server(label: str, port: int, extra_zen_args: list[str] | None = None) -> None: """Stop a zen server. 
Tolerates failures so it is safe to call from finally blocks.""" print(f"--------- stopping {label}") try: - subprocess.run([ZEN_EXE, "down", "--port", str(port)]) + subprocess.run(zen_cmd("down", "--port", str(port), extra_zen_args=extra_zen_args)) except OSError as e: print(f"warning: could not stop {label}: {e}", file=sys.stderr) print() -def start_server(label: str, data_dir: Path, port: int, extra_args: list[str] | None = None) -> None: +def start_server(label: str, data_dir: Path, port: int, extra_zen_args: list[str] | None = None, + extra_server_args: list[str] | None = None) -> None: print(f"--------- starting {label} {data_dir}") - run([ - ZEN_EXE, "up", "--port", str(port), "--show-console", "--", + run(zen_cmd( + "up", "--port", str(port), "--show-console", "--", f"--data-dir={data_dir}", *SERVER_ARGS, - *(extra_args or []), - ]) + *(extra_server_args or []), + extra_zen_args=extra_zen_args, + )) print() -def wipe_or_create(label: str, path: Path) -> None: +def wipe_or_create(label: str, path: Path, extra_zen_args: list[str] | None = None) -> None: if path.exists(): print(f"--------- cleaning {label} {path}") - run([ZEN_EXE, "wipe", "-y", path]) + run(zen_cmd("wipe", "-y", path, extra_zen_args=extra_zen_args)) else: print(f"--------- creating {label} {path}") path.mkdir(parents=True, exist_ok=True) @@ -95,24 +132,39 @@ def check_prerequisites() -> None: sys.exit(f"error: zen executable not found: {ZEN_EXE}") -def setup_project(port: int) -> None: +def setup_project(port: int, extra_zen_args: list[str] | None = None) -> None: """Create the FortniteGame project on the server at the given port.""" print("--------- creating FortniteGame project") - run([ZEN_EXE, "project-create", f"--hosturl=127.0.0.1:{port}", "FortniteGame", "--force-update"]) + run(zen_cmd("project-create", f"--hosturl=127.0.0.1:{port}", "FortniteGame", "--force-update", + extra_zen_args=extra_zen_args)) print() -def setup_oplog(port: int, build_name: str) -> None: +def setup_oplog(port: 
int, build_name: str, extra_zen_args: list[str] | None = None) -> None: """Create the oplog in the FortniteGame project on the server at the given port.""" print(f"--------- creating {build_name} oplog") - run([ZEN_EXE, "oplog-create", f"--hosturl=127.0.0.1:{port}", "FortniteGame", build_name, "--force-update"]) + run(zen_cmd("oplog-create", f"--hosturl=127.0.0.1:{port}", "FortniteGame", build_name, "--force-update", + extra_zen_args=extra_zen_args)) print() def main() -> None: global ZEN_EXE - parser = argparse.ArgumentParser(description=__doc__) + # Split on '--' to separate script args from extra zen CLI args + script_argv: list[str] = [] + extra_zen_args: list[str] = [] + if "--" in sys.argv[1:]: + sep = sys.argv.index("--", 1) + script_argv = sys.argv[1:sep] + extra_zen_args = sys.argv[sep + 1:] + else: + script_argv = sys.argv[1:] + + parser = argparse.ArgumentParser( + description=__doc__, + epilog="Any arguments after '--' are forwarded to every zen CLI invocation.", + ) parser.add_argument( "positional_path", nargs="?", @@ -131,10 +183,10 @@ def main() -> None: ) parser.add_argument( "--data-path", - default=Path(Path(__file__).stem + "_datadir"), + default=None, type=Path, metavar="PATH", - help=f"root path for all data directories (default: {Path(__file__).stem}_datadir)", + help="root path for all data directories", ) parser.add_argument( "--zen-exe-path", @@ -143,15 +195,22 @@ def main() -> None: metavar="PATH", help=f"path to zen executable (default: {ZEN_EXE})", ) - args = parser.parse_args() + args = parser.parse_args(script_argv) data_path = args.positional_path if data_path is None: data_path = args.data_path + if data_path is None: + print("WARNING: This script may require up to 1TB of free disk space.") + raw = input("Enter root path for all data directories: ").strip() + if not raw: + sys.exit("error: data path is required") + data_path = Path(raw) ZEN_EXE = args.zen_exe_positional if ZEN_EXE is None: ZEN_EXE = args.zen_exe_path + namespace, 
builds = load_builds() zen_data_dir = data_path / "DDC" / "OplogsZen" zen_cache_data_dir = data_path / "DDC" / "ZenBuildsCache" zen_import_data_dir = data_path / "DDC" / "OplogsZenImport" @@ -159,75 +218,81 @@ def main() -> None: check_prerequisites() - start_server("cache zenserver", zen_cache_data_dir, ZEN_CACHE_PORT, ["--buildstore-enabled"]) + start_server("cache zenserver", zen_cache_data_dir, ZEN_CACHE_PORT, + extra_zen_args=extra_zen_args, extra_server_args=["--buildstore-enabled"]) try: - wipe_or_create("zenserver data", zen_data_dir) - start_server("zenserver", zen_data_dir, ZEN_PORT) + wipe_or_create("zenserver data", zen_data_dir, extra_zen_args) + start_server("zenserver", zen_data_dir, ZEN_PORT, extra_zen_args=extra_zen_args) try: - setup_project(ZEN_PORT) + setup_project(ZEN_PORT, extra_zen_args) - for build in BUILDS: - setup_oplog(ZEN_PORT, build.name) + for build in builds: + setup_oplog(ZEN_PORT, build.name, extra_zen_args) print(f"--------- importing {build.name} oplog") - run([ - ZEN_EXE, "oplog-import", + run(zen_cmd( + "oplog-import", f"--hosturl=127.0.0.1:{ZEN_PORT}", "FortniteGame", build.name, "--clean", "--builds", "https://jupiter.devtools.epicgames.com", - "--namespace", "fortnite.oplog", + "--namespace", namespace, "--bucket", build.bucket, "--builds-id", build.id, f"--zen-cache-host={ZEN_CACHE}", f"--zen-cache-upload={ZEN_CACHE_POPULATE}", f"--allow-partial-block-requests={ZEN_PARTIAL_REQUEST_MODE}", - ]) + extra_zen_args=extra_zen_args, + )) print() print(f"--------- validating {build.name} oplog") - run([ZEN_EXE, "oplog-validate", f"--hosturl=127.0.0.1:{ZEN_PORT}", "FortniteGame", build.name]) + run(zen_cmd("oplog-validate", f"--hosturl=127.0.0.1:{ZEN_PORT}", "FortniteGame", build.name, + extra_zen_args=extra_zen_args)) print() - wipe_or_create("export folder", export_dir) + wipe_or_create("export folder", export_dir, extra_zen_args) - for build in BUILDS: + for build in builds: print(f"--------- exporting {build.name} oplog") - 
run([ - ZEN_EXE, "oplog-export", + run(zen_cmd( + "oplog-export", f"--hosturl=127.0.0.1:{ZEN_PORT}", "FortniteGame", build.name, "--file", export_dir, "--forcetempblocks", - ]) + extra_zen_args=extra_zen_args, + )) print() finally: - stop_server("zenserver", ZEN_PORT) + stop_server("zenserver", ZEN_PORT, extra_zen_args) - wipe_or_create("alternate zenserver data", zen_import_data_dir) - start_server("import zenserver", zen_import_data_dir, ZEN_PORT) + wipe_or_create("alternate zenserver data", zen_import_data_dir, extra_zen_args) + start_server("import zenserver", zen_import_data_dir, ZEN_PORT, extra_zen_args=extra_zen_args) try: - setup_project(ZEN_PORT) + setup_project(ZEN_PORT, extra_zen_args) - for build in BUILDS: - setup_oplog(ZEN_PORT, build.name) + for build in builds: + setup_oplog(ZEN_PORT, build.name, extra_zen_args) print(f"--------- importing {build.name} oplog") - run([ - ZEN_EXE, "oplog-import", + run(zen_cmd( + "oplog-import", f"--hosturl=127.0.0.1:{ZEN_PORT}", "FortniteGame", build.name, "--file", export_dir, - ]) + extra_zen_args=extra_zen_args, + )) print() print(f"--------- validating {build.name} oplog") - run([ZEN_EXE, "oplog-validate", f"--hosturl=127.0.0.1:{ZEN_PORT}", "FortniteGame", build.name]) + run(zen_cmd("oplog-validate", f"--hosturl=127.0.0.1:{ZEN_PORT}", "FortniteGame", build.name, + extra_zen_args=extra_zen_args)) print() finally: - stop_server("alternative zenserver", ZEN_PORT) + stop_server("alternative zenserver", ZEN_PORT, extra_zen_args) finally: - stop_server("cache zenserver", ZEN_CACHE_PORT) + stop_server("cache zenserver", ZEN_CACHE_PORT, extra_zen_args) if __name__ == "__main__": diff --git a/scripts/test_scripts/oplog-update-build-ids.py b/scripts/test_scripts/oplog-update-build-ids.py new file mode 100644 index 000000000..67e128c8e --- /dev/null +++ b/scripts/test_scripts/oplog-update-build-ids.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python3 +"""Update oplog-import-export-build-ids.json with build IDs at the highest common 
changelist across all buckets.""" + +from __future__ import annotations + +import argparse +import json +import os +import platform +import subprocess +import sys +import tempfile +from pathlib import Path + +_PLATFORM = "windows" if sys.platform == "win32" else "macosx" if sys.platform == "darwin" else "linux" +_ARCH = "x64" if sys.platform == "win32" else platform.machine().lower() +_EXE_SUFFIX = ".exe" if sys.platform == "win32" else "" +_DEFAULT_ZEN = Path(f"build/{_PLATFORM}/{_ARCH}/release/zen{_EXE_SUFFIX}") + + +def _cache_dir() -> Path: + if sys.platform == "win32": + base = Path(os.environ.get("LOCALAPPDATA", Path.home() / "AppData" / "Local")) + return base / "Temp" / "zen" + elif sys.platform == "darwin": + return Path.home() / "Library" / "Caches" / "zen" + else: + base = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache")) + return base / "zen" + + +_OUTPUT_PATH = _cache_dir() / "oplog-import-export-build-ids.json" + +# Maps build name -> Jupiter bucket +_BUILDS: list[tuple[str, str]] = [ + ("XB1Client", "fortnitegame.oplog.fortnite-main.xb1client"), + ("WindowsClient", "fortnitegame.oplog.fortnite-main.windowsclient"), + ("SwitchClient", "fortnitegame.oplog.fortnite-main.switchclient"), + ("XSXClient", "fortnitegame.oplog.fortnite-main.xsxclient"), + ("Switch2Client", "fortnitegame.oplog.fortnite-main.switch2client"), + ("PS4Client", "fortnitegame.oplog.fortnite-main.ps4client"), + ("PS5Client", "fortnitegame.oplog.fortnite-main.ps5client"), + ("LinuxServer", "fortnitegame.oplog.fortnite-main.linuxserver"), + ("IOSClient", "fortnitegame.oplog.fortnite-main.iosclient"), + ("Android_ASTCClient", "fortnitegame.oplog.fortnite-main.android_astcclient"), +] + + +def list_builds_for_bucket(zen: str, host: str, namespace: str, bucket: str) -> list[dict]: + """Run zen builds list for a single bucket and return the results array.""" + with tempfile.NamedTemporaryFile(suffix=".json", delete=False) as tmp: + result_path = Path(tmp.name) + + cmd = [ + 
zen, "builds", "list", + "--namespace", namespace, + "--bucket", bucket, + "--host", host, + "--result-path", str(result_path), + ] + + try: + subprocess.run(cmd, check=True, capture_output=True) + except FileNotFoundError: + sys.exit(f"error: zen binary not found: {zen}") + except subprocess.CalledProcessError as e: + sys.exit( + f"error: zen builds list failed for bucket '{bucket}' with exit code {e.returncode}\n" + f"stderr: {e.stderr.decode(errors='replace')}" + ) + + with result_path.open() as f: + data = json.load(f) + result_path.unlink(missing_ok=True) + + return data.get("results", []) + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Refresh oplog-import-export-build-ids.json with build IDs at the highest changelist present in all buckets." + ) + parser.add_argument("--host", default="https://jupiter.devtools.epicgames.com", help="Jupiter host URL") + parser.add_argument("--zen", default=str(_DEFAULT_ZEN), help="Path to the zen binary") + parser.add_argument("--namespace", default="fortnite.oplog", help="Builds storage namespace") + args = parser.parse_args() + + # For each bucket, fetch results and build a changelist -> buildId map. + # bucket_cl_map[bucket] = { changelist_int: buildId_str, ... 
} + bucket_cl_map: dict[str, dict[int, str]] = {} + + for name, bucket in _BUILDS: + print(f"Querying {name} ({bucket}) ...") + results = list_builds_for_bucket(args.zen, args.host, args.namespace, bucket) + if not results: + sys.exit(f"error: no results for bucket '{bucket}' (build '{name}')") + + cl_map: dict[int, str] = {} + for entry in results: + build_id = entry.get("buildId", "") + metadata = entry.get("metadata") or {} + cl = metadata.get("changelist") + if build_id and cl is not None: + # Keep first occurrence (most recent) per changelist + if cl not in cl_map: + cl_map[int(cl)] = build_id + + if not cl_map: + sys.exit( + f"error: bucket '{bucket}' (build '{name}') returned {len(results)} entries " + "but none had both buildId and changelist in metadata" + ) + + print(f" {len(cl_map)} distinct changelists, latest CL {max(cl_map)}") + bucket_cl_map[bucket] = cl_map + + # Find the highest changelist present in every bucket's result set. + common_cls = set(next(iter(bucket_cl_map.values())).keys()) + for bucket, cl_map in bucket_cl_map.items(): + common_cls &= set(cl_map.keys()) + + if not common_cls: + sys.exit( + "error: no changelist is present in all buckets.\n" + "Per-bucket CL ranges:\n" + + "\n".join( + f" {name} ({bucket}): {min(bucket_cl_map[bucket])} – {max(bucket_cl_map[bucket])}" + for name, bucket in _BUILDS + ) + ) + + best_cl = max(common_cls) + print(f"\nHighest common changelist: {best_cl}") + + build_ids: dict[str, dict[str, str]] = {} + for name, bucket in _BUILDS: + build_id = bucket_cl_map[bucket][best_cl] + build_ids[name] = {"bucket": bucket, "buildId": build_id} + print(f" {name}: {build_id}") + + output = {"namespace": args.namespace, "builds": build_ids} + _OUTPUT_PATH.parent.mkdir(parents=True, exist_ok=True) + with _OUTPUT_PATH.open("w") as f: + json.dump(output, f, indent=2) + f.write("\n") + + print(f"\nWrote {_OUTPUT_PATH}") + + +if __name__ == "__main__": + main() diff --git a/scripts/ue_build_linux/verify_linux_toolchains.sh 
b/scripts/ue_build_linux/verify_linux_toolchains.sh new file mode 100755 index 000000000..10fad8a82 --- /dev/null +++ b/scripts/ue_build_linux/verify_linux_toolchains.sh @@ -0,0 +1,121 @@ +#!/usr/bin/env bash +# +# Verify that the project builds on Linux with gcc, ue-clang, clang-19 and clang-20. +# Each toolchain gets a clean slate (build dirs + xmake caches wiped). +# +# Usage: +# ./scripts/verify_linux_toolchains.sh # build all four +# ./scripts/verify_linux_toolchains.sh gcc clang-19 # build only specific ones +# ./scripts/verify_linux_toolchains.sh --clean # also wipe ~/.xmake package cache +# +# Installing toolchains (Ubuntu 24.04): +# - gcc: sudo apt install build-essential +# - ue-clang: use scripts/ue_build_linux/get_ue_toolchain.sh +# - clang-19: sudo apt install clang-19 +# - clang-20: sudo apt install clang-20 + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +PROJECT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)" + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' + +PASSED=() +FAILED=() +declare -A TIMINGS +CLEAN_XMAKE_HOME=false + +clean_build_state() { + echo -e "${YELLOW}Cleaning build state...${NC}" + rm -rf "$PROJECT_DIR/.xmake" "$PROJECT_DIR/build" + if [ "$CLEAN_XMAKE_HOME" = true ]; then + rm -rf ~/.xmake + fi +} + +build_toolchain() { + local NAME="$1" + shift + local CONFIG_ARGS=("$@") + + echo "" + echo "============================================================" + echo -e "${YELLOW}Building with: ${NAME}${NC}" + echo " xmake config args: ${CONFIG_ARGS[*]}" + echo "============================================================" + + clean_build_state + + local START_TIME=$SECONDS + + if ! (cd "$PROJECT_DIR" && xmake config -y -m debug "${CONFIG_ARGS[@]}"); then + TIMINGS[$NAME]=$(( SECONDS - START_TIME )) + echo -e "${RED}FAILED: ${NAME} (config, ${TIMINGS[$NAME]}s)${NC}" + FAILED+=("$NAME") + return 1 + fi + + if ! 
(cd "$PROJECT_DIR" && xmake -y -j"$(nproc)"); then + TIMINGS[$NAME]=$(( SECONDS - START_TIME )) + echo -e "${RED}FAILED: ${NAME} (build, ${TIMINGS[$NAME]}s)${NC}" + FAILED+=("$NAME") + return 1 + fi + + TIMINGS[$NAME]=$(( SECONDS - START_TIME )) + echo -e "${GREEN}PASSED: ${NAME} (${TIMINGS[$NAME]}s)${NC}" + PASSED+=("$NAME") +} + +# Available toolchain configurations +declare -A TOOLCHAINS +TOOLCHAINS[gcc]="--toolchain=gcc" +TOOLCHAINS[ue-clang]="--toolchain=ue-clang" +TOOLCHAINS[clang-19]="--toolchain=clang-19" +TOOLCHAINS[clang-20]="--toolchain=clang-20" + +# Parse arguments +SELECTED=() +for ARG in "$@"; do + if [ "$ARG" = "--clean" ]; then + CLEAN_XMAKE_HOME=true + else + SELECTED+=("$ARG") + fi +done + +if [ ${#SELECTED[@]} -eq 0 ]; then + SELECTED=(gcc ue-clang clang-19 clang-20) +fi + +TOTAL_START=$SECONDS + +for TC in "${SELECTED[@]}"; do + if [ -z "${TOOLCHAINS[$TC]+x}" ]; then + echo -e "${RED}Unknown toolchain: ${TC}${NC}" + echo "Available: ${!TOOLCHAINS[*]}" + exit 1 + fi + + # shellcheck disable=SC2086 + build_toolchain "$TC" ${TOOLCHAINS[$TC]} || true +done + +TOTAL_ELAPSED=$(( SECONDS - TOTAL_START )) + +echo "" +echo "============================================================" +echo "Results (${TOTAL_ELAPSED}s total):" +echo "============================================================" +for TC in "${PASSED[@]}"; do + echo -e " ${GREEN}PASS${NC} ${TC} (${TIMINGS[$TC]}s)" +done +for TC in "${FAILED[@]}"; do + echo -e " ${RED}FAIL${NC} ${TC} (${TIMINGS[$TC]}s)" +done + +[ ${#FAILED[@]} -eq 0 ] diff --git a/scripts/updatefrontend.lua b/scripts/updatefrontend.lua deleted file mode 100644 index ab37819d7..000000000 --- a/scripts/updatefrontend.lua +++ /dev/null @@ -1,111 +0,0 @@ --- Copyright Epic Games, Inc. All Rights Reserved. - --------------------------------------------------------------------------------- -local function _exec(cmd, ...) 
- local args = {} - for _, arg in pairs({...}) do - if arg then - table.insert(args, arg) - end - end - - print("--", cmd, table.unpack(args)) - local ret = os.execv(cmd, args) - print() - return ret -end - --------------------------------------------------------------------------------- -local function _zip(store_only, zip_path, ...) - -- Here's the rules; if len(...) is 1 and it is a dir then create a zip with - -- archive paths like this; - -- - -- glob(foo/bar/**) -> foo/bar/abc, foo/bar/dir/123 -> zip(abc, dir/123) - -- - -- Otherwise assume ... is file paths and add without leading directories; - -- - -- foo/abc, bar/123 -> zip(abc, 123) - - zip_path = path.absolute(zip_path) - os.tryrm(zip_path) - - local inputs = {...} - - local source_dir = nil - if #inputs == 1 and os.isdir(inputs[1]) then - source_dir = inputs[1] - end - - import("detect.tools.find_7z") - local cmd_7z = find_7z() - if cmd_7z then - input_paths = {} - if source_dir then - -- Suffixing a directory path with a "/." will have 7z set the path - -- for archived files relative to that directory. - input_paths = { path.join(source_dir, ".") } - else - for _, input_path in pairs(inputs) do - -- If there is a "/./" anywhere in file paths then 7z drops all - -- directory information and just archives the file by name - input_path = path.relative(input_path, ".") - if input_path:sub(2,2) ~= ":" then - input_path = "./"..input_path - end - table.insert(input_paths, input_path) - end - end - - compression_level = "-mx1" - if store_only then - compression_level = "-mx0" - end - - local ret = _exec(cmd_7z, "a", compression_level, zip_path, table.unpack(input_paths)) - if ret > 0 then - raise("Received error from 7z") - end - return - end - - print("7z not found, falling back to zip") - - import("detect.tools.find_zip") - zip_cmd = find_zip() - if zip_cmd then - local input_paths = inputs - local cwd = os.curdir() - if source_dir then - os.cd(source_dir) - input_paths = { "." 
} - end - - compression_level = "-1" - if store_only then - compression_level = "-0" - end - - local strip_leading_path = nil - if not source_dir then - strip_leading_path = "--junk-paths" - end - - local ret = _exec(zip_cmd, "-r", compression_level, strip_leading_path, zip_path, table.unpack(input_paths)) - if ret > 0 then - raise("Received error from zip") - end - - os.cd(cwd) - return - end - print("zip not found") - - raise("Unable to find a suitable zip tool") -end - --------------------------------------------------------------------------------- -function main() - local zip_path = "src/zenserver/frontend/html.zip" - local content_dir = "src/zenserver/frontend/html/" - _zip(true, zip_path, content_dir) -end diff --git a/scripts/win_cross/get_win_sdk.sh b/scripts/win_cross/get_win_sdk.sh new file mode 100755 index 000000000..b22d1bf3a --- /dev/null +++ b/scripts/win_cross/get_win_sdk.sh @@ -0,0 +1,305 @@ +#!/bin/bash +# +# Downloads xwin and uses it to fetch the Windows SDK and MSVC CRT headers/libs +# needed for cross-compiling Windows binaries from Linux using clang-cl. +# +# Usage: +# ./get_win_sdk.sh [output_dir] +# +# Output defaults to ~/.xwin-sdk (override via $XWIN_SDK_DIR or first argument). + +set -euo pipefail + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +die() { echo "ERROR: $1" >&2; exit 1; } + +sdk_dir="${1:-${XWIN_SDK_DIR:-${HOME}/.xwin-sdk}}" + +if [[ "${sdk_dir}" == "--help" ]]; then + echo "usage: $(basename "${BASH_SOURCE[0]}") [output_dir]" + echo "" + echo "Downloads the Windows SDK and MSVC CRT via xwin for cross-compilation." + echo "Default output: ~/.xwin-sdk (override via \$XWIN_SDK_DIR or first argument)" + exit 0 +fi + +# If the directory already has SDK content, skip download +if [ -d "${sdk_dir}/sdk/include/um" ] && [ -d "${sdk_dir}/crt/include" ]; then + echo "SDK already present at '${sdk_dir}', skipping download." + echo "Delete the directory to force re-download." 
+ # Still create the compat layout in case it's missing (e.g. script was updated) + CREATE_COMPAT_ONLY=true +else + CREATE_COMPAT_ONLY=false +fi + +if [ -e "${sdk_dir}" ]; then + # Allow re-use of existing empty or partial directory + if [ -d "${sdk_dir}" ]; then + : + else + die "'${sdk_dir}' exists but is not a directory" + fi +fi + +mkdir -p "${sdk_dir}" + +# ------------------------------------------------------------------------- +# Detect LLVM installation +# ------------------------------------------------------------------------- +LLVM_BIN="${LLVM_BIN_DIR:-}" +if [ -z "${LLVM_BIN}" ]; then + # Try common locations + for candidate in /usr/lib/llvm-19/bin /usr/lib/llvm-18/bin /usr/lib/llvm-17/bin; do + if [ -x "${candidate}/clang" ]; then + LLVM_BIN="${candidate}" + break + fi + done +fi +if [ -z "${LLVM_BIN}" ]; then + # Fallback: try to find clang on PATH + CLANG_PATH=$(command -v clang 2>/dev/null || true) + if [ -n "${CLANG_PATH}" ]; then + LLVM_BIN=$(dirname "$(readlink -f "${CLANG_PATH}")") + fi +fi +if [ -z "${LLVM_BIN}" ]; then + die "Could not find LLVM/clang installation. Set LLVM_BIN_DIR to the bin directory." +fi +echo "Using LLVM at: ${LLVM_BIN}" + +# ------------------------------------------------------------------------- +# Download xwin binary and fetch SDK (skip if already present) +# ------------------------------------------------------------------------- +if [ "${CREATE_COMPAT_ONLY}" = false ]; then + XWIN_VERSION="0.6.5" + XWIN_ARCHIVE="xwin-${XWIN_VERSION}-x86_64-unknown-linux-musl.tar.gz" + XWIN_URL="https://github.com/Jake-Shadle/xwin/releases/download/${XWIN_VERSION}/${XWIN_ARCHIVE}" + + TMPDIR=$(mktemp -d) + trap 'rm -rf "${TMPDIR}"' EXIT + + echo "Downloading xwin ${XWIN_VERSION}..." 
+ if command -v wget &>/dev/null; then + wget -q --show-progress -O "${TMPDIR}/${XWIN_ARCHIVE}" "${XWIN_URL}" + elif command -v curl &>/dev/null; then + curl -fSL --progress-bar -o "${TMPDIR}/${XWIN_ARCHIVE}" "${XWIN_URL}" + else + die "Neither wget nor curl found" + fi + + echo "Extracting xwin..." + tar -xzf "${TMPDIR}/${XWIN_ARCHIVE}" -C "${TMPDIR}" + + XWIN_BIN="${TMPDIR}/xwin-${XWIN_VERSION}-x86_64-unknown-linux-musl/xwin" + if [ ! -x "${XWIN_BIN}" ]; then + die "xwin binary not found after extraction" + fi + + echo "Fetching Windows SDK and CRT (this may take a few minutes)..." + "${XWIN_BIN}" --accept-license splat --output "${sdk_dir}" +fi + +# ------------------------------------------------------------------------- +# Create tool wrapper scripts in bin/ +# ------------------------------------------------------------------------- +BIN_DIR="${sdk_dir}/bin" +mkdir -p "${BIN_DIR}" + +# clang-cl wrapper (since the host may not have a clang-cl symlink) +cat > "${BIN_DIR}/clang-cl" << WRAPPER +#!/bin/bash +exec "${LLVM_BIN}/clang" --driver-mode=cl -D_ALLOW_COMPILER_AND_STL_VERSION_MISMATCH "\$@" +WRAPPER +chmod +x "${BIN_DIR}/clang-cl" + +# clang wrapper for GNU assembly (.S files) +cat > "${BIN_DIR}/clang" << WRAPPER +#!/bin/bash +exec "${LLVM_BIN}/clang" "\$@" +WRAPPER +chmod +x "${BIN_DIR}/clang" + +# ------------------------------------------------------------------------- +# Create MSVC-compatible directory layout for xmake package builds. +# +# xmake's built-in msvc toolchain on Linux uses find_build_tools() which +# expects the following structure: +# <sdk>/VC/Tools/MSVC/<version>/include → CRT headers +# <sdk>/VC/Tools/MSVC/<version>/lib/<arch> → CRT libs +# <sdk>/Windows Kits/10/Include/<ver>/{ucrt,um,shared} → SDK headers +# <sdk>/Windows Kits/10/Lib/<ver>/{ucrt,um}/<arch> → SDK libs +# <sdk>/bin/<arch>/ → tool wrappers +# +# We create this layout using symlinks back to the xwin flat layout. 
# -------------------------------------------------------------------------
echo "Creating MSVC-compatible directory layout..."

# Placeholder version directory names: the toolchain probe only needs *a*
# version directory to exist; the exact value is not significant.
FAKE_VC_VER="14.0.0"
FAKE_SDK_VER="10.0.0.0"

# --- VC Tools (CRT) ---
# ln -sfn force-replaces any previous link so the script is re-runnable.
VC_DIR="${sdk_dir}/VC/Tools/MSVC/${FAKE_VC_VER}"
mkdir -p "${VC_DIR}"
ln -sfn "${sdk_dir}/crt/include" "${VC_DIR}/include"
mkdir -p "${VC_DIR}/lib"
# xwin names the arch dir x86_64; MSVC convention is x64.
ln -sfn "${sdk_dir}/crt/lib/x86_64" "${VC_DIR}/lib/x64"

# --- Windows Kits (SDK headers) ---
WINSDK_INC="${sdk_dir}/Windows Kits/10/Include/${FAKE_SDK_VER}"
mkdir -p "${WINSDK_INC}"
ln -sfn "${sdk_dir}/sdk/include/ucrt" "${WINSDK_INC}/ucrt"
ln -sfn "${sdk_dir}/sdk/include/um" "${WINSDK_INC}/um"
ln -sfn "${sdk_dir}/sdk/include/shared" "${WINSDK_INC}/shared"

# --- Windows Kits (SDK libs) ---
WINSDK_LIB="${sdk_dir}/Windows Kits/10/Lib/${FAKE_SDK_VER}"
mkdir -p "${WINSDK_LIB}/ucrt" "${WINSDK_LIB}/um"
ln -sfn "${sdk_dir}/sdk/lib/ucrt/x86_64" "${WINSDK_LIB}/ucrt/x64"
ln -sfn "${sdk_dir}/sdk/lib/um/x86_64" "${WINSDK_LIB}/um/x64"

# --- Tool wrappers in bin/<arch>/ (for msvc toolchain PATH setup) ---
ARCH_BIN="${sdk_dir}/bin/x64"
mkdir -p "${ARCH_BIN}"

# cl → clang-cl wrapper
# Unquoted heredoc: ${LLVM_BIN} is baked in now; \$@ forwards the wrapper's
# own arguments at run time.
cat > "${ARCH_BIN}/cl" << WRAPPER
#!/bin/bash
exec "${LLVM_BIN}/clang" --driver-mode=cl -D_ALLOW_COMPILER_AND_STL_VERSION_MISMATCH "\$@"
WRAPPER
chmod +x "${ARCH_BIN}/cl"
# Some callers invoke the tool by its Windows name, so ship both spellings.
cp "${ARCH_BIN}/cl" "${ARCH_BIN}/cl.exe"

# link → lld-link (with /lib mode redirecting to llvm-lib for archiver use)
# xmake sets ar=link.exe for non-LTO MSVC builds and may pass linker-only flags
# like /opt:ref to the archiver. We detect /lib mode, filter those flags, and
# redirect to llvm-lib. Also handles response files (@file) that xmake uses
# when the argument list is too long.
# Generated 'link' wrapper. Decision logic (inside the generated script):
#   1. Inline any @response-file arguments into ALL_ARGS.
#      NOTE(review): the rsp file is read one argument per line -- MSVC rsp
#      files can also be UTF-16 or hold several args per line; confirm
#      xmake always emits newline-separated UTF-8 rsp files here.
#   2. Classify args (case-insensitively via \${arg,,}): explicit /lib flag,
#      a .lib output target, and anything (subsystem flag, .exe name) that
#      marks a real link. Linker-only /opt:* flags are dropped.
#   3. Archive requests go to llvm-lib (minus /nodefaultlib:*, which
#      llvm-lib does not accept); real links go to lld-link with the
#      ORIGINAL untouched argv (lld-link expands @rsp files itself).
cat > "${ARCH_BIN}/link" << WRAPPER
#!/bin/bash
ALL_ARGS=()
for arg in "\$@"; do
    if [[ "\$arg" == @* ]]; then
        rspfile="\${arg#@}"
        while IFS= read -r line; do
            [[ -n "\$line" ]] && ALL_ARGS+=("\$line")
        done < "\$rspfile"
    else
        ALL_ARGS+=("\$arg")
    fi
done
LIB_MODE=false
HAS_OUT_LIB=false
HAS_OBJ_ONLY=true
ARGS=()
for arg in "\${ALL_ARGS[@]}"; do
    lower="\${arg,,}"
    case "\$lower" in
        /lib|-lib) LIB_MODE=true ;;
        /out:*.lib|-out:*.lib) HAS_OUT_LIB=true; ARGS+=("\$arg") ;;
        /opt:*|-opt:*) ;;
        /subsystem:*|-subsystem:*) HAS_OBJ_ONLY=false; ARGS+=("\$arg") ;;
        *.exe) HAS_OBJ_ONLY=false; ARGS+=("\$arg") ;;
        *) ARGS+=("\$arg") ;;
    esac
done
if [ "\$LIB_MODE" = true ] || ([ "\$HAS_OUT_LIB" = true ] && [ "\$HAS_OBJ_ONLY" = true ]); then
    LIB_ARGS=()
    for arg in "\${ARGS[@]}"; do
        case "\${arg,,}" in
            -nodefaultlib:*|/nodefaultlib:*) ;;
            *) LIB_ARGS+=("\$arg") ;;
        esac
    done
    exec "${LLVM_BIN}/llvm-lib" "\${LIB_ARGS[@]}"
else
    exec "${LLVM_BIN}/lld-link" "\$@"
fi
WRAPPER
chmod +x "${ARCH_BIN}/link"
cp "${ARCH_BIN}/link" "${ARCH_BIN}/link.exe"

# lib → llvm-lib
cat > "${ARCH_BIN}/lib" << WRAPPER
#!/bin/bash
exec "${LLVM_BIN}/llvm-lib" "\$@"
WRAPPER
chmod +x "${ARCH_BIN}/lib"
cp "${ARCH_BIN}/lib" "${ARCH_BIN}/lib.exe"

# rc → llvm-rc (with SDK include paths for winres.h etc.)
# The generated rc script injects the CRT + SDK include paths ahead of the
# caller's arguments so resource scripts can find winres.h and friends.
cat > "${ARCH_BIN}/rc" << WRAPPER
#!/bin/bash
exec "${LLVM_BIN}/llvm-rc" /I "${sdk_dir}/crt/include" /I "${sdk_dir}/sdk/include/ucrt" /I "${sdk_dir}/sdk/include/um" /I "${sdk_dir}/sdk/include/shared" "\$@"
WRAPPER
chmod +x "${ARCH_BIN}/rc"
cp "${ARCH_BIN}/rc" "${ARCH_BIN}/rc.exe"

# ml64 → llvm-ml (MASM-compatible assembler)
cat > "${ARCH_BIN}/ml64" << WRAPPER
#!/bin/bash
exec "${LLVM_BIN}/llvm-ml" -m64 "\$@"
WRAPPER
chmod +x "${ARCH_BIN}/ml64"
cp "${ARCH_BIN}/ml64" "${ARCH_BIN}/ml64.exe"

# clang-cl (for xmake's built-in clang-cl toolchain detection)
cat > "${ARCH_BIN}/clang-cl" << WRAPPER
#!/bin/bash
exec "${LLVM_BIN}/clang" --driver-mode=cl -D_ALLOW_COMPILER_AND_STL_VERSION_MISMATCH "\$@"
WRAPPER
chmod +x "${ARCH_BIN}/clang-cl"

# llvm-ar (cmake's clang-cl driver may use llvm-ar as archiver name but with
# MSVC-style flags like /nologo /out: — redirect to llvm-lib which handles these)
cat > "${ARCH_BIN}/llvm-ar" << WRAPPER
#!/bin/bash
exec "${LLVM_BIN}/llvm-lib" "\$@"
WRAPPER
chmod +x "${ARCH_BIN}/llvm-ar"

# lld-link (for LTO builds where clang-cl toolchain uses lld-link)
cat > "${ARCH_BIN}/lld-link" << WRAPPER
#!/bin/bash
exec "${LLVM_BIN}/lld-link" "\$@"
WRAPPER
chmod +x "${ARCH_BIN}/lld-link"

# mt → llvm-mt (manifest tool)
cat > "${ARCH_BIN}/mt" << WRAPPER
#!/bin/bash
exec "${LLVM_BIN}/llvm-mt" "\$@"
WRAPPER
chmod +x "${ARCH_BIN}/mt"
cp "${ARCH_BIN}/mt" "${ARCH_BIN}/mt.exe"

# -------------------------------------------------------------------------
# Create debug CRT lib symlinks (cmake's try_compile uses Debug config
# by default, which links against msvcrtd.lib etc. -- these don't exist
# in xwin since it only ships release libs. Symlink to release versions
# so cmake compiler tests pass.)
# -------------------------------------------------------------------------
CRT_LIB="${sdk_dir}/crt/lib/x86_64"
# xwin ships only release CRT libs. Alias each debug-suffixed name ('d') to
# its release counterpart so Debug-config compiler probes link. Both lower-
# and upper-case spellings are listed because the directory lives on a
# case-sensitive filesystem.
for lib in msvcrt MSVCRT vcruntime msvcprt libcmt LIBCMT libcpmt libcpmt1 libconcrt libconcrt1 libvcruntime; do
    release="${CRT_LIB}/${lib}.lib"
    debug="${CRT_LIB}/${lib}d.lib"
    # Relative symlink target so the aliases survive moving sdk_dir.
    if [ -f "${release}" ] && [ ! -e "${debug}" ]; then
        ln -sfn "${lib}.lib" "${debug}"
    fi
done

echo ""
echo "Windows SDK installed to: ${sdk_dir}"
echo "  SDK headers:   ${sdk_dir}/sdk/include/um"
echo "  SDK libs:      ${sdk_dir}/sdk/lib/um/x86_64"
echo "  CRT headers:   ${sdk_dir}/crt/include"
echo "  CRT libs:      ${sdk_dir}/crt/lib/x86_64"
echo "  Tool wrappers: ${BIN_DIR}/"
echo "  MSVC compat:   ${sdk_dir}/VC/ and ${sdk_dir}/Windows Kits/"
echo ""
echo "Usage:"
# Fix: print the ACTUAL SDK path. Previously the '$' was escaped (\$), so
# the literal text '${sdk_dir}' was printed and users had to substitute the
# path by hand — inconsistent with every other line of this summary, which
# expands the variable.
echo "  xmake config -p windows -a x64 --toolchain=clang-cl --sdk=${sdk_dir}"
echo ""
echo "Done"