aboutsummaryrefslogtreecommitdiff
path: root/scripts/test_scripts
diff options
context:
space:
mode:
Diffstat (limited to 'scripts/test_scripts')
-rw-r--r--scripts/test_scripts/builds-download-upload-test.py260
-rw-r--r--scripts/test_scripts/builds-download-upload-update-build-ids.py150
-rw-r--r--scripts/test_scripts/metadatas/AndroidClient.json9
-rw-r--r--scripts/test_scripts/metadatas/IOSClient.json9
-rw-r--r--scripts/test_scripts/metadatas/LinuxServer.json9
-rw-r--r--scripts/test_scripts/metadatas/PS4Client.json9
-rw-r--r--scripts/test_scripts/metadatas/Switch2Client.json9
-rw-r--r--scripts/test_scripts/metadatas/SwitchClient.json9
-rw-r--r--scripts/test_scripts/metadatas/WindowsClient.json9
-rw-r--r--scripts/test_scripts/metadatas/XB1Client.json9
-rw-r--r--scripts/test_scripts/oplog-import-export-test.py299
-rw-r--r--scripts/test_scripts/oplog-update-build-ids.py151
12 files changed, 932 insertions, 0 deletions
diff --git a/scripts/test_scripts/builds-download-upload-test.py b/scripts/test_scripts/builds-download-upload-test.py
new file mode 100644
index 000000000..8ff5245c1
--- /dev/null
+++ b/scripts/test_scripts/builds-download-upload-test.py
@@ -0,0 +1,260 @@
+#!/usr/bin/env python3
+"""Test script for builds download/upload operations."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import os
+import platform
+import subprocess
+import sys
+from pathlib import Path
+from typing import NamedTuple
+
# Host platform triple used to locate the default zen build output directory.
_PLATFORM = "windows" if sys.platform == "win32" else "macosx" if sys.platform == "darwin" else "linux"
# NOTE(review): assumes x64 on Windows even on ARM64 hosts — confirm intended.
_ARCH = "x64" if sys.platform == "win32" else platform.machine().lower()
_EXE_SUFFIX = ".exe" if sys.platform == "win32" else ""
+
+
+def _cache_dir() -> Path:
+ if sys.platform == "win32":
+ base = Path(os.environ.get("LOCALAPPDATA", Path.home() / "AppData" / "Local"))
+ return base / "Temp" / "zen"
+ elif sys.platform == "darwin":
+ return Path.home() / "Library" / "Caches" / "zen"
+ else:
+ base = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache"))
+ return base / "zen"
+
+
+_BUILD_IDS_PATH = _cache_dir() / "builds-download-upload-build-ids.json"
+
+
class Build(NamedTuple):
    """One build entry loaded from the build-ids JSON file."""
    name: str    # build display name, e.g. "WindowsClient"
    bucket: str  # Jupiter bucket the build lives in
    id: str      # Jupiter build ID
+
+
def load_builds() -> tuple[str, list[Build]]:
    """Load (namespace, builds) from the cached build-ids JSON file.

    If the file is missing, interactively offers to run the updater script to
    generate it. Exits the script on malformed or empty content.
    """
    if not _BUILD_IDS_PATH.exists():
        print(f"Build IDs file not found: {_BUILD_IDS_PATH}")
        answer = input("Run builds-download-upload-update-build-ids.py now to populate it? [y/N] ").strip().lower()
        if answer == "y":
            # Run the sibling updater script with the same interpreter.
            update_script = Path(__file__).parent / "builds-download-upload-update-build-ids.py"
            subprocess.run([sys.executable, str(update_script)], check=True)
        else:
            sys.exit("Aborted. Run scripts/test_scripts/builds-download-upload-update-build-ids.py to populate it.")
    with _BUILD_IDS_PATH.open() as f:
        data: dict = json.load(f)
    namespace = data.get("namespace", "")
    if not namespace:
        sys.exit(f"error: {_BUILD_IDS_PATH} is missing 'namespace'")
    builds = []
    # Each entry: name -> {"bucket": ..., "buildId": ...}
    for name, entry in data.get("builds", {}).items():
        bucket = entry.get("bucket", "")
        build_id = entry.get("buildId", "")
        if not bucket or not build_id:
            sys.exit(f"error: entry '{name}' in {_BUILD_IDS_PATH} is missing 'bucket' or 'buildId'")
        builds.append(Build(name, bucket, build_id))
    if not builds:
        sys.exit(f"error: {_BUILD_IDS_PATH} contains no builds")
    return namespace, builds
+
+
# Default zen executable path; main() may override this from CLI arguments.
ZEN_EXE: Path = Path(f"./build/{_PLATFORM}/{_ARCH}/release/zen{_EXE_SUFFIX}")
# Directory of per-build metadata JSON files passed to `zen builds upload`.
ZEN_METADATA_DIR: Path = Path(__file__).resolve().parent / "metadatas"

# NOTE(review): ZEN_PORT is never used in this script — confirm before removing.
ZEN_PORT = 8558
ZEN_CACHE_PORT = 8559
# URL of the local cache zenserver handed to `zen builds download`.
ZEN_CACHE = f"http://127.0.0.1:{ZEN_CACHE_PORT}"
ZEN_PARTIAL_REQUEST_MODE = "true"

# Server-side arguments appended after '--' for every spawned zenserver.
SERVER_ARGS: tuple[str, ...] = (
    "--http", "asio",
    "--gc-cache-duration-seconds", "1209600",
    "--gc-interval-seconds", "21600",
    "--gc-low-diskspace-threshold", "2147483648",
    "--cache-bucket-limit-overwrites",
)
+
+
def zen_cmd(*args: str | Path, extra_zen_args: list[str] | None = None) -> list[str | Path]:
    """Assemble a zen CLI invocation: executable, forwarded extra args, then *args*."""
    cmd: list[str | Path] = [ZEN_EXE]
    if extra_zen_args:
        cmd.extend(extra_zen_args)
    cmd.extend(args)
    return cmd
+
+
+def run(cmd: list[str | Path]) -> None:
+ print(f" > {' '.join(str(x) for x in cmd)}")
+ try:
+ subprocess.run(cmd, check=True)
+ except FileNotFoundError:
+ sys.exit(f"error: executable not found: {cmd[0]}")
+ except subprocess.CalledProcessError as e:
+ sys.exit(f"error: command failed with exit code {e.returncode}:\n {' '.join(str(x) for x in e.cmd)}")
+
+
def stop_server(label: str, port: int, extra_zen_args: list[str] | None = None) -> None:
    """Best-effort shutdown of a zen server; never raises, so it is finally-safe."""
    print(f"--------- stopping {label}")
    down_cmd = zen_cmd("down", "--port", str(port), extra_zen_args=extra_zen_args)
    rendered = " ".join(str(part) for part in down_cmd)
    print(f" > {rendered}")
    try:
        # Deliberately no check=True: a failed shutdown must not mask the real error.
        subprocess.run(down_cmd)
    except OSError as err:
        print(f"warning: could not stop {label}: {err}", file=sys.stderr)
    print()
+
+
def start_server(label: str, data_dir: Path, port: int, extra_zen_args: list[str] | None = None,
                 extra_server_args: list[str] | None = None) -> None:
    """Launch a zen server on *port* with its data under *data_dir*."""
    print(f"--------- starting {label} {data_dir}")
    server_args: list[str] = [f"--data-dir={data_dir}", *SERVER_ARGS]
    if extra_server_args:
        server_args.extend(extra_server_args)
    run(zen_cmd("up", "--port", str(port), "--show-console", "--", *server_args,
                extra_zen_args=extra_zen_args))
    print()
+
+
def wipe_or_create(label: str, path: Path, extra_zen_args: list[str] | None = None) -> None:
    """Empty *path* via `zen wipe` when it exists, otherwise create it."""
    if not path.exists():
        print(f"--------- creating {label} {path}")
        path.mkdir(parents=True, exist_ok=True)
    else:
        print(f"--------- cleaning {label} {path}")
        run(zen_cmd("wipe", "-y", path, extra_zen_args=extra_zen_args))
    print()
+
+
def check_prerequisites(builds: list[Build]) -> None:
    """Exit unless the zen executable and every build's metadata JSON exist."""
    if not ZEN_EXE.is_file():
        sys.exit(f"error: zen executable not found: {ZEN_EXE}")
    if not ZEN_METADATA_DIR.is_dir():
        sys.exit(f"error: metadata directory not found: {ZEN_METADATA_DIR}")
    metadata_files = (ZEN_METADATA_DIR / f"{build.name}.json" for build in builds)
    for metadata in metadata_files:
        if not metadata.is_file():
            sys.exit(f"error: metadata file not found: {metadata}")
+
+
def main() -> None:
    """Drive the builds download/upload round-trip test.

    Starts a cache zenserver, downloads every configured build from Jupiter
    through the cache, then re-uploads each build into a local storage path,
    and stops the cache server on exit.
    """
    # ZEN_EXE is module-level state so zen_cmd() picks up the CLI override.
    global ZEN_EXE

    # Split on '--' to separate script args from extra zen CLI args
    script_argv: list[str] = []
    extra_zen_args: list[str] = []
    if "--" in sys.argv[1:]:
        sep = sys.argv.index("--", 1)
        script_argv = sys.argv[1:sep]
        extra_zen_args = sys.argv[sep + 1:]
    else:
        script_argv = sys.argv[1:]

    parser = argparse.ArgumentParser(
        description=__doc__,
        epilog="Any arguments after '--' are forwarded to every zen CLI invocation.",
    )
    parser.add_argument(
        "positional_path",
        nargs="?",
        default=None,
        type=Path,
        metavar="DATA_PATH",
        help="root path for all data directories (positional shorthand for --data-path)",
    )
    parser.add_argument(
        "zen_exe_positional",
        nargs="?",
        default=None,
        type=Path,
        metavar="ZEN_EXE_PATH",
        help="path to zen executable (positional shorthand for --zen-exe-path)",
    )
    parser.add_argument(
        "--data-path",
        default=None,
        type=Path,
        metavar="PATH",
        help="root path for all data directories",
    )
    parser.add_argument(
        "--zen-exe-path",
        default=ZEN_EXE,
        type=Path,
        metavar="PATH",
        help=f"path to zen executable (default: {ZEN_EXE})",
    )
    args = parser.parse_args(script_argv)

    # Positional DATA_PATH wins over --data-path; prompt interactively if neither given.
    data_path = args.positional_path
    if data_path is None:
        data_path = args.data_path
    if data_path is None:
        print("WARNING: This script may require up to 1TB of free disk space.")
        raw = input("Enter root path for all data directories: ").strip()
        if not raw:
            sys.exit("error: data path is required")
        data_path = Path(raw)

    # Positional ZEN_EXE_PATH wins over --zen-exe-path (which defaults to ZEN_EXE).
    ZEN_EXE = args.zen_exe_positional
    if ZEN_EXE is None:
        ZEN_EXE = args.zen_exe_path
    namespace, builds = load_builds()
    zen_system_dir = data_path / "system"
    zen_download_dir = data_path / "Download"
    zen_cache_data_dir = data_path / "ZenBuildsCache"
    zen_upload_dir = data_path / "Upload"
    zen_chunk_cache_path = data_path / "ChunkCache"

    check_prerequisites(builds)

    start_server("cache zenserver", zen_cache_data_dir, ZEN_CACHE_PORT,
                 extra_zen_args=extra_zen_args, extra_server_args=["--buildstore-enabled"])
    try:
        wipe_or_create("download folder", zen_download_dir, extra_zen_args)
        wipe_or_create("system folder", zen_system_dir, extra_zen_args)

        # Phase 1: download every build from Jupiter through the local cache.
        for build in builds:
            print(f"--------- importing {build.name} build")
            run(zen_cmd(
                "builds", "download",
                "--host", "https://jupiter.devtools.epicgames.com",
                "--namespace", namespace,
                "--bucket", build.bucket,
                "--build-id", build.id,
                "--local-path", zen_download_dir / build.name,
                f"--zen-cache-host={ZEN_CACHE}",
                f"--allow-partial-block-requests={ZEN_PARTIAL_REQUEST_MODE}",
                "--verify",
                "--system-dir", zen_system_dir,
                extra_zen_args=extra_zen_args,
            ))
            print()

        wipe_or_create("upload folder", zen_upload_dir, extra_zen_args)

        # Phase 2: upload the just-downloaded builds into local storage.
        for build in builds:
            print(f"--------- exporting {build.name} build")
            run(zen_cmd(
                "builds", "upload",
                "--storage-path", zen_upload_dir,
                "--build-id", build.id,
                "--local-path", zen_download_dir / build.name,
                "--verify",
                "--system-dir", zen_system_dir,
                "--metadata-path", str(ZEN_METADATA_DIR / f"{build.name}.json"),
                "--create-build",
                "--chunking-cache-path", zen_chunk_cache_path,
                extra_zen_args=extra_zen_args,
            ))
            print()
    finally:
        stop_server("cache zenserver", ZEN_CACHE_PORT, extra_zen_args)


if __name__ == "__main__":
    main()
diff --git a/scripts/test_scripts/builds-download-upload-update-build-ids.py b/scripts/test_scripts/builds-download-upload-update-build-ids.py
new file mode 100644
index 000000000..2a63aa44d
--- /dev/null
+++ b/scripts/test_scripts/builds-download-upload-update-build-ids.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python3
+"""Update builds-download-upload-build-ids.json with build IDs at the highest common changelist across all buckets."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import os
+import platform
+import subprocess
+import sys
+import tempfile
+from pathlib import Path
+
# Host platform triple used to locate the default zen build output directory.
_PLATFORM = "windows" if sys.platform == "win32" else "macosx" if sys.platform == "darwin" else "linux"
# NOTE(review): assumes x64 on Windows even on ARM64 hosts — confirm intended.
_ARCH = "x64" if sys.platform == "win32" else platform.machine().lower()
_EXE_SUFFIX = ".exe" if sys.platform == "win32" else ""
# Default zen binary location, overridable with --zen.
_DEFAULT_ZEN = Path(f"build/{_PLATFORM}/{_ARCH}/release/zen{_EXE_SUFFIX}")
+
+
+def _cache_dir() -> Path:
+ if sys.platform == "win32":
+ base = Path(os.environ.get("LOCALAPPDATA", Path.home() / "AppData" / "Local"))
+ return base / "Temp" / "zen"
+ elif sys.platform == "darwin":
+ return Path.home() / "Library" / "Caches" / "zen"
+ else:
+ base = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache"))
+ return base / "zen"
+
+
+_OUTPUT_PATH = _cache_dir() / "builds-download-upload-build-ids.json"
+
+# Maps build name -> Jupiter bucket
+_BUILDS: list[tuple[str, str]] = [
+ ("XB1Client", "fortnitegame.staged-build.fortnite-main.xb1-client"),
+ ("WindowsClient", "fortnitegame.staged-build.fortnite-main.windows-client"),
+ ("SwitchClient", "fortnitegame.staged-build.fortnite-main.switch-client"),
+ ("LinuxServer", "fortnitegame.staged-build.fortnite-main.linux-server"),
+ ("Switch2Client", "fortnitegame.staged-build.fortnite-main.switch2-client"),
+ ("PS4Client", "fortnitegame.staged-build.fortnite-main.ps4-client"),
+ ("PS5Client", "fortnitegame.staged-build.fortnite-main.ps5-client"),
+ ("IOSClient", "fortnitegame.staged-build.fortnite-main.ios-client"),
+ ("AndroidClient", "fortnitegame.staged-build.fortnite-main.android-client"),
+]
+
+
def list_builds_for_bucket(zen: str, host: str, namespace: str, bucket: str) -> list[dict]:
    """Run `zen builds list` for a single bucket and return its "results" array.

    The zen CLI writes results to a temporary JSON file (--result-path) that is
    always removed, even when the invocation or the JSON parse fails — the
    original code leaked the temp file on both error paths. Exits the script
    with a diagnostic if the zen binary is missing or returns a failure code.
    """
    # delete=False so the (already closed) file survives for zen to overwrite.
    with tempfile.NamedTemporaryFile(suffix=".json", delete=False) as tmp:
        result_path = Path(tmp.name)

    cmd = [
        zen, "builds", "list",
        "--namespace", namespace,
        "--bucket", bucket,
        "--host", host,
        "--result-path", str(result_path),
    ]

    try:
        try:
            subprocess.run(cmd, check=True, capture_output=True)
        except FileNotFoundError:
            sys.exit(f"error: zen binary not found: {zen}")
        except subprocess.CalledProcessError as e:
            sys.exit(
                f"error: zen builds list failed for bucket '{bucket}' with exit code {e.returncode}\n"
                f"stderr: {e.stderr.decode(errors='replace')}"
            )

        with result_path.open() as f:
            data = json.load(f)
    finally:
        # Clean up on every path, including sys.exit (SystemExit) and parse errors.
        result_path.unlink(missing_ok=True)

    return data.get("results", [])
+
+
def main() -> None:
    """Pick the newest changelist common to all buckets and write the build-ids JSON.

    For each configured build, runs `zen builds list`, maps each changelist to
    its first-seen build ID, intersects the changelist sets across all buckets,
    and writes the build IDs at the highest common changelist to _OUTPUT_PATH
    (consumed by builds-download-upload-test.py).
    """
    parser = argparse.ArgumentParser(
        description="Refresh builds-download-upload-build-ids.json with build IDs at the highest changelist present in all buckets."
    )
    parser.add_argument("--host", default="https://jupiter.devtools.epicgames.com", help="Jupiter host URL")
    parser.add_argument("--zen", default=str(_DEFAULT_ZEN), help="Path to the zen binary")
    # NOTE(review): default namespace "fortnite.oplog" looks copied from the
    # oplog updater — confirm it is correct for staged builds.
    parser.add_argument("--namespace", default="fortnite.oplog", help="Builds storage namespace")
    args = parser.parse_args()

    # For each bucket, fetch results and build a changelist -> buildId map.
    # bucket_cl_map[bucket] = { changelist_int: buildId_str, ... }
    bucket_cl_map: dict[str, dict[int, str]] = {}

    for name, bucket in _BUILDS:
        print(f"Querying {name} ({bucket}) ...")
        results = list_builds_for_bucket(args.zen, args.host, args.namespace, bucket)
        if not results:
            sys.exit(f"error: no results for bucket '{bucket}' (build '{name}')")

        cl_map: dict[int, str] = {}
        for entry in results:
            build_id = entry.get("buildId", "")
            metadata = entry.get("metadata") or {}
            # NOTE(review): this variant reads "commit" while the oplog updater
            # reads "changelist" — confirm the staged-build metadata key name.
            raw_cl = metadata.get("commit")
            if not build_id or raw_cl is None:
                continue
            try:
                cl = int(raw_cl)
            except (TypeError, ValueError):
                continue  # ignore entries with a non-numeric changelist
            # BUGFIX: normalize to int BEFORE the membership test. The old code
            # compared the raw (possibly str) value against int keys, so the
            # "keep first occurrence" guard never matched and older entries
            # silently overwrote newer ones.
            if cl not in cl_map:
                cl_map[cl] = build_id

        if not cl_map:
            sys.exit(
                f"error: bucket '{bucket}' (build '{name}') returned {len(results)} entries "
                "but none had both buildId and changelist in metadata"
            )

        print(f" {len(cl_map)} distinct changelists, latest CL {max(cl_map)}")
        bucket_cl_map[bucket] = cl_map

    # Find the highest changelist present in every bucket's result set.
    common_cls = set.intersection(*(set(cl_map) for cl_map in bucket_cl_map.values()))

    if not common_cls:
        sys.exit(
            "error: no changelist is present in all buckets.\n"
            "Per-bucket CL ranges:\n"
            + "\n".join(
                f" {name} ({bucket}): {min(bucket_cl_map[bucket])} – {max(bucket_cl_map[bucket])}"
                for name, bucket in _BUILDS
            )
        )

    best_cl = max(common_cls)
    print(f"\nHighest common changelist: {best_cl}")

    build_ids: dict[str, dict[str, str]] = {}
    for name, bucket in _BUILDS:
        build_id = bucket_cl_map[bucket][best_cl]
        build_ids[name] = {"bucket": bucket, "buildId": build_id}
        print(f" {name}: {build_id}")

    output = {"namespace": args.namespace, "builds": build_ids}
    _OUTPUT_PATH.parent.mkdir(parents=True, exist_ok=True)
    with _OUTPUT_PATH.open("w") as f:
        json.dump(output, f, indent=2)
        f.write("\n")

    print(f"\nWrote {_OUTPUT_PATH}")


if __name__ == "__main__":
    main()
diff --git a/scripts/test_scripts/metadatas/AndroidClient.json b/scripts/test_scripts/metadatas/AndroidClient.json
new file mode 100644
index 000000000..378d0454d
--- /dev/null
+++ b/scripts/test_scripts/metadatas/AndroidClient.json
@@ -0,0 +1,9 @@
+{
+ "name": "++Fortnite+Main-CL-50966326 AndroidClient",
+ "branch": "ZenBuildTest2",
+ "baselineBranch": "ZenBuildTest2",
+ "platform": "Android",
+ "project": "Fortnite",
+ "changelist": 50966326,
+ "buildType": "staged-build"
+}
diff --git a/scripts/test_scripts/metadatas/IOSClient.json b/scripts/test_scripts/metadatas/IOSClient.json
new file mode 100644
index 000000000..fb0f9a342
--- /dev/null
+++ b/scripts/test_scripts/metadatas/IOSClient.json
@@ -0,0 +1,9 @@
+{
+ "name": "++Fortnite+Main-CL-50966326 IOSClient",
+ "branch": "ZenBuildTest2",
+ "baselineBranch": "ZenBuildTest2",
+ "platform": "IOS",
+ "project": "Fortnite",
+ "changelist": 50966326,
+ "buildType": "staged-build"
+}
diff --git a/scripts/test_scripts/metadatas/LinuxServer.json b/scripts/test_scripts/metadatas/LinuxServer.json
new file mode 100644
index 000000000..02ae2d970
--- /dev/null
+++ b/scripts/test_scripts/metadatas/LinuxServer.json
@@ -0,0 +1,9 @@
+{
+ "name": "++Fortnite+Main-CL-50966326 LinuxServer",
+ "branch": "ZenBuildTest2",
+ "baselineBranch": "ZenBuildTest2",
+ "platform": "Linux",
+ "project": "Fortnite",
+ "changelist": 50966326,
+ "buildType": "staged-build"
+}
diff --git a/scripts/test_scripts/metadatas/PS4Client.json b/scripts/test_scripts/metadatas/PS4Client.json
new file mode 100644
index 000000000..6e49e3e5e
--- /dev/null
+++ b/scripts/test_scripts/metadatas/PS4Client.json
@@ -0,0 +1,9 @@
+{
+ "name": "++Fortnite+Main-CL-50966326 PS4Client",
+ "branch": "ZenBuildTest2",
+ "baselineBranch": "ZenBuildTest2",
+ "platform": "PS4",
+ "project": "Fortnite",
+ "changelist": 50966326,
+ "buildType": "staged-build"
+}
diff --git a/scripts/test_scripts/metadatas/Switch2Client.json b/scripts/test_scripts/metadatas/Switch2Client.json
new file mode 100644
index 000000000..41732e7bc
--- /dev/null
+++ b/scripts/test_scripts/metadatas/Switch2Client.json
@@ -0,0 +1,9 @@
+{
+ "name": "++Fortnite+Main-CL-50966326 Switch2Client",
+ "branch": "ZenBuildTest2",
+ "baselineBranch": "ZenBuildTest2",
+ "platform": "Switch2",
+ "project": "Fortnite",
+ "changelist": 50966326,
+ "buildType": "staged-build"
+}
diff --git a/scripts/test_scripts/metadatas/SwitchClient.json b/scripts/test_scripts/metadatas/SwitchClient.json
new file mode 100644
index 000000000..49362f23e
--- /dev/null
+++ b/scripts/test_scripts/metadatas/SwitchClient.json
@@ -0,0 +1,9 @@
+{
+ "name": "++Fortnite+Main-CL-50966326 SwitchClient",
+ "branch": "ZenBuildTest2",
+ "baselineBranch": "ZenBuildTest2",
+ "platform": "Switch",
+ "project": "Fortnite",
+ "changelist": 50966326,
+ "buildType": "staged-build"
+}
diff --git a/scripts/test_scripts/metadatas/WindowsClient.json b/scripts/test_scripts/metadatas/WindowsClient.json
new file mode 100644
index 000000000..c7af270c2
--- /dev/null
+++ b/scripts/test_scripts/metadatas/WindowsClient.json
@@ -0,0 +1,9 @@
+{
+ "name": "++Fortnite+Main-CL-50966326 WindowsClient",
+ "branch": "ZenBuildTest2",
+ "baselineBranch": "ZenBuildTest2",
+ "platform": "Windows",
+ "project": "Fortnite",
+ "changelist": 50966326,
+ "buildType": "staged-build"
+}
diff --git a/scripts/test_scripts/metadatas/XB1Client.json b/scripts/test_scripts/metadatas/XB1Client.json
new file mode 100644
index 000000000..36fb45801
--- /dev/null
+++ b/scripts/test_scripts/metadatas/XB1Client.json
@@ -0,0 +1,9 @@
+{
+ "name": "++Fortnite+Main-CL-50966326 XB1Client",
+ "branch": "ZenBuildTest2",
+ "baselineBranch": "ZenBuildTest2",
+ "platform": "XB1",
+ "project": "Fortnite",
+ "changelist": 50966326,
+ "buildType": "staged-build"
+}
diff --git a/scripts/test_scripts/oplog-import-export-test.py b/scripts/test_scripts/oplog-import-export-test.py
new file mode 100644
index 000000000..f913a7351
--- /dev/null
+++ b/scripts/test_scripts/oplog-import-export-test.py
@@ -0,0 +1,299 @@
+#!/usr/bin/env python3
+"""Test script for oplog import/export operations."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import os
+import platform
+import subprocess
+import sys
+from pathlib import Path
+from typing import NamedTuple
+
# Host platform triple used to locate the default zen build output directory.
_PLATFORM = "windows" if sys.platform == "win32" else "macosx" if sys.platform == "darwin" else "linux"
# NOTE(review): assumes x64 on Windows even on ARM64 hosts — confirm intended.
_ARCH = "x64" if sys.platform == "win32" else platform.machine().lower()
_EXE_SUFFIX = ".exe" if sys.platform == "win32" else ""
+
+
+def _cache_dir() -> Path:
+ if sys.platform == "win32":
+ base = Path(os.environ.get("LOCALAPPDATA", Path.home() / "AppData" / "Local"))
+ return base / "Temp" / "zen"
+ elif sys.platform == "darwin":
+ return Path.home() / "Library" / "Caches" / "zen"
+ else:
+ base = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache"))
+ return base / "zen"
+
+
+_BUILD_IDS_PATH = _cache_dir() / "oplog-import-export-build-ids.json"
+
+
class Build(NamedTuple):
    """One build entry loaded from the build-ids JSON file."""
    name: str    # build display name, e.g. "WindowsClient"
    bucket: str  # Jupiter bucket the build lives in
    id: str      # Jupiter build ID
+
+
def load_builds() -> tuple[str, list[Build]]:
    """Load (namespace, builds) from the cached build-ids JSON file.

    If the file is missing, interactively offers to run the updater script to
    generate it. Exits the script on malformed or empty content.
    """
    if not _BUILD_IDS_PATH.exists():
        print(f"Build IDs file not found: {_BUILD_IDS_PATH}")
        answer = input("Run oplog-update-build-ids.py now to populate it? [y/N] ").strip().lower()
        if answer == "y":
            # Run the sibling updater script with the same interpreter.
            update_script = Path(__file__).parent / "oplog-update-build-ids.py"
            subprocess.run([sys.executable, str(update_script)], check=True)
        else:
            sys.exit("Aborted. Run scripts/test_scripts/oplog-update-build-ids.py to populate it.")
    with _BUILD_IDS_PATH.open() as f:
        data: dict = json.load(f)
    namespace = data.get("namespace", "")
    if not namespace:
        sys.exit(f"error: {_BUILD_IDS_PATH} is missing 'namespace'")
    builds = []
    # Each entry: name -> {"bucket": ..., "buildId": ...}
    for name, entry in data.get("builds", {}).items():
        bucket = entry.get("bucket", "")
        build_id = entry.get("buildId", "")
        if not bucket or not build_id:
            sys.exit(f"error: entry '{name}' in {_BUILD_IDS_PATH} is missing 'bucket' or 'buildId'")
        builds.append(Build(name, bucket, build_id))
    if not builds:
        sys.exit(f"error: {_BUILD_IDS_PATH} contains no builds")
    return namespace, builds
+
+
# Default zen executable path; main() may override this from CLI arguments.
ZEN_EXE: Path = Path(f"./build/{_PLATFORM}/{_ARCH}/release/zen{_EXE_SUFFIX}")

# Port for the zenserver under test; the cache server runs on a separate port.
ZEN_PORT = 8558
ZEN_CACHE_PORT = 8559
# URL of the local cache zenserver handed to `zen oplog-import`.
ZEN_CACHE = f"http://127.0.0.1:{ZEN_CACHE_PORT}"
ZEN_CACHE_POPULATE = "true"
ZEN_PARTIAL_REQUEST_MODE = "true"

# Server-side arguments appended after '--' for every spawned zenserver.
SERVER_ARGS: tuple[str, ...] = (
    "--http", "asio",
    "--gc-cache-duration-seconds", "1209600",
    "--gc-interval-seconds", "21600",
    "--gc-low-diskspace-threshold", "2147483648",
    "--cache-bucket-limit-overwrites",
)
+
+
def zen_cmd(*args: str | Path, extra_zen_args: list[str] | None = None) -> list[str | Path]:
    """Assemble a zen CLI invocation: executable, forwarded extra args, then *args*."""
    cmd: list[str | Path] = [ZEN_EXE]
    if extra_zen_args:
        cmd.extend(extra_zen_args)
    cmd.extend(args)
    return cmd
+
+
+def run(cmd: list[str | Path]) -> None:
+ try:
+ subprocess.run(cmd, check=True)
+ except FileNotFoundError:
+ sys.exit(f"error: executable not found: {cmd[0]}")
+ except subprocess.CalledProcessError as e:
+ sys.exit(f"error: command failed with exit code {e.returncode}:\n {' '.join(str(x) for x in e.cmd)}")
+
+
def stop_server(label: str, port: int, extra_zen_args: list[str] | None = None) -> None:
    """Best-effort shutdown of a zen server; never raises, so it is finally-safe."""
    print(f"--------- stopping {label}")
    down_cmd = zen_cmd("down", "--port", str(port), extra_zen_args=extra_zen_args)
    try:
        # Deliberately no check=True: a failed shutdown must not mask the real error.
        subprocess.run(down_cmd)
    except OSError as err:
        print(f"warning: could not stop {label}: {err}", file=sys.stderr)
    print()
+
+
def start_server(label: str, data_dir: Path, port: int, extra_zen_args: list[str] | None = None,
                 extra_server_args: list[str] | None = None) -> None:
    """Launch a zen server on *port* with its data under *data_dir*."""
    print(f"--------- starting {label} {data_dir}")
    server_args: list[str] = [f"--data-dir={data_dir}", *SERVER_ARGS]
    if extra_server_args:
        server_args.extend(extra_server_args)
    run(zen_cmd("up", "--port", str(port), "--show-console", "--", *server_args,
                extra_zen_args=extra_zen_args))
    print()
+
+
def wipe_or_create(label: str, path: Path, extra_zen_args: list[str] | None = None) -> None:
    """Empty *path* via `zen wipe` when it exists, otherwise create it."""
    if not path.exists():
        print(f"--------- creating {label} {path}")
        path.mkdir(parents=True, exist_ok=True)
    else:
        print(f"--------- cleaning {label} {path}")
        run(zen_cmd("wipe", "-y", path, extra_zen_args=extra_zen_args))
    print()
+
+
def check_prerequisites() -> None:
    """Exit with an error unless the zen executable exists."""
    if ZEN_EXE.is_file():
        return
    sys.exit(f"error: zen executable not found: {ZEN_EXE}")
+
+
def setup_project(port: int, extra_zen_args: list[str] | None = None) -> None:
    """Create (or force-update) the FortniteGame project on the server at *port*."""
    print("--------- creating FortniteGame project")
    create_cmd = zen_cmd("project-create", f"--hosturl=127.0.0.1:{port}", "FortniteGame",
                         "--force-update", extra_zen_args=extra_zen_args)
    run(create_cmd)
    print()
+
+
def setup_oplog(port: int, build_name: str, extra_zen_args: list[str] | None = None) -> None:
    """Create (or force-update) *build_name*'s oplog in the FortniteGame project."""
    print(f"--------- creating {build_name} oplog")
    create_cmd = zen_cmd("oplog-create", f"--hosturl=127.0.0.1:{port}", "FortniteGame",
                         build_name, "--force-update", extra_zen_args=extra_zen_args)
    run(create_cmd)
    print()
+
def main() -> None:
    """Drive the oplog import/export round-trip test.

    Starts a cache zenserver, imports every configured oplog from Jupiter into
    a fresh zenserver (validating each), exports them to disk, then re-imports
    the exported files into a second fresh zenserver and validates again.
    """
    # ZEN_EXE is module-level state so zen_cmd() picks up the CLI override.
    global ZEN_EXE

    # Split on '--' to separate script args from extra zen CLI args
    script_argv: list[str] = []
    extra_zen_args: list[str] = []
    if "--" in sys.argv[1:]:
        sep = sys.argv.index("--", 1)
        script_argv = sys.argv[1:sep]
        extra_zen_args = sys.argv[sep + 1:]
    else:
        script_argv = sys.argv[1:]

    parser = argparse.ArgumentParser(
        description=__doc__,
        epilog="Any arguments after '--' are forwarded to every zen CLI invocation.",
    )
    parser.add_argument(
        "positional_path",
        nargs="?",
        default=None,
        type=Path,
        metavar="DATA_PATH",
        help="root path for all data directories (positional shorthand for --data-path)",
    )
    parser.add_argument(
        "zen_exe_positional",
        nargs="?",
        default=None,
        type=Path,
        metavar="ZEN_EXE_PATH",
        help="path to zen executable (positional shorthand for --zen-exe-path)",
    )
    parser.add_argument(
        "--data-path",
        default=None,
        type=Path,
        metavar="PATH",
        help="root path for all data directories",
    )
    parser.add_argument(
        "--zen-exe-path",
        default=ZEN_EXE,
        type=Path,
        metavar="PATH",
        help=f"path to zen executable (default: {ZEN_EXE})",
    )
    args = parser.parse_args(script_argv)

    # Positional DATA_PATH wins over --data-path; prompt interactively if neither given.
    data_path = args.positional_path
    if data_path is None:
        data_path = args.data_path
    if data_path is None:
        print("WARNING: This script may require up to 1TB of free disk space.")
        raw = input("Enter root path for all data directories: ").strip()
        if not raw:
            sys.exit("error: data path is required")
        data_path = Path(raw)

    # Positional ZEN_EXE_PATH wins over --zen-exe-path (which defaults to ZEN_EXE).
    ZEN_EXE = args.zen_exe_positional
    if ZEN_EXE is None:
        ZEN_EXE = args.zen_exe_path
    namespace, builds = load_builds()
    zen_data_dir = data_path / "DDC" / "OplogsZen"
    zen_cache_data_dir = data_path / "DDC" / "ZenBuildsCache"
    zen_import_data_dir = data_path / "DDC" / "OplogsZenImport"
    export_dir = data_path / "Export" / "FortniteGame"

    check_prerequisites()

    start_server("cache zenserver", zen_cache_data_dir, ZEN_CACHE_PORT,
                 extra_zen_args=extra_zen_args, extra_server_args=["--buildstore-enabled"])
    try:
        # Phase 1: import oplogs from Jupiter into a fresh zenserver and export them.
        wipe_or_create("zenserver data", zen_data_dir, extra_zen_args)
        start_server("zenserver", zen_data_dir, ZEN_PORT, extra_zen_args=extra_zen_args)
        try:
            setup_project(ZEN_PORT, extra_zen_args)

            for build in builds:
                setup_oplog(ZEN_PORT, build.name, extra_zen_args)

                print(f"--------- importing {build.name} oplog")
                run(zen_cmd(
                    "oplog-import",
                    f"--hosturl=127.0.0.1:{ZEN_PORT}",
                    "FortniteGame", build.name,
                    "--clean",
                    "--builds", "https://jupiter.devtools.epicgames.com",
                    "--namespace", namespace,
                    "--bucket", build.bucket,
                    "--builds-id", build.id,
                    f"--zen-cache-host={ZEN_CACHE}",
                    f"--zen-cache-upload={ZEN_CACHE_POPULATE}",
                    f"--allow-partial-block-requests={ZEN_PARTIAL_REQUEST_MODE}",
                    extra_zen_args=extra_zen_args,
                ))
                print()

                print(f"--------- validating {build.name} oplog")
                run(zen_cmd("oplog-validate", f"--hosturl=127.0.0.1:{ZEN_PORT}", "FortniteGame", build.name,
                            extra_zen_args=extra_zen_args))
                print()

            wipe_or_create("export folder", export_dir, extra_zen_args)

            for build in builds:
                print(f"--------- exporting {build.name} oplog")
                run(zen_cmd(
                    "oplog-export",
                    f"--hosturl=127.0.0.1:{ZEN_PORT}",
                    "FortniteGame", build.name,
                    "--file", export_dir,
                    "--forcetempblocks",
                    extra_zen_args=extra_zen_args,
                ))
                print()
        finally:
            stop_server("zenserver", ZEN_PORT, extra_zen_args)

        # Phase 2: re-import the exported files into a second fresh zenserver
        # (reusing ZEN_PORT, which the first server has released) and validate.
        wipe_or_create("alternate zenserver data", zen_import_data_dir, extra_zen_args)
        start_server("import zenserver", zen_import_data_dir, ZEN_PORT, extra_zen_args=extra_zen_args)
        try:
            setup_project(ZEN_PORT, extra_zen_args)

            for build in builds:
                setup_oplog(ZEN_PORT, build.name, extra_zen_args)

                print(f"--------- importing {build.name} oplog")
                run(zen_cmd(
                    "oplog-import",
                    f"--hosturl=127.0.0.1:{ZEN_PORT}",
                    "FortniteGame", build.name,
                    "--file", export_dir,
                    extra_zen_args=extra_zen_args,
                ))
                print()

                print(f"--------- validating {build.name} oplog")
                run(zen_cmd("oplog-validate", f"--hosturl=127.0.0.1:{ZEN_PORT}", "FortniteGame", build.name,
                            extra_zen_args=extra_zen_args))
                print()
        finally:
            stop_server("alternative zenserver", ZEN_PORT, extra_zen_args)
    finally:
        stop_server("cache zenserver", ZEN_CACHE_PORT, extra_zen_args)


if __name__ == "__main__":
    main()
diff --git a/scripts/test_scripts/oplog-update-build-ids.py b/scripts/test_scripts/oplog-update-build-ids.py
new file mode 100644
index 000000000..67e128c8e
--- /dev/null
+++ b/scripts/test_scripts/oplog-update-build-ids.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+"""Update oplog-import-export-build-ids.json with build IDs at the highest common changelist across all buckets."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import os
+import platform
+import subprocess
+import sys
+import tempfile
+from pathlib import Path
+
# Host platform triple used to locate the default zen build output directory.
_PLATFORM = "windows" if sys.platform == "win32" else "macosx" if sys.platform == "darwin" else "linux"
# NOTE(review): assumes x64 on Windows even on ARM64 hosts — confirm intended.
_ARCH = "x64" if sys.platform == "win32" else platform.machine().lower()
_EXE_SUFFIX = ".exe" if sys.platform == "win32" else ""
# Default zen binary location, overridable with --zen.
_DEFAULT_ZEN = Path(f"build/{_PLATFORM}/{_ARCH}/release/zen{_EXE_SUFFIX}")
+
+
+def _cache_dir() -> Path:
+ if sys.platform == "win32":
+ base = Path(os.environ.get("LOCALAPPDATA", Path.home() / "AppData" / "Local"))
+ return base / "Temp" / "zen"
+ elif sys.platform == "darwin":
+ return Path.home() / "Library" / "Caches" / "zen"
+ else:
+ base = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache"))
+ return base / "zen"
+
+
+_OUTPUT_PATH = _cache_dir() / "oplog-import-export-build-ids.json"
+
+# Maps build name -> Jupiter bucket
+_BUILDS: list[tuple[str, str]] = [
+ ("XB1Client", "fortnitegame.oplog.fortnite-main.xb1client"),
+ ("WindowsClient", "fortnitegame.oplog.fortnite-main.windowsclient"),
+ ("SwitchClient", "fortnitegame.oplog.fortnite-main.switchclient"),
+ ("XSXClient", "fortnitegame.oplog.fortnite-main.xsxclient"),
+ ("Switch2Client", "fortnitegame.oplog.fortnite-main.switch2client"),
+ ("PS4Client", "fortnitegame.oplog.fortnite-main.ps4client"),
+ ("PS5Client", "fortnitegame.oplog.fortnite-main.ps5client"),
+ ("LinuxServer", "fortnitegame.oplog.fortnite-main.linuxserver"),
+ ("IOSClient", "fortnitegame.oplog.fortnite-main.iosclient"),
+ ("Android_ASTCClient", "fortnitegame.oplog.fortnite-main.android_astcclient"),
+]
+
+
def list_builds_for_bucket(zen: str, host: str, namespace: str, bucket: str) -> list[dict]:
    """Run `zen builds list` for a single bucket and return its "results" array.

    The zen CLI writes results to a temporary JSON file (--result-path) that is
    always removed, even when the invocation or the JSON parse fails — the
    original code leaked the temp file on both error paths. Exits the script
    with a diagnostic if the zen binary is missing or returns a failure code.
    """
    # delete=False so the (already closed) file survives for zen to overwrite.
    with tempfile.NamedTemporaryFile(suffix=".json", delete=False) as tmp:
        result_path = Path(tmp.name)

    cmd = [
        zen, "builds", "list",
        "--namespace", namespace,
        "--bucket", bucket,
        "--host", host,
        "--result-path", str(result_path),
    ]

    try:
        try:
            subprocess.run(cmd, check=True, capture_output=True)
        except FileNotFoundError:
            sys.exit(f"error: zen binary not found: {zen}")
        except subprocess.CalledProcessError as e:
            sys.exit(
                f"error: zen builds list failed for bucket '{bucket}' with exit code {e.returncode}\n"
                f"stderr: {e.stderr.decode(errors='replace')}"
            )

        with result_path.open() as f:
            data = json.load(f)
    finally:
        # Clean up on every path, including sys.exit (SystemExit) and parse errors.
        result_path.unlink(missing_ok=True)

    return data.get("results", [])
+
+
def main() -> None:
    """Pick the newest changelist common to all buckets and write the build-ids JSON.

    For each configured build, runs `zen builds list`, maps each changelist to
    its first-seen build ID, intersects the changelist sets across all buckets,
    and writes the build IDs at the highest common changelist to _OUTPUT_PATH
    (consumed by oplog-import-export-test.py).
    """
    parser = argparse.ArgumentParser(
        description="Refresh oplog-import-export-build-ids.json with build IDs at the highest changelist present in all buckets."
    )
    parser.add_argument("--host", default="https://jupiter.devtools.epicgames.com", help="Jupiter host URL")
    parser.add_argument("--zen", default=str(_DEFAULT_ZEN), help="Path to the zen binary")
    parser.add_argument("--namespace", default="fortnite.oplog", help="Builds storage namespace")
    args = parser.parse_args()

    # For each bucket, fetch results and build a changelist -> buildId map.
    # bucket_cl_map[bucket] = { changelist_int: buildId_str, ... }
    bucket_cl_map: dict[str, dict[int, str]] = {}

    for name, bucket in _BUILDS:
        print(f"Querying {name} ({bucket}) ...")
        results = list_builds_for_bucket(args.zen, args.host, args.namespace, bucket)
        if not results:
            sys.exit(f"error: no results for bucket '{bucket}' (build '{name}')")

        cl_map: dict[int, str] = {}
        for entry in results:
            build_id = entry.get("buildId", "")
            metadata = entry.get("metadata") or {}
            raw_cl = metadata.get("changelist")
            if not build_id or raw_cl is None:
                continue
            try:
                cl = int(raw_cl)
            except (TypeError, ValueError):
                continue  # ignore entries with a non-numeric changelist
            # BUGFIX: normalize to int BEFORE the membership test. The old code
            # compared the raw (possibly str) value against int keys, so the
            # "keep first occurrence" guard never matched and older entries
            # silently overwrote newer ones.
            if cl not in cl_map:
                cl_map[cl] = build_id

        if not cl_map:
            sys.exit(
                f"error: bucket '{bucket}' (build '{name}') returned {len(results)} entries "
                "but none had both buildId and changelist in metadata"
            )

        print(f" {len(cl_map)} distinct changelists, latest CL {max(cl_map)}")
        bucket_cl_map[bucket] = cl_map

    # Find the highest changelist present in every bucket's result set.
    common_cls = set.intersection(*(set(cl_map) for cl_map in bucket_cl_map.values()))

    if not common_cls:
        sys.exit(
            "error: no changelist is present in all buckets.\n"
            "Per-bucket CL ranges:\n"
            + "\n".join(
                f" {name} ({bucket}): {min(bucket_cl_map[bucket])} – {max(bucket_cl_map[bucket])}"
                for name, bucket in _BUILDS
            )
        )

    best_cl = max(common_cls)
    print(f"\nHighest common changelist: {best_cl}")

    build_ids: dict[str, dict[str, str]] = {}
    for name, bucket in _BUILDS:
        build_id = bucket_cl_map[bucket][best_cl]
        build_ids[name] = {"bucket": bucket, "buildId": build_id}
        print(f" {name}: {build_id}")

    output = {"namespace": args.namespace, "builds": build_ids}
    _OUTPUT_PATH.parent.mkdir(parents=True, exist_ok=True)
    with _OUTPUT_PATH.open("w") as f:
        json.dump(output, f, indent=2)
        f.write("\n")

    print(f"\nWrote {_OUTPUT_PATH}")


if __name__ == "__main__":
    main()