aboutsummaryrefslogtreecommitdiff
path: root/scripts
diff options
context:
space:
mode:
authorStefan Boberg <[email protected]>2021-08-21 18:51:29 +0200
committerStefan Boberg <[email protected]>2021-08-21 18:51:29 +0200
commitee2d4c08b48c17605f36c00377beae0b0dc072e3 (patch)
tree19c8f1dfef4b2e19293327ee2a6d3a1321339397 /scripts
parentImproved crash reporting setup and removed old stubs (diff)
downloadzen-ee2d4c08b48c17605f36c00377beae0b0dc072e3.tar.xz
zen-ee2d4c08b48c17605f36c00377beae0b0dc072e3.zip
Initial build deploy scripts. Still missing debug information upload step so should not be used
Diffstat (limited to 'scripts')
-rw-r--r--scripts/deploybuild.py79
-rw-r--r--scripts/p4utils.py356
-rw-r--r--scripts/peafour.py187
-rw-r--r--scripts/sentry-cli.exebin0 -> 8873472 bytes
4 files changed, 622 insertions, 0 deletions
diff --git a/scripts/deploybuild.py b/scripts/deploybuild.py
new file mode 100644
index 000000000..4d9f49d90
--- /dev/null
+++ b/scripts/deploybuild.py
@@ -0,0 +1,79 @@
import argparse
import sys
import os
import fileinput
import colorama
import shutil
from peafour import P4
from colorama import Fore, Back, Style

colorama.init()

origcwd = os.getcwd()

# Parse and validate arguments

parser = argparse.ArgumentParser(description='Deploy a zen build to an UE tree')
parser.add_argument("root", help="Path to an UE5 root directory")
args = parser.parse_args()

engineroot = args.root

# A valid UE5 root is identified by the presence of RunUAT.bat.
if not os.path.isfile(os.path.join(engineroot, "RunUAT.bat")):
    print(f"{Fore.RED}Not a valid UE5 engine root directory: '{engineroot}'")
    print(Style.RESET_ALL)
    sys.exit(1)  # non-zero exit so callers/CI notice the failure

# Establish root of zen tree by walking up from this script until zen.sln is
# found. Guard against walking past the filesystem root (previously an
# infinite loop when zen.sln was missing).

zenroot = os.path.abspath(__file__)

while not os.path.exists(os.path.join(zenroot, "zen.sln")):
    parent = os.path.dirname(zenroot)
    if parent == zenroot:
        print(f"{Fore.RED}Could not locate zen.sln above '{__file__}'")
        print(Style.RESET_ALL)
        sys.exit(1)
    zenroot = parent

print(f"Zen root: {zenroot}")

# Upload symbols etc to Sentry

# scripts\sentry-cli.exe upload-dif --org to --project zen-server x64\Release\zenserver.exe x64\Release\zenserver.pdb

# Change into root directory to pick up Perforce environment (P4CONFIG etc.)

os.chdir(engineroot)
p4info = P4.info().run()

if not os.path.samefile(p4info.clientRoot, engineroot):
    print(f"{Fore.RED}Could not find P4 client for UE5 engine root directory '{engineroot}'")
    print(Style.RESET_ALL)
    sys.exit(1)

# check out the binaries

print("Reverting any previous unsubmitted deploy")

# Best effort: there may be nothing open to revert.
try:
    P4.revert("Engine/Binaries/Win64/zenserver.*").run()
except Exception:
    pass

print("Checking out zenserver executables")

# Best effort: the files may not exist in the depot yet (first deploy).
try:
    P4.edit("Engine/Binaries/Win64/zenserver.*").run()
except Exception:
    pass

print("Placing zenserver executables into tree")

crashpadtarget = os.path.join(engineroot, "Engine/Binaries/Win64/crashpad_handler.exe")

try:
    # Raw strings: "x64\Release" previously relied on "\R" being an invalid
    # (but tolerated) escape sequence.
    shutil.copy(os.path.join(zenroot, r"x64\Release\zenserver.exe"),
                os.path.join(engineroot, "Engine/Binaries/Win64/zenserver.exe"))
    shutil.copy(os.path.join(zenroot, r'vcpkg_installed\x64-windows-static\tools\sentry-native\crashpad_handler.exe'),
                crashpadtarget)
    P4.add(crashpadtarget).run()
    print("All done and good!")
except Exception as e:
    print(f"Noooooo: {e.args}")

# scripts\sentry-cli.exe upload-dif --org to --project zen-server
diff --git a/scripts/p4utils.py b/scripts/p4utils.py
new file mode 100644
index 000000000..d2a9933e7
--- /dev/null
+++ b/scripts/p4utils.py
@@ -0,0 +1,356 @@
+import os
+import re
+import time
+import flow.cmd
+import threading
+from peafour import P4
+import subprocess as sp
+
+#-------------------------------------------------------------------------------
p4_set_result = None
def get_p4_set(prop_name):
    """Return the value of a Perforce setting, or None if it is unset.

    Environment variables take precedence; otherwise the output of
    "p4 set -q" is parsed once and cached for the life of the process.
    """
    if ret := os.getenv(prop_name):
        return ret

    global p4_set_result
    # An empty dict is a valid cached result (no settings, or p4 missing), so
    # compare against None rather than truthiness — previously an empty cache
    # caused "p4 set" to be re-run on every call.
    if p4_set_result is None:
        p4_set_result = {}
        try:
            proc = sp.Popen(("p4", "set", "-q"), stdout=sp.PIPE)
        except OSError:
            # p4 not installed / not on PATH; cache the empty result.
            return None

        # "p4 set -q" emits one "KEY=value" pair per line.
        for line in iter(proc.stdout.readline, b""):
            try:
                key, value = line.split(b"=", 1)
            except ValueError:
                continue
            p4_set_result[key.decode()] = value.strip().decode()

        proc.wait()
        proc.stdout.close()

    return p4_set_result.get(prop_name, None)
+
+#-------------------------------------------------------------------------------
def login():
    """Confirm a live Perforce session exists; return the user name (or None).

    Raises EnvironmentError when no valid login ticket is available.
    """
    try:
        session = P4.login(s=True).run()
    except P4.Error:
        # No ticket, or an expired one — the user must authenticate manually.
        raise EnvironmentError("No valid Perforce session found. Run 'p4 login' to authenticate.")
    else:
        return getattr(session, "User", None)
+
+#-------------------------------------------------------------------------------
def get_p4config_name():
    """Return the base name of the P4CONFIG file, defaulting to '.p4config.txt'."""
    configured = get_p4_set("P4CONFIG")
    if not configured:
        configured = ".p4config.txt"
    return os.path.basename(configured)
+
+#-------------------------------------------------------------------------------
def has_p4config(start_dir):
    """Look for a P4CONFIG file in start_dir or any of its ancestors.

    Returns (True, path) when one is found, otherwise (False, config_name).
    """
    from pathlib import Path
    config_name = get_p4config_name()
    # Appending a dummy component makes .parents include start_dir itself.
    for directory in (Path(start_dir) / "x").parents:
        candidate = directory / config_name
        if os.path.isfile(candidate):
            return True, candidate
    return False, config_name
+
+#-------------------------------------------------------------------------------
def create_p4config(p4config_path, client, username, port=None):
    """Write a minimal P4CONFIG file naming the client, user and optional port."""
    with open(p4config_path, "wt") as out:
        print_args = { "sep" : "", "file" : out }
        print("P4CLIENT=", client, **print_args)
        print("P4USER=", username, **print_args)
        if port:
            # Fixed: previously wrote "P4PORT<port>" — the '=' separator was
            # missing, producing an invalid config entry.
            print("P4PORT=", port, **print_args)
+
+#-------------------------------------------------------------------------------
def ensure_p4config(start_dir=None):
    """Guarantee a P4CONFIG file exists at or above start_dir.

    Returns (path, created_flag) on success, or None when no Perforce client
    matching start_dir could be located.
    """
    start_dir = start_dir or os.getcwd()

    found, p4config_name = has_p4config(start_dir)
    if found:
        # Already in place; 'p4config_name' is the full candidate path here.
        return p4config_name, False

    username = login()

    # Work out which of the user's clients covers 'start_dir'.
    located = get_client_from_dir(start_dir, username)
    if not located:
        return
    client, root_dir = located

    # The client root is where the new p4config file belongs.
    p4config_path = f"{root_dir}/{p4config_name}"
    create_p4config(p4config_path, client, username)
    return p4config_path, True
+
+#-------------------------------------------------------------------------------
def get_client_from_dir(root_dir, username):
    # Find a Perforce client owned by 'username' on this machine whose root
    # contains 'root_dir'. Returns (client_name, client_root) or None.
    import socket
    host_name = socket.gethostname().lower()

    # Normalise to forward slashes and lower case for the substring test below.
    root_dir = os.path.normpath(root_dir).replace("\\", "/").lower()

    # NOTE(review): this pre-filter requires the client's Host to equal the
    # local host name, which makes the empty-host branch below unreachable —
    # clients with no Host restriction are already filtered out here. Confirm
    # whether host-unrestricted clients should be allowed to match.
    clients = (x for x in P4.clients(u=username) if x.Host.lower() == host_name)
    for client in clients:
        # Substring test: the client root must appear within 'root_dir'.
        if client.Root.replace("\\", "/").lower() in root_dir:
            client_host = client.Host.lower()
            if not client_host or client_host == host_name:
                return client.client, client.Root
+
+#-------------------------------------------------------------------------------
def get_branch_root(depot_path):
    """Locate the branch root of 'depot_path' by probing for a well-known file.

    Returns the root as a depot path ending in '/'. Raises ValueError when
    GenerateProjectFiles.bat is not found in the first few path components.
    """
    def fstat_paths():
        limit = 5 # ...two of which are always required
        prefix = "//"
        pieces = depot_path[2:].split("/")[:limit]
        for piece in pieces:
            prefix = prefix + piece + "/"
            yield prefix + "GenerateProjectFiles.bat"

    print("Probing for well-known file:")
    for spec in fstat_paths():
        print(" ", spec)

    # One fstat over all candidates; the first hit identifies the root.
    result = P4.fstat(fstat_paths(), T="depotFile").run(on_error=False)
    if result:
        # Drop the file name, keeping the trailing slash on the directory.
        return "/".join(result.depotFile.split("/")[:-1]) + "/"

    raise ValueError("Unable to establish branch root")
+
+
+
+#-------------------------------------------------------------------------------
class TempBranchSpec(object):
    """A short-lived Perforce branchspec mapping 'from_path' onto 'to_path'.

    The spec is created on construction and deleted when the object is
    garbage collected; str(instance) yields the branchspec name.
    """

    def __init__(self, use, username, from_path, to_path, ignore_streams=False):
        import hashlib
        # Derive a short, stable id from the endpoints so repeated runs with
        # the same mapping reuse the same spec name.
        digest = hashlib.md5()
        digest.update(from_path.encode())
        digest.update(to_path.encode())
        self._name = f"{username}-ushell.{use}-{digest.hexdigest()[:6]}"

        # To map between streams we need to extract the internal branchspec that
        # Perforce builds. If from/to aren't related streams it will fail so we
        # fallback to a conventional trivial branchspec.
        try:
            if ignore_streams:
                raise P4.Error("")

            fetched = P4.branch(self._name, o=True, S=from_path[:-1], P=to_path[:-1]).run()
            spec = fetched.as_dict()
        except P4.Error:
            spec = {
                "Branch" : self._name,
                "View0" : f"{from_path}... {to_path}...",
            }

        P4.branch(i=True).run(input_data=spec, on_error=False)

    def __del__(self):
        # Best-effort removal of the temporary spec.
        P4.branch(self._name, d=True).run()

    def __str__(self):
        return self._name
+
+
+
+#-------------------------------------------------------------------------------
+def _kb_string(value):
+ return format(value // 1024, ",") + "KB"
+
+#-------------------------------------------------------------------------------
+class _SyncRota(object):
+ class _Worker(object):
+ def __init__(self, id):
+ self.id = id
+ self.work_items = []
+ self.burden = 0
+ self.done_size = 0
+ self.done_items = 0
+ self.error = False
+
+ def __init__(self, changelist, worker_count):
+ self._workers = [_SyncRota._Worker(x) for x in range(worker_count)]
+ self.changelist = str(changelist)
+
+ def add_work(self, item, rev, cost):
+ worker = min(self._workers, key=lambda x: x.burden)
+ worker.work_items.append((item, rev, cost))
+ worker.burden += cost
+
+ def sort(self):
+ direction = 1
+ for worker in self._workers:
+ worker.work_items.sort(key=lambda x: x[2] * direction)
+ direction *= -1
+
+ def read_workers(self):
+ yield from (x for x in self._workers if x.work_items)
+
+#-------------------------------------------------------------------------------
class Syncer(object):
    """Multi-threaded "p4 sync" driver.

    Usage: register depot paths with add_path()/add_exclude(), plan the work
    with schedule(), then perform it with sync().
    """

    def __init__(self):
        self._paths = []              # depot paths to sync
        self._excluded_views = set()  # depot views forced to revision #0

    def _read_sync_specs(self, include_excluded=True):
        # Yield "path@changelist" specs for each registered path; changelist 0
        # is expressed as "#0" instead of "@0".
        cl_suffix = "@" + self._rota.changelist
        cl_suffix = "#0" if cl_suffix == "@0" else cl_suffix
        for depot_path in self._paths:
            yield depot_path + cl_suffix

        if include_excluded:
            # Using "@0" results in slow queries it seems
            yield from (x + "#0" for x in self._excluded_views)

    def _is_excluded(self, path):
        # True when 'path' matches any compiled exclude pattern.
        return next((True for x in self._re_excludes if x.match(path)), False)

    def _build_exclude_re(self):
        # Translate Perforce wildcards to regexes: "..." matches anything
        # (".*") and "*" matches within one path segment ("[^/]*").
        re_excludes = []
        for view in self._excluded_views:
            view = view.replace("...", "@")  # protect "..." while "." is escaped
            view = view.replace(".", "\\.")
            view = view.replace("*", "[^/]*")
            view = view.replace("@", ".*")
            re_excludes.append(view)

        if re_excludes:
            # The expression isn't escaped so hopefully it's not complicated...
            try: re_excludes = [re.compile(x, re.IGNORECASE) for x in re_excludes]
            except: pass

        self._re_excludes = re_excludes

    def add_path(self, dir):
        # Register a depot path to be synced.
        self._paths.append(dir)

    def add_exclude(self, view):
        # Register a depot view whose files should be removed (synced to #0).
        self._excluded_views.add(view)

    def schedule(self, changelist, worker_count=8):
        # Plan the sync: dry-run "p4 sync -n" to discover the work, then
        # spread the files across 'worker_count' workers balanced by size.
        self._build_exclude_re()

        # P4.<cmd> uses p4's Python-marshalled output (the -G option). However the
        # "p4 sync -n" will report open files via a "info" message instead of a
        # structured "stat" one. So we explicitly add open files to the rota.
        def read_items():
            yield from P4.sync(self._read_sync_specs(), n=True).read(on_error=False)
            yield from P4.opened().read(on_error=False)

        self._rota = _SyncRota(changelist, worker_count)

        # Fill the rota
        total_size = 0
        count = 0
        for item in read_items():
            depot_path = item.depotFile
            rev = int(item.rev)

            # Excluded files are skipped, except deletions which are scheduled
            # at rev 0 (i.e. removed locally).
            if self._is_excluded(depot_path):
                if item.action != "deleted":
                    continue
                rev = 0

            # Periodic progress output while the dry-run streams in.
            if count % 17 == 0:
                print("\r" + str(count), "files", f"({_kb_string(total_size)})", end="")

            size = int(getattr(item, "fileSize", 0)) # deletes have no size attr
            self._rota.add_work(depot_path, rev, size)

            total_size += size
            count += 1
        self._rota.sort()
        print("\r" + str(count), "files", f"({_kb_string(total_size)})")

    def sync(self, *, dryrun=False, echo=False):
        # Execute the schedule()d sync across worker threads. Returns True on
        # success, False when any worker reported an error.

        # Sum up what we have to do
        total_burden = sum(x.burden for x in self._rota.read_workers())
        total_items = sum(len(x.work_items) for x in self._rota.read_workers())

        print(f"Fetching {_kb_string(total_burden)} in {total_items} files")

        # Launch the worker threads
        def sync_thread(worker):
            def on_error(p4_error):
                if "not enough space" in p4_error.data:
                    worker.error = "Out of disk space"
                    raise EOFError()  # abort this worker's sync

            try:
                # Anecdotally path@cl appears to be the quickest. path#rev
                # appeared 15% slower, and with -L it was 30%.
                def read_sync_items():
                    cl_prefix = "@" + self._rota.changelist
                    for path, rev, size in worker.work_items:
                        yield path + (cl_prefix if rev else "#0")

                sync = P4(b=8192).sync(read_sync_items(), n=dryrun)
                for item in sync.read(on_error=on_error):
                    if echo:
                        print(item.depotFile)
                    # +0.01 so zero-sized files still register as progress.
                    worker.done_size += int(getattr(item, "fileSize", 0)) + 0.01
            except EOFError:
                pass

        def create_thread(worker):
            thread = threading.Thread(target=sync_thread, args=(worker,))
            thread.start()
            return thread

        threads = [create_thread(x) for x in self._rota.read_workers()]
        print(f"Using {len(threads)} workers")

        # While there are active threads, print detail about their progress
        total_burden += (0.01 * total_items)  # mirror the per-file +0.01 above
        while not echo:
            threads = [x for x in threads if x.is_alive()]
            if not threads:
                break

            done_size = sum(x.done_size for x in self._rota.read_workers())
            progress = ((done_size * 1000) // total_burden) / 10
            print("\r%5.1f%%" % progress, _kb_string(int(done_size)), end="");
            time.sleep(0.3)
        else:
            # echo mode: no progress display, just wait on the workers.
            for thread in threads:
                thread.join()
        print("\r...done ")

        # Check for errors from the workers
        for worker in (x for x in self._rota.read_workers() if x.error):
            print(flow.cmd.text.red("!!" + str(worker.error)))
            return False

        # Nothing more to do if this is a dry run as the remaining tasks need a
        # sync to operate on.
        if dryrun:
            return True

        # P4.sync() returns 'stat' type events but "p4 sync" will report files
        # with a complex sync scenario only as unstructured 'info' messages. As the
        # above won't know about these files we'll do a second sync to catch them.
        global sync_errors
        sync_errors = False
        print("Finalising ", end="")
        def read_depot_files():
            def on_error(data):
                # Benign messages are filtered; anything else marks failure.
                msg = data.data.strip()
                if "up-to-date" in msg: return
                if "not in client view" in msg: return

                print("\n", flow.cmd.text.red(msg), end="")
                global sync_errors
                sync_errors = True

            def on_info(data):
                print("\n", flow.cmd.text.light_yellow(data.data), end="")

            # Dry-run again to enumerate the stragglers, then feed their depot
            # paths into a real (quiet) sync below.
            sync = P4.sync(self._read_sync_specs(False), n=True)
            for item in sync.read(on_error=on_error, on_info=on_info):
                if not self._is_excluded(item.depotFile):
                    yield item.depotFile

        sync = P4.sync(read_depot_files(), q=True)
        for i in sync.read(on_error=False):
            pass
        print()

        return not sync_errors
diff --git a/scripts/peafour.py b/scripts/peafour.py
new file mode 100644
index 000000000..c9f559a21
--- /dev/null
+++ b/scripts/peafour.py
@@ -0,0 +1,187 @@
+import types
+import marshal
+import threading
+import subprocess as sp
+
+#-------------------------------------------------------------------------------
+class _P4Result(object):
+ def __init__(self, result):
+ super().__setattr__("_result", result)
+
+ def __str__(self): return str(self._result)
+ def __contains__(self, key): return (key in self._result) or (key + "0" in self._result)
+ def as_dict(self): return {k.decode():v.decode() for k,v in self._result.items()}
+ def __setattr__(self, key, value): self._result[key.encode()] = str(value).encode()
+
+ def __getattr__(self, key):
+ if (key + "0").encode() in self._result:
+ def as_list():
+ index = 0;
+ while (key + str(index)).encode() in self._result:
+ indexed_key = (key + str(index)).encode()
+ yield self._result[indexed_key].decode()
+ index += 1
+ return as_list()
+
+ ret = self._result.get(str.encode(key))
+ if ret == None: raise AttributeError(key)
+ return ret.decode(errors="replace")
+
+#-------------------------------------------------------------------------------
class _P4Command(object):
    """Builds and runs a single "p4 -G" invocation, decoding marshalled output.

    Keyword arguments become command-line switches (True -> bare flag "-k",
    other values -> "-kvalue" or "-key=value"). Non-string positional
    arguments must be iterables; they are streamed to p4 one-per-line via
    stdin using "p4 -x-".
    """

    @staticmethod
    def _read_args(*args, **kwargs):
        # Translate keyword arguments into p4 switches.
        for k,v in kwargs.items():
            if isinstance(v, bool):
                if v:
                    yield "-" + k
            elif v != None:
                # Long option names use "-key=value", single letters "-kvalue".
                yield f"-{k}={v}" if len(k) > 1 else f"-{k}{v}"

        # String positionals pass straight through; iterables are collected
        # separately in start() and fed via stdin.
        for arg in (x for x in args if isinstance(x, str)):
            yield arg

    def __init__(self, **options):
        # 'options' are global p4 switches placed before the command name.
        opt_iter = _P4Command._read_args(**options)
        self._command = ["p4", "-Qutf8", "-G"]  # -G = marshalled Python output
        self._command += (x for x in opt_iter)

    def start(self, command, *args, **kwargs):
        # Assemble the full argv for 'command'; the process is not spawned
        # until _iter() runs.
        self._stdin_args = []
        for arg in (x for x in args if not isinstance(x, str)):
            if not hasattr(arg, "__iter__"):
                raise TypeError("P4 arguments can be only strings or sequences")
            self._stdin_args.append(arg)

        self._proc = None

        arg_iter = _P4Command._read_args(*args, **kwargs)

        if self._stdin_args:
            # "-x-" tells p4 to read additional arguments from stdin.
            self._command.append("-x-")
        self._command.append(command)
        self._command += (x for x in arg_iter)

    def __del__(self): self._close_proc()
    def __str__(self): return " ".join(self._command)
    def __iter__(self): yield from self._iter()
    def __getattr__(self, name): return getattr(self.run(), name)  # implicit run: e.g. P4.info().clientRoot

    def run(self, **kwargs):
        # Execute and return the first result record (or None if there is none).
        return next(self._iter(**kwargs), None)

    def read(self, **kwargs):
        # Execute, yielding every result record.
        yield from self._iter(**kwargs)

    def _close_proc(self):
        # Release the pipes of a finished (or abandoned) child process.
        if self._proc:
            if self._proc.stdin:
                self._proc.stdin.close()
            self._proc.stdout.close()
            self._proc = None

    def _iter(self, input_data=None, on_error=True, on_info=None, on_text=None):
        # Spawn p4 and yield _P4Result records from its marshalled output.
        #
        # input_data: optional dict marshalled to p4's stdin (spec input forms).
        # on_error:   True to raise on 'error' records, False to skip them, or
        #             a callable receiving the _P4Result.
        # on_info/on_text: optional callables for 'info' / 'text' records;
        #             without them those records are silently dropped.
        stdin = None
        if input_data != None:
            if self._stdin_args:
                raise _P4.Error("It is unsupported to have both generator-type arguments and input data")

            if isinstance(input_data, dict):
                # p4 expects a bytes-keyed/valued marshalled dict.
                input_data = {str(k).encode():str(v).encode() for k,v in input_data.items()}
            else:
                raise _P4.Error("Unsupported input data type; " + type(input_data).__name__)
            stdin = sp.PIPE

        if self._stdin_args:
            stdin = sp.PIPE

        proc = sp.Popen(self._command, stdout=sp.PIPE, stdin=stdin)
        self._proc = proc

        if stdin:
            # Feed stdin from a helper thread so stdout can be consumed
            # concurrently without deadlocking on full pipe buffers.
            def stdin_thread_entry():
                try:
                    if input_data:
                        marshal.dump(input_data, proc.stdin, 0)
                    for args in self._stdin_args:
                        for arg in args:
                            arg = str(arg).encode() + b"\n"
                            proc.stdin.write(arg)
                except (BrokenPipeError, OSError):
                    # p4 exited early; nothing more to send.
                    pass
                finally:
                    try: proc.stdin.close()
                    except: pass

            stdin_thread = threading.Thread(target=stdin_thread_entry)
            stdin_thread.start()

        while True:
            # p4 -G emits a stream of marshalled dicts until EOF.
            try: result = marshal.load(proc.stdout)
            except EOFError: break

            code = result[b"code"]
            del result[b"code"]

            if code == b"error":
                if isinstance(on_error, bool):
                    if on_error:
                        raise _P4.Error(result[b"data"].decode()[:-1])
                    continue

                try:
                    on_error(_P4Result(result))
                except:
                    # Handler aborted; stop p4 before propagating.
                    proc.terminate()
                    raise
                continue

            if code == b"stat":
                yield _P4Result(result)
                continue

            if code == b"text" and on_text:
                try:
                    data = result.get(b"data", b"")
                    on_text(data)
                    continue
                except:
                    proc.terminate()
                    raise

            if code == b"info" and on_info:
                try:
                    on_info(_P4Result(result))
                    continue
                except:
                    proc.terminate()
                    raise

        if stdin:
            stdin_thread.join()
        self._close_proc()
+
+#-------------------------------------------------------------------------------
class _P4(object):
    """Entry point for issuing Perforce commands, e.g. P4.sync(...).

    Attribute access manufactures a callable that launches the named p4
    command; calling the object itself yields a new dispatcher carrying
    extra global options (e.g. P4(b=8192).sync(...)).
    """

    class Error(Exception):
        def __init__(self, msg):
            super().__init__("Perforce: " + msg)

    def __init__(self, **options):
        # Global p4 switches applied to every spawned command.
        self._options = options

    def __getattr__(self, command):
        # Keep the exception type reachable as P4.Error.
        if command == "Error":
            return _P4.Error

        def launch(*args, **kwargs):
            cmd = _P4Command(**self._options)
            cmd.start(command, *args, **kwargs)
            return cmd

        return launch

    def __call__(self, **options):
        # Build a fresh dispatcher with the given global options.
        return _P4(**options)

P4 = _P4()
diff --git a/scripts/sentry-cli.exe b/scripts/sentry-cli.exe
new file mode 100644
index 000000000..6a3ad9800
--- /dev/null
+++ b/scripts/sentry-cli.exe
Binary files differ