aboutsummaryrefslogtreecommitdiff
path: root/scripts/deploy_release.py
diff options
context:
space:
mode:
authorStefan Boberg <[email protected]>2025-11-19 08:16:42 +0100
committerStefan Boberg <[email protected]>2025-11-19 08:17:29 +0100
commit900b30cc9379e69579fc0d03e719cdab23555c88 (patch)
tree05d8d21261a5132445cfc24331698323bee5ade7 /scripts/deploy_release.py
parent5.7.11 (diff)
downloadzen-900b30cc9379e69579fc0d03e719cdab23555c88.tar.xz
zen-900b30cc9379e69579fc0d03e719cdab23555c88.zip
renamed deployment script
Diffstat (limited to 'scripts/deploy_release.py')
-rw-r--r--scripts/deploy_release.py344
1 files changed, 344 insertions, 0 deletions
diff --git a/scripts/deploy_release.py b/scripts/deploy_release.py
new file mode 100644
index 000000000..52805738f
--- /dev/null
+++ b/scripts/deploy_release.py
@@ -0,0 +1,344 @@
+#!/usr/bin/env python3
+"""
+Download GitHub release artifacts for Zen.
+
+This script downloads Mac, Linux, and Windows artifacts from the GitHub release
+corresponding to the current version in VERSION.txt and copies them to a specified
+UE (p4) workspace directory.
+
+Before copying the artifacts, it syncs (to head) the files contained in the downloaded
+archives from Perforce and then opens them for editing and leaves them in the default
+pending changelist.
+
+"""
+
import argparse
import os
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path
+
+
+def read_version(repo_root):
+ """Read the version from VERSION.txt."""
+ version_file = repo_root / "VERSION.txt"
+ if not version_file.exists():
+ print(f"Error: VERSION.txt not found at {version_file}", file=sys.stderr)
+ sys.exit(1)
+
+ version = version_file.read_text().strip()
+ print(f"Version: {version}")
+ return version
+
+
+def download_artifact_gh(version, artifact_name, output_path, verbose=False):
+ """Download a release artifact using gh CLI."""
+ print(f"Downloading {artifact_name}...")
+ print(f" To: {output_path}")
+
+ try:
+ # Build gh release download command
+ cmd = ['gh', 'release', 'download', f'v{version}', '--pattern', artifact_name]
+
+ if verbose:
+ print(f" Command: {' '.join(cmd)}")
+
+ # Create a temporary directory as a subdirectory of current working directory
+ # so gh can access the git repository context
+ temp_dir = Path.cwd() / '.temp_download'
+ temp_dir.mkdir(exist_ok=True)
+
+ try:
+ # Download to temp directory
+ result = subprocess.run(
+ cmd,
+ cwd=temp_dir,
+ capture_output=True,
+ text=True
+ )
+
+ if result.returncode != 0:
+ print(f" Error: gh release download failed", file=sys.stderr)
+ if result.stderr:
+ print(f" {result.stderr.strip()}", file=sys.stderr)
+ return False
+
+ # Move the downloaded file to the target location
+ downloaded_file = temp_dir / artifact_name
+ if not downloaded_file.exists():
+ print(f" Error: Downloaded file not found at {downloaded_file}", file=sys.stderr)
+ return False
+
+ shutil.move(str(downloaded_file), str(output_path))
+
+ file_size = output_path.stat().st_size
+ print(f" Downloaded successfully ({file_size:,} bytes)")
+ return True
+ finally:
+ # Clean up temp directory
+ if temp_dir.exists():
+ shutil.rmtree(temp_dir, ignore_errors=True)
+
+ except FileNotFoundError:
+ print(f" Error: 'gh' command not found. Please install GitHub CLI.", file=sys.stderr)
+ print(f" Visit: https://cli.github.com/", file=sys.stderr)
+ return False
+ except Exception as e:
+ print(f" Error: {e}", file=sys.stderr)
+ return False
+
+
+def get_archive_files(zip_path):
+ """Get list of files contained in the zip archive."""
+ import zipfile
+
+ try:
+ with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+ return [info.filename for info in zip_ref.filelist if not info.is_dir()]
+ except Exception as e:
+ print(f" Error reading archive: {e}", file=sys.stderr)
+ return []
+
+
+def checkout_files_p4(target_dir, file_list, verbose=False):
+ """Check out specific files from Perforce."""
+ if not file_list:
+ return True
+
+ print(f"Checking out files from Perforce...")
+
+ try:
+ # First sync files to latest revision
+ sync_cmd = ['p4', 'sync'] + file_list
+
+ if verbose:
+ print(f" Command: {' '.join(sync_cmd)}")
+ print(f" Working directory: {target_dir}")
+
+ sync_result = subprocess.run(
+ sync_cmd,
+ cwd=target_dir,
+ capture_output=True,
+ text=True
+ )
+
+ if sync_result.returncode == 0 or 'up-to-date' in sync_result.stdout:
+ if verbose and sync_result.stdout and sync_result.stdout.strip():
+ print(f" {sync_result.stdout.strip()}")
+ else:
+ # Sync might fail if files don't exist yet, which is okay
+ if sync_result.stderr and sync_result.stderr.strip():
+ print(f" Sync warning: {sync_result.stderr.strip()}")
+
+ # Now check out files using p4 edit
+ # Use relative paths and run from target_dir so p4 can infer P4CLIENT correctly
+ cmd = ['p4', 'edit'] + file_list
+
+ if verbose:
+ print(f" Command: {' '.join(cmd)}")
+ print(f" Working directory: {target_dir}")
+
+ result = subprocess.run(
+ cmd,
+ cwd=target_dir,
+ capture_output=True,
+ text=True
+ )
+
+ if result.returncode == 0 or 'opened for edit' in result.stdout:
+ # Count how many files were opened
+ output_lines = result.stdout.strip().split('\n') if result.stdout.strip() else []
+ file_count = len([line for line in output_lines if 'opened for edit' in line])
+ print(f" Checked out {file_count} file(s)")
+ return True
+ else:
+ # p4 edit might return non-zero if files don't exist yet or are already open
+ # This is often not a fatal error
+ if result.stdout and result.stdout.strip():
+ print(f" {result.stdout.strip()}")
+ if result.stderr and result.stderr.strip():
+ print(f" {result.stderr.strip()}")
+ return True # Continue anyway
+ except FileNotFoundError:
+ print(f" Warning: 'p4' command not found. Skipping Perforce checkout.", file=sys.stderr)
+ return True # Continue without P4
+ except Exception as e:
+ print(f" Warning: Error during P4 checkout: {e}", file=sys.stderr)
+ return True # Continue anyway
+
+
+def revert_unchanged_files_p4(target_dir, file_list, verbose=False):
+ """Revert files that have no content changes from the checked-in version."""
+ if not file_list:
+ return
+
+ print(f"Reverting unchanged files...")
+
+ try:
+ # Use p4 revert -a to revert unchanged files
+ # -a flag reverts files that are open for edit but have no content changes
+ revert_cmd = ['p4', 'revert', '-a'] + file_list
+
+ if verbose:
+ print(f" Command: {' '.join(revert_cmd)}")
+ print(f" Working directory: {target_dir}")
+
+ revert_result = subprocess.run(
+ revert_cmd,
+ cwd=target_dir,
+ capture_output=True,
+ text=True
+ )
+
+ if revert_result.returncode == 0:
+ # Count how many files were reverted
+ output_lines = revert_result.stdout.strip().split('\n') if revert_result.stdout.strip() else []
+ # p4 revert -a outputs lines like "file.txt - was edit, reverted"
+ reverted_count = len([line for line in output_lines if 'reverted' in line.lower()])
+ if reverted_count > 0:
+ print(f" Reverted {reverted_count} unchanged file(s)")
+ else:
+ print(f" No unchanged files to revert")
+ else:
+ if revert_result.stderr and revert_result.stderr.strip():
+ print(f" Revert warning: {revert_result.stderr.strip()}")
+ except FileNotFoundError:
+ # p4 not available, skip
+ pass
+ except Exception as e:
+ print(f" Warning: Error during P4 revert: {e}", file=sys.stderr)
+
+
+def extract_artifact(zip_path, target_dir, artifact_name):
+ """Extract a zip file to the target directory."""
+ import zipfile
+
+ print(f"Extracting {artifact_name}...")
+ print(f" To: {target_dir}")
+
+ try:
+ with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+ zip_ref.extractall(target_dir)
+
+ print(f" Extracted successfully")
+ return True
+ except Exception as e:
+ print(f" Error extracting: {e}", file=sys.stderr)
+ return False
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description="Download Zen release artifacts from GitHub for the current version."
+ )
+ parser.add_argument(
+ "output_dir",
+ type=Path,
+ help="Directory where artifacts will be copied"
+ )
+ parser.add_argument(
+ "--version-file",
+ type=Path,
+ help="Path to VERSION.txt (default: auto-detect from script location)"
+ )
+ parser.add_argument(
+ "-v", "--verbose",
+ action="store_true",
+ help="Print command lines before executing external commands"
+ )
+
+ args = parser.parse_args()
+
+ # Determine repository root
+ if args.version_file:
+ repo_root = args.version_file.parent
+ else:
+ # Assume script is in scripts/ directory
+ script_dir = Path(__file__).resolve().parent
+ repo_root = script_dir.parent
+
+ # Read version
+ version = read_version(repo_root)
+
+ # Create output directory if it doesn't exist
+ output_dir = args.output_dir.resolve()
+ output_dir.mkdir(parents=True, exist_ok=True)
+ print(f"Output directory: {output_dir}\n")
+
+ # Define artifacts to download
+ artifacts = [
+ {
+ "name": "zenserver-win64.zip",
+ "platform": "Windows",
+ "target_subdir": "Engine/Binaries/Win64"
+ },
+ {
+ "name": "zenserver-macos.zip",
+ "platform": "macOS",
+ "target_subdir": "Engine/Binaries/Mac"
+ },
+ {
+ "name": "zenserver-linux.zip",
+ "platform": "Linux",
+ "target_subdir": "Engine/Binaries/Linux"
+ }
+ ]
+
+ # Download each artifact
+ success_count = 0
+ failed_artifacts = []
+
+ for artifact in artifacts:
+ artifact_name = artifact["name"]
+ platform = artifact["platform"]
+ target_subdir = artifact["target_subdir"]
+
+ # Download to temporary location first
+ temp_path = output_dir / artifact_name
+
+ # Target extraction directory
+ target_dir = output_dir / target_subdir
+ target_dir.mkdir(parents=True, exist_ok=True)
+
+ # Download using gh CLI
+ print(f"[{platform}]")
+ if download_artifact_gh(version, artifact_name, temp_path, args.verbose):
+ # Get list of files in the archive
+ archive_files = get_archive_files(temp_path)
+
+ # Check out only those specific files from P4 before extracting
+ if archive_files:
+ checkout_files_p4(target_dir, archive_files, args.verbose)
+
+ # Extract to platform-specific directory
+ if extract_artifact(temp_path, target_dir, artifact_name):
+ # Revert any files that didn't actually change
+ if archive_files:
+ revert_unchanged_files_p4(target_dir, archive_files, args.verbose)
+
+ # Remove temporary zip file
+ temp_path.unlink()
+ success_count += 1
+ else:
+ failed_artifacts.append(f"{platform} ({artifact_name})")
+ else:
+ failed_artifacts.append(f"{platform} ({artifact_name})")
+ print()
+
+ # Summary
+ print("=" * 60)
+ print(f"Download Summary:")
+ print(f" Successful: {success_count}/{len(artifacts)}")
+ if failed_artifacts:
+ print(f" Failed:")
+ for artifact in failed_artifacts:
+ print(f" - {artifact}")
+ sys.exit(1)
+ else:
+ print(f" All artifacts downloaded successfully!")
+ print(f" Location: {output_dir}")
+
+
+if __name__ == "__main__":
+ main()