From f632bd1069868538d0a8aa69a03c217e9060a633 Mon Sep 17 00:00:00 2001 From: Nathan Chancellor Date: Wed, 11 Dec 2024 09:51:26 -0700 Subject: [PATCH] python: Add and use smarter subprocess.run() wrappers Signed-off-by: Nathan Chancellor --- fish/functions/cbl_lkt.fish | 6 +- python/lib/kernel.py | 69 ++++++---------- python/lib/setup.py | 107 +++++++++---------------- python/lib/utils.py | 96 +++++++++++++++++----- python/pgo-llvm-builder/build.py | 70 +++++++--------- python/pgo-llvm-builder/stitch_rust.py | 43 +++++----- python/scripts/cbl_bld_krnl_pkg.py | 101 +++++++---------------- python/scripts/cbl_bld_krnl_vm.py | 4 +- python/scripts/cbl_gen_build_report.py | 30 ++++--- python/scripts/cbl_gen_line.py | 17 +--- python/scripts/cbl_ptchmn.py | 41 +++------- python/scripts/cbl_report.py | 88 ++++++++------------ python/scripts/cbl_vmm.py | 23 ++---- python/scripts/gen_patch_cc.py | 17 ++-- python/scripts/korg_tc.py | 4 +- python/scripts/nas_sync.py | 9 +-- python/scripts/oci_rm.py | 14 ++-- python/scripts/test_boot_utils.py | 11 ++- python/scripts/tmxrun.py | 3 +- python/scripts/upd_distro.py | 9 +-- python/setup/alpine.py | 13 ++- python/setup/arch.py | 22 +++-- python/setup/deb.py | 19 ++--- python/setup/debian.py | 7 +- python/setup/equinix.py | 27 ++----- python/setup/fedora.py | 31 +++---- python/setup/macos.py | 37 +++++---- python/setup/ubuntu.py | 3 +- 28 files changed, 390 insertions(+), 531 deletions(-) diff --git a/fish/functions/cbl_lkt.fish b/fish/functions/cbl_lkt.fish index 5254cb8b3..7b923ddf9 100644 --- a/fish/functions/cbl_lkt.fish +++ b/fish/functions/cbl_lkt.fish @@ -258,8 +258,8 @@ function cbl_lkt -d "Tests a Linux kernel with llvm-kernel-testing" set report $log_folder/report.txt cbl_gen_build_report $log_folder - set log_files (python3 -c "from pathlib import Path -import subprocess + set log_files (PYTHONPATH=$PYTHON_FOLDER python3 -c "from pathlib import Path +import lib.utils # Gmail has a maximum attachment size of 25MB MAX_SIZE = 25000000 @@ -286,7 +286,7 @@ if total_size > MAX_SIZE: '--zstd', ] cmd += [str(file.relative_to(log_folder)) for file in files] - subprocess.run(cmd, check=True) + lib.utils.run(cmd) if tarball.stat().st_size > MAX_SIZE: raise RuntimeError('Tarball is greater than 25MB??') diff --git a/python/lib/kernel.py b/python/lib/kernel.py index f3f157889..6a7ee440a 100755 --- a/python/lib/kernel.py +++ b/python/lib/kernel.py @@ -5,7 +5,7 @@ import os from pathlib import Path import shutil -import subprocess +from subprocess import CalledProcessError import sys import time @@ -26,8 +26,8 @@ def prepare_source(base_name, base_ref='origin/master'): source_folder = Path(os.environ['CBL_SRC_P'], base_name) - subprocess.run(['git', 'remote', 'update', '--prune', 'origin'], check=True, cwd=source_folder) - subprocess.run(['git', 'reset', '--hard', base_ref], check=True, cwd=source_folder) + lib.utils.call_git(source_folder, ['remote', 'update', '--prune', 'origin']) + lib.utils.call_git_loud(source_folder, ['reset', '--hard', base_ref]) reverts = [] patches = [] @@ -42,10 +42,7 @@ def prepare_source(base_name, base_ref='origin/master'): patches.append('https://lore.kernel.org/all/20241210-bcachefs-fix-declaration-after-label-err-v1-1-22c705fc47e8@kernel.org/') # bcachefs: Add empty statement between label and declaration in check_inode_hash_info_matches_root() # yapf: enable - # pylint: disable=subprocess-run-check try: - common_kwargs = {'check': True, 'cwd': source_folder, 'text': True} - for revert in reverts: if isinstance(revert, tuple): 
commit_range = revert[0] @@ -55,57 +52,41 @@ def prepare_source(base_name, base_ref='origin/master'): raise RuntimeError(f"No git range indicator in {commit_range}") # generate diff from range - range_diff = subprocess.run( # noqa: PLW1510 - ['git', 'diff', commit_range], - **common_kwargs, - capture_output=True).stdout + range_diff = lib.utils.call_git(source_folder, ['diff', commit_range]).stdout # apply diff in reverse - subprocess.run( # noqa: PLW1510 - ['git', 'apply', '--3way', '--reverse'], - **common_kwargs, - input=range_diff) + lib.utils.call_git_loud(source_folder, ['apply', '--3way', '--reverse'], + input=range_diff) # commit the result - subprocess.run( # noqa: PLW1510 - ['git', 'commit', '-m', commit_msg], **common_kwargs) + lib.utils.call_git_loud(source_folder, ['commit', '-m', commit_msg]) else: - subprocess.run( # noqa: PLW1510 - ['git', 'revert', '--mainline', '1', '--no-edit', revert], **common_kwargs) + lib.utils.call_git_loud(source_folder, + ['revert', '--mainline', '1', '--no-edit', revert]) for patch in patches: if isinstance(patch, Path): - subprocess.run(['git', 'am', '-3', patch], **common_kwargs) # noqa: PLW1510 + lib.utils.call_git_loud(source_folder, ['am', '-3', patch]) elif patch.startswith('https://lore.kernel.org/'): - subprocess.run( # noqa: PLW1510 - ['b4', 'shazam', '-l', '-P', '_', '-s', patch], **common_kwargs) + lib.utils.run(['b4', 'shazam', '-l', '-P', '_', '-s', patch], cwd=source_folder) elif patch.startswith(('https://', 'http://')): - patch_input = subprocess.run(['curl', '-LSs', patch], - capture_output=True, - check=True, - text=True).stdout - subprocess.run( # noqa: PLW1510 - ['git', 'am', '-3'], **common_kwargs, input=patch_input) + patch_input = lib.utils.curl([patch]).decode('utf-8') + lib.utils.call_git_loud(source_folder, ['am', '-3'], input=patch_input) else: raise RuntimeError(f"Can't handle {patch}?") for commit in commits: - patch_input = subprocess.run(['git', 'fp', '-1', '--stdout', commit], - capture_output=True, - check=True, - cwd=Path(os.environ['CBL_SRC_P'], 'linux-next'), - text=True).stdout - subprocess.run(['git', 'am', '-3'], **common_kwargs, input=patch_input) # noqa: PLW1510 - # pylint: enable=subprocess-run-check - except subprocess.CalledProcessError as err: - subprocess.run(['git', 'ama'], check=False, cwd=source_folder) + patch_input = lib.utils.call_git(Path(os.environ['CBL_SRC_P'], 'linux-next'), + ['fp', '-1', '--stdout', commit]).stdout + lib.utils.call_git_loud(source_folder, ['am', '-3'], input=patch_input) + except CalledProcessError as err: + lib.utils.call_git(source_folder, 'ama', check=False) sys.exit(err.returncode) # Basically '$binary --version | head -1' def get_tool_version(binary_path): - return subprocess.run([binary_path, '--version'], capture_output=True, check=True, - text=True).stdout.splitlines()[0] + return lib.utils.chronic([binary_path, '--version']).stdout.splitlines()[0] def kmake(variables, @@ -208,9 +189,11 @@ def kmake(variables, if not (gnu_time := shutil.which('time')): raise RuntimeError('Could not find time binary in PATH?') make_cmd = [gnu_time, '-v', *make_cmd] - lib.utils.print_cmd(make_cmd) - if not use_time: + else: start_time = time.time() - subprocess.run(make_cmd, check=True, env=env, stdin=stdin) - if not use_time: - print(f"\nTime: {lib.utils.get_duration(start_time)}") + try: + lib.utils.run(make_cmd, env=env, stdin=stdin, show_cmd=True) + finally: + if not use_time: + # pylint: disable-next=possibly-used-before-assignment + print(f"\nTime: 
{lib.utils.get_duration(start_time)}") diff --git a/python/lib/setup.py b/python/lib/setup.py index 641bb5dec..16782297f 100755 --- a/python/lib/setup.py +++ b/python/lib/setup.py @@ -12,7 +12,6 @@ import shutil import socket import sys -import subprocess sys.path.append(str(Path(__file__).resolve().parents[1])) # pylint: disable=wrong-import-position @@ -123,11 +122,11 @@ def write(self, path=None, dryrun=False): path = Path('/etc/fstab') lib.utils.print_or_write_text(path, self._gen_str(), dryrun) if not dryrun: - subprocess.run(['systemctl', 'daemon-reload'], check=True) + lib.utils.run(['systemctl', 'daemon-reload']) def add_user_to_group(groupname, username): - subprocess.run(['usermod', '-aG', groupname, username], check=True) + lib.utils.run(['usermod', '-aG', groupname, username]) def add_user_to_group_if_exists(groupname, username): @@ -154,11 +153,11 @@ def check_root(): # Easier than os.walk() + shutil.chown() def chown(new_user, folder): - subprocess.run(['chown', '-R', f"{new_user}:{new_user}", folder], check=True) + lib.utils.run(['chown', '-R', f"{new_user}:{new_user}", folder]) def chpasswd(user_name, new_password): - subprocess.run(['chpasswd'], check=True, input=f"{user_name}:{new_password}", text=True) + lib.utils.run('chpasswd', input=f"{user_name}:{new_password}") def chsh_fish(username): @@ -168,19 +167,15 @@ def chsh_fish(username): if fish not in Path('/etc/shells').read_text(encoding='utf-8'): raise RuntimeError(f"{fish} is not in /etc/shells?") - subprocess.run(['chsh', '-s', fish, username], check=True) + lib.utils.run(['chsh', '-s', fish, username]) def clone_env(username): if not (env_tmp := Path('/tmp/env')).exists(): # noqa: S108 - subprocess.run(['git', 'clone', 'https://github.com/nathanchance/env', env_tmp], check=True) + lib.utils.run(['git', 'clone', 'https://github.com/nathanchance/env', env_tmp]) chown(username, env_tmp) -def curl(curl_args): - return subprocess.run(['curl', '-fLSs', *curl_args], capture_output=True, check=True).stdout - - def dnf(dnf_arguments): lib.utils.run_as_root(['dnf', *dnf_arguments]) @@ -193,24 +188,16 @@ def enable_tailscale(): def fetch_gpg_key(source_url, dest): - # Use curl to avoid requests - key_data = curl([source_url]) - # Dearmor if necessary - if key_data[0:2] != b'\x99\x02': - key_data = subprocess.run(['gpg', '--dearmor'], - capture_output=True, - check=True, - input=key_data).stdout + if (key_data := lib.utils.curl(source_url))[0:2] != b'\x99\x02': + key_data = lib.utils.chronic(['gpg', '--dearmor'], input=key_data).stdout dest.write_bytes(key_data) def get_active_ethernet_info(): nmcli_cmd = ['nmcli', '-f', 'TYPE,NAME,DEVICE', '-t', 'connection', 'show', '--active'] - active_connections = subprocess.run(nmcli_cmd, capture_output=True, check=True, - text=True).stdout.splitlines() - for line in active_connections: + for line in lib.utils.chronic(nmcli_cmd).stdout.splitlines(): if 'ethernet' in line: return line.split(':')[1:] return None @@ -223,10 +210,7 @@ def get_env_root(): def get_glibc_version(): - ldd_version_out = subprocess.run(['ldd', '--version'], - capture_output=True, - check=True, - text=True).stdout + ldd_version_out = lib.utils.chronic(['ldd', '--version']).stdout ldd_version = ldd_version_out.split('\n')[0].split(' ')[-1].split('.') if len(ldd_version) < 3: ldd_version += [0] @@ -238,10 +222,8 @@ def get_hostname(): def get_ip_addr_for_intf(intf): - ip_out = subprocess.run(['ip', 'addr'], capture_output=True, check=True, - text=True).stdout.split('\n') ip_addr = None - for line in ip_out: + for 
line in lib.utils.chronic(['ip', 'addr']).stdout.split('\n'): ip_a_regex = fr'inet\s+(\d{{1,3}}\.\d{{1,3}}\.\d{{1,3}}\.\d{{1,3}})/\d+\s+.*{intf}$' if (match := re.search(ip_a_regex, line)): ip_addr = match.groups()[0] @@ -295,10 +277,7 @@ def is_equinix(): def is_installed(package_to_check): if shutil.which('pacman'): - pacman_packages = subprocess.run(['pacman', '-Qq'], - capture_output=True, - check=True, - text=True).stdout + pacman_packages = lib.utils.chronic(['pacman', '-Qq']).stdout return bool(re.search(f"^{package_to_check}$", pacman_packages, flags=re.M)) if shutil.which('dnf'): @@ -308,11 +287,7 @@ def is_installed(package_to_check): else: raise RuntimeError('Not implemented for the current package manager!') - try: - subprocess.run([*cmd, package_to_check], capture_output=True, check=True) - except subprocess.CalledProcessError: - return False - return True + return lib.utils.run_check_rc_zero([*cmd, package_to_check]) def is_lxc(): @@ -334,8 +309,7 @@ def is_virtual_machine(): def is_systemd_init(): if not shutil.which('systemctl'): return False - res = subprocess.run(['systemctl', 'is-system-running', '--quiet'], check=False) - return res.returncode == 0 + return lib.utils.run_check_rc_zero(['systemctl', 'is-system-running', '--quiet']) def pacman(args): @@ -380,13 +354,13 @@ def set_ip_addr_for_intf(con_name, intf, ip_addr): raise RuntimeError(f"{ip_addr} not supported by script!") dns = ['8.8.8.8', '8.8.4.4', '1.1.1.1', local_dns] - subprocess.run([*nmcli_mod, 'ipv4.addresses', f"{ip_addr}/24"], check=True) - subprocess.run([*nmcli_mod, 'ipv4.dns', ' '.join(dns)], check=True) - subprocess.run([*nmcli_mod, 'ipv4.gateway', gateway], check=True) - subprocess.run([*nmcli_mod, 'ipv4.method', 'manual'], check=True) - subprocess.run(['nmcli', 'connection', 'reload'], check=True) - subprocess.run(['nmcli', 'connection', 'down', con_name], check=True) - subprocess.run(['nmcli', 'connection', 'up', con_name, 'ifname', intf], check=True) + lib.utils.run([*nmcli_mod, 'ipv4.addresses', f"{ip_addr}/24"]) + lib.utils.run([*nmcli_mod, 'ipv4.dns', ' '.join(dns)]) + lib.utils.run([*nmcli_mod, 'ipv4.gateway', gateway]) + lib.utils.run([*nmcli_mod, 'ipv4.method', 'manual']) + lib.utils.run(['nmcli', 'connection', 'reload']) + lib.utils.run(['nmcli', 'connection', 'down', con_name]) + lib.utils.run(['nmcli', 'connection', 'up', con_name, 'ifname', intf]) current_ip = get_ip_addr_for_intf(intf) if current_ip != ip_addr: @@ -397,14 +371,11 @@ def set_ip_addr_for_intf(con_name, intf, ip_addr): def set_date_time(): if is_systemd_init(): - subprocess.run(['timedatectl', 'set-timezone', 'America/Phoenix'], check=True) + lib.utils.run(['timedatectl', 'set-timezone', 'America/Phoenix']) def setup_initial_fish_config(username): - fish_ver = subprocess.run(['fish', '-c', 'echo $version'], - capture_output=True, - check=True, - text=True).stdout.strip() + fish_ver = lib.utils.chronic(['fish', '-c', 'echo $version']).stdout.strip() if tuple(int(x) for x in fish_ver.split('.')) < (3, 4, 0): raise RuntimeError(f"{fish_ver} is less than 3.4.0!") @@ -461,15 +432,12 @@ def setup_libvirt(username): systemctl_enable(['libvirtd.service']) # Make the default network come up automatically on restart. 
- subprocess.run(['virsh', 'net-autostart', 'default'], check=True) + lib.utils.run(['virsh', 'net-autostart', 'default']) # Start network if it is not already started - net_info = subprocess.run(['virsh', 'net-info', 'default'], - capture_output=True, - check=True, - text=True).stdout + net_info = lib.utils.chronic(['virsh', 'net-info', 'default']).stdout if re.search('^Active.*no', net_info, flags=re.M): - subprocess.run(['virsh', 'net-start', 'default'], check=True) + lib.utils.run(['virsh', 'net-start', 'default']) def setup_mnt_nas(): @@ -491,16 +459,14 @@ def setup_mnt_ssd(user_name): chown(user_name, mnt_point) if mnt_point not in (fstab := Fstab()): - partuuid = subprocess.run(['blkid', '-o', 'value', '-s', 'PARTUUID', ssd_partition], - capture_output=True, - check=True, - text=True).stdout.strip() + partuuid = lib.utils.chronic(['blkid', '-o', 'value', '-s', 'PARTUUID', + ssd_partition]).stdout.strip() fstab[mnt_point] = FstabItem(f"PARTUUID={partuuid}", mnt_point, 'ext4', 'defaults,noatime', '0', '1') fstab.write() - subprocess.run(['mount', '-a'], check=True) + lib.utils.run(['mount', '-a']) if shutil.which('docker'): docker_json = Path('/etc/docker/daemon.json') @@ -535,9 +501,8 @@ def setup_ssh_authorized_keys(user_name): else: raise RuntimeError( 'No suitable download command could be found for downloading SSH key!') - ssh_key = subprocess.run([*cmd, 'https://github.com/nathanchance.keys'], - capture_output=True, - check=True).stdout + ssh_key = lib.utils.chronic([*cmd, 'https://github.com/nathanchance.keys'], + text=None).stdout ssh_authorized_keys.write_bytes(ssh_key) os.umask(old_umask) chown(user_name, ssh_authorized_keys.parent) @@ -557,7 +522,7 @@ def setup_sudo_symlink(): raise RuntimeError(f"Can't handle doas location ('{doas}')?") sudo_bin.symlink_to(relative_doas) - subprocess.run(['stow', '-d', sudo_prefix.parent, '-R', sudo_prefix.name, '-v'], check=True) + lib.utils.run(['stow', '-d', sudo_prefix.parent, '-R', sudo_prefix.name, '-v']) def systemctl_enable(items_to_enable, now=True): @@ -566,7 +531,7 @@ def systemctl_enable(items_to_enable, now=True): cmd.append('--now') cmd += items_to_enable - subprocess.run(cmd, check=True) + lib.utils.run(cmd) def user_exists(user): @@ -580,12 +545,12 @@ def user_exists(user): def using_systemd_boot(): if not shutil.which('bootctl'): return False - return subprocess.run(['bootctl', '--quiet', 'is-installed'], check=False).returncode == 0 + return lib.utils.run_check_rc_zero(['bootctl', '--quiet', 'is-installed']) def umount_gracefully(folder): - if subprocess.run(['mountpoint', '-q', folder], check=False).returncode == 0: - subprocess.run(['umount', folder], check=True) + if lib.utils.run_check_rc_zero(['mountpoint', '-q', folder]): + lib.utils.run(['umount', folder]) def zypper(zypper_args): diff --git a/python/lib/utils.py b/python/lib/utils.py index 306939bb2..8c52d1aba 100755 --- a/python/lib/utils.py +++ b/python/lib/utils.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: MIT # Copyright (C) 2022-2023 Nathan Chancellor +import copy import os from pathlib import Path import shlex @@ -11,15 +12,42 @@ import time -def call_git(directory, cmd): - return subprocess.run(['git', *cmd], capture_output=True, check=True, cwd=directory, text=True) +def call_git(directory, cmd, **kwargs): + kwargs.setdefault('cwd', directory) + + git_cmd = ['git'] + (git_cmd.append if isinstance(cmd, (str, os.PathLike)) else git_cmd.extend)(cmd) + + if kwargs.pop('show_cmd', False): + cmd_to_print = git_cmd.copy() + if kwargs['cwd']: + cmd_to_print[1:1] 
= ['-C', kwargs['cwd']] + print_cmd(cmd_to_print) + + return chronic(git_cmd, **kwargs) + + +def call_git_loud(directory, cmd, **kwargs): + return call_git(directory, cmd, **kwargs, capture_output=False) + + +def chronic(*args, **kwargs): + kwargs.setdefault('capture_output', True) + + return run(*args, **kwargs) + + +def curl(cmd, **kwargs): + kwargs.setdefault('text', None) + + curl_cmd = ['curl', '-LSs'] + (curl_cmd.append if isinstance(cmd, str) else curl_cmd.extend)(cmd) + + return chronic(curl_cmd, **kwargs).stdout def detect_virt(*args): - return subprocess.run(['systemd-detect-virt', *args], - capture_output=True, - check=False, - text=True).stdout.strip() + return chronic(['systemd-detect-virt', *args], check=False).stdout.strip() def get_duration(start_seconds, end_seconds=None): @@ -42,8 +70,8 @@ def get_duration(start_seconds, end_seconds=None): return ' '.join(parts) -def get_git_output(directory, cmd): - return call_git(directory, cmd).stdout.strip() +def get_git_output(directory, cmd, **kwargs): + return call_git(directory, cmd, **kwargs).stdout.strip() def in_container(): @@ -66,14 +94,16 @@ def path_and_text(*args): return path, None -def print_cmd(command, show_cmd_location=False, end='\n'): +def print_cmd(cmd, show_cmd_location=False, end='\n'): if show_cmd_location: cmd_loc = '(container) ' if in_container() else '(host) ' else: cmd_loc = '' - print(f"{cmd_loc}$ {' '.join([shlex.quote(str(elem)) for elem in command])}", - end=end, - flush=True) + if isinstance(cmd, (str, os.PathLike)): + cmd_str = cmd + else: + cmd_str = ' '.join(shlex.quote(str(elem)) for elem in cmd) + print(f"{cmd_loc}$ {cmd_str}", end=end, flush=True) def print_header(string): @@ -101,21 +131,51 @@ def print_red(msg): print_color('\033[01;31m', msg) +def run(*args, **kwargs): + kwargs.setdefault('check', True) + + kwargs.setdefault('text', True) + if (input_val := kwargs.get('input')) and not isinstance(input_val, str): + kwargs['text'] = None + + if (show_cmd_location := kwargs.pop('show_cmd_location', False)) or kwargs.pop( + 'show_cmd', False): + print_cmd(*args, show_cmd_location=show_cmd_location) + + if env := kwargs.pop('env', None): + kwargs['env'] = os.environ | copy.deepcopy(env) + + try: + # This function defaults check=True so if check=False here, it is explicit + # pylint: disable-next=subprocess-run-check + return subprocess.run(*args, **kwargs) # noqa: PLW1510 + except subprocess.CalledProcessError as err: + if kwargs.get('capture_output'): + print(err.stdout) + print(err.stderr) + raise err + + def run_as_root(full_cmd): - cmd_copy = full_cmd.copy() - # If we have to escalate via 'sudo', print the command so it can be audited - # if necessary. + cmd_copy = [full_cmd] if isinstance(full_cmd, (str, os.PathLike)) else full_cmd.copy() + if os.geteuid() != 0: cmd_copy.insert(0, 'sudo') - print_cmd(cmd_copy, show_cmd_location=True) - subprocess.run(cmd_copy, check=True) + + # If we have to escalate via 'sudo', print the command so it can be audited + # if necessary. 
+ run(cmd_copy, show_cmd_location=cmd_copy[0] == 'sudo') + + +def run_check_rc_zero(*args, **kwargs): + return chronic(*args, **kwargs, check=False).returncode == 0 def print_or_run_cmd(cmd, dryrun, end='\n\n'): if dryrun: print_cmd(cmd, end=end) else: - subprocess.run(cmd, check=True) + run(cmd) def print_or_write_text(path, text, dryrun): diff --git a/python/pgo-llvm-builder/build.py b/python/pgo-llvm-builder/build.py index 7dba9cc0d..df3e85ee2 100755 --- a/python/pgo-llvm-builder/build.py +++ b/python/pgo-llvm-builder/build.py @@ -6,7 +6,12 @@ from pathlib import Path import platform import shutil -import subprocess +import sys + +sys.path.append(str(Path(__file__).resolve().parents[1])) +# pylint: disable=wrong-import-position +import lib.utils +# pylint: enable=wrong-import-position IMAGE_TAG = 'pgo-llvm-builder' ROOT = Path(__file__).resolve().parent @@ -79,14 +84,13 @@ # First, build container if necessary if not (build_container := args.force_build_container): - podman_image_cmd = ['podman', 'image', 'ls', '--format', 'json'] - cmd_out = subprocess.run(podman_image_cmd, capture_output=True, check=True, text=True).stdout + cmd_out = lib.utils.chronic(['podman', 'image', 'ls', '--format', 'json']).stdout build_container = not [ name for item in json.loads(cmd_out) if 'Names' in item for name in item['Names'] if 'pgo-llvm-builder' in name ] if build_container: - podman_build_cmd = [ + lib.utils.run([ 'podman', 'build', *[f"--build-arg={arg}" for arg in args.build_args], @@ -95,8 +99,7 @@ '--tag', IMAGE_TAG, ROOT, - ] - subprocess.run(podman_build_cmd, check=True) + ]) build_folder = Path(args.build_folder).resolve() if args.build_folder else BUILD @@ -108,9 +111,8 @@ raise FileNotFoundError('Invalid llvm-project provided, no llvm folder?') elif not (llvm_folder := Path(GIT, 'llvm-project')).exists(): llvm_folder.parent.mkdir(exist_ok=True, parents=True) - git_clone_cmd = ['git', 'clone', 'https://github.com/llvm/llvm-project', llvm_folder] - subprocess.run(git_clone_cmd, check=True) -subprocess.run(['git', 'remote', 'update'], check=True, cwd=llvm_folder) + lib.utils.call_git_loud(None, ['clone', 'https://github.com/llvm/llvm-project', llvm_folder]) +lib.utils.call_git_loud(llvm_folder, ['remote', 'update']) if args.tc_build_folder: tc_build_folder = Path(args.tc_build_folder).resolve() @@ -119,15 +121,10 @@ else: if not (tc_build_folder := Path(GIT, 'tc-build')).exists(): tc_build_folder.parent.mkdir(exist_ok=True, parents=True) - git_clone_cmd = [ - 'git', - 'clone', - 'https://github.com/ClangBuiltLinux/tc-build', - tc_build_folder, - ] - subprocess.run(git_clone_cmd, check=True) - subprocess.run(['git', 'remote', 'update'], check=True, cwd=tc_build_folder) - subprocess.run(['git', 'reset', '--hard', '@{u}'], check=True, cwd=tc_build_folder) + lib.utils.call_git_loud( + None, ['clone', 'https://github.com/ClangBuiltLinux/tc-build', tc_build_folder]) + lib.utils.call_git_loud(tc_build_folder, ['remote', 'update']) + lib.utils.call_git_loud(tc_build_folder, ['reset', '--hard', '@{u}']) llvm_git_dir = Path(llvm_folder, '.git') static_mounts = [ @@ -209,11 +206,7 @@ if 'llvmorg' not in ref: date_info = datetime.datetime.now(datetime.timezone.utc).strftime('%Y%m%d-%H%M%S') - ref_info = subprocess.run(['git', 'show', '--format=%H', '-s', ref], - capture_output=True, - check=True, - cwd=llvm_folder, - text=True).stdout.strip() + ref_info = lib.utils.get_git_output(llvm_folder, ['show', '--format=%H', '-s', ref]) VERSION += f"-{ref_info}-{date_info}" if (llvm_install := 
Path(install_folder, @@ -226,10 +219,9 @@ if (worktree := Path(SRC, 'llvm-project')).exists(): shutil.rmtree(worktree) - subprocess.run(['git', 'worktree', 'prune'], check=True, cwd=llvm_folder) + lib.utils.call_git(llvm_folder, ['worktree', 'prune']) - git_worktree_cmd = ['git', 'worktree', 'add', '--detach', worktree, ref] - subprocess.run(git_worktree_cmd, check=True, cwd=llvm_folder) + lib.utils.call_git_loud(llvm_folder, ['worktree', 'add', '--detach', worktree, ref]) # Python 3.12 deprecates and changes a few things in the tests. If we are # running the tests, make sure we have the fixes. It is safe to apply them @@ -245,26 +237,20 @@ 'int(frame_size/16 - 4)) * 16') gen_cfi_funcs.write_text(new_text, encoding='utf-8') else: - subprocess.run([ - 'git', + lib.utils.call_git_loud(worktree, [ 'cherry-pick', '--no-commit', '015c43178f9d8531b6bcd1685dbf72b7d837cf5a', - ], - check=True, - cwd=worktree) + ]) # https://github.com/llvm/llvm-project/commit/01fdc2a3c9e0df4e54bb9b88f385f68e7b0d808c if (uctc := Path(worktree, 'llvm/utils/update_cc_test_checks.py')).exists(): uctc_txt = uctc.read_text(encoding='utf-8') if 'distutils.spawn' in uctc_txt: - subprocess.run([ - 'git', + lib.utils.call_git_loud(worktree, [ 'cherry-pick', '--no-commit', '01fdc2a3c9e0df4e54bb9b88f385f68e7b0d808c', - ], - check=True, - cwd=worktree) + ]) shutil.rmtree(build_folder, ignore_errors=True) build_folder.mkdir(exist_ok=True, parents=True) @@ -306,10 +292,10 @@ maj_ver = int(VERSION.split('.', 1)[0]) if (maj_ver >= 16 and MACHINE == 'x86_64') or (maj_ver >= 18 and MACHINE == 'aarch64'): build_cmd += ['--bolt', '--lto', 'thin'] - subprocess.run(build_cmd, check=True) + lib.utils.run(build_cmd) llvm_tarball = Path(llvm_install.parent, f"{llvm_install.name}.tar") - tar_cmd = [ + lib.utils.run([ 'tar', '--create', '--directory', @@ -317,19 +303,17 @@ '--file', llvm_tarball, llvm_install.name, - ] - subprocess.run(tar_cmd, check=True) + ]) llvm_tarball_compressed = llvm_tarball.with_suffix('.tar.zst') - zstd_cmd = [ + lib.utils.run([ 'zstd', '-19', '-T0', '-o', llvm_tarball_compressed, llvm_tarball, - ] - subprocess.run(zstd_cmd, check=True) + ]) INFO_TEXT = ('\n' f"Tarball is available at: {llvm_tarball}\n" diff --git a/python/pgo-llvm-builder/stitch_rust.py b/python/pgo-llvm-builder/stitch_rust.py index c4d8a1077..99ca1625d 100755 --- a/python/pgo-llvm-builder/stitch_rust.py +++ b/python/pgo-llvm-builder/stitch_rust.py @@ -4,7 +4,6 @@ import hashlib from pathlib import Path import shutil -import subprocess import sys if sys.version_info >= (3, 11, 0): import tomllib @@ -14,6 +13,11 @@ ) sys.exit(1) +sys.path.append(str(Path(__file__).resolve().parents[1])) +# pylint: disable=wrong-import-position +import lib.utils +# pylint: enable=wrong-import-position + ROOT = Path(__file__).resolve().parent RUST = Path(ROOT, 'rust') @@ -21,15 +25,6 @@ SUCCESS = '\033[01;32mSUCCESS\033[0m' -def run_cmd_print(cmd, **kwargs): - try: - subprocess.run(cmd, capture_output=True, check=True, text=True, **kwargs) - except subprocess.CalledProcessError as err: - print(f"{FAILED} ({cmd[0]} failed with '{err.stderr}')") - sys.exit(err.returncode) - print(SUCCESS) - - def download_and_verify(url, dst): if dst.exists(): return @@ -40,19 +35,22 @@ def download_and_verify(url, dst): rust_gpg.unlink() rust_gpg.mkdir(parents=True) print(f"Preparing GPG folder ('{rust_gpg}')... 
", end='') - run_cmd_print([*base_gpg_cmd, '--recv-keys', '85AB96E6FA1BE5FE']) - print() + lib.utils.chronic([*base_gpg_cmd, '--recv-keys', '85AB96E6FA1BE5FE']) + print(SUCCESS, end='\n\n') print(f"Downloading {url} to {dst}... ", end='') - run_cmd_print(['curl', '-LSs', '-o', dst, url]) + lib.utils.curl(['-o', dst, url]) + print(SUCCESS, end='\n\n') print(f"Downloading GPG signature for {dst.name}... ", end='') gpg_url = f"{url}.asc" (gpg_dst := Path(dst.parent, f"{dst.name}.asc")).unlink(missing_ok=True) - run_cmd_print(['curl', '-LSs', '-o', gpg_dst, gpg_url]) + lib.utils.curl(['-o', gpg_dst, gpg_url]) + print(SUCCESS, end='\n\n') print(f"Verifying {dst.name} with GPG signature... ", end='') - run_cmd_print([*base_gpg_cmd, '--verify', gpg_dst, dst]) + lib.utils.chronic([*base_gpg_cmd, '--verify', gpg_dst, dst]) + print(SUCCESS, end='\n\n') gpg_dst.unlink() @@ -85,7 +83,8 @@ def prepare_rust_components(toml, target): print(SUCCESS) print(f"Extracting {pkg_name} to {dst}... ", end='') - run_cmd_print(['tar', '-C', RUST, '-xzf', pkg_tarball]) + lib.utils.chronic(['tar', '-C', RUST, '-xzf', pkg_tarball]) + print(SUCCESS, end='\n\n') pkg_tarball.unlink() print() @@ -164,7 +163,7 @@ def generate_llvm_rust_tarball(scripts, llvm_tarball, rust_version): prefix.mkdir(parents=True) # Extract LLVM tarball into prefix - subprocess.run([ + lib.utils.run([ *tar_cmd, '--directory', prefix, @@ -172,16 +171,15 @@ def generate_llvm_rust_tarball(scripts, llvm_tarball, rust_version): '--file', llvm_tarball, '--strip-components=1', - ], - check=True) + ]) # Install Rust components into prefix for script in scripts: # Use '--disable-ldconfig' as the prefix is not '/usr/local' - subprocess.run([script, '--disable-ldconfig', f"--prefix={prefix}"], check=True) + lib.utils.run([script, '--disable-ldconfig', f"--prefix={prefix}"]) # Repackage prefix into LLVM+Rust tarball - subprocess.run([ + lib.utils.run([ *tar_cmd, '--create', '--directory', @@ -189,8 +187,7 @@ def generate_llvm_rust_tarball(scripts, llvm_tarball, rust_version): '--file', llvm_rust_tarball, prefix.name, - ], - check=True) + ]) shutil.rmtree(prefix) print(f"Modified tarball is now available at {llvm_rust_tarball}") diff --git a/python/scripts/cbl_bld_krnl_pkg.py b/python/scripts/cbl_bld_krnl_pkg.py index 543b567ee..bb54ec930 100755 --- a/python/scripts/cbl_bld_krnl_pkg.py +++ b/python/scripts/cbl_bld_krnl_pkg.py @@ -4,7 +4,7 @@ import os from pathlib import Path import shutil -import subprocess +from subprocess import DEVNULL import sys import korg_tc @@ -14,13 +14,15 @@ import lib.kernel # pylint: enable=wrong-import-position +CONFIG_URL = 'https://gitlab.archlinux.org/archlinux/packaging/packages/linux/-/raw/main/config' + def recreate_folder(folder): if folder.exists(): try: shutil.rmtree(folder) if folder.is_dir() else folder.unlink() except PermissionError: - subprocess.run(['sudo', 'rm', '--recursive', folder], check=True) + lib.utils.run_as_root(['rm', '--recursive', folder]) folder.mkdir(parents=True) @@ -61,17 +63,11 @@ def _prepare_files(self, _localmodconfig=False, _menuconfig=False, _extra_config src_config_file = Path(os.environ['ENV_FOLDER'], f"configs/kernel/{self._pkgname}.config") dst_config_file = Path(self._build_folder, '.config') base_sc_cmd = [Path(self._source_folder, 'scripts/config'), '--file', src_config_file] - kconfig_env = {'KCONFIG_CONFIG': src_config_file, **os.environ} + kconfig_env = {'KCONFIG_CONFIG': src_config_file} plain_make_vars = {'ARCH': 'x86_64', 'LOCALVERSION': '', 'O': self._build_folder} # Step 1: 
Copy default Arch configuration and set a few options - crl_cmd = [ - 'curl', - '-LSs', - '-o', src_config_file, - 'https://gitlab.archlinux.org/archlinux/packaging/packages/linux/-/raw/main/config', - ] # yapf: disable - subprocess.run(crl_cmd, check=True) + lib.utils.curl(['-o', src_config_file, CONFIG_URL]) sc_cmd = [ *base_sc_cmd, '-d', 'LOCALVERSION_AUTO', @@ -80,7 +76,7 @@ def _prepare_files(self, _localmodconfig=False, _menuconfig=False, _extra_config # until https://git.kernel.org/linus/c7ff693fa2094ba0a9d0a20feb4ab1658eff9c33 has been accounted for by Arch upstream '-e', 'MODULE_COMPRESS', ] # yapf: disable - subprocess.run(sc_cmd, check=True) + lib.utils.run(sc_cmd) # Step 2: Run olddefconfig lib.kernel.kmake(plain_make_vars.copy(), ['olddefconfig'], @@ -92,7 +88,7 @@ def _prepare_files(self, _localmodconfig=False, _menuconfig=False, _extra_config # Step 4: Enable ThinLTO, CFI, or UBSAN (and any other requested configurations) if self.extra_sc_args: - subprocess.run([*base_sc_cmd, *self.extra_sc_args], check=True) + lib.utils.run([*base_sc_cmd, *self.extra_sc_args]) self._kmake(['olddefconfig'], env=kconfig_env) # Copy new configuration into place @@ -100,9 +96,8 @@ def _prepare_files(self, _localmodconfig=False, _menuconfig=False, _extra_config shutil.copyfile(src_config_file, dst_config_file) self._kmake(['olddefconfig', 'prepare']) - subprocess.run( - ['git', '--no-pager', 'diff', '--no-index', src_config_file, dst_config_file], - check=False) + lib.utils.run(['git', '--no-pager', 'diff', '--no-index', src_config_file, dst_config_file], + check=False) print('Setting version...') Path(self._build_folder, 'localversion.10-pkgname').write_text('-llvm\n', encoding='utf-8') @@ -130,20 +125,16 @@ def package(self): modulesdir = Path(pkgdir, 'usr/lib/modules', self._kernver) print('Installing boot image...') - kernel_image = subprocess.run(['make', '-s', f"O={self._build_folder}", 'image_name'], - capture_output=True, - cwd=self._source_folder, - text=True, - check=False).stdout.strip() + kernel_image = lib.utils.chronic(['make', '-s', f"O={self._build_folder}", 'image_name'], + cwd=self._source_folder).stdout.strip() # systemd expects to find the kernel here to allow hibernation # https://github.com/systemd/systemd/commit/edda44605f06a41fb86b7ab8128dcf99161d2344 - subprocess.run([ + lib.utils.run([ 'install', '-Dm644', Path(self._build_folder, kernel_image), Path(modulesdir, 'vmlinuz'), - ], - check=True) + ]) # Used by mkinitcpio to name the kernel (pkgbase := Path(modulesdir, 'pkgbase')).write_text(f"{self._pkgname}\n", encoding='utf-8') @@ -165,11 +156,7 @@ def package(self): for link in ['source', 'build']: Path(modulesdir, link).unlink(missing_ok=True) - pkgver = subprocess.run(['git', 'describe'], - capture_output=True, - check=True, - cwd=self._source_folder, - text=True).stdout.strip().replace('-', '_') + pkgver = lib.utils.get_git_output(self._source_folder, 'describe').replace('-', '_') pkgbuild_text = fr""" pkgname={self._pkgname} pkgver={pkgver} @@ -193,19 +180,16 @@ def package(self): mv -v "$pkgroot"/pkg-prepared "$pkgroot"/pkg }}""" Path(pkgroot, 'PKGBUILD').write_text(pkgbuild_text, encoding='utf-8') - subprocess.run(['makepkg', '-R'], check=True, cwd=pkgroot) + lib.utils.run(['makepkg', '-R'], cwd=pkgroot) def prepare(self, base_ref, localmodconfig=False, menuconfig=False, extra_config_targets=None): lib.kernel.prepare_source(self._pkgname, base_ref) self._prepare_files(localmodconfig, menuconfig, extra_config_targets) - self._kernver = subprocess.run( + 
self._kernver = lib.utils.chronic( ['make', '-s', 'LOCALVERSION=', f"O={self._build_folder}", 'kernelrelease'], - capture_output=True, - cwd=self._source_folder, - text=True, - check=False).stdout.strip() + cwd=self._source_folder).stdout.strip() print(f"Prepared {self._pkgname} version {self._kernver}") @@ -222,27 +206,16 @@ def _prepare_files(self, localmodconfig, menuconfig, extra_config_targets=None): recreate_folder(self._build_folder) - crl_cmd = [ - 'curl', - '-LSs', - '-o', config, - 'https://gitlab.archlinux.org/archlinux/packaging/packages/linux/-/raw/main/config', - ] # yapf: disable - subprocess.run(crl_cmd, check=True) + lib.utils.curl(['-o', config, CONFIG_URL]) sc_cmd = [*base_sc_cmd, '-m', 'DRM', *self.extra_sc_args] - subprocess.run(sc_cmd, check=True) + lib.utils.run(sc_cmd) self._kmake(['olddefconfig']) if localmodconfig: if not (modprobedb := Path('/tmp/modprobed.db')).exists(): # noqa: S108 raise RuntimeError(f"localmodconfig requested without {modprobedb}!") - self._kmake(['localmodconfig'], - env={ - 'LSMOD': modprobedb, - **os.environ, - }, - stdin=subprocess.DEVNULL) + self._kmake(['localmodconfig'], env={'LSMOD': modprobedb}, stdin=DEVNULL) if menuconfig: self._kmake(['menuconfig']) @@ -268,34 +241,22 @@ def _prepare_files(self, _localmodconfig=False, _menuconfig=False, _extra_config for part in ['', '_dev', '_hwmon']: src_url = f"https://github.com/pop-os/system76-io-dkms/raw/master/system76-io{part}.c" dst_local = Path(self._source_folder, 'drivers/hwmon', src_url.rsplit('/', 1)[-1]) - subprocess.run(['curl', '-LSs', '-o', dst_local, src_url], check=True) + lib.utils.curl(['-o', dst_local, src_url]) git_add_files.append(dst_local.relative_to(self._source_folder)) with Path(self._source_folder, git_add_files[0]).open('a', encoding='utf-8') as file: file.write('obj-m += system76-io.o\n') - subprocess.run(['git', 'add', *git_add_files], check=True, cwd=self._source_folder) - subprocess.run(['git', 'commit', '-m', 'Add system76-io driver'], - check=True, - cwd=self._source_folder) - - git_kwargs = { - 'capture_output': True, - 'check': False, - 'cwd': self._source_folder, - 'text': True, - } - local_ver_parts = [] + lib.utils.call_git(self._source_folder, ['add', *git_add_files]) + lib.utils.call_git_loud(self._source_folder, ['commit', '-m', 'Add system76-io driver']) - # pylint: disable=subprocess-run-check - head = subprocess.run( # noqa: PLW1510 - ['git', 'rev-parse', '--verify', 'HEAD'], **git_kwargs).stdout.strip() - exact_match = subprocess.run( # noqa: PLW1510 - ['git', 'describe', '--exact-match'], **git_kwargs).stdout.strip() - if head and exact_match == '': - if (atag := subprocess.run( # noqa: PLW1510 - ['git', 'describe'], **git_kwargs).stdout.strip()): + local_ver_parts = [] + head = lib.utils.get_git_output(self._source_folder, ['rev-parse', '--verify', 'HEAD'], + check=False) + exact_match = lib.utils.get_git_output(self._source_folder, ['describe', '--exact-match'], + check=False) + if head and not exact_match: + if atag := lib.utils.get_git_output(self._source_folder, 'describe', check=False): local_ver_parts.append(f"{int(atag.split('-')[-2]):05}") local_ver_parts.append(f"g{head[0:12]}") - # pylint: enable=subprocess-run-check if local_ver_parts: Path(self._build_folder, diff --git a/python/scripts/cbl_bld_krnl_vm.py b/python/scripts/cbl_bld_krnl_vm.py index 1af98d291..9a4531050 100755 --- a/python/scripts/cbl_bld_krnl_vm.py +++ b/python/scripts/cbl_bld_krnl_vm.py @@ -7,7 +7,6 @@ from pathlib import Path import platform import shutil -import 
subprocess import sys import requests @@ -17,6 +16,7 @@ sys.path.append(str(Path(__file__).resolve().parents[1])) # pylint: disable=wrong-import-position import lib.kernel +import lib.utils # pylint: enable=wrong-import-position @@ -108,7 +108,7 @@ def parse_arguments(): def build_kernel_for_vm(kernel_src, add_make_targets, make_variables, config, menuconfig, vm_name): if Path(kernel_src, '.config').exists(): - subprocess.run(['git', '-C', src_folder, 'cl', '-q'], check=True) + lib.utils.call_git(src_folder, ['cl', '-q']) if (build := Path(make_variables['O'])).exists(): shutil.rmtree(build) build.mkdir(parents=True) diff --git a/python/scripts/cbl_gen_build_report.py b/python/scripts/cbl_gen_build_report.py index e853bebfd..8c4d70d65 100755 --- a/python/scripts/cbl_gen_build_report.py +++ b/python/scripts/cbl_gen_build_report.py @@ -5,7 +5,12 @@ from argparse import ArgumentParser from pathlib import Path import re -import subprocess +import sys + +sys.path.append(str(Path(__file__).resolve().parents[1])) +# pylint: disable=wrong-import-position +import lib.utils +# pylint: enable=wrong-import-position def parse_arguments(): @@ -25,11 +30,6 @@ def get_log(log_folder, key): return Path(log_folder, key + '.log') -def git_get(repo, cmd): - return subprocess.run(['git', *cmd], capture_output=True, check=True, cwd=repo, - text=True).stdout - - def generate_warnings(log_folder, src_folder): # Get full list of logs from folder, excluding internal logs for filtering sake internal_files = {elem + '.log' for elem in ['failed', 'info', 'skipped', 'success']} @@ -180,16 +180,14 @@ def generate_report(log_folder): for warning in warnings: report_text += f"{log}:{warning}" - if src_folder.exists(): - mfc = git_get(src_folder, ['mfc']).strip() - if mfc: - report_text += f"\n{src_folder.name} commit logs:\n\n" - branch = git_get(src_folder, ['bn']).strip() - if (remote := git_get(src_folder, ['rn', branch]).strip()): - since = f"{remote}/{branch}" - else: - since = f"{mfc}^" - report_text += git_get(src_folder, ['l', f"{since}^.."]) + if src_folder.exists() and (mfc := lib.utils.get_git_output(src_folder, 'mfc')): + report_text += f"\n{src_folder.name} commit logs:\n\n" + branch = lib.utils.get_git_output(src_folder, 'bn') + if remote := lib.utils.get_git_output(src_folder, ['rn', branch]): + since = f"{remote}/{branch}" + else: + since = f"{mfc}^" + report_text += lib.utils.call_git(src_folder, ['l', f"{since}^.."]).stdout return report_text diff --git a/python/scripts/cbl_gen_line.py b/python/scripts/cbl_gen_line.py index f666a101a..979ddbe12 100755 --- a/python/scripts/cbl_gen_line.py +++ b/python/scripts/cbl_gen_line.py @@ -4,7 +4,6 @@ import json import os from pathlib import Path -import subprocess import sys sys.path.append(str(Path(__file__).resolve().parents[1])) @@ -14,19 +13,11 @@ def generate_patch_lines(args): - try: - git_branch = lib.utils.get_git_output(args.directory, ['rev-parse', '--abbrev-ref', 'HEAD']) - except subprocess.CalledProcessError as err: - print(err.stderr) - raise err + git_branch = lib.utils.get_git_output(args.directory, ['rev-parse', '--abbrev-ref', 'HEAD']) if not git_branch.startswith('b4/'): raise RuntimeError(f"Not on a b4 managed branch? 
Have: {git_branch}") - b4_info_raw = subprocess.run(['b4', 'prep', '--show-info'], - capture_output=True, - check=True, - cwd=args.directory, - text=True).stdout + b4_info_raw = lib.utils.chronic(['b4', 'prep', '--show-info'], cwd=args.directory).stdout b4_info = dict(item.split(': ', 1) for item in b4_info_raw.splitlines()) commit_keys = [] @@ -48,9 +39,7 @@ def generate_patch_lines(args): def generate_pr_lines(args): for pr in args.prs: gh_pr_cmd = ['gh', '-R', 'llvm/llvm-project', 'pr', 'view', '--json', 'title,url', pr] - result = subprocess.run(gh_pr_cmd, capture_output=True, check=True, text=True) - - info = json.loads(result.stdout) + info = json.loads(lib.utils.chronic(gh_pr_cmd).stdout) print(f" set -a gh_prs {info['url']} # {info['title']}") diff --git a/python/scripts/cbl_ptchmn.py b/python/scripts/cbl_ptchmn.py index c5ec60967..4f6e4c13d 100755 --- a/python/scripts/cbl_ptchmn.py +++ b/python/scripts/cbl_ptchmn.py @@ -2,7 +2,6 @@ from argparse import ArgumentParser from pathlib import Path -import subprocess import os import sys @@ -11,22 +10,8 @@ import lib.utils -def git(directory, cmd, **kwargs): - return subprocess.run(['git', *cmd], - capture_output=True, - check=True, - cwd=directory, - text=True, - **kwargs) - - -def git_loud(directory, cmd, **kwargs): - lib.utils.print_cmd(['git', '-C', directory, *cmd]) - return git(directory, cmd, **kwargs) - - def get_patches_folder(repo): - branch = git(repo, ['bn']).stdout.strip() + branch = lib.utils.get_git_output(repo, 'bn') return Path(os.environ['GITHUB_FOLDER'], 'patches', repo.name, branch) @@ -46,19 +31,11 @@ def parse_arguments(): return parser.parse_args() -def apply(repo, patches): - try: - git(repo, ['am', *patches]) - except subprocess.CalledProcessError as err: - print(err.stderr) - raise err - - def sync(repo, patches_output): if repo.name not in ('linux', 'linux-next') and 'linux-stable' not in repo.name: raise RuntimeError(f"Supplied repo ('{repo}, {repo.name}') is not supported by cbl_ptchmn!") - if not (mfc := git(repo, ['mfc']).stdout.strip()): + if not (mfc := lib.utils.get_git_output(repo, 'mfc')): raise RuntimeError('My first commit could not be found?') # Generate a list of patches to remove. 
The Python documentation states @@ -69,17 +46,17 @@ def sync(repo, patches_output): item.unlink() fp_cmd = ['fp', f"--base={mfc}^", '-o', patches_output, f"{mfc}^..HEAD"] - git_loud(repo, fp_cmd) + lib.utils.call_git(repo, fp_cmd, show_cmd=True) status_cmd = ['--no-optional-locks', 'status', '-u', '--porcelain', patches_output] - if git(patches_output, status_cmd).stdout.strip(): - git(patches_output, ['add', patches_output]) + if lib.utils.get_git_output(patches_output, status_cmd): + lib.utils.call_git(patches_output, ['add', patches_output]) - sha = git(repo, ['sha']).stdout.strip() + sha = lib.utils.get_git_output(repo, 'sha') cmt_msg = f"patches: {repo.name}: {patches_output.name}: sync as of {sha}" - git_loud(patches_output, ['c', '-m', cmt_msg]) + lib.utils.call_git(patches_output, ['c', '-m', cmt_msg], show_cmd=True) - git(patches_output, ['push']) + lib.utils.call_git(patches_output, 'push') if __name__ == '__main__': @@ -93,6 +70,6 @@ def sync(repo, patches_output): raise RuntimeError(f"Derived patches folder ('{patches_folder}') does not exist!") if args.apply: - apply(args.directory, sorted(patches_folder.iterdir())) + lib.utils.call_git(args.directory, ['am', *sorted(patches_folder.iterdir())]) if args.sync: sync(args.directory, patches_folder) diff --git a/python/scripts/cbl_report.py b/python/scripts/cbl_report.py index 8299b8f72..1aa52d6e1 100755 --- a/python/scripts/cbl_report.py +++ b/python/scripts/cbl_report.py @@ -10,7 +10,6 @@ import os from pathlib import Path import shutil -import subprocess import sys import zoneinfo @@ -103,21 +102,6 @@ def get_report_worktree(): return Path(os.environ['CBL'], 'current-report') -def git(repo, cmd, capture_output=True, check=True, env=None, show_command=True): - if not shutil.which('git'): - raise RuntimeError('git could not be found!') - command = ['git', '-C', repo, *cmd] - if show_command: - lib.utils.print_cmd(command) - if env: - env = os.environ.copy() | env - return subprocess.run(command, capture_output=capture_output, check=check, env=env, text=True) - - -def git_check_success(repo, cmd): - return git(repo, cmd, check=False, show_command=False).returncode == 0 - - def parse_parameters(): parser = ArgumentParser() subparsers = parser.add_subparsers(help='Action to perform', required=True) @@ -212,11 +196,12 @@ def parse_parameters(): def local_branch_exists(repo, branch): - return git_check_success(repo, ['rev-parse', '--verify', branch]) + return lib.utils.call_git(repo, ['rev-parse', '--verify', branch], check=False).returncode == 0 def remote_branch_exists(repo, branch): - return git_check_success(repo, ['ls-remote', '--exit-code', '--heads', 'origin', branch]) + return lib.utils.call_git(repo, ['ls-remote', '--exit-code', '--heads', 'origin', branch], + check=False).returncode == 0 def generate_devices(devices): @@ -252,10 +237,7 @@ def generate_item(args): if not Path('Makefile').exists(): raise RuntimeError('Not in a kernel tree?') - proc = subprocess.run(['b4', 'prep', '--show-info'], - capture_output=True, - check=True, - text=True) + proc = lib.utils.chronic(['b4', 'prep', '--show-info']) info = dict(map(str.strip, item.split(':', 1)) for item in proc.stdout.splitlines()) commits = [key for key in info if key.startswith('commit-')] series = [key for key in info if key.startswith('series-v')] @@ -270,11 +252,7 @@ def generate_item(args): print(f" * `{title}` ({', '.join(md_links)})") elif item_type == 'pr': - proc = subprocess.run(['gh', 'pr', 'view', '--json', 'title,url'], - capture_output=True, - check=True, - 
text=True) - gh_json = json.loads(proc.stdout) + gh_json = json.loads(lib.utils.chronic(['gh', 'pr', 'view', '--json', 'title,url']).stdout) print(f"* [`{gh_json['title']}`]({gh_json['url']})") @@ -438,12 +416,8 @@ def create_monthly_report_file(report_file, report_date): def get_yearly_commits(year, source, branch='main', git_log_args=None, update=True): if update: - subprocess.run(['git', 'remote', 'update', '--prune', 'origin'], - capture_output=True, - check=True, - cwd=source) + lib.utils.call_git(source, ['remote', 'update', '--prune', 'origin']) git_log_cmd = [ - 'git', 'log', '--format=%H %s', '--no-merges', @@ -455,11 +429,7 @@ def get_yearly_commits(year, source, branch='main', git_log_args=None, update=Tr git_log_cmd += git_log_args else: git_log_cmd.append('--author=Nathan Chancellor') - git_log_output = subprocess.run(git_log_cmd, - capture_output=True, - check=True, - cwd=source, - text=True) + git_log_output = lib.utils.call_git(source, git_log_cmd) return dict(item.split(' ', 1) for item in git_log_output.stdout.splitlines()) @@ -757,8 +727,9 @@ def finalize_report(args): # Rebase changes if requested if args.rebase or args.all: - git(worktree, ['rebase', '-i', '--autosquash', 'origin/main'], - env={'GIT_SEQUENCE_EDITOR': shutil.which('true')}) + lib.utils.call_git(worktree, ['rebase', '-i', '--autosquash', 'origin/main'], + env={'GIT_SEQUENCE_EDITOR': shutil.which('true')}, + show_cmd=True) # Get branch based on user's request date = get_prev_datetime() if args.prev_month else get_current_datetime() @@ -766,21 +737,21 @@ def finalize_report(args): # Merge branch into main branch = get_report_branch(date) if args.merge or args.all: - git(repo, ['merge', branch]) + lib.utils.call_git(repo, ['merge', branch], show_cmd=True) # Remove worktree ('--force' due to submodules) if args.remove_worktree or args.all: - git(repo, ['worktree', 'remove', '--force', worktree]) + lib.utils.call_git(repo, ['worktree', 'remove', '--force', worktree], show_cmd=True) # Delete branch locally and remotely if necessary if args.delete_branch or args.all: - git(repo, ['branch', '--delete', '--force', branch]) + lib.utils.call_git(repo, ['branch', '--delete', '--force', branch], show_cmd=True) if remote_branch_exists(repo, branch): - git(repo, ['push', 'origin', f":{branch}"]) + lib.utils.call_git(repo, ['push', 'origin', f":{branch}"], show_cmd=True) # Push main if requested if args.push or args.all: - git(repo, ['push']) + lib.utils.call_git(repo, 'push', show_cmd=True) def new_report(args): @@ -802,7 +773,7 @@ def new_report(args): # Update source repo to ensure remote branch check is up to date if args.update or args.all: - git(repo, ['remote', 'update', '--prune', 'origin']) + lib.utils.call_git(repo, ['remote', 'update', '--prune', 'origin'], show_cmd=True) push_to_remote = False worktree_add = ['worktree', 'add'] @@ -815,14 +786,16 @@ def new_report(args): push_to_remote = True # Create worktree - git(repo, worktree_add) + lib.utils.call_git(repo, worktree_add, show_cmd=True) # Push new branch if needed if (args.push or args.all) and push_to_remote: - git(worktree, ['push', '--set-upstream', 'origin', branch]) + lib.utils.call_git(worktree, ['push', '--set-upstream', 'origin', branch], + show_cmd=True) # Update submodules, as that is how the theme is checked out - git(worktree, ['submodule', 'update', '--init', '--recursive']) + lib.utils.call_git(worktree, ['submodule', 'update', '--init', '--recursive'], + show_cmd=True) # Create new report file if necessary if args.create_report or 
args.all: @@ -837,8 +810,10 @@ def new_report(args): commit_date = report_date.strftime('%a %b %d %H:%M:%S %Y %z') create_monthly_report_file(report, report_date) - git(worktree, ['add', report]) - git(worktree, ['commit', '-m', commit_title, '--date', commit_date, '--signoff']) + lib.utils.call_git(worktree, ['add', report], show_cmd=True) + lib.utils.call_git(worktree, + ['commit', '-m', commit_title, '--date', commit_date, '--signoff'], + show_cmd=True) def update_report(args): @@ -855,13 +830,13 @@ def update_report(args): if not (editor := shutil.which(os.environ.get('EDITOR', 'vim'))): raise RuntimeError("$EDITOR not set or vim could not be found on your system!") - subprocess.run([editor, report], check=True) + lib.utils.run([editor, report]) if args.commit or (args.all and args.commit): - git(worktree, ['add', report]) - git(worktree, ['c', '--fixup', args.commit]) + lib.utils.call_git(worktree, ['add', report], show_cmd=True) + lib.utils.call_git(worktree, ['c', '--fixup', args.commit], show_cmd=True) if args.push or args.all: - git(worktree, ['push']) + lib.utils.call_git(worktree, 'push', show_cmd=True) def yearly_report(args): @@ -873,8 +848,9 @@ def yearly_report(args): commit_date = report_date.strftime('%a %b %d %H:%M:%S %Y %z') create_yearly_report_file(report, report_date, args.year) - git(repo, ['add', report]) - git(repo, ['commit', '-m', commit_title, '--date', commit_date]) + lib.utils.call_git(repo, ['add', report], show_cmd=True) + lib.utils.call_git(repo, ['commit', '-m', commit_title, '--date', commit_date], + show_cmd=True) if __name__ == '__main__': diff --git a/python/scripts/cbl_vmm.py b/python/scripts/cbl_vmm.py index 6f9895942..9fca57674 100755 --- a/python/scripts/cbl_vmm.py +++ b/python/scripts/cbl_vmm.py @@ -51,13 +51,8 @@ def iso_is_url(iso): return 'http://' in iso or 'https://' in iso -def run_cmd(cmd): - lib.utils.print_cmd(cmd) - subprocess.run(cmd, check=True) - - def wget(location, url): - run_cmd(['wget', '-c', '-O', location, url]) + lib.utils.run(['wget', '-c', '-O', location, url], show_cmd=True) class VirtualMachine: @@ -103,7 +98,7 @@ def __init__(self, arch, cmdline, cores, gdb, graphical, initrd, iso, kernel, kv raise RuntimeError('kernel passed without initrd and one could not be found!') # Clear any previous hosts using the chosen SSH port. 
- run_cmd(['ssh-keygen', '-R', f"[localhost]:{ssh_port}"]) + lib.utils.run(['ssh-keygen', '-R', f"[localhost]:{ssh_port}"], show_cmd=True) Path.home().joinpath('.ssh/known_hosts.old').unlink(missing_ok=True) # QEMU configuration @@ -180,7 +175,8 @@ def handle_action(self, action): def create_disk_img(self): self.primary_disk_img.parent.mkdir(exist_ok=True, parents=True) - run_cmd(['qemu-img', 'create', '-f', 'qcow2', self.primary_disk_img, self.size]) + lib.utils.run(['qemu-img', 'create', '-f', 'qcow2', self.primary_disk_img, self.size], + show_cmd=True) def get_display_args(self, graphical): if graphical: @@ -257,13 +253,10 @@ def run(self): # Get access to root privileges permission before opening virtiofsd in # the background print('Requesting root privileges to run virtiofsd...') - run_cmd([sudo, 'true']) + lib.utils.run_as_root('true') base_virtiofsd_cmd = [sudo, virtiofsd] - virtiofsd_version_text = subprocess.run([*base_virtiofsd_cmd, '--version'], - capture_output=True, - check=True, - text=True).stdout + virtiofsd_version_text = lib.utils.chronic([*base_virtiofsd_cmd, '--version']).stdout group_name = grp.getgrgid(os.getgid()).gr_name # C / QEMU / Reference implementation (deprecated) @@ -289,7 +282,7 @@ def run(self): with self.vfsd_log.open('w', encoding='utf-8') as file, \ subprocess.Popen(virtiofsd_cmd, stderr=file, stdout=file) as vfsd: try: - run_cmd([qemu, *self.qemu_args, *self.get_drive_args()]) + lib.utils.run([qemu, *self.qemu_args, *self.get_drive_args()], show_cmd=True) except subprocess.CalledProcessError as err: # If virtiofsd is dead, it is pretty likely that it was the # cause of QEMU failing so add to the existing exception using @@ -359,7 +352,7 @@ def can_use_kvm(self): ) check_el1_32.chmod(0o755) try: - subprocess.run(check_el1_32, check=True) + lib.utils.run(check_el1_32) except subprocess.CalledProcessError: pass # we'll return false below else: diff --git a/python/scripts/gen_patch_cc.py b/python/scripts/gen_patch_cc.py index 7732aebe2..c76664943 100755 --- a/python/scripts/gen_patch_cc.py +++ b/python/scripts/gen_patch_cc.py @@ -4,7 +4,12 @@ from argparse import ArgumentParser from pathlib import Path -import subprocess +import sys + +sys.path.append(str(Path(__file__).resolve().parents[1])) +# pylint: disable=wrong-import-position +import lib.utils +# pylint: enable=wrong-import-position parser = ArgumentParser(description='Generate Cc: lines for patch') parser.add_argument( @@ -26,13 +31,9 @@ # Show raw scripts/get_maintainer.pl output, which can help with trimming up or # modifying the list of addresses to send the patch to. 
-subprocess.run([get_maint, path], check=True, cwd=repo) +lib.utils.run([get_maint, path], cwd=repo) print() -addrs = subprocess.run([get_maint, '--no-n', '--no-rolestats', path], - capture_output=True, - check=True, - cwd=repo, - text=True).stdout -for addr in addrs.splitlines(): +for addr in lib.utils.chronic([get_maint, '--no-n', '--no-rolestats', path], + cwd=repo).stdout.splitlines(): print(f"Cc: {addr}") diff --git a/python/scripts/korg_tc.py b/python/scripts/korg_tc.py index b247016f2..f10e4ae0c 100755 --- a/python/scripts/korg_tc.py +++ b/python/scripts/korg_tc.py @@ -8,7 +8,6 @@ import platform import shlex import shutil -import subprocess import sys import requests @@ -121,8 +120,7 @@ def handle(self): raise RuntimeError(f"Compression extension ('{comp_ext}') not supported!") tar_input = response.content if not local_tarball.exists() else None - lib.utils.print_cmd(tar_cmd) - subprocess.run(tar_cmd, check=True, input=tar_input) + lib.utils.run(tar_cmd, input=tar_input, show_cmd=True) class ToolchainManager: diff --git a/python/scripts/nas_sync.py b/python/scripts/nas_sync.py index 1cd0bfd4c..69331b46e 100755 --- a/python/scripts/nas_sync.py +++ b/python/scripts/nas_sync.py @@ -7,7 +7,6 @@ import json import os from pathlib import Path -import subprocess import sys import requests @@ -120,14 +119,12 @@ def download_items(targets, network_folder): # Download and update repo if not (repo_path := Path(os.environ['CBL_SRC_M'], repo_name)).exists(): repo_path.parent.mkdir(exist_ok=True, parents=True) - subprocess.run(['git', 'clone', '--mirror', repo_url, repo_path], check=True) - subprocess.run(['git', 'remote', 'update', '--prune'], check=True, cwd=repo_path) + lib.utils.call_git_loud(None, ['clone', '--mirror', repo_url, repo_path]) + lib.utils.call_git_loud(repo_path, ['remote', 'update', '--prune']) # Create bundles repo_bundle = Path(bundles_folder, f"{repo_name}.bundle") repo_bundle.unlink(missing_ok=True) - subprocess.run(['git', 'bundle', 'create', repo_bundle, '--all'], - check=True, - cwd=repo_path) + lib.utils.call_git_loud(repo_path, ['bundle', 'create', repo_bundle, '--all']) elif target == 'debian': debian_arches = ['amd64', 'arm64', 'armhf', 'i386'] diff --git a/python/scripts/oci_rm.py b/python/scripts/oci_rm.py index ec117d4dd..fce4a2b19 100755 --- a/python/scripts/oci_rm.py +++ b/python/scripts/oci_rm.py @@ -4,8 +4,15 @@ from argparse import ArgumentParser import json +from pathlib import Path import shutil import subprocess +import sys + +sys.path.append(str(Path(__file__).resolve().parents[1])) +# pylint: disable=wrong-import-position +import lib.utils +# pylint: enable=wrong-import-position def fzf(target, fzf_input): @@ -16,8 +23,7 @@ def fzf(target, fzf_input): def oci_json(target): - podman_cmd = ['podman', target, 'ls', '--all', '--format', 'json'] - podman_out = subprocess.run(podman_cmd, capture_output=True, check=True, text=True).stdout + podman_out = lib.utils.chronic(['podman', target, 'ls', '--all', '--format', 'json']).stdout return json.loads(podman_out) @@ -37,9 +43,7 @@ def parse_arguments(): def podman_rm(target, items): - podman_cmd = ['podman', target, 'rm', '--force', *items] - print(f"$ {' '.join(podman_cmd)}") - subprocess.run(podman_cmd, check=True) + lib.utils.run(['podman', target, 'rm', '--force', *items], show_cmd=True) print() diff --git a/python/scripts/test_boot_utils.py b/python/scripts/test_boot_utils.py index fcc76aaee..40d8124fc 100755 --- a/python/scripts/test_boot_utils.py +++ b/python/scripts/test_boot_utils.py @@ -3,8 +3,12 @@ 
from argparse import ArgumentParser import os from pathlib import Path -import subprocess -import shlex +import sys + +sys.path.append(str(Path(__file__).resolve().parents[1])) +# pylint: disable=wrong-import-position +import lib.utils +# pylint: enable=wrong-import-position BOOT_QEMU = Path(os.environ['CBL_GIT'], 'boot-utils/boot-qemu.py') @@ -59,5 +63,4 @@ raise FileNotFoundError(f"{kernel_dir} does not exist?") boot_utils_cmd = [BOOT_QEMU, '-a', boot_utils_arch, '-k', kernel_dir, '-t', '90s'] - print(f"\n$ {' '.join(shlex.quote(str(elem)) for elem in boot_utils_cmd)}", flush=True) - subprocess.run(boot_utils_cmd, check=True) + lib.utils.run(boot_utils_cmd, show_cmd=True) diff --git a/python/scripts/tmxrun.py b/python/scripts/tmxrun.py index 3cbd94ed2..649baa5f3 100755 --- a/python/scripts/tmxrun.py +++ b/python/scripts/tmxrun.py @@ -2,7 +2,6 @@ from argparse import ArgumentParser from pathlib import Path -import subprocess import sys sys.path.append(str(Path(__file__).resolve().parents[1])) @@ -55,4 +54,4 @@ CMD_STR = f"dbxe -- fish -c '{CMD_STR}'" tmx_cmd.append(CMD_STR) -subprocess.run(tmx_cmd, check=True) +lib.utils.run(tmx_cmd) diff --git a/python/scripts/upd_distro.py b/python/scripts/upd_distro.py index 1096cfbdf..b85dd03cc 100755 --- a/python/scripts/upd_distro.py +++ b/python/scripts/upd_distro.py @@ -4,7 +4,6 @@ from argparse import ArgumentParser from pathlib import Path -import subprocess import sys sys.path.append(str(Path(__file__).resolve().parents[1])) @@ -16,9 +15,7 @@ def brew(brew_args): - brew_cmd = ['/opt/homebrew/bin/brew', *brew_args] - lib.utils.print_cmd(brew_cmd) - subprocess.run(brew_cmd, check=True) + lib.utils.run(['/opt/homebrew/bin/brew', *brew_args], show_cmd=True) parser = ArgumentParser(description='Update distribution') @@ -37,9 +34,7 @@ def brew(brew_args): os_rel = lib.setup.get_os_rel() if os_rel['ID'] == 'arch': - try: - subprocess.run(['checkupdates'], check=True) - except subprocess.CalledProcessError: + if not lib.utils.run_check_rc_zero(['checkupdates']): sys.exit(0) cmd_func = lib.setup.pacman diff --git a/python/setup/alpine.py b/python/setup/alpine.py index f7b447535..176408742 100755 --- a/python/setup/alpine.py +++ b/python/setup/alpine.py @@ -7,7 +7,6 @@ from pathlib import Path import re import shutil -import subprocess import sys import time @@ -97,7 +96,7 @@ def setup_user(user_name, user_password): '--shell', shutil.which('fish'), user_name, ] # yapf: disable - subprocess.run(useradd_cmd, check=True) + lib.utils.run(useradd_cmd) lib.setup.chpasswd(user_name, user_password) user_groups = [ @@ -110,7 +109,7 @@ def setup_user(user_name, user_password): 'wheel', ] for group in user_groups: - subprocess.run(['addgroup', user_name, group], check=True) + lib.utils.run(['addgroup', user_name, group]) # Setup doas doas_conf, doas_text = lib.utils.path_and_text('/etc/doas.d/doas.conf') @@ -131,15 +130,15 @@ def setup_podman(user_name): rc_conf.write_text(rc_conf_txt.replace(rc_cgroup_mode_line, rc_cgroup_mode), encoding='utf-8') - subprocess.run(['rc-update', 'add', 'cgroups'], check=True) - subprocess.run(['rc-service', 'cgroups', 'start'], check=True) + lib.utils.run(['rc-update', 'add', 'cgroups']) + lib.utils.run(['rc-service', 'cgroups', 'start']) modules, modules_text = lib.utils.path_and_text('/etc/modules') if 'tun' not in modules_text: modules.write_text(f"{modules_text}tun\n", encoding='utf-8') if not (make_root_rshared := Path('/etc/local.d/make_root_rshared.start')).exists(): - subprocess.run(['rc-update', 'add', 'local', 'default'], 
check=True) + lib.utils.run(['rc-update', 'add', 'local', 'default']) make_root_rshared.write_text('#!/bin/sh\n\nmount --make-rshared /\n', encoding='utf-8') make_root_rshared.chmod(0o755) @@ -161,4 +160,4 @@ def setup_podman(user_name): print("[INFO] Powering off machine in 10 seconds, hit Ctrl-C to cancel...") time.sleep(10) - subprocess.run('poweroff', check=True) + lib.utils.run('poweroff') diff --git a/python/setup/arch.py b/python/setup/arch.py index 7d3a6bba3..d8f46dca0 100755 --- a/python/setup/arch.py +++ b/python/setup/arch.py @@ -12,7 +12,6 @@ from pathlib import Path import re import shutil -import subprocess import sys from tempfile import TemporaryDirectory @@ -96,7 +95,7 @@ def update_if_necessary(self): self.path.write_text(new_text, encoding='utf-8') self._reload_data_from_file() - subprocess.run(['mkinitcpio', '-P'], check=True) + lib.utils.run(['mkinitcpio', '-P']) def add_hetzner_mirror_to_repos(config): @@ -141,7 +140,7 @@ def adjust_gnome_power_settings(): 'org.gnome.settings-daemon.plugins.power', 'sleep-inactive-ac-type', 'nothing', ] # yapf: disable - subprocess.run(gdm_cmd, check=True) + lib.utils.run(gdm_cmd) def configure_networking(): @@ -215,7 +214,7 @@ def configure_systemd_boot(init=True, conf='linux.conf'): linux_conf.write_text(new_text, encoding='utf-8') # Ensure that the new configuration is the default on the machine. - subprocess.run(['bootctl', 'set-default', linux_conf.name], check=True) + lib.utils.run(['bootctl', 'set-default', linux_conf.name]) def convert_boot_to_xbootldr(fstab, dryrun): @@ -308,7 +307,7 @@ def enable_reflector(): reflector_drop_in.parent.mkdir(exist_ok=True) reflector_drop_in.write_text(reflector_drop_in_text, encoding='utf-8') reflector_drop_in.chmod(0o644) - subprocess.run(['systemctl', 'daemon-reload'], check=True) + lib.utils.run(['systemctl', 'daemon-reload']) lib.setup.systemctl_enable(['reflector.timer']) @@ -321,7 +320,7 @@ def enable_reflector(): # For archinstall, which causes ^M in /etc/fstab def fix_fstab(): - subprocess.run(['dos2unix', '/etc/fstab'], check=True) + lib.utils.run(['dos2unix', '/etc/fstab']) def get_cmdline_additions(): @@ -343,8 +342,7 @@ def get_findmnt_info(path=''): findmnt_cmd = ['findmnt', '-J', '-o', ','.join(fields)] if path: findmnt_cmd.append(path) - findmnt_proc = subprocess.run(findmnt_cmd, capture_output=True, check=True, text=True) - filesystems = json.loads(findmnt_proc.stdout)['filesystems'] + filesystems = json.loads(lib.utils.chronic(findmnt_cmd).stdout)['filesystems'] if path: return filesystems[0] return filesystems @@ -590,8 +588,8 @@ def pacman_install_packages(): def pacman_key_setup(): - subprocess.run(['pacman-key', '--init'], check=True) - subprocess.run(['pacman-key', '--populate', 'archlinux'], check=True) + lib.utils.run(['pacman-key', '--init']) + lib.utils.run(['pacman-key', '--populate', 'archlinux']) def pacman_settings(dryrun=False): @@ -641,7 +639,7 @@ def pacman_settings(dryrun=False): if not (nathan_db := Path('/var/lib/pacman/sync/nathan.db')).exists(): with TemporaryDirectory() as tempdir: Path(tempdir).chmod(0o755) # avoid permission errors from pacman - subprocess.run(['pacman', '--dbpath', tempdir, '-Sy'], check=True) + lib.utils.run(['pacman', '--dbpath', tempdir, '-Sy']) shutil.move(Path(tempdir, *nathan_db.parts[-2:]), nathan_db) @@ -715,7 +713,7 @@ def setup_user(username, userpass): lib.setup.add_user_to_group('uucp', username) else: fish = Path(shutil.which('fish')).resolve() - subprocess.run(['useradd', '-G', 'wheel,uucp', '-m', '-s', fish, username], 
check=True) + lib.utils.run(['useradd', '-G', 'wheel,uucp', '-m', '-s', fish, username]) lib.setup.chpasswd(username, userpass) diff --git a/python/setup/deb.py b/python/setup/deb.py index 53b1c5ea8..b6cb966e6 100755 --- a/python/setup/deb.py +++ b/python/setup/deb.py @@ -5,13 +5,13 @@ import os from pathlib import Path import shutil -import subprocess import sys import tempfile sys.path.append(str(Path(__file__).resolve().parents[1])) # pylint: disable=wrong-import-position import lib.setup +import lib.utils # pylint: enable=wrong-import-position @@ -31,10 +31,7 @@ def apt_upgrade(upgrade_args=None): def get_dpkg_arch(): - return subprocess.run(['dpkg', '--print-architecture'], - capture_output=True, - check=True, - text=True).stdout.strip() + return lib.utils.chronic(['dpkg', '--print-architecture']).stdout.strip() def install_initial_packages(): @@ -61,14 +58,14 @@ def setup_doas(username, root_password): if lib.setup.get_glibc_version() > (2, 33, 0): tmp_dir = Path(tempfile.mkdtemp()) doas_deb = Path(tmp_dir, doas_deb_file) - lib.setup.curl([ + lib.utils.curl([ '-o', doas_deb, f"http://http.us.debian.org/debian/pool/main/o/opendoas/{doas_deb_file}", ]) else: doas_deb = Path(env_folder, 'bin/packages', doas_deb_file) - subprocess.run(['dpkg', '-i', doas_deb], check=True) + lib.utils.run(['dpkg', '-i', doas_deb]) doas_conf = Path('/etc/doas.conf') doas_conf_text = ('# Allow me to be root for 5 minutes at a time\n' @@ -89,12 +86,12 @@ def setup_doas(username, root_password): def setup_docker(username): - subprocess.run(['groupadd', '-f', 'docker'], check=True) + lib.utils.run(['groupadd', '-f', 'docker']) lib.setup.add_user_to_group('docker', username) # Pick up potential previous changes to daemon.json file for service in ['containerd', 'docker']: - subprocess.run(['systemctl', 'restart', f"{service}.service"], check=True) + lib.utils.run(['systemctl', 'restart', f"{service}.service"]) def setup_libvirt(username): @@ -109,11 +106,11 @@ def setup_locales(): 'locales locales/locales_to_be_generated multiselect en_US.UTF-8 UTF-8', ] for command in commands: - subprocess.run(['debconf-set-selections'], check=True, input=command, text=True) + lib.utils.run('debconf-set-selections', input=command) Path('/etc/locale.gen').unlink(missing_ok=True) - subprocess.run(['dpkg-reconfigure', '--frontend', 'noninteractive', 'locales'], check=True) + lib.utils.run(['dpkg-reconfigure', '--frontend', 'noninteractive', 'locales']) def update_and_install_packages(additional_packages=None): diff --git a/python/setup/debian.py b/python/setup/debian.py index b25928557..603f25159 100755 --- a/python/setup/debian.py +++ b/python/setup/debian.py @@ -8,7 +8,6 @@ import platform import re import shutil -import subprocess import sys import deb @@ -36,8 +35,8 @@ def pi_setup(user_name): if not lib.setup.is_pi(): return - subprocess.run(['raspi-config', '--expand-rootfs'], check=True) - subprocess.run(['raspi-config', 'nonint', 'do_serial', '0'], check=True) + lib.utils.run(['raspi-config', '--expand-rootfs']) + lib.utils.run(['raspi-config', 'nonint', 'do_serial', '0']) ip_addr = f"192.168.4.{205 if platform.machine() == 'aarch64' else 199}" dhcpcd_conf, dhcpcd_conf_txt = lib.utils.path_and_text('/etc/dhcpcd.conf') @@ -114,7 +113,7 @@ def setup_repos(): tailscale_gpg_key = Path('/usr/share/keyrings/tailscale-archive-keyring.gpg') lib.setup.fetch_gpg_key(f"{base_tailscale_url}.noarmor.gpg", tailscale_gpg_key) - tailscale_repo_txt = lib.setup.curl([f"{base_tailscale_url}.tailscale-keyring.list"]) + tailscale_repo_txt = 
lib.utils.curl([f"{base_tailscale_url}.tailscale-keyring.list"]) Path(apt_sources, 'tailscale.list').write_bytes(tailscale_repo_txt) diff --git a/python/setup/equinix.py b/python/setup/equinix.py index 083dd686e..04f584010 100755 --- a/python/setup/equinix.py +++ b/python/setup/equinix.py @@ -3,10 +3,8 @@ # Copyright (C) 2022-2023 Nathan Chancellor from argparse import ArgumentParser -from os import environ as env from pathlib import Path import shutil -import subprocess import sys import time @@ -36,9 +34,8 @@ def create_user(user_name, user_password): if lib.setup.user_exists(user_name): raise RuntimeError(f"user ('{user_name}') already exists?") - subprocess.run( - ['useradd', '-m', '-G', 'sudo' if lib.setup.group_exists('sudo') else 'wheel', user_name], - check=True) + lib.utils.run( + ['useradd', '-m', '-G', 'sudo' if lib.setup.group_exists('sudo') else 'wheel', user_name]) lib.setup.chpasswd(user_name, user_password) root_ssh = Path.home().joinpath('.ssh') @@ -60,9 +57,9 @@ def partition_drive(drive_path, mountpoint, username): raise RuntimeError(f"volume ('{volume}') already exists?") if shutil.which('sgdisk'): - subprocess.run(['sgdisk', '-N', '1', '-t', '1:8300', drive_path], check=True) + lib.utils.run(['sgdisk', '-N', '1', '-t', '1:8300', drive_path]) else: - subprocess.run([ + lib.utils.run([ 'parted', '-s', drive_path, @@ -74,21 +71,13 @@ def partition_drive(drive_path, mountpoint, username): '0%', '100%', ], - check=True) + check=True) # Let everything sync up time.sleep(10) - subprocess.run(['mkfs', '-t', 'ext4', volume], - check=True, - env={ - **env, - 'E2FSPROGS_LIBMAGIC_SUPPRESS': '1', - }) + lib.utils.run(['mkfs', '-t', 'ext4', volume], env={'E2FSPROGS_LIBMAGIC_SUPPRESS': '1'}) - vol_uuid = subprocess.run(['blkid', '-o', 'value', '-s', 'UUID', volume], - capture_output=True, - check=True, - text=True).stdout.strip() + vol_uuid = lib.utils.chronic(['blkid', '-o', 'value', '-s', 'UUID', volume]).stdout.strip() fstab = lib.setup.Fstab() fstab[mountpoint] = lib.setup.FstabItem(f"UUID={vol_uuid}", mountpoint, 'ext4', 'defaults', '0', @@ -96,7 +85,7 @@ def partition_drive(drive_path, mountpoint, username): fstab.write() mountpoint.mkdir(exist_ok=True, parents=True) - subprocess.run(['mount', '-a'], check=True) + lib.utils.run(['mount', '-a']) if mountpoint != Path('/home'): lib.setup.chown(username, mountpoint) diff --git a/python/setup/fedora.py b/python/setup/fedora.py index 8347fe769..6f924e05a 100755 --- a/python/setup/fedora.py +++ b/python/setup/fedora.py @@ -5,7 +5,6 @@ from pathlib import Path import re import shutil -import subprocess import sys sys.path.append(str(Path(__file__).resolve().parents[1])) @@ -40,7 +39,7 @@ def dnf_add_repo(repo_url): # https://github.com/rpm-software-management/dnf5/issues/1537 if get_fedora_version() >= 41: local_dst = Path('/etc/yum.repos.d', repo_url.rsplit('/', 1)[1]) - subprocess.run(['curl', '-LSs', '-o', local_dst, repo_url], check=True) + lib.utils.curl(['-o', local_dst, repo_url]) else: lib.setup.dnf(['config-manager', '--add-repo', repo_url]) @@ -60,13 +59,13 @@ def early_pi_fixups(): lvmsysdev = Path('/etc/lvm/devices/system.devices') if lvmsysdev.exists() and '/dev/mmcblk' not in lvmsysdev.read_text(encoding='utf-8'): lvmsysdev.unlink() - subprocess.run(['vgimportdevices', '-a'], check=True) - subprocess.run(['vgchange', '-ay'], check=True) + lib.utils.run(['vgimportdevices', '-a']) + lib.utils.run(['vgchange', '-ay']) # arm-setup-installer extends the size of the physical partition and # LVM partition but not the XFS 
partition, so just do that and # circumvent the rest of this function's logic. - subprocess.run(['xfs_growfs', '-d', '/'], check=True) + lib.utils.run(['xfs_growfs', '-d', '/']) # Ensure 'rhgb quiet' is removed for all current and future kernels, as it # hurts debugging early boot failures. Make sure the serial console is set @@ -81,10 +80,7 @@ def early_pi_fixups(): if not (match := re.search(r'rd.lvm.lv=(.*)/root', grub_txt)): raise RuntimeError('Cannot find rd.lvm.lv value in /etc/default/grub?') grub_vg_name = match.groups()[0] - sys_vg_name = subprocess.run(['vgs', '--noheading', '-o', 'vg_name'], - capture_output=True, - check=True, - text=True).stdout.strip() + sys_vg_name = lib.utils.chronic(['vgs', '--noheading', '-o', 'vg_name']).stdout.strip() if len(sys_vg_name.split(' ')) != 1: raise RuntimeError('More than one VG found?') if grub_vg_name != sys_vg_name: @@ -103,7 +99,7 @@ def early_pi_fixups(): '--remove-args', ' '.join(remove_args), '--update-kernel', 'ALL', ] # yapf: disable - subprocess.run(grubby_cmd, check=True) + lib.utils.run(grubby_cmd) def get_fedora_version(): @@ -128,17 +124,16 @@ def resize_rootfs(): if lib.setup.is_pi(): return - df_out = subprocess.run(['df', '-T'], capture_output=True, check=True, text=True).stdout - for line in df_out.split('\n'): + for line in lib.utils.chronic(['df', '-T']).stdout.splitlines(): if '/dev/mapper/' in line: dev_mapper_path, dev_mapper_fs_type = line.split(' ')[0:2] # This can fail if it is already resized to max so don't bother # checking the return code. - subprocess.run(['lvextend', '-l', '+100%FREE', dev_mapper_path], check=False) + lib.utils.run(['lvextend', '-l', '+100%FREE', dev_mapper_path], check=False) if dev_mapper_fs_type == 'xfs': - subprocess.run(['xfs_growfs', dev_mapper_path], check=True) + lib.utils.run(['xfs_growfs', dev_mapper_path]) break @@ -257,7 +252,7 @@ def setup_docker(username): if not shutil.which('docker'): return - subprocess.run(['groupadd', '-f', 'docker'], check=True) + lib.utils.run(['groupadd', '-f', 'docker']) lib.setup.add_user_to_group('docker', username) lib.setup.systemctl_enable(['docker']) @@ -269,7 +264,7 @@ def setup_kernel_args(): # Until firmware supports new IORT RMR patches args = ['arm-smmu.disable_bypass=0', 'iommu.passthrough=1'] grubby_cmd = ['grubby', '--args', ' '.join(args), '--update-kernel', 'ALL'] - subprocess.run(grubby_cmd, check=True) + lib.utils.run(grubby_cmd) def setup_libvirt(username): @@ -283,8 +278,8 @@ def setup_mosh(): if not shutil.which('firewall-cmd'): return - subprocess.run(['firewall-cmd', '--add-port=60000-61000/udp', '--permanent'], check=True) - subprocess.run(['firewall-cmd', '--reload'], check=True) + lib.utils.run(['firewall-cmd', '--add-port=60000-61000/udp', '--permanent']) + lib.utils.run(['firewall-cmd', '--reload']) def setup_pi(username): diff --git a/python/setup/macos.py b/python/setup/macos.py index 5098d183a..7a16bba64 100755 --- a/python/setup/macos.py +++ b/python/setup/macos.py @@ -6,11 +6,17 @@ import os import re import shutil -import subprocess +from subprocess import CalledProcessError +import sys + +sys.path.append(str(Path(__file__).resolve().parents[1])) +# pylint: disable=wrong-import-position +import lib.utils +# pylint: enable=wrong-import-position def brew(brew_args): - subprocess.run([get_brew_path(), *brew_args], check=True) + lib.utils.run([get_brew_path(), *brew_args]) def clone_env_plugins(): @@ -20,14 +26,14 @@ def clone_env_plugins(): if not env_folder.exists(): env_folder.parent.mkdir(exist_ok=True, parents=True) 
repo_clone(env_folder) - subprocess.run(['git', 'pull'], check=True, cwd=env_folder) + lib.utils.call_git_loud(env_folder, 'pull') forked_fisher_plugins = ['hydro'] for plugin in [Path(github_folder, elem) for elem in forked_fisher_plugins]: if not plugin.exists(): plugin.parent.mkdir(exist_ok=True, parents=True) repo_clone(plugin, 'personal') - subprocess.run(['git', 'remote', 'update'], check=True, cwd=plugin) + lib.utils.call_git_loud(plugin, ['remote', 'update']) def get_brew_bin(): @@ -51,11 +57,11 @@ def get_home(): def brew_gh(gh_args): - subprocess.run([Path(get_brew_bin(), 'gh'), *gh_args], check=True) + lib.utils.run([Path(get_brew_bin(), 'gh'), *gh_args]) def brew_git(git_args): - subprocess.run([Path(get_brew_bin(), 'git'), *git_args], check=True) + lib.utils.run([Path(get_brew_bin(), 'git'), *git_args]) def install_packages(): @@ -110,18 +116,15 @@ def setup_gh(): try: brew_gh(['auth', 'status']) - except subprocess.CalledProcessError: + except CalledProcessError: brew_gh(['auth', 'login']) def setup_homebrew(): if not get_brew_path().exists(): - install_sh = subprocess.run( - ['curl', '-fLSs', 'https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh'], - capture_output=True, - check=True, - text=True).stdout - subprocess.run(['/bin/bash', '-c', install_sh], check=True) + install_sh = lib.utils.curl( + ['https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh']).decode('utf-8') + lib.utils.run(['/bin/bash', '-c', install_sh]) def setup_ssh(): @@ -146,9 +149,9 @@ def setup_ssh(): shutil.rmtree(keys_folder) try: - subprocess.run(['ssh-add', '-l'], check=True) - except subprocess.CalledProcessError: - subprocess.run(['ssh-add', ssh_key], check=True) + lib.utils.run(['ssh-add', '-l']) + except CalledProcessError: + lib.utils.run(['ssh-add', ssh_key]) gh_conf_text = Path(home, '.config/gh/config.yml').read_text(encoding='utf-8') if re.search(r'^git_protocol:\s+(.*)$', gh_conf_text, flags=re.M).groups()[0] != 'ssh': @@ -189,7 +192,7 @@ def setup_fish(): '\n' 'git_setup\n' 'vim_setup\n') - subprocess.run([Path(get_brew_bin(), 'fish'), '-c', fish_script], check=True) + lib.utils.run([Path(get_brew_bin(), 'fish'), '-c', fish_script]) if __name__ == '__main__': diff --git a/python/setup/ubuntu.py b/python/setup/ubuntu.py index c145f78a3..1b9996a82 100755 --- a/python/setup/ubuntu.py +++ b/python/setup/ubuntu.py @@ -5,7 +5,6 @@ from argparse import ArgumentParser import getpass from pathlib import Path -import subprocess import sys import deb @@ -17,7 +16,7 @@ def apt_add_repo(repo_to_add): - subprocess.run(['apt-add-repository', '-y', repo_to_add], check=True) + lib.utils.run(['apt-add-repository', '-y', repo_to_add]) def parse_arguments():
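Everything above funnels through a small set of wrappers added to python/lib/utils.py earlier in this patch: run() (check=True by default, optional show_cmd echo), chronic() (captured text output), run_check_rc_zero(), run_as_root(), curl(), and the call_git()/call_git_loud() pair. For readers who only have this excerpt, the sketch below approximates how such wrappers could be structured; it is inferred purely from the call sites in the hunks above, so the names match the helpers being used but the bodies, defaults, and flag choices are assumptions rather than the actual implementation in python/lib/utils.py.

# Illustrative sketch only, inferred from the call sites in this patch; the
# real helpers live in python/lib/utils.py and the details below are
# assumptions, not the actual implementation.
import os
import shlex
import subprocess


def run(cmd, check=True, show_cmd=False, **kwargs):
    # check=True by default, so call sites drop the explicit 'check=True'.
    cmd = [cmd] if isinstance(cmd, (str, os.PathLike)) else list(cmd)
    if show_cmd:
        # Same "$ <command>" echo the scripts used to print by hand.
        print(f"$ {' '.join(shlex.quote(str(elem)) for elem in cmd)}", flush=True)
    if isinstance(kwargs.get('input'), str):
        # deb.py passes a str via input=, which requires text mode.
        kwargs.setdefault('text', True)
    if (env := kwargs.get('env')) is not None:
        # equinix.py passes a single variable, implying the helper merges the
        # current environment rather than replacing it (an assumption).
        kwargs['env'] = {**os.environ, **env}
    return subprocess.run(cmd, check=check, **kwargs)


def chronic(cmd, **kwargs):
    # Quiet run in the spirit of chronic(1): capture text output so callers
    # can chain '.stdout' instead of capture_output/check/text boilerplate.
    return run(cmd, capture_output=True, text=True, **kwargs)


def run_check_rc_zero(cmd, **kwargs):
    # True if the command exits zero, replacing try/except CalledProcessError.
    return run(cmd, check=False, **kwargs).returncode == 0


def curl(curl_args, **kwargs):
    # Returns raw bytes, matching the .decode()/.write_bytes() call sites;
    # '-LSs' as the base flag set is a guess from the code being replaced.
    return run(['curl', '-LSs', *curl_args], capture_output=True, **kwargs).stdout


def call_git(repo, cmd, **kwargs):
    # Captured git invocation; whether the real helper uses 'git -C <repo>'
    # or cwd=repo is a guess. call_git_loud() would be the uncaptured
    # variant and run_as_root() the sudo/doas-prefixed one.
    git_cmd = [cmd] if isinstance(cmd, str) else list(cmd)
    return chronic(['git', *(['-C', str(repo)] if repo else []), *git_cmd], **kwargs)

With helpers along these lines, the conversion pattern repeated throughout the hunks reduces, for example, subprocess.run(cmd, capture_output=True, check=True, text=True).stdout to chronic(cmd).stdout, and print_cmd() followed by subprocess.run(cmd, check=True) to run(cmd, show_cmd=True).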