From c2f447c9f2252c0f102291db6c9f58bc5118d59f Mon Sep 17 00:00:00 2001 From: Andrew Halberstadt Date: Mon, 6 Oct 2025 14:32:48 -0400 Subject: [PATCH] style: enable lint and formatting on src/taskgraph/run-task --- .pre-commit-config.yaml | 1 - src/taskgraph/run-task/fetch-content | 131 +++++++++---------- src/taskgraph/run-task/robustcheckout.py | 2 +- src/taskgraph/run-task/run-task | 155 ++++++++++++----------- 4 files changed, 147 insertions(+), 142 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 97970aa96..c283ab007 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -43,6 +43,5 @@ repos: stages: [commit-msg] exclude: | (?x)^( - src/taskgraph/run-task/| taskcluster/scripts/external_tools ) diff --git a/src/taskgraph/run-task/fetch-content b/src/taskgraph/run-task/fetch-content index 24d68cc56..bcca069a6 100755 --- a/src/taskgraph/run-task/fetch-content +++ b/src/taskgraph/run-task/fetch-content @@ -75,7 +75,7 @@ def rename_after_close(fname, *args, **kwargs): manager. """ path = pathlib.Path(fname) - tmp = path.with_name("%s.tmp" % path.name) + tmp = path.with_name(f"{path.name}.tmp") try: with tmp.open(*args, **kwargs) as fh: yield fh @@ -135,13 +135,11 @@ def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter= jitter = jitter or 0 # py35 barfs on the next line if jitter is None if jitter > sleeptime: # To prevent negative sleep times - raise Exception( - "jitter ({}) must be less than sleep time ({})".format(jitter, sleeptime) - ) + raise Exception(f"jitter ({jitter}) must be less than sleep time ({sleeptime})") sleeptime_real = sleeptime for _ in range(attempts): - log("attempt %i/%i" % (_ + 1, attempts)) + log(f"attempt {_ + 1}/{attempts}") yield sleeptime_real @@ -159,9 +157,7 @@ def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter= # Don't need to sleep the last time if _ < attempts - 1: - log( - "sleeping for %.2fs (attempt %i/%i)" % (sleeptime_real, _ + 1, attempts) - ) + log(f"sleeping for {sleeptime_real:.2f}s (attempt {_ + 1}/{attempts})") time.sleep(sleeptime_real) @@ -178,7 +174,7 @@ def stream_download(url, sha256=None, size=None, headers=None): content, it should be streamed to a file or memory and only operated on after the generator is exhausted without raising. 
""" - log("Downloading %s" % url) + log(f"Downloading {url}") headers = headers or [] h = hashlib.sha256() @@ -194,7 +190,7 @@ def stream_download(url, sha256=None, size=None, headers=None): kwargs = {} if certifi: ssl_context = ssl.create_default_context(cafile=certifi.where()) - kwargs["context"] = context = ssl_context + kwargs["context"] = ssl_context with urllib.request.urlopen(req, timeout=60, **kwargs) as fh: if not url.endswith(".gz") and fh.info().get("Content-Encoding") == "gzip": fh = gzip.GzipFile(fileobj=fh) @@ -206,14 +202,12 @@ def stream_download(url, sha256=None, size=None, headers=None): content_length = int(content_length) except ValueError: raise IntegrityError( - "content-length header for %s is not an integer; got %s" - % (url, content_length) + f"content-length header for {url} is not an integer; got {content_length}" ) if size: if size != content_length: raise IntegrityError( - "size mismatch on %s: wanted %d; content-length is %d, x-cache-status is: %s" - % ( + "size mismatch on {}: wanted {}; content-length is {}, x-cache-status is: {}".format( url, size, content_length, @@ -236,25 +230,20 @@ def stream_download(url, sha256=None, size=None, headers=None): duration = time.time() - t0 digest = h.hexdigest() - log( - "%s resolved to %d bytes with sha256 %s in %.3fs" - % (url, length, digest, duration) - ) + log(f"{url} resolved to {length} bytes with sha256 {digest} in {duration:.3f}s") if size: if size == length: - log("Verified size of %s" % url) + log(f"Verified size of {url}") else: - raise IntegrityError( - "size mismatch on %s: wanted %d; got %d" % (url, size, length) - ) + raise IntegrityError(f"size mismatch on {url}: wanted {size}; got {length}") if sha256: if digest == sha256: - log("Verified sha256 integrity of %s" % url) + log(f"Verified sha256 integrity of {url}") else: raise IntegrityError( - "sha256 mismatch on %s: wanted %s; got %s" % (url, sha256, digest) + f"sha256 mismatch on {url}: wanted {sha256}; got {digest}" ) @@ -271,7 +260,7 @@ def download_to_path(url, path, sha256=None, size=None, headers=None): for _ in retrier(attempts=5, sleeptime=60): try: - log("Downloading %s to %s" % (url, path)) + log(f"Downloading {url} to {path}") with rename_after_close(path, "wb") as fh: for chunk in stream_download( @@ -281,7 +270,7 @@ def download_to_path(url, path, sha256=None, size=None, headers=None): return except Exception as e: - log("Download failed: {}".format(e)) + log(f"Download failed: {e}") continue raise Exception("Download failed, no more retries!") @@ -292,7 +281,7 @@ def download_to_memory(url, sha256=None, size=None): for _ in retrier(attempts=5, sleeptime=60): data = b"" - log("Downloading %s" % (url)) + log(f"Downloading {url}") try: for chunk in stream_download(url, sha256=sha256, size=size): @@ -300,7 +289,7 @@ def download_to_memory(url, sha256=None, size=None): return data except Exception as e: - log("Download failed: {}".format(e)) + log(f"Download failed: {e}") continue raise Exception("Download failed, no more retries!") @@ -313,8 +302,8 @@ def gpg_verify_path(path: pathlib.Path, public_key_data: bytes, signature_data: bytes with GPG public key data. ``signature_data`` contains a signed GPG document to use with ``gpg --verify``. 
""" - log("Validating GPG signature of %s" % path) - log("GPG key data:\n%s" % public_key_data.decode("ascii")) + log(f"Validating GPG signature of {path}") + log("GPG key data:\n{}".format(public_key_data.decode("ascii"))) with tempfile.TemporaryDirectory() as td: try: @@ -326,7 +315,7 @@ def gpg_verify_path(path: pathlib.Path, public_key_data: bytes, signature_data: log("Verifying GPG signature...") subprocess.run( - gpg_args + ["--verify", "-", "%s" % path], + gpg_args + ["--verify", "-", f"{path}"], input=signature_data, check=True, ) @@ -343,7 +332,7 @@ def gpg_verify_path(path: pathlib.Path, public_key_data: bytes, signature_data: class ArchiveTypeNotSupported(Exception): def __init__(self, path: pathlib.Path): - super(Exception, self).__init__("Archive type not supported for %s" % path) + super(Exception, self).__init__(f"Archive type not supported for {path}") def open_stream(path: pathlib.Path): @@ -373,7 +362,7 @@ def open_stream(path: pathlib.Path): headers[:512], tarfile.ENCODING, "surrogateescape" ): return "tar", fh - except Exception as e: + except Exception: pass raise ArchiveTypeNotSupported(path) @@ -395,7 +384,7 @@ def extract_archive(path, dest_dir): path = path.resolve() dest_dir = dest_dir.resolve() - log("Extracting %s to %s" % (path, dest_dir)) + log(f"Extracting {path} to {dest_dir}") t0 = time.time() # We pipe input to the decompressor program so that we can apply @@ -421,7 +410,7 @@ def extract_archive(path, dest_dir): args = ["unzip", "-q", "-o", str(path)] pipe_stdin = False else: - raise ValueError("unknown archive format: %s" % path) + raise ValueError(f"unknown archive format: {path}") if args: with ifh, subprocess.Popen( @@ -438,9 +427,9 @@ def extract_archive(path, dest_dir): p.stdin.write(chunk) if p.returncode: - raise Exception("%r exited %d" % (args, p.returncode)) + raise Exception(f"{args!r} exited {p.returncode}") - log("%s extracted in %.3fs" % (path, time.time() - t0)) + log(f"{path} extracted in {time.time() - t0:.3f}s") def should_repack_archive( @@ -483,14 +472,16 @@ def should_repack_archive( return True -EXECUTABLE_SIGNATURES = set([ - b"\xFE\xED\xFA\xCE", # mach-o 32-bits big endian - b"\xCE\xFA\xED\xFE", # mach-o 32-bits little endian - b"\xFE\xED\xFA\xCF", # mach-o 64-bits big endian - b"\xCF\xFA\xED\xFE", # mach-o 64-bits little endian - b"\xCA\xFE\xBA\xBE", # mach-o FAT binary - b"\x7F\x45\x4C\x46", # Elf binary -]) +EXECUTABLE_SIGNATURES = set( + [ + b"\xfe\xed\xfa\xce", # mach-o 32-bits big endian + b"\xce\xfa\xed\xfe", # mach-o 32-bits little endian + b"\xfe\xed\xfa\xcf", # mach-o 64-bits big endian + b"\xcf\xfa\xed\xfe", # mach-o 64-bits little endian + b"\xca\xfe\xba\xbe", # mach-o FAT binary + b"\x7f\x45\x4c\x46", # Elf binary + ] +) def repack_archive( @@ -513,7 +504,7 @@ def repack_archive( raise typ = archive_type(dest) if not typ: - raise Exception("Archive type not supported for %s" % dest.name) + raise Exception(f"Archive type not supported for {dest.name}") if dest.suffixes[-2:] != [".tar", ".zst"]: raise Exception("Only producing .tar.zst archives is supported.") @@ -525,7 +516,7 @@ def repack_archive( stripped = "/".join(name.split("/")[strip_components:]) if not stripped: raise Exception( - "Stripping %d components would remove files" % strip_components + f"Stripping {strip_components} components would remove files" ) name = stripped return prefix + name @@ -585,7 +576,7 @@ def repack_archive( elif stat.S_ISREG(mode) or stat.S_IFMT(mode) == 0: tar.addfile(tarinfo, zip.open(filename)) else: - raise Exception("Unsupported 
file mode %o" % stat.S_IFMT(mode)) + raise Exception(f"Unsupported file mode {stat.S_IFMT(mode):o}") elif orig_typ == "tar": if typ == "zip": @@ -641,7 +632,7 @@ def fetch_and_extract(url, dest_dir, extract=True, sha256=None, size=None): try: extract_archive(dest_path, dest_dir) - log("Removing %s" % dest_path) + log(f"Removing {dest_path}") dest_path.unlink() except ArchiveTypeNotSupported: pass @@ -683,10 +674,14 @@ def _github_submodule_required(repo: str, commit: str): if e.status == 404: return False # If we get a non 2xx status code that isn't a 404, something has gone horribly wrong on the github side, log it and return True - log("Got {} from github while checking for submodules in {} which was unexpected. Cannot check whether the repo has submodules or not".format(e.status, repo)) + log( + f"Got {e.status} from github while checking for submodules in {repo} which was unexpected. Cannot check whether the repo has submodules or not" + ) return True except Exception as e: - log("Got an unexpected `{}` exception while checking for submodules in {}. Cannot check whether the repo has submodules or not".format(e, repo)) + log( + f"Got an unexpected `{e}` exception while checking for submodules in {repo}. Cannot check whether the repo has submodules or not" + ) return True @@ -718,7 +713,7 @@ def git_checkout_archive( ["git", "ls-remote", repo, "refs/heads/" + commit] ) revision, _ = ref_output.decode().split(maxsplit=1) - log("Fetching revision {}".format(revision)) + log(f"Fetching revision {revision}") return _git_checkout_github_archive(dest_path, repo, commit, prefix) with tempfile.TemporaryDirectory() as td: @@ -729,7 +724,7 @@ def git_checkout_archive( # This could be faster with a shallow clone. However, Git requires a ref # to initiate a clone. Since the commit-ish may not refer to a ref, we # simply perform a full clone followed by a checkout. 
- print("cloning %s to %s" % (repo, git_dir)) + print(f"cloning {repo} to {git_dir}") env = os.environ.copy() keypath = "" @@ -738,7 +733,7 @@ def git_checkout_archive( os.environ.get("TASKCLUSTER_PROXY_URL"), "secrets", "v1", - "secret/{keypath}".format(keypath=ssh_key), + f"secret/{ssh_key}", ) taskcluster_secret = b"".join(stream_download(taskcluster_secret_url)) taskcluster_secret = json.loads(taskcluster_secret) @@ -748,11 +743,7 @@ def git_checkout_archive( keypath.write_text(sshkey) keypath.chmod(0o600) - env = { - "GIT_SSH_COMMAND": "ssh -o 'StrictHostKeyChecking no' -i {keypath}".format( - keypath=keypath - ) - } + env = {"GIT_SSH_COMMAND": f"ssh -o 'StrictHostKeyChecking no' -i {keypath}"} subprocess.run(["git", "clone", "-n", repo, str(git_dir)], check=True, env=env) @@ -795,7 +786,7 @@ def git_checkout_archive( if keypath: os.remove(keypath) - print("creating archive %s of commit %s" % (dest_path, commit)) + print(f"creating archive {dest_path} of commit {commit}") exclude_dot_git = [] if include_dot_git else ["--exclude=.git"] proc = subprocess.Popen( [ @@ -873,7 +864,11 @@ def command_static_url(args): dl_dest, dest, args.strip_components, args.add_prefix ): repack_archive( - dl_dest, dest, args.strip_components, args.add_prefix, args.force_archive + dl_dest, + dest, + args.strip_components, + args.add_prefix, + args.force_archive, ) elif dl_dest != dest: log(f"Renaming {dl_dest} to {dest}") @@ -888,16 +883,14 @@ def command_static_url(args): raise if dl_dest != dest and dl_dest.exists(): - log("Removing %s" % dl_dest) + log(f"Removing {dl_dest}") dl_dest.unlink() def api(root_url, service, version, path): # taskcluster-lib-urls is not available when this script runs, so # simulate its behavior: - return "{root_url}/api/{service}/{version}/{path}".format( - root_url=root_url, service=service, version=version, path=path - ) + return f"{root_url}/api/{service}/{version}/{path}" def get_hash(fetch, root_url): @@ -951,7 +944,7 @@ def command_task_artifacts(args): } ], } - print("PERFHERDER_DATA: {}".format(json.dumps(perfherder_data)), file=sys.stderr) + print(f"PERFHERDER_DATA: {json.dumps(perfherder_data)}", file=sys.stderr) upload_dir = os.environ.get("UPLOAD_DIR") if os.environ.get("MOZ_AUTOMATION", "0") == "1" and upload_dir: upload_path = pathlib.Path(upload_dir) / "perfherder-data-fetch-content.json" @@ -990,7 +983,7 @@ def main(): ) url.add_argument( "--gpg-sig-url", - help="URL containing signed GPG document validating " "URL to fetch", + help="URL containing signed GPG document validating URL to fetch", ) url.add_argument( "--gpg-key-env", help="Environment variable containing GPG key to validate" @@ -1005,7 +998,7 @@ def main(): url.add_argument( "--add-prefix", default="", - help="Prefix to add to file names in the downloaded " "archive", + help="Prefix to add to file names in the downloaded archive", ) url.add_argument( "-H", @@ -1013,7 +1006,7 @@ def main(): default=[], action="append", dest="headers", - help="Header to send as part of the request, can be passed " "multiple times", + help="Header to send as part of the request, can be passed multiple times", ) url.add_argument( "--force-archive", diff --git a/src/taskgraph/run-task/robustcheckout.py b/src/taskgraph/run-task/robustcheckout.py index 153ca0d73..68cece7e3 100644 --- a/src/taskgraph/run-task/robustcheckout.py +++ b/src/taskgraph/run-task/robustcheckout.py @@ -453,7 +453,7 @@ def handlenetworkfailure(): ) # Do a backoff on retries to mitigate the thundering herd - # problem. 
This is an exponential backoff with a multipler + # problem. This is an exponential backoff with a multiplier # plus random jitter thrown in for good measure. # With the default settings, backoffs will be: # 1) 2.5 - 6.5 diff --git a/src/taskgraph/run-task/run-task b/src/taskgraph/run-task/run-task index 42fd4d020..c446df247 100755 --- a/src/taskgraph/run-task/run-task +++ b/src/taskgraph/run-task/run-task @@ -13,33 +13,30 @@ the requested process and prints its output, prefixing it with the current time to improve log usefulness. """ -import sys - -if sys.version_info[0:2] < (3, 5): - print("run-task requires Python 3.5+") - sys.exit(1) - import argparse import datetime import errno +import grp import io import json import os import platform +import pwd import re import shutil -import signal import socket import stat import subprocess +import sys import time import urllib.error import urllib.request from pathlib import Path -from threading import Thread from typing import Optional -SECRET_BASEURL_TPL = "{}/secrets/v1/secret/{{}}".format(os.environ.get("TASKCLUSTER_PROXY_URL", "http://taskcluster").rstrip('/')) +SECRET_BASEURL_TPL = "{}/secrets/v1/secret/{{}}".format( + os.environ.get("TASKCLUSTER_PROXY_URL", "http://taskcluster").rstrip("/") +) GITHUB_SSH_FINGERPRINT = ( b"github.com ssh-ed25519 " @@ -106,7 +103,7 @@ IS_MACOSX = sys.platform == "darwin" IS_POSIX = os.name == "posix" IS_WINDOWS = os.name == "nt" -# Both mercurial and git use sha1 as revision idenfiers. Luckily, both define +# Both mercurial and git use sha1 as revision identifiers. Luckily, both define # the same value as the null revision. # # https://github.com/git/git/blob/dc04167d378fb29d30e1647ff6ff51dd182bc9a3/t/oid-info/hash-info#L7 @@ -148,8 +145,7 @@ def _call_windows_retry(func, args=(), retry_max=5, retry_delay=0.5): retry_count += 1 print( - '%s() failed for "%s". Reason: %s (%s). Retrying...' - % (func.__name__, args, e.strerror, e.errno) + f'{func.__name__}() failed for "{args}". Reason: {e.strerror} ({e.errno}). Retrying...' ) time.sleep(retry_count * retry_delay) else: @@ -209,7 +205,7 @@ def remove(path): and path[1] == ":" and path[2] in (os.pathsep, os.altsep) ): - path = "\\\\?\\%s" % path + path = f"\\\\?\\{path}" if os.path.isfile(path) or os.path.islink(path): # Verify the file or link is read/write for the current user @@ -294,30 +290,27 @@ def run_command(prefix, args, *, extra_env=None, cwd=None): def get_posix_user_group(user, group): - import grp - import pwd - try: user_record = pwd.getpwnam(user) except KeyError: - print("could not find user %s; specify a valid user with --user" % user) + print(f"could not find user {user}; specify a valid user with --user") sys.exit(1) try: group_record = grp.getgrnam(group) except KeyError: - print("could not find group %s; specify a valid group with --group" % group) + print(f"could not find group {group}; specify a valid group with --group") sys.exit(1) # Most tasks use worker:worker. We require they have a specific numeric ID # because otherwise it is too easy for files written to caches to have # mismatched numeric IDs, which results in permissions errors. 
if user_record.pw_name == "worker" and user_record.pw_uid != 1000: - print("user `worker` must have uid=1000; got %d" % user_record.pw_uid) + print(f"user `worker` must have uid=1000; got {user_record.pw_uid}") sys.exit(1) if group_record.gr_name == "worker" and group_record.gr_gid != 1000: - print("group `worker` must have gid=1000; got %d" % group_record.gr_gid) + print(f"group `worker` must have gid=1000; got {group_record.gr_gid}") sys.exit(1) # Find all groups to which this user is a member. @@ -475,16 +468,17 @@ def configure_cache_posix(cache, user, group, untrusted_caches, running_as_root) elif missing: print( - "error: requirements for populated cache %s differ from " - "this task" % cache + f"error: requirements for populated cache {cache} differ from this task" ) print( - "cache requirements: %s" - % " ".join(sorted(s.decode("utf-8") for s in wanted_requirements)) + "cache requirements: {}".format( + " ".join(sorted(s.decode("utf-8") for s in wanted_requirements)) + ) ) print( - "our requirements: %s" - % " ".join(sorted(s.decode("utf-8") for s in our_requirements)) + "our requirements: {}".format( + " ".join(sorted(s.decode("utf-8") for s in our_requirements)) + ) ) if any(s.startswith((b"uid=", b"gid=")) for s in missing): print(CACHE_UID_GID_MISMATCH) @@ -497,7 +491,7 @@ def configure_cache_posix(cache, user, group, untrusted_caches, running_as_root) print("") print("audit log:") - with open(audit_path, "r") as fh: + with open(audit_path) as fh: print(fh.read()) return True @@ -514,10 +508,10 @@ def configure_cache_posix(cache, user, group, untrusted_caches, running_as_root) # happen because run-task should be the first thing that touches a # cache. print( - "error: cache %s is not empty and is missing a " + f"error: cache {cache} is not empty and is missing a " ".cacherequires file; the cache names for this task are " "likely mis-configured or TASKCLUSTER_CACHES is not set " - "properly" % cache + "properly" ) write_audit_entry(audit_path, b"missing .cacherequires") @@ -536,7 +530,7 @@ def configure_volume_posix(volume, user, group, running_as_root): volume_files = os.listdir(volume) if volume_files: print(NON_EMPTY_VOLUME % volume) - print("entries in root directory: %s" % " ".join(sorted(volume_files))) + print("entries in root directory: {}".format(" ".join(sorted(volume_files)))) sys.exit(1) # The volume is almost certainly owned by root:root. Chown it so it @@ -616,9 +610,9 @@ def git_checkout( env["GIT_SSH_COMMAND"] = " ".join( [ "ssh", - "-oIdentityFile={}".format(ssh_key_file.as_posix()), + f"-oIdentityFile={ssh_key_file.as_posix()}", "-oStrictHostKeyChecking=yes", - "-oUserKnownHostsFile={}".format(ssh_known_hosts_file.as_posix()), + f"-oUserKnownHostsFile={ssh_known_hosts_file.as_posix()}", ] ) elif ssh_key_file or ssh_known_hosts_file: @@ -628,7 +622,14 @@ def git_checkout( # Bypass Git's "safe directory" feature as the destination could be # coming from a cache and therefore cloned by a different user. 
-    args = ["git", "config", "--global", "--add", "safe.directory", Path(destination_path).as_posix()]
+    args = [
+        "git",
+        "config",
+        "--global",
+        "--add",
+        "safe.directory",
+        Path(destination_path).as_posix(),
+    ]
     retry_required_command(b"vcs", args, extra_env=env)
 
     if not os.path.exists(destination_path):
@@ -738,18 +739,16 @@ def git_checkout(
         if head_repo.endswith("/"):
             head_repo = head_repo[:-1]
 
-        tinderbox_link = "{}/commit/{}".format(head_repo, commit_hash)
+        tinderbox_link = f"{head_repo}/commit/{commit_hash}"
         repo_name = head_repo.split("/")[-1]
     else:
        tinderbox_link = head_repo
        repo_name = head_repo
     msg = (
-        "TinderboxPrint:"
-        "<a href='{link}' "
-        "title='Built from {name} commit {commit_hash}'>"
-        "{commit_hash}</a>\n".format(
-            commit_hash=commit_hash, link=tinderbox_link, name=repo_name
-        )
+        f"TinderboxPrint:"
+        f"<a href='{tinderbox_link}' "
+        f"title='Built from {repo_name} commit {commit_hash}'>"
+        f"{commit_hash}</a>\n"
     )
 
     print_line(b"vcs", msg.encode("utf-8"))
@@ -795,7 +794,7 @@ def hg_checkout(
         # This is where OCC installs it in the AMIs.
         hg_bin = r"C:\Program Files\Mercurial\hg.exe"
         if not os.path.exists(hg_bin):
-            print("could not find Mercurial executable: %s" % hg_bin)
+            print(f"could not find Mercurial executable: {hg_bin}")
             sys.exit(1)
     else:
         raise RuntimeError("Must be running on mac, posix or windows")
@@ -872,30 +871,30 @@ def add_vcs_arguments(parser, project, name):
     """Adds arguments to ArgumentParser to control VCS options for a project."""
 
     parser.add_argument(
-        "--%s-checkout" % project,
-        help="Directory where %s checkout should be created" % name,
+        f"--{project}-checkout",
+        help=f"Directory where {name} checkout should be created",
     )
     parser.add_argument(
-        "--%s-sparse-profile" % project,
-        help="Path to sparse profile for %s checkout" % name,
+        f"--{project}-sparse-profile",
+        help=f"Path to sparse profile for {name} checkout",
     )
 
 
 def collect_vcs_options(args, project, name):
-    checkout = getattr(args, "%s_checkout" % project)
-    sparse_profile = getattr(args, "%s_sparse_profile" % project)
+    checkout = getattr(args, f"{project}_checkout")
+    sparse_profile = getattr(args, f"{project}_sparse_profile")
 
     env_prefix = project.upper()
 
-    repo_type = os.environ.get("%s_REPOSITORY_TYPE" % env_prefix)
-    base_repo = os.environ.get("%s_BASE_REPOSITORY" % env_prefix)
-    base_ref = os.environ.get("%s_BASE_REF" % env_prefix)
-    base_rev = os.environ.get("%s_BASE_REV" % env_prefix)
-    head_repo = os.environ.get("%s_HEAD_REPOSITORY" % env_prefix)
-    revision = os.environ.get("%s_HEAD_REV" % env_prefix)
-    ref = os.environ.get("%s_HEAD_REF" % env_prefix)
-    pip_requirements = os.environ.get("%s_PIP_REQUIREMENTS" % env_prefix)
-    private_key_secret = os.environ.get("%s_SSH_SECRET_NAME" % env_prefix)
+    repo_type = os.environ.get(f"{env_prefix}_REPOSITORY_TYPE")
+    base_repo = os.environ.get(f"{env_prefix}_BASE_REPOSITORY")
+    base_ref = os.environ.get(f"{env_prefix}_BASE_REF")
+    base_rev = os.environ.get(f"{env_prefix}_BASE_REV")
+    head_repo = os.environ.get(f"{env_prefix}_HEAD_REPOSITORY")
+    revision = os.environ.get(f"{env_prefix}_HEAD_REV")
+    ref = os.environ.get(f"{env_prefix}_HEAD_REF")
+    pip_requirements = os.environ.get(f"{env_prefix}_PIP_REQUIREMENTS")
+    private_key_secret = os.environ.get(f"{env_prefix}_SSH_SECRET_NAME")
 
     store_path = os.environ.get("HG_STORE_PATH")
 
@@ -982,7 +981,7 @@ def vcs_checkout_from_args(options):
     elif options["repo-type"] == "hg":
         if not revision and not ref:
             raise RuntimeError(
-                "Hg requires that at least one of a ref or revision " "is provided"
+                "Hg requires that at least one of a ref or revision is provided"
             )
 
         revision = hg_checkout(
@@ -1001,7 +1000,7 @@
shutil.rmtree(ssh_dir, ignore_errors=True) pass - os.environ["%s_HEAD_REV" % options["env-prefix"]] = revision + os.environ["{}_HEAD_REV".format(options["env-prefix"])] = revision def install_pip_requirements(repositories): @@ -1014,8 +1013,18 @@ def install_pip_requirements(repositories): # TODO: Stop using system Python (#381) if shutil.which("uv"): - user_site_dir = subprocess.run([sys.executable, "-msite", "--user-site"], capture_output=True, text=True).stdout.strip() - cmd = ["uv", "pip", "install", "--python", sys.executable, "--target", user_site_dir] + user_site_dir = subprocess.run( + [sys.executable, "-msite", "--user-site"], capture_output=True, text=True + ).stdout.strip() + cmd = [ + "uv", + "pip", + "install", + "--python", + sys.executable, + "--target", + user_site_dir, + ] else: cmd = [sys.executable, "-mpip", "install", "--user", "--break-system-packages"] @@ -1032,19 +1041,27 @@ def _display_python_versions(): print_line( b"setup", b"Python version: %s\n" % platform.python_version().encode("utf-8") ) - + print_line(b"setup", b"Subprocess python version: " + b"\n") try: - subprocess.run(["uv", "python", "list","--no-cache","--only-installed"], check=True) + subprocess.run( + ["uv", "python", "list", "--no-cache", "--only-installed"], check=True + ) except (FileNotFoundError, subprocess.CalledProcessError): - - python_versions = ["python","python3"] + python_versions = ["python", "python3"] for python_executable in python_versions: try: - subprocess.run([python_executable, "-c", "import platform; print(platform.python_version())"], check=True) + subprocess.run( + [ + python_executable, + "-c", + "import platform; print(platform.python_version())", + ], + check=True, + ) except FileNotFoundError: pass @@ -1133,10 +1150,7 @@ def main(args): for cache in caches: if not os.path.isdir(cache): - print( - "error: cache %s is not a directory; this should never " - "happen" % cache - ) + print(f"error: cache {cache} is not a directory; this should never happen") return 1 purge = configure_cache_posix( @@ -1189,7 +1203,7 @@ def main(args): # special cache files in the cache's root directory and working # directory purging could blow them away, disallow this scenario. if os.path.exists(os.path.join(checkout, ".cacherequires")): - print("error: cannot perform vcs checkout into cache root: %s" % checkout) + print(f"error: cannot perform vcs checkout into cache root: {checkout}") sys.exit(1) # TODO given the performance implications, consider making this a fatal @@ -1281,8 +1295,7 @@ def main(args): "UV_CACHE_DIR", "npm_config_cache", ] + [ - "{}_PATH".format(repository["project"].upper()) - for repository in repositories + "{}_PATH".format(repository["project"].upper()) for repository in repositories ]: if k in os.environ: os.environ[k] = os.path.abspath(os.environ[k])
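
Note on the %-formatting to f-string conversions above: they are intended to be purely mechanical, with each conversion specifier mapped to an equivalent format spec, so the rendered strings should be unchanged for the value types involved (str, int, float, Path). A minimal sanity check of the specifier mappings used throughout this patch, with hypothetical values that are not part of the patch itself:

    duration, attempt, attempts, mode = 1.23456, 2, 5, 0o100644
    url = "https://example.com/artifact.tar.zst"  # placeholder URL
    assert "%.3fs" % duration == f"{duration:.3f}s"                                  # %.Nf -> {:.Nf}
    assert "attempt %i/%i" % (attempt, attempts) == f"attempt {attempt}/{attempts}"  # %i, %d -> {}
    assert "file mode %o" % mode == f"file mode {mode:o}"                            # %o -> {:o}
    assert "Downloading %s" % url == f"Downloading {url}"                            # %s -> {}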