3 changes: 3 additions & 0 deletions .pre-commit-config.yaml
@@ -41,8 +41,11 @@ repos:
     hooks:
       - id: conventional-pre-commit
         stages: [commit-msg]
+# TODO remove 'fetch-content' once Gecko no longer needs to use it with Python 3.8
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1990567#c7
 exclude: |
   (?x)^(
+    src/taskgraph/run-task/fetch-content |
     src/taskgraph/run-task/robustcheckout.py |
     taskcluster/scripts/external_tools
   )
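
pre-commit treats `exclude` values as Python regular expressions matched against each file's path, with `(?x)` enabling verbose mode so the whitespace and line breaks in the pattern are ignored. The snippet below is a standalone illustration (not code from the repo) showing that the pattern now matches the fetch-content script while leaving other sources unaffected:

```python
import re

# The exclude pattern added above; (?x) verbose mode ignores the whitespace,
# so this is just an anchored alternation of three paths.
PATTERN = r"""(?x)^(
    src/taskgraph/run-task/fetch-content |
    src/taskgraph/run-task/robustcheckout.py |
    taskcluster/scripts/external_tools
)"""

assert re.search(PATTERN, "src/taskgraph/run-task/fetch-content")
assert not re.search(PATTERN, "src/taskgraph/transforms/task.py")
```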
10 changes: 10 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,15 @@
 # Change Log
 
+## [17.1.0] - 2025-10-22
+
+### Added
+
+- Support for "graph_config" verifications
+
+### Fixed
+
+- Added Python 3.8 support back to `fetch-content` script for Gecko workaround
+
 ## [17.0.0] - 2025-10-17
 
 ### Changed
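
The "graph_config" entry refers to taskgraph's verification registry in `taskgraph.util.verify`. This diff does not show the new API itself, so the sketch below is hypothetical only: it assumes the new kind is registered through the existing `verifications.add` decorator and that the callback receives the graph config object, mirroring how the existing "parameters" verification receives the parameters.

```python
# Hypothetical sketch -- the verifications registry and its `add` decorator
# exist in taskgraph.util.verify, but the callback signature for the new
# "graph_config" kind is an assumption, not something this PR confirms.
from taskgraph.util.verify import verifications


@verifications.add("graph_config")
def verify_trust_domain(graph_config):
    # Example check: fail graph generation early if a required key is absent.
    if "trust-domain" not in graph_config:
        raise Exception("graph config is missing 'trust-domain'")
```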
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,7 +1,7 @@
 ### Project
 [project]
 name = "taskcluster-taskgraph"
-version = "17.0.0"
+version = "17.1.0"
 description = "Build taskcluster taskgraphs"
 readme = "README.rst"
 authors = [
40 changes: 15 additions & 25 deletions src/taskgraph/run-task/fetch-content
@@ -413,12 +413,9 @@ def extract_archive(path, dest_dir):
         raise ValueError(f"unknown archive format: {path}")
 
     if args:
-        with (
-            ifh,
-            subprocess.Popen(
-                args, cwd=str(dest_dir), bufsize=0, stdin=subprocess.PIPE
-            ) as p,
-        ):
+        with ifh, subprocess.Popen(
+            args, cwd=str(dest_dir), bufsize=0, stdin=subprocess.PIPE
+        ) as p:
             while True:
                 if not pipe_stdin:
                     break
@@ -530,13 +527,10 @@ def repack_archive(
     with rename_after_close(dest, "wb") as fh:
         ctx = ZstdCompressor()
         if orig_typ in ("exec", None):
-            with (
-                ctx.stream_writer(fh) as compressor,
-                tarfile.open(
-                    fileobj=compressor,
-                    mode="w:",
-                ) as tar,
-            ):
+            with ctx.stream_writer(fh) as compressor, tarfile.open(
+                fileobj=compressor,
+                mode="w:",
+            ) as tar:
                 tarinfo = tarfile.TarInfo()
                 tarinfo.name = filter(orig.name) if filter else orig.name
                 st = orig.stat()
@@ -549,10 +543,9 @@
             assert typ == "tar"
             zip = zipfile.ZipFile(ifh)
             # Convert the zip stream to a tar on the fly.
-            with (
-                ctx.stream_writer(fh) as compressor,
-                tarfile.open(fileobj=compressor, mode="w:") as tar,
-            ):
+            with ctx.stream_writer(fh) as compressor, tarfile.open(
+                fileobj=compressor, mode="w:"
+            ) as tar:
                 for zipinfo in zip.infolist():
                     if zipinfo.is_dir():
                         continue
@@ -594,14 +587,11 @@ def repack_archive(
             # To apply the filter, we need to open the tar stream and
             # tweak it.
            origtar = tarfile.open(fileobj=ifh, mode="r|")
-            with (
-                ctx.stream_writer(fh) as compressor,
-                tarfile.open(
-                    fileobj=compressor,
-                    mode="w:",
-                    format=origtar.format,
-                ) as tar,
-            ):
+            with ctx.stream_writer(fh) as compressor, tarfile.open(
+                fileobj=compressor,
+                mode="w:",
+                format=origtar.format,
+            ) as tar:
                 for tarinfo in origtar:
                     if tarinfo.isdir():
                         continue
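
The fetch-content hunks above replace parenthesized multi-item `with` statements, a Python 3.10+ syntax feature that raises a SyntaxError on Python 3.8, with the older comma-separated form, using the inner call's open parenthesis for line continuation. A minimal standalone illustration of the two spellings (not code from the script):

```python
import tempfile

# Python 3.8-compatible: context managers joined by commas on one logical
# line; the open call parenthesis provides the line continuation.
with tempfile.TemporaryFile() as a, tempfile.TemporaryFile() as b:
    a.write(b"hello")
    b.write(b"world")

# The form removed by this PR is rejected by Python 3.8's parser:
#
#     with (
#         tempfile.TemporaryFile() as a,
#         tempfile.TemporaryFile() as b,
#     ):
#         ...
```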
2 changes: 1 addition & 1 deletion uv.lock

(uv.lock is a generated file; its diff is not rendered.)