pyproject.toml (3 changes: 1 addition & 2 deletions)

@@ -11,15 +11,14 @@ classifiers = [
     "Development Status :: 5 - Production/Stable",
     "Environment :: Console",
     "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
-    "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
     "Programming Language :: Python :: 3.13",
     "Topic :: Software Development",
 ]
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 dependencies = [
     "appdirs>=1.4",
     "cookiecutter~=2.1",

src/taskgraph/actions/registry.py (4 changes: 2 additions & 2 deletions)

@@ -30,13 +30,13 @@ def is_json(data):
     return True


-@functools.lru_cache(maxsize=None)
+@functools.cache
 def read_taskcluster_yml(filename):
     """Load and parse .taskcluster.yml, cached to save some time"""
     return yaml.load_yaml(filename)


-@functools.lru_cache(maxsize=None)
+@functools.cache
 def hash_taskcluster_yml(filename):
     """
     Generate a hash of the given .taskcluster.yml. This is the first 10 digits

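Note: an illustrative sketch (not part of this diff) of the behavior the lru_cache-to-cache swap relies on. functools.cache was added in Python 3.9 and is equivalent to functools.lru_cache(maxsize=None): an unbounded memoization cache keyed on the call arguments, so a file like .taskcluster.yml is parsed only once per distinct path. read_config below is a hypothetical example, not a taskgraph function.

import functools


@functools.cache
def read_config(path: str) -> str:
    # The body executes once per distinct `path`; later calls with the same
    # argument return the cached result without touching the disk again.
    print(f"reading {path}")
    with open(path) as fh:
        return fh.read()
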
src/taskgraph/config.py (3 changes: 1 addition & 2 deletions)

@@ -8,7 +8,6 @@
 import sys
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Dict

 from voluptuous import ALLOW_EXTRA, All, Any, Extra, Length, Optional, Required

@@ -113,7 +112,7 @@

 @dataclass(frozen=True, eq=False)
 class GraphConfig:
-    _config: Dict
+    _config: dict
     root_dir: str

     _PATH_MODIFIED = False

src/taskgraph/docker.py (15 changes: 8 additions & 7 deletions)

@@ -11,10 +11,11 @@
 import sys
 import tarfile
 import tempfile
+from collections.abc import Generator
 from io import BytesIO
 from pathlib import Path
 from textwrap import dedent
-from typing import Any, Dict, Generator, List, Optional, Union
+from typing import Any, Optional, Union

 from taskcluster.exceptions import TaskclusterRestFailure

@@ -246,7 +247,7 @@ def build_image(

 def load_image(
     url: str, imageName: Optional[str] = None, imageTag: Optional[str] = None
-) -> Dict[str, str]:
+) -> dict[str, str]:
     """Load docker image from URL as imageName:tag.

     Downloads a zstd-compressed docker image tarball from the given URL and
@@ -286,7 +287,7 @@ def load_image(
     else:
         imageTag = "latest"

-    info: Dict[str, str] = {}
+    info: dict[str, str] = {}

     def download_and_modify_image() -> Generator[bytes, None, None]:
         # This function downloads and edits the downloaded tar file on the fly.
@@ -377,14 +378,14 @@ def download_and_modify_image() -> Generator[bytes, None, None]:
     return info


-def _index(l: List, s: str) -> Optional[int]:
+def _index(l: list, s: str) -> Optional[int]:
     try:
         return l.index(s)
     except ValueError:
         pass


-def _resolve_image(image: Union[str, Dict[str, str]], graph_config: GraphConfig) -> str:
+def _resolve_image(image: Union[str, dict[str, str]], graph_config: GraphConfig) -> str:
     image_task_id = None

     # Standard case, image comes from the task definition.
@@ -419,12 +420,12 @@

 def load_task(
     graph_config: GraphConfig,
-    task: Union[str, Dict[str, Any]],
+    task: Union[str, dict[str, Any]],
     remove: bool = True,
     user: Optional[str] = None,
     custom_image: Optional[str] = None,
     interactive: Optional[bool] = False,
-    volumes: Optional[Dict[str, str]] = None,
+    volumes: Optional[dict[str, str]] = None,
 ) -> int:
     """Load and run a task interactively in a Docker container.

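Note: an illustrative sketch (not part of this diff) of the annotation style these hunks adopt. On Python 3.9+ the builtin containers (dict, list, set, frozenset) and the collections.abc ABCs such as Generator accept subscripts directly (PEP 585), so the typing.Dict/List/Set/Generator aliases are no longer needed; Optional and Union still come from typing because the `X | Y` syntax only arrives in 3.10. The helper below is hypothetical.

from collections.abc import Generator
from typing import Any, Optional, Union


def iter_labels(
    task: Union[str, dict[str, Any]], limit: Optional[int] = None
) -> Generator[str, None, None]:
    # Accept either a label string or a task-like mapping, mirroring the
    # Union[str, dict[...]] signatures in the diff above.
    labels = [task] if isinstance(task, str) else list(task.keys())
    for label in labels[:limit]:
        yield label
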
src/taskgraph/generator.py (4 changes: 2 additions & 2 deletions)

@@ -14,7 +14,7 @@
     wait,
 )
 from dataclasses import dataclass
-from typing import Callable, Dict, Optional, Union
+from typing import Callable, Optional, Union

 from . import filter_tasks
 from .config import GraphConfig, load_graph_config
@@ -42,7 +42,7 @@ class KindNotFound(Exception):
 class Kind:
     name: str
     path: str
-    config: Dict
+    config: dict
     graph_config: GraphConfig

     def _get_loader(self) -> Callable:

src/taskgraph/graph.py (5 changes: 2 additions & 3 deletions)

@@ -5,7 +5,6 @@

 import collections
 from dataclasses import dataclass
-from typing import FrozenSet


 @dataclass(frozen=True)
@@ -24,8 +23,8 @@ class Graph:
     node `left` to node `right`..
     """

-    nodes: FrozenSet
-    edges: FrozenSet
+    nodes: frozenset
+    edges: frozenset

     def transitive_closure(self, nodes, reverse=False):
         """Return the transitive closure of <nodes>: the graph containing all

src/taskgraph/main.py (4 changes: 2 additions & 2 deletions)

@@ -16,7 +16,7 @@
 from concurrent.futures import ProcessPoolExecutor, as_completed
 from pathlib import Path
 from textwrap import dedent
-from typing import Any, List
+from typing import Any
 from urllib.parse import urlparse

 import appdirs
@@ -412,7 +412,7 @@ def show_taskgraph(options):
     overrides = {
         "target-kinds": options.get("target_kinds"),
     }
-    parameters: List[Any[str, Parameters]] = options.pop("parameters")
+    parameters: list[Any[str, Parameters]] = options.pop("parameters")
     if not parameters:
         parameters = [
             parameters_loader(None, strict=False, overrides=overrides)

src/taskgraph/optimize/base.py (7 changes: 3 additions & 4 deletions)

@@ -15,7 +15,6 @@
 import logging
 from abc import ABCMeta, abstractmethod
 from collections import defaultdict
-from typing import Dict, Set

 from slugid import nice as slugid

@@ -367,9 +366,9 @@ def replace_tasks(

 def get_subgraph(
     target_task_graph: TaskGraph,
-    removed_tasks: Set[str],
-    replaced_tasks: Set[str],
-    label_to_taskid: Dict[str, str],
+    removed_tasks: set[str],
+    replaced_tasks: set[str],
+    label_to_taskid: dict[str, str],
     decision_task_id: str,
 ):
     """

src/taskgraph/run-task/fetch-content (40 changes: 25 additions & 15 deletions)

@@ -413,9 +413,12 @@ def extract_archive(path, dest_dir):
         raise ValueError(f"unknown archive format: {path}")

     if args:
-        with ifh, subprocess.Popen(
-            args, cwd=str(dest_dir), bufsize=0, stdin=subprocess.PIPE
-        ) as p:
+        with (
+            ifh,
+            subprocess.Popen(
+                args, cwd=str(dest_dir), bufsize=0, stdin=subprocess.PIPE
+            ) as p,
+        ):
             while True:
                 if not pipe_stdin:
                     break
@@ -527,10 +530,13 @@ def repack_archive(
     with rename_after_close(dest, "wb") as fh:
         ctx = ZstdCompressor()
         if orig_typ in ("exec", None):
-            with ctx.stream_writer(fh) as compressor, tarfile.open(
-                fileobj=compressor,
-                mode="w:",
-            ) as tar:
+            with (
+                ctx.stream_writer(fh) as compressor,
+                tarfile.open(
+                    fileobj=compressor,
+                    mode="w:",
+                ) as tar,
+            ):
                 tarinfo = tarfile.TarInfo()
                 tarinfo.name = filter(orig.name) if filter else orig.name
                 st = orig.stat()
@@ -543,9 +549,10 @@
             assert typ == "tar"
             zip = zipfile.ZipFile(ifh)
             # Convert the zip stream to a tar on the fly.
-            with ctx.stream_writer(fh) as compressor, tarfile.open(
-                fileobj=compressor, mode="w:"
-            ) as tar:
+            with (
+                ctx.stream_writer(fh) as compressor,
+                tarfile.open(fileobj=compressor, mode="w:") as tar,
+            ):
                 for zipinfo in zip.infolist():
                     if zipinfo.is_dir():
                         continue
@@ -587,11 +594,14 @@ def repack_archive(
             # To apply the filter, we need to open the tar stream and
             # tweak it.
             origtar = tarfile.open(fileobj=ifh, mode="r|")
-            with ctx.stream_writer(fh) as compressor, tarfile.open(
-                fileobj=compressor,
-                mode="w:",
-                format=origtar.format,
-            ) as tar:
+            with (
+                ctx.stream_writer(fh) as compressor,
+                tarfile.open(
+                    fileobj=compressor,
+                    mode="w:",
+                    format=origtar.format,
+                ) as tar,
+            ):
                 for tarinfo in origtar:
                     if tarinfo.isdir():
                         continue

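Note: an illustrative sketch (not part of this diff) of the parenthesized with-statement used in the hunks above. Wrapping several context managers in parentheses lets each one sit on its own line without backslash continuations; CPython accepts this form from 3.9 onward thanks to the PEG parser, and it became an official part of the grammar in Python 3.10. The function below is a hypothetical example.

import shutil


def concatenate(first: str, second: str, dest: str) -> None:
    # Each context manager gets its own line inside the parentheses,
    # mirroring the style adopted in fetch-content above.
    with (
        open(first, "rb") as src_a,
        open(second, "rb") as src_b,
        open(dest, "wb") as out,
    ):
        shutil.copyfileobj(src_a, out)
        shutil.copyfileobj(src_b, out)
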
src/taskgraph/task.py (14 changes: 7 additions & 7 deletions)

@@ -3,7 +3,7 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

 from dataclasses import dataclass, field
-from typing import Any, Dict, List, Union
+from typing import Any, Union


 @dataclass
@@ -33,14 +33,14 @@ class Task:

     kind: str
     label: str
-    attributes: Dict
-    task: Dict
+    attributes: dict
+    task: dict
     description: str = ""
     task_id: Union[str, None] = field(default=None, init=False)
-    optimization: Union[Dict[str, Any], None] = field(default=None)
-    dependencies: Dict = field(default_factory=dict)
-    soft_dependencies: List = field(default_factory=list)
-    if_dependencies: List = field(default_factory=list)
+    optimization: Union[dict[str, Any], None] = field(default=None)
+    dependencies: dict = field(default_factory=dict)
+    soft_dependencies: list = field(default_factory=list)
+    if_dependencies: list = field(default_factory=list)

     def __post_init__(self):
         self.attributes["kind"] = self.kind

src/taskgraph/taskgraph.py (3 changes: 1 addition & 2 deletions)

@@ -3,7 +3,6 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

 from dataclasses import dataclass
-from typing import Dict

 from .graph import Graph
 from .task import Task
@@ -21,7 +20,7 @@ class TaskGraph:
     tasks are "linked from" their dependents.
     """

-    tasks: Dict[str, Task]
+    tasks: dict[str, Task]
     graph: Graph

     def __post_init__(self):

src/taskgraph/transforms/base.py (10 changes: 5 additions & 5 deletions)

@@ -6,7 +6,7 @@
 import functools
 import re
 from dataclasses import dataclass, field
-from typing import Dict, List, Union
+from typing import Union

 from taskgraph.task import Task

@@ -42,14 +42,14 @@ class TransformConfig:
     path: str

     # the parsed contents of kind.yml
-    config: Dict
+    config: dict

     # the parameters for this task-graph generation run
     params: Parameters

     # a dict of all the tasks associated with the kind dependencies of the
     # current kind
-    kind_dependencies_tasks: Dict[str, Task]
+    kind_dependencies_tasks: dict[str, Task]

     # Global configuration of the taskgraph
     graph_config: GraphConfig
@@ -58,7 +58,7 @@ class TransformConfig:
     write_artifacts: bool

     @property
-    @functools.lru_cache(maxsize=None)
+    @functools.cache
     def repo_configs(self):
         repositories = self.graph_config["taskgraph"]["repositories"]
         if len(repositories) == 1:
@@ -119,7 +119,7 @@ class TransformSequence:
     sequence.
     """

-    _transforms: List = field(default_factory=list)
+    _transforms: list = field(default_factory=list)

     def __call__(self, config, items):
         for xform in self._transforms:

src/taskgraph/transforms/run/common.py (6 changes: 3 additions & 3 deletions)

@@ -7,7 +7,7 @@
 consistency.
 """

-from typing import Any, Dict, List, Union
+from typing import Any, Union

 from taskgraph.transforms.base import TransformConfig
 from taskgraph.util import json, path
@@ -139,7 +139,7 @@ def support_vcs_checkout(config, task, taskdesc, repo_configs, sparse=False):

 def should_use_cache(
     name: str,
-    use_caches: Union[bool, List[str]],
+    use_caches: Union[bool, list[str]],
     has_checkout: bool,
 ) -> bool:
     # Never enable the checkout cache if there's no clone. This allows
@@ -155,7 +155,7 @@


 def support_caches(
-    config: TransformConfig, task: Dict[str, Any], taskdesc: Dict[str, Any]
+    config: TransformConfig, task: dict[str, Any], taskdesc: dict[str, Any]
 ):
     """Add caches for common tools."""
     run = task["run"]