Skip to content

Commit 41211fd

Browse files
fix: replaced taskcluster rest api calls with package
2 parents 351ad5d + 7377f2b commit 41211fd

17 files changed

+1736
-674
lines changed

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ dependencies = [
2929
"redo>=2.0",
3030
"requests>=2.25",
3131
"slugid>=2.0",
32+
"taskcluster>=55.0",
3233
"taskcluster-urls>=11.0",
3334
"voluptuous>=0.12.1",
3435
]

src/taskgraph/actions/cancel.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ def cancel_action(parameters, graph_config, input, task_group_id, task_id):
2626
# Note that this is limited by the scopes afforded to generic actions to
2727
# only cancel tasks with the level-specific schedulerId.
2828
try:
29-
cancel_task(task_id, use_proxy=True)
29+
cancel_task(task_id)
3030
except requests.HTTPError as e:
3131
if e.response.status_code == 409:
3232
# A 409 response indicates that this task is past its deadline. It

src/taskgraph/actions/cancel_all.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ def cancel_all_action(parameters, graph_config, input, task_group_id, task_id):
3535
def do_cancel_task(task_id):
3636
logger.info(f"Cancelling task {task_id}")
3737
try:
38-
cancel_task(task_id, use_proxy=True)
38+
cancel_task(task_id)
3939
except requests.HTTPError as e:
4040
if e.response.status_code == 409:
4141
# A 409 response indicates that this task is past its deadline. It

src/taskgraph/actions/retrigger.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -147,7 +147,7 @@ def retrigger_action(parameters, graph_config, input, task_group_id, task_id):
147147
)
148148

149149
task = taskcluster.get_task_definition(task_id)
150-
label = task["metadata"]["name"]
150+
label = task["metadata"]["name"] # type: ignore
151151

152152
with_downstream = " "
153153
to_run = [label]
@@ -201,7 +201,7 @@ def rerun_action(parameters, graph_config, input, task_group_id, task_id):
201201
decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels(
202202
parameters, graph_config, task_group_id=task_group_id
203203
)
204-
label = task["metadata"]["name"]
204+
label = task["metadata"]["name"] # type: ignore
205205
if task_id not in label_to_taskid.values():
206206
logger.error(
207207
f"Refusing to rerun {label}: taskId {task_id} not in decision task {decision_task_id} label_to_taskid!"

src/taskgraph/actions/util.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -56,12 +56,10 @@ def fetch_graph_and_labels(parameters, graph_config, task_group_id=None):
5656
# for old ones
5757
def fetch_action(task_id):
5858
logger.info(f"fetching label-to-taskid.json for action task {task_id}")
59-
try:
60-
run_label_to_id = get_artifact(task_id, "public/label-to-taskid.json")
59+
run_label_to_id = get_artifact(task_id, "public/label-to-taskid.json")
60+
if label_to_taskid and run_label_to_id:
6161
label_to_taskid.update(run_label_to_id)
62-
except HTTPError as e:
63-
if e.response.status_code != 404:
64-
raise
62+
else:
6563
logger.debug(f"No label-to-taskid.json found for {task_id}: {e}")
6664

6765
# for backwards compatibility, look up actions via pushlog-id
@@ -84,7 +82,7 @@ def fetch_cron(task_id):
8482
logger.info(f"fetching label-to-taskid.json for cron task {task_id}")
8583
try:
8684
run_label_to_id = get_artifact(task_id, "public/label-to-taskid.json")
87-
label_to_taskid.update(run_label_to_id)
85+
label_to_taskid.update(run_label_to_id) # type: ignore
8886
except HTTPError as e:
8987
if e.response.status_code != 404:
9088
raise

src/taskgraph/create.py

Lines changed: 3 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -4,15 +4,14 @@
44

55

66
import logging
7-
import os
87
import sys
98
from concurrent import futures
109

1110
from slugid import nice as slugid
1211

1312
from taskgraph.util import json
1413
from taskgraph.util.parameterization import resolve_timestamps
15-
from taskgraph.util.taskcluster import CONCURRENCY, get_session
14+
from taskgraph.util.taskcluster import CONCURRENCY, get_session, get_taskcluster_client
1615
from taskgraph.util.time import current_json_time
1716

1817
logger = logging.getLogger(__name__)
@@ -104,9 +103,6 @@ def submit(task_id, label, task_def):
104103

105104

106105
def create_task(session, task_id, label, task_def):
107-
# create the task using 'http://taskcluster/queue', which is proxied to the queue service
108-
# with credentials appropriate to this task.
109-
110106
# Resolve timestamps
111107
now = current_json_time(datetime_format=True)
112108
task_def = resolve_timestamps(now, task_def)
@@ -123,16 +119,5 @@ def create_task(session, task_id, label, task_def):
123119
return
124120

125121
logger.info(f"Creating task with taskId {task_id} for {label}")
126-
proxy_url = os.environ.get("TASKCLUSTER_PROXY_URL", "http://taskcluster").rstrip(
127-
"/"
128-
)
129-
res = session.put(
130-
f"{proxy_url}/queue/v1/task/{task_id}",
131-
json=task_def,
132-
)
133-
if res.status_code != 200:
134-
try:
135-
logger.error(res.json()["message"])
136-
except Exception:
137-
logger.error(res.text)
138-
res.raise_for_status()
122+
queue = get_taskcluster_client("queue")
123+
queue.createTask(task_id, task_def)

src/taskgraph/docker.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -425,9 +425,10 @@ def load_task(
425425

426426
if "payload" not in task_def or not (image := task_def["payload"].get("image")):
427427
print("Tasks without a `payload.image` are not supported!")
428+
428429
return 1
429430

430-
command = task_def["payload"].get("command")
431+
command = task_def["payload"].get("command") # type: ignore
431432
if not command or not command[0].endswith("run-task"):
432433
print("Only tasks using `run-task` are supported!")
433434
return 1
@@ -470,12 +471,12 @@ def load_task(
470471
# Set some env vars the worker would normally set.
471472
env = {
472473
"RUN_ID": "0",
473-
"TASK_GROUP_ID": task_def.get("taskGroupId", ""),
474+
"TASK_GROUP_ID": task_def.get("taskGroupId", ""), # type: ignore
474475
"TASK_ID": task_id,
475-
"TASKCLUSTER_ROOT_URL": get_root_url(False),
476+
"TASKCLUSTER_ROOT_URL": get_root_url(),
476477
}
477478
# Add the task's environment variables.
478-
env.update(task_def["payload"].get("env", {}))
479+
env.update(task_def["payload"].get("env", {})) # type: ignore
479480

480481
# run-task expects the worker to mount a volume for each path defined in
481482
# TASKCLUSTER_CACHES, delete them to avoid needing to do the same.

src/taskgraph/optimize/strategies.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -48,14 +48,15 @@ def should_replace_task(self, task, params, deadline, arg):
4848
status = status_task(task_id)
4949
# status can be `None` if we're in `testing` mode
5050
# (e.g. test-action-callback)
51-
if not status or status.get("state") in ("exception", "failed"):
51+
if not status or status.get("state") in ("exception", "failed"): # type: ignore
5252
logger.debug(
5353
f"not replacing {task.label} with {task_id} because it is in failed or exception state"
5454
)
5555
continue
5656

5757
if deadline and datetime.strptime(
58-
status["expires"], self.fmt
58+
status["expires"], # type: ignore
59+
self.fmt,
5960
) < datetime.strptime(deadline, self.fmt):
6061
logger.debug(
6162
f"not replacing {task.label} with {task_id} because it expires before {deadline}"

src/taskgraph/transforms/run/run_task.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -183,9 +183,8 @@ def script_url(config, script):
183183
if "MOZ_AUTOMATION" in os.environ and "TASK_ID" not in os.environ:
184184
raise Exception("TASK_ID must be defined to use run-task on generic-worker")
185185
task_id = os.environ.get("TASK_ID", "<TASK_ID>")
186-
# use_proxy = False to avoid having all generic-workers turn on proxy
187186
# Assumes the cluster allows anonymous downloads of public artifacts
188-
tc_url = taskcluster.get_root_url(False)
187+
tc_url = taskcluster.get_root_url()
189188
# TODO: Use util/taskcluster.py:get_artifact_url once hack for Bug 1405889 is removed
190189
return f"{tc_url}/api/queue/v1/task/{task_id}/artifacts/public/{script}"
191190

src/taskgraph/util/parameterization.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -90,11 +90,7 @@ def repl(match):
9090
f"task '{label}' has no dependency named '{dependency}'"
9191
)
9292

93-
use_proxy = False
94-
if not artifact_name.startswith("public/"):
95-
use_proxy = True
96-
97-
return get_artifact_url(task_id, artifact_name, use_proxy=use_proxy)
93+
return get_artifact_url(task_id, artifact_name)
9894

9995
return ARTIFACT_REFERENCE_PATTERN.sub(repl, val)
10096

0 commit comments

Comments (0)