Skip to content

Commit f13d1ee

Browse files
committed
Allow artifact base names for S3 uploads to differ from GL canonical names
Related: #4268
Signed-off-by: Tobias Wolf <wolf@b1-systems.de>
On-behalf-of: SAP <tobias.wolf@sap.com>
1 parent 68c1f3f commit f13d1ee

File tree

5 files changed

+61
-58
lines changed

5 files changed

+61
-58
lines changed

src/gardenlinux/s3/__main__.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,6 @@
99

1010
from .s3_artifacts import S3Artifacts
1111

12-
_ARGS_ACTION_ALLOWED = [
13-
"download-artifacts-from-bucket",
14-
"upload-artifacts-to-bucket",
15-
]
16-
1712

1813
def main() -> None:
1914
"""
@@ -25,17 +20,22 @@ def main() -> None:
2520
parser = argparse.ArgumentParser()
2621

2722
parser.add_argument("--bucket", dest="bucket")
28-
parser.add_argument("--cname", required=False, dest="cname")
2923
parser.add_argument("--path", required=False, dest="path")
3024
parser.add_argument("--dry-run", action="store_true")
3125

32-
parser.add_argument("action", nargs="?", choices=_ARGS_ACTION_ALLOWED)
26+
subparsers = parser.add_subparsers(dest="action")
27+
28+
download_parser = subparsers.add_parser("download-artifacts-from-bucket")
29+
download_parser.add_argument("--cname", required=False, dest="cname")
30+
31+
upload_parser = subparsers.add_parser("upload-artifacts-to-bucket")
32+
upload_parser.add_argument("--artifact-name", required=False, dest="artifact_name")
3333

3434
args = parser.parse_args()
3535

3636
if args.action == "download-artifacts-from-bucket":
3737
S3Artifacts(args.bucket).download_to_directory(args.cname, args.path)
3838
elif args.action == "upload-artifacts-to-bucket":
3939
S3Artifacts(args.bucket).upload_from_directory(
40-
args.cname, args.path, dry_run=args.dry_run
40+
args.artifact_name, args.path, dry_run=args.dry_run
4141
)

src/gardenlinux/s3/s3_artifacts.py

Lines changed: 37 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -98,15 +98,15 @@ def download_to_directory(
9898

9999
def upload_from_directory(
100100
self,
101-
cname: str,
101+
base_name: str,
102102
artifacts_dir: PathLike[str] | str,
103103
delete_before_push: bool = False,
104104
dry_run: bool = False,
105105
) -> None:
106106
"""
107107
Pushes S3 artifacts to the underlying bucket.
108108
109-
:param cname: Canonical name of the GardenLinux S3 artifacts
109+
:param base_name: Base name of the GardenLinux S3 artifacts
110110
:param artifacts_dir: Path of the image artifacts
111111
:param delete_before_push: True to delete objects before upload
112112
@@ -115,35 +115,32 @@ def upload_from_directory(
115115

116116
artifacts_dir = Path(artifacts_dir)
117117

118-
cname_object = CName(cname)
119-
120118
if not artifacts_dir.is_dir():
121119
raise RuntimeError(f"Artifacts directory given is invalid: {artifacts_dir}")
122120

123-
release_file = artifacts_dir.joinpath(f"{cname}.release")
124-
release_timestamp = stat(release_file).st_ctime
125-
126-
cname_object.load_from_release_file(release_file)
121+
release_file = artifacts_dir.joinpath(f"{base_name}.release")
127122

128-
if cname_object.arch is None:
129-
raise RuntimeError(
130-
"Architecture could not be determined from GardenLinux canonical name or release file"
131-
)
123+
cname_object = CName.new_from_release_file(release_file)
132124

133125
if cname_object.version_and_commit_id is None:
134126
raise RuntimeError(
135-
"Version information could not be determined from GardenLinux canonical name or release file"
127+
"Version information could not be determined from release file"
136128
)
137129

130+
arch = cname_object.arch
138131
feature_list = cname_object.feature_set
139-
requirements_file = artifacts_dir.joinpath(f"{cname}.requirements")
132+
release_timestamp = stat(release_file).st_ctime
133+
requirements_file = artifacts_dir.joinpath(f"{base_name}.requirements")
140134
require_uefi = None
141135
secureboot = None
142136

143137
if requirements_file.exists():
144138
requirements_config = ConfigParser(allow_unnamed_section=True)
145139
requirements_config.read(requirements_file)
146140

141+
if requirements_config.has_option(UNNAMED_SECTION, "arch"):
142+
arch = requirements_config.get(UNNAMED_SECTION, "arch")
143+
147144
if requirements_config.has_option(UNNAMED_SECTION, "uefi"):
148145
require_uefi = requirements_config.getboolean(UNNAMED_SECTION, "uefi")
149146

@@ -152,16 +149,25 @@ def upload_from_directory(
152149
UNNAMED_SECTION, "secureboot"
153150
)
154151

152+
if arch is None:
153+
raise RuntimeError(
154+
"Architecture could not be determined from release or requirements file"
155+
)
156+
155157
if require_uefi is None:
156158
require_uefi = "_usi" in feature_list
157159

158160
if secureboot is None:
159161
secureboot = "_trustedboot" in feature_list
160162

161-
commit_hash = cname_object.commit_hash
163+
# RegEx for S3 supported characters
164+
re_object = re.compile("[^a-zA-Z0-9\\s+\\-=.\\_:/@]")
165+
166+
arch = re_object.sub("+", arch)
167+
commit_id_or_hash = cname_object.commit_hash
162168

163-
if commit_hash is None:
164-
commit_hash = ""
169+
if commit_id_or_hash is None:
170+
commit_id_or_hash = cname_object.commit_id
165171

166172
version_epoch = str(cname_object.version_epoch)
167173

@@ -170,9 +176,9 @@ def upload_from_directory(
170176

171177
metadata = {
172178
"platform": cname_object.feature_set_platform,
173-
"architecture": cname_object.arch,
179+
"architecture": arch,
174180
"base_image": None,
175-
"build_committish": commit_hash,
181+
"build_committish": commit_id_or_hash,
176182
"build_timestamp": datetime.fromtimestamp(release_timestamp).isoformat(),
177183
"gardenlinux_epoch": {version_epoch},
178184
"logs": None,
@@ -181,7 +187,7 @@ def upload_from_directory(
181187
"secureboot": secureboot,
182188
"published_image_metadata": None,
183189
"s3_bucket": self._bucket.name,
184-
"s3_key": f"meta/singles/{cname}",
190+
"s3_key": f"meta/singles/{base_name}",
185191
"test_result": None,
186192
"version": cname_object.version,
187193
"paths": [],
@@ -192,39 +198,34 @@ def upload_from_directory(
192198
if platform_variant is not None:
193199
metadata["platform_variant"] = platform_variant
194200

195-
re_object = re.compile("[^a-zA-Z0-9\\s+\\-=.\\_:/@]")
201+
base_name_length = len(base_name)
196202

197203
for artifact in artifacts_dir.iterdir():
198-
if not artifact.match(f"{cname}*"):
204+
if not artifact.match(f"{base_name}*"):
199205
continue
200206

201-
if not artifact.name.startswith(cname):
202-
raise RuntimeError(
203-
f"Artifact name '{artifact.name}' does not start with cname '{cname}'"
204-
)
205-
206-
s3_key = f"objects/{cname}/{artifact.name}"
207+
s3_key = f"objects/{base_name}/{artifact.name}"
207208

208209
with artifact.open("rb") as fp:
209210
md5sum = file_digest(fp, "md5").hexdigest()
210211
sha256sum = file_digest(fp, "sha256").hexdigest()
211212

212-
suffix = artifact.name[len(cname) :]
213+
suffixes = "".join(artifact.name)[1 + base_name_length:]
213214

214215
artifact_metadata = {
215216
"name": artifact.name,
216217
"s3_bucket_name": self._bucket.name,
217218
"s3_key": s3_key,
218-
"suffix": suffix,
219+
"suffix": re_object.sub("+", suffixes),
219220
"md5sum": md5sum,
220221
"sha256sum": sha256sum,
221222
}
222223

223224
s3_tags = {
224-
"architecture": re_object.sub("+", cname_object.arch),
225+
"architecture": arch,
225226
"platform": re_object.sub("+", cname_object.platform),
226227
"version": re_object.sub("+", cname_object.version), # type: ignore[arg-type]
227-
"committish": commit_hash,
228+
"committish": commit_id_or_hash,
228229
"md5sum": md5sum,
229230
"sha256sum": sha256sum,
230231
}
@@ -246,13 +247,15 @@ def upload_from_directory(
246247
else:
247248
if delete_before_push:
248249
self._bucket.delete_objects(
249-
Delete={"Objects": [{"Key": f"meta/singles/{cname}"}]}
250+
Delete={"Objects": [{"Key": f"meta/singles/{base_name}"}]}
250251
)
251252

252253
with TemporaryFile(mode="wb+") as fp:
253254
fp.write(yaml.dump(metadata).encode("utf-8"))
254255
fp.seek(0)
255256

256257
self._bucket.upload_fileobj(
257-
fp, f"meta/singles/{cname}", ExtraArgs={"ContentType": "text/yaml"}
258+
fp,
259+
f"meta/singles/{base_name}",
260+
ExtraArgs={"ContentType": "text/yaml"},
258261
)

tests/s3/conftest.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,12 +27,12 @@ def make_cname(
2727
flavor: str = "container",
2828
arch: str = "amd64",
2929
version: str = "1234.1",
30-
commit: str = "abc123",
30+
commit: str = "abc123long",
3131
) -> str:
3232
"""
3333
Helper function to build a cname. Can be used to customize the cname.
3434
"""
35-
return f"{flavor}-{arch}-{version}-{commit}"
35+
return f"{flavor}-{arch}-{version}-{commit[:8]}"
3636

3737

3838
# Helpers to compute digests for fake files

tests/s3/test_main.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -15,11 +15,11 @@
1515
"__main__.py",
1616
"--bucket",
1717
"test-bucket",
18-
"--cname",
19-
"test-cname",
2018
"--path",
2119
"some/path",
2220
"download-artifacts-from-bucket",
21+
"--cname",
22+
"test-cname",
2323
],
2424
"download_to_directory",
2525
["test-cname", "some/path"],
@@ -30,11 +30,11 @@
3030
"__main__.py",
3131
"--bucket",
3232
"test-bucket",
33-
"--cname",
34-
"test-cname",
3533
"--path",
3634
"some/path",
3735
"upload-artifacts-to-bucket",
36+
"--artifact-name",
37+
"test-cname",
3838
],
3939
"upload_from_directory",
4040
["test-cname", "some/path"],

tests/s3/test_s3_artifacts.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,9 @@
1111
from .conftest import S3Env
1212

1313
RELEASE_DATA = """
14-
GARDENLINUX_CNAME="container-amd64-1234.1-abc123"
14+
GARDENLINUX_CNAME="container-amd64-1234.1-abc123lo"
1515
GARDENLINUX_VERSION=1234.1
16-
GARDENLINUX_COMMIT_ID="abc123"
16+
GARDENLINUX_COMMIT_ID="abc123lo"
1717
GARDENLINUX_COMMIT_ID_LONG="abc123long"
1818
GARDENLINUX_FEATURES="_usi,_trustedboot"
1919
GARDENLINUX_FEATURES_ELEMENTS=
@@ -174,9 +174,9 @@ def test_upload_from_directory_invalid_dir_raises(s3_setup: S3Env) -> None:
174174
artifacts.upload_from_directory(env.cname, "/invalid/path")
175175

176176

177-
def test_upload_from_directory_version_mismatch_raises(s3_setup: S3Env) -> None:
177+
def test_upload_from_directory_version_mismatch(s3_setup: S3Env) -> None:
178178
"""
179-
RuntimeError if version in release file does not match cname.
179+
Validate that the release file may contain a version that does not match the artifact name.
180180
"""
181181
# Arrange
182182
env = s3_setup
@@ -186,8 +186,7 @@ def test_upload_from_directory_version_mismatch_raises(s3_setup: S3Env) -> None:
186186
artifacts = S3Artifacts(env.bucket_name)
187187

188188
# Act / Assert
189-
with pytest.raises(RuntimeError, match="failed consistency check"):
190-
artifacts.upload_from_directory(env.cname, env.tmp_path)
189+
artifacts.upload_from_directory(env.cname, env.tmp_path)
191190

192191

193192
def test_upload_from_directory_succeeds_because_of_release_file(
@@ -226,8 +225,10 @@ def test_upload_from_directory_invalid_artifact_name(s3_setup: S3Env) -> None:
226225
assert len(list(bucket.objects.filter(Prefix=f"meta/singles/{env.cname}"))) == 1
227226

228227

229-
def test_upload_from_directory_commit_mismatch_raises(s3_setup: S3Env) -> None:
230-
"""Raise RuntimeError when commit ID is not matching with cname."""
228+
def test_upload_from_directory_commit_mismatch(s3_setup: S3Env) -> None:
229+
"""
230+
Validate that the release file may contain a commit hash that does not match the artifact name.
231+
"""
231232
# Arrange
232233
env = s3_setup
233234
release_path = env.tmp_path / f"{env.cname}.release"
@@ -236,8 +237,7 @@ def test_upload_from_directory_commit_mismatch_raises(s3_setup: S3Env) -> None:
236237
artifacts = S3Artifacts(env.bucket_name)
237238

238239
# Act / Assert
239-
with pytest.raises(RuntimeError, match="failed consistency check"):
240-
artifacts.upload_from_directory(env.cname, env.tmp_path)
240+
artifacts.upload_from_directory(env.cname, env.tmp_path)
241241

242242

243243
def test_upload_from_directory_with_platform_variant(s3_setup: S3Env) -> None:

0 commit comments

Comments
 (0)