Skip to content

Commit 32caa13

Browse files
committed
Tests: Use original CName and use dataclass instead of tuple for fixture
1 parent 445a4e4 commit 32caa13

File tree

3 files changed

+85
-64
lines changed

3 files changed

+85
-64
lines changed

tests/s3/conftest.py

Lines changed: 33 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,39 @@
11
import pytest
2-
from pathlib import Path
2+
from dataclasses import dataclass
33
from moto import mock_aws
44
from hashlib import md5, sha256
55
import boto3
66

7+
from gardenlinux.features.cname import CName as RealCName
8+
9+
BUCKET_NAME = "test-bucket"
10+
REGION = "us-east-1"
11+
712

813
# Dummy CName replacement
9-
class DummyCName:
10-
def __init__(self, cname): # pylint: disable=unused-argument
11-
self.platform = "aws"
12-
self.arch = "amd64"
13-
self.version = "1234.1"
14-
self.commit_id = "abc123"
14+
class DummyCName(RealCName):
15+
def __init__(self, cname: str): # pylint: disable=unused-argument
16+
super().__init__(cname)
17+
18+
19+
@dataclass(frozen=True)
20+
class S3Env:
21+
s3: object
22+
bucket_name: str
23+
tmp_path: str
24+
cname: str
25+
26+
27+
def make_cname(
28+
flavor: str = "testcname",
29+
arch: str = "amd64",
30+
version: str = "1234.1",
31+
commit: str = "abc123",
32+
) -> str:
33+
"""
34+
Helper function to build a cname. Can be used to customize the cname.
35+
"""
36+
return f"{flavor}-{arch}-{version}-{commit}"
1537

1638

1739
# Helpers to compute digests for fake files
@@ -36,11 +58,11 @@ def s3_setup(tmp_path, monkeypatch):
3658
Provides a clean S3 setup for each test.
3759
"""
3860
with mock_aws():
39-
s3 = boto3.resource("s3", region_name="us-east-1")
40-
bucket_name = "test-bucket"
41-
s3.create_bucket(Bucket=bucket_name)
61+
s3 = boto3.resource("s3", region_name=REGION)
62+
s3.create_bucket(Bucket=BUCKET_NAME)
4263

4364
monkeypatch.setattr("gardenlinux.s3.s3_artifacts.CName", DummyCName)
4465
monkeypatch.setattr("gardenlinux.s3.s3_artifacts.file_digest", dummy_digest)
4566

46-
yield s3, bucket_name, tmp_path
67+
cname = make_cname()
68+
yield S3Env(s3, BUCKET_NAME, tmp_path, cname)

tests/s3/test_bucket.py

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,10 @@ def test_objects_empty(s3_setup):
2020
List objects from empty bucket.
2121
"""
2222
# Arrange
23-
s3, bucket_name, _ = s3_setup
23+
env = s3_setup
2424

2525
# Act
26-
bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})
26+
bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION})
2727

2828
# Assert
2929
assert list(bucket.objects) == []
@@ -35,13 +35,13 @@ def test_upload_file_and_list(s3_setup):
3535
to list it
3636
"""
3737
# Arrange
38-
s3, bucket_name, tmp_path = s3_setup
38+
env = s3_setup
3939

40-
test_file = tmp_path / "example.txt"
40+
test_file = env.tmp_path / "example.txt"
4141
test_file.write_text("hello moto")
4242

4343
# Act
44-
bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})
44+
bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION})
4545
bucket.upload_file(str(test_file), "example.txt")
4646

4747
all_keys = [obj.key for obj in bucket.objects]
@@ -55,12 +55,12 @@ def test_download_file(s3_setup):
5555
Try to download a file pre-existing in the bucket
5656
"""
5757
# Arrange
58-
s3, bucket_name, tmp_path = s3_setup
59-
s3.Object(bucket_name, "file.txt").put(Body=b"some data")
58+
env = s3_setup
59+
env.s3.Object(env.bucket_name, "file.txt").put(Body=b"some data")
6060

6161
# Act
62-
bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})
63-
target_path = tmp_path / "downloaded.txt"
62+
bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION})
63+
target_path = env.tmp_path / "downloaded.txt"
6464
bucket.download_file("file.txt", str(target_path))
6565

6666
# Assert
@@ -72,15 +72,15 @@ def test_upload_fileobj(s3_setup):
7272
Upload a file-like in-memory object to the bucket
7373
"""
7474
# Arrange
75-
s3, bucket_name, _ = s3_setup
75+
env = s3_setup
7676

7777
# Act
7878
# Create in-memory binary stream (file content)
7979
data = io.BytesIO(b"Test Data")
80-
bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})
80+
bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION})
8181
bucket.upload_fileobj(data, "binary.obj")
8282

83-
obj = s3.Object(bucket_name, "binary.obj").get()
83+
obj = env.s3.Object(env.bucket_name, "binary.obj").get()
8484

8585
# Assert
8686
assert obj["Body"].read() == b"Test Data"
@@ -91,12 +91,12 @@ def test_download_fileobj(s3_setup):
9191
Download data into a in-memory object
9292
"""
9393
# Arrange
94-
s3, bucket_name, _ = s3_setup
94+
env = s3_setup
9595
# Put some object in the bucket
96-
s3.Object(bucket_name, "somekey").put(Body=b"123abc")
96+
env.s3.Object(env.bucket_name, "somekey").put(Body=b"123abc")
9797

9898
# Act
99-
bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})
99+
bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION})
100100
# Create empty in-memory bytestream to act as a writable file
101101
output = io.BytesIO()
102102
bucket.download_fileobj("somekey", output)
@@ -115,11 +115,11 @@ def test_getattr_delegates(s3_setup):
115115
the call to the real boto3 bucket.
116116
"""
117117
# Arrange
118-
_, bucket_name, _ = s3_setup
118+
env = s3_setup
119119

120120
# Act
121-
bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})
121+
bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION})
122122

123123
# Assert
124124
# __getattr__ should delegate this to the underlying boto3 Bucket object
125-
assert bucket.name == bucket_name
125+
assert bucket.name == env.bucket_name

tests/s3/test_s3_artifacts.py

Lines changed: 34 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44

55
from gardenlinux.s3.s3_artifacts import S3Artifacts
66

7-
CNAME = "testcname"
87
RELEASE_DATA = """
98
GARDENLINUX_VERSION = 1234.1
109
GARDENLINUX_COMMIT_ID = abc123
@@ -15,13 +14,13 @@
1514

1615
def test_s3artifacts_init_success(s3_setup):
1716
# Arrange
18-
_, bucket_name, _ = s3_setup
17+
env = s3_setup
1918

2019
# Act
21-
s3_artifacts = S3Artifacts(bucket_name)
20+
s3_artifacts = S3Artifacts(env.bucket_name)
2221

2322
# Assert
24-
assert s3_artifacts._bucket.name == bucket_name
23+
assert s3_artifacts._bucket.name == env.bucket_name
2524

2625

2726
def tets_s3artifacts_invalid_bucket():
@@ -35,22 +34,22 @@ def test_download_to_directory_success(s3_setup):
3534
Test download of multiple files to a directory on disk.
3635
"""
3736
# Arrange
38-
s3, bucket_name, _ = s3_setup
39-
bucket = s3.Bucket(bucket_name)
37+
env = s3_setup
38+
bucket = env.s3.Bucket(env.bucket_name)
4039

41-
bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"metadata")
42-
bucket.put_object(Key=f"objects/{CNAME}/file1", Body=b"data1")
43-
bucket.put_object(Key=f"objects/{CNAME}/file2", Body=b"data2")
40+
bucket.put_object(Key=f"meta/singles/{env.cname}", Body=b"metadata")
41+
bucket.put_object(Key=f"objects/{env.cname}/file1", Body=b"data1")
42+
bucket.put_object(Key=f"objects/{env.cname}/file2", Body=b"data2")
4443

4544
with TemporaryDirectory() as tmpdir:
4645
outdir = Path(tmpdir)
4746

4847
# Act
49-
artifacts = S3Artifacts(bucket_name)
50-
artifacts.download_to_directory(CNAME, outdir)
48+
artifacts = S3Artifacts(env.bucket_name)
49+
artifacts.download_to_directory(env.cname, outdir)
5150

5251
# Assert
53-
assert (outdir / f"{CNAME}.s3_metadata.yaml").read_bytes() == b"metadata"
52+
assert (outdir / f"{env.cname}.s3_metadata.yaml").read_bytes() == b"metadata"
5453
assert (outdir / "file1").read_bytes() == b"data1"
5554
assert (outdir / "file2").read_bytes() == b"data2"
5655

@@ -60,67 +59,67 @@ def test_download_to_directory_invalid_path(s3_setup):
6059
Test proper handling of download attempt to invalid path.
6160
"""
6261
# Arrange
63-
_, bucket_name, _ = s3_setup
64-
artifacts = S3Artifacts(bucket_name)
62+
env = s3_setup
63+
artifacts = S3Artifacts(env.bucket_name)
6564

6665
# Act / Assert
6766
with pytest.raises(RuntimeError):
68-
artifacts.download_to_directory({CNAME}, "/invalid/path/does/not/exist")
67+
artifacts.download_to_directory({env.cname}, "/invalid/path/does/not/exist")
6968

7069

7170
def test_upload_from_directory_success(s3_setup):
7271
"""
7372
Test upload of multiple artifacts from disk to bucket
7473
"""
7574
# Arrange
76-
s3, bucket_name, tmp_path = s3_setup
75+
env = s3_setup
7776

78-
release_path = tmp_path / f"{CNAME}.release"
77+
release_path = env.tmp_path / f"{env.cname}.release"
7978
release_path.write_text(RELEASE_DATA)
8079

81-
for filename in [f"{CNAME}-file1", f"{CNAME}-file2"]:
82-
(tmp_path / filename).write_bytes(b"dummy content")
80+
for filename in [f"{env.cname}-file1", f"{env.cname}-file2"]:
81+
(env.tmp_path / filename).write_bytes(b"dummy content")
8382

8483
# Act
85-
artifacts = S3Artifacts(bucket_name)
86-
artifacts.upload_from_directory(CNAME, tmp_path)
84+
artifacts = S3Artifacts(env.bucket_name)
85+
artifacts.upload_from_directory(env.cname, env.tmp_path)
8786

8887
# Assert
89-
bucket = s3.Bucket(bucket_name)
88+
bucket = env.s3.Bucket(env.bucket_name)
9089
keys = [obj.key for obj in bucket.objects.all()]
91-
assert f"objects/{CNAME}/{CNAME}-file1" in keys
92-
assert f"objects/{CNAME}/{CNAME}-file2" in keys
93-
assert f"meta/singles/{CNAME}" in keys
90+
assert f"objects/{env.cname}/{env.cname}-file1" in keys
91+
assert f"objects/{env.cname}/{env.cname}-file2" in keys
92+
assert f"meta/singles/{env.cname}" in keys
9493

9594

9695
def test_upload_from_directory_with_delete(s3_setup):
9796
"""
9897
Test that upload_from_directory deletes existing files before uploading
9998
when delete_before_push=True.
10099
"""
101-
s3, bucket_name, tmp_path = s3_setup
102-
bucket = s3.Bucket(bucket_name)
100+
env = s3_setup
101+
bucket = env.s3.Bucket(env.bucket_name)
103102

104103
# Arrange: create release and artifact files locally
105-
release = tmp_path / f"{CNAME}.release"
104+
release = env.tmp_path / f"{env.cname}.release"
106105
release.write_text(RELEASE_DATA)
107106

108-
artifact = tmp_path / f"{CNAME}.kernel"
107+
artifact = env.tmp_path / f"{env.cname}.kernel"
109108
artifact.write_bytes(b"fake")
110109

111110
# Arrange: put dummy existing objects to be deleted
112-
bucket.put_object(Key=f"objects/{CNAME}/{artifact.name}", Body=b"old data")
113-
bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"old metadata")
111+
bucket.put_object(Key=f"objects/{env.cname}/{artifact.name}", Body=b"old data")
112+
bucket.put_object(Key=f"meta/singles/{env.cname}", Body=b"old metadata")
114113

115-
artifacts = S3Artifacts(bucket_name)
114+
artifacts = S3Artifacts(env.bucket_name)
116115

117116
# Act
118-
artifacts.upload_from_directory(CNAME, tmp_path, delete_before_push=True)
117+
artifacts.upload_from_directory(env.cname, env.tmp_path, delete_before_push=True)
119118

120119
# Assert
121120
keys = [obj.key for obj in bucket.objects.all()]
122121

123122
# The old key should no longer be present as old data (no duplicates)
124123
# but the new upload file key should exist (artifact uploaded)
125-
assert f"objects/{CNAME}/{artifact.name}" in keys
126-
assert f"meta/singles/{CNAME}" in keys
124+
assert f"objects/{env.cname}/{artifact.name}" in keys
125+
assert f"meta/singles/{env.cname}" in keys

0 commit comments

Comments
 (0)