import io
from hashlib import md5, sha256
from pathlib import Path
from tempfile import TemporaryDirectory
from unittest.mock import MagicMock, patch

import boto3
import pytest
from moto import mock_aws

from gardenlinux.s3.s3_artifacts import S3Artifacts

# Canonical name used as the artifact identifier throughout these tests.
CNAME = "testcname"
814
915# Dummy CName replacement
1016class DummyCName :
@@ -31,195 +37,141 @@ def dummy_digest(data: bytes, algo: str) -> str:
3137 raise ValueError (f"Unsupported algo: { algo } " )
3238
3339
@pytest.fixture(autouse=True)
def s3_setup(tmp_path, monkeypatch):
    """
    Provide a clean, isolated S3 setup for each test.

    Runs every test inside a moto ``mock_aws`` context with a freshly
    created bucket, and patches the module under test so that:

    * ``CName`` is replaced by :class:`DummyCName` (no real cname parsing),
    * ``file_digest`` is replaced by :func:`dummy_digest` (deterministic
      digests without reading real hash state).

    Yields:
        tuple: ``(s3_resource, bucket_name, tmp_path)`` for use by tests.
    """
    with mock_aws():
        s3 = boto3.resource("s3", region_name="us-east-1")
        bucket_name = "test-bucket"
        s3.create_bucket(Bucket=bucket_name)

        monkeypatch.setattr("gardenlinux.s3.s3_artifacts.CName", DummyCName)
        monkeypatch.setattr("gardenlinux.s3.s3_artifacts.file_digest", dummy_digest)

        # Yield inside the mock_aws context so it stays active for the test.
        yield s3, bucket_name, tmp_path
4654
4755
def test_s3artifacts_init_success(s3_setup):
    """
    Sanity test: S3Artifacts binds to an existing bucket on instantiation.
    """
    # Arrange
    _, bucket_name, _ = s3_setup

    # Act
    s3_artifacts = S3Artifacts(bucket_name)

    # Assert
    assert s3_artifacts._bucket.name == bucket_name
5966
def test_s3artifacts_invalid_bucket():
    """
    Sanity test: instantiating S3Artifacts with a non-existing bucket raises.
    """
    # NOTE: original was misspelled "tets_..." so pytest never collected it.
    # Act / Assert
    with pytest.raises(Exception):
        S3Artifacts("unknown-bucket")
72+
def test_download_to_directory_success(s3_setup):
    """
    Test download of multiple files to a directory on disk.

    Seeds the bucket with a metadata object plus two artifact objects and
    verifies download_to_directory materializes all three on disk.
    """
    # Arrange
    s3, bucket_name, tmp_path = s3_setup
    bucket = s3.Bucket(bucket_name)

    bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"metadata")
    # No leading space in the key: it must match the "objects/{CNAME}" prefix
    # that download_to_directory filters on.
    bucket.put_object(Key=f"objects/{CNAME}/file1", Body=b"data1")
    bucket.put_object(Key=f"objects/{CNAME}/file2", Body=b"data2")

    with TemporaryDirectory() as tmpdir:
        outdir = Path(tmpdir)

        # Act
        artifacts = S3Artifacts(bucket_name)
        artifacts.download_to_directory(CNAME, outdir)

        # Assert
        assert (outdir / f"{CNAME}.s3_metadata.yaml").read_bytes() == b"metadata"
        assert (outdir / "file1").read_bytes() == b"data1"
        assert (outdir / "file2").read_bytes() == b"data2"
def test_download_to_directory_invalid_path(s3_setup):
    """
    Test proper handling of a download attempt to an invalid path.
    """
    # Arrange
    _, bucket_name, _ = s3_setup
    artifacts = S3Artifacts(bucket_name)

    # Act / Assert
    # Pass the cname as a string (not a set literal) so the error raised is
    # about the invalid destination path.
    with pytest.raises(RuntimeError):
        artifacts.download_to_directory(CNAME, "/invalid/path/does/not/exist")
133110
def test_upload_from_directory_success(s3_setup):
    """
    Test upload of multiple artifacts from disk to the bucket.

    Writes a .release file plus two artifact files locally, uploads them,
    and verifies the expected object keys exist in the bucket.
    """
    # Arrange
    s3, bucket_name, tmp_path = s3_setup
    # Explicit-\n form keeps the release content unindented (consistent with
    # test_upload_from_directory_with_delete).
    release_data = (
        "GARDENLINUX_VERSION = 1234.1\n"
        "GARDENLINUX_COMMIT_ID = abc123\n"
        "GARDENLINUX_COMMIT_ID_LONG = abc123long\n"
        "GARDENLINUX_FEATURES = _usi,_trustedboot\n"
    )

    release_path = tmp_path / f"{CNAME}.release"
    release_path.write_text(release_data)

    for filename in [f"{CNAME}-file1", f"{CNAME}-file2"]:
        (tmp_path / filename).write_bytes(b"dummy content")

    # Act
    artifacts = S3Artifacts(bucket_name)
    artifacts.upload_from_directory(CNAME, tmp_path)

    # Assert
    bucket = s3.Bucket(bucket_name)
    keys = [obj.key for obj in bucket.objects.all()]
    assert f"objects/{CNAME}/{CNAME}-file1" in keys
    assert f"objects/{CNAME}/{CNAME}-file2" in keys
    assert f"meta/singles/{CNAME}" in keys
def test_upload_from_directory_with_delete(s3_setup):
    """
    Test that upload_from_directory deletes existing objects before uploading
    when delete_before_push=True.
    """
    s3, bucket_name, tmp_path = s3_setup
    bucket = s3.Bucket(bucket_name)

    # Arrange: create release and artifact files locally
    release = tmp_path / f"{CNAME}.release"
    release.write_text(
        "GARDENLINUX_VERSION = 1234.1\n"
        "GARDENLINUX_COMMIT_ID = abc123\n"
        "GARDENLINUX_COMMIT_ID_LONG = abc123long\n"
        "GARDENLINUX_FEATURES = _usi,_trustedboot\n"
    )

    artifact = tmp_path / f"{CNAME}.kernel"
    artifact.write_bytes(b"fake")

    # Arrange: put dummy existing objects that should be deleted first
    bucket.put_object(Key=f"objects/{CNAME}/{artifact.name}", Body=b"old data")
    bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"old metadata")

    artifacts = S3Artifacts(bucket_name)

    # Act
    artifacts.upload_from_directory(CNAME, tmp_path, delete_before_push=True)

    # Assert
    keys = [obj.key for obj in bucket.objects.all()]

    # The keys must exist exactly once: the old objects were deleted and the
    # fresh artifact/metadata uploads replaced them (no duplicates).
    assert f"objects/{CNAME}/{artifact.name}" in keys
    assert f"meta/singles/{CNAME}" in keys