@@ -4,7 +4,6 @@
 
 from gardenlinux.s3.s3_artifacts import S3Artifacts
 
-CNAME = "testcname"
 RELEASE_DATA = """
 GARDENLINUX_VERSION = 1234.1
 GARDENLINUX_COMMIT_ID = abc123
@@ -15,13 +14,13 @@
 
 def test_s3artifacts_init_success(s3_setup):
     # Arrange
-    _, bucket_name, _ = s3_setup
+    env = s3_setup
 
     # Act
-    s3_artifacts = S3Artifacts(bucket_name)
+    s3_artifacts = S3Artifacts(env.bucket_name)
 
     # Assert
-    assert s3_artifacts._bucket.name == bucket_name
+    assert s3_artifacts._bucket.name == env.bucket_name
 
 
 def test_s3artifacts_invalid_bucket():
@@ -35,22 +34,22 @@ def test_download_to_directory_success(s3_setup):
     Test download of multiple files to a directory on disk.
     """
     # Arrange
-    s3, bucket_name, _ = s3_setup
-    bucket = s3.Bucket(bucket_name)
+    env = s3_setup
+    bucket = env.s3.Bucket(env.bucket_name)
 
-    bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"metadata")
-    bucket.put_object(Key=f"objects/{CNAME}/file1", Body=b"data1")
-    bucket.put_object(Key=f"objects/{CNAME}/file2", Body=b"data2")
+    bucket.put_object(Key=f"meta/singles/{env.cname}", Body=b"metadata")
+    bucket.put_object(Key=f"objects/{env.cname}/file1", Body=b"data1")
+    bucket.put_object(Key=f"objects/{env.cname}/file2", Body=b"data2")
 
     with TemporaryDirectory() as tmpdir:
         outdir = Path(tmpdir)
 
         # Act
-        artifacts = S3Artifacts(bucket_name)
-        artifacts.download_to_directory(CNAME, outdir)
+        artifacts = S3Artifacts(env.bucket_name)
+        artifacts.download_to_directory(env.cname, outdir)
 
         # Assert
-        assert (outdir / f"{CNAME}.s3_metadata.yaml").read_bytes() == b"metadata"
+        assert (outdir / f"{env.cname}.s3_metadata.yaml").read_bytes() == b"metadata"
         assert (outdir / "file1").read_bytes() == b"data1"
         assert (outdir / "file2").read_bytes() == b"data2"
 
@@ -60,67 +59,67 @@ def test_download_to_directory_invalid_path(s3_setup):
     Test proper handling of download attempt to invalid path.
     """
     # Arrange
-    _, bucket_name, _ = s3_setup
-    artifacts = S3Artifacts(bucket_name)
+    env = s3_setup
+    artifacts = S3Artifacts(env.bucket_name)
 
     # Act / Assert
     with pytest.raises(RuntimeError):
-        artifacts.download_to_directory({CNAME}, "/invalid/path/does/not/exist")
+        artifacts.download_to_directory(env.cname, "/invalid/path/does/not/exist")
 
 
 def test_upload_from_directory_success(s3_setup):
     """
     Test upload of multiple artifacts from disk to bucket
     """
     # Arrange
-    s3, bucket_name, tmp_path = s3_setup
+    env = s3_setup
 
-    release_path = tmp_path / f"{CNAME}.release"
+    release_path = env.tmp_path / f"{env.cname}.release"
     release_path.write_text(RELEASE_DATA)
 
-    for filename in [f"{CNAME}-file1", f"{CNAME}-file2"]:
-        (tmp_path / filename).write_bytes(b"dummy content")
+    for filename in [f"{env.cname}-file1", f"{env.cname}-file2"]:
+        (env.tmp_path / filename).write_bytes(b"dummy content")
 
     # Act
-    artifacts = S3Artifacts(bucket_name)
-    artifacts.upload_from_directory(CNAME, tmp_path)
+    artifacts = S3Artifacts(env.bucket_name)
+    artifacts.upload_from_directory(env.cname, env.tmp_path)
 
     # Assert
-    bucket = s3.Bucket(bucket_name)
+    bucket = env.s3.Bucket(env.bucket_name)
     keys = [obj.key for obj in bucket.objects.all()]
-    assert f"objects/{CNAME}/{CNAME}-file1" in keys
-    assert f"objects/{CNAME}/{CNAME}-file2" in keys
-    assert f"meta/singles/{CNAME}" in keys
+    assert f"objects/{env.cname}/{env.cname}-file1" in keys
+    assert f"objects/{env.cname}/{env.cname}-file2" in keys
+    assert f"meta/singles/{env.cname}" in keys
 
 
 def test_upload_from_directory_with_delete(s3_setup):
     """
     Test that upload_from_directory deletes existing files before uploading
     when delete_before_push=True.
     """
-    s3, bucket_name, tmp_path = s3_setup
-    bucket = s3.Bucket(bucket_name)
+    env = s3_setup
+    bucket = env.s3.Bucket(env.bucket_name)
 
     # Arrange: create release and artifact files locally
-    release = tmp_path / f"{CNAME}.release"
+    release = env.tmp_path / f"{env.cname}.release"
     release.write_text(RELEASE_DATA)
 
-    artifact = tmp_path / f"{CNAME}.kernel"
+    artifact = env.tmp_path / f"{env.cname}.kernel"
     artifact.write_bytes(b"fake")
 
     # Arrange: put dummy existing objects to be deleted
-    bucket.put_object(Key=f"objects/{CNAME}/{artifact.name}", Body=b"old data")
-    bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"old metadata")
+    bucket.put_object(Key=f"objects/{env.cname}/{artifact.name}", Body=b"old data")
+    bucket.put_object(Key=f"meta/singles/{env.cname}", Body=b"old metadata")
 
-    artifacts = S3Artifacts(bucket_name)
+    artifacts = S3Artifacts(env.bucket_name)
 
     # Act
-    artifacts.upload_from_directory(CNAME, tmp_path, delete_before_push=True)
+    artifacts.upload_from_directory(env.cname, env.tmp_path, delete_before_push=True)
 
     # Assert
     keys = [obj.key for obj in bucket.objects.all()]
 
     # The old key should no longer be present as old data (no duplicates)
     # but the new upload file key should exist (artifact uploaded)
-    assert f"objects/{CNAME}/{artifact.name}" in keys
-    assert f"meta/singles/{CNAME}" in keys
+    assert f"objects/{env.cname}/{artifact.name}" in keys
+    assert f"meta/singles/{env.cname}" in keys
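
Note: these tests assume an s3_setup fixture (defined in the shared conftest, not part of this diff) that now yields a single object exposing s3, bucket_name, cname, and tmp_path instead of a tuple. As a rough sketch only — the S3TestEnv class name, the bucket name, and the region below are placeholders, and moto >= 5 is assumed for mock_aws — such a fixture could look like:

from dataclasses import dataclass
from pathlib import Path

import boto3
import pytest
from moto import mock_aws  # assumption: moto >= 5; older releases exposed mock_s3 instead


@dataclass
class S3TestEnv:
    """Bundle of everything the S3Artifacts tests need from the fixture (hypothetical)."""
    s3: object        # boto3 S3 service resource
    bucket_name: str
    cname: str
    tmp_path: Path


@pytest.fixture
def s3_setup(tmp_path):
    # All S3 calls below hit moto's in-memory backend, never real AWS.
    with mock_aws():
        s3 = boto3.resource("s3", region_name="eu-central-1")
        bucket_name = "gardenlinux-test-bucket"
        s3.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={"LocationConstraint": "eu-central-1"},
        )
        yield S3TestEnv(s3=s3, bucket_name=bucket_name, cname="testcname", tmp_path=tmp_path)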