88
99import io
1010import pytest
11- import boto3
12- from moto import mock_aws
1311from pathlib import Path
1412
1513from gardenlinux .s3 .bucket import Bucket
1614
1715
18- BUCKET_NAME = "test-bucket"
1916REGION = "us-east-1"
2017
2118
def test_objects_empty(s3_setup):
    """
    List objects from empty bucket.
    """
    # Arrange: the fixture yields a mocked S3 resource and a fresh, empty bucket.
    s3, bucket_name, _ = s3_setup

    # Act
    bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})

    # Assert: a brand-new bucket lists no objects at all.
    assert not list(bucket.objects)
3731
3832
def test_upload_file_and_list(s3_setup):
    """
    Create a fake file in a temporary directory, upload and try
    to list it.
    """
    # Arrange: write a small local file to upload.
    s3, bucket_name, tmp_path = s3_setup
    source = tmp_path / "example.txt"
    source.write_text("hello moto")

    # Act
    bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})
    bucket.upload_file(str(source), "example.txt")

    # Assert: the uploaded key appears in the bucket listing.
    assert "example.txt" in {obj.key for obj in bucket.objects}
6052
6153
def test_download_file(s3_setup):
    """
    Try to download a file pre-existing in the bucket.
    """
    # Arrange: seed the mocked bucket with an object to fetch.
    s3, bucket_name, tmp_path = s3_setup
    s3.Object(bucket_name, "file.txt").put(Body=b"some data")
    destination = tmp_path / "downloaded.txt"

    # Act
    bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})
    bucket.download_file("file.txt", str(destination))

    # Assert: the downloaded file holds exactly the seeded payload.
    assert destination.read_text() == "some data"
8069
8170
def test_upload_fileobj(s3_setup):
    """
    Upload a file-like in-memory object to the bucket.
    """
    # Arrange
    s3, bucket_name, _ = s3_setup

    # Act: upload an in-memory binary stream as the object body.
    payload = io.BytesIO(b"Test Data")
    bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})
    bucket.upload_fileobj(payload, "binary.obj")

    # Read the object back through the raw boto3 resource for verification.
    stored = s3.Object(bucket_name, "binary.obj").get()

    # Assert
    assert stored["Body"].read() == b"Test Data"
10188
10289
def test_download_fileobj(s3_setup):
    """
    Download data into an in-memory object.
    """
    # Arrange: put some object in the bucket first.
    s3, bucket_name, _ = s3_setup
    s3.Object(bucket_name, "somekey").put(Body=b"123abc")

    # Act: stream the object into an empty writable in-memory buffer.
    bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})
    sink = io.BytesIO()
    bucket.download_fileobj("somekey", sink)
    # Rewind the cursor so the whole buffer can be read back.
    sink.seek(0)

    # Assert
    assert sink.read() == b"123abc"
126109
127110
def test_getattr_delegates(s3_setup):
    """
    Verify that attribute access is delegated to the underlying boto3 Bucket.

    This checks that accessing e.g. `.name` on our custom Bucket works by
    forwarding the call to the real boto3 bucket.
    """
    # Arrange
    _, bucket_name, _ = s3_setup

    # Act
    bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION})

    # Assert: __getattr__ should forward `.name` to the wrapped boto3 bucket.
    assert bucket.name == bucket_name
0 commit comments