import pytest
- from unittest.mock import MagicMock, patch
+ import boto3
from pathlib import Path
from tempfile import TemporaryDirectory
from hashlib import md5, sha256
+ from moto import mock_aws
+
from gardenlinux.s3.s3_artifacts import S3Artifacts

+ CNAME = "testcname"
+

# Dummy CName replacement
class DummyCName:
-     def __init__(self, cname):
+     def __init__(self, cname):  # pylint: disable=unused-argument
        self.platform = "aws"
        self.arch = "amd64"
        self.version = "1234.1"
@@ -31,195 +35,141 @@ def dummy_digest(data: bytes, algo: str) -> str:
    raise ValueError(f"Unsupported algo: {algo}")


- @patch("gardenlinux.s3.s3_artifacts.Bucket")
- def test_s3artifacts_init_success(mock_bucket_class):
+ @pytest.fixture(autouse=True)
+ def s3_setup(tmp_path, monkeypatch):
    """
-     Sanity test to assert correct instantiation of S3Artifacts object
+     Provides a clean S3 setup for each test.
    """
-     mock_bucket_instance = MagicMock()
-     mock_bucket_class.return_value = mock_bucket_instance
+     with mock_aws():
+         s3 = boto3.resource("s3", region_name="us-east-1")
+         bucket_name = "test-bucket"
+         s3.create_bucket(Bucket=bucket_name)
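+         # The bucket above lives only in moto's in-memory fake; no real AWS account is touched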

-     s3 = S3Artifacts("my-bucket")
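+         # Replace CName parsing and file digesting inside s3_artifacts with the dummies defined above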
+         monkeypatch.setattr("gardenlinux.s3.s3_artifacts.CName", DummyCName)
+         monkeypatch.setattr("gardenlinux.s3.s3_artifacts.file_digest", dummy_digest)

-     mock_bucket_class.assert_called_once_with("my-bucket", None, None)
-     assert s3._bucket == mock_bucket_instance
+         yield s3, bucket_name, tmp_path
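+         # On teardown the mock_aws context exits and all mocked S3 state is discarded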


- @patch("gardenlinux.s3.s3_artifacts.Bucket")
- def test_s3_artifacts_invalid_bucket(mock_bucket):
-     """
-     Sanity test to check proper Error raising when using non-existing bucket
-     """
-     # Simulate an exception being raised when trying to create the Bucket
-     mock_bucket.side_effect = RuntimeError("Bucket does not exist")
+ def test_s3artifacts_init_success(s3_setup):
+     # Arrange
+     _, bucket_name, _ = s3_setup
+
+     # Act
+     s3_artifacts = S3Artifacts(bucket_name)
-     with pytest.raises(RuntimeError, match="Bucket does not exist"):
-         S3Artifacts("invalid-bucket")
+     # Assert
+     assert s3_artifacts._bucket.name == bucket_name


- @patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName)
- @patch("gardenlinux.s3.s3_artifacts.Bucket")
- def test_download_to_directory_success(mock_bucket_class):
+ def test_s3artifacts_invalid_bucket():
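+     # Assumes S3Artifacts touches the bucket at construction time, so an
+     # unknown bucket name raises inside the mocked account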
+     # Act / Assert
+     with pytest.raises(Exception):
+         S3Artifacts("unknown-bucket")
+
+
+ def test_download_to_directory_success(s3_setup):
    """
-     Test download of mutliple files to directory on disk.
+     Test download of multiple files to a directory on disk.
    """
    # Arrange
-     # Create mock bucket instance
-     mock_bucket = MagicMock()
-
-     # Mock release object
-     release_object = MagicMock()
-     release_object.key = "meta/singles/testcname"
-
-     # Mock objects to be downloaded
-     s3_obj1 = MagicMock()
-     s3_obj1.key = "objects/testcname/file1"
-     s3_obj2 = MagicMock()
-     s3_obj2.key = "objects/testcname/file2"
-
-     # Mock return value of .filter().all() from boto3
-     class MockFilterReturn:
-         def all(self):
-             return [s3_obj1, s3_obj2]
-
-     # Mock teh behaviour of .objects.filter(Prefix=...)
-     # Lets us simulate different responses depending on prefix
-     def filter_side_effect(Prefix):
-         # When fetching metadata
-         if Prefix == "meta/singles/testcname":
-             return [release_object]  # return list with release file
-         # When fetching actual artifact
-         elif Prefix == "objects/testcname":
-             return MockFilterReturn()  # return mock object
-         return []  # Nothing found
+     s3, bucket_name, _ = s3_setup
+     bucket = s3.Bucket(bucket_name)

-     # Act
-     mock_bucket.objects.filter.side_effect = filter_side_effect
-     mock_bucket_class.return_value = mock_bucket
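+     # Seed the mocked bucket with the release metadata object and two artifacts to download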
+     bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"metadata")
+     bucket.put_object(Key=f"objects/{CNAME}/file1", Body=b"data1")
+     bucket.put_object(Key=f"objects/{CNAME}/file2", Body=b"data2")

    with TemporaryDirectory() as tmpdir:
-         artifacts_dir = Path(tmpdir)
+         outdir = Path(tmpdir)

-         s3 = S3Artifacts("test-bucket")
-         s3.download_to_directory("testcname", artifacts_dir)
+         # Act
+         artifacts = S3Artifacts(bucket_name)
+         artifacts.download_to_directory(CNAME, outdir)

        # Assert
-         # Validate download_file called with correct metadata path
-         mock_bucket.download_file.assert_any_call(
-             "meta/singles/testcname",
-             artifacts_dir / "testcname.s3_metadata.yaml",
-         )
+         assert (outdir / f"{CNAME}.s3_metadata.yaml").read_bytes() == b"metadata"
+         assert (outdir / "file1").read_bytes() == b"data1"
+         assert (outdir / "file2").read_bytes() == b"data2"

-         # Validate files were downloaded from object keys
-         mock_bucket.download_file.assert_any_call(
-             "objects/testcname/file1", artifacts_dir / "file1"
-         )
-         mock_bucket.download_file.assert_any_call(
-             "objects/testcname/file2", artifacts_dir / "file2"
-         )

-         assert mock_bucket.download_file.call_count == 3
-
-
- @patch("gardenlinux.s3.s3_artifacts.Bucket")
- def test_download_to_directory_invalid_path(mock_bucket):
+ def test_download_to_directory_invalid_path(s3_setup):
    """
-     Sanity Test to test behaviour on invalid paths
+     Test proper handling of a download attempt to an invalid path.
    """
-     s3 = S3Artifacts("bucket")
+     # Arrange
+     _, bucket_name, _ = s3_setup
+     artifacts = S3Artifacts(bucket_name)
+
+     # Act / Assert
    with pytest.raises(RuntimeError):
-         s3.download_to_directory("test-cname", "/invalid/path/does/not/exist")
+         artifacts.download_to_directory(CNAME, "/invalid/path/does/not/exist")


- @patch("gardenlinux.s3.s3_artifacts.file_digest", side_effect=dummy_digest)
- @patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName)
- @patch("gardenlinux.s3.s3_artifacts.Bucket")
- def test_upload_from_directory_success(mock_bucket_class, mock_digest):
+ def test_upload_from_directory_success(s3_setup):
    """
    Test upload of multiple artifacts from disk to bucket
    """
    # Arrange
-     mock_bucket = MagicMock()
-     mock_bucket.name = "test-bucket"
-     mock_bucket_class.return_value = mock_bucket
-
-     # Create a fake .release file
+     s3, bucket_name, tmp_path = s3_setup
    release_data = """
GARDENLINUX_VERSION = 1234.1
GARDENLINUX_COMMIT_ID = abc123
GARDENLINUX_COMMIT_ID_LONG = abc123long
GARDENLINUX_FEATURES = _usi,_trustedboot
"""

-     # Create a fake release file and two artifact files
-     with TemporaryDirectory() as tmpdir:
-         artifacts_dir = Path(tmpdir)
-         cname = "testcname"
+     release_path = tmp_path / f"{CNAME}.release"
+     release_path.write_text(release_data)

-         # Write .release file
-         release_path = artifacts_dir / f"{cname}.release"
-         release_path.write_text(release_data)
+     for filename in [f"{CNAME}-file1", f"{CNAME}-file2"]:
+         (tmp_path / filename).write_bytes(b"dummy content")

-         # Create dummy files for upload
-         for name in [f"{cname}-file1", f"{cname}-file2"]:
-             (artifacts_dir / name).write_bytes(b"dummy content")
+     # Act
+     artifacts = S3Artifacts(bucket_name)
+     artifacts.upload_from_directory(CNAME, tmp_path)
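+     # upload_from_directory is expected to store artifacts under objects/<cname>/
+     # and the release metadata under meta/singles/<cname>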

-         s3 = S3Artifacts("test-bucket")
+     # Assert
+     bucket = s3.Bucket(bucket_name)
+     keys = [obj.key for obj in bucket.objects.all()]
+     assert f"objects/{CNAME}/{CNAME}-file1" in keys
+     assert f"objects/{CNAME}/{CNAME}-file2" in keys
+     assert f"meta/singles/{CNAME}" in keys

-         # Act
-         s3.upload_from_directory(cname, artifacts_dir)

-         # Assert
-         calls = mock_bucket.upload_file.call_args_list
-
-         # Check that for each file, upload_file was called with ExtraArgs containing "Tagging"
-         for name in [f"{cname}-file1", f"{cname}-file2"]:
-             key = f"objects/{cname}/{name}"
-             path = artifacts_dir / name
-
-             # Look for a call with matching positional args (path, key)
-             matching_calls = [
-                 call
-                 for call in calls
-                 if call.args[0] == path
-                 and call.args[1] == key
-                 and isinstance(call.kwargs.get("ExtraArgs"), dict)
-                 and "Tagging" in call.kwargs["ExtraArgs"]
-             ]
-             assert matching_calls, f"upload_file was not called with Tagging for {name}"
-
-
- @patch("gardenlinux.s3.s3_artifacts.file_digest", side_effect=dummy_digest)
- @patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName)
- @patch("gardenlinux.s3.s3_artifacts.Bucket")
- def test_upload_from_directory_with_delete(mock_bucket_class, mock_digest, tmp_path):
+ def test_upload_from_directory_with_delete(s3_setup):
    """
    Test that upload_from_directory deletes existing files before uploading
-     when delete_before_push=True
+     when delete_before_push=True.
    """
-     mock_bucket = MagicMock()
-     mock_bucket.name = "test-bucket"
-     mock_bucket_class.return_value = mock_bucket
-
-     s3 = S3Artifacts("test-bucket")
-     cname = "test-cname"
+     s3, bucket_name, tmp_path = s3_setup
+     bucket = s3.Bucket(bucket_name)

-     release = tmp_path / f"{cname}.release"
+     # Arrange: create release and artifact files locally
+     release = tmp_path / f"{CNAME}.release"
    release.write_text(
        "GARDENLINUX_VERSION = 1234.1\n"
        "GARDENLINUX_COMMIT_ID = abc123\n"
        "GARDENLINUX_COMMIT_ID_LONG = abc123long\n"
        "GARDENLINUX_FEATURES = _usi,_trustedboot\n"
    )

-     artifact = tmp_path / f"{cname}.kernel"
+     artifact = tmp_path / f"{CNAME}.kernel"
    artifact.write_bytes(b"fake")

-     s3.upload_from_directory(cname, tmp_path, delete_before_push=True)
+     # Arrange: put dummy existing objects to be deleted
+     bucket.put_object(Key=f"objects/{CNAME}/{artifact.name}", Body=b"old data")
+     bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"old metadata")

-     mock_bucket.delete_objects.assert_any_call(
-         Delete={"Objects": [{"Key": f"objects/{cname}/{artifact.name}"}]}
-     )
-     mock_bucket.delete_objects.assert_any_call(
-         Delete={"Objects": [{"Key": f"meta/singles/{cname}"}]}
-     )
+     artifacts = S3Artifacts(bucket_name)
+
+     # Act
+     artifacts.upload_from_directory(CNAME, tmp_path, delete_before_push=True)
+
+     # Assert
+     keys = [obj.key for obj in bucket.objects.all()]
+
+     # After delete_before_push the old objects are gone and each key exists
+     # exactly once, holding the freshly uploaded data
+     assert f"objects/{CNAME}/{artifact.name}" in keys
+     assert f"meta/singles/{CNAME}" in keys