From 7c1412c5d36a34c46703d7938a52117ba4df88f0 Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sat, 25 May 2024 12:29:28 +0200 Subject: [PATCH 01/14] Verify ACL and Bucket tests ACLs on objects are only allowed to be set when ObjectOwnership is correctly set on the bucket. Store ObjectOwnership. TBD: verification of ObjectOwnership, GET/PUT Ownership. --- .../s3mock/its/{AclIT.kt => AclITV2.kt} | 21 ++++++++-- .../adobe/testing/s3mock/its/BucketV1IT.kt | 12 +++--- .../adobe/testing/s3mock/its/BucketV2IT.kt | 12 +++--- .../testing/s3mock/BucketController.java | 8 +++- .../ObjectOwnershipHeaderConverter.java | 40 +++++++++++++++++++ .../testing/s3mock/S3MockConfiguration.java | 5 +++ .../testing/s3mock/service/BucketService.java | 13 ++++-- .../testing/s3mock/store/BucketMetadata.java | 12 +++++- .../testing/s3mock/store/BucketStore.java | 6 ++- .../s3mock/store/StoreConfiguration.java | 6 ++- .../testing/s3mock/util/AwsHttpHeaders.java | 1 + .../testing/s3mock/BucketControllerTest.kt | 3 +- .../s3mock/service/MultipartServiceTest.kt | 2 +- .../testing/s3mock/service/ServiceTestBase.kt | 2 + .../testing/s3mock/store/BucketStoreTest.kt | 21 +++++----- .../s3mock/store/StoreConfigurationTest.kt | 3 +- .../testing/s3mock/store/StoreTestBase.kt | 2 + .../store/StoresWithExistingFileRootTest.kt | 3 +- 18 files changed, 134 insertions(+), 38 deletions(-) rename integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/{AclIT.kt => AclITV2.kt} (88%) create mode 100644 server/src/main/java/com/adobe/testing/s3mock/ObjectOwnershipHeaderConverter.java diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AclIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AclITV2.kt similarity index 88% rename from integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AclIT.kt rename to integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AclITV2.kt index bbb229c35..840fabfc4 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AclIT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AclITV2.kt @@ -22,22 +22,35 @@ import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInfo import software.amazon.awssdk.services.s3.S3Client import software.amazon.awssdk.services.s3.model.AccessControlPolicy +import software.amazon.awssdk.services.s3.model.CreateBucketRequest import software.amazon.awssdk.services.s3.model.GetObjectAclRequest import software.amazon.awssdk.services.s3.model.Grant import software.amazon.awssdk.services.s3.model.Grantee import software.amazon.awssdk.services.s3.model.ObjectCannedACL +import software.amazon.awssdk.services.s3.model.ObjectOwnership import software.amazon.awssdk.services.s3.model.Owner import software.amazon.awssdk.services.s3.model.Permission.FULL_CONTROL import software.amazon.awssdk.services.s3.model.PutObjectAclRequest import software.amazon.awssdk.services.s3.model.Type.CANONICAL_USER -internal class AclIT : S3TestBase() { +internal class AclITV2 : S3TestBase() { private val s3ClientV2: S3Client = createS3ClientV2() @Test + @S3VerifiedSuccess(year = 2024) fun testPutCannedAcl_OK(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME - val (bucketName, _) = givenBucketAndObjectV2(testInfo, sourceKey) + val bucketName = bucketName(testInfo) + + //create bucket that sets ownership to non-default to allow setting ACLs. 
+ s3ClientV2.createBucket(CreateBucketRequest + .builder() + .bucket(bucketName) + .objectOwnership(ObjectOwnership.OBJECT_WRITER) + .build() + ) + + givenObjectV2(bucketName, sourceKey) s3ClientV2.putObjectAcl( PutObjectAclRequest @@ -58,8 +71,8 @@ internal class AclIT : S3TestBase() { .build() ).also { assertThat(it.sdkHttpResponse().isSuccessful).isTrue() - assertThat(it.owner().id()).isEqualTo(DEFAULT_OWNER.id) - assertThat(it.owner().displayName()).isEqualTo(DEFAULT_OWNER.displayName) + assertThat(it.owner().id()).isNotBlank() + assertThat(it.owner().displayName()).isNotBlank() assertThat(it.grants().size).isEqualTo(1) assertThat(it.grants()[0].permission()).isEqualTo(FULL_CONTROL) } diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketV1IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketV1IT.kt index 1977a606a..462fd3f17 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketV1IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketV1IT.kt @@ -75,7 +75,7 @@ internal class BucketV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCreateAndDeleteBucket(testInfo: TestInfo) { val bucketName = bucketName(testInfo) s3Client.createBucket(bucketName) @@ -88,7 +88,7 @@ internal class BucketV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testFailureDeleteNonEmptyBucket(testInfo: TestInfo) { val bucketName = bucketName(testInfo) s3Client.createBucket(bucketName) @@ -101,7 +101,7 @@ internal class BucketV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testBucketDoesExistV2_ok(testInfo: TestInfo) { val bucketName = bucketName(testInfo) s3Client.createBucket(bucketName) @@ -112,7 +112,7 @@ internal class BucketV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testBucketDoesExistV2_failure(testInfo: TestInfo) { val bucketName = bucketName(testInfo) @@ -121,7 +121,7 @@ internal class BucketV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun duplicateBucketCreation(testInfo: TestInfo) { val bucketName = bucketName(testInfo) s3Client.createBucket(bucketName) @@ -135,7 +135,7 @@ internal class BucketV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun duplicateBucketDeletion(testInfo: TestInfo) { val bucketName = bucketName(testInfo) s3Client.createBucket(bucketName) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketV2IT.kt index c0290047f..446731a09 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketV2IT.kt @@ -48,7 +48,7 @@ internal class BucketV2IT : S3TestBase() { private val s3ClientV2: S3Client = createS3ClientV2() @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun createAndDeleteBucket(testInfo: TestInfo) { val bucketName = bucketName(testInfo) s3ClientV2.createBucket(CreateBucketRequest.builder().bucket(bucketName).build()) @@ -71,7 +71,7 @@ internal class BucketV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun getBucketLocation(testInfo: TestInfo) { val bucketName = 
givenBucketV2(testInfo) val bucketLocation = s3ClientV2.getBucketLocation(GetBucketLocationRequest.builder().bucket(bucketName).build()) @@ -80,7 +80,7 @@ internal class BucketV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun duplicateBucketCreation(testInfo: TestInfo) { val bucketName = bucketName(testInfo) s3ClientV2.createBucket(CreateBucketRequest.builder().bucket(bucketName).build()) @@ -112,7 +112,7 @@ internal class BucketV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun duplicateBucketDeletion(testInfo: TestInfo) { val bucketName = bucketName(testInfo) s3ClientV2.createBucket(CreateBucketRequest.builder().bucket(bucketName).build()) @@ -143,7 +143,7 @@ internal class BucketV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun getBucketLifecycle_notFound(testInfo: TestInfo) { val bucketName = bucketName(testInfo) s3ClientV2.createBucket(CreateBucketRequest.builder().bucket(bucketName).build()) @@ -167,7 +167,7 @@ internal class BucketV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun putGetDeleteBucketLifecycle(testInfo: TestInfo) { val bucketName = bucketName(testInfo) s3ClientV2.createBucket(CreateBucketRequest.builder().bucket(bucketName).build()) diff --git a/server/src/main/java/com/adobe/testing/s3mock/BucketController.java b/server/src/main/java/com/adobe/testing/s3mock/BucketController.java index f29b18875..8d826647e 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/BucketController.java +++ b/server/src/main/java/com/adobe/testing/s3mock/BucketController.java @@ -17,6 +17,7 @@ package com.adobe.testing.s3mock; import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_BUCKET_OBJECT_LOCK_ENABLED; +import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_OBJECT_OWNERSHIP; import static com.adobe.testing.s3mock.util.AwsHttpParameters.CONTINUATION_TOKEN; import static com.adobe.testing.s3mock.util.AwsHttpParameters.ENCODING_TYPE; import static com.adobe.testing.s3mock.util.AwsHttpParameters.KEY_MARKER; @@ -57,6 +58,7 @@ import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.s3.model.ObjectOwnership; /** * Handles requests related to buckets. 
@@ -119,10 +121,12 @@ public ResponseEntity listBuckets() { ) public ResponseEntity createBucket(@PathVariable final String bucketName, @RequestHeader(value = X_AMZ_BUCKET_OBJECT_LOCK_ENABLED, - required = false, defaultValue = "false") boolean objectLockEnabled) { + required = false, defaultValue = "false") boolean objectLockEnabled, + @RequestHeader(value = X_AMZ_OBJECT_OWNERSHIP, + required = false, defaultValue = "BucketOwnerEnforced") ObjectOwnership objectOwnership) { bucketService.verifyBucketNameIsAllowed(bucketName); bucketService.verifyBucketDoesNotExist(bucketName); - bucketService.createBucket(bucketName, objectLockEnabled); + bucketService.createBucket(bucketName, objectLockEnabled, objectOwnership); return ResponseEntity.ok().build(); } diff --git a/server/src/main/java/com/adobe/testing/s3mock/ObjectOwnershipHeaderConverter.java b/server/src/main/java/com/adobe/testing/s3mock/ObjectOwnershipHeaderConverter.java new file mode 100644 index 000000000..c660f8cf4 --- /dev/null +++ b/server/src/main/java/com/adobe/testing/s3mock/ObjectOwnershipHeaderConverter.java @@ -0,0 +1,40 @@ +/* + * Copyright 2017-2024 Adobe. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.adobe.testing.s3mock; + +import com.adobe.testing.s3mock.util.AwsHttpHeaders; +import org.springframework.core.convert.converter.Converter; +import org.springframework.lang.NonNull; +import org.springframework.lang.Nullable; +import software.amazon.awssdk.services.s3.model.ObjectOwnership; + +/** + * Converts values of the {@link AwsHttpHeaders#X_AMZ_OBJECT_OWNERSHIP} which is sent by the Amazon + * client. + * Example: x-amz-object-ownership: ObjectWriter + * API Reference + * API Reference + * API Reference + */ +class ObjectOwnershipHeaderConverter implements Converter { + + @Override + @Nullable + public ObjectOwnership convert(@NonNull String source) { + return ObjectOwnership.fromValue(source); + } +} diff --git a/server/src/main/java/com/adobe/testing/s3mock/S3MockConfiguration.java b/server/src/main/java/com/adobe/testing/s3mock/S3MockConfiguration.java index 954312ae9..aa1a5529b 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/S3MockConfiguration.java +++ b/server/src/main/java/com/adobe/testing/s3mock/S3MockConfiguration.java @@ -211,6 +211,11 @@ HttpRangeHeaderConverter httpRangeHeaderConverter() { return new HttpRangeHeaderConverter(); } + @Bean + ObjectOwnershipHeaderConverter objectOwnershipHeaderConverter() { + return new ObjectOwnershipHeaderConverter(); + } + /** * {@link ResponseEntityExceptionHandler} dealing with {@link S3Exception}s; Serializes them to * response output as suitable ErrorResponses. 
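As a quick illustration of what this patch enables on the client side (a minimal sketch assuming the AWS SDK v2, with placeholder bucket and key names for an object that already exists, mirroring the AclITV2 test above): the bucket has to be created with a non-default ObjectOwnership such as ObjectWriter before an object ACL can be set, because the default BucketOwnerEnforced disables ACLs.

    import software.amazon.awssdk.services.s3.S3Client
    import software.amazon.awssdk.services.s3.model.CreateBucketRequest
    import software.amazon.awssdk.services.s3.model.ObjectCannedACL
    import software.amazon.awssdk.services.s3.model.ObjectOwnership
    import software.amazon.awssdk.services.s3.model.PutObjectAclRequest

    fun putCannedAclSketch(s3: S3Client, bucket: String, key: String) {
      // Create the bucket with non-default ownership so ACLs may be set on its objects.
      s3.createBucket(
        CreateBucketRequest.builder()
          .bucket(bucket)
          .objectOwnership(ObjectOwnership.OBJECT_WRITER)
          .build()
      )
      // With BucketOwnerEnforced (the default) this call would be rejected;
      // the key is assumed to already exist in the bucket.
      s3.putObjectAcl(
        PutObjectAclRequest.builder()
          .bucket(bucket)
          .key(key)
          .acl(ObjectCannedACL.PUBLIC_READ)
          .build()
      )
    }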
diff --git a/server/src/main/java/com/adobe/testing/s3mock/service/BucketService.java b/server/src/main/java/com/adobe/testing/s3mock/service/BucketService.java index 64f89dc15..d8169ea88 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/service/BucketService.java +++ b/server/src/main/java/com/adobe/testing/s3mock/service/BucketService.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2023 Adobe. + * Copyright 2017-2024 Adobe. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -50,6 +50,7 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.function.UnaryOperator; +import software.amazon.awssdk.services.s3.model.ObjectOwnership; import software.amazon.awssdk.utils.http.SdkHttpUtils; public class BucketService { @@ -98,8 +99,14 @@ public Bucket getBucket(String bucketName) { * * @return the Bucket */ - public Bucket createBucket(String bucketName, boolean objectLockEnabled) { - return Bucket.from(bucketStore.createBucket(bucketName, objectLockEnabled)); + public Bucket createBucket(String bucketName, + boolean objectLockEnabled, + ObjectOwnership objectOwnership) { + return Bucket.from( + bucketStore.createBucket(bucketName, + objectLockEnabled, + objectOwnership) + ); } public boolean deleteBucket(String bucketName) { diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/BucketMetadata.java b/server/src/main/java/com/adobe/testing/s3mock/store/BucketMetadata.java index 213b233b5..b0b984bc6 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/store/BucketMetadata.java +++ b/server/src/main/java/com/adobe/testing/s3mock/store/BucketMetadata.java @@ -22,6 +22,7 @@ import java.util.HashMap; import java.util.Map; import java.util.UUID; +import software.amazon.awssdk.services.s3.model.ObjectOwnership; /** * Represents a bucket in S3, used to serialize and deserialize all metadata locally. 
@@ -31,6 +32,7 @@ public record BucketMetadata( String creationDate, ObjectLockConfiguration objectLockConfiguration, BucketLifecycleConfiguration bucketLifecycleConfiguration, + ObjectOwnership objectOwnership, Path path, Map objects ) { @@ -38,11 +40,13 @@ public record BucketMetadata( public BucketMetadata(String name, String creationDate, ObjectLockConfiguration objectLockConfiguration, BucketLifecycleConfiguration bucketLifecycleConfiguration, + ObjectOwnership objectOwnership, Path path) { this(name, creationDate, objectLockConfiguration, bucketLifecycleConfiguration, + objectOwnership, path, new HashMap<>()); } @@ -50,13 +54,17 @@ public BucketMetadata(String name, String creationDate, public BucketMetadata withObjectLockConfiguration( ObjectLockConfiguration objectLockConfiguration) { return new BucketMetadata(name(), creationDate(), objectLockConfiguration, - bucketLifecycleConfiguration(), path()); + bucketLifecycleConfiguration(), + objectOwnership(), + path()); } public BucketMetadata withBucketLifecycleConfiguration( BucketLifecycleConfiguration bucketLifecycleConfiguration) { return new BucketMetadata(name(), creationDate(), objectLockConfiguration(), - bucketLifecycleConfiguration, path()); + bucketLifecycleConfiguration, + objectOwnership(), + path()); } public boolean doesKeyExist(String key) { diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/BucketStore.java b/server/src/main/java/com/adobe/testing/s3mock/store/BucketStore.java index 6c446d63a..fe75784a4 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/store/BucketStore.java +++ b/server/src/main/java/com/adobe/testing/s3mock/store/BucketStore.java @@ -35,6 +35,7 @@ import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import software.amazon.awssdk.services.s3.model.ObjectOwnership; /** * Stores buckets and their metadata created in S3Mock. @@ -178,7 +179,9 @@ private List findBucketPaths() { * @throws IllegalStateException if the bucket cannot be created or the bucket already exists but * is not a directory. */ - public BucketMetadata createBucket(String bucketName, boolean objectLockEnabled) { + public BucketMetadata createBucket(String bucketName, + boolean objectLockEnabled, + ObjectOwnership objectOwnership) { var bucketMetadata = getBucketMetadata(bucketName); if (bucketMetadata != null) { throw new IllegalStateException("Bucket already exists."); @@ -193,6 +196,7 @@ public BucketMetadata createBucket(String bucketName, boolean objectLockEnabled) objectLockEnabled ? 
new ObjectLockConfiguration(ObjectLockEnabled.ENABLED, null) : null, null, + objectOwnership, bucketFolder.toPath() ); writeToDisk(newBucketMetadata); diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/StoreConfiguration.java b/server/src/main/java/com/adobe/testing/s3mock/store/StoreConfiguration.java index 5ec64586f..dbd37191c 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/store/StoreConfiguration.java +++ b/server/src/main/java/com/adobe/testing/s3mock/store/StoreConfiguration.java @@ -32,6 +32,7 @@ import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import software.amazon.awssdk.services.s3.model.ObjectOwnership; @Configuration @EnableConfigurationProperties(StoreProperties.class) @@ -76,7 +77,10 @@ BucketStore bucketStore(StoreProperties properties, File rootFolder, List { - bucketStore.createBucket(name, false); + bucketStore.createBucket(name, + false, + ObjectOwnership.BUCKET_OWNER_ENFORCED + ); LOG.info("Creating initial bucket {}.", name); }); diff --git a/server/src/main/java/com/adobe/testing/s3mock/util/AwsHttpHeaders.java b/server/src/main/java/com/adobe/testing/s3mock/util/AwsHttpHeaders.java index 306eea987..744336e50 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/util/AwsHttpHeaders.java +++ b/server/src/main/java/com/adobe/testing/s3mock/util/AwsHttpHeaders.java @@ -53,6 +53,7 @@ public final class AwsHttpHeaders { public static final String X_AMZ_DELETE_MARKER = "x-amz-delete-marker"; public static final String X_AMZ_BUCKET_OBJECT_LOCK_ENABLED = "x-amz-bucket-object-lock-enabled"; + public static final String X_AMZ_OBJECT_OWNERSHIP = "x-amz-object-ownership"; public static final String X_AMZ_OBJECT_ATTRIBUTES = "x-amz-object-attributes"; public static final String X_AMZ_CHECKSUM_ALGORITHM = "x-amz-checksum-algorithm"; public static final String X_AMZ_SDK_CHECKSUM_ALGORITHM = "x-amz-sdk-checksum-algorithm"; diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/BucketControllerTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/BucketControllerTest.kt index 40534cdfa..f576eed9f 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/BucketControllerTest.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/BucketControllerTest.kt @@ -56,6 +56,7 @@ import org.springframework.http.HttpMethod import org.springframework.http.HttpStatus import org.springframework.http.MediaType import org.springframework.web.util.UriComponentsBuilder +import software.amazon.awssdk.services.s3.model.ObjectOwnership.BUCKET_OWNER_ENFORCED import java.nio.file.Paths import java.time.Instant @@ -166,7 +167,7 @@ internal class BucketControllerTest : BaseControllerTest() { @Test fun testCreateBucket_InternalServerError() { - whenever(bucketService.createBucket(TEST_BUCKET_NAME, false)) + whenever(bucketService.createBucket(TEST_BUCKET_NAME, false, BUCKET_OWNER_ENFORCED)) .thenThrow(IllegalStateException("THIS IS EXPECTED")) val headers = HttpHeaders().apply { diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/service/MultipartServiceTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/service/MultipartServiceTest.kt index 8923ef09c..8147432b2 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/service/MultipartServiceTest.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/service/MultipartServiceTest.kt @@ -158,7 +158,7 @@ internal class MultipartServiceTest : 
ServiceTestBase() { val uploadId = "uploadId" val bucketName = "bucketName" whenever(bucketStore.getBucketMetadata(bucketName)) - .thenReturn(BucketMetadata(null, null, null, null, null)) + .thenReturn(BucketMetadata(null, null, null, null, null, null)) whenever( multipartStore.getMultipartUpload( ArgumentMatchers.any( diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/service/ServiceTestBase.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/service/ServiceTestBase.kt index d45a5b9a9..6623c232a 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/service/ServiceTestBase.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/service/ServiceTestBase.kt @@ -26,6 +26,7 @@ import com.adobe.testing.s3mock.store.ObjectStore import com.adobe.testing.s3mock.store.S3ObjectMetadata import org.mockito.kotlin.whenever import org.springframework.boot.test.mock.mockito.MockBean +import software.amazon.awssdk.services.s3.model.ObjectOwnership.BUCKET_OWNER_ENFORCED import java.nio.file.Files import java.util.Date import java.util.UUID @@ -128,6 +129,7 @@ internal abstract class ServiceTestBase { Date().toString(), null, null, + BUCKET_OWNER_ENFORCED, Files.createTempDirectory(bucketName) ) } diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/store/BucketStoreTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/store/BucketStoreTest.kt index 01504b296..2d9a1a64c 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/store/BucketStoreTest.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/store/BucketStoreTest.kt @@ -29,6 +29,8 @@ import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMock import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureWebMvc import org.springframework.boot.test.context.SpringBootTest import org.springframework.boot.test.mock.mockito.MockBean +import software.amazon.awssdk.services.s3.model.ObjectOwnership +import software.amazon.awssdk.services.s3.model.ObjectOwnership.BUCKET_OWNER_ENFORCED @AutoConfigureWebMvc @AutoConfigureMockMvc @@ -40,14 +42,15 @@ internal class BucketStoreTest : StoreTestBase() { @Test fun testCreateBucket() { - val bucket = bucketStore.createBucket(TEST_BUCKET_NAME, false) + val bucket = bucketStore.createBucket(TEST_BUCKET_NAME, false, + BUCKET_OWNER_ENFORCED) assertThat(bucket.name).endsWith(TEST_BUCKET_NAME) assertThat(bucket.path).exists() } @Test fun testDoesBucketExist_ok() { - bucketStore.createBucket(TEST_BUCKET_NAME, false) + bucketStore.createBucket(TEST_BUCKET_NAME, false, BUCKET_OWNER_ENFORCED) val doesBucketExist = bucketStore.doesBucketExist(TEST_BUCKET_NAME) @@ -67,9 +70,9 @@ internal class BucketStoreTest : StoreTestBase() { val bucketName2 = "myNüwNämeZwöei" val bucketName3 = "myNüwNämeDrü" - bucketStore.createBucket(bucketName1, false) - bucketStore.createBucket(bucketName2, false) - bucketStore.createBucket(bucketName3, false) + bucketStore.createBucket(bucketName1, false, BUCKET_OWNER_ENFORCED) + bucketStore.createBucket(bucketName2, false, BUCKET_OWNER_ENFORCED) + bucketStore.createBucket(bucketName3, false, BUCKET_OWNER_ENFORCED) val buckets = bucketStore.listBuckets() @@ -78,7 +81,7 @@ internal class BucketStoreTest : StoreTestBase() { @Test fun testCreateAndGetBucket() { - bucketStore.createBucket(TEST_BUCKET_NAME, false) + bucketStore.createBucket(TEST_BUCKET_NAME, false, BUCKET_OWNER_ENFORCED) val bucket = bucketStore.getBucketMetadata(TEST_BUCKET_NAME) assertThat(bucket).isNotNull() @@ -87,7 +90,7 @@ internal class BucketStoreTest : 
StoreTestBase() { @Test fun testCreateAndGetBucketWithObjectLock() { - bucketStore.createBucket(TEST_BUCKET_NAME, true) + bucketStore.createBucket(TEST_BUCKET_NAME, true, BUCKET_OWNER_ENFORCED) val bucket = bucketStore.getBucketMetadata(TEST_BUCKET_NAME) assertThat(bucket).isNotNull() @@ -99,7 +102,7 @@ internal class BucketStoreTest : StoreTestBase() { @Test fun testStoreAndGetBucketLifecycleConfiguration() { - bucketStore.createBucket(TEST_BUCKET_NAME, true) + bucketStore.createBucket(TEST_BUCKET_NAME, true, BUCKET_OWNER_ENFORCED) val filter1 = LifecycleRuleFilter(null, null, "documents/", null, null) val transition1 = Transition(null, 30, StorageClass.GLACIER) @@ -118,7 +121,7 @@ internal class BucketStoreTest : StoreTestBase() { @Test fun testCreateAndDeleteBucket() { - bucketStore.createBucket(TEST_BUCKET_NAME, false) + bucketStore.createBucket(TEST_BUCKET_NAME, false, BUCKET_OWNER_ENFORCED) val bucketDeleted = bucketStore.deleteBucket(TEST_BUCKET_NAME) val bucket = bucketStore.getBucketMetadata(TEST_BUCKET_NAME) diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoreConfigurationTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoreConfigurationTest.kt index 1a19db376..bd938a9f5 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoreConfigurationTest.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoreConfigurationTest.kt @@ -20,6 +20,7 @@ import org.apache.commons.io.FileUtils import org.assertj.core.api.Assertions.assertThat import org.junit.jupiter.api.Test import org.junit.jupiter.api.io.TempDir +import software.amazon.awssdk.services.s3.model.ObjectOwnership import java.io.IOException import java.nio.file.Files import java.nio.file.Path @@ -60,7 +61,7 @@ internal class StoreConfigurationTest { val bucketMetadata = BucketMetadata( existingBucketName, Instant.now().toString(), - null, null, existingBucket + null, null, ObjectOwnership.BUCKET_OWNER_ENFORCED, existingBucket ) val metaFile = Paths.get(existingBucket.toString(), BUCKET_META_FILE) OBJECT_MAPPER.writeValue(metaFile.toFile(), bucketMetadata) diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoreTestBase.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoreTestBase.kt index c3ddb424a..1e32619a4 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoreTestBase.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoreTestBase.kt @@ -21,6 +21,7 @@ import org.apache.http.entity.ContentType import org.springframework.beans.factory.annotation.Autowired import org.springframework.http.HttpHeaders import org.springframework.http.MediaType +import software.amazon.awssdk.services.s3.model.ObjectOwnership import java.io.File import java.nio.file.Paths import java.util.Date @@ -36,6 +37,7 @@ internal abstract class StoreTestBase { Date().toString(), null, null, + ObjectOwnership.BUCKET_OWNER_ENFORCED, Paths.get(rootFolder.toString(), bucketName), mapOf() ) diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoresWithExistingFileRootTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoresWithExistingFileRootTest.kt index 15a578857..8651aea89 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoresWithExistingFileRootTest.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/store/StoresWithExistingFileRootTest.kt @@ -27,6 +27,7 @@ import org.springframework.boot.test.context.SpringBootTest import org.springframework.boot.test.context.TestConfiguration import 
org.springframework.boot.test.mock.mockito.MockBean import org.springframework.context.annotation.Bean +import software.amazon.awssdk.services.s3.model.ObjectOwnership import java.io.File import java.util.UUID @@ -49,7 +50,7 @@ internal class StoresWithExistingFileRootTest : StoreTestBase() { @Test fun testBucketStoreWithExistingRoot() { - bucketStore.createBucket(TEST_BUCKET_NAME, false) + bucketStore.createBucket(TEST_BUCKET_NAME, false, ObjectOwnership.BUCKET_OWNER_ENFORCED) val bucket = bucketStore.getBucketMetadata(TEST_BUCKET_NAME) assertThatThrownBy { testBucketStore.getBucketMetadata(TEST_BUCKET_NAME) } From 884f0181425255177fed293bd4d42553bbe65682 Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sat, 25 May 2024 13:00:33 +0200 Subject: [PATCH 02/14] Verify GetPutDeleteObjectV1IT One test succeeded in 2022, failed in 2024: generatePresignedUrlWithResponseHeaderOverrides Not yet sure why we receive a 404 when we make the request to the presigned URL... --- .../s3mock/its/GetPutDeleteObjectV1IT.kt | 39 ++++++++++--------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV1IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV1IT.kt index fd148f6fd..196f63696 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV1IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV1IT.kt @@ -16,6 +16,7 @@ package com.adobe.testing.s3mock.its import com.adobe.testing.s3mock.util.DigestUtil.hexDigest +import com.amazonaws.HttpMethod import com.amazonaws.services.s3.AmazonS3 import com.amazonaws.services.s3.Headers import com.amazonaws.services.s3.model.AmazonS3Exception @@ -31,7 +32,6 @@ import com.amazonaws.services.s3.model.ResponseHeaderOverrides import com.amazonaws.services.s3.model.SSEAwsKeyManagementParams import com.amazonaws.services.s3.transfer.TransferManager import org.apache.http.HttpHost -import org.apache.http.HttpResponse import org.apache.http.client.methods.HttpGet import org.apache.http.impl.client.HttpClients import org.assertj.core.api.Assertions.assertThat @@ -58,7 +58,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { private val transferManagerV1: TransferManager = createTransferManagerV1() @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun putObjectWhereKeyContainsPathFragments(testInfo: TestInfo) { val (bucketName, _) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) val objectExist = s3Client.doesObjectExist(bucketName, UPLOAD_FILE_NAME) @@ -70,7 +70,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { */ @ParameterizedTest(name = ParameterizedTest.INDEX_PLACEHOLDER + " uploadWithSigning={0}, uploadChunked={1}") @CsvSource(value = ["true, true", "true, false", "false, true", "false, false"]) - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldUploadAndDownloadObject(uploadWithSigning: Boolean, uploadChunked: Boolean, testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) @@ -91,7 +91,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { * https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldTolerateWeirdCharactersInObjectKey(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -108,7 +108,7 @@ internal class 
GetPutDeleteObjectV1IT : S3TestBase() { * Stores a file in a previously created bucket. Downloads the file again and compares checksums */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldUploadAndDownloadStream(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val resourceId = UUID.randomUUID().toString() @@ -135,7 +135,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { * Tests if Object can be uploaded with KMS and Metadata can be retrieved. */ @Test - @S3VerifiedFailure(year = 2022, + @S3VerifiedFailure(year = 2024, reason = "No KMS configuration for AWS test account") fun shouldUploadWithEncryption(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) @@ -162,7 +162,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { * Tests if Object can be uploaded with wrong KMS Key. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldNotUploadWithWrongEncryptionKey(testInfo: TestInfo) { Configuration().apply { this.setMaxStackTraceElementsDisplayed(10000) @@ -185,7 +185,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { * Tests if Object can be uploaded with wrong KMS Key. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldNotUploadStreamingWithWrongEncryptionKey(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val bytes = UPLOAD_FILE_NAME.toByteArray() @@ -210,7 +210,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { * Tests if the Metadata of an existing file can be retrieved. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldGetObjectMetadata(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val nonExistingFileName = randomName @@ -243,7 +243,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { * Tests if an object can be deleted. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldDeleteObject(testInfo: TestInfo) { val (bucketName, _) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) s3Client.deleteObject(bucketName, UPLOAD_FILE_NAME) @@ -256,7 +256,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { * Tests if multiple objects can be deleted. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldBatchDeleteObjects(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile1 = File(UPLOAD_FILE_NAME) @@ -292,7 +292,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { * Tests if Error is thrown when DeleteObjectsRequest contains nonExisting key. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldThrowOnBatchDeleteObjectsWrongKey(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile1 = File(UPLOAD_FILE_NAME) @@ -318,7 +318,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { * Tests if an object can be uploaded asynchronously. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldUploadInParallel(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -336,7 +336,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { * Verify that range-downloads work. 
*/ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun checkRangeDownloads(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -374,7 +374,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testGetObject_successWithMatchingEtag(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val (bucketName, putObjectResult) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) @@ -390,7 +390,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testGetObject_failureWithMatchingEtag(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val (bucketName, putObjectResult) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) @@ -407,7 +407,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testGetObject_successWithNonMatchingEtag(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val (bucketName, putObjectResult) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) @@ -424,7 +424,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testGetObject_failureWithNonMatchingEtag(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val (bucketName, putObjectResult) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) @@ -442,7 +442,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2022) //TODO: failed in 2024. Not sure why yet.... fun generatePresignedUrlWithResponseHeaderOverrides(testInfo: TestInfo) { val (bucketName, _) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) val presignedUrlRequest = GeneratePresignedUrlRequest(bucketName, UPLOAD_FILE_NAME).apply { @@ -456,6 +456,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { this.expires = "expires" } ) + this.method = HttpMethod.GET } val resourceUrl = s3Client.generatePresignedUrl(presignedUrlRequest) HttpClients.createDefault().use { From d4e31d970d2492bbfe92ac8bcf134500a9ab19ef Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sat, 25 May 2024 13:01:05 +0200 Subject: [PATCH 03/14] Verify ErrorResponsesV2IT Every test a success. 
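An aside on the presigned-URL change in the preceding patch (02/14), as a minimal sketch assuming the v1 AWS SDK and placeholder bucket and key names: the request carries the response header overrides in the signed query string and, as the patch does, sets the HTTP method explicitly; the plain HTTP call made against the returned URL must use that same method.

    import com.amazonaws.HttpMethod
    import com.amazonaws.services.s3.AmazonS3
    import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest
    import com.amazonaws.services.s3.model.ResponseHeaderOverrides
    import java.net.URL

    fun presignedGetWithOverrides(s3: AmazonS3, bucket: String, key: String): URL =
      s3.generatePresignedUrl(
        GeneratePresignedUrlRequest(bucket, key).apply {
          // Overrides are encoded into the signed query string.
          this.responseHeaders = ResponseHeaderOverrides()
            .withContentType("text/plain")
            .withContentDisposition("attachment; filename=\"$key\"")
          // Set the method explicitly, as the patch above does; the request
          // made against the URL must use the method that was signed.
          this.method = HttpMethod.GET
        }
      )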
--- .../testing/s3mock/its/ErrorResponsesV2IT.kt | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ErrorResponsesV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ErrorResponsesV2IT.kt index 2d597454a..39c44f99d 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ErrorResponsesV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ErrorResponsesV2IT.kt @@ -42,7 +42,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { private val s3ClientV2: S3Client = createS3ClientV2() @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun getObject_noSuchKey(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val req = GetObjectRequest.builder().bucket(bucketName).key(NON_EXISTING_KEY).build() @@ -53,7 +53,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun getObject_noSuchKey_startingSlash(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val req = GetObjectRequest.builder().bucket(bucketName).key("/$NON_EXISTING_KEY").build() @@ -64,7 +64,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun putObject_noSuchBucket() { val uploadFile = File(UPLOAD_FILE_NAME) @@ -83,7 +83,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun copyObjectToNonExistingDestination_noSuchBucket(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, _) = givenBucketAndObjectV2(testInfo, UPLOAD_FILE_NAME) @@ -104,7 +104,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun deleteObject_noSuchBucket() { assertThatThrownBy { s3ClientV2.deleteObject( @@ -120,7 +120,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun deleteObject_nonExistent_OK(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) @@ -134,7 +134,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun deleteObjects_noSuchBucket() { assertThatThrownBy { s3ClientV2.deleteObjects( @@ -157,7 +157,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun deleteBucket_noSuchBucket() { assertThatThrownBy { s3ClientV2.deleteBucket( @@ -172,7 +172,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun multipartUploads_noSuchBucket() { assertThatThrownBy { s3ClientV2.createMultipartUpload( @@ -188,7 +188,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun listMultipartUploads_noSuchBucket() { assertThatThrownBy { s3ClientV2.listMultipartUploads( @@ -203,7 +203,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun abortMultipartUpload_noSuchBucket() { assertThatThrownBy { s3ClientV2.abortMultipartUpload( @@ -220,7 +220,7 @@ internal class ErrorResponsesV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun uploadMultipart_invalidPartNumber(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) From 
398bf5d5c08dd5034a327eba532283fd64f8a67d Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sat, 25 May 2024 13:01:20 +0200 Subject: [PATCH 04/14] Verify ErrorResponsesV1IT Every test a success. --- .../testing/s3mock/its/ErrorResponsesV1IT.kt | 44 +++++++++---------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ErrorResponsesV1IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ErrorResponsesV1IT.kt index c389cd27d..3cff2fd53 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ErrorResponsesV1IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ErrorResponsesV1IT.kt @@ -47,7 +47,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { private val transferManagerV1: TransferManager = createTransferManagerV1() @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun getObject_noSuchKey(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val getObjectRequest = GetObjectRequest(bucketName, NON_EXISTING_KEY) @@ -57,7 +57,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun getObject_noSuchKey_startingSlash(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val getObjectRequest = GetObjectRequest(bucketName, "/$NON_EXISTING_KEY") @@ -67,7 +67,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun putObject_noSuchBucket() { val uploadFile = File(UPLOAD_FILE_NAME) assertThatThrownBy { @@ -84,7 +84,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun putObjectEncrypted_noSuchBucket() { val uploadFile = File(UPLOAD_FILE_NAME) PutObjectRequest(randomName, UPLOAD_FILE_NAME, uploadFile).apply { @@ -104,7 +104,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun copyObjectToNonExistingDestination_noSuchBucket(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, _) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) @@ -117,7 +117,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun copyObjectEncryptedToNonExistingDestination_noSuchBucket(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, _) = givenBucketAndObjectV1(testInfo, sourceKey) @@ -132,7 +132,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun getObjectMetadata_noSuchBucket() { assertThatThrownBy { s3Client.getObjectMetadata( @@ -145,7 +145,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun deleteFrom_noSuchBucket() { assertThatThrownBy { s3Client.deleteObject( @@ -158,14 +158,14 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun deleteObject_nonExistent_OK(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) s3Client.deleteObject(bucketName, randomName) } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun batchDeleteObjects_noSuchBucket() { val multiObjectDeleteRequest = DeleteObjectsRequest(randomName).apply { this.keys = listOf(KeyVersion("1_$UPLOAD_FILE_NAME")) @@ -176,7 +176,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 
2024) fun deleteBucket_noSuchBucket() { assertThatThrownBy { s3Client.deleteBucket(randomName) } .isInstanceOf(AmazonS3Exception::class.java) @@ -184,7 +184,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun listObjects_noSuchBucket() { assertThatThrownBy { s3Client.listObjects( @@ -197,7 +197,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun uploadParallel_noSuchBucket() { val uploadFile = File(UPLOAD_FILE_NAME) assertThatThrownBy { @@ -211,7 +211,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun multipartUploads_noSuchBucket() { assertThatThrownBy { s3Client.initiateMultipartUpload( @@ -223,7 +223,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun listMultipartUploads_noSuchBucket() { assertThatThrownBy { s3Client.listMultipartUploads( @@ -235,7 +235,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun abortMultipartUpload_noSuchBucket() { assertThatThrownBy { s3Client.abortMultipartUpload( @@ -251,7 +251,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun uploadMultipart_invalidPartNumber(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -281,7 +281,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun completeMultipartUploadWithNonExistingPartNumber(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -317,7 +317,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @Throws(Exception::class) fun rangeDownloadsFromNonExistingBucket() { val transferManager = createTransferManagerV1() @@ -333,7 +333,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @Throws(Exception::class) fun rangeDownloadsFromNonExistingObject(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) @@ -352,7 +352,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @Throws(InterruptedException::class) fun multipartCopyToNonExistingBucket(testInfo: TestInfo) { val sourceBucket = givenBucketV1(testInfo) @@ -385,7 +385,7 @@ internal class ErrorResponsesV1IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @Throws(InterruptedException::class) fun multipartCopyNonExistingObject(testInfo: TestInfo) { val sourceBucket = givenBucketV1(testInfo) From 1d75b78acfc596aa1af5de40ae6adb1a08162b2c Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sat, 25 May 2024 13:01:47 +0200 Subject: [PATCH 05/14] Verify CrtAsyncV2IT Every test a success. 
--- .../kotlin/com/adobe/testing/s3mock/its/CrtAsyncV2IT.kt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CrtAsyncV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CrtAsyncV2IT.kt index f74c556df..0a23fb43e 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CrtAsyncV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CrtAsyncV2IT.kt @@ -43,7 +43,7 @@ internal class CrtAsyncV2IT : S3TestBase() { private val autoS3CrtAsyncClientV2: S3AsyncClient = createAutoS3CrtAsyncClientV2() @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testPutObject_etagCreation(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val uploadFileIs: InputStream = FileInputStream(uploadFile) @@ -72,7 +72,7 @@ internal class CrtAsyncV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testPutGetObject_successWithMatchingEtag(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val bucketName = randomName @@ -103,7 +103,7 @@ internal class CrtAsyncV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testMultipartUpload(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) From ed371364ea943653e6a9283e0a990a6740273f66 Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sat, 25 May 2024 13:02:40 +0200 Subject: [PATCH 06/14] Verify CorsV2IT Every test a failure. Will need to find out how to manually set credentials for plain HTTP requests against S3... --- .../src/test/kotlin/com/adobe/testing/s3mock/its/CorsV2IT.kt | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CorsV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CorsV2IT.kt index 9d4042ddf..c804d44f8 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CorsV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CorsV2IT.kt @@ -39,7 +39,8 @@ internal class CorsV2IT : S3TestBase() { private val httpClient: CloseableHttpClient = HttpClients.createDefault() @Test - @S3VerifiedTodo + @S3VerifiedFailure(year = 2024, + reason = "No credentials sent in plain HTTP request") fun testPutObject_cors(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val httpclient = HttpClientBuilder.create().build() @@ -72,6 +73,8 @@ internal class CorsV2IT : S3TestBase() { } @Test + @S3VerifiedFailure(year = 2024, + reason = "No credentials sent in plain HTTP request") fun testGetBucket_cors(testInfo: TestInfo) { val targetBucket = givenBucketV2(testInfo) val httpOptions = HttpOptions("/$targetBucket").apply { From a97f2f3cb2912ff23bd923779f087e692ea01022 Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sat, 25 May 2024 17:30:56 +0200 Subject: [PATCH 07/14] Verify Copy / Object ITs Handling for checksums and storageClass was surprising. For some APIs, S3 does not return checksums unless specified in the request. This is reflected in the tests, but not in the S3Mock. The StorageClass "STANDARD" is the default storageClass and is not returned by many APIs when requesting asset metadata. 
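The behavior described above, as a minimal sketch assuming the AWS SDK v2 and a placeholder bucket and key: HeadObject only returns checksum fields when the request opts in via ChecksumMode.ENABLED, and the storage class is typically absent from the response when it is the default STANDARD.

    import software.amazon.awssdk.services.s3.S3Client
    import software.amazon.awssdk.services.s3.model.ChecksumMode
    import software.amazon.awssdk.services.s3.model.HeadObjectRequest

    fun headObjectSketch(s3: S3Client, bucket: String, key: String) {
      // Without checksumMode(ENABLED), checksum fields in the response stay null.
      val head = s3.headObject(
        HeadObjectRequest.builder()
          .bucket(bucket)
          .key(key)
          .checksumMode(ChecksumMode.ENABLED)
          .build()
      )
      // Only populated if the object was uploaded with a SHA-256 checksum.
      println("checksumSHA256 = ${head.checksumSHA256()}")
      // Often null when the object uses the default STANDARD storage class.
      println("storageClass   = ${head.storageClass()}")
    }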
--- integration-tests/pom.xml | 5 ++ .../testing/s3mock/its/CopyObjectV1IT.kt | 41 ++++++----- .../testing/s3mock/its/CopyObjectV2IT.kt | 63 +++++++++++++--- .../s3mock/its/GetPutDeleteObjectV2IT.kt | 73 +++++++++++-------- .../testing/s3mock/its/MultiPartUploadV2IT.kt | 3 +- .../adobe/testing/s3mock/its/S3TestBase.kt | 58 +++++++++++---- pom.xml | 5 ++ .../testing/s3mock/ObjectController.java | 49 +++++++------ .../com/adobe/testing/s3mock/S3Exception.java | 30 ++++++-- .../s3mock/dto/GetObjectAttributesOutput.java | 1 - .../testing/s3mock/service/ObjectService.java | 19 ++--- .../testing/s3mock/service/ServiceBase.java | 16 +++- .../testing/s3mock/store/ObjectStore.java | 34 +++++++-- .../s3mock/store/S3ObjectMetadata.java | 2 +- .../adobe/testing/s3mock/util/HeaderUtil.java | 15 ++++ .../testing/s3mock/ObjectControllerTest.kt | 2 +- .../testing/s3mock/store/ObjectStoreTest.kt | 7 +- ...OutputTest_testSerialization_multiPart.xml | 2 +- ...tesOutputTest_testSerialization_object.xml | 2 +- 19 files changed, 296 insertions(+), 131 deletions(-) diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 72bbaf3d0..abfabb26a 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -47,6 +47,11 @@ + + com.amazonaws + aws-java-sdk-core + test + com.amazonaws aws-java-sdk-s3 diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectV1IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectV1IT.kt index d778959fc..d056a259c 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectV1IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectV1IT.kt @@ -47,7 +47,7 @@ internal class CopyObjectV1IT : S3TestBase() { * compares checksums of original and copied object. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldCopyObject(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, putObjectResult) = givenBucketAndObjectV1(testInfo, sourceKey) @@ -65,7 +65,7 @@ internal class CopyObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCopyObject_successMatch(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, putObjectResult) = givenBucketAndObjectV1(testInfo, sourceKey) @@ -85,7 +85,7 @@ internal class CopyObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCopyObject_successNoneMatch(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, putObjectResult) = givenBucketAndObjectV1(testInfo, sourceKey) @@ -105,7 +105,7 @@ internal class CopyObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCopyObject_failureMatch(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, _) = givenBucketAndObjectV1(testInfo, sourceKey) @@ -127,7 +127,7 @@ internal class CopyObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCopyObject_failureNoneMatch(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, putObjectResult) = givenBucketAndObjectV1(testInfo, sourceKey) @@ -153,7 +153,7 @@ internal class CopyObjectV1IT : S3TestBase() { * Downloads the object; compares checksums of original and copied object. 
*/ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldCopyObjectToSameKey(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -164,16 +164,19 @@ internal class CopyObjectV1IT : S3TestBase() { val putObjectResult = PutObjectRequest(bucketName, sourceKey, uploadFile).withMetadata(objectMetadata).let { s3Client.putObject(it) } - //TODO: this is actually illegal on S3. when copying to the same key like this, S3 will throw: - // This copy request is illegal because it is trying to copy an object to itself without - // changing the object's metadata, storage class, website redirect location or encryption attributes. - CopyObjectRequest(bucketName, sourceKey, bucketName, sourceKey).also { + + CopyObjectRequest(bucketName, sourceKey, bucketName, sourceKey).apply { + this.newObjectMetadata = ObjectMetadata().apply { + this.userMetadata = mapOf("test-key1" to "test-value1") + } + }.also { s3Client.copyObject(it) } s3Client.getObject(bucketName, sourceKey).use { val copiedObjectMetadata = it.objectMetadata - assertThat(copiedObjectMetadata.userMetadata["test-key"]).isEqualTo("test-value") + assertThat(copiedObjectMetadata.userMetadata["test-key"]).isNull() + assertThat(copiedObjectMetadata.userMetadata["test-key1"]).isEqualTo("test-value1") val objectContent = it.objectContent val copiedDigest = DigestUtil.hexDigest(objectContent) @@ -186,7 +189,7 @@ internal class CopyObjectV1IT : S3TestBase() { * Downloads the object; compares checksums of original and copied object. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldCopyObjectWithReplaceToSameKey(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -231,7 +234,7 @@ internal class CopyObjectV1IT : S3TestBase() { * the new user metadata specified during copy request. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldCopyObjectWithNewUserMetadata(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, putObjectResult) = givenBucketAndObjectV1(testInfo, sourceKey) @@ -261,7 +264,7 @@ internal class CopyObjectV1IT : S3TestBase() { * the source object user metadata; */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldCopyObjectWithSourceUserMetadata(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -292,7 +295,7 @@ internal class CopyObjectV1IT : S3TestBase() { * @see .shouldCopyObject */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldCopyObjectToKeyNeedingEscaping(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -316,7 +319,7 @@ internal class CopyObjectV1IT : S3TestBase() { * @see .shouldCopyObject */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldCopyObjectFromKeyNeedingEscaping(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -368,7 +371,7 @@ internal class CopyObjectV1IT : S3TestBase() { * Tests that an object won't be copied with wrong encryption Key. 
*/ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldNotObjectCopyWithWrongEncryptionKey(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, _) = givenBucketAndObjectV1(testInfo, sourceKey) @@ -387,7 +390,7 @@ internal class CopyObjectV1IT : S3TestBase() { * Tests that a copy request for a non-existing object throws the correct error. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldThrowNoSuchKeyOnCopyForNonExistingKey(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val sourceKey = randomName @@ -401,7 +404,7 @@ internal class CopyObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun multipartCopy() { //content larger than default part threshold of 5MiB val contentLen = 10 * _1MB diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectV2IT.kt index eedf00e96..fd66cfd3b 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectV2IT.kt @@ -45,7 +45,7 @@ internal class CopyObjectV2IT : S3TestBase() { private val s3ClientV2: S3Client = createS3ClientV2() @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCopyObject(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, putObjectResult) = givenBucketAndObjectV2(testInfo, sourceKey) @@ -72,7 +72,7 @@ internal class CopyObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCopyObject_successMatch(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, putObjectResult) = givenBucketAndObjectV2(testInfo, sourceKey) @@ -101,7 +101,7 @@ internal class CopyObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCopyObject_successNoneMatch(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, putObjectResult) = givenBucketAndObjectV2(testInfo, sourceKey) @@ -129,7 +129,7 @@ internal class CopyObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCopyObject_failureMatch(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, _) = givenBucketAndObjectV2(testInfo, sourceKey) @@ -153,7 +153,7 @@ internal class CopyObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCopyObject_failureNoneMatch(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, putObjectResult) = givenBucketAndObjectV2(testInfo, sourceKey) @@ -177,7 +177,7 @@ internal class CopyObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCopyObjectToSameBucketAndKey(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -238,7 +238,48 @@ internal class CopyObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) + fun testCopyObjectToSameBucketAndKey_throws(testInfo: TestInfo) { + val bucketName = givenBucketV2(testInfo) + val uploadFile = File(UPLOAD_FILE_NAME) + val sourceKey = UPLOAD_FILE_NAME + s3ClientV2.putObject(PutObjectRequest + .builder() + .bucket(bucketName) + .key(sourceKey) + .metadata(mapOf("test-key" to 
"test-value")) + .build(), + RequestBody.fromFile(uploadFile) + ) + val sourceLastModified = s3ClientV2.headObject( + HeadObjectRequest + .builder() + .bucket(bucketName) + .key(sourceKey) + .build() + ).lastModified() + + await("wait until source object is 5 seconds old").until { + sourceLastModified.plusSeconds(5).isBefore(Instant.now()) + } + + assertThatThrownBy { + s3ClientV2.copyObject( + CopyObjectRequest + .builder() + .sourceBucket(bucketName) + .sourceKey(sourceKey) + .destinationBucket(bucketName) + .destinationKey(sourceKey) + .build() + ) + }.isInstanceOf(S3Exception::class.java) + .hasMessageContaining("Service: S3, Status Code: 400") + .hasMessageContaining("This copy request is illegal because it is trying to copy an object to itself without changing the object's metadata, storage class, website redirect location or encryption attributes.") + } + + @Test + @S3VerifiedSuccess(year = 2024) fun testCopyObjectWithNewMetadata(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, putObjectResult) = givenBucketAndObjectV2(testInfo, sourceKey) @@ -271,7 +312,7 @@ internal class CopyObjectV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testCopyObject_storageClass(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val uploadFile = File(UPLOAD_FILE_NAME) @@ -281,7 +322,7 @@ internal class CopyObjectV2IT : S3TestBase() { PutObjectRequest.builder() .bucket(bucketName) .key(sourceKey) - .storageClass(StorageClass.DEEP_ARCHIVE) + .storageClass(StorageClass.REDUCED_REDUNDANCY) .build(), RequestBody.fromFile(uploadFile) ) @@ -295,6 +336,8 @@ internal class CopyObjectV2IT : S3TestBase() { .sourceKey(sourceKey) .destinationBucket(destinationBucketName) .destinationKey(destinationKey) + //must set storage class other than "STANDARD" to it gets applied. 
+ .storageClass(StorageClass.STANDARD_IA) .build()) s3ClientV2.getObject(GetObjectRequest @@ -303,7 +346,7 @@ internal class CopyObjectV2IT : S3TestBase() { .key(destinationKey) .build() ).use { - assertThat(it.response().storageClass()).isEqualTo(StorageClass.DEEP_ARCHIVE) + assertThat(it.response().storageClass()).isEqualTo(StorageClass.STANDARD_IA) } } } diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV2IT.kt index ede234133..4db31a2f2 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV2IT.kt @@ -30,6 +30,7 @@ import software.amazon.awssdk.core.sync.RequestBody import software.amazon.awssdk.services.s3.S3AsyncClient import software.amazon.awssdk.services.s3.S3Client import software.amazon.awssdk.services.s3.model.ChecksumAlgorithm +import software.amazon.awssdk.services.s3.model.ChecksumMode import software.amazon.awssdk.services.s3.model.GetObjectAttributesRequest import software.amazon.awssdk.services.s3.model.GetObjectRequest import software.amazon.awssdk.services.s3.model.HeadObjectRequest @@ -62,6 +63,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { * * https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html */ + @S3VerifiedSuccess(year = 2024) @ParameterizedTest @MethodSource(value = ["charsSafe", "charsSpecial", "charsToAvoid"]) fun testPutHeadGetObject_keyNames_safe(key: String, testInfo: TestInfo) { @@ -81,9 +83,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { .bucket(bucketName) .key(key) .build() - ).also { - assertThat(it.storageClass()).isEqualTo(StorageClass.STANDARD) - } + ) s3ClientV2.getObject( GetObjectRequest.builder() @@ -95,7 +95,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } } - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @ParameterizedTest @MethodSource(value = ["storageClasses"]) fun testPutObject_storageClass(storageClass: StorageClass, testInfo: TestInfo) { @@ -129,11 +129,17 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { .build() ).use { assertThat(it.response().eTag()).isEqualTo(eTag) + if (storageClass == StorageClass.STANDARD) { + //storageClass STANDARD is never returned from S3 APIs... 
+ assertThat(it.response().storageClass()).isNull() + } else { + assertThat(it.response().storageClass()).isEqualTo(storageClass) + } } } + @S3VerifiedSuccess(year = 2024) @ParameterizedTest - @S3VerifiedTodo @MethodSource(value = ["testFileNames"]) fun testPutObject_etagCreation_sync(testFileName: String, testInfo: TestInfo) { testEtagCreation(testFileName, s3ClientV2, testInfo) @@ -161,8 +167,8 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } } + @S3VerifiedSuccess(year = 2024) @ParameterizedTest - @S3VerifiedTodo @MethodSource(value = ["testFileNames"]) fun testPutObject_etagCreation_async(testFileName: String) { testEtagCreation(testFileName, s3AsyncClientV2) @@ -195,7 +201,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testPutObject_getObjectAttributes(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val expectedChecksum = DigestUtil.checksumFor(uploadFile.toPath(), Algorithm.SHA1) @@ -220,14 +226,16 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { ObjectAttributes.CHECKSUM) .build() ).also { - assertThat(it.eTag()).isEqualTo(eTag) + // + assertThat(it.eTag()).isEqualTo(eTag.trim('"')) + //default storageClass is STANDARD, which is never returned from APIs assertThat(it.storageClass()).isEqualTo(StorageClass.STANDARD) assertThat(it.objectSize()).isEqualTo(File(UPLOAD_FILE_NAME).length()) assertThat(it.checksum().checksumSHA1()).isEqualTo(expectedChecksum) } } - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @ParameterizedTest @MethodSource(value = ["checksumAlgorithms"]) fun testPutObject_checksumAlgorithm_http(checksumAlgorithm: ChecksumAlgorithm) { @@ -240,7 +248,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } } - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @ParameterizedTest @MethodSource(value = ["checksumAlgorithms"]) fun testPutObject_checksumAlgorithm_https(checksumAlgorithm: ChecksumAlgorithm) { @@ -276,6 +284,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { GetObjectRequest.builder() .bucket(bucketName) .key(testFileName) + .checksumMode(ChecksumMode.ENABLED) .build() ).use { val getChecksum = it.response().checksum(checksumAlgorithm) @@ -287,6 +296,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { HeadObjectRequest.builder() .bucket(bucketName) .key(testFileName) + .checksumMode(ChecksumMode.ENABLED) .build() ).also { val headChecksum = it.checksum(checksumAlgorithm) @@ -295,7 +305,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } } - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @ParameterizedTest @MethodSource(value = ["checksumAlgorithms"]) fun testPutObject_checksumAlgorithm_async_http(checksumAlgorithm: ChecksumAlgorithm) { @@ -315,7 +325,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { testChecksumAlgorithm_async(TEST_IMAGE_LARGE, checksumAlgorithm, autoS3CrtAsyncClientV2Http) } - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @ParameterizedTest @MethodSource(value = ["checksumAlgorithms"]) fun testPutObject_checksumAlgorithm_async_https(checksumAlgorithm: ChecksumAlgorithm) { @@ -361,6 +371,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { GetObjectRequest.builder() .bucket(bucketName) .key(testFileName) + .checksumMode(ChecksumMode.ENABLED) .build() ).use { val getChecksum = it.response().checksum(checksumAlgorithm) @@ -372,6 +383,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { HeadObjectRequest.builder() .bucket(bucketName) .key(testFileName) + 
.checksumMode(ChecksumMode.ENABLED) .build() ).also { val headChecksum = it.checksum(checksumAlgorithm) @@ -390,7 +402,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { else -> error("Unknown checksum algorithm") } - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @ParameterizedTest @MethodSource(value = ["checksumAlgorithms"]) fun testPutObject_checksum(checksumAlgorithm: ChecksumAlgorithm, testInfo: TestInfo) { @@ -406,7 +418,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { .build(), RequestBody.fromFile(uploadFile) ).also { - val putChecksum = it.checksum(checksumAlgorithm) + val putChecksum = it.checksum(checksumAlgorithm)!! assertThat(putChecksum).isNotBlank assertThat(putChecksum).isEqualTo(expectedChecksum) } @@ -415,6 +427,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { GetObjectRequest.builder() .bucket(bucketName) .key(UPLOAD_FILE_NAME) + .checksumMode(ChecksumMode.ENABLED) .build() ).use { val getChecksum = it.response().checksum(checksumAlgorithm) @@ -426,6 +439,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { HeadObjectRequest.builder() .bucket(bucketName) .key(UPLOAD_FILE_NAME) + .checksumMode(ChecksumMode.ENABLED) .build() ).also { val headChecksum = it.checksum(checksumAlgorithm) @@ -435,7 +449,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testPutObject_wrongChecksum(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val expectedChecksum = "wrongChecksum" @@ -453,7 +467,8 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { ) } .isInstanceOf(S3Exception::class.java) - .hasMessageContaining("The Content-MD5 or checksum value that you specified did not match what the server received.") + .hasMessageContaining("Service: S3, Status Code: 400") + .hasMessageContaining("Value for x-amz-checksum-sha1 header is invalid.") } /** @@ -461,7 +476,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { * https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html */ @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testPutObject_safeCharacters(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val bucketName = givenBucketV2(testInfo) @@ -500,7 +515,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { * https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html */ @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testPutObject_specialHandlingCharacters(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val bucketName = givenBucketV2(testInfo) @@ -535,7 +550,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testPutGetDeleteObject_twoBuckets(testInfo: TestInfo) { val bucket1 = givenRandomBucketV2() val bucket2 = givenRandomBucketV2() @@ -556,7 +571,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testPutGetHeadObject_storeHeaders(testInfo: TestInfo) { val bucket = givenRandomBucketV2() val uploadFile = File(UPLOAD_FILE_NAME) @@ -609,7 +624,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testGetObject_successWithMatchingEtag(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val matchingEtag = FileInputStream(uploadFile).let { @@ -633,7 +648,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - 
@S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testGetObject_successWithSameLength(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val matchingEtag = FileInputStream(uploadFile).let { @@ -653,7 +668,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testGetObject_successWithMatchingWildcardEtag(testInfo: TestInfo) { val (bucketName, putObjectResponse) = givenBucketAndObjectV2(testInfo, UPLOAD_FILE_NAME) val eTag = putObjectResponse.eTag() @@ -671,7 +686,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testHeadObject_successWithNonMatchEtag(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val expectedEtag = FileInputStream(uploadFile).let { @@ -697,7 +712,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testHeadObject_failureWithNonMatchWildcardEtag(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val expectedEtag = FileInputStream(uploadFile).let { @@ -724,7 +739,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testHeadObject_failureWithMatchEtag(testInfo: TestInfo) { val expectedEtag = FileInputStream(File(UPLOAD_FILE_NAME)).let { "\"${DigestUtil.hexDigest(it)}\"" @@ -750,7 +765,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testGetObject_rangeDownloads(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val (bucketName, putObjectResponse) = givenBucketAndObjectV2(testInfo, UPLOAD_FILE_NAME) @@ -790,7 +805,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testGetObject_rangeDownloads_finalBytes_prefixOffset(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val key = givenObjectV2WithRandomBytes(bucketName) @@ -809,7 +824,7 @@ internal class GetPutDeleteObjectV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testGetObject_rangeDownloads_finalBytes_suffixOffset(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val key = givenObjectV2WithRandomBytes(bucketName) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV2IT.kt index 6074949ea..8f6c689a1 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV2IT.kt @@ -519,7 +519,8 @@ internal class MultiPartUploadV2IT : S3TestBase() { ) } .isInstanceOf(S3Exception::class.java) - .hasMessageContaining("The Content-MD5 or checksum value that you specified did not match what the server received.") + .hasMessageContaining("Service: S3, Status Code: 400") + .hasMessageContaining("Value for x-amz-checksum-sha1 header is invalid.") } private fun UploadPartRequest.Builder.checksum( diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt index 06723ac43..9bd34f1c3 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt +++ 
b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt @@ -56,6 +56,7 @@ import software.amazon.awssdk.services.s3.model.DeleteBucketRequest import software.amazon.awssdk.services.s3.model.DeleteObjectRequest import software.amazon.awssdk.services.s3.model.DeleteObjectResponse import software.amazon.awssdk.services.s3.model.EncodingType +import software.amazon.awssdk.services.s3.model.GetObjectAttributesResponse import software.amazon.awssdk.services.s3.model.GetObjectLockConfigurationRequest import software.amazon.awssdk.services.s3.model.GetObjectRequest import software.amazon.awssdk.services.s3.model.GetObjectResponse @@ -352,9 +353,18 @@ internal abstract class S3TestBase { } private fun deleteBucket(bucket: Bucket) { - _s3ClientV2.deleteBucket(DeleteBucketRequest.builder().bucket(bucket.name()).build()) - val bucketDeleted = _s3ClientV2.waiter() - .waitUntilBucketNotExists(HeadBucketRequest.builder().bucket(bucket.name()).build()) + _s3ClientV2.deleteBucket(DeleteBucketRequest + .builder() + .bucket(bucket.name()) + .build() + ) + val bucketDeleted = _s3ClientV2 + .waiter() + .waitUntilBucketNotExists(HeadBucketRequest + .builder() + .bucket(bucket.name()) + .build() + ) bucketDeleted.matched().exception().get().also { assertThat(it).isNotNull } @@ -387,8 +397,13 @@ internal abstract class S3TestBase { private fun isObjectLockEnabled(bucket: Bucket): Boolean { return try { ObjectLockEnabled.ENABLED == _s3ClientV2.getObjectLockConfiguration( - GetObjectLockConfigurationRequest.builder().bucket(bucket.name()).build() - ).objectLockConfiguration().objectLockEnabled() + GetObjectLockConfigurationRequest + .builder() + .bucket(bucket.name()) + .build() + ) + .objectLockConfiguration() + .objectLockEnabled() } catch (e: S3Exception) { //#getObjectLockConfiguration throws S3Exception if not set false @@ -397,14 +412,20 @@ internal abstract class S3TestBase { private fun deleteMultipartUploads(bucket: Bucket) { _s3ClientV2.listMultipartUploads( - ListMultipartUploadsRequest.builder().bucket(bucket.name()).build() - ).uploads().forEach(Consumer { upload: MultipartUpload -> + ListMultipartUploadsRequest + .builder() + .bucket(bucket.name()) + .build() + ).uploads().forEach { _s3ClientV2.abortMultipartUpload( - AbortMultipartUploadRequest.builder().bucket(bucket.name()).key(upload.key()) - .uploadId(upload.uploadId()).build() + AbortMultipartUploadRequest + .builder() + .bucket(bucket.name()) + .key(it.key()) + .uploadId(it.uploadId()) + .build() ) } - ) } private val s3Endpoint: String? @@ -564,43 +585,47 @@ internal abstract class S3TestBase { else -> throw IllegalArgumentException("Unknown checksum algorithm") } - fun S3Response.checksum(checksumAlgorithm: ChecksumAlgorithm): String { - fun S3Response.checksumSHA1(): String { + fun S3Response.checksum(checksumAlgorithm: ChecksumAlgorithm): String? { + fun S3Response.checksumSHA1(): String? { return when (this) { is GetObjectResponse -> this.checksumSHA1() is PutObjectResponse -> this.checksumSHA1() is HeadObjectResponse -> this.checksumSHA1() is UploadPartResponse -> this.checksumSHA1() + is GetObjectAttributesResponse -> this.checksum().checksumSHA1() else -> throw RuntimeException("Unexpected response type ${this::class.java}") } } - fun S3Response.checksumSHA256(): String { + fun S3Response.checksumSHA256(): String? 
{ return when (this) { is GetObjectResponse -> this.checksumSHA256() is PutObjectResponse -> this.checksumSHA256() is HeadObjectResponse -> this.checksumSHA256() is UploadPartResponse -> this.checksumSHA256() + is GetObjectAttributesResponse -> this.checksum().checksumSHA256() else -> throw RuntimeException("Unexpected response type ${this::class.java}") } } - fun S3Response.checksumCRC32(): String { + fun S3Response.checksumCRC32(): String? { return when (this) { is GetObjectResponse -> this.checksumCRC32() is PutObjectResponse -> this.checksumCRC32() is HeadObjectResponse -> this.checksumCRC32() is UploadPartResponse -> this.checksumCRC32() + is GetObjectAttributesResponse -> this.checksum().checksumCRC32() else -> throw RuntimeException("Unexpected response type ${this::class.java}") } } - fun S3Response.checksumCRC32C(): String { + fun S3Response.checksumCRC32C(): String? { return when (this) { is GetObjectResponse -> this.checksumCRC32C() is PutObjectResponse -> this.checksumCRC32C() is HeadObjectResponse -> this.checksumCRC32C() is UploadPartResponse -> this.checksumCRC32C() + is GetObjectAttributesResponse -> this.checksum().checksumCRC32C() else -> throw RuntimeException("Unexpected response type ${this::class.java}") } } @@ -650,6 +675,9 @@ internal abstract class S3TestBase { .filter { it != StorageClass.UNKNOWN_TO_SDK_VERSION } .filter { it != StorageClass.SNOW } .filter { it != StorageClass.EXPRESS_ONEZONE } + .filter { it != StorageClass.GLACIER } + .filter { it != StorageClass.DEEP_ARCHIVE } + .filter { it != StorageClass.OUTPOSTS } .map { it } .stream() } diff --git a/pom.xml b/pom.xml index 3eadc66f5..77618fe4f 100644 --- a/pom.xml +++ b/pom.xml @@ -160,6 +160,11 @@ ${project.version} + + com.amazonaws + aws-java-sdk-core + ${aws.version} + com.amazonaws aws-java-sdk-s3 diff --git a/server/src/main/java/com/adobe/testing/s3mock/ObjectController.java b/server/src/main/java/com/adobe/testing/s3mock/ObjectController.java index 9111790ee..b0d59333b 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/ObjectController.java +++ b/server/src/main/java/com/adobe/testing/s3mock/ObjectController.java @@ -16,6 +16,7 @@ package com.adobe.testing.s3mock; +import static com.adobe.testing.s3mock.dto.StorageClass.STANDARD; import static com.adobe.testing.s3mock.service.ObjectService.getChecksum; import static com.adobe.testing.s3mock.util.AwsHttpHeaders.CONTENT_MD5; import static com.adobe.testing.s3mock.util.AwsHttpHeaders.MetadataDirective.METADATA_DIRECTIVE_COPY; @@ -51,6 +52,7 @@ import static com.adobe.testing.s3mock.util.HeaderUtil.encryptionHeadersFrom; import static com.adobe.testing.s3mock.util.HeaderUtil.mediaTypeFrom; import static com.adobe.testing.s3mock.util.HeaderUtil.overrideHeadersFrom; +import static com.adobe.testing.s3mock.util.HeaderUtil.storageClassHeadersFrom; import static com.adobe.testing.s3mock.util.HeaderUtil.storeHeadersFrom; import static com.adobe.testing.s3mock.util.HeaderUtil.userMetadataFrom; import static com.adobe.testing.s3mock.util.HeaderUtil.userMetadataHeadersFrom; @@ -192,15 +194,15 @@ public ResponseEntity headObject(@PathVariable String bucketName, objectService.verifyObjectMatching(match, noneMatch, s3ObjectMetadata); return ResponseEntity.ok() .eTag(s3ObjectMetadata.etag()) - .header(HttpHeaders.ACCEPT_RANGES, RANGES_BYTES) - .headers(headers -> headers.setAll(s3ObjectMetadata.storeHeaders())) - .headers(headers -> headers.setAll(userMetadataHeadersFrom(s3ObjectMetadata))) - .headers(headers -> 
headers.setAll(s3ObjectMetadata.encryptionHeaders())) - .headers(h -> h.setAll(checksumHeaderFrom(s3ObjectMetadata))) - .header(X_AMZ_STORAGE_CLASS, s3ObjectMetadata.storageClass().toString()) .lastModified(s3ObjectMetadata.lastModified()) .contentLength(Long.parseLong(s3ObjectMetadata.size())) .contentType(mediaTypeFrom(s3ObjectMetadata.contentType())) + .header(HttpHeaders.ACCEPT_RANGES, RANGES_BYTES) + .headers(h -> h.setAll(s3ObjectMetadata.storeHeaders())) + .headers(h -> h.setAll(userMetadataHeadersFrom(s3ObjectMetadata))) + .headers(h -> h.setAll(s3ObjectMetadata.encryptionHeaders())) + .headers(h -> h.setAll(checksumHeaderFrom(s3ObjectMetadata))) + .headers(h -> h.setAll(storageClassHeadersFrom(s3ObjectMetadata))) .build(); } else { return ResponseEntity.status(NOT_FOUND).build(); @@ -274,15 +276,15 @@ public ResponseEntity getObject(@PathVariable String buck .ok() .eTag(s3ObjectMetadata.etag()) .header(HttpHeaders.ACCEPT_RANGES, RANGES_BYTES) - .headers(headers -> headers.setAll(s3ObjectMetadata.storeHeaders())) - .headers(headers -> headers.setAll(userMetadataHeadersFrom(s3ObjectMetadata))) - .headers(headers -> headers.setAll(s3ObjectMetadata.encryptionHeaders())) - .headers(h -> h.setAll(checksumHeaderFrom(s3ObjectMetadata))) - .header(X_AMZ_STORAGE_CLASS, s3ObjectMetadata.storageClass().toString()) .lastModified(s3ObjectMetadata.lastModified()) .contentLength(Long.parseLong(s3ObjectMetadata.size())) .contentType(mediaTypeFrom(s3ObjectMetadata.contentType())) - .headers(headers -> headers.setAll(overrideHeadersFrom(queryParams))) + .headers(h -> h.setAll(s3ObjectMetadata.storeHeaders())) + .headers(h -> h.setAll(userMetadataHeadersFrom(s3ObjectMetadata))) + .headers(h -> h.setAll(s3ObjectMetadata.encryptionHeaders())) + .headers(h -> h.setAll(checksumHeaderFrom(s3ObjectMetadata))) + .headers(h -> h.setAll(storageClassHeadersFrom(s3ObjectMetadata))) + .headers(h -> h.setAll(overrideHeadersFrom(queryParams))) .body(outputStream -> Files.copy(s3ObjectMetadata.dataPath(), outputStream)); } @@ -545,17 +547,25 @@ public ResponseEntity getObjectAttributes( S3ObjectMetadata s3ObjectMetadata = objectService.verifyObjectExists(bucketName, key.key()); objectService.verifyObjectMatching(match, noneMatch, s3ObjectMetadata); + //S3Mock stores the etag with the additional quotation marks needed in the headers. This + // response does not use eTag as a header, so it must not contain the quotation marks. + String etag = s3ObjectMetadata.etag().replace("\"", ""); + long objectSize = Long.parseLong(s3ObjectMetadata.size()); + //in object attributes, S3 returns STANDARD, in all other APIs it returns null... + StorageClass storageClass = s3ObjectMetadata.storageClass() == null + ? STANDARD + : s3ObjectMetadata.storageClass(); GetObjectAttributesOutput response = new GetObjectAttributesOutput( getChecksum(s3ObjectMetadata), objectAttributes.contains(ObjectAttributes.ETAG.toString()) - ? s3ObjectMetadata.etag() + ? etag : null, null, //parts not supported right now objectAttributes.contains(ObjectAttributes.OBJECT_SIZE.toString()) - ? Long.parseLong(s3ObjectMetadata.size()) + ? objectSize : null, objectAttributes.contains(ObjectAttributes.STORAGE_CLASS.toString()) - ? s3ObjectMetadata.storageClass() + ? 
storageClass : null ); @@ -682,6 +692,7 @@ public ResponseEntity copyObject(@PathVariable String bucketNa @RequestHeader(value = X_AMZ_COPY_SOURCE_IF_MATCH, required = false) List match, @RequestHeader(value = X_AMZ_COPY_SOURCE_IF_NONE_MATCH, required = false) List noneMatch, + @RequestHeader(value = X_AMZ_STORAGE_CLASS, required = false) StorageClass storageClass, @RequestHeader HttpHeaders httpHeaders) { //TODO: needs modified-since handling, see API @@ -694,17 +705,13 @@ public ResponseEntity copyObject(@PathVariable String bucketNa metadata = userMetadataFrom(httpHeaders); } - //TODO: this is potentially illegal on S3. S3 throws a 400: - // "This copy request is illegal because it is trying to copy an object to itself without - // changing the object's metadata, storage class, website redirect location or encryption - // attributes." - var copyObjectResult = objectService.copyS3Object(copySource.bucket(), copySource.key(), bucketName, key.key(), encryptionHeadersFrom(httpHeaders), - metadata); + metadata, + storageClass); //return version id / copy source version id //return expiration diff --git a/server/src/main/java/com/adobe/testing/s3mock/S3Exception.java b/server/src/main/java/com/adobe/testing/s3mock/S3Exception.java index 39d7f84bf..aac596785 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/S3Exception.java +++ b/server/src/main/java/com/adobe/testing/s3mock/S3Exception.java @@ -30,7 +30,8 @@ * API Reference */ public class S3Exception extends RuntimeException { - private static final String INVALID_REQUEST = "InvalidRequest"; + private static final String INVALID_REQUEST_CODE = "InvalidRequest"; + private static final String BAD_REQUEST_CODE = "BadRequest"; public static final S3Exception INVALID_PART_NUMBER = new S3Exception(BAD_REQUEST.value(), "InvalidArgument", "Part number must be an integer between 1 and 10000, inclusive"); @@ -78,23 +79,28 @@ public class S3Exception extends RuntimeException { new S3Exception(CONFLICT.value(), "BucketAlreadyOwnedByYou", "Your previous request to create the named bucket succeeded and you already own it."); public static final S3Exception NOT_FOUND_BUCKET_OBJECT_LOCK = - new S3Exception(BAD_REQUEST.value(), INVALID_REQUEST, + new S3Exception(BAD_REQUEST.value(), INVALID_REQUEST_CODE, "Bucket is missing Object Lock Configuration"); public static final S3Exception NOT_FOUND_OBJECT_LOCK = new S3Exception(NOT_FOUND.value(), "NotFound", "The specified object does not have a ObjectLock configuration"); public static final S3Exception INVALID_REQUEST_RETAINDATE = - new S3Exception(BAD_REQUEST.value(), INVALID_REQUEST, + new S3Exception(BAD_REQUEST.value(), INVALID_REQUEST_CODE, "The retain until date must be in the future!"); public static final S3Exception INVALID_REQUEST_MAXKEYS = - new S3Exception(BAD_REQUEST.value(), INVALID_REQUEST, + new S3Exception(BAD_REQUEST.value(), INVALID_REQUEST_CODE, "maxKeys should be non-negative"); public static final S3Exception INVALID_REQUEST_ENCODINGTYPE = - new S3Exception(BAD_REQUEST.value(), INVALID_REQUEST, + new S3Exception(BAD_REQUEST.value(), INVALID_REQUEST_CODE, "encodingtype can only be none or 'url'"); + public static final S3Exception INVALID_COPY_REQUEST_SAME_KEY = + new S3Exception(BAD_REQUEST.value(), INVALID_REQUEST_CODE, + "This copy request is illegal because it is trying to copy an object to itself without " + + "changing the object's metadata, storage class, website redirect location or " + + "encryption attributes."); public static final S3Exception BAD_REQUEST_MD5 = - new 
S3Exception(BAD_REQUEST.value(), "BadRequest", + new S3Exception(BAD_REQUEST.value(), BAD_REQUEST_CODE, "Content-MD5 does not match object md5"); public static final S3Exception BAD_REQUEST_CONTENT = new S3Exception(BAD_REQUEST.value(), "UnexpectedContent", @@ -103,6 +109,18 @@ public class S3Exception extends RuntimeException { new S3Exception(BAD_REQUEST.value(), "BadDigest", "The Content-MD5 or checksum value that you specified did " + "not match what the server received."); + public static final S3Exception BAD_CHECKSUM_SHA1 = + new S3Exception(BAD_REQUEST.value(), BAD_REQUEST_CODE, + "Value for x-amz-checksum-sha1 header is invalid."); + public static final S3Exception BAD_CHECKSUM_SHA256 = + new S3Exception(BAD_REQUEST.value(), BAD_REQUEST_CODE, + "Value for x-amz-checksum-sha256 header is invalid."); + public static final S3Exception BAD_CHECKSUM_CRC32 = + new S3Exception(BAD_REQUEST.value(), BAD_REQUEST_CODE, + "Value for x-amz-checksum-crc32 header is invalid."); + public static final S3Exception BAD_CHECKSUM_CRC32C = + new S3Exception(BAD_REQUEST.value(), BAD_REQUEST_CODE, + "Value for x-amz-checksum-crc32c header is invalid."); private final int status; private final String code; private final String message; diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/GetObjectAttributesOutput.java b/server/src/main/java/com/adobe/testing/s3mock/dto/GetObjectAttributesOutput.java index 2eb44411d..5e3c146b4 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/dto/GetObjectAttributesOutput.java +++ b/server/src/main/java/com/adobe/testing/s3mock/dto/GetObjectAttributesOutput.java @@ -44,7 +44,6 @@ public record GetObjectAttributesOutput( ) { public GetObjectAttributesOutput { - etag = normalizeEtag(etag); if (xmlns == null) { xmlns = "http://s3.amazonaws.com/doc/2006-03-01/"; } diff --git a/server/src/main/java/com/adobe/testing/s3mock/service/ObjectService.java b/server/src/main/java/com/adobe/testing/s3mock/service/ObjectService.java index 3a9e1c912..90541f174 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/service/ObjectService.java +++ b/server/src/main/java/com/adobe/testing/s3mock/service/ObjectService.java @@ -16,7 +16,6 @@ package com.adobe.testing.s3mock.service; -import static com.adobe.testing.s3mock.S3Exception.BAD_DIGEST; import static com.adobe.testing.s3mock.S3Exception.BAD_REQUEST_CONTENT; import static com.adobe.testing.s3mock.S3Exception.BAD_REQUEST_MD5; import static com.adobe.testing.s3mock.S3Exception.INVALID_REQUEST_RETAINDATE; @@ -81,7 +80,8 @@ public CopyObjectResult copyS3Object(String sourceBucketName, String destinationBucketName, String destinationKey, Map encryptionHeaders, - Map userMetadata) { + Map userMetadata, + StorageClass storageClass) { var sourceBucketMetadata = bucketStore.getBucketMetadata(sourceBucketName); var destinationBucketMetadata = bucketStore.getBucketMetadata(destinationBucketName); var sourceId = sourceBucketMetadata.getID(sourceKey); @@ -91,7 +91,11 @@ public CopyObjectResult copyS3Object(String sourceBucketName, // source and destination is the same, pretend we copied - S3 does the same. 
if (sourceKey.equals(destinationKey) && sourceBucketName.equals(destinationBucketName)) { - return objectStore.pretendToCopyS3Object(sourceBucketMetadata, sourceId, userMetadata); + return objectStore.pretendToCopyS3Object(sourceBucketMetadata, + sourceId, + userMetadata, + encryptionHeaders, + storageClass); } // source must be copied to destination @@ -99,7 +103,7 @@ public CopyObjectResult copyS3Object(String sourceBucketName, try { return objectStore.copyS3Object(sourceBucketMetadata, sourceId, destinationBucketMetadata, destinationId, destinationKey, - encryptionHeaders, userMetadata); + encryptionHeaders, userMetadata, storageClass); } catch (Exception e) { //something went wrong with writing the destination file, clean up ID from BucketStore. bucketStore.removeFromBucket(destinationKey, destinationBucketName); @@ -255,13 +259,6 @@ public void verifyRetention(Retention retention) { } } - public void verifyChecksum(Path path, String checksum, ChecksumAlgorithm checksumAlgorithm) { - String checksumFor = DigestUtil.checksumFor(path, checksumAlgorithm.toAlgorithm()); - if (!checksum.equals(checksumFor)) { - throw BAD_DIGEST; - } - } - public void verifyMd5(Path input, String contentMd5) { try { try (var stream = Files.newInputStream(input)) { diff --git a/server/src/main/java/com/adobe/testing/s3mock/service/ServiceBase.java b/server/src/main/java/com/adobe/testing/s3mock/service/ServiceBase.java index c0cd35c02..dd6154f51 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/service/ServiceBase.java +++ b/server/src/main/java/com/adobe/testing/s3mock/service/ServiceBase.java @@ -16,6 +16,10 @@ package com.adobe.testing.s3mock.service; +import static com.adobe.testing.s3mock.S3Exception.BAD_CHECKSUM_CRC32; +import static com.adobe.testing.s3mock.S3Exception.BAD_CHECKSUM_CRC32C; +import static com.adobe.testing.s3mock.S3Exception.BAD_CHECKSUM_SHA1; +import static com.adobe.testing.s3mock.S3Exception.BAD_CHECKSUM_SHA256; import static com.adobe.testing.s3mock.S3Exception.BAD_DIGEST; import static com.adobe.testing.s3mock.S3Exception.BAD_REQUEST_CONTENT; import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_DECODED_CONTENT_LENGTH; @@ -39,10 +43,14 @@ abstract class ServiceBase { public void verifyChecksum(Path path, String checksum, ChecksumAlgorithm checksumAlgorithm) { - if (checksum != null && checksumAlgorithm != null) { - String checksumFor = DigestUtil.checksumFor(path, checksumAlgorithm.toAlgorithm()); - if (!checksum.equals(checksumFor)) { - throw BAD_DIGEST; + String checksumFor = DigestUtil.checksumFor(path, checksumAlgorithm.toAlgorithm()); + if (!checksum.equals(checksumFor)) { + switch (checksumAlgorithm) { + case SHA1 -> throw BAD_CHECKSUM_SHA1; + case SHA256 -> throw BAD_CHECKSUM_SHA256; + case CRC32 -> throw BAD_CHECKSUM_CRC32; + case CRC32C -> throw BAD_CHECKSUM_CRC32C; + default -> throw BAD_DIGEST; } } } diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/ObjectStore.java b/server/src/main/java/com/adobe/testing/s3mock/store/ObjectStore.java index 25925c05e..9cc2999a7 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/store/ObjectStore.java +++ b/server/src/main/java/com/adobe/testing/s3mock/store/ObjectStore.java @@ -16,6 +16,7 @@ package com.adobe.testing.s3mock.store; +import static com.adobe.testing.s3mock.S3Exception.INVALID_COPY_REQUEST_SAME_KEY; import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_SERVER_SIDE_ENCRYPTION_AWS_KMS_KEY_ID; import static com.adobe.testing.s3mock.util.DigestUtil.hexDigest; @@ -53,7 +54,7 
@@ public class ObjectStore extends StoreBase { private static final Logger LOG = LoggerFactory.getLogger(ObjectStore.class); private static final String META_FILE = "objectMetadata.json"; - private static final String ACL_FILE = "objectAcl.xml"; + private static final String ACL_FILE = "objectAcl.json"; private static final String DATA_FILE = "binaryData"; /** @@ -304,7 +305,8 @@ public CopyObjectResult copyS3Object(BucketMetadata sourceBucket, UUID destinationId, String destinationKey, Map encryptionHeaders, - Map userMetadata) { + Map userMetadata, + StorageClass storageClass) { var sourceObject = getS3ObjectMetadata(sourceBucket, sourceId); if (sourceObject == null) { return null; @@ -318,13 +320,14 @@ public CopyObjectResult copyS3Object(BucketMetadata sourceBucket, sourceObject.dataPath(), userMetadata == null || userMetadata.isEmpty() ? sourceObject.userMetadata() : userMetadata, - encryptionHeaders, + encryptionHeaders == null || encryptionHeaders.isEmpty() + ? sourceObject.encryptionHeaders() : encryptionHeaders, null, sourceObject.tags(), sourceObject.checksumAlgorithm(), sourceObject.checksum(), sourceObject.owner(), - sourceObject.storageClass() + storageClass != null ? storageClass : sourceObject.storageClass() ); return new CopyObjectResult(copiedObject.modificationDate(), copiedObject.etag()); } @@ -337,12 +340,16 @@ public CopyObjectResult copyS3Object(BucketMetadata sourceBucket, */ public CopyObjectResult pretendToCopyS3Object(BucketMetadata sourceBucket, UUID sourceId, - Map userMetadata) { + Map userMetadata, + Map encryptionHeaders, + StorageClass storageClass) { var sourceObject = getS3ObjectMetadata(sourceBucket, sourceId); if (sourceObject == null) { return null; } + verifyPretendCopy(sourceObject, userMetadata, encryptionHeaders, storageClass); + writeMetafile(sourceBucket, new S3ObjectMetadata( sourceObject.id(), sourceObject.key(), @@ -359,14 +366,27 @@ public CopyObjectResult pretendToCopyS3Object(BucketMetadata sourceBucket, sourceObject.retention(), sourceObject.owner(), sourceObject.storeHeaders(), - sourceObject.encryptionHeaders(), + encryptionHeaders == null || encryptionHeaders.isEmpty() + ? sourceObject.encryptionHeaders() : encryptionHeaders, sourceObject.checksumAlgorithm(), sourceObject.checksum(), - sourceObject.storageClass() + storageClass != null ? storageClass : sourceObject.storageClass() )); return new CopyObjectResult(sourceObject.modificationDate(), sourceObject.etag()); } + private void verifyPretendCopy(S3ObjectMetadata sourceObject, + Map userMetadata, + Map encryptionHeaders, + StorageClass storageClass) { + var userDataUnChanged = userMetadata == null || userMetadata.isEmpty(); + var encryptionHeadersUnChanged = encryptionHeaders == null || encryptionHeaders.isEmpty(); + var storageClassUnChanged = storageClass == null || storageClass == sourceObject.storageClass(); + if (userDataUnChanged && storageClassUnChanged && encryptionHeadersUnChanged) { + throw INVALID_COPY_REQUEST_SAME_KEY; + } + } + /** * Removes an object key from a bucket. * diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/S3ObjectMetadata.java b/server/src/main/java/com/adobe/testing/s3mock/store/S3ObjectMetadata.java index 7a4ce168c..a68f655eb 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/store/S3ObjectMetadata.java +++ b/server/src/main/java/com/adobe/testing/s3mock/store/S3ObjectMetadata.java @@ -64,6 +64,6 @@ public record S3ObjectMetadata( tags = Objects.requireNonNullElse(tags, new ArrayList<>()); storeHeaders = storeHeaders == null ? 
Collections.emptyMap() : storeHeaders; encryptionHeaders = encryptionHeaders == null ? Collections.emptyMap() : encryptionHeaders; - storageClass = storageClass == null ? StorageClass.STANDARD : storageClass; + storageClass = storageClass == StorageClass.STANDARD ? null : storageClass; } } diff --git a/server/src/main/java/com/adobe/testing/s3mock/util/HeaderUtil.java b/server/src/main/java/com/adobe/testing/s3mock/util/HeaderUtil.java index c7861feaa..eb1fe855f 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/util/HeaderUtil.java +++ b/server/src/main/java/com/adobe/testing/s3mock/util/HeaderUtil.java @@ -25,11 +25,13 @@ import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_CONTENT_SHA256; import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_SDK_CHECKSUM_ALGORITHM; import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_SERVER_SIDE_ENCRYPTION; +import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_STORAGE_CLASS; import static org.apache.commons.lang3.StringUtils.equalsIgnoreCase; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.startsWithIgnoreCase; import com.adobe.testing.s3mock.dto.ChecksumAlgorithm; +import com.adobe.testing.s3mock.dto.StorageClass; import com.adobe.testing.s3mock.store.S3ObjectMetadata; import java.util.AbstractMap.SimpleEntry; import java.util.HashMap; @@ -80,6 +82,19 @@ public static Map userMetadataHeadersFrom(S3ObjectMetadata s3Obj return metadataHeaders; } + /** + * Creates response headers from S3ObjectMetadata storageclass. + * @param s3ObjectMetadata {@link S3ObjectMetadata} S3Object where data will be extracted + */ + public static Map storageClassHeadersFrom(S3ObjectMetadata s3ObjectMetadata) { + Map headers = new HashMap<>(); + StorageClass storageClass = s3ObjectMetadata.storageClass(); + if (storageClass != null) { + headers.put(X_AMZ_STORAGE_CLASS, storageClass.toString()); + } + return headers; + } + /** * Retrieves user metadata from request. 
* @param headers {@link HttpHeaders} diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/ObjectControllerTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/ObjectControllerTest.kt index 28bb65bf8..98f15b34c 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/ObjectControllerTest.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/ObjectControllerTest.kt @@ -635,7 +635,7 @@ internal class ObjectControllerTest : BaseControllerTest() { encryptionHeaders(encryption, encryptionKey), null, null, - StorageClass.STANDARD + null ) } diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/store/ObjectStoreTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/store/ObjectStoreTest.kt index bb14855b9..4eb11db29 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/store/ObjectStoreTest.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/store/ObjectStoreTest.kt @@ -250,13 +250,13 @@ internal class ObjectStoreTest : StoreTestBase() { objectStore.copyS3Object( metadataFrom(sourceBucketName), sourceId, metadataFrom(destinationBucketName), - destinationId, destinationObjectName, emptyMap(), NO_USER_METADATA + destinationId, destinationObjectName, emptyMap(), NO_USER_METADATA, StorageClass.STANDARD_IA ) objectStore.getS3ObjectMetadata(metadataFrom(destinationBucketName), destinationId).also { assertThat(it.encryptionHeaders).isEmpty() assertThat(sourceFile).hasSameBinaryContentAs(it.dataPath.toFile()) - assertThat(it.storageClass).isEqualTo(StorageClass.GLACIER) + assertThat(it.storageClass).isEqualTo(StorageClass.STANDARD_IA) } } @@ -288,7 +288,8 @@ internal class ObjectStoreTest : StoreTestBase() { destinationId, destinationObjectName, encryptionHeaders(), - NO_USER_METADATA + NO_USER_METADATA, + StorageClass.STANDARD_IA ) objectStore.getS3ObjectMetadata(metadataFrom(destinationBucketName), destinationId).also { assertThat(it.encryptionHeaders).isEqualTo(encryptionHeaders()) diff --git a/server/src/test/resources/com/adobe/testing/s3mock/dto/GetObjectAttributesOutputTest_testSerialization_multiPart.xml b/server/src/test/resources/com/adobe/testing/s3mock/dto/GetObjectAttributesOutputTest_testSerialization_multiPart.xml index e53e2d785..32097d16d 100644 --- a/server/src/test/resources/com/adobe/testing/s3mock/dto/GetObjectAttributesOutputTest_testSerialization_multiPart.xml +++ b/server/src/test/resources/com/adobe/testing/s3mock/dto/GetObjectAttributesOutputTest_testSerialization_multiPart.xml @@ -17,7 +17,7 @@ --> - "etag" + etag 1000 false diff --git a/server/src/test/resources/com/adobe/testing/s3mock/dto/GetObjectAttributesOutputTest_testSerialization_object.xml b/server/src/test/resources/com/adobe/testing/s3mock/dto/GetObjectAttributesOutputTest_testSerialization_object.xml index b2ec1ff6a..9dda4df78 100644 --- a/server/src/test/resources/com/adobe/testing/s3mock/dto/GetObjectAttributesOutputTest_testSerialization_object.xml +++ b/server/src/test/resources/com/adobe/testing/s3mock/dto/GetObjectAttributesOutputTest_testSerialization_object.xml @@ -17,7 +17,7 @@ --> - "etag" + etag STANDARD 1 From b192a71828df7de353ada272b309737de2d963e9 Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sat, 25 May 2024 18:22:27 +0200 Subject: [PATCH 08/14] Verify LegalHoldV2IT --- .../adobe/testing/s3mock/its/LegalHoldV2IT.kt | 55 ++++++++++++++++--- 1 file changed, 46 insertions(+), 9 deletions(-) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/LegalHoldV2IT.kt 
b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/LegalHoldV2IT.kt index eacd98a33..b79bc91e4 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/LegalHoldV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/LegalHoldV2IT.kt @@ -16,7 +16,6 @@ package com.adobe.testing.s3mock.its -import org.assertj.core.api.Assertions import org.assertj.core.api.Assertions.assertThat import org.assertj.core.api.Assertions.assertThatThrownBy import org.junit.jupiter.api.Test @@ -37,7 +36,7 @@ internal class LegalHoldV2IT : S3TestBase() { private val s3ClientV2: S3Client = createS3ClientV2() @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testGetLegalHoldNoBucketLockConfiguration(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, _) = givenBucketAndObjectV1(testInfo, sourceKey) @@ -56,15 +55,23 @@ internal class LegalHoldV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testGetLegalHoldNoObjectLockConfiguration(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val sourceKey = UPLOAD_FILE_NAME val bucketName = bucketName(testInfo) - s3ClientV2.createBucket(CreateBucketRequest.builder().bucket(bucketName) - .objectLockEnabledForBucket(true).build()) + s3ClientV2.createBucket(CreateBucketRequest + .builder() + .bucket(bucketName) + .objectLockEnabledForBucket(true) + .build() + ) s3ClientV2.putObject( - PutObjectRequest.builder().bucket(bucketName).key(sourceKey).build(), + PutObjectRequest + .builder() + .bucket(bucketName) + .key(sourceKey) + .build(), RequestBody.fromFile(uploadFile) ) @@ -82,7 +89,7 @@ internal class LegalHoldV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testPutAndGetLegalHold(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val sourceKey = UPLOAD_FILE_NAME @@ -94,7 +101,11 @@ internal class LegalHoldV2IT : S3TestBase() { .build() ) s3ClientV2.putObject( - PutObjectRequest.builder().bucket(bucketName).key(sourceKey).build(), + PutObjectRequest + .builder() + .bucket(bucketName) + .key(sourceKey) + .build(), RequestBody.fromFile(uploadFile) ) @@ -102,7 +113,11 @@ internal class LegalHoldV2IT : S3TestBase() { .builder() .bucket(bucketName) .key(sourceKey) - .legalHold(ObjectLockLegalHold.builder().status(ObjectLockLegalHoldStatus.ON).build()) + .legalHold(ObjectLockLegalHold + .builder() + .status(ObjectLockLegalHoldStatus.ON) + .build() + ) .build() ) @@ -115,5 +130,27 @@ internal class LegalHoldV2IT : S3TestBase() { ).also { assertThat(it.legalHold().status()).isEqualTo(ObjectLockLegalHoldStatus.ON) } + + s3ClientV2.putObjectLegalHold(PutObjectLegalHoldRequest + .builder() + .bucket(bucketName) + .key(sourceKey) + .legalHold(ObjectLockLegalHold + .builder() + .status(ObjectLockLegalHoldStatus.OFF) + .build() + ) + .build() + ) + + s3ClientV2.getObjectLegalHold( + GetObjectLegalHoldRequest + .builder() + .bucket(bucketName) + .key(sourceKey) + .build() + ).also { + assertThat(it.legalHold().status()).isEqualTo(ObjectLockLegalHoldStatus.OFF) + } } } From 54b315079acbc4d5f68f409ec5e695f597782da2 Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sat, 25 May 2024 18:52:05 +0200 Subject: [PATCH 09/14] Verify ListObject*ITs All tests succeeded --- .../adobe/testing/s3mock/its/ListObjectV1IT.kt | 16 ++++++++-------- .../testing/s3mock/its/ListObjectV1MaxKeysIT.kt | 12 ++++++------ .../s3mock/its/ListObjectV1PaginationIT.kt | 2 +- 
.../adobe/testing/s3mock/its/ListObjectV2IT.kt | 6 +++--- .../testing/s3mock/its/ListObjectVersionsV2IT.kt | 1 + 5 files changed, 19 insertions(+), 18 deletions(-) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1IT.kt index 2a015ebeb..9be2e623f 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1IT.kt @@ -81,7 +81,7 @@ internal class ListObjectV1IT : S3TestBase() { */ @ParameterizedTest @MethodSource("data") - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun listV1(parameters: Param, testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) // create all expected objects @@ -125,7 +125,7 @@ internal class ListObjectV1IT : S3TestBase() { */ @ParameterizedTest @MethodSource("data") - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun listV2(parameters: Param, testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) // create all expected objects @@ -167,7 +167,7 @@ internal class ListObjectV1IT : S3TestBase() { * https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldListWithCorrectObjectNames(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -190,7 +190,7 @@ internal class ListObjectV1IT : S3TestBase() { * https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldListV2WithCorrectObjectNames(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -224,7 +224,7 @@ internal class ListObjectV1IT : S3TestBase() { * is currently no low-level testing infrastructure in place. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldHonorEncodingType(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -247,7 +247,7 @@ internal class ListObjectV1IT : S3TestBase() { * The same as [shouldHonorEncodingType] but for V2 API. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldHonorEncodingTypeV2(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -269,7 +269,7 @@ internal class ListObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldGetObjectListing(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -285,7 +285,7 @@ internal class ListObjectV1IT : S3TestBase() { * Stores files in a previously created bucket. 
List files using ListObjectsV2Request */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldUploadAndListV2Objects(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1MaxKeysIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1MaxKeysIT.kt index 46ae69afa..bbf3c0101 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1MaxKeysIT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1MaxKeysIT.kt @@ -25,7 +25,7 @@ internal class ListObjectV1MaxKeysIT : S3TestBase() { val s3Client: AmazonS3 = createS3ClientV1() @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun returnsLimitedAmountOfObjectsBasedOnMaxKeys(testInfo: TestInfo) { val bucketName = givenBucketWithTwoObjects(testInfo) val request = ListObjectsRequest().withBucketName(bucketName).withMaxKeys(1) @@ -37,7 +37,7 @@ internal class ListObjectV1MaxKeysIT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun returnsAllObjectsIfMaxKeysIsDefault(testInfo: TestInfo) { val bucketName = givenBucketWithTwoObjects(testInfo) val request = ListObjectsRequest().withBucketName(bucketName) @@ -49,7 +49,7 @@ internal class ListObjectV1MaxKeysIT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun returnsAllObjectsIfMaxKeysEqualToAmountOfObjects(testInfo: TestInfo) { val bucketName = givenBucketWithTwoObjects(testInfo) val request = ListObjectsRequest().withBucketName(bucketName).withMaxKeys(2) @@ -61,7 +61,7 @@ internal class ListObjectV1MaxKeysIT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun returnsAllObjectsIfMaxKeysMoreThanAmountOfObjects(testInfo: TestInfo) { val bucketName = givenBucketWithTwoObjects(testInfo) val request = ListObjectsRequest().withBucketName(bucketName).withMaxKeys(3) @@ -73,7 +73,7 @@ internal class ListObjectV1MaxKeysIT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun returnsEmptyListIfMaxKeysIsZero(testInfo: TestInfo) { val bucketName = givenBucketWithTwoObjects(testInfo) val request = ListObjectsRequest().withBucketName(bucketName).withMaxKeys(0) @@ -85,7 +85,7 @@ internal class ListObjectV1MaxKeysIT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun returnsAllObjectsIfMaxKeysIsNegative(testInfo: TestInfo) { val bucketName = givenBucketWithTwoObjects(testInfo) val request = ListObjectsRequest().withBucketName(bucketName).withMaxKeys(-1) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1PaginationIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1PaginationIT.kt index c045f95f3..0524c758c 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1PaginationIT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV1PaginationIT.kt @@ -25,7 +25,7 @@ internal class ListObjectV1PaginationIT : S3TestBase() { val s3Client: AmazonS3 = createS3ClientV1() @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldTruncateAndReturnNextMarker(testInfo: TestInfo) { val bucketName = givenBucketWithTwoObjects(testInfo) val request = 
ListObjectsRequest().withBucketName(bucketName).withMaxKeys(1) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV2IT.kt index 431e54e1e..68e825b41 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectV2IT.kt @@ -34,7 +34,7 @@ internal class ListObjectV2IT : S3TestBase() { private val s3ClientV2: S3Client = createS3ClientV2() @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testPutObjectsListObjectsV2_checksumAlgorithm_sha256(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val bucketName = givenBucketV2(testInfo) @@ -71,7 +71,7 @@ internal class ListObjectV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testPutObjectsListObjectsV1_checksumAlgorithm_sha256(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val bucketName = givenBucketV2(testInfo) @@ -113,7 +113,7 @@ internal class ListObjectV2IT : S3TestBase() { * https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html */ @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun shouldListV2WithCorrectObjectNames(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectVersionsV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectVersionsV2IT.kt index e80da6620..aa4fb8a8d 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectVersionsV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectVersionsV2IT.kt @@ -32,6 +32,7 @@ internal class ListObjectVersionsV2IT : S3TestBase() { private val s3ClientV2: S3Client = createS3ClientV2() @Test + @S3VerifiedSuccess(year = 2024) fun testPutObjects_listObjectVersions(testInfo: TestInfo) { val uploadFile = File(UPLOAD_FILE_NAME) val bucketName = givenBucketV2(testInfo) From 71bcbfa8c1f888b9d23ba2c4f1f0287c68fdc390 Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sun, 26 May 2024 16:18:32 +0200 Subject: [PATCH 10/14] Verify Multipart*ITs Many tests succeeded. Fix: Multipart checksums were not correctly implemented. 
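Background for the checksum fix below: for multipart uploads AWS does not checksum the final object as a whole — it takes the checksum of each part, concatenates those raw checksum bytes, checksums the concatenation, and appends "-<part count>". That is the behaviour the new DigestUtil.checksumMultipart in this patch implements. The following is a minimal Kotlin sketch of the same composite calculation, using only SDK v2 core classes the patch already relies on (SdkChecksum, Algorithm, BinaryUtils); the helper name compositeChecksum is illustrative, and feeding the per-part bytes into the outer checksum incrementally is functionally equivalent to concatenating the byte arrays first as DigestUtil does:

    import software.amazon.awssdk.core.checksums.Algorithm
    import software.amazon.awssdk.core.checksums.SdkChecksum
    import software.amazon.awssdk.utils.BinaryUtils
    import java.nio.file.Files
    import java.nio.file.Path

    // Checksum-of-checksums: checksum every part, feed the raw (not base64) checksum
    // bytes of each part into a second checksum, then append "-<number of parts>".
    fun compositeChecksum(parts: List<Path>, algorithm: Algorithm): String {
        val composite = SdkChecksum.forAlgorithm(algorithm)
        for (part in parts) {
            val partChecksum = SdkChecksum.forAlgorithm(algorithm)
            Files.newInputStream(part).use { input ->
                val buffer = ByteArray(4096)
                var read = input.read(buffer)
                while (read != -1) {
                    partChecksum.update(buffer, 0, read)
                    read = input.read(buffer)
                }
            }
            val partBytes = partChecksum.checksumBytes
            composite.update(partBytes, 0, partBytes.size)
        }
        return BinaryUtils.toBase64(composite.checksumBytes) + "-" + parts.size
    }

The "-2" suffix in the expected value "oGk6qg==-2" asserted further down in testMultipartUpload_checksum is this part-count convention; the per-part values are the checksums the SDK returns from each UploadPart call, which the test now compares against DigestUtil.checksumFor.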
--- .../testing/s3mock/its/MultiPartUploadV1IT.kt | 28 +++--- .../testing/s3mock/its/MultiPartUploadV2IT.kt | 98 +++++++++++-------- .../testing/s3mock/MultipartController.java | 8 +- .../dto/CompleteMultipartUploadResult.java | 17 +++- .../s3mock/service/MultipartService.java | 13 +-- .../testing/s3mock/store/MultipartStore.java | 23 +++-- .../adobe/testing/s3mock/util/DigestUtil.java | 42 +++++++- .../dto/CompleteMultipartUploadResultTest.kt | 2 +- .../s3mock/store/MultipartStoreTest.kt | 42 +++++--- .../testing/s3mock/util/DigestUtilTest.kt | 23 +++++ ...Test_testChecksumOfMultipleFiles_testFile1 | 1 + ...Test_testChecksumOfMultipleFiles_testFile2 | 1 + 12 files changed, 205 insertions(+), 93 deletions(-) create mode 100644 server/src/test/resources/com/adobe/testing/s3mock/util/DigestUtilTest_testChecksumOfMultipleFiles_testFile1 create mode 100644 server/src/test/resources/com/adobe/testing/s3mock/util/DigestUtilTest_testChecksumOfMultipleFiles_testFile2 diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV1IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV1IT.kt index fa6bb8ae2..b5f33bb58 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV1IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV1IT.kt @@ -49,7 +49,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * Tests if user metadata can be passed by multipart upload. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testMultipartUpload_withUserMetadata(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -93,7 +93,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * Tests if a multipart upload with the last part being smaller than 5MB works. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldAllowMultipartUploads(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -147,7 +147,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldInitiateMultipartAndRetrieveParts(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -193,7 +193,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * Tests if not yet completed / aborted multipart uploads are listed. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldListMultipartUploads(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) assertThat( @@ -220,7 +220,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * Tests if empty parts list of not yet completed multipart upload is returned. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldListEmptyPartListForMultipartUpload(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) assertThat( @@ -244,7 +244,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * Tests that an exception is thrown when listing parts if the upload id is unknown. 
*/ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldThrowOnListMultipartUploadsWithUnknownId(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) assertThatThrownBy { s3Client.listParts(ListPartsRequest(bucketName, "NON_EXISTENT_KEY", @@ -257,7 +257,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * Tests if not yet completed / aborted multipart uploads are listed with prefix filtering. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldListMultipartUploadsWithPrefix(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) s3Client.initiateMultipartUpload( @@ -280,7 +280,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * Tests if multipart uploads are stored and can be retrieved by bucket. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldListMultipartUploadsWithBucket(testInfo: TestInfo) { // create multipart upload 1 val bucketName1 = givenBucketV1(testInfo) @@ -312,7 +312,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * Tests if a multipart upload can be aborted. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldAbortMultipartUpload(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) assertThat(s3Client.listMultipartUploads(ListMultipartUploadsRequest(bucketName)).multipartUploads).isEmpty() @@ -345,7 +345,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * irrespective of the number of parts uploaded before. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldAdherePartsInCompleteMultipartUploadRequest(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val key = UUID.randomUUID().toString() @@ -392,7 +392,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * aborted. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldListPartsOnCompleteOrAbort(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val key = randomName @@ -427,7 +427,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * Upload two objects, copy as parts without length, complete multipart. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldCopyPartsAndComplete(testInfo: TestInfo) { //Initiate upload in random bucket val bucketName2 = givenRandomBucketV1() @@ -492,7 +492,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * Requests parts for the uploadId; compares etag of upload response and parts list. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldCopyObjectPart(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val (bucketName, putObjectResult) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) @@ -538,7 +538,7 @@ internal class MultiPartUploadV1IT : S3TestBase() { * Tries to copy part of a non-existing object to a new bucket. 
*/ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldThrowNoSuchKeyOnCopyObjectPartForNonExistingKey(testInfo: TestInfo) { val sourceKey = "NON_EXISTENT_KEY" val destinationBucketName = givenRandomBucketV1() diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV2IT.kt index 8f6c689a1..22e47c8c2 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultiPartUploadV2IT.kt @@ -31,12 +31,13 @@ import org.springframework.web.util.UriUtils import software.amazon.awssdk.awscore.exception.AwsErrorDetails import software.amazon.awssdk.awscore.exception.AwsServiceException import software.amazon.awssdk.core.async.AsyncRequestBody -import software.amazon.awssdk.core.checksums.Algorithm +import software.amazon.awssdk.core.checksums.Algorithm.CRC32 import software.amazon.awssdk.core.sync.RequestBody import software.amazon.awssdk.services.s3.S3AsyncClient import software.amazon.awssdk.services.s3.S3Client import software.amazon.awssdk.services.s3.model.AbortMultipartUploadRequest import software.amazon.awssdk.services.s3.model.ChecksumAlgorithm +import software.amazon.awssdk.services.s3.model.ChecksumMode import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest import software.amazon.awssdk.services.s3.model.CompletedMultipartUpload import software.amazon.awssdk.services.s3.model.CompletedPart @@ -69,7 +70,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { private val transferManagerV2: S3TransferManager = createTransferManagerV2() @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testMultipartUpload_asyncClient(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -82,7 +83,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { .build(), AsyncRequestBody.fromFile(uploadFile) ).join().also { - assertThat(it.checksumCRC32()).isEqualTo(DigestUtil.checksumFor(uploadFile.toPath(), Algorithm.CRC32)) + assertThat(it.checksumCRC32()).isEqualTo(DigestUtil.checksumFor(uploadFile.toPath(), CRC32)) } s3AsyncClientV2.waiter() @@ -112,7 +113,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testMultipartUpload_transferManager(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -167,7 +168,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Tests if user metadata can be passed by multipart upload. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testMultipartUpload_withUserMetadata(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -226,7 +227,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Tests if a multipart upload with the last part being smaller than 5MB works. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testMultipartUpload(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -313,14 +314,18 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Tests if a multipart upload with the last part being smaller than 5MB works. 
*/ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testMultipartUpload_checksum(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val uploadFile = File(TEST_IMAGE_TIFF) //construct uploadfile >5MB - val uploadBytes = readStreamIntoByteArray(uploadFile.inputStream()) + - readStreamIntoByteArray(uploadFile.inputStream()) + - readStreamIntoByteArray(uploadFile.inputStream()) + val tempFile = Files.newTemporaryFile().also { + (readStreamIntoByteArray(uploadFile.inputStream()) + + readStreamIntoByteArray(uploadFile.inputStream()) + + readStreamIntoByteArray(uploadFile.inputStream())) + .inputStream() + .copyTo(it.outputStream()) + } val initiateMultipartUploadResult = s3ClientV2 .createMultipartUpload( @@ -332,29 +337,39 @@ internal class MultiPartUploadV2IT : S3TestBase() { ) val uploadId = initiateMultipartUploadResult.uploadId() // upload part 1, <5MB - val etag1 = s3ClientV2.uploadPart( + val partResponse1 = s3ClientV2.uploadPart( UploadPartRequest .builder() .bucket(initiateMultipartUploadResult.bucket()) .key(initiateMultipartUploadResult.key()) .uploadId(uploadId) + .checksumAlgorithm(ChecksumAlgorithm.CRC32) .partNumber(1) - .contentLength(uploadBytes.size.toLong()).build(), + .contentLength(tempFile.length()).build(), //.lastPart(true) - RequestBody.fromBytes(uploadBytes), - ).eTag() + RequestBody.fromFile(tempFile), + ) + val etag1 = partResponse1.eTag() + val checksum1 = partResponse1.checksumCRC32() // upload part 2, <5MB - val etag2 = s3ClientV2.uploadPart( + val partResponse2 = s3ClientV2.uploadPart( UploadPartRequest .builder() .bucket(initiateMultipartUploadResult.bucket()) .key(initiateMultipartUploadResult.key()) .uploadId(uploadId) + .checksumAlgorithm(ChecksumAlgorithm.CRC32) .partNumber(2) .contentLength(uploadFile.length()).build(), //.lastPart(true) RequestBody.fromFile(uploadFile), - ).eTag() + ) + val etag2 = partResponse2.eTag() + val checksum2 = partResponse2.checksumCRC32() + val localChecksum1 = DigestUtil.checksumFor(tempFile.toPath(), CRC32) + assertThat(checksum1).isEqualTo(localChecksum1) + val localChecksum2 = DigestUtil.checksumFor(uploadFile.toPath(), CRC32) + assertThat(checksum2).isEqualTo(localChecksum2) val completeMultipartUpload = s3ClientV2.completeMultipartUpload( CompleteMultipartUploadRequest @@ -370,11 +385,13 @@ internal class MultiPartUploadV2IT : S3TestBase() { .builder() .eTag(etag1) .partNumber(1) + .checksumCRC32(checksum1) .build(), CompletedPart .builder() .eTag(etag2) .partNumber(2) + .checksumCRC32(checksum2) .build() ) .build() @@ -382,9 +399,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { .build() ) - val uploadFileBytes = readStreamIntoByteArray(uploadFile.inputStream()) - - (DigestUtils.md5(uploadBytes) + DigestUtils.md5(readStreamIntoByteArray(uploadFile.inputStream()))).also { + (DigestUtils.md5(tempFile.readBytes()) + DigestUtils.md5(readStreamIntoByteArray(uploadFile.inputStream()))).also { // verify special etag assertThat(completeMultipartUpload.eTag()).isEqualTo("\"${DigestUtils.md5Hex(it)}-2\"") } @@ -394,13 +409,14 @@ internal class MultiPartUploadV2IT : S3TestBase() { .builder() .bucket(bucketName) .key(TEST_IMAGE_TIFF) + .checksumMode(ChecksumMode.ENABLED) .build() ).use { // verify content size - assertThat(it.response().contentLength()).isEqualTo(uploadBytes.size.toLong() + uploadFileBytes.size.toLong()) + assertThat(it.response().contentLength()).isEqualTo(tempFile.length() + uploadFile.length()) // verify contents - 
assertThat(readStreamIntoByteArray(it.buffered())).isEqualTo(concatByteArrays(uploadBytes, uploadFileBytes)) - assertThat(it.response().checksumCRC32()).isEqualTo("0qCRZA==") //TODO: fix checksum for random byte uploads + assertThat(readStreamIntoByteArray(it.buffered())).isEqualTo(tempFile.readBytes() + uploadFile.readBytes()) + assertThat(it.response().checksumCRC32()).isEqualTo("oGk6qg==-2") } assertThat(completeMultipartUpload.location()) @@ -408,7 +424,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { } - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @ParameterizedTest @MethodSource(value = ["checksumAlgorithms"]) fun testUploadPart_checksumAlgorithm(checksumAlgorithm: ChecksumAlgorithm, testInfo: TestInfo) { @@ -420,6 +436,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { .builder() .bucket(bucketName) .key(UPLOAD_FILE_NAME) + .checksumAlgorithm(checksumAlgorithm) .build() ) val uploadId = initiateMultipartUploadResult.uploadId() @@ -450,7 +467,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { ) } - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) @ParameterizedTest @MethodSource(value = ["checksumAlgorithms"]) fun testMultipartUpload_checksum(checksumAlgorithm: ChecksumAlgorithm, testInfo: TestInfo) { @@ -462,6 +479,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { .builder() .bucket(bucketName) .key(UPLOAD_FILE_NAME) + .checksumAlgorithm(checksumAlgorithm) .build() ) val uploadId = initiateMultipartUploadResult.uploadId() @@ -489,7 +507,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { } @Test - @S3VerifiedTodo + @S3VerifiedSuccess(year = 2024) fun testMultipartUpload_wrongChecksum(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -536,7 +554,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testInitiateMultipartAndRetrieveParts(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -588,7 +606,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Tests if not yet completed / aborted multipart uploads are listed. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testListMultipartUploads_ok(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) assertThat( @@ -629,7 +647,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Tests if empty parts list of not yet completed multipart upload is returned. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testListMultipartUploads_empty(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) assertThat( @@ -670,7 +688,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Tests that an exception is thrown when listing parts if the upload id is unknown. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testListMultipartUploads_throwOnUnknownId(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) @@ -692,7 +710,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Tests if not yet completed / aborted multipart uploads are listed with prefix filtering. 
*/ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testListMultipartUploads_withPrefix(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) s3ClientV2 @@ -722,7 +740,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Tests if multipart uploads are stored and can be retrieved by bucket. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testListMultipartUploads_multipleBuckets(testInfo: TestInfo) { // create multipart upload 1 val bucketName1 = givenBucketV2(testInfo) @@ -781,7 +799,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Tests if a multipart upload can be aborted. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testAbortMultipartUpload(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) assertThat( @@ -865,7 +883,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * irrespective of the number of parts uploaded before. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testCompleteMultipartUpload_partLeftOut(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val key = randomName @@ -948,7 +966,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * aborted. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testListParts_completeAndAbort(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val key = randomName @@ -1037,7 +1055,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Upload two objects, copy as parts without length, complete multipart. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldCopyPartsAndComplete(testInfo: TestInfo) { //Initiate upload val bucketName2 = givenRandomBucketV2() @@ -1136,7 +1154,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Requests parts for the uploadId; compares etag of upload response and parts list. */ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldCopyObjectPart(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val uploadFile = File(sourceKey) @@ -1186,7 +1204,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { * Tries to copy part of a non-existing object to a new bucket. 
*/ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun shouldThrowNoSuchKeyOnCopyObjectPartForNonExistingKey(testInfo: TestInfo) { val sourceKey = "NON_EXISTENT_KEY" val destinationBucket = givenRandomBucketV2() @@ -1273,7 +1291,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testUploadPartCopy_successNoneMatch(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val uploadFile = File(sourceKey) @@ -1320,7 +1338,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testUploadPartCopy_failureMatch(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val uploadFile = File(sourceKey) @@ -1359,7 +1377,7 @@ internal class MultiPartUploadV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testUploadPartCopy_failureNoneMatch(testInfo: TestInfo) { val sourceKey = UPLOAD_FILE_NAME val uploadFile = File(sourceKey) diff --git a/server/src/main/java/com/adobe/testing/s3mock/MultipartController.java b/server/src/main/java/com/adobe/testing/s3mock/MultipartController.java index 21ccb4849..fb4279631 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/MultipartController.java +++ b/server/src/main/java/com/adobe/testing/s3mock/MultipartController.java @@ -385,14 +385,14 @@ public ResponseEntity completeMultipartUpload( encryptionHeadersFrom(httpHeaders), locationWithEncodedKey); - String checksum = result.getRight().checksum(); - ChecksumAlgorithm checksumAlgorithm = result.getRight().checksumAlgorithm(); + String checksum = result.checksum(); + ChecksumAlgorithm checksumAlgorithm = result.multipartUploadInfo().checksumAlgorithm(); - //return checksum and encryption headers. + //return encryption headers. //return version id return ResponseEntity .ok() .headers(h -> h.setAll(checksumHeaderFrom(checksum, checksumAlgorithm))) - .body(result.getLeft()); + .body(result); } } diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResult.java b/server/src/main/java/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResult.java index 5f636f883..59b30a6c5 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResult.java +++ b/server/src/main/java/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResult.java @@ -18,6 +18,8 @@ import static com.adobe.testing.s3mock.util.EtagUtil.normalizeEtag; +import com.adobe.testing.s3mock.store.MultipartUploadInfo; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonRootName; import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty; @@ -38,7 +40,11 @@ public record CompleteMultipartUploadResult( String etag, //workaround for adding xmlns attribute to root element only. 
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") - String xmlns + String xmlns, + + @JsonIgnore + MultipartUploadInfo multipartUploadInfo, + String checksum ) { public CompleteMultipartUploadResult { etag = normalizeEtag(etag); @@ -47,7 +53,12 @@ public record CompleteMultipartUploadResult( } } - public CompleteMultipartUploadResult(String location, String bucket, String key, String etag) { - this(location, bucket, key, etag, null); + public CompleteMultipartUploadResult(String location, + String bucket, + String key, + String etag, + MultipartUploadInfo multipartUploadInfo, + String checksum) { + this(location, bucket, key, etag, null, multipartUploadInfo, checksum); } } diff --git a/server/src/main/java/com/adobe/testing/s3mock/service/MultipartService.java b/server/src/main/java/com/adobe/testing/s3mock/service/MultipartService.java index bbff96f47..d05c741a3 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/service/MultipartService.java +++ b/server/src/main/java/com/adobe/testing/s3mock/service/MultipartService.java @@ -35,7 +35,6 @@ import com.adobe.testing.s3mock.dto.StorageClass; import com.adobe.testing.s3mock.store.BucketStore; import com.adobe.testing.s3mock.store.MultipartStore; -import com.adobe.testing.s3mock.store.MultipartUploadInfo; import java.nio.file.Path; import java.util.Collections; import java.util.Date; @@ -43,7 +42,6 @@ import java.util.Map; import java.util.UUID; import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpRange; @@ -177,7 +175,7 @@ public void abortMultipartUpload(String bucketName, String key, String uploadId) * * @return etag of the uploaded file. */ - public Pair completeMultipartUpload( + public CompleteMultipartUploadResult completeMultipartUpload( String bucketName, String key, String uploadId, @@ -190,10 +188,9 @@ public Pair completeMultipar return null; } var multipartUploadInfo = multipartStore.getMultipartUploadInfo(bucketMetadata, uploadId); - var etag = multipartStore - .completeMultipartUpload(bucketMetadata, key, id, uploadId, parts, encryptionHeaders); - return Pair.of(new CompleteMultipartUploadResult(location, bucketName, key, etag), - multipartUploadInfo); + return multipartStore + .completeMultipartUpload(bucketMetadata, key, id, uploadId, parts, encryptionHeaders, + multipartUploadInfo, location); } /** @@ -221,7 +218,6 @@ public InitiateMultipartUploadResult createMultipartUpload(String bucketName, String checksum, ChecksumAlgorithm checksumAlgorithm) { var bucketMetadata = bucketStore.getBucketMetadata(bucketName); - //TODO: add upload to bucket var id = bucketStore.addKeyToBucket(key, bucketName); try { @@ -288,7 +284,6 @@ public void verifyMultipartParts(String bucketName, String key, var bucketMetadata = bucketStore.getBucketMetadata(bucketName); var id = bucketMetadata.getID(key); if (id == null) { - //TODO: is this the correct error? 
throw INVALID_PART; } verifyMultipartParts(bucketName, id, uploadId); diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/MultipartStore.java b/server/src/main/java/com/adobe/testing/s3mock/store/MultipartStore.java index 98eb33cfc..098a37ed9 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/store/MultipartStore.java +++ b/server/src/main/java/com/adobe/testing/s3mock/store/MultipartStore.java @@ -25,6 +25,7 @@ import static org.apache.commons.lang3.StringUtils.isBlank; import com.adobe.testing.s3mock.dto.ChecksumAlgorithm; +import com.adobe.testing.s3mock.dto.CompleteMultipartUploadResult; import com.adobe.testing.s3mock.dto.CompletedPart; import com.adobe.testing.s3mock.dto.MultipartUpload; import com.adobe.testing.s3mock.dto.Owner; @@ -234,9 +235,14 @@ public String putPart(BucketMetadata bucket, * * @return etag of the uploaded file. */ - public String completeMultipartUpload(BucketMetadata bucket, String key, UUID id, - String uploadId, List parts, Map encryptionHeaders) { - var uploadInfo = getMultipartUploadInfo(bucket, uploadId); + public CompleteMultipartUploadResult completeMultipartUpload(BucketMetadata bucket, + String key, + UUID id, + String uploadId, + List parts, + Map encryptionHeaders, + MultipartUploadInfo uploadInfo, + String location) { if (uploadInfo == null) { throw new IllegalArgumentException("Unknown upload " + uploadId); } @@ -252,7 +258,7 @@ public String completeMultipartUpload(BucketMetadata bucket, String key, UUID id try (var inputStream = toInputStream(partsPaths)) { tempFile = Files.createTempFile("completeMultipartUpload", ""); inputStream.transferTo(Files.newOutputStream(tempFile)); - var checksumFor = checksumFor(tempFile, uploadInfo); + var checksumFor = checksumFor(partsPaths, uploadInfo); var etag = hexDigestMultipart(partsPaths); objectStore.storeS3ObjectMetadata(bucket, id, @@ -270,7 +276,8 @@ public String completeMultipartUpload(BucketMetadata bucket, String key, UUID id uploadInfo.storageClass() ); FileUtils.deleteDirectory(partFolder.toFile()); - return etag; + return new CompleteMultipartUploadResult(location, uploadInfo.bucket(), + key, etag, uploadInfo, checksumFor); } catch (IOException e) { throw new IllegalStateException(String.format( "Error finishing multipart upload bucket=%s, key=%s, id=%s, uploadId=%s", @@ -282,9 +289,9 @@ public String completeMultipartUpload(BucketMetadata bucket, String key, UUID id } } - private String checksumFor(Path path, MultipartUploadInfo uploadInfo) { + private String checksumFor(List paths, MultipartUploadInfo uploadInfo) { if (uploadInfo.checksumAlgorithm() != null) { - return DigestUtil.checksumFor(path, uploadInfo.checksumAlgorithm().toAlgorithm()); + return DigestUtil.checksumMultipart(paths, uploadInfo.checksumAlgorithm().toAlgorithm()); } return null; } @@ -463,7 +470,7 @@ private Path getUploadMetadataPath(BucketMetadata bucket, String uploadId) { return getPartsFolder(bucket, uploadId).resolve(MULTIPART_UPLOAD_META_FILE); } - public MultipartUploadInfo getUploadMetadata(BucketMetadata bucket, String uploadId) { + private MultipartUploadInfo getUploadMetadata(BucketMetadata bucket, String uploadId) { var metaPath = getUploadMetadataPath(bucket, uploadId); if (Files.exists(metaPath)) { diff --git a/server/src/main/java/com/adobe/testing/s3mock/util/DigestUtil.java b/server/src/main/java/com/adobe/testing/s3mock/util/DigestUtil.java index 9cdf368be..6dd99f65c 100644 --- a/server/src/main/java/com/adobe/testing/s3mock/util/DigestUtil.java +++ 
b/server/src/main/java/com/adobe/testing/s3mock/util/DigestUtil.java @@ -73,6 +73,17 @@ public static String checksumFor(Path path, Algorithm algorithm) { * @return the checksum */ public static String checksumFor(InputStream is, Algorithm algorithm) { + return BinaryUtils.toBase64(checksum(is, algorithm)); + } + + /** + * Calculate a checksum for the given inputstream and algorithm. + * + * @param is InputStream containing the bytes to generate the checksum for + * @param algorithm algorithm to use + * @return the checksum + */ + public static byte[] checksum(InputStream is, Algorithm algorithm) { SdkChecksum sdkChecksum = SdkChecksum.forAlgorithm(algorithm); try { byte[] buffer = new byte[4096]; @@ -80,16 +91,32 @@ public static String checksumFor(InputStream is, Algorithm algorithm) { while ((read = is.read(buffer)) != -1) { sdkChecksum.update(buffer, 0, read); } - return BinaryUtils.toBase64(sdkChecksum.getChecksumBytes()); + return sdkChecksum.getChecksumBytes(); } catch (IOException e) { throw new IllegalStateException(CHECKSUM_COULD_NOT_BE_CALCULATED, e); } } + private static byte[] checksum(List paths, Algorithm algorithm) { + SdkChecksum sdkChecksum = SdkChecksum.forAlgorithm(algorithm); + var allChecksums = new byte[0]; + for (var path : paths) { + try (var inputStream = Files.newInputStream(path)) { + allChecksums = ArrayUtils.addAll(allChecksums, checksum(inputStream, algorithm)); + } catch (IOException e) { + throw new IllegalStateException("Could not read from path " + path, e); + } + } + sdkChecksum.update(allChecksums, 0, allChecksums.length); + allChecksums = sdkChecksum.getChecksumBytes(); + return allChecksums; + } + /** * Calculates a hex encoded MD5 digest for the contents of a list of paths. * This is a special case that emulates how AWS calculates the MD5 Checksums of the parts of a - * Multipart upload: + * Multipart upload. + * API * * Stackoverflow * @@ -110,6 +137,17 @@ public static String hexDigestMultipart(List paths) { return DigestUtils.md5Hex(md5(null, paths)) + "-" + paths.size(); } + /** + * Calculates the checksum for a list of paths. + * For multipart uploads, AWS takes the checksum of all parts, concatenates them, and then takes + * the checksum again. Then, they add a hyphen and the number of parts used to calculate the + * checksum. 
+ * API + */ + public static String checksumMultipart(List paths, Algorithm algorithm) { + return BinaryUtils.toBase64(checksum(paths, algorithm)) + "-" + paths.size(); + } + public static String hexDigest(byte[] bytes) { return DigestUtils.md5Hex(bytes); } diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResultTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResultTest.kt index dfc571055..75762ae8a 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResultTest.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResultTest.kt @@ -24,7 +24,7 @@ internal class CompleteMultipartUploadResultTest { @Test @Throws(IOException::class) fun testSerialization(testInfo: TestInfo) { - val iut = CompleteMultipartUploadResult("location", "bucket", "key", "etag") + val iut = CompleteMultipartUploadResult("location", "bucket", "key", "etag", null, null) assertThat(iut).isNotNull() DtoTestUtil.serializeAndAssert(iut, testInfo) } diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/store/MultipartStoreTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/store/MultipartStoreTest.kt index 90ecee59d..194478174 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/store/MultipartStoreTest.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/store/MultipartStoreTest.kt @@ -15,7 +15,9 @@ */ package com.adobe.testing.s3mock.store +import com.adobe.testing.s3mock.dto.ChecksumAlgorithm import com.adobe.testing.s3mock.dto.CompletedPart +import com.adobe.testing.s3mock.dto.MultipartUpload import com.adobe.testing.s3mock.dto.Owner import com.adobe.testing.s3mock.dto.Part import com.adobe.testing.s3mock.dto.StorageClass @@ -165,6 +167,7 @@ internal class MultipartStoreTest : StoreTestBase() { emptyMap(), StorageClass.STANDARD, null, null ) val uploadId = multipartUpload.uploadId + val multipartUploadInfo = multipartUploadInfo(multipartUpload) multipartStore .putPart( metadataFrom(TEST_BUCKET_NAME), id, uploadId, "1", @@ -176,10 +179,10 @@ internal class MultipartStoreTest : StoreTestBase() { tempFile2, emptyMap() ) - val etag = + val result = multipartStore.completeMultipartUpload( metadataFrom(TEST_BUCKET_NAME), fileName, id, - uploadId, getParts(2), emptyMap() + uploadId, getParts(2), emptyMap(), multipartUploadInfo, "location" ) val allMd5s = DigestUtils.md5("Part1") + DigestUtils.md5("Part2") @@ -195,7 +198,7 @@ internal class MultipartStoreTest : StoreTestBase() { TEST_BUCKET_NAME, id.toString(), "objectMetadata.json" ).toFile() ).exists() - assertThat(etag).isEqualTo(DigestUtils.md5Hex(allMd5s) + "-2") + assertThat(result.etag).isEqualTo("\"${DigestUtils.md5Hex(allMd5s)}-2\"") } @Test @@ -216,6 +219,7 @@ internal class MultipartStoreTest : StoreTestBase() { emptyMap(), StorageClass.STANDARD, null, null ) val uploadId = multipartUpload.uploadId + val multipartUploadInfo = multipartUploadInfo(multipartUpload) multipartStore .putPart(metadataFrom(TEST_BUCKET_NAME), id, uploadId, "1", tempFile1, emptyMap()) multipartStore @@ -223,7 +227,7 @@ internal class MultipartStoreTest : StoreTestBase() { multipartStore.completeMultipartUpload( metadataFrom(TEST_BUCKET_NAME), fileName, id, uploadId, - getParts(2), emptyMap() + getParts(2), emptyMap(), multipartUploadInfo, "location" ) objectStore.getS3ObjectMetadata(metadataFrom(TEST_BUCKET_NAME), id).also { @@ -302,6 +306,7 @@ internal class MultipartStoreTest : StoreTestBase() { emptyMap(), StorageClass.STANDARD, 
null, null ) val uploadId = multipartUpload.uploadId + val multipartUploadInfo = multipartUploadInfo(multipartUpload) val tempFile = Files.createTempFile("", "") ByteArrayInputStream("Part1".toByteArray()).transferTo(Files.newOutputStream(tempFile)) multipartStore @@ -312,7 +317,7 @@ internal class MultipartStoreTest : StoreTestBase() { multipartStore.completeMultipartUpload( metadataFrom(TEST_BUCKET_NAME), fileName, id, uploadId, - getParts(1), emptyMap() + getParts(1), emptyMap(), multipartUploadInfo, "location" ) assertThat( @@ -335,7 +340,7 @@ internal class MultipartStoreTest : StoreTestBase() { StorageClass.STANDARD, null, null ) val uploadId = multipartUpload.uploadId - + val multipartUploadInfo = multipartUploadInfo(multipartUpload) val uploads = multipartStore.listMultipartUploads(bucketMetadata, NO_PREFIX) assertThat(uploads).hasSize(1) uploads.iterator().next().also { @@ -347,7 +352,7 @@ internal class MultipartStoreTest : StoreTestBase() { multipartStore.completeMultipartUpload( bucketMetadata, fileName, id, uploadId, getParts(0), - emptyMap() + emptyMap(), multipartUploadInfo, "location" ) assertThat(multipartStore.listMultipartUploads(bucketMetadata, NO_PREFIX)).isEmpty() @@ -371,6 +376,7 @@ internal class MultipartStoreTest : StoreTestBase() { StorageClass.STANDARD, null, null ) val uploadId1 = multipartUpload1.uploadId + val multipartUploadInfo1 = multipartUploadInfo(multipartUpload1) val fileName2 = "PartFile2" val id2 = managedId() val multipartUpload2 = multipartStore @@ -380,7 +386,7 @@ internal class MultipartStoreTest : StoreTestBase() { StorageClass.STANDARD, null, null ) val uploadId2 = multipartUpload2.uploadId - + val multipartUploadInfo2 = multipartUploadInfo(multipartUpload2) multipartStore.listMultipartUploads(bucketMetadata1, NO_PREFIX).also { assertThat(it).hasSize(1) it[0].also { @@ -403,11 +409,11 @@ internal class MultipartStoreTest : StoreTestBase() { multipartStore.completeMultipartUpload( bucketMetadata1, fileName1, id1, uploadId1, - getParts(0), emptyMap() + getParts(0), emptyMap(), multipartUploadInfo1, "location" ) multipartStore.completeMultipartUpload( bucketMetadata2, fileName2, id2, uploadId2, - getParts(0), emptyMap() + getParts(0), emptyMap(), multipartUploadInfo2, "location" ) assertThat(multipartStore.listMultipartUploads(bucketMetadata1, NO_PREFIX)).isEmpty() @@ -575,7 +581,7 @@ internal class MultipartStoreTest : StoreTestBase() { StorageClass.STANDARD, null, null ) val uploadId = multipartUpload.uploadId - + val multipartUploadInfo = multipartUploadInfo(multipartUpload) for (i in 1..10) { val tempFile = Files.createTempFile("", "") ByteArrayInputStream(("$i\n").toByteArray(StandardCharsets.UTF_8)) @@ -588,7 +594,7 @@ internal class MultipartStoreTest : StoreTestBase() { } multipartStore.completeMultipartUpload( metadataFrom(TEST_BUCKET_NAME), filename, id, uploadId, - getParts(10), emptyMap() + getParts(10), emptyMap(), multipartUploadInfo, "location" ) val s = objectStore.getS3ObjectMetadata(metadataFrom(TEST_BUCKET_NAME), id) .dataPath @@ -626,6 +632,18 @@ internal class MultipartStoreTest : StoreTestBase() { } + private fun multipartUploadInfo(multipartUpload: MultipartUpload?) 
= MultipartUploadInfo( + multipartUpload, + "application/octet-stream", + mapOf(), + mapOf(), + mapOf(), + "bucket", + null, + "checksum", + ChecksumAlgorithm.CRC32 + ) + companion object { private val idCache: MutableList = Collections.synchronizedList(arrayListOf()) diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/util/DigestUtilTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/util/DigestUtilTest.kt index 48f152684..278b2b977 100644 --- a/server/src/test/kotlin/com/adobe/testing/s3mock/util/DigestUtilTest.kt +++ b/server/src/test/kotlin/com/adobe/testing/s3mock/util/DigestUtilTest.kt @@ -19,6 +19,8 @@ import org.apache.commons.codec.digest.DigestUtils import org.assertj.core.api.Assertions.assertThat import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInfo +import software.amazon.awssdk.core.checksums.Algorithm +import software.amazon.awssdk.utils.BinaryUtils internal class DigestUtilTest { @Test @@ -41,4 +43,25 @@ internal class DigestUtilTest { assertThat(DigestUtil.hexDigestMultipart(files)).isEqualTo(expected) } + + @Test + fun testChecksumOfMultipleFiles(testInfo: TestInfo) { + //yes, this is correct - AWS calculates a Multipart digest by calculating the digest of every + //file involved, and then calculates the digest on the result. + //a hyphen with the part count is added as a suffix. + val expected = "${ + BinaryUtils.toBase64(DigestUtils.sha256( + DigestUtils.sha256("Part1") //testFile1 + + DigestUtils.sha256("Part2") //testFile2 + )) + }-2" + + //files contain the exact content seen above + val files = listOf( + TestUtil.getTestFile(testInfo, "testFile1").toPath(), + TestUtil.getTestFile(testInfo, "testFile2").toPath() + ) + + assertThat(DigestUtil.checksumMultipart(files, Algorithm.SHA256)).isEqualTo(expected) + } } diff --git a/server/src/test/resources/com/adobe/testing/s3mock/util/DigestUtilTest_testChecksumOfMultipleFiles_testFile1 b/server/src/test/resources/com/adobe/testing/s3mock/util/DigestUtilTest_testChecksumOfMultipleFiles_testFile1 new file mode 100644 index 000000000..4b668a1ed --- /dev/null +++ b/server/src/test/resources/com/adobe/testing/s3mock/util/DigestUtilTest_testChecksumOfMultipleFiles_testFile1 @@ -0,0 +1 @@ +Part1 \ No newline at end of file diff --git a/server/src/test/resources/com/adobe/testing/s3mock/util/DigestUtilTest_testChecksumOfMultipleFiles_testFile2 b/server/src/test/resources/com/adobe/testing/s3mock/util/DigestUtilTest_testChecksumOfMultipleFiles_testFile2 new file mode 100644 index 000000000..86dbe2084 --- /dev/null +++ b/server/src/test/resources/com/adobe/testing/s3mock/util/DigestUtilTest_testChecksumOfMultipleFiles_testFile2 @@ -0,0 +1 @@ +Part2 \ No newline at end of file From ef07114b746a94bcfdcedd46447364c4cdcc6465 Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sun, 26 May 2024 16:34:20 +0200 Subject: [PATCH 11/14] Verify generatePresignedUrlWithResponseHeaderOverrides The problem was the request config, we were setting the local host and port in addition to the full presigned Url. 
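For context on the change below: a presigned URL already encodes scheme, host, port, path and the signed query parameters, so the HTTP client has to be pointed at that URL alone — supplying a separate HttpHost, as the test previously did, can route the request to a target other than the one the signature was computed for. A minimal sketch of the corrected call pattern with the Apache HttpClient classes these ITs already use; getViaPresignedUrl is an illustrative helper name, and the URL is whatever generatePresignedUrl (v1) or an S3Presigner (v2) produced:

    import org.apache.http.HttpStatus
    import org.apache.http.client.methods.HttpGet
    import org.apache.http.impl.client.HttpClients
    import org.assertj.core.api.Assertions.assertThat
    import java.net.URL

    // Execute the GET against the presigned URL itself; no explicit HttpHost is passed,
    // so the request goes to exactly the host/port/path that was signed.
    fun getViaPresignedUrl(presignedUrl: URL) {
        HttpClients.createDefault().use { client ->
            client.execute(HttpGet(presignedUrl.toString())).use { response ->
                assertThat(response.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
            }
        }
    }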
--- .../adobe/testing/s3mock/its/GetPutDeleteObjectV1IT.kt | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV1IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV1IT.kt index 196f63696..271a4087f 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV1IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectV1IT.kt @@ -47,6 +47,7 @@ import java.io.FileInputStream import java.io.InputStream import java.util.UUID import java.util.stream.Collectors +import javax.net.ssl.HostnameVerifier import kotlin.math.min /** @@ -442,7 +443,7 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) //TODO: failed in 2024. Not sure why yet.... + @S3VerifiedSuccess(year = 2024) fun generatePresignedUrlWithResponseHeaderOverrides(testInfo: TestInfo) { val (bucketName, _) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) val presignedUrlRequest = GeneratePresignedUrlRequest(bucketName, UPLOAD_FILE_NAME).apply { @@ -458,13 +459,11 @@ internal class GetPutDeleteObjectV1IT : S3TestBase() { ) this.method = HttpMethod.GET } - val resourceUrl = s3Client.generatePresignedUrl(presignedUrlRequest) + val resourceUrl = createS3ClientV1(serviceEndpointHttp).generatePresignedUrl(presignedUrlRequest) HttpClients.createDefault().use { val getObject = HttpGet(resourceUrl.toString()) it.execute( - HttpHost( - host, httpPort - ), getObject + getObject ).also { response -> assertThat(response.getFirstHeader(Headers.CACHE_CONTROL).value).isEqualTo("cacheControl") assertThat(response.getFirstHeader(Headers.CONTENT_DISPOSITION).value).isEqualTo("contentDisposition") From 92c78cff7da17746d0d1e8279f74f83e28aa99d6 Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sun, 26 May 2024 16:47:51 +0200 Subject: [PATCH 12/14] Verify ObjectTagging*ITs --- .../com/adobe/testing/s3mock/its/ObjectTaggingV1IT.kt | 8 ++++---- .../com/adobe/testing/s3mock/its/ObjectTaggingV2IT.kt | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingV1IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingV1IT.kt index 4b01eb3cc..fcf0267e3 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingV1IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingV1IT.kt @@ -33,7 +33,7 @@ internal class ObjectTaggingV1IT : S3TestBase() { val s3Client: AmazonS3 = createS3ClientV1() @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testPutAndGetObjectTagging(testInfo: TestInfo) { val (bucketName, _) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) val s3Object = s3Client.getObject(bucketName, UPLOAD_FILE_NAME) @@ -52,7 +52,7 @@ internal class ObjectTaggingV1IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testPutObjectAndGetObjectTagging_withTagging(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -77,7 +77,7 @@ internal class ObjectTaggingV1IT : S3TestBase() { * Verify that tagging with multiple tags can be obtained and returns expected content. 
*/ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testPutObjectAndGetObjectTagging_multipleTags(testInfo: TestInfo) { val bucketName = givenBucketV1(testInfo) val uploadFile = File(UPLOAD_FILE_NAME) @@ -101,7 +101,7 @@ internal class ObjectTaggingV1IT : S3TestBase() { @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testGetObjectTagging_noTags(testInfo: TestInfo) { val (bucketName, _) = givenBucketAndObjectV1(testInfo, UPLOAD_FILE_NAME) val s3Object = s3Client.getObject(bucketName, UPLOAD_FILE_NAME) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingV2IT.kt index 491936909..acc019d06 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingV2IT.kt @@ -30,7 +30,7 @@ internal class ObjectTaggingV2IT : S3TestBase() { private val s3ClientV2: S3Client = createS3ClientV2() @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testGetObjectTagging_noTags(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) s3ClientV2.putObject( @@ -48,7 +48,7 @@ internal class ObjectTaggingV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testPutAndGetObjectTagging(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val key = "foo" @@ -77,7 +77,7 @@ internal class ObjectTaggingV2IT : S3TestBase() { } @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testPutObjectAndGetObjectTagging_withTagging(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) s3ClientV2.putObject( @@ -98,7 +98,7 @@ internal class ObjectTaggingV2IT : S3TestBase() { * Verify that tagging with multiple tags can be obtained and returns expected content. 
*/ @Test - @S3VerifiedSuccess(year = 2022) + @S3VerifiedSuccess(year = 2024) fun testPutObjectAndGetObjectTagging_multipleTags(testInfo: TestInfo) { val bucketName = givenBucketV2(testInfo) val tag1 = Tag.builder().key("tag1").value("foo").build() From e06fafa1018a98d9804be0311d5c20984d149968 Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sun, 26 May 2024 16:49:40 +0200 Subject: [PATCH 13/14] Verify PresignedUrlV2IT --- ...resignedUriV2IT.kt => PresignedUrlV2IT.kt} | 65 +++++++------------ .../adobe/testing/s3mock/its/S3TestBase.kt | 2 + 2 files changed, 26 insertions(+), 41 deletions(-) rename integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/{PresignedUriV2IT.kt => PresignedUrlV2IT.kt} (90%) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PresignedUriV2IT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PresignedUrlV2IT.kt similarity index 90% rename from integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PresignedUriV2IT.kt rename to integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PresignedUrlV2IT.kt index e25e422f5..380fae4ff 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PresignedUriV2IT.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PresignedUrlV2IT.kt @@ -15,8 +15,8 @@ */ package com.adobe.testing.s3mock.its +import com.adobe.testing.s3mock.dto.InitiateMultipartUploadResult import com.adobe.testing.s3mock.util.DigestUtil -import org.apache.http.HttpHost import org.apache.http.HttpStatus import org.apache.http.client.methods.HttpDelete import org.apache.http.client.methods.HttpGet @@ -53,13 +53,14 @@ import java.nio.file.Files import java.nio.file.Path import java.time.Duration -internal class PresignedUriV2IT : S3TestBase() { +internal class PresignedUrlV2IT : S3TestBase() { private val httpClient: CloseableHttpClient = HttpClients.createDefault() private val s3ClientV2: S3Client = createS3ClientV2() - private val s3Presigner: S3Presigner = createS3Presigner() + private val s3Presigner: S3Presigner = createS3Presigner(serviceEndpointHttp) @Test - fun testPresignedUri_getObject(testInfo: TestInfo) { + @S3VerifiedSuccess(year = 2024) + fun testPresignedUrl_getObject(testInfo: TestInfo) { val key = UPLOAD_FILE_NAME val (bucketName, _) = givenBucketAndObjectV2(testInfo, key) @@ -82,9 +83,6 @@ internal class PresignedUriV2IT : S3TestBase() { HttpGet(presignedUrlString).also { get -> httpClient.execute( - HttpHost( - host, httpPort - ), get ).use { assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK) @@ -96,7 +94,8 @@ internal class PresignedUriV2IT : S3TestBase() { } @Test - fun testPresignedUri_putObject(testInfo: TestInfo) { + @S3VerifiedSuccess(year = 2024) + fun testPresignedUrl_putObject(testInfo: TestInfo) { val key = UPLOAD_FILE_NAME val bucketName = givenBucketV2(testInfo) @@ -121,9 +120,6 @@ internal class PresignedUriV2IT : S3TestBase() { this.entity = FileEntity(File(UPLOAD_FILE_NAME)) }.also { put -> httpClient.execute( - HttpHost( - host, httpPort - ), put ).use { assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK) @@ -143,7 +139,8 @@ internal class PresignedUriV2IT : S3TestBase() { } @Test - fun testPresignedUri_createMultipartUpload(testInfo: TestInfo) { + @S3VerifiedFailure(year = 2024, reason = "S3 returns no multipart uploads.") + fun testPresignedUrl_createMultipartUpload(testInfo: TestInfo) { val key = UPLOAD_FILE_NAME val bucketName = givenBucketV2(testInfo) @@ -164,25 +161,16 @@ internal class 
PresignedUriV2IT : S3TestBase() { val presignedUrlString = presignCreateMultipartUpload.url().toString() assertThat(presignedUrlString).isNotBlank() - HttpPost(presignedUrlString).apply { - this.entity = StringEntity( - """ - - bucketName - fileName - uploadId - - """ - ) - }.also { post -> - httpClient.execute( - HttpHost( - host, httpPort - ), + val uploadId = HttpPost(presignedUrlString) + .let { post -> + httpClient.execute( post ).use { assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK) - } + val result = MAPPER.readValue(it.entity.content, InitiateMultipartUploadResult::class.java) + assertThat(result).isNotNull + result + }.uploadId } s3ClientV2.listMultipartUploads( @@ -190,6 +178,7 @@ internal class PresignedUriV2IT : S3TestBase() { .builder() .bucket(bucketName) .keyMarker(key) + .uploadIdMarker(uploadId) .build() ).also { assertThat(it.uploads()).hasSize(1) @@ -197,7 +186,8 @@ internal class PresignedUriV2IT : S3TestBase() { } @Test - fun testPresignedUri_abortMultipartUpload(testInfo: TestInfo) { + @S3VerifiedSuccess(year = 2024) + fun testPresignedUrl_abortMultipartUpload(testInfo: TestInfo) { val key = UPLOAD_FILE_NAME val bucketName = givenBucketV2(testInfo) val file = File(UPLOAD_FILE_NAME) @@ -242,9 +232,6 @@ internal class PresignedUriV2IT : S3TestBase() { HttpDelete(presignedUrlString).also { delete -> httpClient.execute( - HttpHost( - host, httpPort - ), delete ).use { assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_NO_CONTENT) @@ -263,7 +250,8 @@ internal class PresignedUriV2IT : S3TestBase() { } @Test - fun testPresignedUri_completeMultipartUpload(testInfo: TestInfo) { + @S3VerifiedSuccess(year = 2024) + fun testPresignedUrl_completeMultipartUpload(testInfo: TestInfo) { val key = UPLOAD_FILE_NAME val bucketName = givenBucketV2(testInfo) val file = File(UPLOAD_FILE_NAME) @@ -318,9 +306,6 @@ internal class PresignedUriV2IT : S3TestBase() { """) }.also { post -> httpClient.execute( - HttpHost( - host, httpPort - ), post ).use { assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK) @@ -340,7 +325,8 @@ internal class PresignedUriV2IT : S3TestBase() { @Test - fun testPresignedUri_uploadPart(testInfo: TestInfo) { + @S3VerifiedSuccess(year = 2024) + fun testPresignedUrl_uploadPart(testInfo: TestInfo) { val key = UPLOAD_FILE_NAME val bucketName = givenBucketV2(testInfo) val file = File(UPLOAD_FILE_NAME) @@ -373,13 +359,10 @@ internal class PresignedUriV2IT : S3TestBase() { val presignedUrlString = presignUploadPart.url().toString() assertThat(presignedUrlString).isNotBlank() - val httpPut = HttpPut(presignedUrlString).apply { + HttpPut(presignedUrlString).apply { this.entity = FileEntity(File(UPLOAD_FILE_NAME)) }.also { put -> httpClient.execute( - HttpHost( - host, httpPort - ), put ).use { assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK) diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt index 9bd34f1c3..0f4a08d0d 100644 --- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt +++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt @@ -27,6 +27,7 @@ import com.amazonaws.services.s3.model.PutObjectRequest import com.amazonaws.services.s3.model.PutObjectResult import com.amazonaws.services.s3.transfer.TransferManager import com.amazonaws.services.s3.transfer.TransferManagerBuilder +import com.fasterxml.jackson.dataformat.xml.XmlMapper import 
org.apache.http.conn.ssl.NoopHostnameVerifier import org.apache.http.conn.ssl.SSLConnectionSocketFactory import org.assertj.core.api.Assertions.assertThat @@ -655,6 +656,7 @@ internal abstract class S3TestBase { const val BUFFER_SIZE = 128 * 1024 private const val THREAD_COUNT = 50 private const val PREFIX = "prefix" + val MAPPER = XmlMapper.builder().build() private val TEST_FILE_NAMES = listOf( SAMPLE_FILE, SAMPLE_FILE_LARGE, From 18d389c3721abb8db68e35efc941817f7a2f591d Mon Sep 17 00:00:00 2001 From: Arne Franken Date: Sun, 26 May 2024 16:59:01 +0200 Subject: [PATCH 14/14] Changelog for 3.9.0 --- CHANGELOG.md | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 13ac121e3..1e67503ea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,7 @@ * [Planned changes](#planned-changes) * [CURRENT - 3.x - THIS VERSION IS UNDER ACTIVE DEVELOPMENT](#current---3x---this-version-is-under-active-development) * [3.10.0 - PLANNED](#3100---planned) - * [3.9.0 - PLANNED](#390---planned) + * [3.9.0](#390) * [3.8.0](#380) * [3.7.3](#373) * [3.7.2](#372) @@ -131,6 +131,7 @@ Version 3.x is JDK17 LTS bytecode compatible, with Docker and JUnit / direct Jav * Features and fixes * Support Versions in APIs * Add "DeleteObjectTagging" API + * Support "Ownership" in buckets. ACLs should be * Refactorings * TBD * Version updates @@ -142,13 +143,22 @@ Version 3.x is JDK17 LTS bytecode compatible, with Docker and JUnit / direct Jav * Features and fixes * Persist metadata for parts, validate checksum on multipart completion (fixes #1205) * Refactorings - * Migrate Unit tests to Kotlin + * Migrate Unit tests to Kotlin + * Run ITs against real S3, fix code or tests in case of errors + * Fix Checksums for Multiparts + * Add ObjectOwnership config for Buckets, setting ACLs is not allowed otherwise + * Fix StorageClass, it's not returned for most APIs if it's "STANDARD" * Version updates - * Bump aws-v2.version from 2.25.49 to 2.25.54 - * Bump com.amazonaws:aws-java-sdk-s3 from 1.12.720 to 1.12.724 - * Bump actions/checkout from 4.1.5 to 4.1.6 - * Bump github/codeql-action from 3.25.4 to 3.25.5 + * Bump aws-v2.version from 2.25.49 to 2.25.59 + * Bump com.amazonaws:aws-java-sdk-s3 from 1.12.720 to 1.12.729 + * Bump kotlin.version from 1.9.24 to 2.0.0 + * Bump alpine from 3.19.1 to 3.20.0 in /docker + * Bump org.codehaus.mojo:exec-maven-plugin from 3.2.0 to 3.3.0 + * Bump com.github.ekryd.sortpom:sortpom-maven-plugin from 3.4.1 to 4.0.0 * Bump license-maven-plugin-git.version from 4.4 to 4.5 + * Bump actions/checkout from 4.1.5 to 4.1.6 + * Bump github/codeql-action from 3.25.4 to 3.25.6 + * Bump step-security/harden-runner from 2.7.1 to 2.8.0 ## 3.8.0 3.x is JDK17 LTS bytecode compatible, with Docker and JUnit / direct Java integration.
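A behavioural note behind the 3.9.0 changelog entry "Fix StorageClass, it's not returned for most APIs if it's 'STANDARD'": S3 omits the x-amz-storage-class header for STANDARD objects, so the parsed SDK field comes back null rather than StorageClass.STANDARD. A hedged Kotlin sketch of how such a check could look in the style of the other ITs (the test name is illustrative; givenBucketAndObjectV2, s3ClientV2 and UPLOAD_FILE_NAME are the existing S3TestBase helpers, so the snippet assumes it lives in a class extending S3TestBase):

    import org.junit.jupiter.api.Test
    import org.junit.jupiter.api.TestInfo
    import org.assertj.core.api.Assertions.assertThat
    import software.amazon.awssdk.services.s3.model.HeadObjectRequest

    @Test
    fun storageClassIsOmittedForStandardObjects(testInfo: TestInfo) {
        val (bucketName, _) = givenBucketAndObjectV2(testInfo, UPLOAD_FILE_NAME)
        val head = s3ClientV2.headObject(
            HeadObjectRequest.builder().bucket(bucketName).key(UPLOAD_FILE_NAME).build()
        )
        // S3 returns the storage-class header for every class except STANDARD,
        // so the SDK field is expected to be null here.
        assertThat(head.storageClass()).isNull()
    }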