diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
index d58dfb70b..44f3cf2c1 100644
--- a/.mvn/wrapper/maven-wrapper.properties
+++ b/.mvn/wrapper/maven-wrapper.properties
@@ -1,19 +1,2 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-wrapperVersion=3.3.2
distributionType=only-script
-distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.11/apache-maven-3.9.11-bin.zip
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d5ec856cf..7d0f67b85 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,7 @@ Whenever a 3rd party library is updated, S3Mock will update its MINOR version.
* [PLANNED - 5.x - RELEASE TBD](#planned---5x---release-tbd)
* [Planned changes](#planned-changes)
* [CURRENT - 4.x - THIS VERSION IS UNDER ACTIVE DEVELOPMENT](#current---4x---this-version-is-under-active-development)
+ * [4.10.0 - PLANNED](#4100---planned)
* [4.9.0 - PLANNED](#490---planned)
* [4.8.0](#480)
* [4.7.0](#470)
@@ -149,7 +150,7 @@ Version 4.x is JDK17 LTS bytecode compatible, with Docker and JUnit / direct Jav
**The current major version 4 will receive new features, dependency updates and bug fixes on a continuous basis.**
-## 4.9.0 - PLANNED
+## 4.10.0 - PLANNED
Version 4.x is JDK17 LTS bytecode compatible, with Docker and JUnit / direct Java integration.
* Features and fixes
@@ -161,6 +162,29 @@ Version 4.x is JDK17 LTS bytecode compatible, with Docker and JUnit / direct Jav
* Version updates (build dependencies)
* TBD
+## 4.9.0 - PLANNED
+Version 4.x is JDK17 LTS bytecode compatible, with Docker and JUnit / direct Java integration.
+
+* Features and fixes
+ * Let S3Mock validate bucket names according to AWS rules
+* Refactorings
+ * Let TaggingHeaderConverter convert XML tags
+ * Let Spring convert StorageClass in postObject
+ * Fix build errors: skip JavaDoc generation for POM type modules.
+ * Build robustness: execute unit and integration tests in parallel and in random order.
+ * Faster startup time through lazy initialization
+ * Build: move "checkstyle.xml" to "etc/". The "build-config" module was never necessary.
+ * Build: update Google Checkstyle to the latest version and fix violations.
+ * Build: use ktlint-maven-plugin to validate Kotlin code style.
+* Version updates (deliverable dependencies)
+ * TBD
+* Version updates (build dependencies)
+ * Bump com.puppycrawl.tools:checkstyle from 11.0.0 to 11.0.1
+ * Bump actions/stale from 9.1.0 to 10.0.0
+ * Bump github/codeql-action from 3.29.11 to 3.30.1
+ * Bump maven from 3.9.9 to 3.9.11
+ * Bump maven wrapper from 3.3.2 to 3.3.3
+
## 4.8.0
Version 4.x is JDK17 LTS bytecode compatible, with Docker and JUnit / direct Java integration.
diff --git a/build-config/pom.xml b/build-config/pom.xml
deleted file mode 100644
index a8db1500a..000000000
--- a/build-config/pom.xml
+++ /dev/null
@@ -1,47 +0,0 @@
-
-
-
- 4.0.0
-
- com.adobe.testing
- s3mock-parent
- 4.8.1-SNAPSHOT
-
-
- s3mock-build-config
- jar
- S3Mock - Build Configuration
-
-
-
-
- maven-deploy-plugin
-
- true
-
-
-
- maven-surefire-plugin
-
- true
-
-
-
-
-
diff --git a/build-config/src/main/resources/build-config/checkstyle.xml b/build-config/src/main/resources/build-config/checkstyle.xml
deleted file mode 100644
index c50922dd8..000000000
--- a/build-config/src/main/resources/build-config/checkstyle.xml
+++ /dev/null
@@ -1,267 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/docker/pom.xml b/docker/pom.xml
index 789f0a647..2c0644a0b 100644
--- a/docker/pom.xml
+++ b/docker/pom.xml
@@ -30,6 +30,10 @@
S3Mock - Docker
+
+ true
+
+
com.adobe.testing
diff --git a/etc/checkstyle.xml b/etc/checkstyle.xml
new file mode 100644
index 000000000..ed94bd8df
--- /dev/null
+++ b/etc/checkstyle.xml
@@ -0,0 +1,502 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml
index 90a08cde6..5726cb0a6 100644
--- a/integration-tests/pom.xml
+++ b/integration-tests/pom.xml
@@ -208,6 +208,10 @@
maven-checkstyle-plugin
+
+ com.github.gantsign.maven
+ ktlint-maven-plugin
+
org.apache.maven.plugins
maven-surefire-plugin
@@ -274,7 +278,6 @@
${it.s3mock.port_https}
${it.s3mock.port_http}
- random
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AclIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AclIT.kt
index 2347932b6..6573476d7 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AclIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AclIT.kt
@@ -36,47 +36,53 @@ internal class AclIT : S3TestBase() {
val sourceKey = UPLOAD_FILE_NAME
val bucketName = bucketName(testInfo)
- //create bucket that sets ownership to non-default to allow setting ACLs.
- s3Client.createBucket {
- it.bucket(bucketName)
- it.objectOwnership(ObjectOwnership.OBJECT_WRITER)
- }.also {
- assertThat(it.sdkHttpResponse().isSuccessful).isTrue()
- }
+ // create bucket that sets ownership to non-default to allow setting ACLs.
+ s3Client
+ .createBucket {
+ it.bucket(bucketName)
+ it.objectOwnership(ObjectOwnership.OBJECT_WRITER)
+ }.also {
+ assertThat(it.sdkHttpResponse().isSuccessful).isTrue()
+ }
givenObject(bucketName, sourceKey)
- s3Client.putObjectAcl {
- it.bucket(bucketName)
- it.key(sourceKey)
- it.acl(ObjectCannedACL.PRIVATE)
- }.also {
- assertThat(it.sdkHttpResponse().isSuccessful).isTrue()
- }
+ s3Client
+ .putObjectAcl {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ it.acl(ObjectCannedACL.PRIVATE)
+ }.also {
+ assertThat(it.sdkHttpResponse().isSuccessful).isTrue()
+ }
- s3Client.getObjectAcl {
- it.bucket(bucketName)
- it.key(sourceKey)
- }.also { resp ->
- assertThat(resp.sdkHttpResponse().isSuccessful).isTrue()
- assertThat(resp.owner().id()).isNotBlank()
- assertThat(resp.owner().displayName()).isNotBlank()
- assertThat(resp.grants()).hasSize(1)
- assertThat(resp.grants().first().permission()).isEqualTo(FULL_CONTROL)
- }
+ s3Client
+ .getObjectAcl {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ }.also { resp ->
+ assertThat(resp.sdkHttpResponse().isSuccessful).isTrue()
+ assertThat(resp.owner().id()).isNotBlank()
+ assertThat(resp.owner().displayName()).isNotBlank()
+ assertThat(resp.grants()).hasSize(1)
+ assertThat(resp.grants().first().permission()).isEqualTo(FULL_CONTROL)
+ }
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "Owner and Grantee not available on test AWS account.")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "Owner and Grantee not available on test AWS account.",
+ )
fun `get ACL returns canned 'private' ACL`(testInfo: TestInfo) {
val sourceKey = UPLOAD_FILE_NAME
val (bucketName, _) = givenBucketAndObject(testInfo, sourceKey)
- val acl = s3Client.getObjectAcl {
- it.bucket(bucketName)
- it.key(sourceKey)
- }
+ val acl =
+ s3Client.getObjectAcl {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ }
acl.owner().also { owner ->
assertThat(owner.id()).isEqualTo(DEFAULT_OWNER.id)
@@ -87,19 +93,25 @@ internal class AclIT : S3TestBase() {
assertThat(it).hasSize(1)
}
- acl.grants().first().also { grant ->
- assertThat(grant.permission()).isEqualTo(FULL_CONTROL)
- }.grantee().also { grantee ->
- assertThat(grantee).isNotNull
- assertThat(grantee.id()).isEqualTo(DEFAULT_OWNER.id)
- assertThat(grantee.displayName()).isEqualTo(DEFAULT_OWNER.displayName)
- assertThat(grantee.type()).isEqualTo(CANONICAL_USER)
- }
+ acl
+ .grants()
+ .first()
+ .also { grant ->
+ assertThat(grant.permission()).isEqualTo(FULL_CONTROL)
+ }.grantee()
+ .also { grantee ->
+ assertThat(grantee).isNotNull
+ assertThat(grantee.id()).isEqualTo(DEFAULT_OWNER.id)
+ assertThat(grantee.displayName()).isEqualTo(DEFAULT_OWNER.displayName)
+ assertThat(grantee.type()).isEqualTo(CANONICAL_USER)
+ }
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "Owner and Grantee not available on test AWS account.")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "Owner and Grantee not available on test AWS account.",
+ )
fun `put ACL returns OK, get ACL returns the ACL`(testInfo: TestInfo) {
val sourceKey = UPLOAD_FILE_NAME
val (bucketName, _) = givenBucketAndObject(testInfo, sourceKey)
@@ -116,22 +128,25 @@ internal class AclIT : S3TestBase() {
it.id(userId)
it.displayName(userName)
}
- it.grants(
- Grant.builder()
- .permission(FULL_CONTROL)
- .grantee {
- it.id(granteeId)
- it.displayName(granteeName)
- it.type(CANONICAL_USER)
- }.build()
- ).build()
+ it
+ .grants(
+ Grant
+ .builder()
+ .permission(FULL_CONTROL)
+ .grantee {
+ it.id(granteeId)
+ it.displayName(granteeName)
+ it.type(CANONICAL_USER)
+ }.build(),
+ ).build()
}
}
- val acl = s3Client.getObjectAcl {
- it.bucket(bucketName)
- it.key(sourceKey)
- }
+ val acl =
+ s3Client.getObjectAcl {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ }
acl.owner().also {
assertThat(it).isNotNull
assertThat(it.id()).isEqualTo(userId)
@@ -140,13 +155,16 @@ internal class AclIT : S3TestBase() {
assertThat(acl.grants()).hasSize(1)
- acl.grants()[0].also {
- assertThat(it.permission()).isEqualTo(FULL_CONTROL)
- }.grantee().also {
- assertThat(it).isNotNull
- assertThat(it.id()).isEqualTo(granteeId)
- assertThat(it.displayName()).isEqualTo(granteeName)
- assertThat(it.type()).isEqualTo(CANONICAL_USER)
- }
+ acl
+ .grants()[0]
+ .also {
+ assertThat(it.permission()).isEqualTo(FULL_CONTROL)
+ }.grantee()
+ .also {
+ assertThat(it).isNotNull
+ assertThat(it.id()).isEqualTo(granteeId)
+ assertThat(it.displayName()).isEqualTo(granteeName)
+ assertThat(it.type()).isEqualTo(CANONICAL_USER)
+ }
}
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AwsChunkedEncodingIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AwsChunkedEncodingIT.kt
index ebc99f306..83684e929 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AwsChunkedEncodingIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/AwsChunkedEncodingIT.kt
@@ -29,7 +29,6 @@ import software.amazon.awssdk.services.s3.model.ChecksumMode
* Chunked encoding with signing is only active in AWS SDK v2 when endpoint is http
*/
internal class AwsChunkedEncodingIT : S3TestBase() {
-
private val s3Client = createS3Client(serviceEndpointHttp, true)
/**
@@ -40,42 +39,45 @@ internal class AwsChunkedEncodingIT : S3TestBase() {
@Test
@S3VerifiedFailure(
year = 2023,
- reason = "Only works with http endpoints"
+ reason = "Only works with http endpoints",
)
fun `put object with checksum returns correct checksum, get object returns checksum`(testInfo: TestInfo) {
val bucket = givenBucket(testInfo)
- val expectedEtag = UPLOAD_FILE.inputStream().use {
- "\"${DigestUtil.hexDigest(it)}\""
- }
+ val expectedEtag =
+ UPLOAD_FILE.inputStream().use {
+ "\"${DigestUtil.hexDigest(it)}\""
+ }
val expectedChecksum = DigestUtil.checksumFor(UPLOAD_FILE_PATH, DefaultChecksumAlgorithm.SHA256)
- val putObjectResponse = s3Client.putObject(
- {
- it.bucket(bucket)
- it.key(UPLOAD_FILE_NAME)
- it.checksumAlgorithm(ChecksumAlgorithm.SHA256)
- },
- RequestBody.fromFile(UPLOAD_FILE)
- )
+ val putObjectResponse =
+ s3Client.putObject(
+ {
+ it.bucket(bucket)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumAlgorithm(ChecksumAlgorithm.SHA256)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ )
putObjectResponse.checksumSHA256().also { checksum ->
assertThat(checksum).isNotBlank()
assertThat(checksum).isEqualTo(expectedChecksum)
}
- s3Client.getObject {
- it.bucket(bucket)
- it.checksumMode(ChecksumMode.ENABLED)
- it.key(UPLOAD_FILE_NAME)
- }.use {
- assertThat(it.response().eTag()).isEqualTo(expectedEtag)
- assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
- assertThat(it.response().contentEncoding()).isNotEqualTo("aws-chunked")
- it.response().checksumSHA256().also { checksum ->
- assertThat(checksum).isNotBlank()
- assertThat(checksum).isEqualTo(expectedChecksum)
+ s3Client
+ .getObject {
+ it.bucket(bucket)
+ it.checksumMode(ChecksumMode.ENABLED)
+ it.key(UPLOAD_FILE_NAME)
+ }.use {
+ assertThat(it.response().eTag()).isEqualTo(expectedEtag)
+ assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
+ assertThat(it.response().contentEncoding()).isNotEqualTo("aws-chunked")
+ it.response().checksumSHA256().also { checksum ->
+ assertThat(checksum).isNotBlank()
+ assertThat(checksum).isEqualTo(expectedChecksum)
+ }
}
- }
}
/**
@@ -86,36 +88,38 @@ internal class AwsChunkedEncodingIT : S3TestBase() {
@Test
@S3VerifiedFailure(
year = 2023,
- reason = "Only works with http endpoints"
+ reason = "Only works with http endpoints",
)
fun `put object creates correct etag, get object returns etag`(testInfo: TestInfo) {
val bucket = givenBucket(testInfo)
- val expectedEtag = UPLOAD_FILE.inputStream().use {
- "\"${DigestUtil.hexDigest(it)}\""
- }
+ val expectedEtag =
+ UPLOAD_FILE.inputStream().use {
+ "\"${DigestUtil.hexDigest(it)}\""
+ }
s3Client.putObject(
{
it.bucket(bucket)
it.key(UPLOAD_FILE_NAME)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- s3Client.getObject {
- it.bucket(bucket)
- it.key(UPLOAD_FILE_NAME)
- }.use {
- assertThat(it.response().eTag()).isEqualTo(expectedEtag)
- assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
- assertThat(it.response().contentEncoding()).isNotEqualTo("aws-chunked")
- }
+ s3Client
+ .getObject {
+ it.bucket(bucket)
+ it.key(UPLOAD_FILE_NAME)
+ }.use {
+ assertThat(it.response().eTag()).isEqualTo(expectedEtag)
+ assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
+ assertThat(it.response().contentEncoding()).isNotEqualTo("aws-chunked")
+ }
}
@Test
@S3VerifiedFailure(
year = 2023,
- reason = "Only works with http endpoints"
+ reason = "Only works with http endpoints",
)
fun `put object creates correct content-encoding, get object returns content-encoding`(testInfo: TestInfo) {
val bucket = givenBucket(testInfo)
@@ -127,14 +131,15 @@ internal class AwsChunkedEncodingIT : S3TestBase() {
it.key(UPLOAD_FILE_NAME)
it.contentEncoding(customEncoding)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- s3Client.getObject {
- it.bucket(bucket)
- it.key(UPLOAD_FILE_NAME)
- }.use {
- assertThat(it.response().contentEncoding()).isEqualTo(customEncoding)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucket)
+ it.key(UPLOAD_FILE_NAME)
+ }.use {
+ assertThat(it.response().contentEncoding()).isEqualTo(customEncoding)
+ }
}
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketIT.kt
index 510752854..723370e39 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/BucketIT.kt
@@ -47,7 +47,6 @@ import java.util.concurrent.TimeUnit
* Test the application using the AmazonS3 SDK V2.
*/
internal class BucketIT : S3TestBase() {
-
private val s3Client: S3Client = createS3Client()
@Test
@@ -56,24 +55,26 @@ internal class BucketIT : S3TestBase() {
val bucketName = bucketName(testInfo)
s3Client.createBucket { it.bucket(bucketName) }
- val bucketCreatedResponse = s3Client
- .waiter()
- .waitUntilBucketExists { it.bucket(bucketName) }
- .matched()
- .response()
- .get()
+ val bucketCreatedResponse =
+ s3Client
+ .waiter()
+ .waitUntilBucketExists { it.bucket(bucketName) }
+ .matched()
+ .response()
+ .get()
assertThat(bucketCreatedResponse).isNotNull
- //does not throw exception if bucket exists.
+ // does not throw exception if bucket exists.
s3Client.headBucket { it.bucket(bucketName) }
s3Client.deleteBucket { it.bucket(bucketName) }
- val deletionException = s3Client
- .waiter()
- .waitUntilBucketNotExists { it.bucket(bucketName) }
- .matched()
- .exception()
- .get()
+ val deletionException =
+ s3Client
+ .waiter()
+ .waitUntilBucketNotExists { it.bucket(bucketName) }
+ .matched()
+ .exception()
+ .get()
assertThat(deletionException).isInstanceOf(NoSuchBucketException::class.java)
}
@@ -82,36 +83,40 @@ internal class BucketIT : S3TestBase() {
* Requests always fail claiming that the XML is not well-formed, even though it is generated by their own SDK...
*/
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "The XML you provided was not well-formed or did not validate against our published schema")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "The XML you provided was not well-formed or did not validate against our published schema",
+ )
fun `creating a bucket with configuration is successful`(testInfo: TestInfo) {
val bucketName = bucketName(testInfo)
- val createBucketResponse = s3Client.createBucket {
- it.bucket(bucketName)
- it.createBucketConfiguration { cfg ->
- cfg.locationConstraint("ap-southeast-5")
- cfg.bucket { b ->
- b.dataRedundancy(DataRedundancy.SINGLE_AVAILABILITY_ZONE)
- b.type(BucketType.DIRECTORY)
- }
- cfg.location { loc ->
- loc.name("SomeName")
- loc.type(LocationType.AVAILABILITY_ZONE)
+ val createBucketResponse =
+ s3Client.createBucket {
+ it.bucket(bucketName)
+ it.createBucketConfiguration { cfg ->
+ cfg.locationConstraint("ap-southeast-5")
+ cfg.bucket { b ->
+ b.dataRedundancy(DataRedundancy.SINGLE_AVAILABILITY_ZONE)
+ b.type(BucketType.DIRECTORY)
+ }
+ cfg.location { loc ->
+ loc.name("SomeName")
+ loc.type(LocationType.AVAILABILITY_ZONE)
+ }
}
}
- }
assertThat(createBucketResponse.sdkHttpResponse().statusCode()).isEqualTo(200)
assertThat(createBucketResponse.location()).isEqualTo("/$bucketName")
- val bucketCreatedResponse = s3Client
- .waiter()
- .waitUntilBucketExists { it.bucket(bucketName) }
- .matched()
- .response()
- .get()
+ val bucketCreatedResponse =
+ s3Client
+ .waiter()
+ .waitUntilBucketExists { it.bucket(bucketName) }
+ .matched()
+ .response()
+ .get()
assertThat(bucketCreatedResponse).isNotNull
- //does not throw exception if bucket exists.
+ // does not throw exception if bucket exists.
s3Client.headBucket { it.bucket(bucketName) }
}
@@ -130,13 +135,15 @@ internal class BucketIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Default owner does not exist in S3.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Default owner does not exist in S3.",
+ )
fun `creating and listing multiple buckets is successful`(testInfo: TestInfo) {
val bucketName = bucketName(testInfo)
- givenBucket("${bucketName}-1")
- givenBucket("${bucketName}-2")
- givenBucket("${bucketName}-3")
+ givenBucket("$bucketName-1")
+ givenBucket("$bucketName-2")
+ givenBucket("$bucketName-3")
// the returned creation date might strip off the millisecond-part, resulting in rounding down
// and account for a clock-skew in the Docker container of up to a minute.
val creationDate = Instant.now().minus(1, ChronoUnit.MINUTES)
@@ -150,9 +157,9 @@ internal class BucketIT : S3TestBase() {
"bucket-a",
"bucket-b",
// the buckets we created in this test
- "${bucketName}-1",
- "${bucketName}-2",
- "${bucketName}-3"
+ "$bucketName-1",
+ "$bucketName-2",
+ "$bucketName-3",
)
assertThat(it[2].creationDate()).isAfterOrEqualTo(creationDate)
assertThat(it[3].creationDate()).isAfterOrEqualTo(creationDate)
@@ -166,96 +173,106 @@ internal class BucketIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Default owner does not exist in S3.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Default owner does not exist in S3.",
+ )
fun `creating and listing multiple buckets limiting by prefix is successful`(testInfo: TestInfo) {
val bucketName = bucketName(testInfo)
- givenBucket("${bucketName}-1")
- givenBucket("${bucketName}-2")
- givenBucket("${bucketName}-3")
+ givenBucket("$bucketName-1")
+ givenBucket("$bucketName-2")
+ givenBucket("$bucketName-3")
// the returned creation date might strip off the millisecond-part, resulting in rounding down
// and account for a clock-skew in the Docker container of up to a minute.
val creationDate = Instant.now().minus(1, ChronoUnit.MINUTES)
- s3Client.listBuckets {
- it.prefix(bucketName)
- }.also {
- assertThat(it.hasBuckets()).isTrue
- it.buckets().also {
- assertThat(it.size).isEqualTo(3)
- assertThat(it.map { b -> b.name() }).containsExactly(
- // the buckets we created in this test
- "${bucketName}-1",
- "${bucketName}-2",
- "${bucketName}-3"
- )
- assertThat(it[0].creationDate()).isAfterOrEqualTo(creationDate)
- assertThat(it[1].creationDate()).isAfterOrEqualTo(creationDate)
- assertThat(it[2].creationDate()).isAfterOrEqualTo(creationDate)
+ s3Client
+ .listBuckets {
+ it.prefix(bucketName)
+ }.also {
+ assertThat(it.hasBuckets()).isTrue
+ it.buckets().also {
+ assertThat(it.size).isEqualTo(3)
+ assertThat(it.map { b -> b.name() }).containsExactly(
+ // the buckets we created in this test
+ "$bucketName-1",
+ "$bucketName-2",
+ "$bucketName-3",
+ )
+ assertThat(it[0].creationDate()).isAfterOrEqualTo(creationDate)
+ assertThat(it[1].creationDate()).isAfterOrEqualTo(creationDate)
+ assertThat(it[2].creationDate()).isAfterOrEqualTo(creationDate)
+ }
+ assertThat(it.prefix()).isEqualTo(bucketName)
+ assertThat(it.continuationToken()).isNull()
+ assertThat(it.owner().displayName()).isEqualTo("s3-mock-file-store")
+ assertThat(it.owner().id()).isEqualTo("79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be")
}
- assertThat(it.prefix()).isEqualTo(bucketName)
- assertThat(it.continuationToken()).isNull()
- assertThat(it.owner().displayName()).isEqualTo("s3-mock-file-store")
- assertThat(it.owner().id()).isEqualTo("79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be")
- }
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Default owner does not exist in S3.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Default owner does not exist in S3.",
+ )
fun `creating and listing multiple buckets limiting by maxBuckets is successful`(testInfo: TestInfo) {
val bucketName = bucketName(testInfo)
- givenBucket("${bucketName}-1")
- givenBucket("${bucketName}-2")
- givenBucket("${bucketName}-3")
+ givenBucket("$bucketName-1")
+ givenBucket("$bucketName-2")
+ givenBucket("$bucketName-3")
// the returned creation date might strip off the millisecond-part, resulting in rounding down
// and account for a clock-skew in the Docker container of up to a minute.
val creationDate = Instant.now().minus(1, ChronoUnit.MINUTES)
- val continuationToken = s3Client.listBuckets {
- it.maxBuckets(4)
- }.also {
- assertThat(it.hasBuckets()).isTrue
- it.buckets().also {
- assertThat(it.size).isEqualTo(4)
- assertThat(it.map { b -> b.name() }).containsExactly(
- // the default buckets
- "bucket-a",
- "bucket-b",
- // the buckets we created in this test
- "${bucketName}-1",
- "${bucketName}-2"
- )
- assertThat(it[2].creationDate()).isAfterOrEqualTo(creationDate)
- assertThat(it[3].creationDate()).isAfterOrEqualTo(creationDate)
- }
- assertThat(it.prefix()).isNull()
- assertThat(it.continuationToken()).isNotNull
- assertThat(it.owner().displayName()).isEqualTo("s3-mock-file-store")
- assertThat(it.owner().id()).isEqualTo("79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be")
- }.continuationToken()
-
- s3Client.listBuckets {
- it.continuationToken(continuationToken)
- }.also {
- assertThat(it.hasBuckets()).isTrue
- it.buckets().also {
- assertThat(it.size).isEqualTo(1)
- assertThat(it.map { b -> b.name() }).containsExactly(
- "${bucketName}-3"
- )
- assertThat(it[0].creationDate()).isAfterOrEqualTo(creationDate)
+ val continuationToken =
+ s3Client
+ .listBuckets {
+ it.maxBuckets(4)
+ }.also {
+ assertThat(it.hasBuckets()).isTrue
+ it.buckets().also {
+ assertThat(it.size).isEqualTo(4)
+ assertThat(it.map { b -> b.name() }).containsExactly(
+ // the default buckets
+ "bucket-a",
+ "bucket-b",
+ // the buckets we created in this test
+ "$bucketName-1",
+ "$bucketName-2",
+ )
+ assertThat(it[2].creationDate()).isAfterOrEqualTo(creationDate)
+ assertThat(it[3].creationDate()).isAfterOrEqualTo(creationDate)
+ }
+ assertThat(it.prefix()).isNull()
+ assertThat(it.continuationToken()).isNotNull
+ assertThat(it.owner().displayName()).isEqualTo("s3-mock-file-store")
+ assertThat(it.owner().id()).isEqualTo("79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be")
+ }.continuationToken()
+
+ s3Client
+ .listBuckets {
+ it.continuationToken(continuationToken)
+ }.also {
+ assertThat(it.hasBuckets()).isTrue
+ it.buckets().also {
+ assertThat(it.size).isEqualTo(1)
+ assertThat(it.map { b -> b.name() }).containsExactly(
+ "$bucketName-3",
+ )
+ assertThat(it[0].creationDate()).isAfterOrEqualTo(creationDate)
+ }
+ assertThat(it.prefix()).isNull()
+ assertThat(it.continuationToken()).isNull()
+ assertThat(it.owner().displayName()).isEqualTo("s3-mock-file-store")
+ assertThat(it.owner().id()).isEqualTo("79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be")
}
- assertThat(it.prefix()).isNull()
- assertThat(it.continuationToken()).isNull()
- assertThat(it.owner().displayName()).isEqualTo("s3-mock-file-store")
- assertThat(it.owner().id()).isEqualTo("79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be")
- }
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Default buckets do not exist in S3.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Default buckets do not exist in S3.",
+ )
fun `default buckets were created`(testInfo: TestInfo) {
s3Client.listBuckets().also {
assertThat(it.buckets())
@@ -279,12 +296,13 @@ internal class BucketIT : S3TestBase() {
fun `by default, bucket versioning is turned off`(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
- s3Client.getBucketVersioning {
- it.bucket(bucketName)
- }.also {
- assertThat(it.status()).isNull()
- assertThat(it.mfaDelete()).isNull()
- }
+ s3Client
+ .getBucketVersioning {
+ it.bucket(bucketName)
+ }.also {
+ assertThat(it.status()).isNull()
+ assertThat(it.mfaDelete()).isNull()
+ }
}
@Test
@@ -298,11 +316,12 @@ internal class BucketIT : S3TestBase() {
}
}
- s3Client.getBucketVersioning {
- it.bucket(bucketName)
- }.also {
- assertThat(it.status()).isEqualTo(BucketVersioningStatus.ENABLED)
- }
+ s3Client
+ .getBucketVersioning {
+ it.bucket(bucketName)
+ }.also {
+ assertThat(it.status()).isEqualTo(BucketVersioningStatus.ENABLED)
+ }
}
@Test
@@ -316,11 +335,12 @@ internal class BucketIT : S3TestBase() {
}
}
- s3Client.getBucketVersioning {
- it.bucket(bucketName)
- }.also {
- assertThat(it.status()).isEqualTo(BucketVersioningStatus.ENABLED)
- }
+ s3Client
+ .getBucketVersioning {
+ it.bucket(bucketName)
+ }.also {
+ assertThat(it.status()).isEqualTo(BucketVersioningStatus.ENABLED)
+ }
s3Client.putBucketVersioning {
it.bucket(bucketName)
@@ -329,11 +349,12 @@ internal class BucketIT : S3TestBase() {
}
}
- s3Client.getBucketVersioning {
- it.bucket(bucketName)
- }.also {
- assertThat(it.status()).isEqualTo(BucketVersioningStatus.SUSPENDED)
- }
+ s3Client
+ .getBucketVersioning {
+ it.bucket(bucketName)
+ }.also {
+ assertThat(it.status()).isEqualTo(BucketVersioningStatus.SUSPENDED)
+ }
}
@Test
@@ -349,12 +370,13 @@ internal class BucketIT : S3TestBase() {
}
}
- s3Client.getBucketVersioning {
- it.bucket(bucketName)
- }.also {
- assertThat(it.status()).isEqualTo(BucketVersioningStatus.ENABLED)
- assertThat(it.mfaDelete()).isEqualTo(MFADeleteStatus.ENABLED)
- }
+ s3Client
+ .getBucketVersioning {
+ it.bucket(bucketName)
+ }.also {
+ assertThat(it.status()).isEqualTo(BucketVersioningStatus.ENABLED)
+ assertThat(it.mfaDelete()).isEqualTo(MFADeleteStatus.ENABLED)
+ }
}
@Test
@@ -363,18 +385,18 @@ internal class BucketIT : S3TestBase() {
val bucketName = bucketName(testInfo)
s3Client.createBucket { it.bucket(bucketName) }
- val createdResponse = s3Client
- .waiter()
- .waitUntilBucketExists { it.bucket(bucketName) }
- .matched()
- .response()
- .get()
+ val createdResponse =
+ s3Client
+ .waiter()
+ .waitUntilBucketExists { it.bucket(bucketName) }
+ .matched()
+ .response()
+ .get()
assertThat(createdResponse).isNotNull
assertThatThrownBy {
s3Client.createBucket { it.bucket(bucketName) }
- }
- .isInstanceOf(AwsServiceException::class.java)
+ }.isInstanceOf(AwsServiceException::class.java)
.hasMessageContaining("Service: S3, Status Code: 409")
.asInstanceOf(InstanceOfAssertFactories.type(AwsServiceException::class.java))
.extracting(AwsServiceException::awsErrorDetails)
@@ -402,18 +424,18 @@ internal class BucketIT : S3TestBase() {
}
s3Client.deleteBucket { it.bucket(bucketName) }
- val deletionException = s3Client
- .waiter()
- .waitUntilBucketNotExists { it.bucket(bucketName) }
- .matched()
- .exception()
- .get()
+ val deletionException =
+ s3Client
+ .waiter()
+ .waitUntilBucketNotExists { it.bucket(bucketName) }
+ .matched()
+ .exception()
+ .get()
assertThat(deletionException).isInstanceOf(NoSuchBucketException::class.java)
assertThatThrownBy {
s3Client.deleteBucket(DeleteBucketRequest.builder().bucket(bucketName).build())
- }
- .isInstanceOf(AwsServiceException::class.java)
+ }.isInstanceOf(AwsServiceException::class.java)
.hasMessageContaining("Service: S3, Status Code: 404")
.asInstanceOf(InstanceOfAssertFactories.type(AwsServiceException::class.java))
.extracting(AwsServiceException::awsErrorDetails)
@@ -427,17 +449,18 @@ internal class BucketIT : S3TestBase() {
val bucketName = bucketName(testInfo)
s3Client.createBucket { it.bucket(bucketName) }
- val bucketCreatedResponse = s3Client
- .waiter()
- .waitUntilBucketExists { it.bucket(bucketName) }
- .matched()
- .response()!!.get()
+ val bucketCreatedResponse =
+ s3Client
+ .waiter()
+ .waitUntilBucketExists { it.bucket(bucketName) }
+ .matched()
+ .response()!!
+ .get()
assertThat(bucketCreatedResponse).isNotNull
assertThatThrownBy {
s3Client.getBucketLifecycleConfiguration { it.bucket(bucketName) }
- }
- .isInstanceOf(AwsServiceException::class.java)
+ }.isInstanceOf(AwsServiceException::class.java)
.hasMessageContaining("Service: S3, Status Code: 404")
.asInstanceOf(InstanceOfAssertFactories.type(AwsServiceException::class.java))
.extracting(AwsServiceException::awsErrorDetails)
@@ -451,36 +474,36 @@ internal class BucketIT : S3TestBase() {
val bucketName = bucketName(testInfo)
s3Client.createBucket { it.bucket(bucketName) }
- val createdResponse = s3Client
- .waiter()
- .waitUntilBucketExists { it.bucket(bucketName) }
- .matched()
- .response()!!.get()
+ val createdResponse =
+ s3Client
+ .waiter()
+ .waitUntilBucketExists { it.bucket(bucketName) }
+ .matched()
+ .response()!!
+ .get()
assertThat(createdResponse).isNotNull
- val configuration = BucketLifecycleConfiguration
- .builder()
- .rules(
- LifecycleRule
- .builder()
- .id(bucketName)
- .abortIncompleteMultipartUpload(
- AbortIncompleteMultipartUpload
- .builder()
- .daysAfterInitiation(2)
- .build()
- )
- .expiration(
- LifecycleExpiration
- .builder()
- .days(2)
- .build()
- )
- .filter(LifecycleRuleFilter.fromPrefix("myprefix/"))
- .status(ExpirationStatus.ENABLED)
- .build()
- )
- .build()
+ val configuration =
+ BucketLifecycleConfiguration
+ .builder()
+ .rules(
+ LifecycleRule
+ .builder()
+ .id(bucketName)
+ .abortIncompleteMultipartUpload(
+ AbortIncompleteMultipartUpload
+ .builder()
+ .daysAfterInitiation(2)
+ .build(),
+ ).expiration(
+ LifecycleExpiration
+ .builder()
+ .days(2)
+ .build(),
+ ).filter(LifecycleRuleFilter.fromPrefix("myprefix/"))
+ .status(ExpirationStatus.ENABLED)
+ .build(),
+ ).build()
s3Client.putBucketLifecycleConfiguration {
it.bucket(bucketName)
@@ -495,17 +518,15 @@ internal class BucketIT : S3TestBase() {
assertThat(it.sdkHttpResponse().statusCode()).isEqualTo(204)
}
-
// give AWS time to actually delete the lifecycleConfiguration, otherwise the following call
// will not fail as expected...
TimeUnit.SECONDS.sleep(3)
assertThatThrownBy {
s3Client.getBucketLifecycleConfiguration(
- GetBucketLifecycleConfigurationRequest.builder().bucket(bucketName).build()
+ GetBucketLifecycleConfigurationRequest.builder().bucket(bucketName).build(),
)
- }
- .isInstanceOf(AwsServiceException::class.java)
+ }.isInstanceOf(AwsServiceException::class.java)
.hasMessageContaining("Service: S3, Status Code: 404")
.asInstanceOf(InstanceOfAssertFactories.type(AwsServiceException::class.java))
.extracting(AwsServiceException::awsErrorDetails)
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ConcurrencyIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ConcurrencyIT.kt
index 990d35f45..aba266810 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ConcurrencyIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ConcurrencyIT.kt
@@ -36,7 +36,7 @@ internal class ConcurrencyIT : S3TestBase() {
@Test
@S3VerifiedFailure(
year = 2022,
- reason = "No need to test S3 concurrency."
+ reason = "No need to test S3 concurrency.",
)
fun `concurrent bucket puts, gets and deletes are successful`(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
@@ -54,31 +54,38 @@ internal class ConcurrencyIT : S3TestBase() {
private val DONE = AtomicInteger(0)
}
- inner class Runner(val bucketName: String, val key: String) : Callable {
+ inner class Runner(
+ val bucketName: String,
+ val key: String,
+ ) : Callable {
override fun call(): Boolean {
LATCH.countDown()
- s3Client.putObject(
- {
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(key)
+ },
+ RequestBody.empty(),
+ ).let { response ->
+ assertThat(response.eTag()).isNotBlank
+ }
+
+ s3Client
+ .getObject {
it.bucket(bucketName)
it.key(key)
- }, RequestBody.empty()
- ).let { response ->
- assertThat(response.eTag()).isNotBlank
- }
-
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- }.use {
- assertThat(it.response().eTag()).isNotBlank
- }
+ }.use {
+ assertThat(it.response().eTag()).isNotBlank
+ }
- s3Client.deleteObject {
- it.bucket(bucketName)
- it.key(key)
- }.let { response ->
- assertThat(response.deleteMarker()).isTrue
- }
+ s3Client
+ .deleteObject {
+ it.bucket(bucketName)
+ it.key(key)
+ }.let { response ->
+ assertThat(response.deleteMarker()).isTrue
+ }
DONE.incrementAndGet()
return true
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectIT.kt
index cb849c4d0..335d9355b 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CopyObjectIT.kt
@@ -40,7 +40,6 @@ import java.util.concurrent.TimeUnit
* Test the application using the AmazonS3 SDK V2.
*/
internal class CopyObjectIT : S3TestBase() {
-
private val s3Client: S3Client = createS3Client()
private val transferManager = createTransferManager()
@@ -52,22 +51,26 @@ internal class CopyObjectIT : S3TestBase() {
val destinationBucketName = givenBucket()
val destinationKey = "copyOf/$sourceKey"
- s3Client.copyObject {
- it.sourceBucket(bucketName)
- it.sourceKey(sourceKey)
- it.destinationBucket(destinationBucketName)
- it.destinationKey(destinationKey)
- }.copyObjectResult().eTag().let { eTag ->
- assertThat(eTag).isEqualTo(putObjectResult.eTag())
- }
+ s3Client
+ .copyObject {
+ it.sourceBucket(bucketName)
+ it.sourceKey(sourceKey)
+ it.destinationBucket(destinationBucketName)
+ it.destinationKey(destinationKey)
+ }.copyObjectResult()
+ .eTag()
+ .let { eTag ->
+ assertThat(eTag).isEqualTo(putObjectResult.eTag())
+ }
- s3Client.getObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.use {
- val copiedDigest = DigestUtil.hexDigest(it)
- assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
- }
+ s3Client
+ .getObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.use {
+ val copiedDigest = DigestUtil.hexDigest(it)
+ assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
+ }
}
@Test
@@ -78,29 +81,35 @@ internal class CopyObjectIT : S3TestBase() {
val destinationBucketName = givenBucket()
val destinationKey = "copyOf/$sourceKey"
- val putObjectResult = s3Client.putObject(
- {
- it.bucket(bucketName)
- it.key(sourceKey)
- }, RequestBody.fromFile(UPLOAD_FILE)
- )
-
- s3Client.copyObject {
- it.sourceBucket(bucketName)
- it.sourceKey(sourceKey)
- it.destinationBucket(destinationBucketName)
- it.destinationKey(destinationKey)
- }.copyObjectResult().eTag().let { eTag ->
- assertThat(eTag).isEqualTo(putObjectResult.eTag())
- }
+ val putObjectResult =
+ s3Client.putObject(
+ {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ )
+
+ s3Client
+ .copyObject {
+ it.sourceBucket(bucketName)
+ it.sourceKey(sourceKey)
+ it.destinationBucket(destinationBucketName)
+ it.destinationKey(destinationKey)
+ }.copyObjectResult()
+ .eTag()
+ .let { eTag ->
+ assertThat(eTag).isEqualTo(putObjectResult.eTag())
+ }
- s3Client.getObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.use {
- val copiedDigest = DigestUtil.hexDigest(it)
- assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
- }
+ s3Client
+ .getObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.use {
+ val copiedDigest = DigestUtil.hexDigest(it)
+ assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
+ }
}
@Test
@@ -113,23 +122,27 @@ internal class CopyObjectIT : S3TestBase() {
val matchingEtag = putObjectResult.eTag()
- s3Client.copyObject {
- it.sourceBucket(bucketName)
- it.sourceKey(sourceKey)
- it.destinationBucket(destinationBucketName)
- it.destinationKey(destinationKey)
- it.copySourceIfMatch(matchingEtag)
- }.copyObjectResult().eTag().let { eTag ->
- assertThat(eTag).isEqualTo(putObjectResult.eTag())
- }
+ s3Client
+ .copyObject {
+ it.sourceBucket(bucketName)
+ it.sourceKey(sourceKey)
+ it.destinationBucket(destinationBucketName)
+ it.destinationKey(destinationKey)
+ it.copySourceIfMatch(matchingEtag)
+ }.copyObjectResult()
+ .eTag()
+ .let { eTag ->
+ assertThat(eTag).isEqualTo(putObjectResult.eTag())
+ }
- s3Client.getObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.use {
- val copiedDigest = DigestUtil.hexDigest(it)
- assertThat("\"$copiedDigest\"").isEqualTo(matchingEtag)
- }
+ s3Client
+ .getObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.use {
+ val copiedDigest = DigestUtil.hexDigest(it)
+ assertThat("\"$copiedDigest\"").isEqualTo(matchingEtag)
+ }
}
@Test
@@ -143,24 +156,28 @@ internal class CopyObjectIT : S3TestBase() {
val matchingEtag = putObjectResult.eTag()
- s3Client.copyObject {
- it.sourceBucket(bucketName)
- it.sourceKey(sourceKey)
- it.destinationBucket(destinationBucketName)
- it.destinationKey(destinationKey)
- it.copySourceIfMatch(matchingEtag)
- it.copySourceIfUnmodifiedSince(now)
- }.copyObjectResult().eTag().let { eTag ->
- assertThat(eTag).isEqualTo(putObjectResult.eTag())
- }
+ s3Client
+ .copyObject {
+ it.sourceBucket(bucketName)
+ it.sourceKey(sourceKey)
+ it.destinationBucket(destinationBucketName)
+ it.destinationKey(destinationKey)
+ it.copySourceIfMatch(matchingEtag)
+ it.copySourceIfUnmodifiedSince(now)
+ }.copyObjectResult()
+ .eTag()
+ .let { eTag ->
+ assertThat(eTag).isEqualTo(putObjectResult.eTag())
+ }
- s3Client.getObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.use {
- val copiedDigest = DigestUtil.hexDigest(it)
- assertThat("\"$copiedDigest\"").isEqualTo(matchingEtag)
- }
+ s3Client
+ .getObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.use {
+ val copiedDigest = DigestUtil.hexDigest(it)
+ assertThat("\"$copiedDigest\"").isEqualTo(matchingEtag)
+ }
}
@Test
@@ -174,23 +191,27 @@ internal class CopyObjectIT : S3TestBase() {
val matchingEtag = putObjectResult.eTag()
- s3Client.copyObject {
- it.sourceBucket(bucketName)
- it.sourceKey(sourceKey)
- it.destinationBucket(destinationBucketName)
- it.destinationKey(destinationKey)
- it.copySourceIfModifiedSince(now)
- }.copyObjectResult().eTag().let { eTag ->
- assertThat(eTag).isEqualTo(putObjectResult.eTag())
- }
+ s3Client
+ .copyObject {
+ it.sourceBucket(bucketName)
+ it.sourceKey(sourceKey)
+ it.destinationBucket(destinationBucketName)
+ it.destinationKey(destinationKey)
+ it.copySourceIfModifiedSince(now)
+ }.copyObjectResult()
+ .eTag()
+ .let { eTag ->
+ assertThat(eTag).isEqualTo(putObjectResult.eTag())
+ }
- s3Client.getObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.use {
- val copiedDigest = DigestUtil.hexDigest(it)
- assertThat("\"$copiedDigest\"").isEqualTo(matchingEtag)
- }
+ s3Client
+ .getObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.use {
+ val copiedDigest = DigestUtil.hexDigest(it)
+ assertThat("\"$copiedDigest\"").isEqualTo(matchingEtag)
+ }
}
@Test
@@ -213,8 +234,7 @@ internal class CopyObjectIT : S3TestBase() {
it.copySourceIfModifiedSince(now)
it.copySourceIfNoneMatch(matchingEtag)
}
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 412")
.hasMessageContaining(PRECONDITION_FAILED.message)
}
@@ -231,23 +251,27 @@ internal class CopyObjectIT : S3TestBase() {
val matchingEtag = putObjectResult.eTag()
- s3Client.copyObject {
- it.sourceBucket(bucketName)
- it.sourceKey(sourceKey)
- it.destinationBucket(destinationBucketName)
- it.destinationKey(destinationKey)
- it.copySourceIfUnmodifiedSince(now)
- }.copyObjectResult().eTag().let { eTag ->
- assertThat(eTag).isEqualTo(putObjectResult.eTag())
- }
+ s3Client
+ .copyObject {
+ it.sourceBucket(bucketName)
+ it.sourceKey(sourceKey)
+ it.destinationBucket(destinationBucketName)
+ it.destinationKey(destinationKey)
+ it.copySourceIfUnmodifiedSince(now)
+ }.copyObjectResult()
+ .eTag()
+ .let { eTag ->
+ assertThat(eTag).isEqualTo(putObjectResult.eTag())
+ }
- s3Client.getObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.use {
- val copiedDigest = DigestUtil.hexDigest(it)
- assertThat("\"$copiedDigest\"").isEqualTo(matchingEtag)
- }
+ s3Client
+ .getObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.use {
+ val copiedDigest = DigestUtil.hexDigest(it)
+ assertThat("\"$copiedDigest\"").isEqualTo(matchingEtag)
+ }
}
@Test
@@ -259,23 +283,27 @@ internal class CopyObjectIT : S3TestBase() {
val destinationKey = "copyOf/$sourceKey"
val noneMatchingEtag = "\"${randomName}\""
- s3Client.copyObject {
- it.sourceBucket(bucketName)
- it.sourceKey(sourceKey)
- it.destinationBucket(destinationBucketName)
- it.destinationKey(destinationKey)
- it.copySourceIfNoneMatch(noneMatchingEtag)
- }.copyObjectResult().eTag().let { eTag ->
- assertThat(eTag).isEqualTo(putObjectResult.eTag())
- }
+ s3Client
+ .copyObject {
+ it.sourceBucket(bucketName)
+ it.sourceKey(sourceKey)
+ it.destinationBucket(destinationBucketName)
+ it.destinationKey(destinationKey)
+ it.copySourceIfNoneMatch(noneMatchingEtag)
+ }.copyObjectResult()
+ .eTag()
+ .let { eTag ->
+ assertThat(eTag).isEqualTo(putObjectResult.eTag())
+ }
- s3Client.getObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.use {
- val copiedDigest = DigestUtil.hexDigest(it)
- assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
- }
+ s3Client
+ .getObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.use {
+ val copiedDigest = DigestUtil.hexDigest(it)
+ assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
+ }
}
@Test
@@ -295,8 +323,7 @@ internal class CopyObjectIT : S3TestBase() {
it.destinationKey(destinationKey)
it.copySourceIfMatch(noneMatchingEtag)
}
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 412")
.hasMessageContaining(PRECONDITION_FAILED.message)
}
@@ -318,8 +345,7 @@ internal class CopyObjectIT : S3TestBase() {
it.destinationKey(destinationKey)
it.copySourceIfNoneMatch(matchingEtag)
}
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 412")
.hasMessageContaining(PRECONDITION_FAILED.message)
}
@@ -329,18 +355,21 @@ internal class CopyObjectIT : S3TestBase() {
fun `copy object succeeds with same bucket and key with REPLACE and changing metadata`(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
val sourceKey = UPLOAD_FILE_NAME
- val putObjectResult = s3Client.putObject(
- {
- it.bucket(bucketName)
- it.key(sourceKey)
- it.metadata(mapOf("test-key" to "test-value"))
- },
- RequestBody.fromFile(UPLOAD_FILE)
- )
- val sourceLastModified = s3Client.headObject {
- it.bucket(bucketName)
- it.key(sourceKey)
- }.lastModified()
+ val putObjectResult =
+ s3Client.putObject(
+ {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ it.metadata(mapOf("test-key" to "test-value"))
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ )
+ val sourceLastModified =
+ s3Client
+ .headObject {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ }.lastModified()
await("wait until source object is 5 seconds old").until {
sourceLastModified.plusSeconds(5).isBefore(Instant.now())
@@ -355,25 +384,26 @@ internal class CopyObjectIT : S3TestBase() {
it.metadataDirective(MetadataDirective.REPLACE)
}
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(sourceKey)
- }.use {
- val response = it.response()
- val copiedObjectMetadata = response.metadata()
- assertThat(copiedObjectMetadata["test-key2"]).isEqualTo("test-value2")
- assertThat(copiedObjectMetadata["test-key"]).isNull()
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ }.use {
+ val response = it.response()
+ val copiedObjectMetadata = response.metadata()
+ assertThat(copiedObjectMetadata["test-key2"]).isEqualTo("test-value2")
+ assertThat(copiedObjectMetadata["test-key"]).isNull()
- val length = response.contentLength()
- assertThat(length).isEqualTo(UPLOAD_FILE_LENGTH)
+ val length = response.contentLength()
+ assertThat(length).isEqualTo(UPLOAD_FILE_LENGTH)
- val copiedDigest = DigestUtil.hexDigest(it)
- assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
+ val copiedDigest = DigestUtil.hexDigest(it)
+ assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
- //we waited for 5 seconds above, so last modified dates should be about 5 seconds apart
- val between = Duration.between(sourceLastModified, response.lastModified())
- assertThat(between).isCloseTo(Duration.of(5, SECONDS), Duration.of(1, SECONDS))
- }
+ // we waited for 5 seconds above, so last modified dates should be about 5 seconds apart
+ val between = Duration.between(sourceLastModified, response.lastModified())
+ assertThat(between).isCloseTo(Duration.of(5, SECONDS), Duration.of(1, SECONDS))
+ }
}
@Test
@@ -388,13 +418,15 @@ internal class CopyObjectIT : S3TestBase() {
it.key(sourceKey)
it.metadata(mapOf("test-key" to "test-value"))
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- val sourceLastModified = s3Client.headObject {
- it.bucket(bucketName)
- it.key(sourceKey)
- }.lastModified()
+ val sourceLastModified =
+ s3Client
+ .headObject {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ }.lastModified()
await("wait until source object is 5 seconds old").until {
sourceLastModified.plusSeconds(5).isBefore(Instant.now())
@@ -409,7 +441,9 @@ internal class CopyObjectIT : S3TestBase() {
}
}.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 400")
- .hasMessageContaining("This copy request is illegal because it is trying to copy an object to itself without changing the object's metadata, storage class, website redirect location or encryption attributes.")
+ .hasMessageContaining(
+ "This copy request is illegal because it is trying to copy an object to itself without changing the object's metadata, storage class, website redirect location or encryption attributes.",
+ )
}
@Test
@@ -422,14 +456,15 @@ internal class CopyObjectIT : S3TestBase() {
val metadata = mapOf("test-key2" to "test-value2")
- val putObjectResult = s3Client.putObject(
- {
- it.bucket(bucketName)
- it.key(sourceKey)
- it.metadata(metadata)
- },
- RequestBody.fromFile(UPLOAD_FILE)
- )
+ val putObjectResult =
+ s3Client.putObject(
+ {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ it.metadata(metadata)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ )
s3Client.copyObject {
it.sourceBucket(bucketName)
@@ -438,14 +473,15 @@ internal class CopyObjectIT : S3TestBase() {
it.destinationKey(destinationKey)
}
- s3Client.getObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.use {
- val copiedDigest = DigestUtil.hexDigest(it)
- assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
- assertThat(it.response().metadata()).isEqualTo(metadata)
- }
+ s3Client
+ .getObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.use {
+ val copiedDigest = DigestUtil.hexDigest(it)
+ assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
+ assertThat(it.response().metadata()).isEqualTo(metadata)
+ }
}
@Test
@@ -467,14 +503,15 @@ internal class CopyObjectIT : S3TestBase() {
it.metadataDirective(MetadataDirective.REPLACE)
}
- s3Client.getObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.use {
- val copiedDigest = DigestUtil.hexDigest(it)
- assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
- assertThat(it.response().metadata()).isEqualTo(metadata)
- }
+ s3Client
+ .getObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.use {
+ val copiedDigest = DigestUtil.hexDigest(it)
+ assertThat("\"$copiedDigest\"").isEqualTo(putObjectResult.eTag())
+ assertThat(it.response().metadata()).isEqualTo(metadata)
+ }
}
@Test
@@ -489,7 +526,7 @@ internal class CopyObjectIT : S3TestBase() {
it.key(sourceKey)
it.storageClass(StorageClass.REDUCED_REDUNDANCY)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
val destinationBucketName = givenBucket()
@@ -499,17 +536,19 @@ internal class CopyObjectIT : S3TestBase() {
it.sourceBucket(bucketName)
it.sourceKey(sourceKey)
it.destinationBucket(destinationBucketName)
- it.destinationKey(destinationKey)
- //must set storage class other than "STANDARD" to it gets applied.
+ it
+ .destinationKey(destinationKey)
+ // must set storage class other than "STANDARD" to it gets applied.
.storageClass(StorageClass.STANDARD_IA)
}
- s3Client.getObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.use {
- assertThat(it.response().storageClass()).isEqualTo(StorageClass.STANDARD_IA)
- }
+ s3Client
+ .getObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.use {
+ assertThat(it.response().storageClass()).isEqualTo(StorageClass.STANDARD_IA)
+ }
}
@Test
@@ -524,7 +563,7 @@ internal class CopyObjectIT : S3TestBase() {
it.key(sourceKey)
it.contentDisposition("")
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
val destinationBucketName = givenBucket()
@@ -539,17 +578,20 @@ internal class CopyObjectIT : S3TestBase() {
it.contentDisposition("attachment")
}
- s3Client.getObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.use {
- assertThat(it.response().contentDisposition()).isEqualTo("attachment")
- }
+ s3Client
+ .getObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.use {
+ assertThat(it.response().contentDisposition()).isEqualTo("attachment")
+ }
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Requests specifying Server Side Encryption with Customer provided keys must provide a valid encryption algorithm")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Requests specifying Server Side Encryption with Customer provided keys must provide a valid encryption algorithm",
+ )
fun `copy object succeeds with encryption`(testInfo: TestInfo) {
val sourceKey = UPLOAD_FILE_NAME
val (bucketName, putObjectResponse) = givenBucketAndObject(testInfo, sourceKey)
@@ -564,12 +606,13 @@ internal class CopyObjectIT : S3TestBase() {
it.sseCustomerKey(TEST_ENC_KEY_ID)
}
- s3Client.headObject {
- it.bucket(destinationBucketName)
- it.key(destinationKey)
- }.also {
- assertThat(it.eTag()).isEqualTo(putObjectResponse.eTag())
- }
+ s3Client
+ .headObject {
+ it.bucket(destinationBucketName)
+ it.key(destinationKey)
+ }.also {
+ assertThat(it.eTag()).isEqualTo(putObjectResponse.eTag())
+ }
}
@Test
@@ -617,32 +660,42 @@ internal class CopyObjectIT : S3TestBase() {
@Test
@S3VerifiedSuccess(year = 2025)
fun `copy object with transfermanager succeeds`(testInfo: TestInfo) {
- //content larger than default part threshold of 8MiB
- val contentLen = 20 * _1MB
+ // content larger than default part threshold of 8MiB
+ val contentLen = 20 * ONE_MB
val sourceKey = UPLOAD_FILE_NAME
val bucketName = givenBucket(testInfo)
val destinationBucketName = givenBucket()
val destinationKey = "copyOf/$sourceKey"
- val upload = transferManager.upload {
- it.putObjectRequest {
- it.key(sourceKey)
- it.bucket(bucketName)
- }
- it.requestBody(AsyncRequestBody.fromInputStream(randomInputStream(contentLen),
- contentLen.toLong(),
- Executors.newFixedThreadPool(10)))
- }.completionFuture().join()
-
- transferManager.copy {
- it.copyObjectRequest {
- it.sourceBucket(bucketName)
- it.sourceKey(sourceKey)
- it.destinationBucket(destinationBucketName)
- it.destinationKey(destinationKey)
+ val upload =
+ transferManager
+ .upload {
+ it.putObjectRequest {
+ it.key(sourceKey)
+ it.bucket(bucketName)
+ }
+ it.requestBody(
+ AsyncRequestBody.fromInputStream(
+ randomInputStream(contentLen),
+ contentLen.toLong(),
+ Executors.newFixedThreadPool(10),
+ ),
+ )
+ }.completionFuture()
+ .join()
+
+ transferManager
+ .copy {
+ it.copyObjectRequest {
+ it.sourceBucket(bucketName)
+ it.sourceKey(sourceKey)
+ it.destinationBucket(destinationBucketName)
+ it.destinationKey(destinationKey)
+ }
+ }.completionFuture()
+ .join()
+ .also {
+ assertThat(it.response().copyObjectResult().eTag()).isEqualTo(upload.response().eTag())
}
- }.completionFuture().join().also {
- assertThat(it.response().copyObjectResult().eTag()).isEqualTo(upload.response().eTag())
- }
}
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CorsIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CorsIT.kt
index 6a121d921..78b8ef384 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CorsIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CorsIT.kt
@@ -27,7 +27,6 @@ import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInfo
import java.util.UUID
-
/**
* Test the application using the AmazonS3 SDK V2.
*/
@@ -35,23 +34,27 @@ internal class CorsIT : S3TestBase() {
private val httpClient: CloseableHttpClient = createHttpClient()
@Test
- @S3VerifiedFailure(year = 2024,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2024,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun testPutObject_cors(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
- val optionsRequest = HttpOptions("$serviceEndpoint/$bucketName/testObjectName").apply {
- addHeader("Origin", "http://localhost/")
- }
+ val optionsRequest =
+ HttpOptions("$serviceEndpoint/$bucketName/testObjectName").apply {
+ addHeader("Origin", "http://localhost/")
+ }
httpClient.execute(optionsRequest).also {
assertThat(it.getFirstHeader("Allow").value).contains("PUT")
}
val byteArray = UUID.randomUUID().toString().toByteArray()
val expectedEtag = "\"${DigestUtil.hexDigest(byteArray)}\""
- val putObject = HttpPut("$serviceEndpoint/$bucketName/testObjectName").apply {
- entity = ByteArrayEntity(byteArray)
- addHeader("Origin", "http://localhost/")
- }
+ val putObject =
+ HttpPut("$serviceEndpoint/$bucketName/testObjectName").apply {
+ entity = ByteArrayEntity(byteArray)
+ addHeader("Origin", "http://localhost/")
+ }
httpClient.execute(putObject).use {
assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
@@ -62,15 +65,18 @@ internal class CorsIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2024,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2024,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun testGetBucket_cors(testInfo: TestInfo) {
val targetBucket = givenBucket(testInfo)
- val httpOptions = HttpOptions("$serviceEndpoint/$targetBucket").apply {
- addHeader("Origin", "http://someurl.com")
- addHeader("Access-Control-Request-Method", "GET")
- addHeader("Access-Control-Request-Headers", "Content-Type, x-requested-with")
- }
+ val httpOptions =
+ HttpOptions("$serviceEndpoint/$targetBucket").apply {
+ addHeader("Origin", "http://someurl.com")
+ addHeader("Access-Control-Request-Method", "GET")
+ addHeader("Access-Control-Request-Headers", "Content-Type, x-requested-with")
+ }
httpClient.execute(httpOptions).use {
assertThat(it.getFirstHeader("Access-Control-Allow-Origin").value).isEqualTo("http://someurl.com")
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CrtAsyncIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CrtAsyncIT.kt
index 328d1fd2e..87c2b16c1 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CrtAsyncIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/CrtAsyncIT.kt
@@ -31,16 +31,18 @@ import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets
internal class CrtAsyncIT : S3TestBase() {
-
private val autoS3CrtAsyncClient: S3AsyncClient = createAutoS3CrtAsyncClient()
private val transferManager: S3TransferManager = createTransferManager()
@Test
@S3VerifiedSuccess(year = 2025)
fun testPutObject_etagCreation() {
- val expectedEtag = UPLOAD_FILE.inputStream().use {
- "\"${DigestUtil.hexDigest(it)}\""
- }
+ val expectedEtag =
+ UPLOAD_FILE
+ .inputStream()
+ .use {
+ "\"${DigestUtil.hexDigest(it)}\""
+ }
val bucketName = randomName
autoS3CrtAsyncClient
@@ -48,18 +50,22 @@ internal class CrtAsyncIT : S3TestBase() {
it.bucket(bucketName)
}.join()
- val putObjectResponse = autoS3CrtAsyncClient.putObject(
- {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- },
- AsyncRequestBody.fromFile(UPLOAD_FILE)
- ).join()
-
- putObjectResponse.eTag().also {
- assertThat(it).isNotBlank
- assertThat(it).isEqualTo(expectedEtag)
- }
+ val putObjectResponse =
+ autoS3CrtAsyncClient
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ },
+ AsyncRequestBody.fromFile(UPLOAD_FILE),
+ ).join()
+
+ putObjectResponse
+ .eTag()
+ .also {
+ assertThat(it).isNotBlank
+ assertThat(it).isEqualTo(expectedEtag)
+ }
}
@Test
@@ -71,24 +77,29 @@ internal class CrtAsyncIT : S3TestBase() {
it.bucket(bucketName)
}.join()
- val eTag = autoS3CrtAsyncClient.putObject(
- {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- },
- AsyncRequestBody.fromFile(UPLOAD_FILE)
- ).join().eTag()
+ val eTag =
+ autoS3CrtAsyncClient
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ },
+ AsyncRequestBody.fromFile(UPLOAD_FILE),
+ ).join()
+ .eTag()
- autoS3CrtAsyncClient.getObject(
- {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- },
- AsyncResponseTransformer.toBytes()
- ).join().also {
- assertThat(it.response().eTag()).isEqualTo(eTag)
- assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
- }
+ autoS3CrtAsyncClient
+ .getObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ },
+ AsyncResponseTransformer.toBytes(),
+ ).join()
+ .also {
+ assertThat(it.response().eTag()).isEqualTo(eTag)
+ assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
+ }
}
@Test
@@ -96,52 +107,61 @@ internal class CrtAsyncIT : S3TestBase() {
fun testMultipartUpload(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
val objectMetadata = mapOf("key" to "value")
- val createMultipartUploadResponseCompletableFuture = autoS3CrtAsyncClient
- .createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.metadata(objectMetadata)
- }
+ val createMultipartUploadResponseCompletableFuture =
+ autoS3CrtAsyncClient
+ .createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.metadata(objectMetadata)
+ }
val initiateMultipartUploadResult = createMultipartUploadResponseCompletableFuture.join()
val uploadId = initiateMultipartUploadResult.uploadId()
// upload part 1, >5MB
val randomBytes = randomBytes()
val partETag = uploadPart(bucketName, UPLOAD_FILE_NAME, uploadId, 1, randomBytes)
// upload part 2, <5MB
- val uploadPartResponse = autoS3CrtAsyncClient.uploadPart(
- {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(uploadId)
- it.partNumber(2)
- it.contentLength(UPLOAD_FILE_LENGTH)
- //it.lastPart(true)
- },
- AsyncRequestBody.fromFile(UPLOAD_FILE),
- ).join()
-
- val completeMultipartUploadResponse = autoS3CrtAsyncClient.completeMultipartUpload {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(initiateMultipartUploadResult.uploadId())
- it.multipartUpload {
- it.parts(
- {
- it.eTag(partETag)
- it.partNumber(1)
- },
+ val uploadPartResponse =
+ autoS3CrtAsyncClient
+ .uploadPart(
{
- it.eTag(uploadPartResponse.eTag())
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(uploadId)
it.partNumber(2)
- })
- }
- }.join()
+ it.contentLength(UPLOAD_FILE_LENGTH)
+ // it.lastPart(true)
+ },
+ AsyncRequestBody.fromFile(UPLOAD_FILE),
+ ).join()
+
+ val completeMultipartUploadResponse =
+ autoS3CrtAsyncClient
+ .completeMultipartUpload {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(initiateMultipartUploadResult.uploadId())
+ it.multipartUpload {
+ it.parts(
+ {
+ it.eTag(partETag)
+ it.partNumber(1)
+ },
+ {
+ it.eTag(uploadPartResponse.eTag())
+ it.partNumber(2)
+ },
+ )
+ }
+ }.join()
// Verify only 1st and 3rd counts
- val getObjectResponse = autoS3CrtAsyncClient.getObject({
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }, AsyncResponseTransformer.toBytes()).join()
+ val getObjectResponse =
+ autoS3CrtAsyncClient
+ .getObject({
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }, AsyncResponseTransformer.toBytes())
+ .join()
val uploadFileBytes = readStreamIntoByteArray(UPLOAD_FILE.inputStream())
(DigestUtils.md5(randomBytes) + DigestUtils.md5(uploadFileBytes)).also {
@@ -166,9 +186,9 @@ internal class CrtAsyncIT : S3TestBase() {
key: String,
uploadId: String,
partNumber: Int,
- randomBytes: ByteArray
- ): String {
- return autoS3CrtAsyncClient
+ randomBytes: ByteArray,
+ ): String =
+ autoS3CrtAsyncClient
.uploadPart(
{
it.bucket(bucketName)
@@ -177,10 +197,9 @@ internal class CrtAsyncIT : S3TestBase() {
it.partNumber(partNumber)
it.contentLength(randomBytes.size.toLong())
},
- AsyncRequestBody.fromBytes(randomBytes)
+ AsyncRequestBody.fromBytes(randomBytes),
).join()
.eTag()
- }
@Test
@S3VerifiedSuccess(year = 2025)
@@ -188,26 +207,29 @@ internal class CrtAsyncIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
val body = AsyncRequestBody.forBlockingInputStream(null)
- val putObjectResponseFuture = autoS3CrtAsyncClient.putObject(
- {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- },
- body
- )
+ val putObjectResponseFuture =
+ autoS3CrtAsyncClient.putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ },
+ body,
+ )
val randomBytes = randomBytes()
body.writeInputStream(ByteArrayInputStream(randomBytes))
putObjectResponseFuture.join()
- val getObjectResponse = autoS3CrtAsyncClient.getObject(
- {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- },
- AsyncResponseTransformer.toBytes()
- ).join()
+ val getObjectResponse =
+ autoS3CrtAsyncClient
+ .getObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ },
+ AsyncResponseTransformer.toBytes(),
+ ).join()
// verify content size
assertThat(getObjectResponse.response().contentLength())
@@ -224,31 +246,34 @@ internal class CrtAsyncIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
val body = AsyncRequestBody.forBlockingInputStream(null)
- val upload = transferManager
- .upload {
- it.requestBody(body)
- it.putObjectRequest {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
+ val upload =
+ transferManager
+ .upload {
+ it.requestBody(body)
+ it.putObjectRequest {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }
}
- }
val randomBytes = randomBytes()
body.writeInputStream(ByteArrayInputStream(randomBytes))
upload.completionFuture().join()
- val download = transferManager
- .download(
- DownloadRequest
- .builder()
- .getObjectRequest {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }
- .responseTransformer(AsyncResponseTransformer.toBytes())
- .build()
- ).completionFuture().join().result()
+ val download =
+ transferManager
+ .download(
+ DownloadRequest
+ .builder()
+ .getObjectRequest {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }.responseTransformer(AsyncResponseTransformer.toBytes())
+ .build(),
+ ).completionFuture()
+ .join()
+ .result()
// verify content size
assertThat(download.response().contentLength())
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectIT.kt
index 047bc4c67..160eb45e0 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/GetPutDeleteObjectIT.kt
@@ -48,7 +48,6 @@ import java.time.temporal.ChronoUnit
import kotlin.math.min
internal class GetPutDeleteObjectIT : S3TestBase() {
-
private val s3Client: S3Client = createS3Client()
private val s3ClientHttp: S3Client = createS3Client(serviceEndpointHttp)
private val s3AsyncClient: S3AsyncClient = createS3AsyncClient()
@@ -64,11 +63,12 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val key = UPLOAD_FILE_NAME
val bucketName = givenBucket(testInfo)
- s3Client.putObject({
- it.bucket(bucketName)
- it.key(key)
- },
- RequestBody.fromFile(UPLOAD_FILE)
+ s3Client.putObject(
+ {
+ it.bucket(bucketName)
+ it.key(key)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
)
s3Client.headObject {
@@ -76,12 +76,13 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.key(key)
}
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- }.use {
- assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(key)
+ }.use {
+ assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
+ }
s3Client.deleteObject {
it.bucket(bucketName)
@@ -93,8 +94,7 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.bucket(bucketName)
it.key(key)
}
- }
- .isInstanceOf(NoSuchKeyException::class.java)
+ }.isInstanceOf(NoSuchKeyException::class.java)
}
@Test
@@ -104,11 +104,12 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
val keys = listOf("$key-1", "$key-2", "$key-3")
keys.forEach { key ->
- s3Client.putObject({
- it.bucket(bucketName)
- it.key(key)
- },
- RequestBody.fromFile(UPLOAD_FILE)
+ s3Client.putObject(
+ {
+ it.bucket(bucketName)
+ it.key(key)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
)
}
@@ -129,12 +130,10 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.bucket(bucketName)
it.key(key)
}
- }
- .isInstanceOf(NoSuchKeyException::class.java)
+ }.isInstanceOf(NoSuchKeyException::class.java)
}
}
-
@Test
@S3VerifiedSuccess(year = 2025)
fun getObject_noSuchKey(testInfo: TestInfo) {
@@ -146,7 +145,7 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.key(NON_EXISTING_KEY)
}
}.isInstanceOf(
- NoSuchKeyException::class.java
+ NoSuchKeyException::class.java,
).hasMessageContaining(NO_SUCH_KEY)
}
@@ -161,31 +160,28 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.key("/$NON_EXISTING_KEY")
}
}.isInstanceOf(
- NoSuchKeyException::class.java
+ NoSuchKeyException::class.java,
).hasMessageContaining(NO_SUCH_KEY)
}
@Test
@S3VerifiedSuccess(year = 2025)
fun putObject_noSuchBucket() {
-
assertThatThrownBy {
s3Client.putObject(
{
it.bucket(randomName)
it.key(UPLOAD_FILE_NAME)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- }
- .isInstanceOf(NoSuchBucketException::class.java)
+ }.isInstanceOf(NoSuchBucketException::class.java)
.hasMessageContaining(NO_SUCH_BUCKET)
}
@Test
@S3VerifiedSuccess(year = 2025)
fun putObjectEncrypted_noSuchBucket() {
-
assertThatThrownBy {
s3Client.putObject(
{
@@ -194,10 +190,9 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.serverSideEncryption(ServerSideEncryption.AWS_KMS)
it.ssekmsKeyId(TEST_ENC_KEY_ID)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- }
- .isInstanceOf(NoSuchBucketException::class.java)
+ }.isInstanceOf(NoSuchBucketException::class.java)
.hasMessageContaining(NO_SUCH_BUCKET)
}
@@ -210,10 +205,10 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.key(UPLOAD_FILE_NAME)
}
}
- //TODO: not sure why AWS SDK v2 does not return the correct exception here, S3Mock returns the correct error message.
+ // TODO: not sure why AWS SDK v2 does not return the correct exception here, S3Mock returns the correct error message.
.isInstanceOf(NoSuchKeyException::class.java)
- //.isInstanceOf(NoSuchBucketException::class.java)
- //.hasMessageContaining(NO_SUCH_BUCKET)
+ // .isInstanceOf(NoSuchBucketException::class.java)
+ // .hasMessageContaining(NO_SUCH_BUCKET)
}
@Test
@@ -226,10 +221,9 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.bucket(bucketName)
it.key(NON_EXISTING_KEY)
}
- }
- .isInstanceOf(NoSuchKeyException::class.java)
- //TODO: not sure why AWS SDK v2 does not return the correct error message, S3Mock returns the correct message.
- //.hasMessageContaining(NO_SUCH_KEY)
+ }.isInstanceOf(NoSuchKeyException::class.java)
+ // TODO: not sure why AWS SDK v2 does not return the correct error message, S3Mock returns the correct message.
+ // .hasMessageContaining(NO_SUCH_KEY)
}
@Test
@@ -247,8 +241,7 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.destinationBucket(destinationBucketName)
it.destinationKey(destinationKey)
}
- }
- .isInstanceOf(NoSuchBucketException::class.java)
+ }.isInstanceOf(NoSuchBucketException::class.java)
.hasMessageContaining(NO_SUCH_BUCKET)
}
@@ -260,8 +253,7 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.bucket(randomName)
it.key(NON_EXISTING_KEY)
}
- }
- .isInstanceOf(NoSuchBucketException::class.java)
+ }.isInstanceOf(NoSuchBucketException::class.java)
.hasMessageContaining(NO_SUCH_BUCKET)
}
@@ -288,8 +280,7 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
})
}
}
- }
- .isInstanceOf(NoSuchBucketException::class.java)
+ }.isInstanceOf(NoSuchBucketException::class.java)
.hasMessageContaining(NO_SUCH_BUCKET)
}
@@ -300,8 +291,7 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
s3Client.deleteBucket {
it.bucket(randomName)
}
- }
- .isInstanceOf(NoSuchBucketException::class.java)
+ }.isInstanceOf(NoSuchBucketException::class.java)
.hasMessageContaining(NO_SUCH_BUCKET)
}
@@ -331,14 +321,18 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
@S3VerifiedSuccess(year = 2025)
@ParameterizedTest
@MethodSource(value = ["charsSafe", "charsSpecial", "charsToAvoid"])
- fun testPutHeadGetObject_keyNames_safe(key: String, testInfo: TestInfo) {
+ fun testPutHeadGetObject_keyNames_safe(
+ key: String,
+ testInfo: TestInfo,
+ ) {
val bucketName = givenBucket(testInfo)
- s3Client.putObject({
+ s3Client.putObject(
+ {
it.bucket(bucketName)
it.key(key)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
s3Client.headObject {
@@ -346,55 +340,67 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.key(key)
}
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- }.use {
- assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(key)
+ }.use {
+ assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
+ }
}
@S3VerifiedSuccess(year = 2025)
@ParameterizedTest
@MethodSource(value = ["storageClasses"])
- fun testPutObject_storageClass(storageClass: StorageClass, testInfo: TestInfo) {
+ fun testPutObject_storageClass(
+ storageClass: StorageClass,
+ testInfo: TestInfo,
+ ) {
val bucketName = givenBucket(testInfo)
val key = UPLOAD_FILE_NAME
- val eTag = s3Client.putObject({
+ val eTag =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(key)
+ it.storageClass(storageClass)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).eTag()
+
+ s3Client
+ .headObject {
it.bucket(bucketName)
it.key(key)
- it.storageClass(storageClass)
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).eTag()
-
- s3Client.headObject {
- it.bucket(bucketName)
- it.key(key)
- }.also {
- assertThat(it.eTag()).isEqualTo(eTag)
- }
+ }.also {
+ assertThat(it.eTag()).isEqualTo(eTag)
+ }
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- }.use {
- assertThat(it.response().eTag()).isEqualTo(eTag)
- if (storageClass == StorageClass.STANDARD) {
- //storageClass STANDARD is never returned from S3 APIs...
- assertThat(it.response().storageClass()).isNull()
- } else {
- assertThat(it.response().storageClass()).isEqualTo(storageClass)
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(key)
+ }.use {
+ assertThat(it.response().eTag()).isEqualTo(eTag)
+ if (storageClass == StorageClass.STANDARD) {
+ // storageClass STANDARD is never returned from S3 APIs...
+ assertThat(it.response().storageClass()).isNull()
+ } else {
+ assertThat(it.response().storageClass()).isEqualTo(storageClass)
+ }
}
- }
}
@S3VerifiedSuccess(year = 2025)
@ParameterizedTest
@MethodSource(value = ["testFileNames"])
- fun testPutObject_etagCreation_sync(testFileName: String, testInfo: TestInfo) {
+ fun testPutObject_etagCreation_sync(
+ testFileName: String,
+ testInfo: TestInfo,
+ ) {
testEtagCreation(testFileName, s3Client, testInfo)
testEtagCreation(testFileName, s3ClientHttp, testInfo)
}
@@ -402,22 +408,26 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
private fun testEtagCreation(
testFileName: String,
s3Client: S3Client,
- testInfo: TestInfo
+ testInfo: TestInfo,
) {
val uploadFile = File(testFileName)
- val expectedEtag = uploadFile.inputStream().use {
- "\"${DigestUtil.hexDigest(it)}\""
- }
+ val expectedEtag =
+ uploadFile.inputStream().use {
+ "\"${DigestUtil.hexDigest(it)}\""
+ }
val bucketName = givenBucket(testInfo)
- s3Client.putObject({
- it.bucket(bucketName)
- it.key(testFileName)
- },
- RequestBody.fromFile(uploadFile)
- ).eTag().also {
- assertThat(it).isNotBlank
- assertThat(it).isEqualTo(expectedEtag)
- }
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(testFileName)
+ },
+ RequestBody.fromFile(uploadFile),
+ ).eTag()
+ .also {
+ assertThat(it).isNotBlank
+ assertThat(it).isEqualTo(expectedEtag)
+ }
}
@S3VerifiedSuccess(year = 2025)
@@ -434,22 +444,27 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
private fun testEtagCreation(
testFileName: String,
- s3Client: S3AsyncClient
+ s3Client: S3AsyncClient,
) {
val uploadFile = File(testFileName)
- val expectedEtag = uploadFile.inputStream().use {
- "\"${DigestUtil.hexDigest(it)}\""
- }
+ val expectedEtag =
+ uploadFile.inputStream().use {
+ "\"${DigestUtil.hexDigest(it)}\""
+ }
val bucketName = givenBucket(randomName)
- s3Client.putObject({
- it.bucket(bucketName)
- it.key(testFileName)
- },
- AsyncRequestBody.fromFile(uploadFile)
- ).join().eTag().also {
- assertThat(it).isNotBlank
- assertThat(it).isEqualTo(expectedEtag)
- }
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(testFileName)
+ },
+ AsyncRequestBody.fromFile(uploadFile),
+ ).join()
+ .eTag()
+ .also {
+ assertThat(it).isNotBlank
+ assertThat(it).isEqualTo(expectedEtag)
+ }
}
@Test
@@ -458,31 +473,35 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val expectedChecksum = DigestUtil.checksumFor(UPLOAD_FILE_PATH, DefaultChecksumAlgorithm.SHA1)
val bucketName = givenBucket(testInfo)
- val eTag = s3Client.putObject({
+ val eTag =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).eTag()
+
+ s3Client
+ .getObjectAttributes {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
- it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).eTag()
-
- s3Client.getObjectAttributes {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.objectAttributes(
- ObjectAttributes.OBJECT_SIZE,
- ObjectAttributes.STORAGE_CLASS,
- ObjectAttributes.E_TAG,
- ObjectAttributes.CHECKSUM
- )
- }.also {
- assertThat(it.eTag()).isEqualTo(eTag.trim('"'))
- //default storageClass is STANDARD, which is never returned from APIs except by GetObjectAttributes
- assertThat(it.storageClass()).isEqualTo(StorageClass.STANDARD)
- assertThat(it.objectSize()).isEqualTo(UPLOAD_FILE_LENGTH)
- assertThat(it.checksum().checksumSHA1()).isEqualTo(expectedChecksum)
- assertThat(it.checksum().checksumType()).isEqualTo(ChecksumType.FULL_OBJECT)
- }
+ it.objectAttributes(
+ ObjectAttributes.OBJECT_SIZE,
+ ObjectAttributes.STORAGE_CLASS,
+ ObjectAttributes.E_TAG,
+ ObjectAttributes.CHECKSUM,
+ )
+ }.also {
+ assertThat(it.eTag()).isEqualTo(eTag.trim('"'))
+ // default storageClass is STANDARD, which is never returned from APIs except by GetObjectAttributes
+ assertThat(it.storageClass()).isEqualTo(StorageClass.STANDARD)
+ assertThat(it.objectSize()).isEqualTo(UPLOAD_FILE_LENGTH)
+ assertThat(it.checksum().checksumSHA1()).isEqualTo(expectedChecksum)
+ assertThat(it.checksum().checksumType()).isEqualTo(ChecksumType.FULL_OBJECT)
+ }
}
@Test
@@ -490,20 +509,23 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
fun testPutObject_objectMetadata(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
- s3Client.putObject({
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.metadata(mapOf("key1" to "value1", "key2" to "value2"))
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).eTag()
+
+ s3Client
+ .getObject {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
- it.metadata(mapOf("key1" to "value1", "key2" to "value2"))
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).eTag()
-
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.let {
- assertThat(it.response().metadata()).containsAllEntriesOf(mapOf("key1" to "value1", "key2" to "value2"))
- }
+ }.let {
+ assertThat(it.response().metadata()).containsAllEntriesOf(mapOf("key1" to "value1", "key2" to "value2"))
+ }
}
@S3VerifiedSuccess(year = 2025)
@@ -527,44 +549,49 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
}
private fun testChecksumAlgorithm(
- testFileName: String,
- checksumAlgorithm: software.amazon.awssdk.checksums.spi.ChecksumAlgorithm,
- s3Client: S3Client,
+ testFileName: String,
+ checksumAlgorithm: software.amazon.awssdk.checksums.spi.ChecksumAlgorithm,
+ s3Client: S3Client,
) {
val uploadFile = File(testFileName)
val expectedChecksum = DigestUtil.checksumFor(uploadFile.toPath(), checksumAlgorithm)
val bucketName = givenBucket(randomName)
- s3Client.putObject({
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(testFileName)
+ it.checksumAlgorithm(checksumAlgorithm.toAlgorithm())
+ },
+ RequestBody.fromFile(uploadFile),
+ ).also {
+ val putChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
+ assertThat(putChecksum).isNotBlank
+ assertThat(putChecksum).isEqualTo(expectedChecksum)
+ }
+
+ s3Client
+ .getObject {
it.bucket(bucketName)
it.key(testFileName)
- it.checksumAlgorithm(checksumAlgorithm.toAlgorithm())
- }, RequestBody.fromFile(uploadFile)
- ).also {
- val putChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
- assertThat(putChecksum).isNotBlank
- assertThat(putChecksum).isEqualTo(expectedChecksum)
- }
-
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(testFileName)
- it.checksumMode(ChecksumMode.ENABLED)
- }.use {
- val getChecksum = it.response().checksum(checksumAlgorithm.toAlgorithm())
- assertThat(getChecksum).isNotBlank
- assertThat(getChecksum).isEqualTo(expectedChecksum)
- }
+ it.checksumMode(ChecksumMode.ENABLED)
+ }.use {
+ val getChecksum = it.response().checksum(checksumAlgorithm.toAlgorithm())
+ assertThat(getChecksum).isNotBlank
+ assertThat(getChecksum).isEqualTo(expectedChecksum)
+ }
- s3Client.headObject {
- it.bucket(bucketName)
- it.key(testFileName)
- it.checksumMode(ChecksumMode.ENABLED)
- }.also {
- val headChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
- assertThat(headChecksum).isNotBlank
- assertThat(headChecksum).isEqualTo(expectedChecksum)
- }
+ s3Client
+ .headObject {
+ it.bucket(bucketName)
+ it.key(testFileName)
+ it.checksumMode(ChecksumMode.ENABLED)
+ }.also {
+ val headChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
+ assertThat(headChecksum).isNotBlank
+ assertThat(headChecksum).isEqualTo(expectedChecksum)
+ }
}
@S3VerifiedSuccess(year = 2025)
@@ -608,49 +635,56 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
}
private fun testChecksumAlgorithm_async(
- testFileName: String,
- checksumAlgorithm: software.amazon.awssdk.checksums.spi.ChecksumAlgorithm,
- s3Client: S3AsyncClient,
+ testFileName: String,
+ checksumAlgorithm: software.amazon.awssdk.checksums.spi.ChecksumAlgorithm,
+ s3Client: S3AsyncClient,
) {
val uploadFile = File(testFileName)
val expectedChecksum = DigestUtil.checksumFor(uploadFile.toPath(), checksumAlgorithm)
val bucketName = givenBucket(randomName)
- s3Client.putObject({
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(testFileName)
+ it.checksumAlgorithm(checksumAlgorithm.toAlgorithm())
+ },
+ AsyncRequestBody.fromFile(uploadFile),
+ ).join()
+ .also {
+ val putChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
+ assertThat(putChecksum).isNotBlank
+ assertThat(putChecksum).isEqualTo(expectedChecksum)
+ }
+
+ this.s3Client
+ .getObject {
it.bucket(bucketName)
it.key(testFileName)
- it.checksumAlgorithm(checksumAlgorithm.toAlgorithm())
- },
- AsyncRequestBody.fromFile(uploadFile)
- ).join().also {
- val putChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
- assertThat(putChecksum).isNotBlank
- assertThat(putChecksum).isEqualTo(expectedChecksum)
- }
-
- this.s3Client.getObject {
- it.bucket(bucketName)
- it.key(testFileName)
- it.checksumMode(ChecksumMode.ENABLED)
- }.use {
- val getChecksum = it.response().checksum(checksumAlgorithm.toAlgorithm())
- assertThat(getChecksum).isNotBlank
- assertThat(getChecksum).isEqualTo(expectedChecksum)
- }
+ it.checksumMode(ChecksumMode.ENABLED)
+ }.use {
+ val getChecksum = it.response().checksum(checksumAlgorithm.toAlgorithm())
+ assertThat(getChecksum).isNotBlank
+ assertThat(getChecksum).isEqualTo(expectedChecksum)
+ }
- this.s3Client.headObject {
- it.bucket(bucketName)
- it.key(testFileName)
- it.checksumMode(ChecksumMode.ENABLED)
- }.also {
- val headChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
- assertThat(headChecksum).isNotBlank
- assertThat(headChecksum).isEqualTo(expectedChecksum)
- }
+ this.s3Client
+ .headObject {
+ it.bucket(bucketName)
+ it.key(testFileName)
+ it.checksumMode(ChecksumMode.ENABLED)
+ }.also {
+ val headChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
+ assertThat(headChecksum).isNotBlank
+ assertThat(headChecksum).isEqualTo(expectedChecksum)
+ }
}
- private fun PutObjectRequest.Builder
- .checksum(checksum: String, checksumAlgorithm: ChecksumAlgorithm): PutObjectRequest.Builder =
+ private fun PutObjectRequest.Builder.checksum(
+ checksum: String,
+ checksumAlgorithm: ChecksumAlgorithm,
+ ): PutObjectRequest.Builder =
when (checksumAlgorithm) {
ChecksumAlgorithm.SHA1 -> checksumSHA1(checksum)
ChecksumAlgorithm.SHA256 -> checksumSHA256(checksum)
@@ -663,41 +697,47 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
@S3VerifiedSuccess(year = 2025)
@ParameterizedTest
@MethodSource(value = ["checksumAlgorithms"])
- fun testPutObject_checksum(checksumAlgorithm: software.amazon.awssdk.checksums.spi.ChecksumAlgorithm,
- testInfo: TestInfo) {
+ fun testPutObject_checksum(
+ checksumAlgorithm: software.amazon.awssdk.checksums.spi.ChecksumAlgorithm,
+ testInfo: TestInfo,
+ ) {
val expectedChecksum = DigestUtil.checksumFor(UPLOAD_FILE_PATH, checksumAlgorithm)
val bucketName = givenBucket(testInfo)
- s3Client.putObject({
- it.checksum(expectedChecksum, checksumAlgorithm.toAlgorithm())
- it.bucket(bucketName).key(UPLOAD_FILE_NAME)
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).also {
- val putChecksum = it.checksum(checksumAlgorithm.toAlgorithm())!!
- assertThat(putChecksum).isNotBlank
- assertThat(putChecksum).isEqualTo(expectedChecksum)
- }
+ s3Client
+ .putObject(
+ {
+ it.checksum(expectedChecksum, checksumAlgorithm.toAlgorithm())
+ it.bucket(bucketName).key(UPLOAD_FILE_NAME)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).also {
+ val putChecksum = it.checksum(checksumAlgorithm.toAlgorithm())!!
+ assertThat(putChecksum).isNotBlank
+ assertThat(putChecksum).isEqualTo(expectedChecksum)
+ }
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.checksumMode(ChecksumMode.ENABLED)
- }.use {
- val getChecksum = it.response().checksum(checksumAlgorithm.toAlgorithm())
- assertThat(getChecksum).isNotBlank
- assertThat(getChecksum).isEqualTo(expectedChecksum)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumMode(ChecksumMode.ENABLED)
+ }.use {
+ val getChecksum = it.response().checksum(checksumAlgorithm.toAlgorithm())
+ assertThat(getChecksum).isNotBlank
+ assertThat(getChecksum).isEqualTo(expectedChecksum)
+ }
- s3Client.headObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.checksumMode(ChecksumMode.ENABLED)
- }.also {
- val headChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
- assertThat(headChecksum).isNotBlank
- assertThat(headChecksum).isEqualTo(expectedChecksum)
- }
+ s3Client
+ .headObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumMode(ChecksumMode.ENABLED)
+ }.also {
+ val headChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
+ assertThat(headChecksum).isNotBlank
+ assertThat(headChecksum).isEqualTo(expectedChecksum)
+ }
}
@Test
@@ -708,15 +748,15 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
assertThatThrownBy {
- s3Client.putObject({
+ s3Client.putObject(
+ {
it.checksum(expectedChecksum, checksumAlgorithm)
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 400")
.hasMessageContaining("Value for x-amz-checksum-sha1 header is invalid.")
}
@@ -727,16 +767,16 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
assertThatThrownBy {
- s3Client.putObject({
+ s3Client.putObject(
+ {
it.ssekmsKeyId(TEST_WRONG_KEY_ID)
it.serverSideEncryption(ServerSideEncryption.AWS_KMS)
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 400")
.hasMessageContaining("Invalid keyId 'key-ID-WRONGWRONGWRONG'")
}
@@ -752,26 +792,31 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val key = "someKey${charsSafeKey()}"
- val eTag = s3Client.putObject({
+ val eTag =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(key)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).eTag()
+
+ s3Client
+ .headObject {
it.bucket(bucketName)
it.key(key)
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).eTag()
-
- s3Client.headObject {
- it.bucket(bucketName)
- it.key(key)
- }.also {
- assertThat(it.eTag()).isEqualTo(eTag)
- }
+ }.also {
+ assertThat(it.eTag()).isEqualTo(eTag)
+ }
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- }.use {
- assertThat(eTag).isEqualTo(it.response().eTag())
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(key)
+ }.use {
+ assertThat(eTag).isEqualTo(it.response().eTag())
+ }
}
/**
@@ -785,28 +830,34 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val key = "someKey${charsSpecialKey()}"
- val eTag = s3Client.putObject({
+ val eTag =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(key)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).eTag()
+
+ s3Client
+ .headObject(
+ HeadObjectRequest
+ .builder()
+ .bucket(bucketName)
+ .key(key)
+ .build(),
+ ).also {
+ assertThat(it.eTag()).isEqualTo(eTag)
+ }
+
+ s3Client
+ .getObject {
it.bucket(bucketName)
it.key(key)
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).eTag()
-
- s3Client.headObject(
- HeadObjectRequest.builder()
- .bucket(bucketName)
- .key(key)
- .build()
- ).also {
- assertThat(it.eTag()).isEqualTo(eTag)
- }
-
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- }.use {
- assertThat(eTag).isEqualTo(it.response().eTag())
- }
+ }.use {
+ assertThat(eTag).isEqualTo(it.response().eTag())
+ }
}
@Test
@@ -825,25 +876,28 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
.hasMessageContaining("Service: S3, Status Code: 404")
getObject(bucket2, UPLOAD_FILE_NAME).use {
- assertThat(getObject(bucket2, UPLOAD_FILE_NAME).use {it.response().eTag()}).isEqualTo(it.response().eTag())
- }
+ assertThat(getObject(bucket2, UPLOAD_FILE_NAME).use { it.response().eTag() }).isEqualTo(it.response().eTag())
+ }
}
@Test
@S3VerifiedSuccess(year = 2025)
fun testPutGetHeadObject_storeHeaders() {
val bucket = givenBucket()
- val contentDisposition = ContentDisposition.formData()
- .name("file")
- .filename("sampleFile.txt")
- .build()
- .toString()
+ val contentDisposition =
+ ContentDisposition
+ .formData()
+ .name("file")
+ .filename("sampleFile.txt")
+ .build()
+ .toString()
val expires = Instant.now()
val encoding = "SomeEncoding"
val contentLanguage = "SomeLanguage"
val cacheControl = "SomeCacheControl"
- s3Client.putObject({
+ s3Client.putObject(
+ {
it.bucket(bucket)
it.key(UPLOAD_FILE_NAME)
it.contentDisposition(contentDisposition)
@@ -852,7 +906,7 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.contentLanguage(contentLanguage)
it.cacheControl(cacheControl)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
getObject(bucket, UPLOAD_FILE_NAME).use {
@@ -866,17 +920,17 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
assertThat(it.response().cacheControl()).isEqualTo(cacheControl)
}
-
- s3Client.headObject {
- it.bucket(bucket)
- it.key(UPLOAD_FILE_NAME)
- }.also {
- assertThat(it.contentDisposition()).isEqualTo(contentDisposition)
- assertThat(it.contentEncoding()).isEqualTo(encoding)
- assertThat(it.expires()).isEqualTo(expires.truncatedTo(ChronoUnit.SECONDS))
- assertThat(it.contentLanguage()).isEqualTo(contentLanguage)
- assertThat(it.cacheControl()).isEqualTo(cacheControl)
- }
+ s3Client
+ .headObject {
+ it.bucket(bucket)
+ it.key(UPLOAD_FILE_NAME)
+ }.also {
+ assertThat(it.contentDisposition()).isEqualTo(contentDisposition)
+ assertThat(it.contentEncoding()).isEqualTo(encoding)
+ assertThat(it.expires()).isEqualTo(expires.truncatedTo(ChronoUnit.SECONDS))
+ assertThat(it.contentLanguage()).isEqualTo(contentLanguage)
+ assertThat(it.cacheControl()).isEqualTo(cacheControl)
+ }
}
@Test
@@ -885,11 +939,13 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val (bucketName, putObjectResponse) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
val matchingEtag = putObjectResponse.eTag()
- s3Client.putObject ({
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifMatch(matchingEtag)
- }, RequestBody.fromFile(UPLOAD_FILE)
+ s3Client.putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifMatch(matchingEtag)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
)
}
@@ -905,7 +961,8 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
it.ifNoneMatch(nonMatchingEtag)
- }, RequestBody.fromFile(UPLOAD_FILE)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
)
}.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 412")
@@ -922,7 +979,8 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
it.ifNoneMatch(nonMatchingEtag)
- }, RequestBody.fromFile(UPLOAD_FILE)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
)
}
@@ -938,7 +996,8 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
it.ifMatch(nonMatchingEtag)
- }, RequestBody.fromFile(UPLOAD_FILE)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
)
}.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 412")
@@ -956,15 +1015,18 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
it.ifMatch(nonMatchingEtag)
- }, RequestBody.fromFile(UPLOAD_FILE)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
)
}.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 404")
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.",
+ )
fun `DELETE object succeeds with if-match=true`(testInfo: TestInfo) {
val (bucketName, putObjectResponse) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
val expectedEtag = putObjectResponse.eTag()
@@ -977,8 +1039,10 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.",
+ )
fun `DELETE object succeeds with if-match=true with wildcard`(testInfo: TestInfo) {
val matchingEtag = WILDCARD
val (bucketName, _) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
@@ -991,8 +1055,10 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.",
+ )
fun `DELETE object succeeds with if-match-size=true`(testInfo: TestInfo) {
val (bucketName, _) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
@@ -1003,17 +1069,20 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
}
}
-
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.",
+ )
fun `DELETE object succeeds with if-match-last-modified-time=true`(testInfo: TestInfo) {
val (bucketName, _) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
- val lastModified = s3Client.headObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.lastModified()
+ val lastModified =
+ s3Client
+ .headObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }.lastModified()
s3Client.deleteObject {
it.bucket(bucketName)
@@ -1023,8 +1092,10 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.",
+ )
fun `DELETE object fails with if-match=false`(testInfo: TestInfo) {
val nonMatchingEtag = "\"$randomName\""
val (bucketName, _) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
@@ -1040,8 +1111,10 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.",
+ )
fun `DELETE object fails with if-match-size=false`(testInfo: TestInfo) {
val nonMatchingSize = 0L
val (bucketName, _) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
@@ -1057,8 +1130,10 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "Supported only on directory buckets. S3 returns: A header you provided implies functionality that is not implemented.",
+ )
fun `DELETE object fails with if-match-last-modified-time=false`(testInfo: TestInfo) {
val lastModifiedTime = Instant.now().minusSeconds(60)
val (bucketName, _) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
@@ -1079,17 +1154,20 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val (bucketName, putObjectResponse) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
val expectedEtag = putObjectResponse.eTag()
- s3Client.headObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifMatch(expectedEtag)
- }.also {
- assertThat(it.eTag()).isEqualTo(expectedEtag)
- }
+ s3Client
+ .headObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifMatch(expectedEtag)
+ }.also {
+ assertThat(it.eTag()).isEqualTo(expectedEtag)
+ }
}
- @Disabled("Spring Boot sends a 412 for this request even though the controller returns a 200 OK." +
- "This test succeeds against the AWS S3 API.")
+ @Disabled(
+ "Spring Boot sends a 412 for this request even though the controller returns a 200 OK." +
+ "This test succeeds against the AWS S3 API.",
+ )
@Test
@S3VerifiedSuccess(year = 2025)
fun `HEAD object succeeds with if-match=true and if-unmodified-since=false`(testInfo: TestInfo) {
@@ -1097,14 +1175,15 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val (bucketName, putObjectResponse) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
val expectedEtag = putObjectResponse.eTag()
- s3Client.headObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifMatch(expectedEtag)
- it.ifUnmodifiedSince(now)
- }.also {
- assertThat(it.eTag()).isEqualTo(expectedEtag)
- }
+ s3Client
+ .headObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifMatch(expectedEtag)
+ it.ifUnmodifiedSince(now)
+ }.also {
+ assertThat(it.eTag()).isEqualTo(expectedEtag)
+ }
}
@Test
@@ -1130,13 +1209,14 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val (bucketName, putObjectResponse) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
val expectedEtag = putObjectResponse.eTag()
- s3Client.headObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifNoneMatch(nonMatchingEtag)
- }.also {
- assertThat(it.eTag()).isEqualTo(expectedEtag)
- }
+ s3Client
+ .headObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifNoneMatch(nonMatchingEtag)
+ }.also {
+ assertThat(it.eTag()).isEqualTo(expectedEtag)
+ }
}
@Test
@@ -1180,13 +1260,14 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val (bucketName, putObjectResponse) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
val expectedEtag = putObjectResponse.eTag()
- s3Client.headObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifModifiedSince(now)
- }.also {
- assertThat(it.eTag()).isEqualTo(expectedEtag)
- }
+ s3Client
+ .headObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifModifiedSince(now)
+ }.also {
+ assertThat(it.eTag()).isEqualTo(expectedEtag)
+ }
}
@Test
@@ -1212,13 +1293,14 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val expectedEtag = putObjectResponse.eTag()
val now = Instant.now().plusSeconds(60)
- s3Client.headObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifUnmodifiedSince(now)
- }.also {
- assertThat(it.eTag()).isEqualTo(expectedEtag)
- }
+ s3Client
+ .headObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifUnmodifiedSince(now)
+ }.also {
+ assertThat(it.eTag()).isEqualTo(expectedEtag)
+ }
}
@Test
@@ -1243,14 +1325,15 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val (bucketName, putObjectResponse) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
val matchingEtag = putObjectResponse.eTag()
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifMatch(matchingEtag)
- }.use {
- assertThat(it.response().eTag()).isEqualTo(matchingEtag)
- assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifMatch(matchingEtag)
+ }.use {
+ assertThat(it.response().eTag()).isEqualTo(matchingEtag)
+ assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
+ }
}
@Test
@@ -1260,15 +1343,16 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val (bucketName, putObjectResponse) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
val matchingEtag = putObjectResponse.eTag()
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifMatch(matchingEtag)
- it.ifUnmodifiedSince(now)
- }.use {
- assertThat(it.response().eTag()).isEqualTo(matchingEtag)
- assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifMatch(matchingEtag)
+ it.ifUnmodifiedSince(now)
+ }.use {
+ assertThat(it.response().eTag()).isEqualTo(matchingEtag)
+ assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
+ }
}
@Test
@@ -1278,16 +1362,16 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val eTag = putObjectResponse.eTag()
val matchingEtag = WILDCARD
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifMatch(matchingEtag)
- }.use {
- assertThat(it.response().eTag()).isEqualTo(eTag)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifMatch(matchingEtag)
+ }.use {
+ assertThat(it.response().eTag()).isEqualTo(eTag)
+ }
}
-
@Test
@S3VerifiedSuccess(year = 2025)
fun `GET object succeeds with if-none-match=true`(testInfo: TestInfo) {
@@ -1296,14 +1380,15 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val noneMatchingEtag = "\"$randomName\""
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifNoneMatch(noneMatchingEtag)
- }.use {
- assertThat(it.response().eTag()).isEqualTo(matchingEtag)
- assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifNoneMatch(noneMatchingEtag)
+ }.use {
+ assertThat(it.response().eTag()).isEqualTo(matchingEtag)
+ assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
+ }
}
@Test
@@ -1328,13 +1413,14 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val now = Instant.now().minusSeconds(60)
val (bucketName, _) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifModifiedSince(now)
- }.use {
- assertThat(it.response().eTag()).isNotNull()
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifModifiedSince(now)
+ }.use {
+ assertThat(it.response().eTag()).isNotNull()
+ }
}
@Test
@@ -1376,13 +1462,14 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val (bucketName, _) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
val now = Instant.now().plusSeconds(60)
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifUnmodifiedSince(now)
- }.use {
- assertThat(it.response().eTag()).isNotNull()
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifUnmodifiedSince(now)
+ }.use {
+ assertThat(it.response().eTag()).isNotNull()
+ }
}
@Test
@@ -1409,31 +1496,33 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val smallRequestStartBytes = 1L
val smallRequestEndBytes = 2L
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.ifMatch(eTag)
- it.range("bytes=$smallRequestStartBytes-$smallRequestEndBytes")
- }.use {
- assertThat(it.response().contentLength()).isEqualTo(smallRequestEndBytes)
- assertThat(it.response().contentRange())
- .isEqualTo("bytes $smallRequestStartBytes-$smallRequestEndBytes/$UPLOAD_FILE_LENGTH")
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ifMatch(eTag)
+ it.range("bytes=$smallRequestStartBytes-$smallRequestEndBytes")
+ }.use {
+ assertThat(it.response().contentLength()).isEqualTo(smallRequestEndBytes)
+ assertThat(it.response().contentRange())
+ .isEqualTo("bytes $smallRequestStartBytes-$smallRequestEndBytes/$UPLOAD_FILE_LENGTH")
+ }
val largeRequestStartBytes = 0L
val largeRequestEndBytes = 1000L
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.range("bytes=$largeRequestStartBytes-$largeRequestEndBytes")
- }.use {
- assertThat(it.response().contentLength()).isEqualTo(min(UPLOAD_FILE_LENGTH, largeRequestEndBytes + 1))
- assertThat(it.response().contentRange())
- .isEqualTo(
- "bytes $largeRequestStartBytes-${min(UPLOAD_FILE_LENGTH - 1, largeRequestEndBytes)}/$UPLOAD_FILE_LENGTH"
- )
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.range("bytes=$largeRequestStartBytes-$largeRequestEndBytes")
+ }.use {
+ assertThat(it.response().contentLength()).isEqualTo(min(UPLOAD_FILE_LENGTH, largeRequestEndBytes + 1))
+ assertThat(it.response().contentRange())
+ .isEqualTo(
+ "bytes $largeRequestStartBytes-${min(UPLOAD_FILE_LENGTH - 1, largeRequestEndBytes)}/$UPLOAD_FILE_LENGTH",
+ )
+ }
}
@Test
@@ -1442,15 +1531,16 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
val key = givenObjectV2WithRandomBytes(bucketName)
val startBytes = 4500L
- val totalBytes = _5MB.toInt()
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- it.range("bytes=$startBytes-")
- }.use {
- assertThat(it.response().contentLength()).isEqualTo(totalBytes - startBytes)
- assertThat(it.response().contentRange()).isEqualTo("bytes $startBytes-${totalBytes-1}/$totalBytes")
- }
+ val totalBytes = FIVE_MB
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(key)
+ it.range("bytes=$startBytes-")
+ }.use {
+ assertThat(it.response().contentLength()).isEqualTo(totalBytes - startBytes)
+ assertThat(it.response().contentRange()).isEqualTo("bytes $startBytes-${totalBytes - 1}/$totalBytes")
+ }
}
@Test
@@ -1459,23 +1549,26 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
val key = givenObjectV2WithRandomBytes(bucketName)
val endBytes = 500L
- val totalBytes = _5MB.toInt()
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- it.range("bytes=-$endBytes")
- }.use {
- assertThat(it.response().contentLength()).isEqualTo(endBytes)
- assertThat(it.response().contentRange()).isEqualTo("bytes ${totalBytes-endBytes}-${totalBytes-1}/$totalBytes")
- }
+ val totalBytes = FIVE_MB
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(key)
+ it.range("bytes=-$endBytes")
+ }.use {
+ assertThat(it.response().contentLength()).isEqualTo(endBytes)
+ assertThat(it.response().contentRange()).isEqualTo("bytes ${totalBytes - endBytes}-${totalBytes - 1}/$totalBytes")
+ }
}
/**
* Tests if Object can be uploaded with KMS and Metadata can be retrieved.
*/
@Test
- @S3VerifiedFailure(year = 2023,
- reason = "No KMS configuration for AWS test account")
+ @S3VerifiedFailure(
+ year = 2023,
+ reason = "No KMS configuration for AWS test account",
+ )
fun testPutObject_withEncryption(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
@@ -1483,50 +1576,57 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
val sseCustomerKey = "someCustomerKey"
val sseCustomerKeyMD5 = "someCustomerKeyMD5"
val ssekmsEncryptionContext = "someEncryptionContext"
- s3Client.putObject({
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.ssekmsKeyId(TEST_ENC_KEY_ID)
+ it.sseCustomerAlgorithm(sseCustomerAlgorithm)
+ it.sseCustomerKey(sseCustomerKey)
+ it.sseCustomerKeyMD5(sseCustomerKeyMD5)
+ it.ssekmsEncryptionContext(ssekmsEncryptionContext)
+ it.serverSideEncryption(ServerSideEncryption.AWS_KMS)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).also {
+ assertThat(it.ssekmsKeyId()).isEqualTo(TEST_ENC_KEY_ID)
+ assertThat(it.sseCustomerAlgorithm()).isEqualTo(sseCustomerAlgorithm)
+ assertThat(it.sseCustomerKeyMD5()).isEqualTo(sseCustomerKeyMD5)
+ assertThat(it.serverSideEncryption()).isEqualTo(ServerSideEncryption.AWS_KMS)
+ }
+
+ s3Client
+ .getObject {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
- it.ssekmsKeyId(TEST_ENC_KEY_ID)
- it.sseCustomerAlgorithm(sseCustomerAlgorithm)
- it.sseCustomerKey(sseCustomerKey)
- it.sseCustomerKeyMD5(sseCustomerKeyMD5)
- it.ssekmsEncryptionContext(ssekmsEncryptionContext)
- it.serverSideEncryption(ServerSideEncryption.AWS_KMS)
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).also {
- assertThat(it.ssekmsKeyId()).isEqualTo(TEST_ENC_KEY_ID)
- assertThat(it.sseCustomerAlgorithm()).isEqualTo(sseCustomerAlgorithm)
- assertThat(it.sseCustomerKeyMD5()).isEqualTo(sseCustomerKeyMD5)
- assertThat(it.serverSideEncryption()).isEqualTo(ServerSideEncryption.AWS_KMS)
- }
-
-
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.use {
- assertThat(it.response().ssekmsKeyId()).isEqualTo(TEST_ENC_KEY_ID)
- assertThat(it.response().sseCustomerAlgorithm()).isEqualTo(sseCustomerAlgorithm)
- assertThat(it.response().sseCustomerKeyMD5()).isEqualTo(sseCustomerKeyMD5)
- assertThat(it.response().serverSideEncryption()).isEqualTo(ServerSideEncryption.AWS_KMS)
- }
+ }.use {
+ assertThat(it.response().ssekmsKeyId()).isEqualTo(TEST_ENC_KEY_ID)
+ assertThat(it.response().sseCustomerAlgorithm()).isEqualTo(sseCustomerAlgorithm)
+ assertThat(it.response().sseCustomerKeyMD5()).isEqualTo(sseCustomerKeyMD5)
+ assertThat(it.response().serverSideEncryption()).isEqualTo(ServerSideEncryption.AWS_KMS)
+ }
}
@S3VerifiedSuccess(year = 2025)
@ParameterizedTest(name = ParameterizedTest.INDEX_PLACEHOLDER + " uploadWithSigning={0}, uploadChunked={1}")
@CsvSource(value = ["true, true", "true, false", "false, true", "false, false"])
- fun testPutGetObject_signingAndChunkedEncoding(uploadWithSigning: Boolean, uploadChunked: Boolean, testInfo: TestInfo) {
+ fun testPutGetObject_signingAndChunkedEncoding(
+ uploadWithSigning: Boolean,
+ uploadChunked: Boolean,
+ testInfo: TestInfo,
+ ) {
val key = UPLOAD_FILE_NAME
val bucketName = givenBucket(testInfo)
val s3Client = createS3Client(chunkedEncodingEnabled = uploadChunked)
- s3Client.putObject({
- it.bucket(bucketName)
- it.key(key)
- },
- RequestBody.fromFile(UPLOAD_FILE)
+ s3Client.putObject(
+ {
+ it.bucket(bucketName)
+ it.key(key)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
)
s3Client.headObject {
@@ -1534,21 +1634,23 @@ internal class GetPutDeleteObjectIT : S3TestBase() {
it.key(key)
}
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- }.use {
- assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(key)
+ }.use {
+ assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
+ }
}
private fun givenObjectV2WithRandomBytes(bucketName: String): String {
val key = randomName
- s3Client.putObject({
+ s3Client.putObject(
+ {
it.bucket(bucketName)
it.key(key)
},
- RequestBody.fromBytes(random5MBytes())
+ RequestBody.fromBytes(random5MBytes()),
)
return key
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/KotlinSDKIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/KotlinSDKIT.kt
index f567527e5..d793320ed 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/KotlinSDKIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/KotlinSDKIT.kt
@@ -31,25 +31,28 @@ internal class KotlinSDKIT : S3TestBase() {
private val s3Client = createS3ClientKotlin()
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "The unspecified location constraint is incompatible for the region specific endpoint this request was sent to.")
- fun createAndDeleteBucket(testInfo: TestInfo): Unit = runBlocking {
- val bucketName = bucketName(testInfo)
- s3Client.createBucket(CreateBucketRequest { bucket = bucketName })
-
- s3Client.waitUntilBucketExists(HeadBucketRequest { bucket = bucketName })
-
- // does not throw exception if bucket exists.
- s3Client.headBucket(HeadBucketRequest { bucket = bucketName })
-
- s3Client.deleteBucket(DeleteBucketRequest { bucket = bucketName })
- s3Client.waitUntilBucketNotExists(HeadBucketRequest { bucket = bucketName })
-
- // throws exception if bucket does not exist.
- assertThatThrownBy {
- runBlocking {
- s3Client.headBucket(HeadBucketRequest { bucket = bucketName })
- }
- }.isInstanceOf(S3Exception::class.java)
- }
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "The unspecified location constraint is incompatible for the region specific endpoint this request was sent to.",
+ )
+ fun createAndDeleteBucket(testInfo: TestInfo): Unit =
+ runBlocking {
+ val bucketName = bucketName(testInfo)
+ s3Client.createBucket(CreateBucketRequest { bucket = bucketName })
+
+ s3Client.waitUntilBucketExists(HeadBucketRequest { bucket = bucketName })
+
+ // does not throw exception if bucket exists.
+ s3Client.headBucket(HeadBucketRequest { bucket = bucketName })
+
+ s3Client.deleteBucket(DeleteBucketRequest { bucket = bucketName })
+ s3Client.waitUntilBucketNotExists(HeadBucketRequest { bucket = bucketName })
+
+ // throws exception if bucket does not exist.
+ assertThatThrownBy {
+ runBlocking {
+ s3Client.headBucket(HeadBucketRequest { bucket = bucketName })
+ }
+ }.isInstanceOf(S3Exception::class.java)
+ }
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/LegalHoldIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/LegalHoldIT.kt
index fb7c2afe9..7f030be08 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/LegalHoldIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/LegalHoldIT.kt
@@ -26,7 +26,6 @@ import software.amazon.awssdk.services.s3.model.ObjectLockLegalHoldStatus
import software.amazon.awssdk.services.s3.model.S3Exception
internal class LegalHoldIT : S3TestBase() {
-
private val s3Client: S3Client = createS3Client()
@Test
@@ -59,7 +58,7 @@ internal class LegalHoldIT : S3TestBase() {
it.bucket(bucketName)
it.key(sourceKey)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
assertThatThrownBy {
@@ -86,7 +85,7 @@ internal class LegalHoldIT : S3TestBase() {
it.bucket(bucketName)
it.key(sourceKey)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
s3Client.putObjectLegalHold {
@@ -97,12 +96,13 @@ internal class LegalHoldIT : S3TestBase() {
}
}
- s3Client.getObjectLegalHold {
- it.bucket(bucketName)
- it.key(sourceKey)
- }.also {
- assertThat(it.legalHold().status()).isEqualTo(ObjectLockLegalHoldStatus.ON)
- }
+ s3Client
+ .getObjectLegalHold {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ }.also {
+ assertThat(it.legalHold().status()).isEqualTo(ObjectLockLegalHoldStatus.ON)
+ }
s3Client.putObjectLegalHold {
it.bucket(bucketName)
@@ -112,11 +112,12 @@ internal class LegalHoldIT : S3TestBase() {
}
}
- s3Client.getObjectLegalHold {
- it.bucket(bucketName)
- it.key(sourceKey)
- }.also {
- assertThat(it.legalHold().status()).isEqualTo(ObjectLockLegalHoldStatus.OFF)
- }
+ s3Client
+ .getObjectLegalHold {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ }.also {
+ assertThat(it.legalHold().status()).isEqualTo(ObjectLockLegalHoldStatus.OFF)
+ }
}
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectVersionsIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectVersionsIT.kt
index 85b1d7473..1ae085f37 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectVersionsIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectVersionsIT.kt
@@ -38,30 +38,35 @@ internal class ListObjectVersionsIT : S3TestBase() {
}
}
- val version1 = s3Client.putObject(
- {
- it.bucket(bucketName)
- it.key("$UPLOAD_FILE_NAME-1")
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
-
- val version2 = s3Client.putObject(
- {
+ val version1 =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key("$UPLOAD_FILE_NAME-1")
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
+
+ val version2 =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key("$UPLOAD_FILE_NAME-1")
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
+
+ s3Client
+ .listObjectVersions {
it.bucket(bucketName)
- it.key("$UPLOAD_FILE_NAME-1")
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
-
- s3Client.listObjectVersions {
- it.bucket(bucketName)
- }.also {
- assertThat(it.versions())
- .hasSize(2)
- .extracting("versionId", "isLatest")
- .containsExactly(Tuple(version2, true), Tuple(version1, false))
- }
+ }.also {
+ assertThat(it.versions())
+ .hasSize(2)
+ .extracting("versionId", "isLatest")
+ .containsExactly(Tuple(version2, true), Tuple(version1, false))
+ }
}
@Test
@@ -75,30 +80,35 @@ internal class ListObjectVersionsIT : S3TestBase() {
}
}
- val version1 = s3Client.putObject(
- {
- it.bucket(bucketName)
- it.key("$UPLOAD_FILE_NAME-1")
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
-
- val version2 = s3Client.putObject(
- {
+ val version1 =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key("$UPLOAD_FILE_NAME-1")
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
+
+ val version2 =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key("$UPLOAD_FILE_NAME-2")
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
+
+ s3Client
+ .listObjectVersions {
it.bucket(bucketName)
- it.key("$UPLOAD_FILE_NAME-2")
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
-
- s3Client.listObjectVersions {
- it.bucket(bucketName)
- }.also {
- assertThat(it.versions())
- .hasSize(2)
- .extracting("versionId", "isLatest")
- .containsExactly(Tuple(version1, true), Tuple(version2, true))
- }
+ }.also {
+ assertThat(it.versions())
+ .hasSize(2)
+ .extracting("versionId", "isLatest")
+ .containsExactly(Tuple(version1, true), Tuple(version2, true))
+ }
}
@Test
@@ -111,7 +121,7 @@ internal class ListObjectVersionsIT : S3TestBase() {
it.bucket(bucketName)
it.key("$UPLOAD_FILE_NAME-1")
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
s3Client.putObject(
@@ -119,17 +129,18 @@ internal class ListObjectVersionsIT : S3TestBase() {
it.bucket(bucketName)
it.key("$UPLOAD_FILE_NAME-2")
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- s3Client.listObjectVersions {
- it.bucket(bucketName)
- }.also {
- assertThat(it.versions())
- .hasSize(2)
- .extracting("versionId", "isLatest")
- .containsExactly(Tuple("null", true), Tuple("null", true))
- }
+ s3Client
+ .listObjectVersions {
+ it.bucket(bucketName)
+ }.also {
+ assertThat(it.versions())
+ .hasSize(2)
+ .extracting("versionId", "isLatest")
+ .containsExactly(Tuple("null", true), Tuple("null", true))
+ }
}
@Test
@@ -143,46 +154,53 @@ internal class ListObjectVersionsIT : S3TestBase() {
}
}
- val version1 = s3Client.putObject(
- {
- it.bucket(bucketName)
- it.key("$UPLOAD_FILE_NAME-1")
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
-
- val version2 = s3Client.putObject(
- {
- it.bucket(bucketName)
- it.key("$UPLOAD_FILE_NAME-2")
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
-
- val version3 = s3Client.putObject(
- {
- it.bucket(bucketName)
- it.key("$UPLOAD_FILE_NAME-3")
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
+ val version1 =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key("$UPLOAD_FILE_NAME-1")
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
+
+ val version2 =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key("$UPLOAD_FILE_NAME-2")
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
+
+ val version3 =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key("$UPLOAD_FILE_NAME-3")
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
s3Client.deleteObject {
it.bucket(bucketName)
it.key("$UPLOAD_FILE_NAME-3")
}
- s3Client.listObjectVersions {
- it.bucket(bucketName)
- }.also {
- assertThat(it.versions())
- .hasSize(3)
- .extracting("versionId", "isLatest")
- .containsExactlyInAnyOrder(Tuple(version1, true), Tuple(version2, true), Tuple(version3, false))
- assertThat(it.deleteMarkers())
- .hasSize(1)
- .extracting("key")
- .containsExactly("$UPLOAD_FILE_NAME-3")
- }
+ s3Client
+ .listObjectVersions {
+ it.bucket(bucketName)
+ }.also {
+ assertThat(it.versions())
+ .hasSize(3)
+ .extracting("versionId", "isLatest")
+ .containsExactlyInAnyOrder(Tuple(version1, true), Tuple(version2, true), Tuple(version3, false))
+ assertThat(it.deleteMarkers())
+ .hasSize(1)
+ .extracting("key")
+ .containsExactly("$UPLOAD_FILE_NAME-3")
+ }
}
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectsIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectsIT.kt
index 2fd5d3120..766e1e7c6 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectsIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ListObjectsIT.kt
@@ -46,7 +46,7 @@ internal class ListObjectsIT : S3TestBase() {
it.key("$UPLOAD_FILE_NAME-1")
it.checksumAlgorithm(ChecksumAlgorithm.SHA256)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
s3Client.putObject(
@@ -54,20 +54,21 @@ internal class ListObjectsIT : S3TestBase() {
it.bucket(bucketName).key("$UPLOAD_FILE_NAME-2")
it.checksumAlgorithm(ChecksumAlgorithm.SHA256)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- s3Client.listObjectsV2 {
- it.bucket(bucketName)
- }.also {
- assertThat(it.contents())
- .hasSize(2)
- .extracting(S3Object::checksumAlgorithm)
- .containsOnly(
- Tuple(arrayListOf(ChecksumAlgorithm.SHA256)),
- Tuple(arrayListOf(ChecksumAlgorithm.SHA256))
- )
- }
+ s3Client
+ .listObjectsV2 {
+ it.bucket(bucketName)
+ }.also {
+ assertThat(it.contents())
+ .hasSize(2)
+ .extracting(S3Object::checksumAlgorithm)
+ .containsOnly(
+ Tuple(arrayListOf(ChecksumAlgorithm.SHA256)),
+ Tuple(arrayListOf(ChecksumAlgorithm.SHA256)),
+ )
+ }
}
@Test
@@ -80,7 +81,7 @@ internal class ListObjectsIT : S3TestBase() {
it.bucket(bucketName).key("$UPLOAD_FILE_NAME-1")
it.checksumAlgorithm(ChecksumAlgorithm.SHA256)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
s3Client.putObject(
@@ -88,20 +89,21 @@ internal class ListObjectsIT : S3TestBase() {
it.bucket(bucketName).key("$UPLOAD_FILE_NAME-2")
it.checksumAlgorithm(ChecksumAlgorithm.SHA256)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- s3Client.listObjects {
- it.bucket(bucketName)
- }.also {
- assertThat(it.contents())
- .hasSize(2)
- .extracting(S3Object::checksumAlgorithm)
- .containsOnly(
- Tuple(arrayListOf(ChecksumAlgorithm.SHA256)),
- Tuple(arrayListOf(ChecksumAlgorithm.SHA256))
- )
- }
+ s3Client
+ .listObjects {
+ it.bucket(bucketName)
+ }.also {
+ assertThat(it.contents())
+ .hasSize(2)
+ .extracting(S3Object::checksumAlgorithm)
+ .containsOnly(
+ Tuple(arrayListOf(ChecksumAlgorithm.SHA256)),
+ Tuple(arrayListOf(ChecksumAlgorithm.SHA256)),
+ )
+ }
}
/**
@@ -121,19 +123,20 @@ internal class ListObjectsIT : S3TestBase() {
it.bucket(bucketName)
it.key(key)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- s3Client.listObjects {
- it.bucket(bucketName)
- it.prefix(prefix)
- it.encodingType(EncodingType.URL)
- }.also { listing ->
- listing.contents().also {
- assertThat(it).hasSize(1)
- assertThat(it[0].key()).isEqualTo(key)
+ s3Client
+ .listObjects {
+ it.bucket(bucketName)
+ it.prefix(prefix)
+ it.encodingType(EncodingType.URL)
+ }.also { listing ->
+ listing.contents().also {
+ assertThat(it).hasSize(1)
+ assertThat(it[0].key()).isEqualTo(key)
+ }
}
- }
}
/**
@@ -153,19 +156,20 @@ internal class ListObjectsIT : S3TestBase() {
it.bucket(bucketName)
it.key(key)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- s3Client.listObjectsV2 {
- it.bucket(bucketName)
- it.prefix(prefix)
- it.encodingType(EncodingType.URL)
- }.also { listing ->
- listing.contents().also {
- assertThat(it).hasSize(1)
- assertThat(it[0].key()).isEqualTo(key)
+ s3Client
+ .listObjectsV2 {
+ it.bucket(bucketName)
+ it.prefix(prefix)
+ it.encodingType(EncodingType.URL)
+ }.also { listing ->
+ listing.contents().also {
+ assertThat(it).hasSize(1)
+ assertThat(it[0].key()).isEqualTo(key)
+ }
}
- }
}
/**
@@ -189,19 +193,20 @@ internal class ListObjectsIT : S3TestBase() {
it.bucket(bucketName)
it.key(key)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- s3Client.listObjects {
- it.bucket(bucketName)
- it.prefix(prefix)
- it.encodingType(EncodingType.URL)
- }.also { listing ->
- listing.contents().also {
- assertThat(it).hasSize(1)
- assertThat(it[0].key()).isEqualTo(key)
+ s3Client
+ .listObjects {
+ it.bucket(bucketName)
+ it.prefix(prefix)
+ it.encodingType(EncodingType.URL)
+ }.also { listing ->
+ listing.contents().also {
+ assertThat(it).hasSize(1)
+ assertThat(it[0].key()).isEqualTo(key)
+ }
}
- }
}
/**
@@ -225,115 +230,126 @@ internal class ListObjectsIT : S3TestBase() {
it.bucket(bucketName)
it.key(key)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- s3Client.listObjectsV2 {
- it.bucket(bucketName)
- it.prefix(prefix)
- it.encodingType(EncodingType.URL)
- }.also { listing ->
- listing.contents().also {
- assertThat(it).hasSize(1)
- assertThat(it[0].key()).isEqualTo(key)
+ s3Client
+ .listObjectsV2 {
+ it.bucket(bucketName)
+ it.prefix(prefix)
+ it.encodingType(EncodingType.URL)
+ }.also { listing ->
+ listing.contents().also {
+ assertThat(it).hasSize(1)
+ assertThat(it[0].key()).isEqualTo(key)
+ }
}
- }
}
@ParameterizedTest
@MethodSource("data")
@S3VerifiedSuccess(year = 2025)
- fun listV1(parameters: Param, testInfo: TestInfo) {
+ fun listV1(
+ parameters: Param,
+ testInfo: TestInfo,
+ ) {
val bucketName = givenBucket(testInfo)
- for(key in ALL_OBJECTS) {
+ for (key in ALL_OBJECTS) {
s3Client.putObject(
{
it.bucket(bucketName)
it.key(key)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
}
// listV2 automatically decodes the keys so the expected keys have to be decoded
val expectedDecodedKeys = parameters.decodedKeys()
- s3Client.listObjects {
- it.bucket(bucketName)
- it.prefix(parameters.prefix)
- it.delimiter(parameters.delimiter)
- it.marker(parameters.startAfter)
- it.encodingType(parameters.expectedEncoding)
- }.also { listing ->
- LOG.info("list V1, prefix='{}', delimiter='{}', startAfter='{}': Objects: {} Prefixes: {}",
- parameters.prefix,
- parameters.delimiter,
- parameters.startAfter,
- listing.contents().joinToString("\n ") { s: S3Object -> SdkHttpUtils.urlDecode(s.key()) },
- listing.commonPrefixes().joinToString("\n ", transform = CommonPrefix::prefix)
- )
- listing.commonPrefixes().also {
- assertThat(it.map { s: CommonPrefix -> SdkHttpUtils.urlDecode(s.prefix()) })
- .containsExactlyInAnyOrder(*parameters.expectedPrefixes)
- }
- listing.contents().also {
- assertThat(it.map { s: S3Object -> SdkHttpUtils.urlDecode(s.key()) }).isEqualTo(listOf(*expectedDecodedKeys))
- }
- if (parameters.expectedEncoding != null) {
- assertThat(listing.encodingType().toString()).isEqualTo(parameters.expectedEncoding)
- } else {
- assertThat(listing.encodingType()).isNull()
+ s3Client
+ .listObjects {
+ it.bucket(bucketName)
+ it.prefix(parameters.prefix)
+ it.delimiter(parameters.delimiter)
+ it.marker(parameters.startAfter)
+ it.encodingType(parameters.expectedEncoding)
+ }.also { listing ->
+ LOG.info(
+ "list V1, prefix='{}', delimiter='{}', startAfter='{}': Objects: {} Prefixes: {}",
+ parameters.prefix,
+ parameters.delimiter,
+ parameters.startAfter,
+ listing.contents().joinToString("\n ") { s: S3Object -> SdkHttpUtils.urlDecode(s.key()) },
+ listing.commonPrefixes().joinToString("\n ", transform = CommonPrefix::prefix),
+ )
+ listing.commonPrefixes().also {
+ assertThat(it.map { s: CommonPrefix -> SdkHttpUtils.urlDecode(s.prefix()) })
+ .containsExactlyInAnyOrder(*parameters.expectedPrefixes)
+ }
+ listing.contents().also {
+ assertThat(it.map { s: S3Object -> SdkHttpUtils.urlDecode(s.key()) }).isEqualTo(listOf(*expectedDecodedKeys))
+ }
+ if (parameters.expectedEncoding != null) {
+ assertThat(listing.encodingType().toString()).isEqualTo(parameters.expectedEncoding)
+ } else {
+ assertThat(listing.encodingType()).isNull()
+ }
}
- }
}
@ParameterizedTest
@MethodSource("data")
@S3VerifiedSuccess(year = 2025)
- fun listV2(parameters: Param, testInfo: TestInfo) {
+ fun listV2(
+ parameters: Param,
+ testInfo: TestInfo,
+ ) {
val bucketName = givenBucket(testInfo)
- for(key in ALL_OBJECTS) {
+ for (key in ALL_OBJECTS) {
s3Client.putObject(
{
it.bucket(bucketName)
it.key(key)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
}
// listV2 automatically decodes the keys so the expected keys have to be decoded
val expectedDecodedKeys = parameters.decodedKeys()
- s3Client.listObjectsV2 {
- it.bucket(bucketName)
- it.prefix(parameters.prefix)
- it.delimiter(parameters.delimiter)
- it.startAfter(parameters.startAfter)
- it.encodingType(parameters.expectedEncoding)
- }.also { listing ->
- LOG.info("list V2, prefix='{}', delimiter='{}', startAfter='{}': Objects: {} Prefixes: {}",
- parameters.prefix,
- parameters.delimiter,
- parameters.startAfter,
- listing.contents().joinToString("\n ") { s: S3Object -> SdkHttpUtils.urlDecode(s.key()) },
- listing.commonPrefixes().joinToString("\n ", transform = CommonPrefix::prefix)
- )
- listing.commonPrefixes().also {
- assertThat(it.map { s: CommonPrefix -> SdkHttpUtils.urlDecode(s.prefix()) })
- .containsExactlyInAnyOrder(*parameters.expectedPrefixes)
- }
- listing.contents().also {
- assertThat(it.map { s: S3Object -> SdkHttpUtils.urlDecode(s.key()) }).isEqualTo(listOf(*expectedDecodedKeys))
- }
- if (parameters.expectedEncoding != null) {
- assertThat(listing.encodingType().toString()).isEqualTo(parameters.expectedEncoding)
- } else {
- assertThat(listing.encodingType()).isNull()
+ s3Client
+ .listObjectsV2 {
+ it.bucket(bucketName)
+ it.prefix(parameters.prefix)
+ it.delimiter(parameters.delimiter)
+ it.startAfter(parameters.startAfter)
+ it.encodingType(parameters.expectedEncoding)
+ }.also { listing ->
+ LOG.info(
+ "list V2, prefix='{}', delimiter='{}', startAfter='{}': Objects: {} Prefixes: {}",
+ parameters.prefix,
+ parameters.delimiter,
+ parameters.startAfter,
+ listing.contents().joinToString("\n ") { s: S3Object -> SdkHttpUtils.urlDecode(s.key()) },
+ listing.commonPrefixes().joinToString("\n ", transform = CommonPrefix::prefix),
+ )
+ listing.commonPrefixes().also {
+ assertThat(it.map { s: CommonPrefix -> SdkHttpUtils.urlDecode(s.prefix()) })
+ .containsExactlyInAnyOrder(*parameters.expectedPrefixes)
+ }
+ listing.contents().also {
+ assertThat(it.map { s: S3Object -> SdkHttpUtils.urlDecode(s.key()) }).isEqualTo(listOf(*expectedDecodedKeys))
+ }
+ if (parameters.expectedEncoding != null) {
+ assertThat(listing.encodingType().toString()).isEqualTo(parameters.expectedEncoding)
+ } else {
+ assertThat(listing.encodingType()).isNull()
+ }
}
- }
}
@Test
@@ -343,42 +359,47 @@ internal class ListObjectsIT : S3TestBase() {
val maxKeys = 10
val listedObjects = mutableListOf()
- val continuationToken1 = s3Client.listObjectsV2 {
- it.bucket(bucketName)
- it.maxKeys(maxKeys)
- }.let { listing ->
- assertThat(listing.contents().size).isEqualTo(maxKeys)
- assertThat(listing.isTruncated).isTrue
- assertThat(listing.maxKeys()).isEqualTo(maxKeys)
- assertThat(listing.nextContinuationToken()).isNotNull
- listedObjects.addAll(listing.contents().map(S3Object::key))
- listing.nextContinuationToken()
- }
-
- val continuationToken2 = s3Client.listObjectsV2 {
- it.bucket(bucketName)
- it.maxKeys(maxKeys)
- it.continuationToken(continuationToken1)
- }.let { listing ->
- assertThat(listing.contents().size).isEqualTo(maxKeys)
- assertThat(listing.isTruncated).isTrue
- assertThat(listing.maxKeys()).isEqualTo(maxKeys)
- assertThat(listing.nextContinuationToken()).isNotNull
- listedObjects.addAll(listing.contents().map(S3Object::key))
- listing.nextContinuationToken()
- }
-
- s3Client.listObjectsV2 {
- it.bucket(bucketName)
- it.maxKeys(maxKeys)
- it.continuationToken(continuationToken2)
- }.also { listing ->
- assertThat(listing.contents().size).isEqualTo(maxKeys)
- assertThat(listing.isTruncated).isFalse
- assertThat(listing.maxKeys()).isEqualTo(maxKeys)
- assertThat(listing.nextContinuationToken()).isNull()
- listedObjects.addAll(listing.contents().map(S3Object::key))
- }
+ val continuationToken1 =
+ s3Client
+ .listObjectsV2 {
+ it.bucket(bucketName)
+ it.maxKeys(maxKeys)
+ }.let { listing ->
+ assertThat(listing.contents().size).isEqualTo(maxKeys)
+ assertThat(listing.isTruncated).isTrue
+ assertThat(listing.maxKeys()).isEqualTo(maxKeys)
+ assertThat(listing.nextContinuationToken()).isNotNull
+ listedObjects.addAll(listing.contents().map(S3Object::key))
+ listing.nextContinuationToken()
+ }
+
+ val continuationToken2 =
+ s3Client
+ .listObjectsV2 {
+ it.bucket(bucketName)
+ it.maxKeys(maxKeys)
+ it.continuationToken(continuationToken1)
+ }.let { listing ->
+ assertThat(listing.contents().size).isEqualTo(maxKeys)
+ assertThat(listing.isTruncated).isTrue
+ assertThat(listing.maxKeys()).isEqualTo(maxKeys)
+ assertThat(listing.nextContinuationToken()).isNotNull
+ listedObjects.addAll(listing.contents().map(S3Object::key))
+ listing.nextContinuationToken()
+ }
+
+ s3Client
+ .listObjectsV2 {
+ it.bucket(bucketName)
+ it.maxKeys(maxKeys)
+ it.continuationToken(continuationToken2)
+ }.also { listing ->
+ assertThat(listing.contents().size).isEqualTo(maxKeys)
+ assertThat(listing.isTruncated).isFalse
+ assertThat(listing.maxKeys()).isEqualTo(maxKeys)
+ assertThat(listing.nextContinuationToken()).isNull()
+ listedObjects.addAll(listing.contents().map(S3Object::key))
+ }
assertThat(listedObjects).hasSize(30)
assertThat(listedObjects).hasSameElementsAs(keys)
@@ -388,71 +409,76 @@ internal class ListObjectsIT : S3TestBase() {
@S3VerifiedSuccess(year = 2025)
fun returnsAllObjectsIfMaxKeysIsDefault(testInfo: TestInfo) {
val (bucketName, _) = givenBucketAndObjects(testInfo, 30)
- s3Client.listObjectsV2 {
- it.bucket(bucketName)
- }.also { listing ->
- assertThat(listing.contents().size).isEqualTo(30)
- assertThat(listing.isTruncated).isFalse
- assertThat(listing.maxKeys()).isEqualTo(1000)
- }
+ s3Client
+ .listObjectsV2 {
+ it.bucket(bucketName)
+ }.also { listing ->
+ assertThat(listing.contents().size).isEqualTo(30)
+ assertThat(listing.isTruncated).isFalse
+ assertThat(listing.maxKeys()).isEqualTo(1000)
+ }
}
@Test
@S3VerifiedSuccess(year = 2025)
fun returnsAllObjectsIfMaxKeysEqualToAmountOfObjects(testInfo: TestInfo) {
val (bucketName, _) = givenBucketAndObjects(testInfo, 30)
- s3Client.listObjectsV2 {
- it.bucket(bucketName)
- it.maxKeys(30)
- }.also { listing ->
- assertThat(listing.contents().size).isEqualTo(30)
- assertThat(listing.isTruncated).isFalse
- assertThat(listing.maxKeys()).isEqualTo(30)
- }
+ s3Client
+ .listObjectsV2 {
+ it.bucket(bucketName)
+ it.maxKeys(30)
+ }.also { listing ->
+ assertThat(listing.contents().size).isEqualTo(30)
+ assertThat(listing.isTruncated).isFalse
+ assertThat(listing.maxKeys()).isEqualTo(30)
+ }
}
@Test
@S3VerifiedSuccess(year = 2025)
fun returnsAllObjectsIfMaxKeysMoreThanAmountOfObjects(testInfo: TestInfo) {
val (bucketName, _) = givenBucketAndObjects(testInfo, 30)
- s3Client.listObjectsV2 {
- it.bucket(bucketName)
- it.maxKeys(400)
- }.also { listing ->
- assertThat(listing.contents().size).isEqualTo(30)
- assertThat(listing.isTruncated).isFalse
- assertThat(listing.maxKeys()).isEqualTo(400)
- }
+ s3Client
+ .listObjectsV2 {
+ it.bucket(bucketName)
+ it.maxKeys(400)
+ }.also { listing ->
+ assertThat(listing.contents().size).isEqualTo(30)
+ assertThat(listing.isTruncated).isFalse
+ assertThat(listing.maxKeys()).isEqualTo(400)
+ }
}
@Test
@S3VerifiedSuccess(year = 2025)
fun returnsEmptyListIfMaxKeysIsZero(testInfo: TestInfo) {
val (bucketName, _) = givenBucketAndObjects(testInfo, 30)
- s3Client.listObjects {
- it.bucket(bucketName)
- it.maxKeys(0)
- }.also { listing ->
- assertThat(listing.contents()).isEmpty()
- assertThat(listing.isTruncated).isFalse
- assertThat(listing.maxKeys()).isEqualTo(0)
- assertThat(listing.nextMarker()).isNull()
- }
+ s3Client
+ .listObjects {
+ it.bucket(bucketName)
+ it.maxKeys(0)
+ }.also { listing ->
+ assertThat(listing.contents()).isEmpty()
+ assertThat(listing.isTruncated).isFalse
+ assertThat(listing.maxKeys()).isEqualTo(0)
+ assertThat(listing.nextMarker()).isNull()
+ }
}
@Test
@S3VerifiedSuccess(year = 2025)
fun returnsEmptyListIfMaxKeysIsZeroV2(testInfo: TestInfo) {
val (bucketName, _) = givenBucketAndObjects(testInfo, 30)
- s3Client.listObjectsV2 {
- it.bucket(bucketName)
- it.maxKeys(0)
- }.also { listing ->
- assertThat(listing.contents()).isEmpty()
- assertThat(listing.isTruncated).isFalse
- assertThat(listing.maxKeys()).isEqualTo(0)
- assertThat(listing.nextContinuationToken()).isNull()
- }
+ s3Client
+ .listObjectsV2 {
+ it.bucket(bucketName)
+ it.maxKeys(0)
+ }.also { listing ->
+ assertThat(listing.contents()).isEmpty()
+ assertThat(listing.isTruncated).isFalse
+ assertThat(listing.maxKeys()).isEqualTo(0)
+ assertThat(listing.nextContinuationToken()).isNull()
+ }
}
@Test
@@ -463,62 +489,73 @@ internal class ListObjectsIT : S3TestBase() {
it.bucket(randomName)
it.prefix(UPLOAD_FILE_NAME)
}
- }
- .isInstanceOf(NoSuchBucketException::class.java)
+ }.isInstanceOf(NoSuchBucketException::class.java)
.hasMessageContaining(NO_SUCH_BUCKET)
}
companion object {
private const val NO_SUCH_BUCKET = "The specified bucket does not exist"
- private val ALL_OBJECTS = arrayOf(
- "3330/0", "33309/0", "a",
- "b", "b/1", "b/1/1", "b/1/2", "b/2",
- "c/1", "c/1/1",
- "d:1", "d:1:1",
- "eor.txt", "foo/eor.txt"
- )
+ private val ALL_OBJECTS =
+ arrayOf(
+ "3330/0",
+ "33309/0",
+ "a",
+ "b",
+ "b/1",
+ "b/1/1",
+ "b/1/2",
+ "b/2",
+ "c/1",
+ "c/1/1",
+ "d:1",
+ "d:1:1",
+ "eor.txt",
+ "foo/eor.txt",
+ )
- private fun param(prefix: String?, delimiter: String?, startAfter: String?): Param {
- return Param(prefix, delimiter, startAfter)
- }
+ private fun param(
+ prefix: String?,
+ delimiter: String?,
+ startAfter: String?,
+ ): Param = Param(prefix, delimiter, startAfter)
/**
* Parameter factory.
*/
@JvmStatic
- fun data(): Iterable {
- return listOf( //
- param(null, null, null).keys(*ALL_OBJECTS), //
- param("", null, null).keys(*ALL_OBJECTS), //
- param(null, "", null).keys(*ALL_OBJECTS), //
- param(null, "/", null).keys("a", "b", "d:1", "d:1:1", "eor.txt")
+ fun data(): Iterable =
+ listOf( //
+ param(null, null, null).keys(*ALL_OBJECTS), //
+ param("", null, null).keys(*ALL_OBJECTS), //
+ param(null, "", null).keys(*ALL_OBJECTS), //
+ param(null, "/", null)
+ .keys("a", "b", "d:1", "d:1:1", "eor.txt")
.prefixes("3330/", "foo/", "c/", "b/", "33309/"),
- param("", "", null).keys(*ALL_OBJECTS), //
- param("/", null, null), //
- param("b", null, null).keys("b", "b/1", "b/1/1", "b/1/2", "b/2"), //
- param("b/", null, null).keys("b/1", "b/1/1", "b/1/2", "b/2"), //
- param("b", "", null).keys("b", "b/1", "b/1/1", "b/1/2", "b/2"), //
- param("b", "/", null).keys("b").prefixes("b/"), //
- param("b/", "/", null).keys("b/1", "b/2").prefixes("b/1/"), //
- param("b/1", "/", null).keys("b/1").prefixes("b/1/"), //
- param("b/1/", "/", null).keys("b/1/1", "b/1/2"), //
- param("c", "/", null).prefixes("c/"), //
- param("c/", "/", null).keys("c/1").prefixes("c/1/"), //
- param("eor", "/", null).keys("eor.txt"), //
+ param("", "", null).keys(*ALL_OBJECTS), //
+ param("/", null, null), //
+ param("b", null, null).keys("b", "b/1", "b/1/1", "b/1/2", "b/2"), //
+ param("b/", null, null).keys("b/1", "b/1/1", "b/1/2", "b/2"), //
+ param("b", "", null).keys("b", "b/1", "b/1/1", "b/1/2", "b/2"), //
+ param("b", "/", null).keys("b").prefixes("b/"), //
+ param("b/", "/", null).keys("b/1", "b/2").prefixes("b/1/"), //
+ param("b/1", "/", null).keys("b/1").prefixes("b/1/"), //
+ param("b/1/", "/", null).keys("b/1/1", "b/1/2"), //
+ param("c", "/", null).prefixes("c/"), //
+ param("c/", "/", null).keys("c/1").prefixes("c/1/"), //
+ param("eor", "/", null).keys("eor.txt"), //
// start after existing key
- param("b", null, "b/1/1").keys("b/1/2", "b/2"), //
+ param("b", null, "b/1/1").keys("b/1/2", "b/2"), //
// start after non-existing key
param("b", null, "b/0").keys("b/1", "b/1/1", "b/1/2", "b/2"),
param("3330/", null, null).keys("3330/0"),
param(null, null, null).encodedKeys(*ALL_OBJECTS),
- param("b/1", "/", null).encodedKeys("b/1").prefixes("b/1/")
+ param("b/1", "/", null).encodedKeys("b/1").prefixes("b/1/"),
)
- }
class Param(
val prefix: String?,
val delimiter: String?,
- val startAfter: String?
+ val startAfter: String?,
) {
var expectedKeys: Array = arrayOfNulls(0)
var expectedPrefixes: Array = arrayOfNulls(0)
@@ -530,27 +567,25 @@ internal class ListObjectsIT : S3TestBase() {
}
fun encodedKeys(vararg expectedKeys: String): Param {
- this.expectedKeys = arrayOf(*expectedKeys)
- .map { toEncode: String? -> SdkHttpUtils.urlEncodeIgnoreSlashes(toEncode) }
- .toTypedArray()
+ this.expectedKeys =
+ arrayOf(*expectedKeys)
+ .map { toEncode: String? -> SdkHttpUtils.urlEncodeIgnoreSlashes(toEncode) }
+ .toTypedArray()
expectedEncoding = "url"
return this
}
- fun decodedKeys(): Array {
- return arrayOf(*expectedKeys)
+ fun decodedKeys(): Array =
+ arrayOf(*expectedKeys)
.map { toDecode: String? -> SdkHttpUtils.urlDecode(toDecode) }
.toTypedArray()
- }
fun prefixes(vararg expectedPrefixes: String?): Param {
this.expectedPrefixes = arrayOf(*expectedPrefixes)
return this
}
- override fun toString(): String {
- return "prefix=$prefix, delimiter=$delimiter"
- }
+ override fun toString(): String = "prefix=$prefix, delimiter=$delimiter"
}
}
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultipartIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultipartIT.kt
index 924859940..ead892cb3 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultipartIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/MultipartIT.kt
@@ -55,7 +55,6 @@ import java.time.Instant
import java.util.UUID
import java.util.concurrent.CompletionException
-
internal class MultipartIT : S3TestBase() {
private val s3Client: S3Client = createS3Client()
private val s3AsyncClient: S3AsyncClient = createS3AsyncClient()
@@ -66,16 +65,18 @@ internal class MultipartIT : S3TestBase() {
@S3VerifiedSuccess(year = 2025)
fun testMultipartUpload_asyncClient(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
- s3CrtAsyncClient.putObject(
- {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
- },
- AsyncRequestBody.fromFile(UPLOAD_FILE)
- ).join().also {
- assertThat(it.checksumCRC32()).isEqualTo(DigestUtil.checksumFor(UPLOAD_FILE_PATH, DefaultChecksumAlgorithm.CRC32))
- }
+ s3CrtAsyncClient
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
+ },
+ AsyncRequestBody.fromFile(UPLOAD_FILE),
+ ).join()
+ .also {
+ assertThat(it.checksumCRC32()).isEqualTo(DigestUtil.checksumFor(UPLOAD_FILE_PATH, DefaultChecksumAlgorithm.CRC32))
+ }
s3AsyncClient.waiter().waitUntilObjectExists {
it.bucket(bucketName)
@@ -83,17 +84,19 @@ internal class MultipartIT : S3TestBase() {
}
val uploadDigest = hexDigest(UPLOAD_FILE)
- val downloadedDigest = s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.use { response ->
- Files.newTemporaryFile().let {
- response.transferTo(newOutputStream(it.toPath()))
- assertThat(it).hasSize(UPLOAD_FILE_LENGTH)
- assertThat(it).hasSameBinaryContentAs(UPLOAD_FILE)
- hexDigest(it)
- }
- }
+ val downloadedDigest =
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }.use { response ->
+ Files.newTemporaryFile().let {
+ response.transferTo(newOutputStream(it.toPath()))
+ assertThat(it).hasSize(UPLOAD_FILE_LENGTH)
+ assertThat(it).hasSameBinaryContentAs(UPLOAD_FILE)
+ hexDigest(it)
+ }
+ }
assertThat(uploadDigest).isEqualTo(downloadedDigest)
}
@@ -108,27 +111,30 @@ internal class MultipartIT : S3TestBase() {
it.key(UPLOAD_FILE_NAME)
}
it.source(UPLOAD_FILE)
- }.completionFuture().join()
-
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.use {
- assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
- }
+ }.completionFuture()
+ .join()
- val downloadFile = Files.newTemporaryFile()
- transferManager.downloadFile {
- it.getObjectRequest {
+ s3Client
+ .getObject {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
+ }.use {
+ assertThat(it.response().contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
}
- it.destination(downloadFile)
- }.also { download ->
- download.completionFuture().join().response().also {
- assertThat(it.contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
+
+ val downloadFile = Files.newTemporaryFile()
+ transferManager
+ .downloadFile {
+ it.getObjectRequest {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }
+ it.destination(downloadFile)
+ }.also { download ->
+ download.completionFuture().join().response().also {
+ assertThat(it.contentLength()).isEqualTo(UPLOAD_FILE_LENGTH)
+ }
}
- }
assertThat(downloadFile.length()).isEqualTo(UPLOAD_FILE_LENGTH)
assertThat(downloadFile).hasSameBinaryContentAs(UPLOAD_FILE)
}
@@ -141,23 +147,25 @@ internal class MultipartIT : S3TestBase() {
fun testMultipartUpload_withUserMetadata(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
val objectMetadata = mapOf("key" to "value")
- val initiateMultipartUploadResult = s3Client
- .createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.metadata(objectMetadata)
- }
+ val initiateMultipartUploadResult =
+ s3Client
+ .createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.metadata(objectMetadata)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
- val uploadPartResult = s3Client.uploadPart(
- {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(uploadId)
- it.partNumber(1)
- it.contentLength(UPLOAD_FILE_LENGTH)
- },
- RequestBody.fromFile(UPLOAD_FILE),
- )
+ val uploadPartResult =
+ s3Client.uploadPart(
+ {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(uploadId)
+ it.partNumber(1)
+ it.contentLength(UPLOAD_FILE_LENGTH)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ )
s3Client.completeMultipartUpload {
it.bucket(initiateMultipartUploadResult.bucket())
@@ -171,12 +179,13 @@ internal class MultipartIT : S3TestBase() {
}
}
- s3Client.getObject {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- }.use {
- assertThat(it.response().metadata()).isEqualTo(objectMetadata)
- }
+ s3Client
+ .getObject {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ }.use {
+ assertThat(it.response().metadata()).isEqualTo(objectMetadata)
+ }
}
/**
@@ -187,44 +196,48 @@ internal class MultipartIT : S3TestBase() {
fun testMultipartUpload(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
val objectMetadata = mapOf(Pair("key", "value"))
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.metadata(objectMetadata)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.metadata(objectMetadata)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
// upload part 1, >5MB
val randomBytes = randomBytes()
val etag1 = uploadPart(bucketName, UPLOAD_FILE_NAME, uploadId, 1, randomBytes)
// upload part 2, <5MB
- val etag2 = s3Client.uploadPart(
- {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(uploadId)
- it.partNumber(2)
- it.contentLength(UPLOAD_FILE_LENGTH)
- },
- RequestBody.fromFile(UPLOAD_FILE),
- ).eTag()
-
- val completeMultipartUpload = s3Client.completeMultipartUpload {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(initiateMultipartUploadResult.uploadId())
- it.multipartUpload {
- it.parts(
- {
- it.eTag(etag1)
- it.partNumber(1)
- },
+ val etag2 =
+ s3Client
+ .uploadPart(
{
- it.eTag(etag2)
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(uploadId)
it.partNumber(2)
- }
- )
+ it.contentLength(UPLOAD_FILE_LENGTH)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).eTag()
+
+ val completeMultipartUpload =
+ s3Client.completeMultipartUpload {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(initiateMultipartUploadResult.uploadId())
+ it.multipartUpload {
+ it.parts(
+ {
+ it.eTag(etag1)
+ it.partNumber(1)
+ },
+ {
+ it.eTag(etag2)
+ it.partNumber(2)
+ },
+ )
+ }
}
- }
val uploadFileBytes = readStreamIntoByteArray(UPLOAD_FILE.inputStream())
@@ -233,19 +246,20 @@ internal class MultipartIT : S3TestBase() {
assertThat(completeMultipartUpload.eTag()).isEqualTo("\"${DigestUtils.md5Hex(it)}-2\"")
}
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.use {
- // verify content size
- assertThat(it.response().contentLength()).isEqualTo(randomBytes.size.toLong() + uploadFileBytes.size.toLong())
- // verify contents
- assertThat(readStreamIntoByteArray(it.buffered())).isEqualTo(concatByteArrays(randomBytes, uploadFileBytes))
- assertThat(it.response().metadata()).isEqualTo(objectMetadata)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }.use {
+ // verify content size
+ assertThat(it.response().contentLength()).isEqualTo(randomBytes.size.toLong() + uploadFileBytes.size.toLong())
+ // verify contents
+ assertThat(readStreamIntoByteArray(it.buffered())).isEqualTo(concatByteArrays(randomBytes, uploadFileBytes))
+ assertThat(it.response().metadata()).isEqualTo(objectMetadata)
+ }
assertThat(completeMultipartUpload.location())
- .isEqualTo("${serviceEndpoint}/$bucketName/${UriUtils.encode(UPLOAD_FILE_NAME, StandardCharsets.UTF_8)}")
+ .isEqualTo("$serviceEndpoint/$bucketName/${UriUtils.encode(UPLOAD_FILE_NAME, StandardCharsets.UTF_8)}")
}
@Test
@@ -253,17 +267,21 @@ internal class MultipartIT : S3TestBase() {
fun `multipartupload send checksum in create and complete`(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
val uploadFile = File(TEST_IMAGE_TIFF)
- //construct UPLOAD_FILE >5MB
- val tempFile = Files.newTemporaryFile().also { file ->
- (readStreamIntoByteArray(uploadFile.inputStream()) +
- readStreamIntoByteArray(uploadFile.inputStream()) +
- readStreamIntoByteArray(uploadFile.inputStream()))
- .inputStream().use {
- it.copyTo(file.outputStream())
- }
- }
+ // construct UPLOAD_FILE >5MB
+ val tempFile =
+ Files.newTemporaryFile().also { file ->
+ (
+ readStreamIntoByteArray(uploadFile.inputStream()) +
+ readStreamIntoByteArray(uploadFile.inputStream()) +
+ readStreamIntoByteArray(uploadFile.inputStream())
+ ).inputStream()
+ .use {
+ it.copyTo(file.outputStream())
+ }
+ }
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
it.bucket(bucketName)
it.key(TEST_IMAGE_TIFF)
it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
@@ -273,30 +291,33 @@ internal class MultipartIT : S3TestBase() {
assertThat(initiateMultipartUploadResult.checksumType()).isEqualTo(ChecksumType.COMPOSITE)
val uploadId = initiateMultipartUploadResult.uploadId()
// upload part 1, <5MB
- val partResponse1 = s3Client.uploadPart(
- {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(uploadId)
- it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
- it.partNumber(1)
- it.contentLength(tempFile.length())
- },
- RequestBody.fromFile(tempFile),
- )
+ val partResponse1 =
+ s3Client.uploadPart(
+ {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(uploadId)
+ it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
+ it.partNumber(1)
+ it.contentLength(tempFile.length())
+ },
+ RequestBody.fromFile(tempFile),
+ )
val etag1 = partResponse1.eTag()
val checksum1 = partResponse1.checksumCRC32()
// upload part 2, <5MB
- val partResponse2 = s3Client.uploadPart({
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(uploadId)
- it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
- it.partNumber(2)
- it.contentLength(uploadFile.length())
- },
- RequestBody.fromFile(uploadFile),
- )
+ val partResponse2 =
+ s3Client.uploadPart(
+ {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(uploadId)
+ it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
+ it.partNumber(2)
+ it.contentLength(uploadFile.length())
+ },
+ RequestBody.fromFile(uploadFile),
+ )
val etag2 = partResponse2.eTag()
val checksum2 = partResponse2.checksumCRC32()
val localChecksum1 = DigestUtil.checksumFor(tempFile.toPath(), DefaultChecksumAlgorithm.CRC32)
@@ -304,69 +325,72 @@ internal class MultipartIT : S3TestBase() {
val localChecksum2 = DigestUtil.checksumFor(uploadFile.toPath(), DefaultChecksumAlgorithm.CRC32)
assertThat(checksum2).isEqualTo(localChecksum2)
- val completeMultipartUpload = s3Client.completeMultipartUpload {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(initiateMultipartUploadResult.uploadId())
- it.multipartUpload {
- it.parts(
- {
- it.eTag(etag1)
- it.partNumber(1)
- it.checksumCRC32(checksum1)
- },
- {
- it.eTag(etag2)
- it.partNumber(2)
- it.checksumCRC32(checksum2)
- }
- )
+ val completeMultipartUpload =
+ s3Client.completeMultipartUpload {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(initiateMultipartUploadResult.uploadId())
+ it.multipartUpload {
+ it.parts(
+ {
+ it.eTag(etag1)
+ it.partNumber(1)
+ it.checksumCRC32(checksum1)
+ },
+ {
+ it.eTag(etag2)
+ it.partNumber(2)
+ it.checksumCRC32(checksum2)
+ },
+ )
+ }
}
- }
(DigestUtils.md5(tempFile.readBytes()) + DigestUtils.md5(readStreamIntoByteArray(uploadFile.inputStream()))).also {
// verify special etag
assertThat(completeMultipartUpload.eTag()).isEqualTo("\"${DigestUtils.md5Hex(it)}-2\"")
}
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(TEST_IMAGE_TIFF)
- it.checksumMode(ChecksumMode.ENABLED)
- }.use {
- // verify content size
- assertThat(it.response().contentLength()).isEqualTo(tempFile.length() + uploadFile.length())
- // verify contents
- assertThat(readStreamIntoByteArray(it.buffered())).isEqualTo(tempFile.readBytes() + uploadFile.readBytes())
- assertThat(it.response().checksumCRC32()).isEqualTo("oGk6qg==-2")
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(TEST_IMAGE_TIFF)
+ it.checksumMode(ChecksumMode.ENABLED)
+ }.use {
+ // verify content size
+ assertThat(it.response().contentLength()).isEqualTo(tempFile.length() + uploadFile.length())
+ // verify contents
+ assertThat(readStreamIntoByteArray(it.buffered())).isEqualTo(tempFile.readBytes() + uploadFile.readBytes())
+ assertThat(it.response().checksumCRC32()).isEqualTo("oGk6qg==-2")
+ }
assertThat(completeMultipartUpload.location())
- .isEqualTo("${serviceEndpoint}/$bucketName/${UriUtils.encode(TEST_IMAGE_TIFF, StandardCharsets.UTF_8)}")
+ .isEqualTo("$serviceEndpoint/$bucketName/${UriUtils.encode(TEST_IMAGE_TIFF, StandardCharsets.UTF_8)}")
- //verify completeMultipartUpload is idempotent
- val completeMultipartUpload1 = s3Client.completeMultipartUpload {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(initiateMultipartUploadResult.uploadId())
- it.multipartUpload {
- it.parts(
- {
- it.eTag(etag1)
- it.partNumber(1)
- it.checksumCRC32(checksum1)
- },
- {
- it.eTag(etag2)
- it.partNumber(2)
- it.checksumCRC32(checksum2)
- }
- )
+ // verify completeMultipartUpload is idempotent
+ val completeMultipartUpload1 =
+ s3Client.completeMultipartUpload {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(initiateMultipartUploadResult.uploadId())
+ it.multipartUpload {
+ it.parts(
+ {
+ it.eTag(etag1)
+ it.partNumber(1)
+ it.checksumCRC32(checksum1)
+ },
+ {
+ it.eTag(etag2)
+ it.partNumber(2)
+ it.checksumCRC32(checksum2)
+ },
+ )
+ }
}
- }
- //for unknown reasons, a simple equals call fails on both objects.
- //assertThat(completeMultipartUpload).isEqualTo(completeMultipartUpload1)
+ // for unknown reasons, a simple equals call fails on both objects.
+ // assertThat(completeMultipartUpload).isEqualTo(completeMultipartUpload1)
assertThat(completeMultipartUpload.location()).isEqualTo(completeMultipartUpload1.location())
assertThat(completeMultipartUpload.bucket()).isEqualTo(completeMultipartUpload1.bucket())
assertThat(completeMultipartUpload.key()).isEqualTo(completeMultipartUpload1.key())
@@ -380,47 +404,54 @@ internal class MultipartIT : S3TestBase() {
fun `multipartupload send checksum in create only`(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
val uploadFile = File(TEST_IMAGE_TIFF)
- //construct uploadfile >5MB
- val tempFile = Files.newTemporaryFile().also { file ->
- (readStreamIntoByteArray(uploadFile.inputStream()) +
- readStreamIntoByteArray(uploadFile.inputStream()) +
- readStreamIntoByteArray(uploadFile.inputStream()))
- .inputStream().use {
- it.copyTo(file.outputStream())
- }
- }
+ // construct uploadfile >5MB
+ val tempFile =
+ Files.newTemporaryFile().also { file ->
+ (
+ readStreamIntoByteArray(uploadFile.inputStream()) +
+ readStreamIntoByteArray(uploadFile.inputStream()) +
+ readStreamIntoByteArray(uploadFile.inputStream())
+ ).inputStream()
+ .use {
+ it.copyTo(file.outputStream())
+ }
+ }
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
it.bucket(bucketName)
it.key(TEST_IMAGE_TIFF)
it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
}
val uploadId = initiateMultipartUploadResult.uploadId()
// upload part 1, <5MB
- val partResponse1 = s3Client.uploadPart(
- {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(uploadId)
- it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
- it.partNumber(1)
- it.contentLength(tempFile.length())
- },
- RequestBody.fromFile(tempFile),
- )
+ val partResponse1 =
+ s3Client.uploadPart(
+ {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(uploadId)
+ it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
+ it.partNumber(1)
+ it.contentLength(tempFile.length())
+ },
+ RequestBody.fromFile(tempFile),
+ )
val etag1 = partResponse1.eTag()
val checksum1 = partResponse1.checksumCRC32()
// upload part 2, <5MB
- val partResponse2 = s3Client.uploadPart({
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(uploadId)
- it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
- it.partNumber(2)
- it.contentLength(uploadFile.length())
- },
- RequestBody.fromFile(uploadFile),
- )
+ val partResponse2 =
+ s3Client.uploadPart(
+ {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(uploadId)
+ it.checksumAlgorithm(ChecksumAlgorithm.CRC32)
+ it.partNumber(2)
+ it.contentLength(uploadFile.length())
+ },
+ RequestBody.fromFile(uploadFile),
+ )
val etag2 = partResponse2.eTag()
val checksum2 = partResponse2.checksumCRC32()
val localChecksum1 = DigestUtil.checksumFor(tempFile.toPath(), DefaultChecksumAlgorithm.CRC32)
@@ -442,45 +473,51 @@ internal class MultipartIT : S3TestBase() {
{
it.eTag(etag2)
it.partNumber(2)
- }
+ },
)
}
}
}.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 400")
- .hasMessageContaining("The upload was created using a crc32 checksum. The complete request must include the " +
- "checksum for each part. It was missing for part 1 in the request.")
+ .hasMessageContaining(
+ "The upload was created using a crc32 checksum. The complete request must include the " +
+ "checksum for each part. It was missing for part 1 in the request.",
+ )
}
-
@S3VerifiedSuccess(year = 2025)
@ParameterizedTest
@MethodSource(value = ["checksumAlgorithms"])
- fun testUploadPart_checksumAlgorithm_initiate(checksumAlgorithm: software.amazon.awssdk.checksums.spi.ChecksumAlgorithm,
- testInfo: TestInfo) {
+ fun testUploadPart_checksumAlgorithm_initiate(
+ checksumAlgorithm: software.amazon.awssdk.checksums.spi.ChecksumAlgorithm,
+ testInfo: TestInfo,
+ ) {
val bucketName = givenBucket(testInfo)
val expectedChecksum = DigestUtil.checksumFor(UPLOAD_FILE_PATH, checksumAlgorithm)
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.checksumAlgorithm(checksumAlgorithm.toAlgorithm())
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumAlgorithm(checksumAlgorithm.toAlgorithm())
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
- s3Client.uploadPart({
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(uploadId)
- it.checksumAlgorithm(checksumAlgorithm.toAlgorithm())
- it.partNumber(1)
- it.contentLength(UPLOAD_FILE_LENGTH).build()
- },
- RequestBody.fromFile(UPLOAD_FILE),
- ).also {
- val actualChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
- assertThat(actualChecksum).isNotBlank
- assertThat(actualChecksum).isEqualTo(expectedChecksum)
- }
+ s3Client
+ .uploadPart(
+ {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(uploadId)
+ it.checksumAlgorithm(checksumAlgorithm.toAlgorithm())
+ it.partNumber(1)
+ it.contentLength(UPLOAD_FILE_LENGTH).build()
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).also {
+ val actualChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
+ assertThat(actualChecksum).isNotBlank
+ assertThat(actualChecksum).isEqualTo(expectedChecksum)
+ }
s3Client.abortMultipartUpload {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
@@ -493,16 +530,18 @@ internal class MultipartIT : S3TestBase() {
@MethodSource(value = ["checksumAlgorithms"])
fun testUploadPart_checksumAlgorithm_complete(
checksumAlgorithm: software.amazon.awssdk.checksums.spi.ChecksumAlgorithm,
- testInfo: TestInfo
+ testInfo: TestInfo,
) {
val bucketName = givenBucket(testInfo)
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
- s3Client.uploadPart({
+ s3Client.uploadPart(
+ {
it.bucket(initiateMultipartUploadResult.bucket())
it.key(initiateMultipartUploadResult.key())
it.uploadId(uploadId)
@@ -520,39 +559,43 @@ internal class MultipartIT : S3TestBase() {
it.checksumType(ChecksumType.COMPOSITE)
it.checksum("WRONG CHECKSUM", checksumAlgorithm.toAlgorithm())
}
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 400")
}
@S3VerifiedSuccess(year = 2025)
@ParameterizedTest
@MethodSource(value = ["checksumAlgorithms"])
- fun testMultipartUpload_checksum(checksumAlgorithm: software.amazon.awssdk.checksums.spi.ChecksumAlgorithm,
- testInfo: TestInfo) {
+ fun testMultipartUpload_checksum(
+ checksumAlgorithm: software.amazon.awssdk.checksums.spi.ChecksumAlgorithm,
+ testInfo: TestInfo,
+ ) {
val bucketName = givenBucket(testInfo)
val expectedChecksum = DigestUtil.checksumFor(UPLOAD_FILE_PATH, checksumAlgorithm)
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.checksumAlgorithm(checksumAlgorithm.toAlgorithm())
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumAlgorithm(checksumAlgorithm.toAlgorithm())
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
- s3Client.uploadPart({
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(uploadId)
- it.checksum(expectedChecksum, checksumAlgorithm.toAlgorithm())
- it.partNumber(1)
- it.contentLength(UPLOAD_FILE_LENGTH).build()
- },
- RequestBody.fromFile(UPLOAD_FILE),
- ).also {
- val actualChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
- assertThat(actualChecksum).isNotBlank
- assertThat(actualChecksum).isEqualTo(expectedChecksum)
- }
+ s3Client
+ .uploadPart(
+ {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(uploadId)
+ it.checksum(expectedChecksum, checksumAlgorithm.toAlgorithm())
+ it.partNumber(1)
+ it.contentLength(UPLOAD_FILE_LENGTH).build()
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).also {
+ val actualChecksum = it.checksum(checksumAlgorithm.toAlgorithm())
+ assertThat(actualChecksum).isNotBlank
+ assertThat(actualChecksum).isEqualTo(expectedChecksum)
+ }
s3Client.abortMultipartUpload {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
@@ -566,14 +609,16 @@ internal class MultipartIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
val expectedChecksum = "wrongChecksum"
val checksumAlgorithm = ChecksumAlgorithm.SHA1
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
assertThatThrownBy {
- s3Client.uploadPart({
+ s3Client.uploadPart(
+ {
it.bucket(initiateMultipartUploadResult.bucket())
it.key(initiateMultipartUploadResult.key())
it.uploadId(uploadId)
@@ -583,15 +628,14 @@ internal class MultipartIT : S3TestBase() {
},
RequestBody.fromFile(UPLOAD_FILE),
)
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 400")
.hasMessageContaining("Value for x-amz-checksum-sha1 header is invalid.")
}
private fun UploadPartRequest.Builder.checksum(
checksum: String,
- checksumAlgorithm: ChecksumAlgorithm
+ checksumAlgorithm: ChecksumAlgorithm,
): UploadPartRequest.Builder =
when (checksumAlgorithm) {
ChecksumAlgorithm.SHA1 -> checksumSHA1(checksum)
@@ -604,7 +648,7 @@ internal class MultipartIT : S3TestBase() {
private fun CompleteMultipartUploadRequest.Builder.checksum(
checksum: String,
- checksumAlgorithm: ChecksumAlgorithm
+ checksumAlgorithm: ChecksumAlgorithm,
): CompleteMultipartUploadRequest.Builder =
when (checksumAlgorithm) {
ChecksumAlgorithm.SHA1 -> checksumSHA1(checksum)
@@ -621,7 +665,8 @@ internal class MultipartIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
val objectMetadata = mapOf("key" to "value")
val hash = UPLOAD_FILE.inputStream().use { DigestUtils.md5Hex(it) }
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
it.metadata(objectMetadata)
@@ -636,7 +681,7 @@ internal class MultipartIT : S3TestBase() {
it.uploadId(uploadId)
it.partNumber(1)
it.contentLength(UPLOAD_FILE_LENGTH)
- //.lastPart(true)
+ // .lastPart(true)
},
RequestBody.fromFile(UPLOAD_FILE),
)
@@ -648,18 +693,20 @@ internal class MultipartIT : S3TestBase() {
it.uploadId(uploadId)
it.partNumber(2)
it.contentLength(UPLOAD_FILE_LENGTH)
- //.lastPart(true)
+ // .lastPart(true)
},
RequestBody.fromFile(UPLOAD_FILE),
)
- val partListing = s3Client.listParts {
- it.bucket(bucketName)
- it.key(key)
- it.uploadId(uploadId)
- }.also {
- assertThat(it.parts()).hasSize(2)
- }
+ val partListing =
+ s3Client
+ .listParts {
+ it.bucket(bucketName)
+ it.key(key)
+ it.uploadId(uploadId)
+ }.also {
+ assertThat(it.parts()).hasSize(2)
+ }
partListing.parts()[0].also {
assertThat(it.eTag()).isEqualTo("\"$hash\"")
@@ -680,7 +727,8 @@ internal class MultipartIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
val objectMetadata = mapOf("key" to "value")
val hash = UPLOAD_FILE.inputStream().use { DigestUtils.md5Hex(it) }
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
it.metadata(objectMetadata)
@@ -696,34 +744,38 @@ internal class MultipartIT : S3TestBase() {
it.uploadId(uploadId)
it.partNumber(i)
it.contentLength(UPLOAD_FILE_LENGTH)
- //.lastPart(true)
+ // .lastPart(true)
},
RequestBody.fromFile(UPLOAD_FILE),
)
}
- val partListing1 = s3Client.listParts {
- it.bucket(bucketName)
- it.key(key)
- it.uploadId(uploadId)
- it.maxParts(5)
- }.also {
- assertThat(it.parts()).hasSize(5)
- assertThat(it.nextPartNumberMarker()).isEqualTo(5)
- assertThat(it.isTruncated).isTrue
- }
+ val partListing1 =
+ s3Client
+ .listParts {
+ it.bucket(bucketName)
+ it.key(key)
+ it.uploadId(uploadId)
+ it.maxParts(5)
+ }.also {
+ assertThat(it.parts()).hasSize(5)
+ assertThat(it.nextPartNumberMarker()).isEqualTo(5)
+ assertThat(it.isTruncated).isTrue
+ }
- val partListing2 = s3Client.listParts {
- it.bucket(bucketName)
- it.key(key)
- it.uploadId(uploadId)
- it.maxParts(5)
- it.partNumberMarker(partListing1.nextPartNumberMarker())
- }.also {
- assertThat(it.parts()).hasSize(5)
- //assertThat(it.nextPartNumberMarker()).isNull()
- assertThat(it.isTruncated).isFalse
- }
+ val partListing2 =
+ s3Client
+ .listParts {
+ it.bucket(bucketName)
+ it.key(key)
+ it.uploadId(uploadId)
+ it.maxParts(5)
+ it.partNumberMarker(partListing1.nextPartNumberMarker())
+ }.also {
+ assertThat(it.parts()).hasSize(5)
+ // assertThat(it.nextPartNumberMarker()).isNull()
+ assertThat(it.isTruncated).isFalse
+ }
partListing1.parts()[0].also {
assertThat(it.eTag()).isEqualTo("\"$hash\"")
@@ -743,25 +795,29 @@ internal class MultipartIT : S3TestBase() {
fun `list parts is empty if no parts were uploaded`(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
assertThat(
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- }.uploads()
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ }.uploads(),
).isEmpty()
- val uploadId = s3Client.createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.uploadId()
+ val uploadId =
+ s3Client
+ .createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }.uploadId()
- s3Client.listParts {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.uploadId(uploadId)
- }.also {
- assertThat(it.parts()).isEmpty()
- assertThat(it.bucket()).isEqualTo(bucketName)
- assertThat(it.uploadId()).isEqualTo(uploadId)
- assertThat(SdkHttpUtils.urlDecode(it.key())).isEqualTo(UPLOAD_FILE_NAME)
- }
+ s3Client
+ .listParts {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.uploadId(uploadId)
+ }.also {
+ assertThat(it.parts()).isEmpty()
+ assertThat(it.bucket()).isEqualTo(bucketName)
+ assertThat(it.uploadId()).isEqualTo(uploadId)
+ assertThat(SdkHttpUtils.urlDecode(it.key())).isEqualTo(UPLOAD_FILE_NAME)
+ }
}
@Test
@@ -775,8 +831,7 @@ internal class MultipartIT : S3TestBase() {
it.key("NON_EXISTENT_KEY")
it.uploadId(UUID.randomUUID().toString())
}
- }
- .isInstanceOf(AwsServiceException::class.java)
+ }.isInstanceOf(AwsServiceException::class.java)
.hasMessageContaining("Service: S3, Status Code: 404")
}
@@ -785,28 +840,33 @@ internal class MultipartIT : S3TestBase() {
fun `list MultipartUploads returns OK`(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
assertThat(
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- }.uploads()
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ }.uploads(),
).isEmpty()
- val uploadId = s3Client.createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.uploadId()
+ val uploadId =
+ s3Client
+ .createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }.uploadId()
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- }.also { listing ->
- assertThat(listing.uploads()).isNotEmpty
- assertThat(listing.bucket()).isEqualTo(bucketName)
- assertThat(listing.uploads()).hasSize(1)
-
- listing.uploads()[0]
- .also {
- assertThat(it.uploadId()).isEqualTo(uploadId)
- assertThat(it.key()).isEqualTo(UPLOAD_FILE_NAME)
- }
- }
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ }.also { listing ->
+ assertThat(listing.uploads()).isNotEmpty
+ assertThat(listing.bucket()).isEqualTo(bucketName)
+ assertThat(listing.uploads()).hasSize(1)
+
+ listing
+ .uploads()[0]
+ .also {
+ assertThat(it.uploadId()).isEqualTo(uploadId)
+ assertThat(it.key()).isEqualTo(UPLOAD_FILE_NAME)
+ }
+ }
}
@Test
@@ -822,10 +882,11 @@ internal class MultipartIT : S3TestBase() {
it.key("key2")
}
- val listing = s3Client.listMultipartUploads {
- it.bucket(bucketName)
- it.prefix("key2")
- }
+ val listing =
+ s3Client.listMultipartUploads {
+ it.bucket(bucketName)
+ it.prefix("key2")
+ }
assertThat(listing.uploads()).hasSize(1)
assertThat(listing.uploads()[0].key()).isEqualTo("key2")
}
@@ -834,38 +895,42 @@ internal class MultipartIT : S3TestBase() {
@S3VerifiedSuccess(year = 2025)
fun `list MultipartUploads by bucket returns OK`(testInfo: TestInfo) {
// create multipart upload 1
- val bucketName1 = givenBucket(testInfo)
- .also { name ->
- s3Client.createMultipartUpload {
- it.bucket(name)
- it.key("key1")
+ val bucketName1 =
+ givenBucket(testInfo)
+ .also { name ->
+ s3Client.createMultipartUpload {
+ it.bucket(name)
+ it.key("key1")
}
- }
+ }
// create multipart upload 2
- val bucketName2 = givenBucket()
- .also { name ->
- s3Client.createMultipartUpload {
- it.bucket(name)
- it.key("key2")
+ val bucketName2 =
+ givenBucket()
+ .also { name ->
+ s3Client.createMultipartUpload {
+ it.bucket(name)
+ it.key("key2")
+ }
}
- }
// assert multipart upload 1
- s3Client.listMultipartUploads {
- it.bucket(bucketName1)
- }.also {
- assertThat(it.uploads()).hasSize(1)
- assertThat(it.uploads()[0].key()).isEqualTo("key1")
- }
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName1)
+ }.also {
+ assertThat(it.uploads()).hasSize(1)
+ assertThat(it.uploads()[0].key()).isEqualTo("key1")
+ }
// assert multipart upload 2
- s3Client.listMultipartUploads {
- it.bucket(bucketName2)
- }.also {
- assertThat(it.uploads()).hasSize(1)
- assertThat(it.uploads()[0].key()).isEqualTo("key2")
- }
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName2)
+ }.also {
+ assertThat(it.uploads()).hasSize(1)
+ assertThat(it.uploads()[0].key()).isEqualTo("key2")
+ }
}
@Test
@@ -878,23 +943,26 @@ internal class MultipartIT : S3TestBase() {
it.key("key$i")
}
}
- val uploads1 = s3Client.listMultipartUploads {
- it.bucket(bucketName)
- it.maxUploads(5)
- }.also {
- assertThat(it.uploads()).hasSize(5)
- assertThat(it.nextUploadIdMarker()).isNotNull
- assertThat(it.nextKeyMarker()).isNotNull
- }
+ val uploads1 =
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ it.maxUploads(5)
+ }.also {
+ assertThat(it.uploads()).hasSize(5)
+ assertThat(it.nextUploadIdMarker()).isNotNull
+ assertThat(it.nextKeyMarker()).isNotNull
+ }
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- it.uploadIdMarker(uploads1.nextUploadIdMarker())
- it.keyMarker(uploads1.nextKeyMarker())
- }.also {
- assertThat(it.uploads()).hasSize(5)
- assertThat(it.uploads()[0].key()).isEqualTo("key5")
- }
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ it.uploadIdMarker(uploads1.nextUploadIdMarker())
+ it.keyMarker(uploads1.nextKeyMarker())
+ }.also {
+ assertThat(it.uploads()).hasSize(5)
+ assertThat(it.uploads()[0].key()).isEqualTo("key5")
+ }
}
/**
@@ -905,30 +973,34 @@ internal class MultipartIT : S3TestBase() {
fun testAbortMultipartUpload(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
assertThat(
- s3Client.listMultipartUploads {
+ s3Client
+ .listMultipartUploads {
it.bucket(bucketName)
- }.hasUploads()
+ }.hasUploads(),
).isFalse
- val uploadId = s3Client
- .createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.uploadId()
+ val uploadId =
+ s3Client
+ .createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }.uploadId()
val randomBytes = randomBytes()
val partETag = uploadPart(bucketName, UPLOAD_FILE_NAME, uploadId, 1, randomBytes)
assertThat(
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- }.hasUploads()
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ }.hasUploads(),
).isTrue
- s3Client.listParts {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.uploadId(uploadId)
- }.parts()
+ s3Client
+ .listParts {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.uploadId(uploadId)
+ }.parts()
.also {
assertThat(it).hasSize(1)
assertThat(it[0].eTag()).isEqualTo(partETag)
@@ -940,9 +1012,10 @@ internal class MultipartIT : S3TestBase() {
it.uploadId(uploadId)
}
assertThat(
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- }.hasUploads()
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ }.hasUploads(),
).isFalse
// List parts, make sure we find no parts
@@ -952,8 +1025,7 @@ internal class MultipartIT : S3TestBase() {
it.key(UPLOAD_FILE_NAME)
it.uploadId(uploadId)
}
- }
- .isInstanceOf(AwsServiceException::class.java)
+ }.isInstanceOf(AwsServiceException::class.java)
.hasMessageContaining("Service: S3, Status Code: 404")
.asInstanceOf(InstanceOfAssertFactories.type(AwsServiceException::class.java))
.extracting(AwsServiceException::awsErrorDetails)
@@ -971,43 +1043,47 @@ internal class MultipartIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
val key = randomName
assertThat(
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- }.uploads()
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ }.uploads(),
).isEmpty()
// Initiate upload
- val uploadId = s3Client
- .createMultipartUpload {
- it.bucket(bucketName)
- it.key(key)
- }.uploadId()
+ val uploadId =
+ s3Client
+ .createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(key)
+ }.uploadId()
// Upload 3 parts
val randomBytes1 = randomBytes()
val partETag1 = uploadPart(bucketName, key, uploadId, 1, randomBytes1)
val randomBytes2 = randomBytes()
- uploadPart(bucketName, key, uploadId, 2, randomBytes2) //ignore output in this test.
+ uploadPart(bucketName, key, uploadId, 2, randomBytes2) // ignore output in this test.
val randomBytes3 = randomBytes()
val partETag3 = uploadPart(bucketName, key, uploadId, 3, randomBytes3)
// Try to complete with these parts
- val result = s3Client.completeMultipartUpload {
- it.bucket(bucketName)
- it.key(key)
- it.uploadId(uploadId)
- it.multipartUpload {
- it.parts({
- it.eTag(partETag1)
- it.partNumber(1)
- },
- {
- it.eTag(partETag3)
- it.partNumber(3)
- }
- )
+ val result =
+ s3Client.completeMultipartUpload {
+ it.bucket(bucketName)
+ it.key(key)
+ it.uploadId(uploadId)
+ it.multipartUpload {
+ it.parts(
+ {
+ it.eTag(partETag1)
+ it.partNumber(1)
+ },
+ {
+ it.eTag(partETag3)
+ it.partNumber(3)
+ },
+ )
+ }
}
- }
// Verify only 1st and 3rd counts
(DigestUtils.md5(randomBytes1) + DigestUtils.md5(randomBytes3)).also {
@@ -1015,15 +1091,16 @@ internal class MultipartIT : S3TestBase() {
assertThat(result.eTag()).isEqualTo("\"${DigestUtils.md5Hex(it)}-2\"")
}
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- }.use {
- // verify content size
- assertThat(it.response().contentLength()).isEqualTo(randomBytes1.size.toLong() + randomBytes3.size)
- // verify contents
- assertThat(readStreamIntoByteArray(it.buffered())).isEqualTo(concatByteArrays(randomBytes1, randomBytes3))
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(key)
+ }.use {
+ // verify content size
+ assertThat(it.response().contentLength()).isEqualTo(randomBytes1.size.toLong() + randomBytes3.size)
+ // verify contents
+ assertThat(readStreamIntoByteArray(it.buffered())).isEqualTo(concatByteArrays(randomBytes1, randomBytes3))
+ }
}
/**
@@ -1036,28 +1113,31 @@ internal class MultipartIT : S3TestBase() {
val bucketName = givenBucket(testInfo)
val key = randomName
assertThat(
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- }.uploads()
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ }.uploads(),
).isEmpty()
// Initiate upload
- val uploadId = s3Client
- .createMultipartUpload {
- it.bucket(bucketName)
- it.key(key)
- }.uploadId()
+ val uploadId =
+ s3Client
+ .createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(key)
+ }.uploadId()
// Upload part
val randomBytes = randomBytes()
val partETag = uploadPart(bucketName, key, uploadId, 1, randomBytes)
// List parts, make sure we find part 1
- s3Client.listParts {
- it.bucket(bucketName)
- it.key(key)
- it.uploadId(uploadId)
- }.parts()
+ s3Client
+ .listParts {
+ it.bucket(bucketName)
+ it.key(key)
+ it.uploadId(uploadId)
+ }.parts()
.also {
assertThat(it).hasSize(1)
assertThat(it[0].eTag()).isEqualTo(partETag)
@@ -1073,7 +1153,7 @@ internal class MultipartIT : S3TestBase() {
{
it.eTag(partETag)
it.partNumber(1)
- }
+ },
)
}
}
@@ -1085,8 +1165,7 @@ internal class MultipartIT : S3TestBase() {
it.key(key)
it.uploadId(uploadId)
}
- }
- .isInstanceOf(AwsServiceException::class.java)
+ }.isInstanceOf(AwsServiceException::class.java)
.hasMessageContaining("Service: S3, Status Code: 404")
.asInstanceOf(InstanceOfAssertFactories.type(AwsServiceException::class.java))
.extracting(AwsServiceException::awsErrorDetails)
@@ -1100,29 +1179,32 @@ internal class MultipartIT : S3TestBase() {
@Test
@S3VerifiedSuccess(year = 2025)
fun shouldCopyPartsAndComplete(testInfo: TestInfo) {
- //Initiate upload
+ // Initiate upload
val bucketName2 = givenBucket()
val multipartUploadKey = UUID.randomUUID().toString()
- val uploadId = s3Client.createMultipartUpload {
- it.bucket(bucketName2)
- it.key(multipartUploadKey)
- }.uploadId()
+ val uploadId =
+ s3Client
+ .createMultipartUpload {
+ it.bucket(bucketName2)
+ it.key(multipartUploadKey)
+ }.uploadId()
val parts: MutableList = ArrayList()
- //bucket for test data
+ // bucket for test data
val bucketName1 = givenBucket(testInfo)
- //create two objects, initiate copy part with full object length
+ // create two objects, initiate copy part with full object length
val sourceKeys = arrayOf(UUID.randomUUID().toString(), UUID.randomUUID().toString())
val allRandomBytes: MutableList = ArrayList()
for (i in sourceKeys.indices) {
val key = sourceKeys[i]
val partNumber = i + 1
val randomBytes = randomBytes()
- val metadata1 = HashMap().apply {
- this["contentLength"] = randomBytes.size.toString()
- }
+ val metadata1 =
+ HashMap().apply {
+ this["contentLength"] = randomBytes.size.toString()
+ }
ByteArrayInputStream(randomBytes).use { inputStream ->
s3Client.putObject(
{
@@ -1130,51 +1212,60 @@ internal class MultipartIT : S3TestBase() {
it.key(key)
it.metadata(metadata1)
},
- RequestBody.fromInputStream(inputStream, randomBytes.size.toLong())
+ RequestBody.fromInputStream(inputStream, randomBytes.size.toLong()),
)
}
- s3Client.uploadPartCopy {
- it.partNumber(partNumber)
- it.uploadId(uploadId)
- it.destinationBucket(bucketName2)
- it.destinationKey(multipartUploadKey)
- it.sourceKey(key)
- it.sourceBucket(bucketName1)
- }.also {
- val etag = it.copyPartResult().eTag()
- parts.add(CompletedPart.builder().eTag(etag).partNumber(partNumber).build())
- allRandomBytes.add(randomBytes)
- }
+ s3Client
+ .uploadPartCopy {
+ it.partNumber(partNumber)
+ it.uploadId(uploadId)
+ it.destinationBucket(bucketName2)
+ it.destinationKey(multipartUploadKey)
+ it.sourceKey(key)
+ it.sourceBucket(bucketName1)
+ }.also {
+ val etag = it.copyPartResult().eTag()
+ parts.add(
+ CompletedPart
+ .builder()
+ .eTag(etag)
+ .partNumber(partNumber)
+ .build(),
+ )
+ allRandomBytes.add(randomBytes)
+ }
}
assertThat(allRandomBytes).hasSize(2)
// Complete with parts
- val result = s3Client.completeMultipartUpload {
- it.bucket(bucketName2)
- it.key(multipartUploadKey)
- it.uploadId(uploadId)
- it.multipartUpload {
- it.parts(parts)
+ val result =
+ s3Client.completeMultipartUpload {
+ it.bucket(bucketName2)
+ it.key(multipartUploadKey)
+ it.uploadId(uploadId)
+ it.multipartUpload {
+ it.parts(parts)
+ }
}
- }
// Verify parts
(DigestUtils.md5(allRandomBytes[0]) + DigestUtils.md5(allRandomBytes[1])).also {
// verify etag
assertThat(result.eTag()).isEqualTo("\"${DigestUtils.md5Hex(it)}-2\"")
}
- s3Client.getObject {
- it.bucket(bucketName2)
- it.key(multipartUploadKey)
- }.use {
- // verify content size
- assertThat(it.response().contentLength()).isEqualTo(allRandomBytes[0].size.toLong() + allRandomBytes[1].size)
+ s3Client
+ .getObject {
+ it.bucket(bucketName2)
+ it.key(multipartUploadKey)
+ }.use {
+ // verify content size
+ assertThat(it.response().contentLength()).isEqualTo(allRandomBytes[0].size.toLong() + allRandomBytes[1].size)
- // verify contents
- assertThat(readStreamIntoByteArray(it.buffered()))
- .isEqualTo(concatByteArrays(allRandomBytes[0], allRandomBytes[1]))
- }
+ // verify contents
+ assertThat(readStreamIntoByteArray(it.buffered()))
+ .isEqualTo(concatByteArrays(allRandomBytes[0], allRandomBytes[1]))
+ }
}
/**
@@ -1190,32 +1281,35 @@ internal class MultipartIT : S3TestBase() {
val destinationKey = "copyOf/$sourceKey"
val objectMetadata = mapOf(Pair("key", "value"))
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- it.metadata(objectMetadata)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ it.metadata(objectMetadata)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
- val result = s3Client.uploadPartCopy {
- it.uploadId(uploadId)
- it.destinationBucket(destinationBucket)
- it.destinationKey(destinationKey)
- it.sourceKey(sourceKey)
- it.sourceBucket(bucketName)
- it.partNumber(1)
- it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
- }
+ val result =
+ s3Client.uploadPartCopy {
+ it.uploadId(uploadId)
+ it.destinationBucket(destinationBucket)
+ it.destinationKey(destinationKey)
+ it.sourceKey(sourceKey)
+ it.sourceBucket(bucketName)
+ it.partNumber(1)
+ it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
+ }
val etag = result.copyPartResult().eTag()
- s3Client.listParts {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(initiateMultipartUploadResult.uploadId())
- }.also {
- assertThat(it.parts()).hasSize(1)
- assertThat(it.parts()[0].eTag()).isEqualTo(etag)
- }
+ s3Client
+ .listParts {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(initiateMultipartUploadResult.uploadId())
+ }.also {
+ assertThat(it.parts()).hasSize(1)
+ assertThat(it.parts()[0].eTag()).isEqualTo(etag)
+ }
}
/**
@@ -1229,11 +1323,12 @@ internal class MultipartIT : S3TestBase() {
val destinationKey = "copyOf/$sourceKey"
val bucketName = givenBucket(testInfo)
val objectMetadata = mapOf(Pair("key", "value"))
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- it.metadata(objectMetadata)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ it.metadata(objectMetadata)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
assertThatThrownBy {
@@ -1246,8 +1341,7 @@ internal class MultipartIT : S3TestBase() {
it.partNumber(1)
it.copySourceRange("bytes=0-5")
}
- }
- .isInstanceOf(AwsServiceException::class.java)
+ }.isInstanceOf(AwsServiceException::class.java)
.hasMessageContaining("Service: S3, Status Code: 404")
.asInstanceOf(InstanceOfAssertFactories.type(AwsServiceException::class.java))
.extracting(AwsServiceException::awsErrorDetails)
@@ -1264,35 +1358,38 @@ internal class MultipartIT : S3TestBase() {
val destinationKey = "copyOf/$sourceKey"
val matchingEtag = putObjectResponse.eTag()
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
- val result = s3Client.uploadPartCopy {
- it.uploadId(uploadId)
- it.destinationBucket(destinationBucket)
- it.destinationKey(destinationKey)
- it.sourceKey(sourceKey)
- it.sourceBucket(bucketName)
- it.partNumber(1)
- it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
- it.copySourceIfMatch(matchingEtag)
- }
+ val result =
+ s3Client.uploadPartCopy {
+ it.uploadId(uploadId)
+ it.destinationBucket(destinationBucket)
+ it.destinationKey(destinationKey)
+ it.sourceKey(sourceKey)
+ it.sourceBucket(bucketName)
+ it.partNumber(1)
+ it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
+ it.copySourceIfMatch(matchingEtag)
+ }
val etag = result.copyPartResult().eTag()
- s3Client.listParts(
- ListPartsRequest
- .builder()
- .bucket(initiateMultipartUploadResult.bucket())
- .key(initiateMultipartUploadResult.key())
- .uploadId(initiateMultipartUploadResult.uploadId())
- .build()
- ).also {
- assertThat(it.parts()).hasSize(1)
- assertThat(it.parts()[0].eTag()).isEqualTo(etag)
- }
+ s3Client
+ .listParts(
+ ListPartsRequest
+ .builder()
+ .bucket(initiateMultipartUploadResult.bucket())
+ .key(initiateMultipartUploadResult.key())
+ .uploadId(initiateMultipartUploadResult.uploadId())
+ .build(),
+ ).also {
+ assertThat(it.parts()).hasSize(1)
+ assertThat(it.parts()[0].eTag()).isEqualTo(etag)
+ }
}
@Test
@@ -1304,10 +1401,11 @@ internal class MultipartIT : S3TestBase() {
val destinationKey = "copyOf/$sourceKey"
val noneMatchingEtag = "\"${randomName}\""
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
assertThatThrownBy {
@@ -1321,8 +1419,7 @@ internal class MultipartIT : S3TestBase() {
it.copySourceRange("bytes=0-$UPLOAD_FILE_LENGTH")
it.copySourceIfMatch(noneMatchingEtag)
}
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 412")
.hasMessageContaining(PRECONDITION_FAILED.message)
}
@@ -1336,37 +1433,40 @@ internal class MultipartIT : S3TestBase() {
val destinationKey = "copyOf/$sourceKey"
val matchingEtag = putObjectResponse.eTag()
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
val now = Instant.now().plusSeconds(60)
- val result = s3Client.uploadPartCopy {
- it.uploadId(uploadId)
- it.destinationBucket(destinationBucket)
- it.destinationKey(destinationKey)
- it.sourceKey(sourceKey)
- it.sourceBucket(bucketName)
- it.partNumber(1)
- it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
- it.copySourceIfMatch(matchingEtag)
- it.copySourceIfUnmodifiedSince(now)
- }
+ val result =
+ s3Client.uploadPartCopy {
+ it.uploadId(uploadId)
+ it.destinationBucket(destinationBucket)
+ it.destinationKey(destinationKey)
+ it.sourceKey(sourceKey)
+ it.sourceBucket(bucketName)
+ it.partNumber(1)
+ it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
+ it.copySourceIfMatch(matchingEtag)
+ it.copySourceIfUnmodifiedSince(now)
+ }
val etag = result.copyPartResult().eTag()
- s3Client.listParts(
- ListPartsRequest
- .builder()
- .bucket(initiateMultipartUploadResult.bucket())
- .key(initiateMultipartUploadResult.key())
- .uploadId(initiateMultipartUploadResult.uploadId())
- .build()
- ).also {
- assertThat(it.parts()).hasSize(1)
- assertThat(it.parts()[0].eTag()).isEqualTo(etag)
- }
+ s3Client
+ .listParts(
+ ListPartsRequest
+ .builder()
+ .bucket(initiateMultipartUploadResult.bucket())
+ .key(initiateMultipartUploadResult.key())
+ .uploadId(initiateMultipartUploadResult.uploadId())
+ .build(),
+ ).also {
+ assertThat(it.parts()).hasSize(1)
+ assertThat(it.parts()[0].eTag()).isEqualTo(etag)
+ }
}
@Test
@@ -1378,32 +1478,35 @@ internal class MultipartIT : S3TestBase() {
val destinationKey = "copyOf/$sourceKey"
val noneMatchingEtag = "\"${randomName}\""
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
- val result = s3Client.uploadPartCopy {
- it.uploadId(uploadId)
- it.destinationBucket(destinationBucket)
- it.destinationKey(destinationKey)
- it.sourceKey(sourceKey)
- it.sourceBucket(bucketName)
- it.partNumber(1)
- it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
- it.copySourceIfNoneMatch(noneMatchingEtag)
- }
+ val result =
+ s3Client.uploadPartCopy {
+ it.uploadId(uploadId)
+ it.destinationBucket(destinationBucket)
+ it.destinationKey(destinationKey)
+ it.sourceKey(sourceKey)
+ it.sourceBucket(bucketName)
+ it.partNumber(1)
+ it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
+ it.copySourceIfNoneMatch(noneMatchingEtag)
+ }
val etag = result.copyPartResult().eTag()
- s3Client.listParts {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(initiateMultipartUploadResult.uploadId())
- }.also {
- assertThat(it.parts()).hasSize(1)
- assertThat(it.parts()[0].eTag()).isEqualTo(etag)
- }
+ s3Client
+ .listParts {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(initiateMultipartUploadResult.uploadId())
+ }.also {
+ assertThat(it.parts()).hasSize(1)
+ assertThat(it.parts()[0].eTag()).isEqualTo(etag)
+ }
}
@Test
@@ -1415,10 +1518,11 @@ internal class MultipartIT : S3TestBase() {
val destinationKey = "copyOf/$sourceKey"
val matchingEtag = putObjectResponse.eTag()
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
assertThatThrownBy {
@@ -1432,8 +1536,7 @@ internal class MultipartIT : S3TestBase() {
it.copySourceRange("bytes=0-$UPLOAD_FILE_LENGTH")
it.copySourceIfNoneMatch(matchingEtag)
}
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 412")
.hasMessageContaining(PRECONDITION_FAILED.message)
}
@@ -1448,10 +1551,11 @@ internal class MultipartIT : S3TestBase() {
val destinationKey = "copyOf/$sourceKey"
val matchingEtag = putObjectResponse.eTag()
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
assertThatThrownBy {
@@ -1466,8 +1570,7 @@ internal class MultipartIT : S3TestBase() {
it.copySourceIfNoneMatch(matchingEtag)
it.copySourceIfModifiedSince(now)
}
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 412")
.hasMessageContaining(PRECONDITION_FAILED.message)
}
@@ -1481,32 +1584,35 @@ internal class MultipartIT : S3TestBase() {
val destinationBucket = givenBucket()
val destinationKey = "copyOf/$sourceKey"
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
- val result = s3Client.uploadPartCopy {
- it.uploadId(uploadId)
- it.destinationBucket(destinationBucket)
- it.destinationKey(destinationKey)
- it.sourceKey(sourceKey)
- it.sourceBucket(bucketName)
- it.partNumber(1)
- it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
- it.copySourceIfModifiedSince(now)
- }
+ val result =
+ s3Client.uploadPartCopy {
+ it.uploadId(uploadId)
+ it.destinationBucket(destinationBucket)
+ it.destinationKey(destinationKey)
+ it.sourceKey(sourceKey)
+ it.sourceBucket(bucketName)
+ it.partNumber(1)
+ it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
+ it.copySourceIfModifiedSince(now)
+ }
val etag = result.copyPartResult().eTag()
- s3Client.listParts {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(initiateMultipartUploadResult.uploadId())
- }.also {
- assertThat(it.parts()).hasSize(1)
- assertThat(it.parts()[0].eTag()).isEqualTo(etag)
- }
+ s3Client
+ .listParts {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(initiateMultipartUploadResult.uploadId())
+ }.also {
+ assertThat(it.parts()).hasSize(1)
+ assertThat(it.parts()[0].eTag()).isEqualTo(etag)
+ }
}
@Test
@@ -1517,10 +1623,11 @@ internal class MultipartIT : S3TestBase() {
val destinationBucket = givenBucket()
val destinationKey = "copyOf/$sourceKey"
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ }
val now = Instant.now().plusSeconds(60)
@@ -1536,8 +1643,7 @@ internal class MultipartIT : S3TestBase() {
it.copySourceRange("bytes=0-$UPLOAD_FILE_LENGTH")
it.copySourceIfModifiedSince(now)
}
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 412")
.hasMessageContaining(PRECONDITION_FAILED.message)
}
@@ -1550,33 +1656,36 @@ internal class MultipartIT : S3TestBase() {
val destinationBucket = givenBucket()
val destinationKey = "copyOf/$sourceKey"
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- }
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
val now = Instant.now().plusSeconds(60)
- val result = s3Client.uploadPartCopy {
- it.uploadId(uploadId)
- it.destinationBucket(destinationBucket)
- it.destinationKey(destinationKey)
- it.sourceKey(sourceKey)
- it.sourceBucket(bucketName)
- it.partNumber(1)
- it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
- it.copySourceIfUnmodifiedSince(now)
- }
+ val result =
+ s3Client.uploadPartCopy {
+ it.uploadId(uploadId)
+ it.destinationBucket(destinationBucket)
+ it.destinationKey(destinationKey)
+ it.sourceKey(sourceKey)
+ it.sourceBucket(bucketName)
+ it.partNumber(1)
+ it.copySourceRange("bytes=0-${UPLOAD_FILE_LENGTH - 1}")
+ it.copySourceIfUnmodifiedSince(now)
+ }
val etag = result.copyPartResult().eTag()
- s3Client.listParts {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(initiateMultipartUploadResult.uploadId())
- }.also {
- assertThat(it.parts()).hasSize(1)
- assertThat(it.parts()[0].eTag()).isEqualTo(etag)
- }
+ s3Client
+ .listParts {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(initiateMultipartUploadResult.uploadId())
+ }.also {
+ assertThat(it.parts()).hasSize(1)
+ assertThat(it.parts()[0].eTag()).isEqualTo(etag)
+ }
}
@Test
@@ -1588,11 +1697,11 @@ internal class MultipartIT : S3TestBase() {
val destinationBucket = givenBucket()
val destinationKey = "copyOf/$sourceKey"
- val initiateMultipartUploadResult = s3Client.createMultipartUpload {
- it.bucket(destinationBucket)
- it.key(destinationKey)
- }
-
+ val initiateMultipartUploadResult =
+ s3Client.createMultipartUpload {
+ it.bucket(destinationBucket)
+ it.key(destinationKey)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
assertThatThrownBy {
@@ -1606,8 +1715,7 @@ internal class MultipartIT : S3TestBase() {
it.copySourceRange("bytes=0-$UPLOAD_FILE_LENGTH")
it.copySourceIfUnmodifiedSince(now)
}
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining("Service: S3, Status Code: 412")
.hasMessageContaining(PRECONDITION_FAILED.message)
}
@@ -1620,13 +1728,10 @@ internal class MultipartIT : S3TestBase() {
it.bucket(randomName)
it.key(UPLOAD_FILE_NAME)
}
- }
- .isInstanceOf(NoSuchBucketException::class.java)
+ }.isInstanceOf(NoSuchBucketException::class.java)
.hasMessageContaining(NO_SUCH_BUCKET)
}
-
-
@Test
@S3VerifiedSuccess(year = 2025)
fun listMultipartUploads_noSuchBucket() {
@@ -1634,8 +1739,7 @@ internal class MultipartIT : S3TestBase() {
s3Client.listMultipartUploads {
it.bucket(randomName)
}
- }
- .isInstanceOf(NoSuchBucketException::class.java)
+ }.isInstanceOf(NoSuchBucketException::class.java)
.hasMessageContaining(NO_SUCH_BUCKET)
}
@@ -1648,8 +1752,7 @@ internal class MultipartIT : S3TestBase() {
it.key(UPLOAD_FILE_NAME)
it.uploadId(UUID.randomUUID().toString())
}
- }
- .isInstanceOf(NoSuchBucketException::class.java)
+ }.isInstanceOf(NoSuchBucketException::class.java)
.hasMessageContaining(NO_SUCH_BUCKET)
}
@@ -1657,15 +1760,16 @@ internal class MultipartIT : S3TestBase() {
@S3VerifiedSuccess(year = 2025)
fun transferManagerUpload_noSuchSourceBucket() {
assertThatThrownBy {
- transferManager.upload {
- it.putObjectRequest {
- it.bucket(randomName)
- it.key(UPLOAD_FILE_NAME)
- }
- it.requestBody(AsyncRequestBody.fromFile(UPLOAD_FILE))
- }.completionFuture().join()
- }
- .isInstanceOf(CompletionException::class.java)
+ transferManager
+ .upload {
+ it.putObjectRequest {
+ it.bucket(randomName)
+ it.key(UPLOAD_FILE_NAME)
+ }
+ it.requestBody(AsyncRequestBody.fromFile(UPLOAD_FILE))
+ }.completionFuture()
+ .join()
+ }.isInstanceOf(CompletionException::class.java)
.hasCauseInstanceOf(NoSuchBucketException::class.java)
.hasMessageContaining(NO_SUCH_BUCKET)
}
@@ -1679,22 +1783,23 @@ internal class MultipartIT : S3TestBase() {
val destinationKey = "copyOf/$sourceKey"
assertThatThrownBy {
- transferManager.copy {
- it.copyObjectRequest {
- it.sourceBucket(randomName)
- it.sourceKey(sourceKey)
- it.destinationBucket(destinationBucketName)
- it.destinationKey(destinationKey)
- }
- }.completionFuture().join()
- }
- .isInstanceOf(CompletionException::class.java)
+ transferManager
+ .copy {
+ it.copyObjectRequest {
+ it.sourceBucket(randomName)
+ it.sourceKey(sourceKey)
+ it.destinationBucket(destinationBucketName)
+ it.destinationKey(destinationKey)
+ }
+ }.completionFuture()
+ .join()
+ }.isInstanceOf(CompletionException::class.java)
.hasCauseInstanceOf(NoSuchKeyException::class.java)
- //TODO: not sure why AWS SDK v2 does not return the correct error message, S3Mock returns the correct message.
- //.hasMessageContaining(NO_SUCH_KEY)
- //TODO: not sure why AWS SDK v2 does not return the correct exception here, S3Mock returns the correct error message.
- //.hasCauseInstanceOf(NoSuchBucketException::class.java)
- //.hasMessageContaining(NO_SUCH_BUCKET)
+ // TODO: not sure why AWS SDK v2 does not return the correct error message, S3Mock returns the correct message.
+ // .hasMessageContaining(NO_SUCH_KEY)
+ // TODO: not sure why AWS SDK v2 does not return the correct exception here, S3Mock returns the correct error message.
+ // .hasCauseInstanceOf(NoSuchBucketException::class.java)
+ // .hasMessageContaining(NO_SUCH_BUCKET)
}
@Test
@@ -1706,36 +1811,39 @@ internal class MultipartIT : S3TestBase() {
val destinationKey = "copyOf/$sourceKey"
assertThatThrownBy {
- transferManager.copy {
- it.copyObjectRequest {
- it.sourceBucket(bucketName)
- it.sourceKey(randomName)
- it.destinationBucket(destinationBucketName)
- it.destinationKey(destinationKey)
- }
- }.completionFuture().join()
- }
- .isInstanceOf(CompletionException::class.java)
+ transferManager
+ .copy {
+ it.copyObjectRequest {
+ it.sourceBucket(bucketName)
+ it.sourceKey(randomName)
+ it.destinationBucket(destinationBucketName)
+ it.destinationKey(destinationKey)
+ }
+ }.completionFuture()
+ .join()
+ }.isInstanceOf(CompletionException::class.java)
.hasCauseInstanceOf(NoSuchKeyException::class.java)
- //TODO: not sure why AWS SDK v2 does not return the correct error message, S3Mock returns the correct message.
- //.hasMessageContaining(NO_SUCH_KEY)
+ // TODO: not sure why AWS SDK v2 does not return the correct error message, S3Mock returns the correct message.
+ // .hasMessageContaining(NO_SUCH_KEY)
}
@Test
@S3VerifiedSuccess(year = 2025)
fun uploadMultipart_invalidPartNumber(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
- val initiateMultipartUploadResult = s3Client
- .createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }
+ val initiateMultipartUploadResult =
+ s3Client
+ .createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
assertThat(
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- }.uploads()
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ }.uploads(),
).isNotEmpty
val invalidPartNumber = 0
@@ -1747,10 +1855,9 @@ internal class MultipartIT : S3TestBase() {
it.uploadId(uploadId)
it.partNumber(invalidPartNumber)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining(INVALID_PART_NUMBER)
}
@@ -1758,28 +1865,32 @@ internal class MultipartIT : S3TestBase() {
@S3VerifiedSuccess(year = 2025)
fun completeMultipartUpload_nonExistingPartNumber(testInfo: TestInfo) {
val bucketName = givenBucket(testInfo)
- val initiateMultipartUploadResult = s3Client
- .createMultipartUpload {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }
+ val initiateMultipartUploadResult =
+ s3Client
+ .createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
assertThat(
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- }.uploads()
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ }.uploads(),
).isNotEmpty
- val eTag = s3Client.uploadPart(
- {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(uploadId)
- it.partNumber(1)
- },
- RequestBody.fromFile(UPLOAD_FILE)
- ).eTag()
+ val eTag =
+ s3Client
+ .uploadPart(
+ {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(uploadId)
+ it.partNumber(1)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).eTag()
val invalidPartNumber = 10
assertThatThrownBy {
@@ -1794,8 +1905,7 @@ internal class MultipartIT : S3TestBase() {
})
}
}
- }
- .isInstanceOf(S3Exception::class.java)
+ }.isInstanceOf(S3Exception::class.java)
.hasMessageContaining(INVALID_PART)
}
@@ -1804,25 +1914,28 @@ internal class MultipartIT : S3TestBase() {
key: String,
uploadId: String,
partNumber: Int,
- randomBytes: ByteArray
+ randomBytes: ByteArray,
): String {
ByteArrayInputStream(randomBytes).use { inputStream ->
- return s3Client.uploadPart(
- {
- it.bucket(bucketName)
- it.key(key)
- it.uploadId(uploadId)
- it.partNumber(partNumber)
- it.contentLength(randomBytes.size.toLong())
- },
- RequestBody.fromInputStream(inputStream, randomBytes.size.toLong())
- ).eTag()
+ return s3Client
+ .uploadPart(
+ {
+ it.bucket(bucketName)
+ it.key(key)
+ it.uploadId(uploadId)
+ it.partNumber(partNumber)
+ it.contentLength(randomBytes.size.toLong())
+ },
+ RequestBody.fromInputStream(inputStream, randomBytes.size.toLong()),
+ ).eTag()
}
}
companion object {
private const val NO_SUCH_BUCKET = "The specified bucket does not exist"
private const val INVALID_PART_NUMBER = "Part number must be an integer between 1 and 10000, inclusive"
- private const val INVALID_PART = "One or more of the specified parts could not be found. The part might not have been uploaded, or the specified entity tag may not match the part's entity tag."
+ private const val INVALID_PART =
+ "One or more of the specified parts could not be found. " +
+ "The part might not have been uploaded, or the specified entity tag may not match the part's entity tag."
}
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingIT.kt
index fd2cab735..1b3f7000b 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/ObjectTaggingIT.kt
@@ -36,14 +36,15 @@ internal class ObjectTaggingIT : S3TestBase() {
it.bucket(bucketName)
it.key(key)
},
- RequestBody.fromString("foo")
+ RequestBody.fromString("foo"),
)
assertThat(
- s3Client.getObjectTagging {
- it.bucket(bucketName)
- it.key(key)
- }.tagSet()
+ s3Client
+ .getObjectTagging {
+ it.bucket(bucketName)
+ it.key(key)
+ }.tagSet(),
).isEmpty()
}
@@ -64,13 +65,14 @@ internal class ObjectTaggingIT : S3TestBase() {
}
assertThat(
- s3Client.getObjectTagging {
- it.bucket(bucketName)
- it.key(key)
- }.tagSet()
+ s3Client
+ .getObjectTagging {
+ it.bucket(bucketName)
+ it.key(key)
+ }.tagSet(),
).contains(
tag1,
- tag2
+ tag2,
)
}
@@ -91,13 +93,14 @@ internal class ObjectTaggingIT : S3TestBase() {
}
assertThat(
- s3Client.getObjectTagging {
- it.bucket(bucketName)
- it.key(key)
- }.tagSet()
+ s3Client
+ .getObjectTagging {
+ it.bucket(bucketName)
+ it.key(key)
+ }.tagSet(),
).contains(
tag1,
- tag2
+ tag2,
)
s3Client.deleteObjectTagging {
@@ -106,10 +109,11 @@ internal class ObjectTaggingIT : S3TestBase() {
}
assertThat(
- s3Client.getObjectTagging {
- it.bucket(bucketName)
- it.key(key)
- }.tagSet()
+ s3Client
+ .getObjectTagging {
+ it.bucket(bucketName)
+ it.key(key)
+ }.tagSet(),
).isEmpty()
}
@@ -125,14 +129,15 @@ internal class ObjectTaggingIT : S3TestBase() {
it.key(key)
it.tagging("msv=foo")
},
- RequestBody.fromString("foo")
+ RequestBody.fromString("foo"),
)
assertThat(
- s3Client.getObjectTagging {
- it.bucket(bucketName)
- it.key(key)
- }.tagSet()
+ s3Client
+ .getObjectTagging {
+ it.bucket(bucketName)
+ it.key(key)
+ }.tagSet(),
).contains(tag("msv" to "foo"))
}
@@ -150,21 +155,30 @@ internal class ObjectTaggingIT : S3TestBase() {
it.key(key)
it.tagging(Tagging.builder().tagSet(tag1, tag2).build())
},
- RequestBody.fromString("foo")
+ RequestBody.fromString("foo"),
)
assertThat(
- s3Client.getObjectTagging {
- it.bucket(bucketName)
- it.key(key)
- }.tagSet()
+ s3Client
+ .getObjectTagging {
+ it.bucket(bucketName)
+ it.key(key)
+ }.tagSet(),
).contains(
tag1,
- tag2
+ tag2,
)
}
- private fun tag(key: String, value: String): Tag = Tag.builder().key(key).value(value).build()
+ private fun tag(
+ key: String,
+ value: String,
+ ): Tag =
+ Tag
+ .builder()
+ .key(key)
+ .value(value)
+ .build()
private fun tag(pair: Pair): Tag = tag(pair.first, pair.second)
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PlainHttpIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PlainHttpIT.kt
index 4d1d86ba8..c0e4f805d 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PlainHttpIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PlainHttpIT.kt
@@ -48,15 +48,18 @@ internal class PlainHttpIT : S3TestBase() {
private val s3Client: S3Client = createS3Client()
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun putObjectReturns200(testInfo: TestInfo) {
val targetBucket = givenBucket(testInfo)
val byteArray = UUID.randomUUID().toString().toByteArray()
- val putObject = HttpPut("$serviceEndpoint/$targetBucket/testObjectName").apply {
- entity = ByteArrayEntity(byteArray)
- addHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_OCTET_STREAM_VALUE)
- }
+ val putObject =
+ HttpPut("$serviceEndpoint/$targetBucket/testObjectName").apply {
+ entity = ByteArrayEntity(byteArray)
+ addHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_OCTET_STREAM_VALUE)
+ }
httpClient.execute(putObject).use {
assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
@@ -64,14 +67,17 @@ internal class PlainHttpIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun testGetObject_withAcceptHeader(testInfo: TestInfo) {
val (targetBucket, _) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
- val getObject = HttpGet("$serviceEndpoint/$targetBucket/$UPLOAD_FILE_NAME").apply {
- addHeader(HttpHeaders.ACCEPT, MediaType.TEXT_PLAIN_VALUE)
- }
+ val getObject =
+ HttpGet("$serviceEndpoint/$targetBucket/$UPLOAD_FILE_NAME").apply {
+ addHeader(HttpHeaders.ACCEPT, MediaType.TEXT_PLAIN_VALUE)
+ }
httpClient.execute(getObject).use {
assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
@@ -79,38 +85,46 @@ internal class PlainHttpIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2023,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2023,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun putHeadObject_withUserMetadata(testInfo: TestInfo) {
val targetBucket = givenBucket(testInfo)
val byteArray = UUID.randomUUID().toString().toByteArray()
val amzMetaHeaderKey = "x-amz-meta-my-key"
val amzMetaHeaderValue = "MY_DATA"
- val putObject = HttpPut("$serviceEndpoint/$targetBucket/testObjectName").apply {
- entity = ByteArrayEntity(byteArray)
- addHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_OCTET_STREAM_VALUE)
- addHeader(amzMetaHeaderKey, amzMetaHeaderValue)
- }
+ val putObject =
+ HttpPut("$serviceEndpoint/$targetBucket/testObjectName").apply {
+ entity = ByteArrayEntity(byteArray)
+ addHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_OCTET_STREAM_VALUE)
+ addHeader(amzMetaHeaderKey, amzMetaHeaderValue)
+ }
httpClient.execute(putObject).use {
assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
}
val headObject = HttpHead("/$targetBucket/testObjectName")
- httpClient.execute(
- HttpHost(
- host, httpPort
- ), headObject
- ).use {
- assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
- assertThat(it.getFirstHeader(amzMetaHeaderKey).name).isEqualTo(amzMetaHeaderKey)
- assertThat(it.getFirstHeader(amzMetaHeaderKey).value).isEqualTo(amzMetaHeaderValue)
- }
+ httpClient
+ .execute(
+ HttpHost(
+ host,
+ httpPort,
+ ),
+ headObject,
+ ).use {
+ assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ assertThat(it.getFirstHeader(amzMetaHeaderKey).name).isEqualTo(amzMetaHeaderKey)
+ assertThat(it.getFirstHeader(amzMetaHeaderKey).value).isEqualTo(amzMetaHeaderValue)
+ }
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun createBucketWithDisallowedName() {
HttpPut("$serviceEndpoint/$INVALID_BUCKET_NAME").also {
httpClient.execute(it).use { response ->
@@ -118,33 +132,39 @@ internal class PlainHttpIT : S3TestBase() {
assertThat(
InputStreamReader(response.entity.content)
.readLines()
- .joinToString(separator = ""))
- .isEqualTo("InvalidBucketName" +
- "The specified bucket is not valid.")
+ .joinToString(separator = ""),
+ ).isEqualTo(
+ "InvalidBucketName" +
+ "The specified bucket is not valid.",
+ )
}
}
-
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun putObjectEncryptedWithAbsentKeyRef(testInfo: TestInfo) {
val targetBucket = givenBucket(testInfo)
- HttpPut("$serviceEndpoint/$targetBucket/testObjectName").apply {
- addHeader("x-amz-server-side-encryption", "aws:kms")
- entity = ByteArrayEntity(UUID.randomUUID().toString().toByteArray())
- }.also {
- httpClient.execute(it).use { response ->
- assertThat(response.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ HttpPut("$serviceEndpoint/$targetBucket/testObjectName")
+ .apply {
+ addHeader("x-amz-server-side-encryption", "aws:kms")
+ entity = ByteArrayEntity(UUID.randomUUID().toString().toByteArray())
+ }.also {
+ httpClient.execute(it).use { response ->
+ assertThat(response.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ }
}
- }
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun listWithPrefixAndMissingSlash(testInfo: TestInfo) {
val (targetBucket, _) = givenBucketAndObject(testInfo, UPLOAD_FILE_NAME)
@@ -153,7 +173,6 @@ internal class PlainHttpIT : S3TestBase() {
assertThat(response.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
}
}
-
}
@Test
@@ -167,17 +186,20 @@ internal class PlainHttpIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun testCorsHeaders_GET_PUT_HEAD(testInfo: TestInfo) {
val targetBucket = givenBucket(testInfo)
arrayOf("GET", "PUT", "HEAD").forEach { method ->
- val httpOptions = HttpOptions("$serviceEndpoint/$targetBucket").apply {
- setHeader("Origin", "http://someurl.com")
- setHeader("Access-Control-Request-Method", method)
- setHeader("Access-Control-Request-Headers", "Content-Type, x-requested-with")
- }
+ val httpOptions =
+ HttpOptions("$serviceEndpoint/$targetBucket").apply {
+ setHeader("Origin", "http://someurl.com")
+ setHeader("Access-Control-Request-Method", method)
+ setHeader("Access-Control-Request-Headers", "Content-Type, x-requested-with")
+ }
httpClient.execute(httpOptions).use {
assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
@@ -192,35 +214,40 @@ internal class PlainHttpIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun testCorsHeaders_POST(testInfo: TestInfo) {
val targetBucket = givenBucket(testInfo)
arrayOf("POST").forEach { method ->
- val httpOptions = HttpOptions("$serviceEndpoint/$targetBucket?delete").apply {
- setHeader("Origin", "http://someurl.com")
- setHeader("Access-Control-Request-Method", method)
- setHeader("Access-Control-Request-Headers", "Content-Type, x-requested-with")
- }
+ val httpOptions =
+ HttpOptions("$serviceEndpoint/$targetBucket?delete").apply {
+ setHeader("Origin", "http://someurl.com")
+ setHeader("Access-Control-Request-Method", method)
+ setHeader("Access-Control-Request-Headers", "Content-Type, x-requested-with")
+ }
httpClient.execute(httpOptions).use {
assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
- //for POST requests, Access-Control-Allow-Origin is always returned as "*"
+ // for POST requests, Access-Control-Allow-Origin is always returned as "*"
assertThat(it.getFirstHeader("Access-Control-Allow-Origin").value).isEqualTo("*")
assertThat(it.getFirstHeader("Access-Control-Allow-Methods").value).isEqualTo(method)
assertThat(it.getFirstHeader("Access-Control-Allow-Headers").value)
.isEqualTo("Content-Type, x-requested-with")
- //for POST requests, Access-Control-Allow-Credentials is never returned.
- //assertThat(it.getFirstHeader("Access-Control-Allow-Credentials").value).isEqualTo("true")
+ // for POST requests, Access-Control-Allow-Credentials is never returned.
+ // assertThat(it.getFirstHeader("Access-Control-Allow-Credentials").value).isEqualTo("true")
assertThat(it.getFirstHeader("Allow").value).contains(method)
}
}
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun listBucketsUsesApplicationXmlContentType(testInfo: TestInfo) {
givenBucket(testInfo)
HttpGet("$serviceEndpoint$SLASH").also {
@@ -229,74 +256,87 @@ internal class PlainHttpIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun batchDeleteUsesApplicationXmlContentType(testInfo: TestInfo) {
val targetBucket = givenBucket(testInfo)
- HttpPost("$serviceEndpoint/$targetBucket?delete").apply {
- entity = StringEntity(
- """
+ HttpPost("$serviceEndpoint/$targetBucket?delete")
+ .apply {
+ entity =
+ StringEntity(
+ """
""",
- ContentType.APPLICATION_XML
- )
- }.also {
- assertApplicationXmlContentType(it)
- }
+ ContentType.APPLICATION_XML,
+ )
+ }.also {
+ assertApplicationXmlContentType(it)
+ }
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun completeMultipartUsesApplicationXmlContentType(testInfo: TestInfo) {
val targetBucket = givenBucket(testInfo)
- val initiateMultipartUploadResult = s3Client
- .createMultipartUpload {
- it.bucket(targetBucket)
- it.key(UPLOAD_FILE_NAME)
- }
+ val initiateMultipartUploadResult =
+ s3Client
+ .createMultipartUpload {
+ it.bucket(targetBucket)
+ it.key(UPLOAD_FILE_NAME)
+ }
val uploadId = initiateMultipartUploadResult.uploadId()
- val uploadPartResult = s3Client.uploadPart(
- {
- it.bucket(initiateMultipartUploadResult.bucket())
- it.key(initiateMultipartUploadResult.key())
- it.uploadId(uploadId)
- it.partNumber(1)
- it.contentLength(UPLOAD_FILE_LENGTH)
- },
- RequestBody.fromFile(UPLOAD_FILE)
- )
-
+ val uploadPartResult =
+ s3Client.uploadPart(
+ {
+ it.bucket(initiateMultipartUploadResult.bucket())
+ it.key(initiateMultipartUploadResult.key())
+ it.uploadId(uploadId)
+ it.partNumber(1)
+ it.contentLength(UPLOAD_FILE_LENGTH)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ )
- HttpPost("$serviceEndpoint/$targetBucket/$UPLOAD_FILE_NAME?uploadId=$uploadId").apply {
- entity = StringEntity(
- """
+ HttpPost("$serviceEndpoint/$targetBucket/$UPLOAD_FILE_NAME?uploadId=$uploadId")
+ .apply {
+ entity =
+ StringEntity(
+ """
${uploadPartResult.eTag()}
1
""",
- ContentType.APPLICATION_XML
- )
- }.also {
- assertApplicationXmlContentType(it)
- }
+ ContentType.APPLICATION_XML,
+ )
+ }.also {
+ assertApplicationXmlContentType(it)
+ }
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun putObjectWithSpecialCharactersInTheName(testInfo: TestInfo) {
- val fileNameWithSpecialCharacters = ("file=name\$Dollar;Semicolon"
- + "&Ampersand@At:Colon Space,Comma?Question-mark")
+ val fileNameWithSpecialCharacters = (
+ "file=name\$Dollar;Semicolon" +
+ "&Ampersand@At:Colon Space,Comma?Question-mark"
+ )
val targetBucket = givenBucket(testInfo)
HttpPut(
- "$serviceEndpoint/$targetBucket/${SdkHttpUtils.urlEncodeIgnoreSlashes(fileNameWithSpecialCharacters)}"
+ "$serviceEndpoint/$targetBucket/${SdkHttpUtils.urlEncodeIgnoreSlashes(fileNameWithSpecialCharacters)}",
).apply {
entity = ByteArrayEntity(UUID.randomUUID().toString().toByteArray())
}.also {
@@ -306,16 +346,20 @@ internal class PlainHttpIT : S3TestBase() {
}
assertThat(
- s3Client.listObjects {
- it.bucket(targetBucket)
- it.prefix(fileNameWithSpecialCharacters)
- }.contents()[0].key()
+ s3Client
+ .listObjects {
+ it.bucket(targetBucket)
+ it.prefix(fileNameWithSpecialCharacters)
+ }.contents()[0]
+ .key(),
).isEqualTo(fileNameWithSpecialCharacters)
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun deleteNonExistingObjectReturns204(testInfo: TestInfo) {
val targetBucket = givenBucket(testInfo)
@@ -324,46 +368,51 @@ internal class PlainHttpIT : S3TestBase() {
assertThat(response.statusLine.statusCode).isEqualTo(HttpStatus.SC_NO_CONTENT)
}
}
-
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun batchDeleteObjects(testInfo: TestInfo) {
val targetBucket = givenBucket(testInfo)
- HttpPost("$serviceEndpoint/$targetBucket?delete").apply {
- entity = StringEntity(
- """
+ HttpPost("$serviceEndpoint/$targetBucket?delete")
+ .apply {
+ entity =
+ StringEntity(
+ """
""",
- ContentType.APPLICATION_XML
- )
- }.also {
- httpClient.execute(it).use { response ->
- assertThat(response.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ ContentType.APPLICATION_XML,
+ )
+ }.also {
+ httpClient.execute(it).use { response ->
+ assertThat(response.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ }
}
- }
-
}
@Test
- @S3VerifiedFailure(year = 2022,
- reason = "No credentials sent in plain HTTP request")
+ @S3VerifiedFailure(
+ year = 2022,
+ reason = "No credentials sent in plain HTTP request",
+ )
fun headObjectWithUnknownContentType(testInfo: TestInfo) {
val targetBucket = givenBucket(testInfo)
val contentAsBytes = ByteArray(0)
val blankContentTypeFilename = UUID.randomUUID().toString()
- s3Client.putObject({
+ s3Client.putObject(
+ {
it.bucket(targetBucket)
it.key(blankContentTypeFilename)
it.contentType(UUID.randomUUID().toString())
it.contentLength(contentAsBytes.size.toLong())
},
- RequestBody.fromBytes(contentAsBytes)
+ RequestBody.fromBytes(contentAsBytes),
)
HttpHead("$serviceEndpoint/$targetBucket/$blankContentTypeFilename").also {
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PresignedUrlIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PresignedUrlIT.kt
index 7a25f7b80..7f3655ca1 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PresignedUrlIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/PresignedUrlIT.kt
@@ -16,6 +16,7 @@
package com.adobe.testing.s3mock.its
import com.adobe.testing.s3mock.dto.InitiateMultipartUploadResult
+import com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_STORAGE_CLASS
import com.adobe.testing.s3mock.util.DigestUtil
import org.apache.http.HttpHeaders
import org.apache.http.HttpHeaders.CONTENT_TYPE
@@ -36,6 +37,7 @@ import org.junit.jupiter.api.TestInfo
import software.amazon.awssdk.core.sync.RequestBody
import software.amazon.awssdk.services.s3.S3Client
import software.amazon.awssdk.services.s3.model.CompletedPart
+import software.amazon.awssdk.services.s3.model.StorageClass
import software.amazon.awssdk.services.s3.presigner.S3Presigner
import tel.schich.awss3postobjectpresigner.S3PostObjectPresigner
import tel.schich.awss3postobjectpresigner.S3PostObjectRequest
@@ -49,57 +51,69 @@ internal class PresignedUrlIT : S3TestBase() {
private val s3PostObjectPresigner: S3PostObjectPresigner = createS3PostObjectPresigner()
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "S3PostObjectPresigner does not create working presigned URLs.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "S3PostObjectPresigner does not create working presigned URLs.",
+ )
fun testPresignedUrl_postObject_largeFile(testInfo: TestInfo) {
val key = randomName
val bucketName = givenBucket(testInfo)
- val presignedUrlString = s3PostObjectPresigner.presignPost(
- S3PostObjectRequest
- .builder()
- .bucket(bucketName)
- .expiration(Duration.ofMinutes(1L))
- .build()
- ).uri().toString()
+ val presignedUrlString =
+ s3PostObjectPresigner
+ .presignPost(
+ S3PostObjectRequest
+ .builder()
+ .bucket(bucketName)
+ .expiration(Duration.ofMinutes(1L))
+ .build(),
+ ).uri()
+ .toString()
assertThat(presignedUrlString).isNotBlank()
val randomMBytes = randomMBytes(20)
val expectedEtag = randomMBytes.inputStream().use { "\"${DigestUtil.hexDigest(it)}\"" }
- HttpPost(presignedUrlString).apply {
- this.entity = MultipartEntityBuilder.create()
- .addTextBody("key", key)
- .addTextBody(CONTENT_TYPE, "application/octet-stream")
- //.addTextBody(X_AMZ_STORAGE_CLASS, "INTELLIGENT_TIERING")
- .addTextBody("tagging", "Tag NameTag Value")
- .addBinaryBody("file", randomMBytes.inputStream(), ContentType.APPLICATION_OCTET_STREAM, key)
- .build()
+ HttpPost(presignedUrlString)
+ .apply {
+ this.entity =
+ MultipartEntityBuilder
+ .create()
+ .addTextBody("key", key)
+ .addTextBody(CONTENT_TYPE, "application/octet-stream")
+ .addTextBody(X_AMZ_STORAGE_CLASS, "INTELLIGENT_TIERING")
+ .addTextBody("tagging", "Tag NameTag Value")
+ .addBinaryBody("file", randomMBytes.inputStream(), ContentType.APPLICATION_OCTET_STREAM, key)
+ .build()
}.also { post ->
- httpClient.execute(
- post
- ).use {
- assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
- val actualEtag = it.getFirstHeader(HttpHeaders.ETAG).value
- assertThat(actualEtag).isEqualTo(expectedEtag)
+ httpClient
+ .execute(
+ post,
+ ).use {
+ assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ val actualEtag = it.getFirstHeader(HttpHeaders.ETAG).value
+ assertThat(actualEtag).isEqualTo(expectedEtag)
+ }
}
- }
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- }.use {
- val actualEtag = "\"${DigestUtil.hexDigest(it)}\""
- assertThat(actualEtag).isEqualTo(expectedEtag)
- }
- s3Client.getObjectTagging {
- it.bucket(bucketName)
- it.key(key)
- }.also {
- assertThat(it.tagSet()).hasSize(1)
- assertThat(it.tagSet()[0].key()).isEqualTo("Tag Name")
- assertThat(it.tagSet()[0].value()).isEqualTo("Tag Value")
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(key)
+ }.use {
+ val actualEtag = "\"${DigestUtil.hexDigest(it)}\""
+ assertThat(actualEtag).isEqualTo(expectedEtag)
+ assertThat(it.response().storageClass()).isEqualTo(StorageClass.INTELLIGENT_TIERING)
+ }
+ s3Client
+ .getObjectTagging {
+ it.bucket(bucketName)
+ it.key(key)
+ }.also {
+ assertThat(it.tagSet()).hasSize(1)
+ assertThat(it.tagSet()[0].key()).isEqualTo("Tag Name")
+ assertThat(it.tagSet()[0].value()).isEqualTo("Tag Value")
+ }
}
@Test
@@ -108,27 +122,32 @@ internal class PresignedUrlIT : S3TestBase() {
val key = UPLOAD_FILE_NAME
val (bucketName, _) = givenBucketAndObject(testInfo, key)
- val presignedUrlString = s3Presigner.presignGetObject {
- it.getObjectRequest {
- it.bucket(bucketName)
- it.key(key)
- }
- it.signatureDuration(Duration.ofMinutes(1L))
- }.url().toString()
+ val presignedUrlString =
+ s3Presigner
+ .presignGetObject {
+ it.getObjectRequest {
+ it.bucket(bucketName)
+ it.key(key)
+ }
+ it.signatureDuration(Duration.ofMinutes(1L))
+ }.url()
+ .toString()
assertThat(presignedUrlString).isNotBlank()
- val expectedEtag = UPLOAD_FILE.inputStream().use {
- "\"${DigestUtil.hexDigest(it)}\""
- }
- HttpGet(presignedUrlString).also { get ->
- httpClient.execute(
- get
- ).use {
- assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
- val actualEtag = "\"${DigestUtil.hexDigest(it.entity.content)}\""
- assertThat(actualEtag).isEqualTo(expectedEtag)
+ val expectedEtag =
+ UPLOAD_FILE.inputStream().use {
+ "\"${DigestUtil.hexDigest(it)}\""
}
+ HttpGet(presignedUrlString).also { get ->
+ httpClient
+ .execute(
+ get,
+ ).use {
+ assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ val actualEtag = "\"${DigestUtil.hexDigest(it.entity.content)}\""
+ assertThat(actualEtag).isEqualTo(expectedEtag)
+ }
}
}
@@ -140,39 +159,44 @@ internal class PresignedUrlIT : S3TestBase() {
val responseExpires = Instant.now()
- val presignedUrlString = s3Presigner.presignGetObject {
- it.getObjectRequest {
- it.bucket(bucketName)
- it.key(key)
- it.responseExpires(responseExpires)
- it.responseCacheControl("no-cache")
- it.responseContentDisposition("attachment; filename=\"$key\"")
- it.responseContentEncoding("encoding")
- it.responseContentType("application/json")
- it.responseContentLanguage("en")
- }
- it.signatureDuration(Duration.ofMinutes(1L))
- }.url().toString()
+ val presignedUrlString =
+ s3Presigner
+ .presignGetObject {
+ it.getObjectRequest {
+ it.bucket(bucketName)
+ it.key(key)
+ it.responseExpires(responseExpires)
+ it.responseCacheControl("no-cache")
+ it.responseContentDisposition("attachment; filename=\"$key\"")
+ it.responseContentEncoding("encoding")
+ it.responseContentType("application/json")
+ it.responseContentLanguage("en")
+ }
+ it.signatureDuration(Duration.ofMinutes(1L))
+ }.url()
+ .toString()
assertThat(presignedUrlString).isNotBlank()
- val expectedEtag = UPLOAD_FILE.inputStream().use {
- "\"${DigestUtil.hexDigest(it)}\""
- }
- HttpGet(presignedUrlString).also { get ->
- httpClient.execute(
- get
- ).use {
- assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
- val actualEtag = "\"${DigestUtil.hexDigest(it.entity.content)}\""
- assertThat(actualEtag).isEqualTo(expectedEtag)
- //TODO: S3 SDK serializes date as 'Sun, 20 Apr 2025 22:07:04 GMT'
- //assertThat(it.getFirstHeader(HttpHeaders.EXPIRES).value).isEqualTo(responseExpires)
- assertThat(it.getFirstHeader(HttpHeaders.CACHE_CONTROL).value).isEqualTo("no-cache")
- assertThat(it.getFirstHeader("Content-Disposition").value).isEqualTo("attachment; filename=\"$key\"")
- assertThat(it.getFirstHeader(HttpHeaders.CONTENT_ENCODING).value).isEqualTo("encoding")
- assertThat(it.getFirstHeader(CONTENT_TYPE).value).isEqualTo("application/json")
- assertThat(it.getFirstHeader(HttpHeaders.CONTENT_LANGUAGE).value).isEqualTo("en")
+ val expectedEtag =
+ UPLOAD_FILE.inputStream().use {
+ "\"${DigestUtil.hexDigest(it)}\""
}
+ HttpGet(presignedUrlString).also { get ->
+ httpClient
+ .execute(
+ get,
+ ).use {
+ assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ val actualEtag = "\"${DigestUtil.hexDigest(it.entity.content)}\""
+ assertThat(actualEtag).isEqualTo(expectedEtag)
+ // TODO: S3 SDK serializes date as 'Sun, 20 Apr 2025 22:07:04 GMT'
+ // assertThat(it.getFirstHeader(HttpHeaders.EXPIRES).value).isEqualTo(responseExpires)
+ assertThat(it.getFirstHeader(HttpHeaders.CACHE_CONTROL).value).isEqualTo("no-cache")
+ assertThat(it.getFirstHeader("Content-Disposition").value).isEqualTo("attachment; filename=\"$key\"")
+ assertThat(it.getFirstHeader(HttpHeaders.CONTENT_ENCODING).value).isEqualTo("encoding")
+ assertThat(it.getFirstHeader(CONTENT_TYPE).value).isEqualTo("application/json")
+ assertThat(it.getFirstHeader(HttpHeaders.CONTENT_LANGUAGE).value).isEqualTo("en")
+ }
}
}
@@ -182,25 +206,29 @@ internal class PresignedUrlIT : S3TestBase() {
val key = UPLOAD_FILE_NAME
val (bucketName, _) = givenBucketAndObject(testInfo, key)
- val presignedUrlString = s3Presigner.presignGetObject {
- it.getObjectRequest{
- it.bucket(bucketName)
- it.key(key)
- }
- it.signatureDuration(Duration.ofMinutes(1L))
- }.url().toString()
+ val presignedUrlString =
+ s3Presigner
+ .presignGetObject {
+ it.getObjectRequest {
+ it.bucket(bucketName)
+ it.key(key)
+ }
+ it.signatureDuration(Duration.ofMinutes(1L))
+ }.url()
+ .toString()
assertThat(presignedUrlString).isNotBlank()
HttpGet(presignedUrlString).also { get ->
get.setHeader(HttpHeaders.RANGE, "bytes=0-100")
- httpClient.execute(
- get
- ).use {
- assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_PARTIAL_CONTENT)
- assertThat(it.getFirstHeader(HttpHeaders.CONTENT_LENGTH).value).isEqualTo("101")
- assertThat(it.getFirstHeader(HttpHeaders.CONTENT_RANGE).value).isEqualTo("bytes 0-100/63839")
- }
+ httpClient
+ .execute(
+ get,
+ ).use {
+ assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_PARTIAL_CONTENT)
+ assertThat(it.getFirstHeader(HttpHeaders.CONTENT_LENGTH).value).isEqualTo("101")
+ assertThat(it.getFirstHeader(HttpHeaders.CONTENT_RANGE).value).isEqualTo("bytes 0-100/63839")
+ }
}
}
@@ -210,35 +238,42 @@ internal class PresignedUrlIT : S3TestBase() {
val key = UPLOAD_FILE_NAME
val bucketName = givenBucket(testInfo)
- val presignedUrlString = s3Presigner.presignPutObject {
- it.putObjectRequest {
- it.bucket(bucketName)
- it.key(key)
- }
- it.signatureDuration(Duration.ofMinutes(1L))
- }.url().toString()
+ val presignedUrlString =
+ s3Presigner
+ .presignPutObject {
+ it.putObjectRequest {
+ it.bucket(bucketName)
+ it.key(key)
+ }
+ it.signatureDuration(Duration.ofMinutes(1L))
+ }.url()
+ .toString()
assertThat(presignedUrlString).isNotBlank()
- HttpPut(presignedUrlString).apply {
- entity = FileEntity(UPLOAD_FILE)
- }.also { put ->
- httpClient.execute(
- put
- ).use {
- assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ HttpPut(presignedUrlString)
+ .apply {
+ entity = FileEntity(UPLOAD_FILE)
+ }.also { put ->
+ httpClient
+ .execute(
+ put,
+ ).use {
+ assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ }
+ }
+ val expectedEtag =
+ UPLOAD_FILE.inputStream().use {
+ "\"${DigestUtil.hexDigest(it)}\""
+ }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(key)
+ }.use {
+ val actualEtag = "\"${DigestUtil.hexDigest(it)}\""
+ assertThat(actualEtag).isEqualTo(expectedEtag)
}
- }
- val expectedEtag = UPLOAD_FILE.inputStream().use {
- "\"${DigestUtil.hexDigest(it)}\""
- }
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- }.use {
- val actualEtag = "\"${DigestUtil.hexDigest(it)}\""
- assertThat(actualEtag).isEqualTo(expectedEtag)
- }
}
@Test
@@ -247,34 +282,40 @@ internal class PresignedUrlIT : S3TestBase() {
val key = randomName
val bucketName = givenBucket(testInfo)
- val presignedUrlString = s3Presigner.presignPutObject {
- it.putObjectRequest {
- it.bucket(bucketName)
- it.key(key)
- }
- it.signatureDuration(Duration.ofMinutes(1L))
- }.url().toString()
+ val presignedUrlString =
+ s3Presigner
+ .presignPutObject {
+ it.putObjectRequest {
+ it.bucket(bucketName)
+ it.key(key)
+ }
+ it.signatureDuration(Duration.ofMinutes(1L))
+ }.url()
+ .toString()
assertThat(presignedUrlString).isNotBlank()
val randomMBytes = randomMBytes(20)
- HttpPut(presignedUrlString).apply {
- this.entity = ByteArrayEntity(randomMBytes)
- }.also { put ->
- httpClient.execute(
- put
- ).use {
- assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ HttpPut(presignedUrlString)
+ .apply {
+ this.entity = ByteArrayEntity(randomMBytes)
+ }.also { put ->
+ httpClient
+ .execute(
+ put,
+ ).use {
+ assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ }
}
- }
val expectedEtag = randomMBytes.inputStream().use { "\"${DigestUtil.hexDigest(it)}\"" }
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(key)
- }.use {
- val actualEtag = "\"${DigestUtil.hexDigest(it)}\""
- assertThat(actualEtag).isEqualTo(expectedEtag)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(key)
+ }.use {
+ val actualEtag = "\"${DigestUtil.hexDigest(it)}\""
+ assertThat(actualEtag).isEqualTo(expectedEtag)
+ }
}
@Test
@@ -283,34 +324,40 @@ internal class PresignedUrlIT : S3TestBase() {
val key = UPLOAD_FILE_NAME
val bucketName = givenBucket(testInfo)
- val presignedUrlString = s3Presigner.presignCreateMultipartUpload {
- it.createMultipartUploadRequest{
- it.bucket(bucketName)
- it.key(key)
- }
- it.signatureDuration(Duration.ofMinutes(1L))
- }.url().toString()
+ val presignedUrlString =
+ s3Presigner
+ .presignCreateMultipartUpload {
+ it.createMultipartUploadRequest {
+ it.bucket(bucketName)
+ it.key(key)
+ }
+ it.signatureDuration(Duration.ofMinutes(1L))
+ }.url()
+ .toString()
assertThat(presignedUrlString).isNotBlank()
- val uploadId = HttpPost(presignedUrlString)
- .let { post ->
- httpClient.execute(
- post
- ).use {
- assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
- val result = MAPPER.readValue(it.entity.content, InitiateMultipartUploadResult::class.java)
- assertThat(result).isNotNull
- result
- }.uploadId
- }
+ val uploadId =
+ HttpPost(presignedUrlString)
+ .let { post ->
+ httpClient
+ .execute(
+ post,
+ ).use {
+ assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ val result = MAPPER.readValue(it.entity.content, InitiateMultipartUploadResult::class.java)
+ assertThat(result).isNotNull
+ result
+ }.uploadId
+ }
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- }.also {
- assertThat(it.uploads()).hasSize(1)
- assertThat(it.uploads()[0].uploadId()).isEqualTo(uploadId)
- }
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ }.also {
+ assertThat(it.uploads()).hasSize(1)
+ assertThat(it.uploads()[0].uploadId()).isEqualTo(uploadId)
+ }
}
@Test
@@ -319,10 +366,11 @@ internal class PresignedUrlIT : S3TestBase() {
val key = UPLOAD_FILE_NAME
val bucketName = givenBucket(testInfo)
- val createMultipartUpload = s3Client.createMultipartUpload {
- it.bucket(bucketName)
- it.key(key)
- }
+ val createMultipartUpload =
+ s3Client.createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(key)
+ }
val uploadId = createMultipartUpload.uploadId()
s3Client.uploadPart(
@@ -332,34 +380,40 @@ internal class PresignedUrlIT : S3TestBase() {
it.uploadId(uploadId)
it.partNumber(1)
it.contentLength(UPLOAD_FILE_LENGTH)
- }, RequestBody.fromFile(UPLOAD_FILE),
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
)
- val presignedUrlString = s3Presigner.presignAbortMultipartUpload {
- it.abortMultipartUploadRequest{
- it.bucket(bucketName)
- it.key(key)
- it.uploadId(uploadId)
- }
- it.signatureDuration(Duration.ofMinutes(1L))
- }.url().toString()
+ val presignedUrlString =
+ s3Presigner
+ .presignAbortMultipartUpload {
+ it.abortMultipartUploadRequest {
+ it.bucket(bucketName)
+ it.key(key)
+ it.uploadId(uploadId)
+ }
+ it.signatureDuration(Duration.ofMinutes(1L))
+ }.url()
+ .toString()
assertThat(presignedUrlString).isNotBlank()
HttpDelete(presignedUrlString).also { delete ->
- httpClient.execute(
- delete
- ).use {
- assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_NO_CONTENT)
- }
+ httpClient
+ .execute(
+ delete,
+ ).use {
+ assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_NO_CONTENT)
+ }
}
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- it.keyMarker(key)
- }.also {
- assertThat(it.uploads()).isEmpty()
- }
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ it.keyMarker(key)
+ }.also {
+ assertThat(it.uploads()).isEmpty()
+ }
}
@Test
@@ -368,116 +422,132 @@ internal class PresignedUrlIT : S3TestBase() {
val key = UPLOAD_FILE_NAME
val bucketName = givenBucket(testInfo)
- val createMultipartUpload = s3Client.createMultipartUpload {
- it.bucket(bucketName)
- it.key(key)
- }
-
- val uploadId = createMultipartUpload.uploadId()
- val uploadPartResult = s3Client.uploadPart(
- {
- it.bucket(createMultipartUpload.bucket())
- it.key(createMultipartUpload.key())
- it.uploadId(uploadId)
- it.partNumber(1)
- it.contentLength(UPLOAD_FILE_LENGTH)
- },
- RequestBody.fromFile(UPLOAD_FILE),
- )
-
- val presignedUrlString = s3Presigner.presignCompleteMultipartUpload {
- it.completeMultipartUploadRequest{
+ val createMultipartUpload =
+ s3Client.createMultipartUpload {
it.bucket(bucketName)
it.key(key)
- it.uploadId(uploadId)
}
- it.signatureDuration(Duration.ofMinutes(1L))
- }.url().toString()
+
+ val uploadId = createMultipartUpload.uploadId()
+ val uploadPartResult =
+ s3Client.uploadPart(
+ {
+ it.bucket(createMultipartUpload.bucket())
+ it.key(createMultipartUpload.key())
+ it.uploadId(uploadId)
+ it.partNumber(1)
+ it.contentLength(UPLOAD_FILE_LENGTH)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ )
+
+ val presignedUrlString =
+ s3Presigner
+ .presignCompleteMultipartUpload {
+ it.completeMultipartUploadRequest {
+ it.bucket(bucketName)
+ it.key(key)
+ it.uploadId(uploadId)
+ }
+ it.signatureDuration(Duration.ofMinutes(1L))
+ }.url()
+ .toString()
assertThat(presignedUrlString).isNotBlank()
- HttpPost(presignedUrlString).apply {
- setHeader("Content-Type", "application/xml")
- entity = StringEntity(
- """
+ HttpPost(presignedUrlString)
+ .apply {
+ setHeader("Content-Type", "application/xml")
+ entity =
+ StringEntity(
+ """
${uploadPartResult.eTag()}
1
- """)
- }.also { post ->
- httpClient.execute(
- post
- ).use {
- assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ """,
+ )
+ }.also { post ->
+ httpClient
+ .execute(
+ post,
+ ).use {
+ assertThat(it.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ }
}
- }
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- it.keyMarker(key)
- }.also {
- assertThat(it.uploads()).isEmpty()
- }
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ it.keyMarker(key)
+ }.also {
+ assertThat(it.uploads()).isEmpty()
+ }
}
-
@Test
@S3VerifiedSuccess(year = 2025)
fun testPresignedUrl_uploadPart(testInfo: TestInfo) {
val key = UPLOAD_FILE_NAME
val bucketName = givenBucket(testInfo)
- val createMultipartUpload = s3Client.createMultipartUpload {
- it.bucket(bucketName)
- it.key(key)
- }
+ val createMultipartUpload =
+ s3Client.createMultipartUpload {
+ it.bucket(bucketName)
+ it.key(key)
+ }
val uploadId = createMultipartUpload.uploadId()
- val presignedUrlString = s3Presigner.presignUploadPart {
- it.uploadPartRequest {
- it.bucket(createMultipartUpload.bucket())
- it.key(createMultipartUpload.key())
- it.uploadId(uploadId)
- it.partNumber(1)
- it.contentLength(UPLOAD_FILE_LENGTH)
- }
- it.signatureDuration(Duration.ofMinutes(1L))
- }.url().toString()
+ val presignedUrlString =
+ s3Presigner
+ .presignUploadPart {
+ it.uploadPartRequest {
+ it.bucket(createMultipartUpload.bucket())
+ it.key(createMultipartUpload.key())
+ it.uploadId(uploadId)
+ it.partNumber(1)
+ it.contentLength(UPLOAD_FILE_LENGTH)
+ }
+ it.signatureDuration(Duration.ofMinutes(1L))
+ }.url()
+ .toString()
assertThat(presignedUrlString).isNotBlank()
- HttpPut(presignedUrlString).apply {
- this.entity = FileEntity(UPLOAD_FILE)
- }.also { put ->
- httpClient.execute(
- put
- ).use { response ->
- assertThat(response.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
- s3Client.completeMultipartUpload {
- it.bucket(bucketName)
- it.key(key)
- it.uploadId(uploadId)
- it.multipartUpload {
- it.parts(
- CompletedPart
- .builder()
- .eTag(response.getFirstHeader("ETag").value)
- .partNumber(1)
- .build()
- )
+ HttpPut(presignedUrlString)
+ .apply {
+ this.entity = FileEntity(UPLOAD_FILE)
+ }.also { put ->
+ httpClient
+ .execute(
+ put,
+ ).use { response ->
+ assertThat(response.statusLine.statusCode).isEqualTo(HttpStatus.SC_OK)
+ s3Client.completeMultipartUpload {
+ it.bucket(bucketName)
+ it.key(key)
+ it.uploadId(uploadId)
+ it.multipartUpload {
+ it.parts(
+ CompletedPart
+ .builder()
+ .eTag(response.getFirstHeader("ETag").value)
+ .partNumber(1)
+ .build(),
+ )
+ }
+ }
}
- }
}
- }
- s3Client.listMultipartUploads {
- it.bucket(bucketName)
- it.keyMarker(key)
- }.also {
- assertThat(it.uploads()).isEmpty()
- }
+ s3Client
+ .listMultipartUploads {
+ it.bucket(bucketName)
+ it.keyMarker(key)
+ }.also {
+ assertThat(it.uploads()).isEmpty()
+ }
}
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/RealS3BackendUsedCondition.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/RealS3BackendUsedCondition.kt
index e385f7a18..b1dcb5f88 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/RealS3BackendUsedCondition.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/RealS3BackendUsedCondition.kt
@@ -28,11 +28,11 @@ import org.junit.platform.commons.util.AnnotationUtils
* Disable test annotated with S3VerifiedFailure when test runs against S3.
*/
class RealS3BackendUsedCondition : ExecutionCondition {
- override fun evaluateExecutionCondition(context: ExtensionContext): ConditionEvaluationResult {
- val failure = AnnotationUtils.findAnnotation(context.element, S3VerifiedFailure::class.java)
- if (failure.isPresent && System.getProperty("it.s3mock.endpoint") != null) {
- return ConditionEvaluationResult.disabled(failure.get().reason)
- }
- return ConditionEvaluationResult.enabled("")
+ override fun evaluateExecutionCondition(context: ExtensionContext): ConditionEvaluationResult {
+ val failure = AnnotationUtils.findAnnotation(context.element, S3VerifiedFailure::class.java)
+ if (failure.isPresent && System.getProperty("it.s3mock.endpoint") != null) {
+ return ConditionEvaluationResult.disabled(failure.get().reason)
}
+ return ConditionEvaluationResult.enabled("")
+ }
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/RetentionIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/RetentionIT.kt
index 7e62e61f7..e521f9af3 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/RetentionIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/RetentionIT.kt
@@ -63,7 +63,7 @@ internal class RetentionIT : S3TestBase() {
it.bucket(bucketName)
it.key(sourceKey)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
assertThatThrownBy {
@@ -77,8 +77,10 @@ internal class RetentionIT : S3TestBase() {
}
@Test
- @S3VerifiedFailure(year = 2025,
- reason = "S3 Object Lock makes it impossible to delete the object until the retention period is over.")
+ @S3VerifiedFailure(
+ year = 2025,
+ reason = "S3 Object Lock makes it impossible to delete the object until the retention period is over.",
+ )
fun testPutAndGetRetention(testInfo: TestInfo) {
val sourceKey = UPLOAD_FILE_NAME
val bucketName = bucketName(testInfo)
@@ -91,7 +93,7 @@ internal class RetentionIT : S3TestBase() {
it.bucket(bucketName)
it.key(sourceKey)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
val retainUntilDate = Instant.now().plus(1, DAYS)
@@ -99,24 +101,27 @@ internal class RetentionIT : S3TestBase() {
it.bucket(bucketName)
it.key(sourceKey)
it.retention(
- ObjectLockRetention.builder()
+ ObjectLockRetention
+ .builder()
.mode(ObjectLockRetentionMode.COMPLIANCE)
.retainUntilDate(retainUntilDate)
- .build()
+ .build(),
)
}
- s3Client.getObjectRetention {
- it.bucket(bucketName)
- it.key(sourceKey)
- }.also {
- assertThat(it.retention().mode()).isEqualTo(ObjectLockRetentionMode.COMPLIANCE)
- //the returned date has MILLIS resolution, the local instant is in NANOS.
- assertThat(it.retention().retainUntilDate())
- .isCloseTo(
- retainUntilDate, within(1, MILLIS)
- )
- }
+ s3Client
+ .getObjectRetention {
+ it.bucket(bucketName)
+ it.key(sourceKey)
+ }.also {
+ assertThat(it.retention().mode()).isEqualTo(ObjectLockRetentionMode.COMPLIANCE)
+ // the returned date has MILLIS resolution, the local instant is in NANOS.
+ assertThat(it.retention().retainUntilDate())
+ .isCloseTo(
+ retainUntilDate,
+ within(1, MILLIS),
+ )
+ }
}
@Test
@@ -133,7 +138,7 @@ internal class RetentionIT : S3TestBase() {
it.bucket(bucketName)
it.key(sourceKey)
},
- RequestBody.fromFile(UPLOAD_FILE)
+ RequestBody.fromFile(UPLOAD_FILE),
)
val invalidRetainUntilDate = Instant.now().minus(1, DAYS)
@@ -142,10 +147,11 @@ internal class RetentionIT : S3TestBase() {
it.bucket(bucketName)
it.key(sourceKey)
it.retention(
- ObjectLockRetention.builder()
+ ObjectLockRetention
+ .builder()
.mode(ObjectLockRetentionMode.COMPLIANCE)
.retainUntilDate(invalidRetainUntilDate)
- .build()
+ .build(),
)
}
}.isInstanceOf(S3Exception::class.java)
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt
index e72e223f4..d4609aef3 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3TestBase.kt
@@ -15,6 +15,7 @@
*/
package com.adobe.testing.s3mock.its
+import aws.smithy.kotlin.runtime.net.url.Url
import com.fasterxml.jackson.dataformat.xml.XmlMapper
import org.apache.http.client.config.RequestConfig
import org.apache.http.impl.client.CloseableHttpClient
@@ -63,6 +64,7 @@ import java.io.IOException
import java.io.InputStream
import java.net.Socket
import java.net.URI
+import java.nio.file.Path
import java.security.KeyManagementException
import java.security.NoSuchAlgorithmException
import java.security.SecureRandom
@@ -77,156 +79,156 @@ import javax.net.ssl.TrustManager
import javax.net.ssl.X509ExtendedTrustManager
import kotlin.random.Random
-
/**
* Base type for S3 Mock integration tests. Sets up S3 Client, Certificates, initial Buckets, etc.
*/
internal abstract class S3TestBase {
- private val _s3Client: S3Client = createS3Client()
+ private val s3Client = createS3Client()
- protected fun createHttpClient(): CloseableHttpClient {
- return HttpClientBuilder
+ protected fun createHttpClient(): CloseableHttpClient =
+ HttpClientBuilder
.create()
.setSSLContext(createBlindlyTrustingSslContext())
.setDefaultRequestConfig(RequestConfig.custom().setExpectContinueEnabled(true).build())
.build()
- }
- protected fun createS3Client(endpoint: String = serviceEndpoint, chunkedEncodingEnabled: Boolean? = null): S3Client {
- return S3Client.builder()
+ protected fun createS3Client(
+ endpoint: String = serviceEndpoint,
+ chunkedEncodingEnabled: Boolean? = null,
+ ): S3Client =
+ S3Client
+ .builder()
.region(Region.of(s3Region))
.credentialsProvider(
- StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey))
- )
- .serviceConfiguration {
+ StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey)),
+ ).serviceConfiguration {
it.pathStyleAccessEnabled(true)
it.chunkedEncodingEnabled(chunkedEncodingEnabled)
- }
- .endpointOverride(URI.create(endpoint))
+ }.endpointOverride(URI.create(endpoint))
.httpClient(
ApacheHttpClient.builder().buildWithDefaults(
- AttributeMap.builder()
+ AttributeMap
+ .builder()
.put(SdkHttpConfigurationOption.TRUST_ALL_CERTIFICATES, true)
- .build()
- )
- )
- .build()
- }
+ .build(),
+ ),
+ ).build()
- protected fun createS3ClientKotlin(endpoint: String = serviceEndpointHttp): aws.sdk.kotlin.services.s3.S3Client {
- return aws.sdk.kotlin.services.s3.S3Client {
+ protected fun createS3ClientKotlin(endpoint: String = serviceEndpointHttp): aws.sdk.kotlin.services.s3.S3Client =
+ aws.sdk.kotlin.services.s3.S3Client {
region = s3Region
- credentialsProvider = aws.sdk.kotlin.runtime.auth.credentials.StaticCredentialsProvider {
+ credentialsProvider =
+ aws.sdk.kotlin.runtime.auth.credentials.StaticCredentialsProvider {
accessKeyId = s3AccessKeyId
secretAccessKey = s3SecretAccessKey
- }
+ }
forcePathStyle = true
- endpointUrl = aws.smithy.kotlin.runtime.net.url.Url.parse(endpoint)
+ endpointUrl =
+ Url
+ .parse(endpoint)
}
- }
- protected fun createS3AsyncClient(endpoint: String = serviceEndpoint): S3AsyncClient {
- return S3AsyncClient.builder()
+ protected fun createS3AsyncClient(endpoint: String = serviceEndpoint): S3AsyncClient =
+ S3AsyncClient
+ .builder()
.region(Region.of(s3Region))
.credentialsProvider(
- StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey))
- )
- .forcePathStyle(true)
+ StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey)),
+ ).forcePathStyle(true)
.endpointOverride(URI.create(endpoint))
- .httpClient(NettyNioAsyncHttpClient
- .builder()
- .connectionTimeout(Duration.ofMinutes(5))
- .maxConcurrency(100)
- .buildWithDefaults(
- AttributeMap.builder()
- .put(SdkHttpConfigurationOption.TRUST_ALL_CERTIFICATES, true)
- .build()
- )
- )
- .multipartEnabled(true)
+ .httpClient(
+ NettyNioAsyncHttpClient
+ .builder()
+ .connectionTimeout(Duration.ofMinutes(5))
+ .maxConcurrency(100)
+ .buildWithDefaults(
+ AttributeMap
+ .builder()
+ .put(SdkHttpConfigurationOption.TRUST_ALL_CERTIFICATES, true)
+ .build(),
+ ),
+ ).multipartEnabled(true)
.build()
- }
- protected fun createTransferManager(endpoint: String = serviceEndpoint,
- s3AsyncClient: S3AsyncClient = createAutoS3CrtAsyncClient(endpoint)): S3TransferManager {
- return S3TransferManager.builder()
+ protected fun createTransferManager(
+ endpoint: String = serviceEndpoint,
+ s3AsyncClient: S3AsyncClient = createAutoS3CrtAsyncClient(endpoint),
+ ): S3TransferManager =
+ S3TransferManager
+ .builder()
.s3Client(s3AsyncClient)
.build()
- }
/**
* Uses manual CRT client setup through AwsCrtAsyncHttpClient.builder()
*/
- protected fun createS3CrtAsyncClient(endpoint: String = serviceEndpoint): S3AsyncClient {
- return S3AsyncClient.builder()
+ protected fun createS3CrtAsyncClient(endpoint: String = serviceEndpoint): S3AsyncClient =
+ S3AsyncClient
+ .builder()
.region(Region.of(s3Region))
.credentialsProvider(
- StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey))
- )
- .forcePathStyle(true)
+ StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey)),
+ ).forcePathStyle(true)
.endpointOverride(URI.create(endpoint))
- .httpClient(AwsCrtAsyncHttpClient
- .builder()
- .connectionTimeout(Duration.ofMinutes(5))
- .maxConcurrency(100)
- .buildWithDefaults(
- AttributeMap.builder()
- .put(SdkHttpConfigurationOption.TRUST_ALL_CERTIFICATES, true)
- .build()
- )
- )
- .multipartEnabled(true)
+ .httpClient(
+ AwsCrtAsyncHttpClient
+ .builder()
+ .connectionTimeout(Duration.ofMinutes(5))
+ .maxConcurrency(100)
+ .buildWithDefaults(
+ AttributeMap
+ .builder()
+ .put(SdkHttpConfigurationOption.TRUST_ALL_CERTIFICATES, true)
+ .build(),
+ ),
+ ).multipartEnabled(true)
.build()
- }
/**
* Uses automated CRT client setup through S3AsyncClient.crtBuilder()
*/
protected fun createAutoS3CrtAsyncClient(endpoint: String = serviceEndpoint): S3CrtAsyncClient {
- //using S3AsyncClient.crtBuilder does not work, can't get it to ignore custom SSL certificates.
- return S3AsyncClient.crtBuilder()
+ // using S3AsyncClient.crtBuilder does not work, can't get it to ignore custom SSL certificates.
+ return S3AsyncClient
+ .crtBuilder()
.httpConfiguration {
- //this setting is ignored at runtime. Not sure why.
+ // this setting is ignored at runtime. Not sure why.
it.trustAllCertificatesEnabled(true)
- }
- .region(Region.of(s3Region))
+ }.region(Region.of(s3Region))
.credentialsProvider(
- StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey))
- )
- .forcePathStyle(true)
- //set endpoint to http(!)
+ StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey)),
+ ).forcePathStyle(true)
+ // set endpoint to http(!)
.endpointOverride(URI.create(endpoint))
.build() as S3CrtAsyncClient
}
- protected fun createS3Presigner(endpoint: String = serviceEndpoint): S3Presigner {
- return S3Presigner.builder()
+ protected fun createS3Presigner(endpoint: String = serviceEndpoint): S3Presigner =
+ S3Presigner
+ .builder()
.region(Region.of(s3Region))
.credentialsProvider(
- StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey))
- )
- .serviceConfiguration(S3Configuration.builder().pathStyleAccessEnabled(true).build())
+ StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey)),
+ ).serviceConfiguration(S3Configuration.builder().pathStyleAccessEnabled(true).build())
.endpointOverride(URI.create(endpoint))
.build()
- }
- protected fun createS3PostObjectPresigner(endpoint: String = serviceEndpoint): S3PostObjectPresigner {
- return S3PostObjectPresigner.builder()
+ protected fun createS3PostObjectPresigner(endpoint: String = serviceEndpoint): S3PostObjectPresigner =
+ S3PostObjectPresigner
+ .builder()
.region(Region.of(s3Region))
.credentialsProvider(
- StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey))
- )
- .serviceConfiguration(S3Configuration.builder().pathStyleAccessEnabled(true).build())
+ StaticCredentialsProvider.create(AwsBasicCredentials.create(s3AccessKeyId, s3SecretAccessKey)),
+ ).serviceConfiguration(S3Configuration.builder().pathStyleAccessEnabled(true).build())
.endpointOverride(URI.create(endpoint))
.build()
- }
/**
* Deletes all existing buckets.
*/
@AfterEach
fun cleanupStores() {
- _s3Client.listBuckets().buckets().forEach { bucket ->
+ s3Client.listBuckets().buckets().forEach { bucket ->
// Empty all buckets
deleteMultipartUploads(bucket)
deleteObjectsInBucket(bucket, isObjectLockEnabled(bucket))
@@ -238,14 +240,17 @@ internal abstract class S3TestBase {
}
protected fun bucketName(testInfo: TestInfo): String {
- val normalizedName = testInfo.testMethod.get().name
- .lowercase()
- .replace('_', '-')
- .replace(' ', '-')
- .replace(',', '-')
- .replace('\'', '-')
- .replace('=', '-')
- .let { if (it.length > 50) it.take(50) else it }
+ val normalizedName =
+ testInfo.testMethod
+ .get()
+ .name
+ .lowercase()
+ .replace('_', '-')
+ .replace(' ', '-')
+ .replace(',', '-')
+ .replace('\'', '-')
+ .replace('=', '-')
+ .let { if (it.length > 50) it.take(50) else it }
val bucketName = "$normalizedName-${Instant.now().nano}"
LOG.info("Bucketname=$bucketName")
return bucketName
@@ -254,15 +259,15 @@ internal abstract class S3TestBase {
fun givenBucket(testInfo: TestInfo): String = givenBucket(bucketName(testInfo))
fun givenBucket(bucketName: String = randomName): String {
- _s3Client.createBucket { it.bucket(bucketName) }
- val bucketCreated = _s3Client.waiter().waitUntilBucketExists { it.bucket(bucketName) }
+ s3Client.createBucket { it.bucket(bucketName) }
+ val bucketCreated = s3Client.waiter().waitUntilBucketExists { it.bucket(bucketName) }
val bucketCreatedResponse = bucketCreated.matched().response().get()
assertThat(bucketCreatedResponse).isNotNull
return bucketName
}
fun givenDirectoryBucket(bucketName: String = randomName): String {
- _s3Client.createBucket {
+ s3Client.createBucket {
it.bucket(bucketName)
it.createBucketConfiguration {
it.bucket {
@@ -270,49 +275,64 @@ internal abstract class S3TestBase {
}
}
}
- val bucketCreated = _s3Client.waiter().waitUntilBucketExists { it.bucket(bucketName) }
+ val bucketCreated = s3Client.waiter().waitUntilBucketExists { it.bucket(bucketName) }
val bucketCreatedResponse = bucketCreated.matched().response().get()
assertThat(bucketCreatedResponse).isNotNull
return bucketName
}
- fun givenObject(bucketName: String, key: String, fileName: String? = null): PutObjectResponse {
+ fun givenObject(
+ bucketName: String,
+ key: String,
+ fileName: String? = null,
+ ): PutObjectResponse {
val uploadFile = File(fileName ?: key)
- return _s3Client.putObject(
+ return s3Client.putObject(
{
it.bucket(bucketName)
it.key(key)
},
- RequestBody.fromFile(uploadFile)
+ RequestBody.fromFile(uploadFile),
)
}
- fun deleteObject(bucketName: String, key: String): DeleteObjectResponse {
- return _s3Client.deleteObject {
+ fun deleteObject(
+ bucketName: String,
+ key: String,
+ ): DeleteObjectResponse =
+ s3Client.deleteObject {
it.bucket(bucketName)
it.key(key)
}
- }
- fun getObject(bucketName: String, key: String): ResponseInputStream {
- return _s3Client.getObject {
+ fun getObject(
+ bucketName: String,
+ key: String,
+ ): ResponseInputStream =
+ s3Client.getObject {
it.bucket(bucketName)
it.key(key)
}
- }
- fun givenBucketAndObject(testInfo: TestInfo, key: String): Pair {
+ fun givenBucketAndObject(
+ testInfo: TestInfo,
+ key: String,
+ ): Pair {
val bucketName = givenBucket(testInfo)
val putObjectResponse = givenObject(bucketName, key)
return bucketName to putObjectResponse
}
- fun givenBucketAndObjects(testInfo: TestInfo, count: Int): Pair> {
+ fun givenBucketAndObjects(
+ testInfo: TestInfo,
+ count: Int,
+ ): Pair> {
val baseKey = randomName
val bucketName = givenBucket(testInfo)
- val keys = (0 until count).map { i ->
- "$baseKey-$i"
- }
+ val keys =
+ (0 until count).map { i ->
+ "$baseKey-$i"
+ }
keys.forEach { key ->
givenObject(bucketName, key, UPLOAD_FILE_NAME)
}
@@ -320,28 +340,33 @@ internal abstract class S3TestBase {
}
private fun deleteBucket(bucket: Bucket) {
- _s3Client.deleteBucket {
+ s3Client.deleteBucket {
it.bucket(bucket.name())
}
- val bucketDeleted = _s3Client
- .waiter()
- .waitUntilBucketNotExists {
- it.bucket(bucket.name())
- }
+ val bucketDeleted =
+ s3Client
+ .waiter()
+ .waitUntilBucketNotExists {
+ it.bucket(bucket.name())
+ }
bucketDeleted.matched().exception().get().also {
assertThat(it).isNotNull
}
}
- private fun deleteObjectsInBucket(bucket: Bucket, objectLockEnabled: Boolean) {
- _s3Client.listObjectVersions {
- it.bucket(bucket.name())
- it.encodingType(EncodingType.URL)
- }.also {
- it.versions().forEach { objectVersion ->
+ private fun deleteObjectsInBucket(
+ bucket: Bucket,
+ objectLockEnabled: Boolean,
+ ) {
+ s3Client
+ .listObjectVersions {
+ it.bucket(bucket.name())
+ it.encodingType(EncodingType.URL)
+ }.also {
+ it.versions().forEach { objectVersion ->
if (objectLockEnabled) {
- //must remove potential legal hold, otherwise object can't be deleted
- _s3Client.putObjectLegalHold {
+ // must remove potential legal hold, otherwise object can't be deleted
+ s3Client.putObjectLegalHold {
it.bucket(bucket.name())
it.key(objectVersion.key())
it.versionId(objectVersion.versionId())
@@ -350,54 +375,58 @@ internal abstract class S3TestBase {
}
}
}
- _s3Client.deleteObject {
+ s3Client.deleteObject {
it.bucket(bucket.name())
it.key(objectVersion.key())
it.versionId(objectVersion.versionId())
}
}
- it.deleteMarkers().forEach { marker ->
- if (objectLockEnabled) {
- //must remove potential legal hold, otherwise object can't be deleted
- _s3Client.putObjectLegalHold {
+ it.deleteMarkers().forEach { marker ->
+ if (objectLockEnabled) {
+ // must remove potential legal hold, otherwise object can't be deleted
+ s3Client.putObjectLegalHold {
+ it.bucket(bucket.name())
+ it.key(marker.key())
+ it.versionId(marker.versionId())
+ it.legalHold {
+ it.status(ObjectLockLegalHoldStatus.OFF)
+ }
+ }
+ }
+ s3Client.deleteObject {
it.bucket(bucket.name())
it.key(marker.key())
it.versionId(marker.versionId())
- it.legalHold {
- it.status(ObjectLockLegalHoldStatus.OFF)
- }
}
}
- _s3Client.deleteObject {
- it.bucket(bucket.name())
- it.key(marker.key())
- it.versionId(marker.versionId())
- }
}
- }
}
- private fun isObjectLockEnabled(bucket: Bucket): Boolean {
- return try {
- ObjectLockEnabled.ENABLED == _s3Client.getObjectLockConfiguration {
- it.bucket(bucket.name())
- }.objectLockConfiguration().objectLockEnabled()
+ private fun isObjectLockEnabled(bucket: Bucket): Boolean =
+ try {
+ ObjectLockEnabled.ENABLED ==
+ s3Client
+ .getObjectLockConfiguration {
+ it.bucket(bucket.name())
+ }.objectLockConfiguration()
+ .objectLockEnabled()
} catch (e: S3Exception) {
- //#getObjectLockConfiguration throws S3Exception if not set
+ // #getObjectLockConfiguration throws S3Exception if not set
false
}
- }
private fun deleteMultipartUploads(bucket: Bucket) {
- _s3Client.listMultipartUploads {
- it.bucket(bucket.name())
- }.uploads().forEach { upload ->
- _s3Client.abortMultipartUpload {
+ s3Client
+ .listMultipartUploads {
it.bucket(bucket.name())
- it.key(upload.key())
- it.uploadId(upload.uploadId())
+ }.uploads()
+ .forEach { upload ->
+ s3Client.abortMultipartUpload {
+ it.bucket(bucket.name())
+ it.key(upload.key())
+ it.uploadId(upload.uploadId())
+ }
}
- }
}
private val s3Endpoint: String?
@@ -421,47 +450,70 @@ internal abstract class S3TestBase {
protected val httpPort: Int
get() = Integer.getInteger("it.s3mock.port_http", 9090)
- protected fun createBlindlyTrustingSslContext(): SSLContext {
- return try {
+ protected fun createBlindlyTrustingSslContext(): SSLContext =
+ try {
val sc = SSLContext.getInstance("TLS")
- sc.init(null, arrayOf(object : X509ExtendedTrustManager() {
- override fun getAcceptedIssuers(): Array {
- return arrayOf()
- }
-
- override fun checkClientTrusted(certs: Array, authType: String) {
- // no-op
- }
+ sc.init(
+ null,
+ arrayOf(
+ object : X509ExtendedTrustManager() {
+ override fun getAcceptedIssuers(): Array = arrayOf()
+
+ override fun checkClientTrusted(
+ certs: Array,
+ authType: String,
+ ) {
+ // no-op
+ }
- override fun checkClientTrusted(arg0: Array, arg1: String, arg2: SSLEngine) {
- // no-op
- }
+ override fun checkClientTrusted(
+ arg0: Array,
+ arg1: String,
+ arg2: SSLEngine,
+ ) {
+ // no-op
+ }
- override fun checkClientTrusted(arg0: Array, arg1: String, arg2: Socket
- ) {
- // no-op
- }
+ override fun checkClientTrusted(
+ arg0: Array,
+ arg1: String,
+ arg2: Socket,
+ ) {
+ // no-op
+ }
- override fun checkServerTrusted(arg0: Array, arg1: String, arg2: SSLEngine) {
- // no-op
- }
+ override fun checkServerTrusted(
+ arg0: Array,
+ arg1: String,
+ arg2: SSLEngine,
+ ) {
+ // no-op
+ }
- override fun checkServerTrusted(arg0: Array, arg1: String, arg2: Socket) {
- // no-op
- }
+ override fun checkServerTrusted(
+ arg0: Array,
+ arg1: String,
+ arg2: Socket,
+ ) {
+ // no-op
+ }
- override fun checkServerTrusted(certs: Array, authType: String) {
- // no-op
- }
- }
- ), SecureRandom())
+ override fun checkServerTrusted(
+ certs: Array,
+ authType: String,
+ ) {
+ // no-op
+ }
+ },
+ ),
+ SecureRandom(),
+ )
sc
} catch (e: NoSuchAlgorithmException) {
throw RuntimeException("Unexpected exception", e)
} catch (e: KeyManagementException) {
throw RuntimeException("Unexpected exception", e)
}
- }
fun randomInputStream(size: Int): InputStream {
val content = ByteArray(size)
@@ -473,13 +525,13 @@ internal abstract class S3TestBase {
* Creates 5+MB of random bytes to upload as a valid part
* (all parts but the last must be at least 5MB in size)
*/
- fun randomBytes(): ByteArray = randomMBytes(_5MB.toInt() + Random.nextInt(_1MB))
+ fun randomBytes(): ByteArray = randomMBytes(FIVE_MB + Random.nextInt(ONE_MB))
/**
* Creates exactly 5MB of random bytes to upload as a valid part
* (all parts but the last must be at least 5MB in size)
*/
- fun random5MBytes(): ByteArray = randomMBytes(_5MB.toInt())
+ fun random5MBytes(): ByteArray = randomMBytes(FIVE_MB)
protected fun randomMBytes(size: Int): ByteArray {
val bytes = ByteArray(size)
@@ -493,7 +545,10 @@ internal abstract class S3TestBase {
return inputStream.use { it.readBytes() }
}
- fun concatByteArrays(arr1: ByteArray, arr2: ByteArray): ByteArray {
+ fun concatByteArrays(
+ arr1: ByteArray,
+ arr2: ByteArray,
+ ): ByteArray {
// Idiomatic Kotlin: allocate once and copy using copyInto to avoid System.arraycopy
val result = ByteArray(arr1.size + arr2.size)
arr1.copyInto(result, destinationOffset = 0)
@@ -505,8 +560,8 @@ internal abstract class S3TestBase {
ChecksumAlgorithm.fromValue(this.algorithmId())
fun S3Response.checksum(checksumAlgorithm: ChecksumAlgorithm): String? {
- fun S3Response.checksumSHA1(): String? {
- return when (this) {
+ fun S3Response.checksumSHA1(): String? =
+ when (this) {
is GetObjectResponse -> this.checksumSHA1()
is PutObjectResponse -> this.checksumSHA1()
is HeadObjectResponse -> this.checksumSHA1()
@@ -514,10 +569,9 @@ internal abstract class S3TestBase {
is GetObjectAttributesResponse -> this.checksum().checksumSHA1()
else -> throw RuntimeException("Unexpected response type ${this::class.java}")
}
- }
- fun S3Response.checksumSHA256(): String? {
- return when (this) {
+ fun S3Response.checksumSHA256(): String? =
+ when (this) {
is GetObjectResponse -> this.checksumSHA256()
is PutObjectResponse -> this.checksumSHA256()
is HeadObjectResponse -> this.checksumSHA256()
@@ -525,10 +579,9 @@ internal abstract class S3TestBase {
is GetObjectAttributesResponse -> this.checksum().checksumSHA256()
else -> throw RuntimeException("Unexpected response type ${this::class.java}")
}
- }
- fun S3Response.checksumCRC32(): String? {
- return when (this) {
+ fun S3Response.checksumCRC32(): String? =
+ when (this) {
is GetObjectResponse -> this.checksumCRC32()
is PutObjectResponse -> this.checksumCRC32()
is HeadObjectResponse -> this.checksumCRC32()
@@ -536,10 +589,9 @@ internal abstract class S3TestBase {
is GetObjectAttributesResponse -> this.checksum().checksumCRC32()
else -> throw RuntimeException("Unexpected response type ${this::class.java}")
}
- }
- fun S3Response.checksumCRC32C(): String? {
- return when (this) {
+ fun S3Response.checksumCRC32C(): String? =
+ when (this) {
is GetObjectResponse -> this.checksumCRC32C()
is PutObjectResponse -> this.checksumCRC32C()
is HeadObjectResponse -> this.checksumCRC32C()
@@ -547,10 +599,9 @@ internal abstract class S3TestBase {
is GetObjectAttributesResponse -> this.checksum().checksumCRC32C()
else -> throw RuntimeException("Unexpected response type ${this::class.java}")
}
- }
- fun S3Response.checksumCRC64NVME(): String? {
- return when (this) {
+ fun S3Response.checksumCRC64NVME(): String? =
+ when (this) {
is GetObjectResponse -> this.checksumCRC64NVME()
is PutObjectResponse -> this.checksumCRC64NVME()
is HeadObjectResponse -> this.checksumCRC64NVME()
@@ -558,7 +609,6 @@ internal abstract class S3TestBase {
is GetObjectAttributesResponse -> this.checksum().checksumCRC64NVME()
else -> throw RuntimeException("Unexpected response type ${this::class.java}")
}
- }
return when (checksumAlgorithm) {
ChecksumAlgorithm.SHA1 -> this.checksumSHA1()
@@ -582,31 +632,28 @@ internal abstract class S3TestBase {
const val TEST_IMAGE_TIFF = "src/test/resources/test-image.tiff"
const val UPLOAD_FILE_NAME = SAMPLE_FILE_LARGE
val UPLOAD_FILE = File(UPLOAD_FILE_NAME)
- val UPLOAD_FILE_PATH = UPLOAD_FILE.toPath()
+ val UPLOAD_FILE_PATH: Path = UPLOAD_FILE.toPath()
val UPLOAD_FILE_LENGTH = UPLOAD_FILE.length()
const val TEST_WRONG_KEY_ID = "key-ID-WRONGWRONGWRONG"
- const val _1MB = 1024 * 1024
- const val _5MB = 5L * _1MB
- const val BUFFER_SIZE = 128 * 1024
- private const val THREAD_COUNT = 50
+ const val ONE_MB = 1024 * 1024
+ const val FIVE_MB = 5 * ONE_MB
private const val PREFIX = "prefix"
- val MAPPER = XmlMapper.builder().build()
- private val TEST_FILE_NAMES = listOf(
- SAMPLE_FILE,
- SAMPLE_FILE_LARGE,
- TEST_IMAGE,
- TEST_IMAGE_LARGE,
- TEST_IMAGE_TIFF,
- )
+ val MAPPER: XmlMapper = XmlMapper.builder().build()
+ private val TEST_FILE_NAMES =
+ listOf(
+ SAMPLE_FILE,
+ SAMPLE_FILE_LARGE,
+ TEST_IMAGE,
+ TEST_IMAGE_LARGE,
+ TEST_IMAGE_TIFF,
+ )
@JvmStatic
- protected fun testFileNames(): Stream {
- return Stream.of(*TEST_FILE_NAMES.toTypedArray())
- }
+ protected fun testFileNames(): Stream = Stream.of(*TEST_FILE_NAMES.toTypedArray())
@JvmStatic
- protected fun storageClasses(): Stream {
- return listOf(
+ protected fun storageClasses(): Stream =
+ listOf(
StorageClass.STANDARD,
StorageClass.REDUCED_REDUNDANCY,
StorageClass.STANDARD_IA,
@@ -614,141 +661,355 @@ internal abstract class S3TestBase {
StorageClass.INTELLIGENT_TIERING,
StorageClass.GLACIER,
).stream()
- }
@JvmStatic
- protected fun checksumAlgorithms(): Stream {
- return listOf(
+ protected fun checksumAlgorithms(): Stream =
+ listOf(
DefaultChecksumAlgorithm.SHA256,
DefaultChecksumAlgorithm.SHA1,
DefaultChecksumAlgorithm.CRC32,
DefaultChecksumAlgorithm.CRC32C,
- DefaultChecksumAlgorithm.CRC64NVME
+ DefaultChecksumAlgorithm.CRC64NVME,
).stream()
- }
@JvmStatic
- protected fun charsSafe(): Stream {
- return Stream.of(
+ protected fun charsSafe(): Stream =
+ Stream.of(
"$PREFIX${chars_safe_alphanumeric()}",
- "$PREFIX${chars_safe_special()}"
+ "$PREFIX${chars_safe_special()}",
)
- }
@JvmStatic
- protected fun charsSafeKey(): String {
- return "$PREFIX${chars_safe_alphanumeric()}${chars_safe_special()}"
- }
+ protected fun charsSafeKey(): String = "$PREFIX${chars_safe_alphanumeric()}${chars_safe_special()}"
@JvmStatic
- protected fun charsSpecial(): Stream {
- return Stream.of(
+ protected fun charsSpecial(): Stream =
+ Stream.of(
"$PREFIX${chars_specialHandling()}",
- //"$PREFIX${chars_specialHandling_unicode()}" //TODO: some of these chars to not work.
+ // "$PREFIX${chars_specialHandling_unicode()}" //TODO: some of these chars do not work.
)
- }
@JvmStatic
- protected fun charsSpecialKey(): String {
- return "$PREFIX${chars_specialHandling()}"
- }
+ protected fun charsSpecialKey(): String = "$PREFIX${chars_specialHandling()}"
@JvmStatic
- protected fun charsToAvoid(): Stream {
- return Stream.of(
+ protected fun charsToAvoid(): Stream =
+ Stream.of(
"$PREFIX${chars_toAvoid()}",
- //"$PREFIX${chars_toAvoid_unicode()}" //TODO: some of these chars to not work.
+ // "$PREFIX${chars_toAvoid_unicode()}" //TODO: some of these chars do not work.
)
- }
@JvmStatic
- protected fun charsToAvoidKey(): String {
- return "$PREFIX${chars_toAvoid()}"
- }
+ protected fun charsToAvoidKey(): String = "$PREFIX${chars_toAvoid()}"
/**
* Chars that are safe to use
* https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html
*/
@JvmStatic
- private fun chars_safe_alphanumeric(): String {
- return listOf(
- "0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
- "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m",
- "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z",
- "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M",
- "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"
+ private fun chars_safe_alphanumeric(): String =
+ listOf(
+ "0",
+ "1",
+ "2",
+ "3",
+ "4",
+ "5",
+ "6",
+ "7",
+ "8",
+ "9",
+ "a",
+ "b",
+ "c",
+ "d",
+ "e",
+ "f",
+ "g",
+ "h",
+ "i",
+ "j",
+ "k",
+ "l",
+ "m",
+ "n",
+ "o",
+ "p",
+ "q",
+ "r",
+ "s",
+ "t",
+ "u",
+ "v",
+ "w",
+ "x",
+ "y",
+ "z",
+ "A",
+ "B",
+ "C",
+ "D",
+ "E",
+ "F",
+ "G",
+ "H",
+ "I",
+ "J",
+ "K",
+ "L",
+ "M",
+ "N",
+ "O",
+ "P",
+ "Q",
+ "R",
+ "S",
+ "T",
+ "U",
+ "V",
+ "W",
+ "X",
+ "Y",
+ "Z",
).joinToString(separator = "")
- }
+
/**
* Chars that are safe yet special
* https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html
*/
@JvmStatic
- private fun chars_safe_special(): String {
- return listOf(
- "!", "-", "_", ".", "*", "'", "(", ")"
+ private fun chars_safe_special(): String =
+ listOf(
+ "!",
+ "-",
+ "_",
+ ".",
+ "*",
+ "'",
+ "(",
+ ")",
).joinToString(separator = "")
- }
+
/**
* Chars that might need special handling
* https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html
*/
@JvmStatic
- private fun chars_specialHandling(): String {
- return listOf(
- "&", "$", "@", "=", ";", "/", ":", "+", " ", ",", "?"
+ private fun chars_specialHandling(): String =
+ listOf(
+ "&",
+ "$",
+ "@",
+ "=",
+ ";",
+ "/",
+ ":",
+ "+",
+ " ",
+ ",",
+ "?",
).joinToString(separator = "")
- }
+
/**
* Unicode chars that might need special handling
* https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html
*/
@JvmStatic
- private fun chars_specialHandling_unicode(): String {
- return listOf(
- "\u0000", "\u0001", "\u0002", "\u0003", "\u0004", "\u0005", "\u0006", "\u0007", "\u0008", "\u0009",
- "\u000A", "\u000B", "\u000C", "\u000D", "\u000E", "\u000F",
- "\u0010", "\u0011", "\u0012", "\u0013", "\u0014", "\u0015", "\u0016", "\u0017", "\u0018", "\u0019",
- "\u001A", "\u001B", "\u001C", "\u001D", "\u001E", "\u001F", "\u007F",
+ private fun chars_specialHandling_unicode(): String =
+ listOf(
+ "\u0000",
+ "\u0001",
+ "\u0002",
+ "\u0003",
+ "\u0004",
+ "\u0005",
+ "\u0006",
+ "\u0007",
+ "\u0008",
+ "\u0009",
+ "\u000A",
+ "\u000B",
+ "\u000C",
+ "\u000D",
+ "\u000E",
+ "\u000F",
+ "\u0010",
+ "\u0011",
+ "\u0012",
+ "\u0013",
+ "\u0014",
+ "\u0015",
+ "\u0016",
+ "\u0017",
+ "\u0018",
+ "\u0019",
+ "\u001A",
+ "\u001B",
+ "\u001C",
+ "\u001D",
+ "\u001E",
+ "\u001F",
+ "\u007F",
).joinToString(separator = "")
- }
/**
* Chars to avoid
* https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html
*/
@JvmStatic
- private fun chars_toAvoid(): String {
- return listOf(
- "\\", "{", "^", "}", "%", "`", "]", "\"", ">", "[", "~", "<", "#", "|"
+ private fun chars_toAvoid(): String =
+ listOf(
+ "\\",
+ "{",
+ "^",
+ "}",
+ "%",
+ "`",
+ "]",
+ "\"",
+ ">",
+ "[",
+ "~",
+ "<",
+ "#",
+ "|",
).joinToString(separator = "")
- }
/**
* Unicode chars to avoid
* https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html
*/
@JvmStatic
- private fun chars_toAvoid_unicode(): String {
- return listOf(
- "\u0080", "\u0081", "\u0082", "\u0083", "\u0084", "\u0085", "\u0086", "\u0087", "\u0088", "\u0089",
- "\u008A", "\u008B", "\u008C", "\u008D", "\u008E", "\u008F",
- "\u0090", "\u0091", "\u0092", "\u0093", "\u0094", "\u0095", "\u0096", "\u0097", "\u0098", "\u0099",
- "\u009A", "\u009B", "\u009C", "\u009D", "\u009E", "\u009F",
- "\u00A0", "\u00A1", "\u00A2", "\u00A3", "\u00A4", "\u00A5", "\u00A6", "\u00A7", "\u00A8", "\u00A9",
- "\u00AA", "\u00AB", "\u00AC", "\u00AD", "\u00AE", "\u00AF",
- "\u00B0", "\u00B1", "\u00B2", "\u00B3", "\u00B4", "\u00B5", "\u00B6", "\u00B7", "\u00B8", "\u00B9",
- "\u00BA", "\u00BB", "\u00BC", "\u00BD", "\u00BE", "\u00BF",
- "\u00C0", "\u00C1", "\u00C2", "\u00C3", "\u00C4", "\u00C5", "\u00C6", "\u00C7", "\u00C8", "\u00C9",
- "\u00CA", "\u00CB", "\u00CC", "\u00CD", "\u00CE", "\u00CF",
- "\u00D0", "\u00D1", "\u00D2", "\u00D3", "\u00D4", "\u00D5", "\u00D6", "\u00D7", "\u00D8", "\u00D9",
- "\u00DA", "\u00DB", "\u00DC", "\u00DD", "\u00DE", "\u00DF",
- "\u0000", "\u0001", "\u0002", "\u0003", "\u0004", "\u0005", "\u0006", "\u0007", "\u0008", "\u0009",
- "\u000A", "\u000B", "\u000C", "\u000D", "\u000E", "\u000F",
- "\u0000", "\u0001", "\u0002", "\u0003", "\u0004", "\u0005", "\u0006", "\u0007", "\u0008", "\u0009",
- "\u000A", "\u000B", "\u000C", "\u000D", "\u000E", "\u000F",
+ private fun chars_toAvoid_unicode(): String =
+ listOf(
+ "\u0080",
+ "\u0081",
+ "\u0082",
+ "\u0083",
+ "\u0084",
+ "\u0085",
+ "\u0086",
+ "\u0087",
+ "\u0088",
+ "\u0089",
+ "\u008A",
+ "\u008B",
+ "\u008C",
+ "\u008D",
+ "\u008E",
+ "\u008F",
+ "\u0090",
+ "\u0091",
+ "\u0092",
+ "\u0093",
+ "\u0094",
+ "\u0095",
+ "\u0096",
+ "\u0097",
+ "\u0098",
+ "\u0099",
+ "\u009A",
+ "\u009B",
+ "\u009C",
+ "\u009D",
+ "\u009E",
+ "\u009F",
+ "\u00A0",
+ "\u00A1",
+ "\u00A2",
+ "\u00A3",
+ "\u00A4",
+ "\u00A5",
+ "\u00A6",
+ "\u00A7",
+ "\u00A8",
+ "\u00A9",
+ "\u00AA",
+ "\u00AB",
+ "\u00AC",
+ "\u00AD",
+ "\u00AE",
+ "\u00AF",
+ "\u00B0",
+ "\u00B1",
+ "\u00B2",
+ "\u00B3",
+ "\u00B4",
+ "\u00B5",
+ "\u00B6",
+ "\u00B7",
+ "\u00B8",
+ "\u00B9",
+ "\u00BA",
+ "\u00BB",
+ "\u00BC",
+ "\u00BD",
+ "\u00BE",
+ "\u00BF",
+ "\u00C0",
+ "\u00C1",
+ "\u00C2",
+ "\u00C3",
+ "\u00C4",
+ "\u00C5",
+ "\u00C6",
+ "\u00C7",
+ "\u00C8",
+ "\u00C9",
+ "\u00CA",
+ "\u00CB",
+ "\u00CC",
+ "\u00CD",
+ "\u00CE",
+ "\u00CF",
+ "\u00D0",
+ "\u00D1",
+ "\u00D2",
+ "\u00D3",
+ "\u00D4",
+ "\u00D5",
+ "\u00D6",
+ "\u00D7",
+ "\u00D8",
+ "\u00D9",
+ "\u00DA",
+ "\u00DB",
+ "\u00DC",
+ "\u00DD",
+ "\u00DE",
+ "\u00DF",
+ "\u0000",
+ "\u0001",
+ "\u0002",
+ "\u0003",
+ "\u0004",
+ "\u0005",
+ "\u0006",
+ "\u0007",
+ "\u0008",
+ "\u0009",
+ "\u000A",
+ "\u000B",
+ "\u000C",
+ "\u000D",
+ "\u000E",
+ "\u000F",
+ "\u0000",
+ "\u0001",
+ "\u0002",
+ "\u0003",
+ "\u0004",
+ "\u0005",
+ "\u0006",
+ "\u0007",
+ "\u0008",
+ "\u0009",
+ "\u000A",
+ "\u000B",
+ "\u000C",
+ "\u000D",
+ "\u000E",
+ "\u000F",
).joinToString(separator = "")
- }
}
}
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3VerifiedFailure.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3VerifiedFailure.kt
index f42538531..83df66daf 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3VerifiedFailure.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3VerifiedFailure.kt
@@ -24,4 +24,7 @@ import org.junit.jupiter.api.extension.ExtendWith
*/
@Retention(AnnotationRetention.RUNTIME)
@ExtendWith(RealS3BackendUsedCondition::class)
-annotation class S3VerifiedFailure(val reason: String, val year: Int)
+annotation class S3VerifiedFailure(
+ val reason: String,
+ val year: Int,
+)
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3VerifiedSuccess.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3VerifiedSuccess.kt
index 87ca2fdc0..ce9ac6966 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3VerifiedSuccess.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/S3VerifiedSuccess.kt
@@ -22,4 +22,6 @@ import org.junit.jupiter.api.extension.ExtendWith
*/
@Retention(AnnotationRetention.RUNTIME)
@ExtendWith(RealS3BackendUsedCondition::class)
-annotation class S3VerifiedSuccess(val year: Int)
+annotation class S3VerifiedSuccess(
+ val year: Int,
+)
diff --git a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/VersionsIT.kt b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/VersionsIT.kt
index 34cc226be..5b0a5cc16 100644
--- a/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/VersionsIT.kt
+++ b/integration-tests/src/test/kotlin/com/adobe/testing/s3mock/its/VersionsIT.kt
@@ -76,31 +76,35 @@ internal class VersionsIT : S3TestBase() {
}
}
- val versionId = s3Client.putObject(
- {
+ val versionId =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
+
+ s3Client
+ .getObjectAttributes {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
- it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
- }, RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
-
- s3Client.getObjectAttributes {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.versionId(versionId)
- it.objectAttributes(
- ObjectAttributes.OBJECT_SIZE,
- ObjectAttributes.STORAGE_CLASS,
- ObjectAttributes.E_TAG,
- ObjectAttributes.CHECKSUM
- )
- }.also {
- assertThat(it.versionId()).isEqualTo(versionId)
- // default storageClass is STANDARD, which is never returned from APIs
- assertThat(it.storageClass()).isEqualTo(StorageClass.STANDARD)
- assertThat(it.objectSize()).isEqualTo(UPLOAD_FILE_LENGTH)
- assertThat(it.checksum().checksumSHA1()).isEqualTo(expectedChecksum)
- }
+ it.versionId(versionId)
+ it.objectAttributes(
+ ObjectAttributes.OBJECT_SIZE,
+ ObjectAttributes.STORAGE_CLASS,
+ ObjectAttributes.E_TAG,
+ ObjectAttributes.CHECKSUM,
+ )
+ }.also {
+ assertThat(it.versionId()).isEqualTo(versionId)
+ // default storageClass is STANDARD, which is never returned from APIs
+ assertThat(it.storageClass()).isEqualTo(StorageClass.STANDARD)
+ assertThat(it.objectSize()).isEqualTo(UPLOAD_FILE_LENGTH)
+ assertThat(it.checksum().checksumSHA1()).isEqualTo(expectedChecksum)
+ }
}
@Test
@@ -115,44 +119,53 @@ internal class VersionsIT : S3TestBase() {
}
}
- val versionId1 = s3Client.putObject(
- {
+ val versionId1 =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
+
+ val versionId2 =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
+
+ s3Client
+ .getObject {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
- it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
- }, RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
+ it.versionId(versionId2)
+ }.use {
+ assertThat(it.response().versionId()).isEqualTo(versionId2)
+ }
- val versionId2 = s3Client.putObject(
- {
+ s3Client
+ .getObject {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
- it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
- }, RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
-
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.versionId(versionId2)
- }.use {
- assertThat(it.response().versionId()).isEqualTo(versionId2)
- }
-
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.versionId(versionId1)
- }.use {
- assertThat(it.response().versionId()).isEqualTo(versionId1)
- }
+ it.versionId(versionId1)
+ }.use {
+ assertThat(it.response().versionId()).isEqualTo(versionId1)
+ }
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.use {
- assertThat(it.response().versionId()).isEqualTo(versionId2)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }.use {
+ assertThat(it.response().versionId()).isEqualTo(versionId2)
+ }
}
@Test
@@ -167,21 +180,27 @@ internal class VersionsIT : S3TestBase() {
}
}
- val versionId1 = s3Client.putObject(
- {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
- }, RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
+ val versionId1 =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
- val versionId2 = s3Client.putObject(
- {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
- }, RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
+ val versionId2 =
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ it.checksumAlgorithm(ChecksumAlgorithm.SHA1)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
s3Client.deleteObject {
it.bucket(bucketName)
@@ -189,12 +208,13 @@ internal class VersionsIT : S3TestBase() {
it.versionId(versionId2)
}
- s3Client.getObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.use {
- assertThat(it.response().versionId()).isEqualTo(versionId1)
- }
+ s3Client
+ .getObject {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ }.use {
+ assertThat(it.response().versionId()).isEqualTo(versionId1)
+ }
}
@Test
@@ -209,26 +229,31 @@ internal class VersionsIT : S3TestBase() {
}
}
- s3Client.putObject(
- {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }, RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
+
+ s3Client
+ .putObject(
+ {
+ it.bucket(bucketName)
+ it.key(UPLOAD_FILE_NAME)
+ },
+ RequestBody.fromFile(UPLOAD_FILE),
+ ).versionId()
- s3Client.putObject(
- {
+ s3Client
+ .deleteObject {
it.bucket(bucketName)
it.key(UPLOAD_FILE_NAME)
- }, RequestBody.fromFile(UPLOAD_FILE)
- ).versionId()
-
- s3Client.deleteObject {
- it.bucket(bucketName)
- it.key(UPLOAD_FILE_NAME)
- }.also {
- assertThat(it.deleteMarker()).isEqualTo(true)
- }
+ }.also {
+ assertThat(it.deleteMarker()).isEqualTo(true)
+ }
assertThatThrownBy {
s3Client.getObject {
diff --git a/mvnw b/mvnw
index 19529ddf8..e9cf8d330 100755
--- a/mvnw
+++ b/mvnw
@@ -19,7 +19,7 @@
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
-# Apache Maven Wrapper startup batch script, version 3.3.2
+# Apache Maven Wrapper startup batch script, version 3.3.3
#
# Optional ENV vars
# -----------------
@@ -105,14 +105,17 @@ trim() {
printf "%s" "${1}" | tr -d '[:space:]'
}
+scriptDir="$(dirname "$0")"
+scriptName="$(basename "$0")"
+
# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties
while IFS="=" read -r key value; do
case "${key-}" in
distributionUrl) distributionUrl=$(trim "${value-}") ;;
distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;;
esac
-done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties"
-[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties"
+done <"$scriptDir/.mvn/wrapper/maven-wrapper.properties"
+[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties"
case "${distributionUrl##*/}" in
maven-mvnd-*bin.*)
@@ -130,7 +133,7 @@ maven-mvnd-*bin.*)
distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip"
;;
maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;;
-*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;;
+*) MVN_CMD="mvn${scriptName#mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;;
esac
# apply MVNW_REPOURL and calculate MAVEN_HOME
@@ -227,7 +230,7 @@ if [ -n "${distributionSha256Sum-}" ]; then
echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1
elif command -v sha256sum >/dev/null; then
- if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then
+ if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c - >/dev/null 2>&1; then
distributionSha256Result=true
fi
elif command -v shasum >/dev/null; then
@@ -252,8 +255,41 @@ if command -v unzip >/dev/null; then
else
tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar"
fi
-printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url"
-mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME"
+
+# Find the actual extracted directory name (handles snapshots where filename != directory name)
+actualDistributionDir=""
+
+# First try the expected directory name (for regular distributions)
+if [ -d "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" ]; then
+ if [ -f "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/bin/$MVN_CMD" ]; then
+ actualDistributionDir="$distributionUrlNameMain"
+ fi
+fi
+
+# If not found, search for any directory with the Maven executable (for snapshots)
+if [ -z "$actualDistributionDir" ]; then
+ # enable globbing to iterate over items
+ set +f
+ for dir in "$TMP_DOWNLOAD_DIR"/*; do
+ if [ -d "$dir" ]; then
+ if [ -f "$dir/bin/$MVN_CMD" ]; then
+ actualDistributionDir="$(basename "$dir")"
+ break
+ fi
+ fi
+ done
+ set -f
+fi
+
+if [ -z "$actualDistributionDir" ]; then
+ verbose "Contents of $TMP_DOWNLOAD_DIR:"
+ verbose "$(ls -la "$TMP_DOWNLOAD_DIR")"
+ die "Could not find Maven distribution directory in extracted archive"
+fi
+
+verbose "Found extracted Maven distribution directory: $actualDistributionDir"
+printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$actualDistributionDir/mvnw.url"
+mv -- "$TMP_DOWNLOAD_DIR/$actualDistributionDir" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME"
clean || :
exec_maven "$@"
diff --git a/mvnw.cmd b/mvnw.cmd
index 249bdf382..2e2dbe039 100644
--- a/mvnw.cmd
+++ b/mvnw.cmd
@@ -19,7 +19,7 @@
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
-@REM Apache Maven Wrapper startup batch script, version 3.3.2
+@REM Apache Maven Wrapper startup batch script, version 3.3.3
@REM
@REM Optional ENV vars
@REM MVNW_REPOURL - repo url base for downloading maven distribution
@@ -40,7 +40,7 @@
@SET __MVNW_ARG0_NAME__=
@SET MVNW_USERNAME=
@SET MVNW_PASSWORD=
-@IF NOT "%__MVNW_CMD__%"=="" (%__MVNW_CMD__% %*)
+@IF NOT "%__MVNW_CMD__%"=="" ("%__MVNW_CMD__%" %*)
@echo Cannot start maven from wrapper >&2 && exit /b 1
@GOTO :EOF
: end batch / begin powershell #>
@@ -73,16 +73,30 @@ switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) {
# apply MVNW_REPOURL and calculate MAVEN_HOME
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/
if ($env:MVNW_REPOURL) {
- $MVNW_REPO_PATTERN = if ($USE_MVND) { "/org/apache/maven/" } else { "/maven/mvnd/" }
- $distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace '^.*'+$MVNW_REPO_PATTERN,'')"
+ $MVNW_REPO_PATTERN = if ($USE_MVND -eq $False) { "/org/apache/maven/" } else { "/maven/mvnd/" }
+ $distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace "^.*$MVNW_REPO_PATTERN",'')"
}
$distributionUrlName = $distributionUrl -replace '^.*/',''
$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$',''
-$MAVEN_HOME_PARENT = "$HOME/.m2/wrapper/dists/$distributionUrlNameMain"
+
+$MAVEN_M2_PATH = "$HOME/.m2"
if ($env:MAVEN_USER_HOME) {
- $MAVEN_HOME_PARENT = "$env:MAVEN_USER_HOME/wrapper/dists/$distributionUrlNameMain"
+ $MAVEN_M2_PATH = "$env:MAVEN_USER_HOME"
+}
+
+if (-not (Test-Path -Path $MAVEN_M2_PATH)) {
+ New-Item -Path $MAVEN_M2_PATH -ItemType Directory | Out-Null
+}
+
+$MAVEN_WRAPPER_DISTS = $null
+if ((Get-Item $MAVEN_M2_PATH).Target[0] -eq $null) {
+ $MAVEN_WRAPPER_DISTS = "$MAVEN_M2_PATH/wrapper/dists"
+} else {
+ $MAVEN_WRAPPER_DISTS = (Get-Item $MAVEN_M2_PATH).Target[0] + "/wrapper/dists"
}
-$MAVEN_HOME_NAME = ([System.Security.Cryptography.MD5]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join ''
+
+$MAVEN_HOME_PARENT = "$MAVEN_WRAPPER_DISTS/$distributionUrlNameMain"
+$MAVEN_HOME_NAME = ([System.Security.Cryptography.SHA256]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join ''
$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME"
if (Test-Path -Path "$MAVEN_HOME" -PathType Container) {
@@ -134,7 +148,33 @@ if ($distributionSha256Sum) {
# unzip and move
Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null
-Rename-Item -Path "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" -NewName $MAVEN_HOME_NAME | Out-Null
+
+# Find the actual extracted directory name (handles snapshots where filename != directory name)
+$actualDistributionDir = ""
+
+# First try the expected directory name (for regular distributions)
+$expectedPath = Join-Path "$TMP_DOWNLOAD_DIR" "$distributionUrlNameMain"
+$expectedMvnPath = Join-Path "$expectedPath" "bin/$MVN_CMD"
+if ((Test-Path -Path $expectedPath -PathType Container) -and (Test-Path -Path $expectedMvnPath -PathType Leaf)) {
+ $actualDistributionDir = $distributionUrlNameMain
+}
+
+# If not found, search for any directory with the Maven executable (for snapshots)
+if (!$actualDistributionDir) {
+ Get-ChildItem -Path "$TMP_DOWNLOAD_DIR" -Directory | ForEach-Object {
+ $testPath = Join-Path $_.FullName "bin/$MVN_CMD"
+ if (Test-Path -Path $testPath -PathType Leaf) {
+ $actualDistributionDir = $_.Name
+ }
+ }
+}
+
+if (!$actualDistributionDir) {
+ Write-Error "Could not find Maven distribution directory in extracted archive"
+}
+
+Write-Verbose "Found extracted Maven distribution directory: $actualDistributionDir"
+Rename-Item -Path "$TMP_DOWNLOAD_DIR/$actualDistributionDir" -NewName $MAVEN_HOME_NAME | Out-Null
try {
Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null
} catch {
diff --git a/pom.xml b/pom.xml
index 889fb5980..3e446f029 100644
--- a/pom.xml
+++ b/pom.xml
@@ -51,7 +51,6 @@
- build-config
docker
integration-tests
server
@@ -126,6 +125,7 @@
6.0.0
1.7.0
UTF-8
+ UTF-8
false
4.0.0
@@ -134,6 +134,7 @@
7.11.0
2.10.3
0.0.11
+ 3.5.0
@@ -458,14 +459,9 @@
true
info
true
- build-config/checkstyle.xml
+ etc/checkstyle.xml
-
- com.adobe.testing
- s3mock-build-config
- ${project.version}
-
com.puppycrawl.tools
checkstyle
@@ -482,6 +478,21 @@
+
+ com.github.gantsign.maven
+ ktlint-maven-plugin
+ ${ktlint-maven-plugin.version}
+
+
+ check-style
+
+
+
+ check
+
+
+
+
maven-clean-plugin
${maven-clean-plugin.version}
@@ -550,6 +561,11 @@
${maven-failsafe-plugin.version}
true
+ random
+ methods
+ 4
+ false
+ true
@@ -587,6 +603,9 @@
false
${java.version}
false
+ ${project.build.sourceEncoding}
+ ${project.reporting.outputEncoding}
+ ${project.reporting.outputEncoding}
@@ -630,6 +649,11 @@
${maven-surefire-plugin.version}
true
+ random
+ methods
+ 4
+ false
+ true
diff --git a/server/pom.xml b/server/pom.xml
index f96177bcc..3380e5f7f 100644
--- a/server/pom.xml
+++ b/server/pom.xml
@@ -170,6 +170,10 @@
maven-checkstyle-plugin
+
+ com.github.gantsign.maven
+ ktlint-maven-plugin
+
org.springframework.boot
spring-boot-maven-plugin
diff --git a/server/src/main/java/com/adobe/testing/s3mock/BucketController.java b/server/src/main/java/com/adobe/testing/s3mock/BucketController.java
index e19b9b1f2..d4259e51a 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/BucketController.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/BucketController.java
@@ -81,9 +81,9 @@ public BucketController(BucketService bucketService) {
this.bucketService = bucketService;
}
- //================================================================================================
+ // ===============================================================================================
// /
- //================================================================================================
+ // ===============================================================================================
/**
* API Reference.
@@ -108,9 +108,9 @@ public ResponseEntity listBuckets(
return ResponseEntity.ok(listAllMyBucketsResult);
}
- //================================================================================================
+ // ===============================================================================================
// /{bucketName:.+}
- //================================================================================================
+ // ===============================================================================================
/**
* Create a bucket if the name matches a simplified version of the bucket naming rules.
@@ -119,9 +119,9 @@ public ResponseEntity listBuckets(
*/
@PutMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -157,9 +157,9 @@ public ResponseEntity createBucket(
*/
@RequestMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
method = RequestMethod.HEAD
@@ -179,9 +179,9 @@ public ResponseEntity headBucket(@PathVariable final String bucketName) {
*/
@DeleteMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -201,9 +201,9 @@ public ResponseEntity deleteBucket(@PathVariable String bucketName) {
*/
@GetMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -224,9 +224,9 @@ public ResponseEntity getVersioningConfiguration(@PathV
*/
@PutMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -248,9 +248,9 @@ public ResponseEntity putVersioningConfiguration(
*/
@GetMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -271,9 +271,9 @@ public ResponseEntity getObjectLockConfiguration(@PathV
*/
@PutMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -294,9 +294,9 @@ public ResponseEntity putObjectLockConfiguration(
*/
@GetMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -317,9 +317,9 @@ public ResponseEntity getBucketLifecycleConfigurat
*/
@PutMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -340,9 +340,9 @@ public ResponseEntity putBucketLifecycleConfiguration(
*/
@DeleteMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -380,9 +380,9 @@ public ResponseEntity getBucketLocation(@PathVariable String
*/
@GetMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -419,9 +419,9 @@ public ResponseEntity listObjects(
*/
@GetMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -455,9 +455,9 @@ public ResponseEntity listObjectsV2(
*/
@GetMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -478,9 +478,15 @@ public ResponseEntity listObjectVersions(
bucketService.verifyBucketExists(bucketName);
bucketService.verifyMaxKeys(maxKeys);
bucketService.verifyEncodingType(encodingType);
- var listVersionsResult =
- bucketService.listVersions(bucketName, prefix, delimiter, encodingType, maxKeys, keyMarker,
- versionIdMarker);
+ var listVersionsResult = bucketService.listVersions(
+ bucketName,
+ prefix,
+ delimiter,
+ encodingType,
+ maxKeys,
+ keyMarker,
+ versionIdMarker
+ );
return ResponseEntity.ok(listVersionsResult);
}
diff --git a/server/src/main/java/com/adobe/testing/s3mock/MultipartController.java b/server/src/main/java/com/adobe/testing/s3mock/MultipartController.java
index 851df4f01..f9f610b8c 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/MultipartController.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/MultipartController.java
@@ -110,18 +110,18 @@ public MultipartController(BucketService bucketService, ObjectService objectServ
this.multipartService = multipartService;
}
- //================================================================================================
+ // ===============================================================================================
// /{bucketName:.+}
- //================================================================================================
+ // ===============================================================================================
/**
* API Reference.
*/
@GetMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -152,9 +152,9 @@ public ResponseEntity listMultipartUploads(
return ResponseEntity.ok(result);
}
- //================================================================================================
+ // ===============================================================================================
// /{bucketName:.+}/{*key}
- //================================================================================================
+ // ===============================================================================================
/**
* API Reference.
@@ -236,7 +236,7 @@ public ResponseEntity uploadPart(
final var tempFileAndChecksum = multipartService.toTempFile(inputStream, httpHeaders);
bucketService.verifyBucketExists(bucketName);
multipartService.verifyMultipartUploadExists(bucketName, uploadId);
- multipartService.verifyPartNumberLimits(partNumber);
+ var partNum = multipartService.verifyPartNumberLimits(partNumber);
String checksum = null;
ChecksumAlgorithm checksumAlgorithm = null;
@@ -256,11 +256,11 @@ public ResponseEntity uploadPart(
multipartService.verifyChecksum(tempFile, checksum, checksumAlgorithm);
}
- //persist checksum per part
+ // persist checksum per part
var etag = multipartService.putPart(bucketName,
key.key(),
uploadId,
- partNumber,
+ partNum,
tempFile,
encryptionHeadersFrom(httpHeaders));
@@ -302,7 +302,7 @@ public ResponseEntity uploadPartCopy(
@RequestParam String partNumber,
@RequestHeader HttpHeaders httpHeaders) {
var bucket = bucketService.verifyBucketExists(bucketName);
- multipartService.verifyPartNumberLimits(partNumber);
+ var partNum = multipartService.verifyPartNumberLimits(partNumber);
var s3ObjectMetadata = objectService.verifyObjectExists(copySource.bucket(), copySource.key(),
copySource.versionId());
objectService.verifyObjectMatchingForCopy(match, noneMatch,
@@ -312,7 +312,7 @@ public ResponseEntity uploadPartCopy(
var result = multipartService.copyPart(copySource.bucket(),
copySource.key(),
copyRange,
- partNumber,
+ partNum,
bucketName,
key.key(),
uploadId,
@@ -327,11 +327,7 @@ public ResponseEntity uploadPartCopy(
h.set(X_AMZ_VERSION_ID, s3ObjectMetadata.versionId());
}
})
- .headers(h -> {
- if (encryptionHeaders != null) {
- h.setAll(encryptionHeaders);
- }
- })
+ .headers(h -> h.setAll(encryptionHeaders))
.body(result);
}
@@ -358,7 +354,7 @@ public ResponseEntity createMultipartUpload(
bucketService.verifyBucketExists(bucketName);
try {
- //workaround for AWS CRT-based S3 client: Consume (and discard) body in Initiate Multipart Upload request
+ // workaround for AWS CRT-based S3 client: Consume (and discard) body in Initiate Multipart Upload request
IOUtils.consume(inputStream);
} catch (IOException e) {
throw BAD_REQUEST_CONTENT;
@@ -382,11 +378,7 @@ public ResponseEntity createMultipartUpload(
return ResponseEntity
.ok()
- .headers(h -> {
- if (encryptionHeaders != null) {
- h.setAll(encryptionHeaders);
- }
- })
+ .headers(h -> h.setAll(encryptionHeaders))
.headers(h -> {
if (checksumAlgorithm != null) {
h.set(X_AMZ_CHECKSUM_ALGORITHM, checksumAlgorithm.toString());
@@ -462,12 +454,12 @@ public ResponseEntity completeMultipartUpload(
return ResponseEntity
.ok()
.headers(h -> {
- if (result.multipartUploadInfo().encryptionHeaders() != null) {
+ if (result != null) {
h.setAll(result.multipartUploadInfo().encryptionHeaders());
}
})
.headers(h -> {
- if (bucket.isVersioningEnabled() && result.versionId() != null) {
+ if (bucket.isVersioningEnabled() && result != null && result.versionId() != null) {
h.set(X_AMZ_VERSION_ID, result.versionId());
}
})
diff --git a/server/src/main/java/com/adobe/testing/s3mock/ObjectController.java b/server/src/main/java/com/adobe/testing/s3mock/ObjectController.java
index b21e89ab5..fc40ec542 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/ObjectController.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/ObjectController.java
@@ -68,6 +68,8 @@
import static com.adobe.testing.s3mock.util.HeaderUtil.storeHeadersFrom;
import static com.adobe.testing.s3mock.util.HeaderUtil.userMetadataFrom;
import static com.adobe.testing.s3mock.util.HeaderUtil.userMetadataHeadersFrom;
+import static org.springframework.http.HttpHeaders.ACCEPT_RANGES;
+import static org.springframework.http.HttpHeaders.CONTENT_RANGE;
import static org.springframework.http.HttpHeaders.CONTENT_TYPE;
import static org.springframework.http.HttpHeaders.IF_MATCH;
import static org.springframework.http.HttpHeaders.IF_MODIFIED_SINCE;
@@ -103,8 +105,6 @@
import com.adobe.testing.s3mock.store.S3ObjectMetadata;
import com.adobe.testing.s3mock.util.AwsHttpHeaders.MetadataDirective;
import com.adobe.testing.s3mock.util.CannedAclUtil;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@@ -140,7 +140,6 @@
@RequestMapping("${com.adobe.testing.s3mock.contextPath:}")
public class ObjectController {
private static final String RANGES_BYTES = "bytes";
- private static final ObjectMapper XML_MAPPER = new XmlMapper();
private final BucketService bucketService;
private final ObjectService objectService;
@@ -150,18 +149,18 @@ public ObjectController(BucketService bucketService, ObjectService objectService
this.objectService = objectService;
}
- //================================================================================================
+ // ===============================================================================================
// /{bucketName:.+}
- //================================================================================================
+ // ===============================================================================================
/**
* API Reference.
*/
@PostMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -183,9 +182,9 @@ public ResponseEntity deleteObjects(
*/
@PostMapping(
value = {
- //AWS SDK V2 pattern
+ // AWS SDK V2 pattern
"/{bucketName:.+}",
- //AWS SDK V1 pattern
+ // AWS SDK V1 pattern
"/{bucketName:.+}/"
},
params = {
@@ -197,22 +196,12 @@ public ResponseEntity deleteObjects(
public ResponseEntity postObject(
@PathVariable String bucketName,
@RequestParam(value = KEY) ObjectKey key,
- @RequestParam(value = TAGGING, required = false) @Nullable String tagging,
+ @RequestParam(value = TAGGING, required = false) @Nullable List tags,
@RequestParam(value = CONTENT_TYPE, required = false) String contentType,
@RequestParam(value = CONTENT_MD5, required = false) String contentMd5,
- @RequestParam(value = X_AMZ_STORAGE_CLASS, required = false) @Nullable String rawStorageClass,
+ @RequestParam(value = X_AMZ_STORAGE_CLASS, required = false,
+ defaultValue = "STANDARD") StorageClass storageClass,
@RequestPart(FILE) MultipartFile file) throws IOException {
- List tags = null;
- if (tagging != null) {
- Tagging tempTagging = XML_MAPPER.readValue(tagging, Tagging.class);
- if (tempTagging.tagSet() != null) {
- tags = tempTagging.tagSet().tags();
- }
- }
- StorageClass storageClass = null;
- if (rawStorageClass != null) {
- storageClass = StorageClass.valueOf(rawStorageClass);
- }
String checksum = null;
ChecksumAlgorithm checksumAlgorithm = null;
@@ -236,14 +225,19 @@ public ResponseEntity postObject(
checksumAlgorithm,
checksum,
owner,
- storageClass);
+ storageClass
+ );
FileUtils.deleteQuietly(tempFile.toFile());
return ResponseEntity
.ok()
.headers(h -> h.setAll(checksumHeaderFrom(s3ObjectMetadata)))
- .headers(h -> h.setAll(s3ObjectMetadata.encryptionHeaders()))
+ .headers(h -> {
+ if (s3ObjectMetadata.encryptionHeaders() != null) {
+ h.setAll(s3ObjectMetadata.encryptionHeaders());
+ }
+ })
.lastModified(s3ObjectMetadata.lastModified())
.eTag(s3ObjectMetadata.etag())
.headers(h -> {
@@ -254,9 +248,9 @@ public ResponseEntity postObject(
.build();
}
- //================================================================================================
+ // ===============================================================================================
// /{bucketName:.+}/{*key}
- //================================================================================================
+ // ===============================================================================================
/**
* API Reference.
@@ -284,7 +278,7 @@ public ResponseEntity headObject(
return ResponseEntity.ok()
.eTag(s3ObjectMetadata.etag())
- .header(HttpHeaders.ACCEPT_RANGES, RANGES_BYTES)
+ .header(ACCEPT_RANGES, RANGES_BYTES)
.lastModified(s3ObjectMetadata.lastModified())
.contentLength(Long.parseLong(s3ObjectMetadata.size()))
.contentType(mediaTypeFrom(s3ObjectMetadata.contentType()))
@@ -293,9 +287,17 @@ public ResponseEntity headObject(
h.set(X_AMZ_VERSION_ID, s3ObjectMetadata.versionId());
}
})
- .headers(h -> h.setAll(s3ObjectMetadata.storeHeaders()))
+ .headers(h -> {
+ if (s3ObjectMetadata.storeHeaders() != null) {
+ h.setAll(s3ObjectMetadata.storeHeaders());
+ }
+ })
.headers(h -> h.setAll(userMetadataHeadersFrom(s3ObjectMetadata)))
- .headers(h -> h.setAll(s3ObjectMetadata.encryptionHeaders()))
+ .headers(h -> {
+ if (s3ObjectMetadata.encryptionHeaders() != null) {
+ h.setAll(s3ObjectMetadata.encryptionHeaders());
+ }
+ })
.headers(h -> h.setAll(checksumHeaderFrom(s3ObjectMetadata)))
.headers(h -> h.setAll(storageClassHeadersFrom(s3ObjectMetadata)))
.headers(h -> h.setAll(overrideHeadersFrom(queryParams)))
@@ -325,7 +327,7 @@ public ResponseEntity deleteObject(
try {
s3ObjectMetadata = objectService.verifyObjectExists(bucketName, key.key(), versionId);
} catch (S3Exception e) {
- //ignore NO_SUCH_KEY
+ // ignore NO_SUCH_KEY
}
objectService.verifyObjectMatching(match, matchLastModifiedTime, matchSize, s3ObjectMetadata);
@@ -345,7 +347,7 @@ public ResponseEntity deleteObject(
try {
objectService.verifyObjectExists(bucketName, key.key(), versionId);
} catch (S3Exception e) {
- //ignore all other exceptions here
+ // ignore all other exceptions here
if (e == NO_SUCH_KEY_DELETE_MARKER) {
h.set(X_AMZ_DELETE_MARKER, "true");
}
@@ -396,7 +398,7 @@ public ResponseEntity getObject(
return ResponseEntity
.ok()
.eTag(s3ObjectMetadata.etag())
- .header(HttpHeaders.ACCEPT_RANGES, RANGES_BYTES)
+ .header(ACCEPT_RANGES, RANGES_BYTES)
.lastModified(s3ObjectMetadata.lastModified())
.contentLength(Long.parseLong(s3ObjectMetadata.size()))
.contentType(mediaTypeFrom(s3ObjectMetadata.contentType()))
@@ -405,9 +407,17 @@ public ResponseEntity getObject(
h.set(X_AMZ_VERSION_ID, s3ObjectMetadata.versionId());
}
})
- .headers(h -> h.setAll(s3ObjectMetadata.storeHeaders()))
+ .headers(h -> {
+ if (s3ObjectMetadata.storeHeaders() != null) {
+ h.setAll(s3ObjectMetadata.storeHeaders());
+ }
+ })
.headers(h -> h.setAll(userMetadataHeadersFrom(s3ObjectMetadata)))
- .headers(h -> h.setAll(s3ObjectMetadata.encryptionHeaders()))
+ .headers(h -> {
+ if (s3ObjectMetadata.encryptionHeaders() != null) {
+ h.setAll(s3ObjectMetadata.encryptionHeaders());
+ }
+ })
.headers(h -> {
if (mode == ChecksumMode.ENABLED) {
h.setAll(checksumHeaderFrom(s3ObjectMetadata));
@@ -739,16 +749,16 @@ public ResponseEntity getObjectAttributes(
@RequestParam(value = VERSION_ID, required = false) @Nullable String versionId) {
var bucket = bucketService.verifyBucketExists(bucketName);
- //this is for either an object request, or a parts request.
+ // this is for either an object request, or a parts request.
var s3ObjectMetadata = objectService.verifyObjectExists(bucketName, key.key(), versionId);
objectService.verifyObjectMatching(match, noneMatch,
ifModifiedSince, ifUnmodifiedSince, s3ObjectMetadata);
- //S3Mock stores the etag with the additional quotation marks needed in the headers. This
+ // S3Mock stores the etag with the additional quotation marks needed in the headers. This
// response does not use eTag as a header, so it must not contain the quotation marks.
var etag = s3ObjectMetadata.etag().replace("\"", "");
var objectSize = Long.parseLong(s3ObjectMetadata.size());
- //in object attributes, S3 returns STANDARD, in all other APIs it returns null...
+ // in object attributes, S3 returns STANDARD, in all other APIs it returns null...
var storageClass = s3ObjectMetadata.storageClass() == null
? STANDARD
: s3ObjectMetadata.storageClass();
@@ -757,7 +767,7 @@ public ResponseEntity getObjectAttributes(
objectAttributes.contains(ObjectAttributes.ETAG.toString())
? etag
: null,
- null, //parts not supported right now
+ null, // parts not supported right now
objectAttributes.contains(ObjectAttributes.OBJECT_SIZE.toString())
? objectSize
: null,
@@ -856,7 +866,11 @@ public ResponseEntity putObject(
}
})
.headers(h -> h.setAll(checksumHeaderFrom(s3ObjectMetadata)))
- .headers(h -> h.setAll(s3ObjectMetadata.encryptionHeaders()))
+ .headers(h -> {
+ if (s3ObjectMetadata.encryptionHeaders() != null) {
+ h.setAll(s3ObjectMetadata.encryptionHeaders());
+ }
+ })
.header(X_AMZ_OBJECT_SIZE, s3ObjectMetadata.size())
.lastModified(s3ObjectMetadata.lastModified())
.eTag(s3ObjectMetadata.etag())
@@ -918,17 +932,25 @@ public ResponseEntity copyObject(
userMetadata,
storageClass);
- //return expiration
+ // return expiration
if (copyS3ObjectMetadata == null) {
return ResponseEntity
.notFound()
- .headers(headers -> headers.setAll(s3ObjectMetadata.encryptionHeaders()))
+ .headers(headers -> {
+ if (s3ObjectMetadata.encryptionHeaders() != null) {
+ headers.setAll(s3ObjectMetadata.encryptionHeaders());
+ }
+ })
.build();
}
return ResponseEntity
.ok()
- .headers(headers -> headers.setAll(s3ObjectMetadata.encryptionHeaders()))
+ .headers(headers -> {
+ if (s3ObjectMetadata.encryptionHeaders() != null) {
+ headers.setAll(s3ObjectMetadata.encryptionHeaders());
+ }
+ })
.headers(h -> {
if (sourceBucket.isVersioningEnabled() && copySource.versionId() != null) {
h.set(X_AMZ_COPY_SOURCE_VERSION_ID, copySource.versionId());
@@ -941,48 +963,52 @@ public ResponseEntity copyObject(
.body(new CopyObjectResult(copyS3ObjectMetadata));
}
- /**
- * Supports returning different ranges of an object.
- * E.g., if content has 100 bytes, the range request could be: bytes=10-100, 10--1 and 10-200
- * API Reference
- *
- * @param range {@link String}
- * @param s3ObjectMetadata {@link S3ObjectMetadata}
- */
- private ResponseEntity getObjectWithRange(HttpRange range,
- S3ObjectMetadata s3ObjectMetadata) {
+ private ResponseEntity getObjectWithRange(
+ HttpRange range,
+ S3ObjectMetadata s3ObjectMetadata
+ ) {
var fileSize = s3ObjectMetadata.dataPath().toFile().length();
- var bytesToRead = Math.min(fileSize - 1, range.getRangeEnd(fileSize))
- - range.getRangeStart(fileSize) + 1;
+ var startInclusive = range.getRangeStart(fileSize);
+ var endInclusive = Math.min(fileSize - 1, range.getRangeEnd(fileSize));
+ var contentLength = endInclusive - startInclusive + 1;
- if (bytesToRead < 0 || fileSize < range.getRangeStart(fileSize)) {
- return ResponseEntity.status(REQUESTED_RANGE_NOT_SATISFIABLE.value()).build();
+ if (contentLength < 0 || fileSize <= startInclusive) {
+ return ResponseEntity.status(REQUESTED_RANGE_NOT_SATISFIABLE).build();
}
return ResponseEntity
- .status(PARTIAL_CONTENT.value())
- .headers(headers -> headers.setAll(userMetadataHeadersFrom(s3ObjectMetadata)))
- .headers(headers -> headers.setAll(s3ObjectMetadata.storeHeaders()))
- .headers(headers -> headers.setAll(s3ObjectMetadata.encryptionHeaders()))
- .header(HttpHeaders.ACCEPT_RANGES, RANGES_BYTES)
- .header(HttpHeaders.CONTENT_RANGE,
- String.format("bytes %s-%s/%s",
- range.getRangeStart(fileSize), bytesToRead + range.getRangeStart(fileSize) - 1,
- s3ObjectMetadata.size()))
+ .status(PARTIAL_CONTENT)
+ .headers(headers -> applyS3MetadataHeaders(headers, s3ObjectMetadata))
+ .header(ACCEPT_RANGES, RANGES_BYTES)
+ .header(CONTENT_RANGE, String.format("bytes %d-%d/%d", startInclusive, endInclusive, fileSize))
.eTag(s3ObjectMetadata.etag())
.contentType(mediaTypeFrom(s3ObjectMetadata.contentType()))
.lastModified(s3ObjectMetadata.lastModified())
- .contentLength(bytesToRead)
+ .contentLength(contentLength)
.body(outputStream ->
- extractBytesToOutputStream(range, s3ObjectMetadata, outputStream, fileSize, bytesToRead)
+ extractBytesToOutputStream(startInclusive, s3ObjectMetadata, outputStream, contentLength)
);
}
- private static void extractBytesToOutputStream(HttpRange range, S3ObjectMetadata s3ObjectMetadata,
- OutputStream outputStream, long fileSize, long bytesToRead) throws IOException {
+ private void applyS3MetadataHeaders(HttpHeaders headers, S3ObjectMetadata metadata) {
+ headers.setAll(userMetadataHeadersFrom(metadata));
+ if (metadata.storeHeaders() != null) {
+ headers.setAll(metadata.storeHeaders());
+ }
+ if (metadata.encryptionHeaders() != null) {
+ headers.setAll(metadata.encryptionHeaders());
+ }
+ }
+
+ private static void extractBytesToOutputStream(
+ long startOffset,
+ S3ObjectMetadata s3ObjectMetadata,
+ OutputStream outputStream,
+ long bytesToRead
+ ) throws IOException {
try (var fis = Files.newInputStream(s3ObjectMetadata.dataPath())) {
- var skip = fis.skip(range.getRangeStart(fileSize));
- if (skip == range.getRangeStart(fileSize)) {
+ var skipped = fis.skip(startOffset);
+ if (skipped == startOffset) {
try (var bis = BoundedInputStream
.builder()
.setInputStream(fis)
diff --git a/server/src/main/java/com/adobe/testing/s3mock/S3MockConfiguration.java b/server/src/main/java/com/adobe/testing/s3mock/S3MockConfiguration.java
index b82d5825b..0e1baf4f3 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/S3MockConfiguration.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/S3MockConfiguration.java
@@ -141,7 +141,7 @@ MappingJackson2XmlHttpMessageConverter messageConverter() {
OrderedFormContentFilter httpPutFormContentFilter() {
return new OrderedFormContentFilter() {
@Override
- protected boolean shouldNotFilter(@NonNull HttpServletRequest request) {
+ protected boolean shouldNotFilter(HttpServletRequest request) {
return true;
}
};
@@ -183,9 +183,26 @@ ObjectCannedAclHeaderConverter objectCannedAclHeaderConverter() {
return new ObjectCannedAclHeaderConverter();
}
+ /**
+ * Spring only provides an ObjectMapper that can serialize but not deserialize XML.
+ */
+ private XmlMapper xmlMapper() {
+ var xmlMapper = XmlMapper.builder()
+ .findAndAddModules()
+ .enable(ToXmlGenerator.Feature.WRITE_XML_DECLARATION)
+ .enable(ToXmlGenerator.Feature.AUTO_DETECT_XSI_TYPE)
+ .enable(FromXmlParser.Feature.AUTO_DETECT_XSI_TYPE)
+ .build();
+ xmlMapper.setSerializationInclusion(JsonInclude.Include.NON_EMPTY);
+ xmlMapper.getFactory()
+ .getXMLOutputFactory()
+ .setProperty(WstxOutputProperties.P_USE_DOUBLE_QUOTES_IN_XML_DECL, true);
+ return xmlMapper;
+ }
+
@Bean
TaggingHeaderConverter taggingHeaderConverter() {
- return new TaggingHeaderConverter();
+ return new TaggingHeaderConverter(xmlMapper());
}
@Bean
diff --git a/server/src/main/java/com/adobe/testing/s3mock/TaggingHeaderConverter.java b/server/src/main/java/com/adobe/testing/s3mock/TaggingHeaderConverter.java
index 3dc8c7df9..4a6b66ead 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/TaggingHeaderConverter.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/TaggingHeaderConverter.java
@@ -17,7 +17,10 @@
package com.adobe.testing.s3mock;
import com.adobe.testing.s3mock.dto.Tag;
+import com.adobe.testing.s3mock.dto.Tagging;
import com.adobe.testing.s3mock.util.AwsHttpHeaders;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
@@ -27,13 +30,51 @@
/**
* Converts values of the {@link AwsHttpHeaders#X_AMZ_TAGGING} which is sent by the Amazon client.
* Example: x-amz-tagging: tag1=value1&tag2=value2
+ *
+ *
+ * It also converts XML tags into a list of {@link Tag} objects.
+ * Example: '&lt;Tagging&gt;&lt;TagSet&gt;&lt;Tag&gt;&lt;Key&gt;k1&lt;/Key&gt;&lt;Value&gt;v1&lt;/Value&gt;&lt;/Tag&gt;&lt;Tag&gt;&lt;Key&gt;k2&lt;/Key&gt;&lt;Value&gt;v2&lt;/Value&gt;&lt;/Tag&gt;&lt;/TagSet&gt;&lt;/Tagging&gt;'
+ *
+ *
* API Reference
* API Reference
+ * API Reference
*/
class TaggingHeaderConverter implements Converter<String, List<Tag>> {
+ private static final String XML_START = "<";
+ private static final String XML_END = ">";
+
+ private final XmlMapper xmlMapper;
+
+ public TaggingHeaderConverter(XmlMapper xmlMapper) {
+ this.xmlMapper = xmlMapper;
+ }
+
@Override
@Nullable
public List<Tag> convert(String source) {
+ if (source.startsWith(XML_START) && source.endsWith(XML_END)) {
+ return convertTagXml(source);
+ }
+
+ return convertTagPairs(source);
+ }
+
+ @Nullable
+ private List<Tag> convertTagXml(String source) {
+ try {
+ var tagging = this.xmlMapper.readValue(source, Tagging.class);
+ if (tagging.tagSet() != null) {
+ return tagging.tagSet().tags();
+ }
+ return null;
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException("Failed to parse XML tags from header: " + source, e);
+ }
+ }
+
+ @Nullable
+ private static List<Tag> convertTagPairs(String source) {
var tags = new ArrayList<Tag>();
String[] tagPairs = StringUtils.split(source, '&');
for (String tag : tagPairs) {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/AccessControlPolicy.java b/server/src/main/java/com/adobe/testing/s3mock/dto/AccessControlPolicy.java
index 8de44575e..c6b1ee57a 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/AccessControlPolicy.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/AccessControlPolicy.java
@@ -37,11 +37,11 @@ public class AccessControlPolicy {
@JsonProperty("Grant")
@JacksonXmlElementWrapper(localName = "AccessControlList") List accessControlList;
@JsonProperty("Owner") Owner owner;
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns;
public AccessControlPolicy() {
- //needed by Jackson
+ // needed by Jackson
}
public AccessControlPolicy(Owner owner, List accessControlList, String xmlns) {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/BucketLifecycleConfiguration.java b/server/src/main/java/com/adobe/testing/s3mock/dto/BucketLifecycleConfiguration.java
index 9911fb8e7..0d29fb612 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/BucketLifecycleConfiguration.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/BucketLifecycleConfiguration.java
@@ -32,7 +32,7 @@
public record BucketLifecycleConfiguration(
@JacksonXmlElementWrapper(useWrapping = false)
@JsonProperty("Rule") List rules,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
public BucketLifecycleConfiguration {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResult.java b/server/src/main/java/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResult.java
index a0b834509..b7064e861 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResult.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/CompleteMultipartUploadResult.java
@@ -40,7 +40,7 @@ public record CompleteMultipartUploadResult(
@JsonProperty("ETag") String etag,
@JsonProperty("Key") String key,
@JsonProperty("Location") String location,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns,
@JsonIgnore MultipartUploadInfo multipartUploadInfo,
@JsonIgnore String versionId,
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/CopyObjectResult.java b/server/src/main/java/com/adobe/testing/s3mock/dto/CopyObjectResult.java
index 61c836c26..8ef4b1410 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/CopyObjectResult.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/CopyObjectResult.java
@@ -32,7 +32,7 @@
public record CopyObjectResult(
@JsonProperty("ETag") String etag,
@JsonProperty("LastModified") String lastModified,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
public CopyObjectResult {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/CopyPartResult.java b/server/src/main/java/com/adobe/testing/s3mock/dto/CopyPartResult.java
index 3eb952175..4ba058ff4 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/CopyPartResult.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/CopyPartResult.java
@@ -39,7 +39,7 @@ public record CopyPartResult(
@JsonProperty("ETag") String etag,
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", timezone = "UTC")
@JsonProperty("LastModified") Date lastModified,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
@@ -50,12 +50,19 @@ public record CopyPartResult(
}
}
- public CopyPartResult(final Date date, final String etag) {
- this(null, null, null, null, null, etag, date, null);
- }
-
- public static CopyPartResult from(final Date date, final String etag) {
- return new CopyPartResult(date, etag);
+ public CopyPartResult(
+ Date date, String etag
+ ) {
+ this(
+ null,
+ null,
+ null,
+ null,
+ null,
+ etag,
+ date,
+ null
+ );
}
public CopyPartResult(
@@ -74,4 +81,8 @@ public CopyPartResult(
null
);
}
+
+ public static CopyPartResult from(final Date date, final String etag) {
+ return new CopyPartResult(date, etag);
+ }
}
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/CreateBucketConfiguration.java b/server/src/main/java/com/adobe/testing/s3mock/dto/CreateBucketConfiguration.java
index 7f0b2c1d0..b74dc52f3 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/CreateBucketConfiguration.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/CreateBucketConfiguration.java
@@ -34,7 +34,7 @@ public record CreateBucketConfiguration(
@JsonSerialize(using = LocationConstraintSerializer.class)
@JsonDeserialize(using = LocationConstraintDeserializer.class)
@JsonProperty("LocationConstraint") LocationConstraint locationConstraint,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
public CreateBucketConfiguration {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/DefaultRetention.java b/server/src/main/java/com/adobe/testing/s3mock/dto/DefaultRetention.java
index 09e63a201..02d54bf27 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/DefaultRetention.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/DefaultRetention.java
@@ -24,7 +24,7 @@
*/
@S3Verified(year = 2025)
public record DefaultRetention(
- //TODO: setting days & years not allowed!
+ // TODO: setting days & years not allowed!
@JsonProperty("Days") Integer days,
@JsonProperty("Mode") Mode mode,
@JsonProperty("Years") Integer years
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/DeleteResult.java b/server/src/main/java/com/adobe/testing/s3mock/dto/DeleteResult.java
index acc19646b..d2b86f109 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/DeleteResult.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/DeleteResult.java
@@ -32,7 +32,7 @@ public record DeleteResult(
@JsonProperty("Deleted") List deletedObjects,
@JacksonXmlElementWrapper(useWrapping = false)
@JsonProperty("Error") List errors,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/GetObjectAttributesOutput.java b/server/src/main/java/com/adobe/testing/s3mock/dto/GetObjectAttributesOutput.java
index 1909d63fe..e65071552 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/GetObjectAttributesOutput.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/GetObjectAttributesOutput.java
@@ -31,13 +31,13 @@
@JsonRootName("GetObjectAttributesOutput")
public record GetObjectAttributesOutput(
@JsonProperty("Checksum") Checksum checksum,
- //This response does not use eTag as a header, so it must not contain the quotation marks.
+ // This response does not use eTag as a header, so it must not contain the quotation marks.
@JsonProperty("ETag") String etag,
@JacksonXmlElementWrapper(useWrapping = false)
@JsonProperty("ObjectParts") List objectParts,
@JsonProperty("ObjectSize") Long objectSize,
@JsonProperty("StorageClass") StorageClass storageClass,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/InitiateMultipartUploadResult.java b/server/src/main/java/com/adobe/testing/s3mock/dto/InitiateMultipartUploadResult.java
index 16745d1b4..8d52ca154 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/InitiateMultipartUploadResult.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/InitiateMultipartUploadResult.java
@@ -31,7 +31,7 @@ public record InitiateMultipartUploadResult(
@JsonProperty("Bucket") String bucketName,
@JsonProperty("Key") String fileName,
@JsonProperty("UploadId") String uploadId,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
public InitiateMultipartUploadResult {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/LegalHold.java b/server/src/main/java/com/adobe/testing/s3mock/dto/LegalHold.java
index 16bfeb8e2..35c45937e 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/LegalHold.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/LegalHold.java
@@ -31,7 +31,7 @@
@JsonRootName("LegalHold")
public record LegalHold(
@JsonProperty("Status") Status status,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/ListAllMyBucketsResult.java b/server/src/main/java/com/adobe/testing/s3mock/dto/ListAllMyBucketsResult.java
index cc7d50b50..d0ab54fd0 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/ListAllMyBucketsResult.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/ListAllMyBucketsResult.java
@@ -32,7 +32,7 @@ public record ListAllMyBucketsResult(
@JsonProperty("ContinuationToken") String continuationToken,
@JsonProperty("Owner") Owner owner,
@JsonProperty("Prefix") String prefix,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
public ListAllMyBucketsResult {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/ListBucketResult.java b/server/src/main/java/com/adobe/testing/s3mock/dto/ListBucketResult.java
index e9cafdb30..f62004aac 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/ListBucketResult.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/ListBucketResult.java
@@ -42,7 +42,7 @@ public record ListBucketResult(
@JsonProperty("Name") String name,
@JsonProperty("NextMarker") String nextMarker,
@JsonProperty("Prefix") String prefix,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/ListBucketResultV2.java b/server/src/main/java/com/adobe/testing/s3mock/dto/ListBucketResultV2.java
index 06a3c37ac..35a9b9dfa 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/ListBucketResultV2.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/ListBucketResultV2.java
@@ -44,7 +44,7 @@ public record ListBucketResultV2(
@JsonProperty("NextContinuationToken") String nextContinuationToken,
@JsonProperty("Prefix") String prefix,
@JsonProperty("StartAfter") String startAfter,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
public ListBucketResultV2 {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/ListMultipartUploadsResult.java b/server/src/main/java/com/adobe/testing/s3mock/dto/ListMultipartUploadsResult.java
index 5a8dfb484..1fe4d9eff 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/ListMultipartUploadsResult.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/ListMultipartUploadsResult.java
@@ -44,7 +44,7 @@ public record ListMultipartUploadsResult(
@JacksonXmlElementWrapper(useWrapping = false)
@JsonProperty("Upload") List multipartUploads,
@JsonProperty("UploadIdMarker") String uploadIdMarker,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
public ListMultipartUploadsResult {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/ListPartsResult.java b/server/src/main/java/com/adobe/testing/s3mock/dto/ListPartsResult.java
index 22ea69098..edf908cdb 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/ListPartsResult.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/ListPartsResult.java
@@ -44,7 +44,7 @@ public record ListPartsResult(
@JsonProperty("PartNumberMarker") Integer partNumberMarker,
@JsonProperty("StorageClass") StorageClass storageClass,
@JsonProperty("UploadId") String uploadId,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/LocationConstraint.java b/server/src/main/java/com/adobe/testing/s3mock/dto/LocationConstraint.java
index 38f479016..d11d5868c 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/LocationConstraint.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/LocationConstraint.java
@@ -33,7 +33,7 @@ public record LocationConstraint(
@JsonSerialize(using = RegionSerializer.class)
@JsonDeserialize(using = RegionDeserializer.class)
@JacksonXmlText Region region,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
public LocationConstraint {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/ObjectLockConfiguration.java b/server/src/main/java/com/adobe/testing/s3mock/dto/ObjectLockConfiguration.java
index 191a4a6c6..652bb3850 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/ObjectLockConfiguration.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/ObjectLockConfiguration.java
@@ -29,7 +29,7 @@
public record ObjectLockConfiguration(
@JsonProperty("ObjectLockEnabled") ObjectLockEnabled objectLockEnabled,
@JsonProperty("Rule") ObjectLockRule objectLockRule,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
public ObjectLockConfiguration {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/Owner.java b/server/src/main/java/com/adobe/testing/s3mock/dto/Owner.java
index 038ddad31..76e1a4472 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/Owner.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/Owner.java
@@ -25,7 +25,7 @@
*/
@S3Verified(year = 2025)
public record Owner(
- //AWS deprecated this field in 2025-05
+ // AWS deprecated this field in 2025-05
@Deprecated(since = "2025-10-01", forRemoval = true)
@JsonProperty("DisplayName") String displayName,
@JsonProperty("ID") String id
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/RegionSerializer.java b/server/src/main/java/com/adobe/testing/s3mock/dto/RegionSerializer.java
index 781d21bfb..308f6a26e 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/RegionSerializer.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/RegionSerializer.java
@@ -30,7 +30,7 @@ public class RegionSerializer extends JsonSerializer {
public void serialize(Region value, JsonGenerator gen, SerializerProvider serializers)
throws IOException {
var regionString = value.toString();
- //API doc says to return "null" for the us-east-1 region.
+ // API doc says to return "null" for the us-east-1 region.
if ("us-east-1".equals(regionString)) {
gen.writeString("null");
} else {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/Retention.java b/server/src/main/java/com/adobe/testing/s3mock/dto/Retention.java
index 289d7fff3..f1ae91dca 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/Retention.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/Retention.java
@@ -36,7 +36,7 @@ public record Retention(
@JsonSerialize(using = InstantSerializer.class)
@JsonDeserialize(using = InstantDeserializer.class)
@JsonProperty("RetainUntilDate") Instant retainUntilDate,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
public Retention {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/Tagging.java b/server/src/main/java/com/adobe/testing/s3mock/dto/Tagging.java
index 3556773a1..f4bc0d50f 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/Tagging.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/Tagging.java
@@ -30,7 +30,7 @@
@JsonRootName("Tagging")
public record Tagging(
@JsonProperty("TagSet") TagSet tagSet,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
public Tagging {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/dto/VersioningConfiguration.java b/server/src/main/java/com/adobe/testing/s3mock/dto/VersioningConfiguration.java
index 3aae3a68a..b93aebe2d 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/dto/VersioningConfiguration.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/dto/VersioningConfiguration.java
@@ -31,7 +31,7 @@
public record VersioningConfiguration(
@JsonProperty("MfaDelete") MFADelete mfaDelete,
@JsonProperty("Status") Status status,
- //workaround for adding xmlns attribute to root element only.
+ // workaround for adding xmlns attribute to root element only.
@JacksonXmlProperty(isAttribute = true, localName = "xmlns") String xmlns
) {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/service/BucketService.java b/server/src/main/java/com/adobe/testing/s3mock/service/BucketService.java
index 87ec7d075..5ab4617d6 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/service/BucketService.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/service/BucketService.java
@@ -63,12 +63,23 @@
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.regex.Pattern;
import org.jspecify.annotations.Nullable;
import software.amazon.awssdk.utils.http.SdkHttpUtils;
public class BucketService {
private final Map listObjectsPagingStateCache = new ConcurrentHashMap<>();
private final Map listBucketsPagingStateCache = new ConcurrentHashMap<>();
+ // Validation patterns per S3 bucket naming rules
+ private static final Pattern ALLOWED_CHARS_AND_LENGTH =
+ Pattern.compile("^[a-z0-9.-]{3,63}$");
+ private static final Pattern STARTS_AND_ENDS_WITH_ALNUM =
+ Pattern.compile("^[a-z0-9].*[a-z0-9]$");
+ private static final Pattern ADJACENT_DOTS =
+ Pattern.compile("\\.\\.");
+ private static final Pattern IP_LIKE_FOUR_PARTS =
+ Pattern.compile("^(\\d{1,3}\\.){3}\\d{1,3}$");
+
private final BucketStore bucketStore;
private final ObjectStore objectStore;
@@ -79,21 +90,17 @@ public BucketService(BucketStore bucketStore, ObjectStore objectStore) {
public boolean isBucketEmpty(String bucketName) {
var bucketMetadata = bucketStore.getBucketMetadata(bucketName);
- if (bucketMetadata != null) {
- var objects = bucketMetadata.objects();
- if (!objects.isEmpty()) {
- for (var id : objects.values()) {
- var s3ObjectMetadata = objectStore.getS3ObjectMetadata(bucketMetadata, id, null);
- if (s3ObjectMetadata != null && !s3ObjectMetadata.deleteMarker()) {
- return false;
- }
+ var objects = bucketMetadata.objects();
+ if (!objects.isEmpty()) {
+ for (var id : objects.values()) {
+ var s3ObjectMetadata = objectStore.getS3ObjectMetadata(bucketMetadata, id, null);
+ if (s3ObjectMetadata != null && !s3ObjectMetadata.deleteMarker()) {
+ return false;
}
- return true;
}
- return bucketMetadata.objects().isEmpty();
- } else {
- throw new IllegalStateException("Requested Bucket does not exist: " + bucketName);
+ return true;
}
+ return bucketMetadata.objects().isEmpty();
}
public boolean doesBucketExist(String bucketName) {
@@ -111,7 +118,6 @@ public ListAllMyBucketsResult listBuckets(
var buckets = bucketStore
.listBuckets()
.stream()
- .filter(Objects::nonNull)
.filter(b -> b.name().startsWith(normalizedPrefix))
.sorted(Comparator.comparing(BucketMetadata::name))
.map(Bucket::from)
@@ -163,29 +169,25 @@ public Bucket createBucket(
public boolean deleteBucket(String bucketName) {
var bucketMetadata = bucketStore.getBucketMetadata(bucketName);
- if (bucketMetadata != null) {
- var objects = bucketMetadata.objects();
- if (!objects.isEmpty()) {
- for (var entry : objects.entrySet()) {
- var s3ObjectMetadata =
- objectStore.getS3ObjectMetadata(bucketMetadata, entry.getValue(), null);
- if (s3ObjectMetadata != null && s3ObjectMetadata.deleteMarker()) {
- //yes, we really want to delete the objects here, if they are delete markers, they
- //do not officially exist.
- objectStore.doDeleteObject(bucketMetadata, entry.getValue());
- bucketStore.removeFromBucket(entry.getKey(), bucketName);
- }
+ var objects = bucketMetadata.objects();
+ if (!objects.isEmpty()) {
+ for (var entry : objects.entrySet()) {
+ var s3ObjectMetadata =
+ objectStore.getS3ObjectMetadata(bucketMetadata, entry.getValue(), null);
+ if (s3ObjectMetadata != null && s3ObjectMetadata.deleteMarker()) {
+ // yes, we really want to delete the objects here, if they are delete markers, they
+ // do not officially exist.
+ objectStore.doDeleteObject(bucketMetadata, entry.getValue());
+ bucketStore.removeFromBucket(entry.getKey(), bucketName);
}
}
- //check again if bucket is empty
- bucketMetadata = bucketStore.getBucketMetadata(bucketName);
- if (!bucketMetadata.objects().isEmpty()) {
- throw new IllegalStateException("Bucket is not empty: " + bucketName);
- }
- return bucketStore.deleteBucket(bucketName);
- } else {
- throw new IllegalStateException("Requested Bucket does not exist: " + bucketName);
}
+ // check again if bucket is empty
+ bucketMetadata = bucketStore.getBucketMetadata(bucketName);
+ if (!bucketMetadata.objects().isEmpty()) {
+ throw new IllegalStateException("Bucket is not empty: " + bucketName);
+ }
+ return bucketStore.deleteBucket(bucketName);
}
public void setVersioningConfiguration(String bucketName, VersioningConfiguration configuration) {
@@ -244,7 +246,6 @@ public List getS3Objects(String bucketName, @Nullable String prefix) {
var uuids = bucketStore.lookupIdsInBucket(prefix, bucketName);
return uuids
.stream()
- .filter(Objects::nonNull)
.map(uuid -> objectStore.getS3ObjectMetadata(bucketMetadata, uuid, null))
.filter(Objects::nonNull)
.map(S3Object::from)
@@ -253,13 +254,15 @@ public List getS3Objects(String bucketName, @Nullable String prefix) {
.toList();
}
- public ListVersionsResult listVersions(String bucketName,
- String prefix,
- String delimiter,
- String encodingType,
+ public ListVersionsResult listVersions(
+ String bucketName,
+ @Nullable String prefix,
+ @Nullable String delimiter,
+ @Nullable String encodingType,
Integer maxKeys,
- String keyMarker,
- String versionIdMarker) {
+ @Nullable String keyMarker,
+ @Nullable String versionIdMarker
+ ) {
var result = listObjectsV1(bucketName, prefix, delimiter, keyMarker, encodingType, maxKeys);
var bucket = bucketStore.getBucketMetadata(bucketName);
@@ -496,12 +499,76 @@ public void verifyBucketObjectLockEnabled(String bucketName) {
}
/**
+ * Validates S3 bucket names according to the documented constraints.
* API Reference Bucket Naming.
*/
public void verifyBucketNameIsAllowed(String bucketName) {
- if (!bucketName.matches("[a-z0-9.-]+")) {
+ if (bucketName.isBlank()) {
+ throw INVALID_BUCKET_NAME;
+ }
+
+ // Allowed chars and length (3..63)
+ if (!ALLOWED_CHARS_AND_LENGTH.matcher(bucketName).matches()) {
+ throw INVALID_BUCKET_NAME;
+ }
+
+ // Must start and end with a letter or number
+ if (!STARTS_AND_ENDS_WITH_ALNUM.matcher(bucketName).matches()) {
+ throw INVALID_BUCKET_NAME;
+ }
+
+ // Must not contain two adjacent periods
+ if (ADJACENT_DOTS.matcher(bucketName).find()) {
+ throw INVALID_BUCKET_NAME;
+ }
+
+ // Must not be formatted as an IP address (e.g., 192.168.5.4)
+ if (IP_LIKE_FOUR_PARTS.matcher(bucketName).matches() && isValidIpv4(bucketName)) {
+ throw INVALID_BUCKET_NAME;
+ }
+
+ // Disallowed prefixes
+ if (bucketName.startsWith("xn--")) {
+ throw INVALID_BUCKET_NAME;
+ }
+ if (bucketName.startsWith("sthree-")) {
throw INVALID_BUCKET_NAME;
}
+ if (bucketName.startsWith("amzn-s3-demo-")) {
+ throw INVALID_BUCKET_NAME;
+ }
+ }
+
+ // Parses and validates IPv4 octets (0..255) to avoid false positives like 999.999.999.999
+ private static boolean isValidIpv4(String s) {
+ String[] parts = s.split("\\.");
+ if (parts.length != 4) {
+ return false;
+ }
+ for (String p : parts) {
+ if (p.isEmpty() || p.length() > 3) {
+ return false;
+ }
+ // Disallow leading plus/minus; Pattern already ensures digits only
+ int val;
+ try {
+ val = Integer.parseInt(p);
+ } catch (NumberFormatException e) {
+ return false;
+ }
+ if (val < 0 || val > 255) {
+ return false;
+ }
+ // Reject octets with leading zeros (e.g., "01") to avoid ambiguity with octal notation.
+ // While S3 only cares about the IP format, leading zeros are not allowed in IPv4 addresses
+ // per RFC 1123 and RFC 3986, and some systems interpret them as octal. If you need to allow
+ // octets with leading zeros (e.g., "01"), you may remove this check, but be aware of potential
+ // compatibility and ambiguity issues.
+ if (p.length() > 1 && p.startsWith("0")) {
+ return false;
+ }
+ }
+ return true;
}
public void verifyBucketIsEmpty(String bucketName) {
@@ -512,8 +579,8 @@ public void verifyBucketIsEmpty(String bucketName) {
public void verifyBucketDoesNotExist(String bucketName) {
if (bucketStore.doesBucketExist(bucketName)) {
- //currently, all buckets have the same owner in S3Mock. If the bucket exists, it's owned by
- //the owner that tries to create the bucket owns the existing bucket too.
+ // currently, all buckets have the same owner in S3Mock. If the bucket exists, it's owned by
+ // the owner that tries to create the bucket owns the existing bucket too.
throw BUCKET_ALREADY_OWNED_BY_YOU;
}
}
diff --git a/server/src/main/java/com/adobe/testing/s3mock/service/MultipartService.java b/server/src/main/java/com/adobe/testing/s3mock/service/MultipartService.java
index 5ed305847..29d0379a5 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/service/MultipartService.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/service/MultipartService.java
@@ -72,7 +72,7 @@ public String putPart(
String bucketName,
String key,
UUID uploadId,
- String partNumber,
+ Integer partNumber,
Path path,
Map encryptionHeaders) {
var bucketMetadata = bucketStore.getBucketMetadata(bucketName);
@@ -89,12 +89,13 @@ public CopyPartResult copyPart(
String bucketName,
String key,
HttpRange copyRange,
- String partNumber,
+ Integer partNumber,
String destinationBucket,
String destinationKey,
UUID uploadId,
Map encryptionHeaders,
- String versionId) {
+ @Nullable String versionId
+ ) {
var sourceBucketMetadata = bucketStore.getBucketMetadata(bucketName);
var destinationBucketMetadata = bucketStore.getBucketMetadata(destinationBucket);
var sourceId = sourceBucketMetadata.getID(key);
@@ -109,7 +110,7 @@ public CopyPartResult copyPart(
destinationBucketMetadata, destinationId, uploadId, encryptionHeaders, versionId);
return CopyPartResult.from(new Date(), "\"" + partEtag + "\"");
} catch (Exception e) {
- //something went wrong with writing the destination file, clean up ID from BucketStore.
+ // something went wrong with writing the destination file, clean up ID from BucketStore.
bucketStore.removeFromBucket(destinationKey, destinationBucket);
throw new IllegalStateException(String.format(
"Could not copy part. sourceBucket=%s, destinationBucket=%s, key=%s, sourceId=%s, "
@@ -203,10 +204,10 @@ public InitiateMultipartUploadResult createMultipartUpload(
Owner initiator,
Map userMetadata,
Map encryptionHeaders,
- List tags,
+ @Nullable List tags,
StorageClass storageClass,
- ChecksumType checksumType,
- ChecksumAlgorithm checksumAlgorithm) {
+ @Nullable ChecksumType checksumType,
+ @Nullable ChecksumAlgorithm checksumAlgorithm) {
var bucketMetadata = bucketStore.getBucketMetadata(bucketName);
var id = bucketStore.addKeyToBucket(key, bucketName);
@@ -226,7 +227,7 @@ public InitiateMultipartUploadResult createMultipartUpload(
checksumAlgorithm);
return new InitiateMultipartUploadResult(bucketName, key, multipartUpload.uploadId());
} catch (Exception e) {
- //something went wrong with writing the destination file, clean up ID from BucketStore.
+ // something went wrong with writing the destination file, clean up ID from BucketStore.
bucketStore.removeFromBucket(key, bucketName);
throw new IllegalStateException(String.format(
"Could prepare Multipart Upload. bucket=%s, key=%s, id=%s",
@@ -309,17 +310,13 @@ public ListMultipartUploadsResult listMultipartUploads(
);
}
- public void verifyPartNumberLimits(String partNumberString) {
- try {
- var partNumber = Integer.parseInt(partNumberString);
- if (partNumber < 1 || partNumber > 10000) {
- LOG.error("Multipart part number invalid. partNumber={}", partNumberString);
- throw INVALID_PART_NUMBER;
- }
- } catch (NumberFormatException nfe) {
- LOG.error("Multipart part number invalid. partNumber={}", partNumberString, nfe);
+ public int verifyPartNumberLimits(String partNumber) {
+ int number = Integer.parseInt(partNumber);
+ if (number < 1 || number > 10000) {
+ LOG.error("Multipart part number invalid. partNumber={}", partNumber);
throw INVALID_PART_NUMBER;
}
+ return number;
}
public void verifyMultipartParts(
diff --git a/server/src/main/java/com/adobe/testing/s3mock/service/ObjectService.java b/server/src/main/java/com/adobe/testing/s3mock/service/ObjectService.java
index c2a08953d..8d2dab691 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/service/ObjectService.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/service/ObjectService.java
@@ -114,7 +114,7 @@ public S3ObjectMetadata copyS3Object(
destinationBucketMetadata, destinationId, destinationKey,
encryptionHeaders, storeHeaders, userMetadata, storageClass);
} catch (Exception e) {
- //something went wrong with writing the destination file, clean up ID from BucketStore.
+ // something went wrong with writing the destination file, clean up ID from BucketStore.
bucketStore.removeFromBucket(destinationKey, destinationBucketName);
throw e;
}
@@ -280,7 +280,7 @@ public void verifyMd5(InputStream inputStream, @Nullable String contentMd5) {
}
/**
- * FOr copy use-cases, we need to return PRECONDITION_FAILED only.
+ * For copy use-cases, we need to return PRECONDITION_FAILED only.
*/
public void verifyObjectMatchingForCopy(
@Nullable List match,
@@ -369,7 +369,7 @@ public void verifyObjectMatching(
var setMatch = match != null && !match.isEmpty();
if (setMatch) {
if (match.contains(WILDCARD_ETAG) || match.contains(WILDCARD) || match.contains(etag)) {
- //request cares only that the object exists or that the etag matches.
+ // request cares only that the object exists or that the etag matches.
LOG.debug("Object {} exists", s3ObjectMetadata.key());
return;
} else if (!match.contains(etag)) {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/BucketStore.java b/server/src/main/java/com/adobe/testing/s3mock/store/BucketStore.java
index a78d25e4d..b2f618a12 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/store/BucketStore.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/store/BucketStore.java
@@ -281,7 +281,12 @@ private void writeToDisk(BucketMetadata bucketMetadata) {
}
private Path getBucketFolderPath(String bucketName) {
- return Paths.get(rootFolder.getPath(), bucketName);
+ var rootPath = rootFolder.toPath().toAbsolutePath().normalize();
+ var candidate = rootPath.resolve(bucketName).normalize();
+ if (!candidate.startsWith(rootPath)) {
+ throw new IllegalArgumentException("Invalid bucket name (path traversal detected).");
+ }
+ return candidate;
}
private File createBucketFolder(String bucketName) {
diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/MultipartStore.java b/server/src/main/java/com/adobe/testing/s3mock/store/MultipartStore.java
index 7e4b2e9dd..1e175983d 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/store/MultipartStore.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/store/MultipartStore.java
@@ -84,13 +84,13 @@ public MultipartUpload createMultipartUpload(
BucketMetadata bucket,
String key,
UUID id,
- String contentType,
+ @Nullable String contentType,
Map storeHeaders,
Owner owner,
Owner initiator,
Map userMetadata,
Map encryptionHeaders,
- List tags,
+ @Nullable List tags,
StorageClass storageClass,
@Nullable ChecksumType checksumType,
@Nullable ChecksumAlgorithm checksumAlgorithm) {
@@ -140,16 +140,13 @@ public List listMultipartUploads(BucketMetadata bucketMetadata,
.map(
path -> {
var fileName = path.getFileName().toString();
- var uploadMetadata = getUploadMetadata(bucketMetadata, UUID.fromString(fileName));
- if (uploadMetadata != null && !uploadMetadata.completed()) {
- return uploadMetadata.upload();
- } else {
- return null;
- }
+ return getUploadMetadata(bucketMetadata, UUID.fromString(fileName));
}
)
.filter(Objects::nonNull)
- .filter(multipartUpload -> isBlank(prefix) || multipartUpload.key().startsWith(prefix))
+ .filter(uploadMetadata -> !uploadMetadata.completed())
+ .map(MultipartUploadInfo::upload)
+ .filter(upload -> isBlank(prefix) || upload.key().startsWith(prefix))
.toList();
} catch (IOException e) {
throw new IllegalStateException("Could not load buckets from data directory ", e);
@@ -198,7 +195,7 @@ public String putPart(
BucketMetadata bucket,
UUID id,
UUID uploadId,
- String partNumber,
+ Integer partNumber,
Path path,
Map encryptionHeaders) {
var file = inputPathToFile(path, getPartPath(bucket, uploadId, partNumber));
@@ -253,7 +250,7 @@ public CompleteMultipartUploadResult completeMultipartUpload(
uploadInfo.storageClass(),
ChecksumType.COMPOSITE
);
- //delete parts and update MultipartInfo
+ // delete parts and update MultipartInfo
partsPaths.forEach(partPath -> FileUtils.deleteQuietly(partPath.toFile()));
var completedUploadInfo = uploadInfo.complete();
writeMetafile(bucket, completedUploadInfo);
@@ -348,7 +345,7 @@ public String copyPart(
BucketMetadata bucket,
UUID id,
@Nullable HttpRange copyRange,
- String partNumber,
+ Integer partNumber,
BucketMetadata destinationBucket,
UUID destinationId,
UUID uploadId,
@@ -368,7 +365,7 @@ public String copyPart(
private static InputStream toInputStream(List paths) {
var result = new ArrayList();
- for (var path: paths) {
+ for (var path : paths) {
try {
result.add(Files.newInputStream(path));
} catch (IOException e) {
@@ -413,15 +410,11 @@ private String copyPartToFile(
return hexDigest(partFile);
}
- @Nullable
private File createPartFile(
BucketMetadata bucket,
- @Nullable UUID id,
+ UUID id,
UUID uploadId,
- String partNumber) {
- if (id == null) {
- return null;
- }
+ Integer partNumber) {
var partFile = getPartPath(
bucket,
uploadId,
@@ -439,12 +432,6 @@ private File createPartFile(
return partFile;
}
- private static void validatePartNumber(String partNumber) {
- if (!partNumber.matches("^[1-9][0-9]*$")) {
- throw new IllegalArgumentException("Invalid part number: " + partNumber);
- }
- }
-
private void verifyMultipartUploadPreparation(
BucketMetadata bucket,
@Nullable UUID id,
@@ -475,8 +462,7 @@ private Path getMultipartsFolder(BucketMetadata bucket) {
return Paths.get(bucket.path().toString(), MULTIPARTS_FOLDER);
}
- private Path getPartPath(BucketMetadata bucket, UUID uploadId, String partNumber) {
- validatePartNumber(partNumber);
+ private Path getPartPath(BucketMetadata bucket, UUID uploadId, Integer partNumber) {
return getPartsFolder(bucket, uploadId).resolve(partNumber + PART_SUFFIX);
}
diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/MultipartUploadInfo.java b/server/src/main/java/com/adobe/testing/s3mock/store/MultipartUploadInfo.java
index 836a83b02..8ce72a29c 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/store/MultipartUploadInfo.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/store/MultipartUploadInfo.java
@@ -30,13 +30,13 @@
*/
public record MultipartUploadInfo(
MultipartUpload upload,
- String contentType,
+ @Nullable String contentType,
Map userMetadata,
Map storeHeaders,
Map encryptionHeaders,
String bucket,
@Nullable StorageClass storageClass,
- List tags,
+ @Nullable List tags,
@Nullable String checksum,
@Nullable ChecksumType checksumType,
@Nullable ChecksumAlgorithm checksumAlgorithm,
diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/ObjectStore.java b/server/src/main/java/com/adobe/testing/s3mock/store/ObjectStore.java
index a50d6c1ed..a9dca93c0 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/store/ObjectStore.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/store/ObjectStore.java
@@ -56,8 +56,8 @@ public class ObjectStore extends StoreBase {
private static final String VERSIONED_META_FILE = "%s-objectMetadata.json";
private static final String VERSIONED_DATA_FILE = "%s-binaryData";
private static final String VERSIONS_FILE = "versions.json";
- //if a bucket isn't version enabled, some APIs return "null" as the versionId for objects.
- //clients may also pass in "null" as a version, expecting the behaviour for non-versioned objects.
+ // if a bucket isn't version enabled, some APIs return "null" as the versionId for objects.
+ // clients may also pass in "null" as a version, expecting the behaviour for non-versioned objects.
private static final String NULL_VERSION = "null";
/**
@@ -98,7 +98,7 @@ public S3ObjectMetadata storeS3ObjectMetadata(
String versionId = null;
if (bucket.isVersioningEnabled()) {
var existingVersions = getS3ObjectVersions(bucket, id);
- if (existingVersions != null) {
+ if (!existingVersions.versions().isEmpty()) {
versionId = existingVersions.createVersion();
writeVersionsFile(bucket, id, existingVersions);
} else {
@@ -279,9 +279,7 @@ public void storeRetention(BucketMetadata bucket, UUID id, @Nullable String vers
public S3ObjectMetadata getS3ObjectMetadata(BucketMetadata bucket, UUID id, @Nullable String versionId) {
if (bucket.isVersioningEnabled() && versionId == null) {
var s3ObjectVersions = getS3ObjectVersions(bucket, id);
- if (s3ObjectVersions != null) {
- versionId = s3ObjectVersions.getLatestVersion();
- }
+ versionId = s3ObjectVersions.getLatestVersion();
}
var metaPath = getMetaFilePath(bucket, id, versionId);
@@ -297,7 +295,6 @@ public S3ObjectMetadata getS3ObjectMetadata(BucketMetadata bucket, UUID id, @Nul
return null;
}
- @Nullable
public S3ObjectVersions getS3ObjectVersions(BucketMetadata bucket, UUID id) {
var metaPath = getVersionFilePath(bucket, id);
@@ -310,15 +307,14 @@ public S3ObjectVersions getS3ObjectVersions(BucketMetadata bucket, UUID id) {
}
}
}
- return null;
+ return S3ObjectVersions.empty(id);
}
- @Nullable
public S3ObjectVersions createS3ObjectVersions(BucketMetadata bucket, UUID id) {
var metaPath = getVersionFilePath(bucket, id);
if (Files.exists(metaPath)) {
- //gracefully handle duplicate version creation
+ // gracefully handle duplicate version creation
return getS3ObjectVersions(bucket, id);
} else {
synchronized (lockStore.get(id)) {
@@ -469,15 +465,15 @@ private boolean doDeleteVersion(BucketMetadata bucket, UUID id, String versionId
synchronized (lockStore.get(id)) {
try {
var existingVersions = getS3ObjectVersions(bucket, id);
- if (existingVersions == null) {
- //no versions exist, nothing to delete.
+ if (existingVersions.versions().isEmpty()) {
+ // no versions exist, nothing to delete.
return false;
}
- if (existingVersions.versions().size() <= 1) {
- //this is the last version of an object, delete object completely.
+ if (existingVersions.versions().size() == 1) {
+ // this is the last version of an object, delete object completely.
return doDeleteObject(bucket, id);
} else {
- //there is at least one version of an object left, delete only the version.
+ // there is at least one version of an object left, delete only the version.
existingVersions.deleteVersion(versionId);
writeVersionsFile(bucket, id, existingVersions);
return false;
@@ -511,7 +507,7 @@ private boolean insertDeleteMarker(
synchronized (lockStore.get(id)) {
try {
var existingVersions = getS3ObjectVersions(bucket, id);
- if (existingVersions != null) {
+ if (!existingVersions.versions().isEmpty()) {
versionId = existingVersions.createVersion();
writeVersionsFile(bucket, id, existingVersions);
}
@@ -525,6 +521,7 @@ private boolean insertDeleteMarker(
/**
* Used to load metadata for all objects from a bucket when S3Mock starts.
+ *
* @param bucketMetadata metadata of existing bucket.
* @param ids ids of the keys to load
*/
@@ -533,7 +530,7 @@ void loadObjects(BucketMetadata bucketMetadata, Collection ids) {
for (var id : ids) {
lockStore.putIfAbsent(id, new Object());
var s3ObjectVersions = getS3ObjectVersions(bucketMetadata, id);
- if (s3ObjectVersions != null) {
+ if (!s3ObjectVersions.versions().isEmpty()) {
if (loadVersions(bucketMetadata, s3ObjectVersions)) {
loaded++;
}
diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/S3ObjectVersions.java b/server/src/main/java/com/adobe/testing/s3mock/store/S3ObjectVersions.java
index fe9063209..0d3adf899 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/store/S3ObjectVersions.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/store/S3ObjectVersions.java
@@ -50,4 +50,8 @@ public String getLatestVersion() {
public void deleteVersion(String versionId) {
versions.remove(versionId);
}
+
+ public static S3ObjectVersions empty(UUID id) {
+ return new S3ObjectVersions(id);
+ }
}
diff --git a/server/src/main/java/com/adobe/testing/s3mock/store/StoreConfiguration.java b/server/src/main/java/com/adobe/testing/s3mock/store/StoreConfiguration.java
index 15a8dab53..7aa4f67e6 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/store/StoreConfiguration.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/store/StoreConfiguration.java
@@ -55,9 +55,7 @@ ObjectStore objectStore(
var objectStore = new ObjectStore(S3_OBJECT_DATE_FORMAT, objectMapper);
for (var bucketName : bucketNames) {
var bucketMetadata = bucketStore.getBucketMetadata(bucketName);
- if (bucketMetadata != null) {
- objectStore.loadObjects(bucketMetadata, bucketMetadata.objects().values());
- }
+ objectStore.loadObjects(bucketMetadata, bucketMetadata.objects().values());
}
return objectStore;
}
@@ -73,10 +71,10 @@ BucketStore bucketStore(
Region mockRegion = region == null ? properties.region() : region;
var bucketStore = new BucketStore(rootFolder, S3_OBJECT_DATE_FORMAT, mockRegion.id(), objectMapper);
- //load existing buckets first
+ // load existing buckets first
bucketStore.loadBuckets(bucketNames);
- //load initialBuckets if not part of existing buckets
+ // load initialBuckets if not part of existing buckets
List initialBuckets = List.of();
if (!legacyProperties.initialBuckets().isEmpty()) {
initialBuckets = legacyProperties.initialBuckets();
@@ -179,7 +177,7 @@ File rootFolder(
if (root.exists()) {
LOG.info("Using existing folder \"{}\" as root folder. Will retain files on exit: {}",
root.getAbsolutePath(), properties.retainFilesOnExit());
- //TODO: need to validate folder structure here?
+ // TODO: need to validate folder structure here?
} else if (!root.mkdir()) {
throw new IllegalStateException("Root folder could not be created. Path: "
+ root.getAbsolutePath());
diff --git a/server/src/main/java/com/adobe/testing/s3mock/util/AwsChunkedDecodingChecksumInputStream.java b/server/src/main/java/com/adobe/testing/s3mock/util/AwsChunkedDecodingChecksumInputStream.java
index 7b55b9145..14ab37ce1 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/util/AwsChunkedDecodingChecksumInputStream.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/util/AwsChunkedDecodingChecksumInputStream.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2024 Adobe.
+ * Copyright 2017-2025 Adobe.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -65,7 +65,7 @@ public AwsChunkedDecodingChecksumInputStream(InputStream source, long decodedLen
@Override
public int read() throws IOException {
if (chunkLength == 0L) {
- //try to read chunk length
+ // try to read chunk length
var hexLengthBytes = readHexLength();
if (hexLengthBytes.length == 0) {
return -1;
@@ -74,7 +74,7 @@ public int read() throws IOException {
setChunkLength(hexLengthBytes);
if (chunkLength == 0L) {
- //chunk length found, but was "0". Try and find the checksum.
+ // chunk length found, but was "0". Try and find the checksum.
extractAlgorithmAndChecksum();
return -1;
}
diff --git a/server/src/main/java/com/adobe/testing/s3mock/util/AwsUnsignedChunkedDecodingChecksumInputStream.java b/server/src/main/java/com/adobe/testing/s3mock/util/AwsUnsignedChunkedDecodingChecksumInputStream.java
index ba06f280b..6ffedefeb 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/util/AwsUnsignedChunkedDecodingChecksumInputStream.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/util/AwsUnsignedChunkedDecodingChecksumInputStream.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2024 Adobe.
+ * Copyright 2017-2025 Adobe.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -65,7 +65,7 @@ public AwsUnsignedChunkedDecodingChecksumInputStream(InputStream source, long de
@Override
public int read() throws IOException {
if (chunkLength == 0L) {
- //try to read chunk length
+ // try to read chunk length
var hexLengthBytes = readHexlength();
if (hexLengthBytes.length == 0) {
return -1;
@@ -74,7 +74,7 @@ public int read() throws IOException {
setChunkLength(hexLengthBytes);
if (chunkLength == 0L) {
- //chunk length found, but was "0". Try and find the checksum.
+ // chunk length found, but was "0". Try and find the checksum.
extractAlgorithmAndChecksum();
return -1;
}
diff --git a/server/src/main/java/com/adobe/testing/s3mock/util/HeaderUtil.java b/server/src/main/java/com/adobe/testing/s3mock/util/HeaderUtil.java
index 2ae19c79a..c0700ad0c 100644
--- a/server/src/main/java/com/adobe/testing/s3mock/util/HeaderUtil.java
+++ b/server/src/main/java/com/adobe/testing/s3mock/util/HeaderUtil.java
@@ -65,6 +65,7 @@ private HeaderUtil() {
/**
* Creates response headers from S3ObjectMetadata user metadata.
+ *
* @param s3ObjectMetadata {@link S3ObjectMetadata} S3Object where user metadata will be extracted
*/
public static Map userMetadataHeadersFrom(S3ObjectMetadata s3ObjectMetadata) {
@@ -75,7 +76,7 @@ public static Map userMetadataHeadersFrom(S3ObjectMetadata s3Obj
if (CI.startsWith(key, HEADER_X_AMZ_META_PREFIX)) {
metadataHeaders.put(key, value);
} else {
- //support case where metadata was stored locally in legacy format
+ // support case where metadata was stored locally in legacy format
metadataHeaders.put(HEADER_X_AMZ_META_PREFIX + key, value);
}
});
@@ -85,6 +86,7 @@ public static Map userMetadataHeadersFrom(S3ObjectMetadata s3Obj
/**
* Creates response headers from S3ObjectMetadata storageclass.
+ *
* @param s3ObjectMetadata {@link S3ObjectMetadata} S3Object where data will be extracted
*/
public static Map storageClassHeadersFrom(S3ObjectMetadata s3ObjectMetadata) {
@@ -98,6 +100,7 @@ public static Map storageClassHeadersFrom(S3ObjectMetadata s3Obj
/**
* Retrieves user metadata from request.
+ *
* @param headers {@link HttpHeaders}
* @return map containing user meta-data
*/
@@ -108,6 +111,7 @@ public static Map userMetadataFrom(HttpHeaders headers) {
/**
* Retrieves headers to store from request.
+ *
* @param headers {@link HttpHeaders}
* @return map containing headers to store
*/
@@ -124,6 +128,7 @@ public static Map storeHeadersFrom(HttpHeaders headers) {
/**
* Retrieves headers encryption headers from request.
+ *
* @param headers {@link HttpHeaders}
* @return map containing encryption headers
*/
@@ -173,7 +178,7 @@ public static boolean isChunkedEncoding(HttpHeaders headers) {
* the content-encoding header empty and does not return this header when your retrieve the
* object.
*
- * See API
+ * See API
*/
private static boolean isOnlyChunkedEncoding(HttpHeaders headers) {
var contentEncodingHeaders = headers.get(HttpHeaders.CONTENT_ENCODING);
@@ -190,7 +195,7 @@ public static MediaType mediaTypeFrom(@Nullable String contentType) {
}
}
- public static Map overrideHeadersFrom(Map queryParams) {
+ public static Map overrideHeadersFrom(Map queryParams) {
return queryParams
.entrySet()
.stream()
diff --git a/server/src/main/resources/application.properties b/server/src/main/resources/application.properties
index d03aac3d5..76fa162be 100644
--- a/server/src/main/resources/application.properties
+++ b/server/src/main/resources/application.properties
@@ -28,3 +28,5 @@ com.adobe.testing.s3mock.region=us-east-1
server.tomcat.max-swallow-size=10MB
management.endpoints.web.discovery.enabled=false
+
+spring.main.lazy-initialization=true
diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/BaseControllerTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/BaseControllerTest.kt
index 40aa58bf4..3d520b343 100644
--- a/server/src/test/kotlin/com/adobe/testing/s3mock/BaseControllerTest.kt
+++ b/server/src/test/kotlin/com/adobe/testing/s3mock/BaseControllerTest.kt
@@ -35,7 +35,6 @@ import com.fasterxml.jackson.dataformat.xml.XmlMapper
import com.fasterxml.jackson.dataformat.xml.deser.FromXmlParser
import com.fasterxml.jackson.dataformat.xml.ser.ToXmlGenerator
import java.nio.file.Path
-import java.nio.file.Paths
import java.time.Instant
import java.util.UUID
@@ -47,12 +46,11 @@ internal abstract class BaseControllerTest {
.enable(ToXmlGenerator.Feature.AUTO_DETECT_XSI_TYPE)
.enable(FromXmlParser.Feature.AUTO_DETECT_XSI_TYPE)
.build()
-
- init {
- MAPPER.setSerializationInclusion(JsonInclude.Include.NON_EMPTY)
- MAPPER.factory.xmlOutputFactory
- .setProperty(WstxOutputProperties.P_USE_DOUBLE_QUOTES_IN_XML_DECL, true)
- }
+ .apply {
+ setSerializationInclusion(JsonInclude.Include.NON_EMPTY)
+ factory.xmlOutputFactory
+ .setProperty(WstxOutputProperties.P_USE_DOUBLE_QUOTES_IN_XML_DECL, true)
+ }
fun from(e: S3Exception): ErrorResponse = ErrorResponse(
e.code,
@@ -64,36 +62,35 @@ internal abstract class BaseControllerTest {
fun bucketMetadata(
name: String = TEST_BUCKET_NAME,
creationDate: String = Instant.now().toString(),
- path: Path = Paths.get("/tmp/foo/1"),
+ path: Path = Path.of("/tmp/foo/1"),
bucketRegion: String = "us-east-1",
versioningConfiguration: VersioningConfiguration? = null,
bucketInfo: BucketInfo? = null,
locationInfo: LocationInfo? = null
- ): BucketMetadata {
- return BucketMetadata(
- name,
- creationDate,
- versioningConfiguration,
- null,
- null,
- null,
- path,
- bucketRegion,
- bucketInfo,
- locationInfo,
- )
- }
+ ): BucketMetadata = BucketMetadata(
+ name,
+ creationDate,
+ versioningConfiguration,
+ null,
+ null,
+ null,
+ path,
+ bucketRegion,
+ bucketInfo,
+ locationInfo,
+ )
fun s3ObjectEncrypted(
key: String,
digest: String = UUID.randomUUID().toString(),
encryption: String?,
encryptionKey: String?
- ): S3ObjectMetadata {
- return s3ObjectMetadata(
- key, digest, encryption, encryptionKey,
- )
- }
+ ): S3ObjectMetadata = s3ObjectMetadata(
+ key = key,
+ digest = digest,
+ encryption = encryption,
+ encryptionKey = encryptionKey,
+ )
fun s3ObjectMetadata(
key: String,
@@ -109,32 +106,30 @@ internal abstract class BaseControllerTest {
checksumAlgorithm: ChecksumAlgorithm? = null,
userMetadata: Map? = null,
storeHeaders: Map? = null,
- ): S3ObjectMetadata {
- return S3ObjectMetadata(
- UUID.randomUUID(),
- key,
- Path.of(UPLOAD_FILE_NAME).toFile().length().toString(),
- "1234",
- digest,
- "text/plain",
- 1L,
- Path.of(UPLOAD_FILE_NAME),
- userMetadata,
- tags,
- legalHold,
- retention,
- Owner.DEFAULT_OWNER,
- storeHeaders,
- encryptionHeaders(encryption, encryptionKey),
- checksumAlgorithm,
- checksum,
- null,
- null,
- versionId,
- false,
- checksumType
- )
- }
+ ): S3ObjectMetadata = S3ObjectMetadata(
+ UUID.randomUUID(),
+ key,
+ Path.of(UPLOAD_FILE_NAME).toFile().length().toString(),
+ "1234",
+ digest,
+ "text/plain",
+ 1L,
+ Path.of(UPLOAD_FILE_NAME),
+ userMetadata,
+ tags,
+ legalHold,
+ retention,
+ Owner.DEFAULT_OWNER,
+ storeHeaders,
+ encryptionHeaders(encryption, encryptionKey),
+ checksumAlgorithm,
+ checksum,
+ null,
+ null,
+ versionId,
+ false,
+ checksumType
+ )
private fun encryptionHeaders(encryption: String?, encryptionKey: String?): Map = buildMap {
if (encryption != null) {
@@ -151,7 +146,7 @@ internal abstract class BaseControllerTest {
const val TEST_BUCKET_NAME = "test-bucket"
val CREATION_DATE = Instant.now().toString()
const val BUCKET_REGION = "us-west-2"
- val BUCKET_PATH: Path = Paths.get("/tmp/foo/1")
+ val BUCKET_PATH: Path = Path.of("/tmp/foo/1")
val TEST_BUCKET = Bucket(
TEST_BUCKET_NAME,
BUCKET_REGION,
diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/BucketControllerTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/BucketControllerTest.kt
index 25b4f69bf..23ecf2f10 100644
--- a/server/src/test/kotlin/com/adobe/testing/s3mock/BucketControllerTest.kt
+++ b/server/src/test/kotlin/com/adobe/testing/s3mock/BucketControllerTest.kt
@@ -56,8 +56,8 @@ import com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_BUCKET_REGION
import com.adobe.testing.s3mock.util.AwsHttpParameters
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
-import org.mockito.ArgumentMatchers.any
-import org.mockito.ArgumentMatchers.anyString
+import org.mockito.kotlin.any
+import org.mockito.kotlin.anyOrNull
import org.mockito.kotlin.doThrow
import org.mockito.kotlin.eq
import org.mockito.kotlin.verify
@@ -75,7 +75,7 @@ import org.springframework.test.web.servlet.result.MockMvcResultMatchers.content
import org.springframework.test.web.servlet.result.MockMvcResultMatchers.header
import org.springframework.test.web.servlet.result.MockMvcResultMatchers.status
import org.springframework.web.util.UriComponentsBuilder
-import java.nio.file.Paths
+import java.nio.file.Path
import java.time.Instant
@MockitoBean(types = [KmsKeyStore::class, ObjectService::class, MultipartService::class, ObjectController::class, MultipartController::class])
@@ -95,12 +95,11 @@ internal class BucketControllerTest : BaseControllerTest() {
.accept(MediaType.APPLICATION_XML)
.contentType(MediaType.APPLICATION_XML))
.andExpect(status().isOk)
- .andReturn()
}
@Test
fun `HEAD bucket returns bucketInfo and locationInfo headers if available`() {
- whenever(bucketService.bucketLocationHeaders(any(BucketMetadata::class.java))).thenCallRealMethod()
+ whenever(bucketService.bucketLocationHeaders(any())).thenCallRealMethod()
givenBucket(bucketMetadata(
bucketRegion = BUCKET_REGION,
bucketInfo = BucketInfo(SINGLE_AVAILABILITY_ZONE, DIRECTORY),
@@ -119,7 +118,7 @@ internal class BucketControllerTest : BaseControllerTest() {
@Test
fun `HEAD bucket for non-existing bucket returns 404`() {
doThrow(S3Exception.NO_SUCH_BUCKET).whenever(bucketService)
- .verifyBucketExists(anyString())
+ .verifyBucketExists(any())
mockMvc.perform(
get("/test-bucket")
@@ -194,7 +193,7 @@ internal class BucketControllerTest : BaseControllerTest() {
@Throws(Exception::class)
fun testDeleteBucket_NotFound() {
doThrow(S3Exception.NO_SUCH_BUCKET)
- .whenever(bucketService).verifyBucketIsEmpty(anyString())
+ .whenever(bucketService).verifyBucketIsEmpty(any())
mockMvc.perform(
delete("/test-bucket")
@@ -210,7 +209,7 @@ internal class BucketControllerTest : BaseControllerTest() {
fun testDeleteBucket_Conflict() {
givenBucket()
doThrow(S3Exception.BUCKET_NOT_EMPTY)
- .whenever(bucketService).verifyBucketIsEmpty(anyString())
+ .whenever(bucketService).verifyBucketIsEmpty(any())
whenever(bucketService.getS3Objects(TEST_BUCKET_NAME, null))
.thenReturn(
@@ -235,7 +234,7 @@ internal class BucketControllerTest : BaseControllerTest() {
givenBucket()
doThrow(IllegalStateException("THIS IS EXPECTED"))
- .whenever(bucketService).verifyBucketIsEmpty(anyString())
+ .whenever(bucketService).verifyBucketIsEmpty(any())
mockMvc.perform(
delete("/test-bucket")
@@ -759,12 +758,12 @@ internal class BucketControllerTest : BaseControllerTest() {
whenever(
bucketService.listVersions(
eq(TEST_BUCKET_NAME),
- any(),
- any(),
- any(),
+ anyOrNull(),
+ anyOrNull(),
+ anyOrNull(),
eq(MAX_KEYS_DEFAULT),
- any(),
- any()
+ anyOrNull(),
+ anyOrNull()
)
).thenReturn(expected)
@@ -784,54 +783,52 @@ internal class BucketControllerTest : BaseControllerTest() {
}
- private fun givenBuckets(count: Int = 0,
- prefix: String? = null,
- continuationToken: String? = null,
- region: Region? = null,
- maxBuckets: Int = MAX_BUCKETS_DEFAULT): ListAllMyBucketsResult {
-
+ private fun givenBuckets(
+ count: Int = 0,
+ prefix: String? = null,
+ continuationToken: String? = null,
+ region: Region? = null,
+ maxBuckets: Int = MAX_BUCKETS_DEFAULT,
+ ): ListAllMyBucketsResult {
val namePrefix = "test-bucket"
- val bucketList = mutableListOf()
-
- for(i in 0 until count) {
- val bucket = Bucket(
+ val bucketList = List(count) { i ->
+ Bucket(
"$namePrefix-$i",
BUCKET_REGION,
Instant.now().toString(),
- Paths.get("/tmp/foo/$i")
+ Path.of("/tmp/foo/$i"),
)
- bucketList.add(bucket)
}
val expected = ListAllMyBucketsResult(
TEST_OWNER,
Buckets(bucketList),
prefix,
- continuationToken
- )
- whenever(bucketService.listBuckets(
- region,
continuationToken,
- maxBuckets,
- prefix)
+ )
+ whenever(
+ bucketService.listBuckets(
+ region,
+ continuationToken,
+ maxBuckets,
+ prefix,
+ )
).thenReturn(expected)
return expected
}
- private fun bucketContents(id: String): S3Object {
- return S3Object(
- ChecksumAlgorithm.SHA256,
- ChecksumType.FULL_OBJECT,
- "etag",
- id,
- "1234",
- TEST_OWNER,
- null,
- "size",
- StorageClass.STANDARD
- )
- }
+ private fun bucketContents(id: String) = S3Object(
+ ChecksumAlgorithm.SHA256,
+ ChecksumType.FULL_OBJECT,
+ "etag",
+ id,
+ "1234",
+ TEST_OWNER,
+ null,
+ "size",
+ StorageClass.STANDARD
+ )
private fun givenBucket(bucketMetadata: BucketMetadata = bucketMetadata()) {
whenever(bucketService.getBucket(TEST_BUCKET_NAME)).thenReturn(TEST_BUCKET)
diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/ChecksumTestUtil.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/ChecksumTestUtil.kt
index 232a26aaf..1a48cf903 100644
--- a/server/src/test/kotlin/com/adobe/testing/s3mock/ChecksumTestUtil.kt
+++ b/server/src/test/kotlin/com/adobe/testing/s3mock/ChecksumTestUtil.kt
@@ -17,7 +17,6 @@
package com.adobe.testing.s3mock
import software.amazon.awssdk.checksums.DefaultChecksumAlgorithm
-import software.amazon.awssdk.checksums.SdkChecksum
import software.amazon.awssdk.checksums.spi.ChecksumAlgorithm
import software.amazon.awssdk.http.auth.aws.internal.signer.CredentialScope
import software.amazon.awssdk.http.auth.aws.internal.signer.RollingSigner
@@ -43,14 +42,14 @@ object ChecksumTestUtil {
signed: Boolean = true,
algorithm: ChecksumAlgorithm? = null,
): Pair {
- val builder = ChunkedEncodedInputStream.builder()
- builder.inputStream(Files.newInputStream(input.toPath()))
- if (algorithm != null) {
- setupChecksumTrailer(builder, algorithm)
- }
- if (signed) {
- setupSignedTrailerAndExtension(builder)
+ val builder = ChunkedEncodedInputStream.builder().apply {
+ inputStream(Files.newInputStream(input.toPath()))
+ algorithm?.let { setupChecksumTrailer(this, it) }
+ if (signed) {
+ setupSignedTrailerAndExtension(this)
+ }
}
+
val chunkedEncodingInputStream: InputStream = builder
.chunkSize(4000)
.build()
@@ -83,7 +82,7 @@ object ChecksumTestUtil {
val sdkChecksum = ChecksumUtil.fromChecksumAlgorithm(checksumAlgorithm)
val checksumInputStream = ChecksumInputStream(
builder.inputStream(),
- mutableSetOf(sdkChecksum)
+ mutableSetOf(sdkChecksum)
)
val checksumTrailer: TrailerProvider = ChecksumTrailerProvider(sdkChecksum, checksumHeaderName)
@@ -92,13 +91,11 @@ object ChecksumTestUtil {
}
@JvmStatic
- fun algorithms(): Stream {
- return listOf(
+ fun algorithms(): Stream = Stream.of(
DefaultChecksumAlgorithm.SHA256,
DefaultChecksumAlgorithm.SHA1,
DefaultChecksumAlgorithm.CRC32,
DefaultChecksumAlgorithm.CRC32C,
DefaultChecksumAlgorithm.CRC64NVME
- ).stream()
- }
+ )
}
diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/ContextPathObjectStoreControllerTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/ContextPathObjectStoreControllerTest.kt
index 9fe216c24..4d8b69860 100644
--- a/server/src/test/kotlin/com/adobe/testing/s3mock/ContextPathObjectStoreControllerTest.kt
+++ b/server/src/test/kotlin/com/adobe/testing/s3mock/ContextPathObjectStoreControllerTest.kt
@@ -46,7 +46,6 @@ internal class ContextPathObjectStoreControllerTest : BaseControllerTest() {
private lateinit var mockMvc: MockMvc
@Test
- @Throws(Exception::class)
fun testListBuckets_Ok() {
val bucketList = listOf(
TEST_BUCKET,
diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/HttpRangeHeaderConverterTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/HttpRangeHeaderConverterTest.kt
index 4be932288..ee130928a 100644
--- a/server/src/test/kotlin/com/adobe/testing/s3mock/HttpRangeHeaderConverterTest.kt
+++ b/server/src/test/kotlin/com/adobe/testing/s3mock/HttpRangeHeaderConverterTest.kt
@@ -23,9 +23,9 @@ internal class HttpRangeHeaderConverterTest {
fun testRangeHeader() {
val iut = HttpRangeHeaderConverter()
val rangeHeader = "bytes=1-2"
- val actual = iut.convert(rangeHeader)
- assertThat(actual).isNotNull()
- assertThat(requireNotNull(actual).getRangeStart(Long.MAX_VALUE)).isEqualTo(1)
+ val actual = requireNotNull(iut.convert(rangeHeader))
+
+ assertThat(actual.getRangeStart(Long.MAX_VALUE)).isEqualTo(1)
assertThat(actual.getRangeEnd(Long.MAX_VALUE)).isEqualTo(2)
}
}
diff --git a/server/src/test/kotlin/com/adobe/testing/s3mock/MultipartControllerTest.kt b/server/src/test/kotlin/com/adobe/testing/s3mock/MultipartControllerTest.kt
index 8ab01a9b9..80a23dbdc 100644
--- a/server/src/test/kotlin/com/adobe/testing/s3mock/MultipartControllerTest.kt
+++ b/server/src/test/kotlin/com/adobe/testing/s3mock/MultipartControllerTest.kt
@@ -78,7 +78,6 @@ internal class MultipartControllerTest : BaseControllerTest() {
private lateinit var mockMvc: MockMvc
@Test
- @Throws(Exception::class)
fun testCompleteMultipart_BadRequest_uploadTooSmall() {
givenBucket()
val parts = listOf(
@@ -86,19 +85,20 @@ internal class MultipartControllerTest : BaseControllerTest() {
createPart(1, 5L)
)
- val uploadRequest = CompleteMultipartUpload(ArrayList())
- for (part in parts) {
- uploadRequest.addPart(
- CompletedPart(
- null,
- null,
- null,
- null,
- null,
- part.etag,
- part.partNumber
+ val uploadRequest = CompleteMultipartUpload(mutableListOf()).apply {
+ parts.forEach { part ->
+ addPart(
+ CompletedPart(
+ null,
+ null,
+ null,
+ null,
+ null,
+ part.etag,
+ part.partNumber
+ )
)
- )
+ }
}
val key = "sampleFile.txt"
@@ -128,7 +128,6 @@ internal class MultipartControllerTest : BaseControllerTest() {
}
@Test
- @Throws(Exception::class)
fun testCompleteMultipart_BadRequest_uploadIdNotFound() {
givenBucket()
val uploadId = UUID.randomUUID()
@@ -147,19 +146,20 @@ internal class MultipartControllerTest : BaseControllerTest() {
anyList()
)
- val uploadRequest = CompleteMultipartUpload(ArrayList())
- for (part in parts) {
- uploadRequest.addPart(
- CompletedPart(
- null,
- null,
- null,
- null,
- null,
- part.etag,
- part.partNumber
+ val uploadRequest = CompleteMultipartUpload(mutableListOf()).apply {
+ parts.forEach { part ->
+ addPart(
+ CompletedPart(
+ null,
+ null,
+ null,
+ null,
+ null,
+ part.etag,
+ part.partNumber
+ )
)
- )
+ }
}
val key = "sampleFile.txt"
@@ -180,7 +180,6 @@ internal class MultipartControllerTest : BaseControllerTest() {
}
@Test
- @Throws(Exception::class)
fun testCompleteMultipart_BadRequest_partNotFound() {
givenBucket()
val key = "sampleFile.txt"
@@ -197,19 +196,20 @@ internal class MultipartControllerTest : BaseControllerTest() {
anyList()
)
- val uploadRequest = CompleteMultipartUpload(ArrayList())
- for (part in requestParts) {
- uploadRequest.addPart(
- CompletedPart(
- null,
- null,
- null,
- null,
- null,
- part.etag,
- part.partNumber
+ val uploadRequest = CompleteMultipartUpload(mutableListOf()).apply {
+ requestParts.forEach { part ->
+ addPart(
+ CompletedPart(
+ null,
+ null,
+ null,
+ null,
+ null,
+ part.etag,
+ part.partNumber
+ )
)
- )
+ }
}
val uri = UriComponentsBuilder
@@ -228,7 +228,6 @@ internal class MultipartControllerTest : BaseControllerTest() {
}
@Test
- @Throws(Exception::class)
fun testCompleteMultipart_BadRequest_invalidPartOrder() {
givenBucket()
@@ -249,19 +248,20 @@ internal class MultipartControllerTest : BaseControllerTest() {
createPart(0, 5L)
)
- val uploadRequest = CompleteMultipartUpload(ArrayList())
- for (part in requestParts) {
- uploadRequest.addPart(
- CompletedPart(
- null,
- null,
- null,
- null,
- null,
- part.etag,
- part.partNumber
+ val uploadRequest = CompleteMultipartUpload(mutableListOf()).apply {
+ requestParts.forEach { part ->
+ addPart(
+ CompletedPart(
+ null,
+ null,
+ null,
+ null,
+ null,
+ part.etag,
+ part.partNumber
+ )
)
- )
+ }
}
val uri = UriComponentsBuilder
@@ -288,9 +288,10 @@ internal class MultipartControllerTest : BaseControllerTest() {
val uploadId = UUID.randomUUID()
// parts
- val uploadRequest = CompleteMultipartUpload(ArrayList())
- uploadRequest.addPart(CompletedPart(null, null, null, null, null, "etag1", 1))
- uploadRequest.addPart(CompletedPart(null, null, null, null, null, "etag2", 2))
+ val uploadRequest = CompleteMultipartUpload(mutableListOf()).apply {
+ addPart(CompletedPart(null, null, null, null, null, "etag1", 1))
+ addPart(CompletedPart(null, null, null, null, null, "etag2", 2))
+ }
// object exists and matches
val s3meta = s3ObjectMetadata(key, UUID.randomUUID().toString())
@@ -368,8 +369,9 @@ internal class MultipartControllerTest : BaseControllerTest() {
val key = "ver/key.txt"
val uploadId = UUID.randomUUID()
- val uploadRequest = CompleteMultipartUpload(ArrayList())
- uploadRequest.addPart(CompletedPart(null, null, null, null, null, "etag1", 1))
+ val uploadRequest = CompleteMultipartUpload(mutableListOf()).apply {
+ addPart(CompletedPart(null, null, null, null, null, "etag1", 1))
+ }
val s3meta = s3ObjectMetadata(key, UUID.randomUUID().toString())
whenever(objectService.getObject(TEST_BUCKET_NAME, key, null)).thenReturn(s3meta)
@@ -432,8 +434,9 @@ internal class MultipartControllerTest : BaseControllerTest() {
val key = "nover/key.txt"
val uploadId = UUID.randomUUID()
- val uploadRequest = CompleteMultipartUpload(ArrayList())
- uploadRequest.addPart(CompletedPart(null, null, null, null, null, "etag1", 1))
+ val uploadRequest = CompleteMultipartUpload(mutableListOf()).apply {
+ addPart(CompletedPart(null, null, null, null, null, "etag1", 1))
+ }
val s3meta = s3ObjectMetadata(key, UUID.randomUUID().toString())
whenever(objectService.getObject(TEST_BUCKET_NAME, key, null)).thenReturn(s3meta)
@@ -496,8 +499,9 @@ internal class MultipartControllerTest : BaseControllerTest() {
val key = "pre/key.txt"
val uploadId = UUID.randomUUID()
- val uploadRequest = CompleteMultipartUpload(ArrayList())
- uploadRequest.addPart(CompletedPart(null, null, null, null, null, "etag1", 1))
+ val uploadRequest = CompleteMultipartUpload(mutableListOf()).apply {
+ addPart(CompletedPart(null, null, null, null, null, "etag1", 1))
+ }
val s3meta = s3ObjectMetadata(key, UUID.randomUUID().toString())
whenever(objectService.getObject(TEST_BUCKET_NAME, key, null)).thenReturn(s3meta)
@@ -533,8 +537,9 @@ internal class MultipartControllerTest : BaseControllerTest() {
val key = "missing-bucket/key.txt"
val uploadId = UUID.randomUUID()
- val uploadRequest = CompleteMultipartUpload(ArrayList())
- uploadRequest.addPart(CompletedPart(null, null, null, null, null, "etag1", 1))
+ val uploadRequest = CompleteMultipartUpload(mutableListOf()).apply {
+ addPart(CompletedPart(null, null, null, null, null, "etag1", 1))
+ }
val uri = UriComponentsBuilder
.fromUriString("/${TEST_BUCKET_NAME}/$key")
@@ -564,7 +569,7 @@ internal class MultipartControllerTest : BaseControllerTest() {
.whenever(multipartService)
.verifyMultipartUploadExists(TEST_BUCKET_NAME, uploadId, true)
- val uploadRequest = CompleteMultipartUpload(ArrayList())
+ val uploadRequest = CompleteMultipartUpload(mutableListOf())
uploadRequest.addPart(CompletedPart(null, null, null, null, null, "etag1", 1))
val uri = UriComponentsBuilder
@@ -1042,12 +1047,13 @@ internal class MultipartControllerTest : BaseControllerTest() {
fun testUploadPart_Ok_EtagReturned() {
val bucketMeta = bucketMetadata()
whenever(bucketService.verifyBucketExists(TEST_BUCKET_NAME)).thenReturn(bucketMeta)
+ whenever(multipartService.verifyPartNumberLimits("1")).thenReturn(1)
val uploadId = UUID.randomUUID()
val temp = java.nio.file.Files.createTempFile("junie", "part")
whenever(multipartService.toTempFile(any(), any())).thenReturn(Pair.of(temp, null))
whenever(
- multipartService.putPart(eq(TEST_BUCKET_NAME), eq("my/key.txt"), eq(uploadId), eq("1"), eq(temp), any())
+ multipartService.putPart(eq(TEST_BUCKET_NAME), eq("my/key.txt"), eq(uploadId), eq(1), eq(temp), any())
).thenReturn("etag-123")
val uri = UriComponentsBuilder
@@ -1075,6 +1081,7 @@ internal class MultipartControllerTest : BaseControllerTest() {
)
val bucketMeta = bucketMetadata(versioningConfiguration = versioningConfiguration)
whenever(bucketService.verifyBucketExists(TEST_BUCKET_NAME)).thenReturn(bucketMeta)
+ whenever(multipartService.verifyPartNumberLimits("1")).thenReturn(1)
val s3meta = s3ObjectMetadata(
key = "source/key.txt",
@@ -1094,7 +1101,7 @@ internal class MultipartControllerTest : BaseControllerTest() {
any(),
any(),
anyOrNull(),
- eq("1"),
+ eq(1),
any(),
any(),
any(),
@@ -1156,6 +1163,7 @@ internal class MultipartControllerTest : BaseControllerTest() {
fun testUploadPartCopy_InvalidPartNumber_BadRequest() {
val bucketMeta = bucketMetadata()
whenever(bucketService.verifyBucketExists(TEST_BUCKET_NAME)).thenReturn(bucketMeta)
+ whenever(multipartService.verifyPartNumberLimits("1")).thenReturn(1)
doThrow(S3Exception.INVALID_PART_NUMBER)
.whenever(multipartService)
@@ -1185,6 +1193,7 @@ internal class MultipartControllerTest : BaseControllerTest() {
fun testUploadPartCopy_SourceObjectNotFound() {
val bucketMeta = bucketMetadata()
whenever(bucketService.verifyBucketExists(TEST_BUCKET_NAME)).thenReturn(bucketMeta)
+ whenever(multipartService.verifyPartNumberLimits("1")).thenReturn(1)
doThrow(S3Exception.NO_SUCH_KEY)
.whenever(objectService)
@@ -1214,6 +1223,7 @@ internal class MultipartControllerTest : BaseControllerTest() {
fun testUploadPartCopy_PreconditionFailed() {
val bucketMeta = bucketMetadata()
whenever(bucketService.verifyBucketExists(TEST_BUCKET_NAME)).thenReturn(bucketMeta)
+ whenever(multipartService.verifyPartNumberLimits("1")).thenReturn(1)
val s3meta = s3ObjectMetadata("source/key.txt", UUID.randomUUID().toString())
whenever(objectService.verifyObjectExists(eq("source-bucket"), eq("source/key.txt"), anyOrNull()))
@@ -1255,6 +1265,7 @@ internal class MultipartControllerTest : BaseControllerTest() {
fun testUploadPartCopy_NoVersionHeaderWhenNotVersioned() {
val bucketMeta = bucketMetadata()
whenever(bucketService.verifyBucketExists(TEST_BUCKET_NAME)).thenReturn(bucketMeta)
+ whenever(multipartService.verifyPartNumberLimits("1")).thenReturn(1)
val s3meta = s3ObjectMetadata(
key = "source/key.txt",
@@ -1266,7 +1277,7 @@ internal class MultipartControllerTest : BaseControllerTest() {
val copyResult = CopyPartResult(Date(), "etag-xyz")
whenever(
multipartService.copyPart(
- any(), any(), anyOrNull(), eq("1"), any(), any(), any(), any
org.testcontainers
diff --git a/testsupport/testcontainers/src/main/java/com/adobe/testing/s3mock/testcontainers/S3MockContainer.java b/testsupport/testcontainers/src/main/java/com/adobe/testing/s3mock/testcontainers/S3MockContainer.java
index bd91a9846..c288ccecb 100644
--- a/testsupport/testcontainers/src/main/java/com/adobe/testing/s3mock/testcontainers/S3MockContainer.java
+++ b/testsupport/testcontainers/src/main/java/com/adobe/testing/s3mock/testcontainers/S3MockContainer.java
@@ -22,6 +22,9 @@
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.utility.DockerImageName;
+/**
+ * Testcontainer for S3Mock.
+ */
public class S3MockContainer extends GenericContainer {
public static final String IMAGE_NAME = "adobe/s3mock";
private static final int S3MOCK_DEFAULT_HTTP_PORT = 9090;
@@ -61,14 +64,14 @@ public S3MockContainer withRetainFilesOnExit(boolean retainFilesOnExit) {
}
public S3MockContainer withValidKmsKeys(String kmsKeys) {
- //TODO: this uses the legacy-style properties. Leave for now as test that property translation
+ // TODO: this uses the legacy-style properties. Leave for now as test that property translation
// works in S3MockApplication.
this.addEnv("validKmsKeys", kmsKeys);
return self();
}
public S3MockContainer withInitialBuckets(String initialBuckets) {
- //TODO: this uses the legacy-style properties. Leave for now as test that property translation
+ // TODO: this uses the legacy-style properties. Leave for now as test that property translation
// works in S3MockApplication.
this.addEnv("initialBuckets", initialBuckets);
return self();
@@ -82,7 +85,7 @@ public S3MockContainer withInitialBuckets(String initialBuckets) {
*/
public S3MockContainer withVolumeAsRoot(String root) {
this.withFileSystemBind(root, "/s3mockroot", BindMode.READ_WRITE);
- //TODO: this uses the legacy-style properties. Leave for now as test that property translation
+ // TODO: this uses the legacy-style properties. Leave for now as test that property translation
// works in S3MockApplication.
this.addEnv("root", "/s3mockroot");
return self();