Skip to content
Merged
Show file tree
Hide file tree
Changes from 21 commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
0a1a266
Add more unit tests
afranken Aug 30, 2025
0a7adf7
Add more unit tests
afranken Aug 30, 2025
2efa170
Add more unit tests
afranken Aug 30, 2025
a25248b
Add "close stale issues or PRs" action
afranken Aug 30, 2025
f4cd24f
Use new StringUtils API
afranken Aug 30, 2025
77c6a57
Refactor BucketControllerTest to MockMvc 1
afranken Aug 30, 2025
3aa519b
Refactor BucketControllerTest to MockMvc 2
afranken Aug 30, 2025
7b6b60e
Refactor BucketControllerTest to MockMvc 3
afranken Aug 30, 2025
2967c8e
Refactor BucketControllerTest to MockMvc 4
afranken Aug 30, 2025
f881874
Refactor ControllerTest convenience functions
afranken Aug 30, 2025
eae9d50
Refactor ContextPathObjectStoreControllerTest to MockMvc
afranken Aug 30, 2025
67a99ba
Refactor FaviconControllerTest to MockMvc
afranken Aug 30, 2025
76d31eb
Refactor MultipartControllerTest to MockMvc 1
afranken Aug 30, 2025
924673e
Refactor MultipartControllerTest to MockMvc 2
afranken Aug 30, 2025
9df218d
Refactor MultipartControllerTest to MockMvc 3
afranken Aug 30, 2025
52540c7
Refactor MultipartControllerTest to MockMvc 4
afranken Aug 31, 2025
9079f32
Refactor ObjectControllerTest to MockMvc 1
afranken Aug 31, 2025
ad81fbf
Refactor ObjectControllerTest to MockMvc 2
afranken Aug 31, 2025
6953a01
Refactor ObjectControllerTest to MockMvc 3
afranken Aug 31, 2025
a08af5e
Use idiomatic MockMvc assertions
afranken Aug 31, 2025
74e15b5
Use imports
afranken Aug 31, 2025
6d32f31
Idiomatic Kotlin part 1
afranken Aug 31, 2025
f3615f5
Idiomatic Kotlin part 2
afranken Aug 31, 2025
bb6d588
Idiomatic Kotlin part 3
afranken Aug 31, 2025
303ffc1
Idiomatic Kotlin part 4
afranken Aug 31, 2025
7a857d1
Idiomatic Kotlin part 5
afranken Aug 31, 2025
3caf178
Idiomatic Kotlin part 6
afranken Aug 31, 2025
556ca51
Idiomatic Kotlin part 7
afranken Aug 31, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions .github/workflows/stale.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Workflow: automatically mark inactive issues and PRs as stale, then close
# them after a grace period, using actions/stale.
# Fix: the workflow name was the literal filename "stale.yml"; use a
# descriptive name so the Actions tab is readable.
name: Close stale issues and PRs
on:
  schedule:
    - cron: '0 0 * * *' # Run every day at midnight (UTC)

# actions/stale needs write access to comment on and close issues and PRs.
permissions:
  issues: write
  pull-requests: write

jobs:
  close_stale_prs:
    runs-on: ubuntu-latest
    steps:
      - name: Close stale issues and pull requests
        uses: actions/stale@v9.1.0
        with:
          # Mark as stale after 30 days of inactivity ...
          days-before-stale: 30
          # ... then close 5 days later unless there is new activity.
          days-before-close: 5
          stale-issue-message: 'This issue has been flagged as stale due to over 30 days of inactivity. If no updates or comments are made within the next 5 days, the system will automatically close it to maintain repository hygiene.'
          close-issue-message: 'This issue has been automatically closed due to prolonged inactivity. It was previously marked as stale after 30 days without activity and has now been closed following an additional 5-day grace period. If you believe this issue should be reopened, please provide a comment with relevant updates or justification.'
          stale-pr-message: 'This pull request has been marked as stale due to 30 days of inactivity. If no further updates or comments are made within the next 5 days, it will be automatically closed to maintain repository hygiene and reduce review backlog.'
          close-pr-message: 'This pull request has been automatically closed due to extended inactivity. It was previously flagged as stale after 30 days without activity and has now been closed following a 5-day grace period. If you believe this pull request is still relevant, feel free to reopen it or submit a new one with updated context.'
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ public BucketLifecycleConfiguration getBucketLifecycleConfiguration(String bucke

public List<S3Object> getS3Objects(String bucketName, @Nullable String prefix) {
var bucketMetadata = bucketStore.getBucketMetadata(bucketName);
var uuids = bucketStore.lookupKeysInBucket(prefix, bucketName);
var uuids = bucketStore.lookupIdsInBucket(prefix, bucketName);
return uuids
.stream()
.filter(Objects::nonNull)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import org.apache.commons.io.FileUtils;
import org.jspecify.annotations.Nullable;
import org.slf4j.Logger;
Expand Down Expand Up @@ -101,15 +102,27 @@ public synchronized UUID addKeyToBucket(String key, String bucketName) {
}
}

public List<UUID> lookupKeysInBucket(@Nullable String prefix, String bucketName) {
/**
 * Looks up the ids (UUID values) of all objects in the given bucket whose key
 * starts with {@code prefix}; a null prefix matches every object.
 */
public List<UUID> lookupIdsInBucket(@Nullable String prefix, String bucketName) {
return lookupInBucket(prefix, bucketName, Map.Entry::getValue);
}

/**
 * Looks up the keys of all objects in the given bucket whose key starts with
 * {@code prefix}; a null prefix matches every object.
 */
public List<String> lookupKeysInBucket(@Nullable String prefix, String bucketName) {
return lookupInBucket(prefix, bucketName, Map.Entry::getKey);
}

/**
 * Shared lookup for {@code lookupIdsInBucket} / {@code lookupKeysInBucket}:
 * filters the bucket's key-to-UUID entries by prefix and projects each
 * matching entry through {@code extract}.
 *
 * @param prefix  key prefix to match; null is treated as "" (matches all)
 * @param bucketName bucket whose objects are scanned
 * @param extract projection applied to each matching (key, UUID) entry
 * @return results of {@code extract} for every matching entry
 */
private <R> List<R> lookupInBucket(
@Nullable String prefix,
String bucketName,
Function<Map.Entry<String, UUID>, R> extract
) {
var bucketMetadata = getBucketMetadata(bucketName);
// Normalize: every key startsWith(""), so a null prefix selects everything.
var normalizedPrefix = prefix == null ? "" : prefix;
// Iterate under the per-bucket lock so the entry set is not mutated mid-scan.
synchronized (lockStore.get(bucketName)) {
return bucketMetadata.objects()
.entrySet()
.stream()
.filter(entry -> entry.getKey().startsWith(normalizedPrefix))
.map(extract)
.toList();
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,8 @@
import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_SDK_CHECKSUM_ALGORITHM;
import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_SERVER_SIDE_ENCRYPTION;
import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_STORAGE_CLASS;
import static org.apache.commons.lang3.StringUtils.equalsIgnoreCase;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.startsWithIgnoreCase;
import static org.apache.commons.lang3.Strings.CI;

import com.adobe.testing.s3mock.dto.ChecksumAlgorithm;
import com.adobe.testing.s3mock.dto.StorageClass;
Expand Down Expand Up @@ -73,7 +72,7 @@ public static Map<String, String> userMetadataHeadersFrom(S3ObjectMetadata s3Obj
if (s3ObjectMetadata.userMetadata() != null) {
s3ObjectMetadata.userMetadata()
.forEach((key, value) -> {
if (startsWithIgnoreCase(key, HEADER_X_AMZ_META_PREFIX)) {
if (CI.startsWith(key, HEADER_X_AMZ_META_PREFIX)) {
metadataHeaders.put(key, value);
} else {
//support case where metadata was stored locally in legacy format
Expand Down Expand Up @@ -104,7 +103,7 @@ public static Map<String, String> storageClassHeadersFrom(S3ObjectMetadata s3Obj
*/
public static Map<String, String> userMetadataFrom(HttpHeaders headers) {
return parseHeadersToMap(headers,
header -> startsWithIgnoreCase(header, HEADER_X_AMZ_META_PREFIX));
header -> CI.startsWith(header, HEADER_X_AMZ_META_PREFIX));
}

/**
Expand All @@ -114,12 +113,12 @@ public static Map<String, String> userMetadataFrom(HttpHeaders headers) {
*/
public static Map<String, String> storeHeadersFrom(HttpHeaders headers) {
return parseHeadersToMap(headers,
header -> (equalsIgnoreCase(header, HttpHeaders.EXPIRES)
|| equalsIgnoreCase(header, HttpHeaders.CONTENT_LANGUAGE)
|| equalsIgnoreCase(header, HttpHeaders.CONTENT_DISPOSITION)
|| (equalsIgnoreCase(header, HttpHeaders.CONTENT_ENCODING)
header -> (CI.equals(header, HttpHeaders.EXPIRES)
|| CI.equals(header, HttpHeaders.CONTENT_LANGUAGE)
|| CI.equals(header, HttpHeaders.CONTENT_DISPOSITION)
|| (CI.equals(header, HttpHeaders.CONTENT_ENCODING)
&& !isOnlyChunkedEncoding(headers))
|| equalsIgnoreCase(header, HttpHeaders.CACHE_CONTROL)
|| CI.equals(header, HttpHeaders.CACHE_CONTROL)
));
}

Expand All @@ -130,7 +129,7 @@ public static Map<String, String> storeHeadersFrom(HttpHeaders headers) {
*/
public static Map<String, String> encryptionHeadersFrom(HttpHeaders headers) {
return parseHeadersToMap(headers,
header -> startsWithIgnoreCase(header, X_AMZ_SERVER_SIDE_ENCRYPTION));
header -> CI.startsWith(header, X_AMZ_SERVER_SIDE_ENCRYPTION));
}

private static Map<String, String> parseHeadersToMap(HttpHeaders headers,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,29 @@
*/
package com.adobe.testing.s3mock

import com.adobe.testing.s3mock.dto.Bucket
import com.adobe.testing.s3mock.dto.BucketInfo
import com.adobe.testing.s3mock.dto.ChecksumAlgorithm
import com.adobe.testing.s3mock.dto.ChecksumType
import com.adobe.testing.s3mock.dto.ErrorResponse
import com.adobe.testing.s3mock.dto.LegalHold
import com.adobe.testing.s3mock.dto.LocationInfo
import com.adobe.testing.s3mock.dto.Owner
import com.adobe.testing.s3mock.dto.Retention
import com.adobe.testing.s3mock.dto.Tag
import com.adobe.testing.s3mock.dto.VersioningConfiguration
import com.adobe.testing.s3mock.store.BucketMetadata
import com.adobe.testing.s3mock.store.S3ObjectMetadata
import com.adobe.testing.s3mock.util.AwsHttpHeaders
import com.ctc.wstx.api.WstxOutputProperties
import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.dataformat.xml.XmlMapper
import com.fasterxml.jackson.dataformat.xml.deser.FromXmlParser
import com.fasterxml.jackson.dataformat.xml.ser.ToXmlGenerator
import java.nio.file.Path
import java.nio.file.Paths
import java.time.Instant
import java.util.UUID

internal abstract class BaseControllerTest {
companion object {
Expand All @@ -35,5 +53,115 @@ internal abstract class BaseControllerTest {
MAPPER.factory.xmlOutputFactory
.setProperty(WstxOutputProperties.P_USE_DOUBLE_QUOTES_IN_XML_DECL, true)
}

/**
 * Maps an [S3Exception] to the [ErrorResponse] body a controller would
 * serialize for it; the last two constructor arguments are left null.
 */
fun from(e: S3Exception): ErrorResponse =
  ErrorResponse(e.code, e.message, null, null)

/**
 * Builds a [BucketMetadata] test fixture. Every parameter has a default so
 * tests override only the fields they assert on.
 */
fun bucketMetadata(
  name: String = TEST_BUCKET_NAME,
  creationDate: String = Instant.now().toString(),
  path: Path = Paths.get("/tmp/foo/1"),
  bucketRegion: String = "us-east-1",
  versioningConfiguration: VersioningConfiguration? = null,
  bucketInfo: BucketInfo? = null,
  locationInfo: LocationInfo? = null
): BucketMetadata = BucketMetadata(
  name,
  creationDate,
  versioningConfiguration,
  // Three optional settings the tests never exercise — left null.
  null,
  null,
  null,
  path,
  bucketRegion,
  bucketInfo,
  locationInfo,
)

/**
 * Convenience fixture for an encrypted object: delegates to [s3ObjectMetadata]
 * with the encryption settings passed through and everything else defaulted.
 */
fun s3ObjectEncrypted(
  key: String,
  digest: String = UUID.randomUUID().toString(),
  encryption: String?,
  encryptionKey: String?
): S3ObjectMetadata = s3ObjectMetadata(
  key = key,
  digest = digest,
  encryption = encryption,
  encryptionKey = encryptionKey,
)

/**
 * Builds an [S3ObjectMetadata] test fixture backed by [UPLOAD_FILE_NAME],
 * with a fresh random UUID per call and defaults for everything not under test.
 *
 * NOTE(review): the constructor call below is positional; the per-argument
 * comments are inferred from parameter names and defaults — confirm against
 * the S3ObjectMetadata declaration.
 */
fun s3ObjectMetadata(
key: String,
digest: String = UUID.randomUUID().toString(),
encryption: String? = null,
encryptionKey: String? = null,
retention: Retention? = null,
tags: List<Tag>? = null,
legalHold: LegalHold? = null,
versionId: String? = null,
checksum: String? = null,
checksumType: ChecksumType? = ChecksumType.FULL_OBJECT,
checksumAlgorithm: ChecksumAlgorithm? = null,
userMetadata: Map<String, String>? = null,
storeHeaders: Map<String, String>? = null,
): S3ObjectMetadata {
return S3ObjectMetadata(
UUID.randomUUID(), // id — unique per fixture
key,
Path.of(UPLOAD_FILE_NAME).toFile().length().toString(), // size taken from the sample file on disk
"1234",
digest,
"text/plain",
1L,
Path.of(UPLOAD_FILE_NAME),
userMetadata,
tags,
legalHold,
retention,
Owner.DEFAULT_OWNER,
storeHeaders,
encryptionHeaders(encryption, encryptionKey), // derived from the two nullable encryption params
checksumAlgorithm,
checksum,
null, // two fields not exercised by these tests — TODO confirm meaning
null,
versionId,
false,
checksumType
)
}

/**
 * Builds the encryption-related header map for a fixture: each non-null
 * argument contributes one header entry; both null yields an empty map.
 *
 * Rewritten from a mutable Pair list + `associate { it.first to it.second }`
 * (a redundant re-pairing of pairs) to a direct [buildMap].
 */
private fun encryptionHeaders(encryption: String?, encryptionKey: String?): Map<String, String> =
  buildMap {
    encryption?.let { put(AwsHttpHeaders.X_AMZ_SERVER_SIDE_ENCRYPTION, it) }
    encryptionKey?.let { put(AwsHttpHeaders.X_AMZ_SERVER_SIDE_ENCRYPTION_AWS_KMS_KEY_ID, it) }
  }
// Shared fixtures for controller tests.
val TEST_OWNER = Owner("s3-mock-file-store", "123")
// Uses bucketMetadata() defaults, i.e. region "us-east-1".
// NOTE(review): BUCKET_REGION below is "us-west-2" — confirm the mismatch is intentional.
val TEST_BUCKETMETADATA = bucketMetadata()
const val UPLOAD_FILE_NAME = "src/test/resources/sampleFile.txt"

const val TEST_BUCKET_NAME = "test-bucket"
// Captured once at class load; every test in a run shares the same timestamp.
val CREATION_DATE = Instant.now().toString()
const val BUCKET_REGION = "us-west-2"
val BUCKET_PATH: Path = Paths.get("/tmp/foo/1")
val TEST_BUCKET = Bucket(
TEST_BUCKET_NAME,
BUCKET_REGION,
CREATION_DATE,
BUCKET_PATH
)
}
}
Loading