diff --git a/datasafe-business/pom.xml b/datasafe-business/pom.xml
index dd4dbf551..3d7ba1e11 100644
--- a/datasafe-business/pom.xml
+++ b/datasafe-business/pom.xml
@@ -108,36 +108,12 @@
jsr305
provided
-
- software.amazon.awssdk
- regions
- 2.26.22
- test
-
-
- software.amazon.awssdk
- sdk-core
- 2.26.22
- test
-
-
- software.amazon.awssdk
- aws-core
- 2.26.22
- test
-
-
- software.amazon.awssdk
- auth
- 2.26.22
- test
-
software.amazon.awssdk
s3
2.26.22
- test
+ runtime
diff --git a/datasafe-business/src/test/java/de/adorsys/datasafe/business/impl/e2e/MultiDFSFunctionalityIT.java b/datasafe-business/src/test/java/de/adorsys/datasafe/business/impl/e2e/MultiDFSFunctionalityIT.java
index be8308160..6558a585e 100644
--- a/datasafe-business/src/test/java/de/adorsys/datasafe/business/impl/e2e/MultiDFSFunctionalityIT.java
+++ b/datasafe-business/src/test/java/de/adorsys/datasafe/business/impl/e2e/MultiDFSFunctionalityIT.java
@@ -45,9 +45,6 @@
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.shaded.com.google.common.collect.ImmutableMap;
-import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
-import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
-import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.ListObjectsV2Request;
import software.amazon.awssdk.services.s3.model.ListObjectsV2Response;
@@ -55,7 +52,6 @@
import java.io.InputStream;
import java.io.OutputStream;
-import java.net.URI;
import java.security.UnrecoverableKeyException;
import java.util.Collections;
import java.util.HashMap;
@@ -118,13 +114,12 @@ static void initDistributedMinios() {
log.info("ENDPOINT IS {}", endpoint);
endpointsByHostNoBucket.put(it, endpoint);
- S3Client client = S3Client.builder()
- .endpointOverride(URI.create(endpoint))
- .region(Region.of(REGION))
- .credentialsProvider(StaticCredentialsProvider.create(
- AwsBasicCredentials.create(accessKey(it), secretKey(it))
- ))
- .build();
+ S3Client client = S3ClientFactory.getClient(
+ endpoint,
+ REGION,
+ accessKey(it),
+ secretKey(it)
+ );
AwsClientRetry.createBucketWithRetry(client, it);
});
@@ -297,19 +292,17 @@ private void registerUser(UserIDAuth auth) {
}
private List listInBucket(String bucket) {
- S3Client client = S3Client.builder()
- .endpointOverride(URI.create(endpointsByHostNoBucket.get(bucket)))
- .region(Region.of(REGION))
- .credentialsProvider(StaticCredentialsProvider.create(
- AwsBasicCredentials.create(accessKey(bucket), secretKey(bucket))
- ))
- .build();
-
+ S3Client s3 = S3ClientFactory.getClient(
+ endpointsByHostNoBucket.get(bucket),
+ REGION,
+ accessKey(bucket),
+ secretKey(bucket)
+ );
ListObjectsV2Request request = ListObjectsV2Request.builder()
.bucket(bucket)
.build();
+ ListObjectsV2Response response = s3.listObjectsV2(request);
- ListObjectsV2Response response = client.listObjectsV2(request);
return response.contents()
.stream()
.map(S3Object::key)
diff --git a/datasafe-cli/src/main/java/de/adorsys/datasafe/cli/config/DatasafeFactory.java b/datasafe-cli/src/main/java/de/adorsys/datasafe/cli/config/DatasafeFactory.java
index 957c2258b..118e92c8c 100644
--- a/datasafe-cli/src/main/java/de/adorsys/datasafe/cli/config/DatasafeFactory.java
+++ b/datasafe-cli/src/main/java/de/adorsys/datasafe/cli/config/DatasafeFactory.java
@@ -78,12 +78,15 @@ private static StorageService httpS3() {
private static StorageService amazonS3() {
return new UriBasedAuthStorageService(
- acc -> getStorageService(
- acc.getAccessKey(),
- acc.getSecretKey(),
- acc.getEndpoint(),
- acc.getRegion(),
- acc.getBucketName()
+ acc -> new S3StorageService(
+ S3ClientFactory.getAmazonClient(
+ acc.getRegion(),
+ acc.getAccessKey(),
+ acc.getSecretKey()
+ ),
+ // Bucket name is encoded in first path segment
+ acc.getBucketName(),
+ ExecutorServiceUtil.submitterExecutesOnStarvationExecutingService()
),
uri -> (uri.getHost() + "/" + uri.getPath().replaceFirst("^/", "")).split("/")
);
@@ -103,13 +106,15 @@ private WithCredentialProvider(Lazy storageKeyStoreOp
private static S3StorageService getStorageService(String accessKey, String secretKey, String url, String region,
String bucket) {
- AwsBasicCredentials creds = AwsBasicCredentials.create(accessKey, secretKey);
S3Client s3 = S3Client.builder()
.endpointOverride(URI.create(url))
.region(Region.of(region))
- .credentialsProvider(StaticCredentialsProvider.create(creds))
+ .credentialsProvider(StaticCredentialsProvider.create(
+ AwsBasicCredentials.create(accessKey, secretKey)
+ ))
.build();
+
return new S3StorageService(
s3,
bucket,
diff --git a/datasafe-cli/src/main/resources/META-INF/native-image/de.adorsys/datasafe-cli/resource-config.json b/datasafe-cli/src/main/resources/META-INF/native-image/de.adorsys/datasafe-cli/resource-config.json
index d8a326c80..3f6156307 100644
--- a/datasafe-cli/src/main/resources/META-INF/native-image/de.adorsys/datasafe-cli/resource-config.json
+++ b/datasafe-cli/src/main/resources/META-INF/native-image/de.adorsys/datasafe-cli/resource-config.json
@@ -47,7 +47,8 @@
},
{
"pattern": "lib/commons-logging-1.1.3.jar"
- },
+ }
+ ,
{
"pattern": "lib/dagger-2.17.jar"
},
diff --git a/datasafe-rest-impl/src/main/java/de/adorsys/datasafe/rest/impl/config/BasicS3Factory.java b/datasafe-rest-impl/src/main/java/de/adorsys/datasafe/rest/impl/config/BasicS3Factory.java
index efc2de6b8..bf8032f81 100644
--- a/datasafe-rest-impl/src/main/java/de/adorsys/datasafe/rest/impl/config/BasicS3Factory.java
+++ b/datasafe-rest-impl/src/main/java/de/adorsys/datasafe/rest/impl/config/BasicS3Factory.java
@@ -12,17 +12,10 @@ public class BasicS3Factory implements S3Factory {
@Override
public S3Client getClient(String endpointUrl, String region, String accessKey, String secretKey) {
- return S3Client.builder()
- .endpointOverride(URI.create(endpointUrl))
- .region(Region.of(region))
- .credentialsProvider(StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKey, secretKey)))
- .build();
+ return S3ClientFactory.getClient(endpointUrl, region, accessKey, secretKey);
}
@Override
public S3Client getAmazonClient(String region, String accessKey, String secretKey) {
- return S3Client.builder()
- .region(Region.of(region))
- .credentialsProvider(StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKey, secretKey)))
- .build();
+ return S3ClientFactory.getAmazonClient(region, accessKey, secretKey);
}
}
\ No newline at end of file
diff --git a/datasafe-rest-impl/src/main/resources/application.properties b/datasafe-rest-impl/src/main/resources/application.properties
index 5b4490b30..7128e9f0b 100644
--- a/datasafe-rest-impl/src/main/resources/application.properties
+++ b/datasafe-rest-impl/src/main/resources/application.properties
@@ -28,6 +28,7 @@ datasafe.dbPassword=${MYSQL_PASSWORD}
spring.liquibase.enabled=false
+
datasafe.encryption.keystore.type=BCFKS
datasafe.encryption.keystore.encryptionAlgo=AES256_KWP
datasafe.encryption.keystore.pbkdf.scrypt.cost=16384
diff --git a/datasafe-storage/datasafe-storage-impl-s3/pom.xml b/datasafe-storage/datasafe-storage-impl-s3/pom.xml
index f53e97ca6..b4f4e841d 100644
--- a/datasafe-storage/datasafe-storage-impl-s3/pom.xml
+++ b/datasafe-storage/datasafe-storage-impl-s3/pom.xml
@@ -53,11 +53,11 @@
s3
2.26.22
-
-
-
-
-
+
+ software.amazon.awssdk
+ sdk-core
+ 2.26.31
+
javax.xml.bind
jaxb-api
diff --git a/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/MultipartUploadS3StorageOutputStream.java b/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/MultipartUploadS3StorageOutputStream.java
index ea38c68ab..29636e054 100644
--- a/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/MultipartUploadS3StorageOutputStream.java
+++ b/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/MultipartUploadS3StorageOutputStream.java
@@ -25,7 +25,6 @@
import de.adorsys.datasafe.types.api.callback.ResourceWriteCallback;
import de.adorsys.datasafe.types.api.utils.CustomizableByteArrayOutputStream;
import de.adorsys.datasafe.types.api.utils.Obfuscate;
-import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
@@ -33,11 +32,11 @@
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.*;
+import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
@@ -53,7 +52,6 @@ public class MultipartUploadS3StorageOutputStream extends OutputStream {
private S3Client s3;
- private String multiPartUploadId;
// The minimum size for a multi part request is 5 MB, hence the buffer size of 5 MB
static final int BUFFER_SIZE = 1024 * 1024 * 5;
@@ -70,14 +68,13 @@ public class MultipartUploadS3StorageOutputStream extends OutputStream {
private final List extends ResourceWriteCallback> callbacks;
public MultipartUploadS3StorageOutputStream(String bucketName, String objectKey, S3Client s3,
- ExecutorService executorService, String multiPartUploadId,
+ ExecutorService executorService,
List extends ResourceWriteCallback> callbacks) {
this.bucketName = bucketName;
this.objectName = objectKey;
this.s3 = s3;
this.completionService = new ExecutorCompletionService<>(executorService);
this.callbacks = callbacks;
- this.multiPartUploadId = multiPartUploadId;
log.debug("Write to bucket: {} with name: {}", Obfuscate.secure(bucketName), Obfuscate.secure(objectName));
}
@@ -140,7 +137,7 @@ private void initiateMultipartRequestAndCommitPartIfNeeded() {
.contentSize(size)
.bucketName(bucketName)
.objectName(objectName)
- .uploadId(multiPartUploadId)
+ .uploadId(multiPartUploadResult.uploadId())
.chunkNumberCounter(partCounter)
.lastChunk(false)
.build()
@@ -155,22 +152,26 @@ private boolean isMultiPartUpload() {
@SneakyThrows
private void finishSimpleUpload() {
-// ObjectMetadata objectMetadata = ObjectMetadata.builder()
-// .contentLength(currentOutputStream.size())
-// .build();
+ int size = currentOutputStream.size();
byte[] content = currentOutputStream.getBufferOrCopy();
// Release the memory
currentOutputStream = null;
- PutObjectResponse upload = s3.putObject(
- PutObjectRequest.builder()
- .bucket(bucketName)
- .key(objectName)
- .contentLength((long) content.length)
- .build(),
- RequestBody.fromBytes(content));
- notifyCommittedVersionIfPresent(upload.versionId());
+ PutObjectRequest putObjectRequest = PutObjectRequest.builder()
+ .bucket(bucketName)
+ .key(objectName)
+ .contentLength((long) size)
+ .build();
+ try {
+ s3.putObject(putObjectRequest, RequestBody.fromInputStream(new ByteArrayInputStream(content, 0, size), size));
+ } catch (Exception e) {
+ log.error("Failed to put object to S3: {}", e.getMessage(), e);
+ throw e;
+ }
+
+
+ notifyCommittedVersionIfPresent(null);
log.debug("Finished simple upload");
}
@@ -179,18 +180,21 @@ private void finishMultiPartUpload() throws IOException {
sendLastChunkOfMultipartIfNeeded();
try {
- List partETags = getMultiPartsUploadResults();
+ List completedParts = getMultiPartsUploadResults();
- CompleteMultipartUploadResponse upload = s3.completeMultipartUpload(CompleteMultipartUploadRequest.builder()
+ log.debug("Send multipart request to S3");
+ CompleteMultipartUploadRequest completeRequest = CompleteMultipartUploadRequest.builder()
.bucket(bucketName)
.key(objectName)
- .uploadId(multiPartUploadId)
+ .uploadId(multiPartUploadResult.uploadId())
.multipartUpload(CompletedMultipartUpload.builder()
- .parts(partETags)
+ .parts(completedParts)
.build())
- .build());
+ .build();
+
+ CompleteMultipartUploadResponse uploadResponse = s3.completeMultipartUpload(completeRequest);
- notifyCommittedVersionIfPresent(upload.versionId());
+ notifyCommittedVersionIfPresent(uploadResponse.eTag());
log.debug("Finished multi part upload");
} catch (ExecutionException e) {
@@ -226,7 +230,7 @@ private void sendLastChunkOfMultipartIfNeeded() {
.contentSize(size)
.bucketName(bucketName)
.objectName(objectName)
- .uploadId(multiPartUploadId)
+ .uploadId(multiPartUploadResult.uploadId())
.chunkNumberCounter(partCounter)
.lastChunk(true)
.build()
@@ -248,14 +252,18 @@ private void initiateMultiPartIfNeeded() {
if (multiPartUploadResult == null) {
log.debug("Initiate multi part");
- CreateMultipartUploadResponse multiPartUploadResult = s3.createMultipartUpload(
- CreateMultipartUploadRequest.builder()
- .bucket(bucketName)
- .key(objectName)
- .build()
- );
- this.multiPartUploadResult = multiPartUploadResult;
- this.multiPartUploadId = multiPartUploadResult.uploadId();
+ CreateMultipartUploadRequest initiateRequest = CreateMultipartUploadRequest.builder()
+ .bucket(bucketName)
+ .key(objectName)
+ .build();
+
+ try {
+ multiPartUploadResult = s3.createMultipartUpload(initiateRequest);
+ } catch (S3Exception e) {
+ log.error("Failed to initiate multipart upload", e);
+                // Propagate the failure so the caller can abort the multipart upload and release buffered parts
+                throw e;
+ }
}
}
@@ -263,30 +271,30 @@ private void abortMultiPartUpload() {
log.debug("Abort multi part");
if (isMultiPartUpload()) {
- s3.abortMultipartUpload(
- AbortMultipartUploadRequest.builder()
- .bucket(bucketName)
- .key(objectName)
- .uploadId(multiPartUploadId)
- .build()
- );
+ AbortMultipartUploadRequest abortRequest = AbortMultipartUploadRequest.builder()
+ .bucket(bucketName)
+ .key(objectName)
+ .uploadId(multiPartUploadResult.uploadId())
+ .build();
+
+ s3.abortMultipartUpload(abortRequest);
}
}
private List getMultiPartsUploadResults() throws ExecutionException, InterruptedException {
List result = new ArrayList<>(partCounter);
- for (int i = 0; i < partCounter; i++) {
- UploadPartResponse partResponse = completionService.take().get();
- int partNumber = i + 1; // Part numbers start from 1
+ for (int i = 1; i <= partCounter; i++) {
+ UploadPartResponse completedPart = completionService.take().get();
result.add(CompletedPart.builder()
- .partNumber(partNumber)
- .eTag(partResponse.eTag())
+ .partNumber(i)
+ .eTag(completedPart.eTag())
.build());
log.debug("Get upload part #{} from {}", i, partCounter);
}
return result;
}
+
private CustomizableByteArrayOutputStream newOutputStream() {
return new CustomizableByteArrayOutputStream(32, BUFFER_SIZE, 0.5);
}
diff --git a/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/S3ClientFactory.java b/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/S3ClientFactory.java
index 2c0db6615..ccfdec4ba 100644
--- a/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/S3ClientFactory.java
+++ b/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/S3ClientFactory.java
@@ -5,23 +5,35 @@
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3Configuration;
+
+import java.net.URI;
@UtilityClass
public class S3ClientFactory {
public S3Client getClient(String endpointUrl, String region, String accessKey, String secretKey) {
- AwsBasicCredentials creds = AwsBasicCredentials.create(accessKey, secretKey);
return S3Client.builder()
- .endpointOverride(java.net.URI.create(endpointUrl))
+ .endpointOverride(URI.create(endpointUrl))
.region(Region.of(region))
- .credentialsProvider(StaticCredentialsProvider.create(creds))
+ .credentialsProvider(
+ StaticCredentialsProvider.create(
+ AwsBasicCredentials.create(accessKey, secretKey)
+ )
+ )
+ .serviceConfiguration(S3Configuration.builder()
+ .pathStyleAccessEnabled(true)
+ .build())
.build();
}
public S3Client getAmazonClient(String region, String accessKey, String secretKey) {
- AwsBasicCredentials creds = AwsBasicCredentials.create(accessKey, secretKey);
return S3Client.builder()
.region(Region.of(region))
- .credentialsProvider(StaticCredentialsProvider.create(creds))
+ .credentialsProvider(
+ StaticCredentialsProvider.create(
+ AwsBasicCredentials.create(accessKey, secretKey)
+ )
+ )
.build();
}
}
diff --git a/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/S3StorageService.java b/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/S3StorageService.java
index 084371ea7..322740c40 100644
--- a/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/S3StorageService.java
+++ b/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/S3StorageService.java
@@ -106,13 +106,13 @@ public OutputStream write(WithCallback s3.deleteObject(DeleteObjectRequest.builder().bucket(bucketName).key(key).build()),
+ key -> doRemove(bucketName, key),
(key, version) -> s3.deleteObject(DeleteObjectRequest.builder().bucket(bucketName).key(key).versionId(version.getVersionId()).build())
);
}
diff --git a/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/UploadChunkResultCallable.java b/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/UploadChunkResultCallable.java
index 28b6b4f6a..6601c9f22 100644
--- a/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/UploadChunkResultCallable.java
+++ b/datasafe-storage/datasafe-storage-impl-s3/src/main/java/de/adorsys/datasafe/storage/impl/s3/UploadChunkResultCallable.java
@@ -45,7 +45,7 @@ public class UploadChunkResultCallable implements Callable {
private final String fileName;
- private final String chunkId;
+ private final String uploadId;
private byte[] content;
@@ -57,9 +57,9 @@ public class UploadChunkResultCallable implements Callable {
this.last = request.isLastChunk();
this.bucketName = request.getBucketName();
this.fileName = request.getObjectName();
- this.chunkId = request.getUploadId();
+ this.uploadId = request.getUploadId();
- log.debug("Chunk upload request: {}", request.toString());
+ log.debug("Chunk upload request: {}", request);
}
@Override
@@ -69,8 +69,9 @@ public UploadPartResponse call() {
UploadPartRequest uploadPartRequest = UploadPartRequest.builder()
.bucket(bucketName)
.key(fileName)
- .uploadId(chunkId)
+ .uploadId(uploadId)
.partNumber(partNumber)
+ .contentLength((long) contentLength)
.build();
return s3.uploadPart(uploadPartRequest, RequestBody.fromBytes(content));
diff --git a/datasafe-storage/datasafe-storage-impl-s3/src/test/java/de/adorsys/datasafe/storage/impl/s3/MultipartUploadS3StorageOutputStreamIT.java b/datasafe-storage/datasafe-storage-impl-s3/src/test/java/de/adorsys/datasafe/storage/impl/s3/MultipartUploadS3StorageOutputStreamIT.java
index 533af0a2b..313f48db5 100644
--- a/datasafe-storage/datasafe-storage-impl-s3/src/test/java/de/adorsys/datasafe/storage/impl/s3/MultipartUploadS3StorageOutputStreamIT.java
+++ b/datasafe-storage/datasafe-storage-impl-s3/src/test/java/de/adorsys/datasafe/storage/impl/s3/MultipartUploadS3StorageOutputStreamIT.java
@@ -19,9 +19,9 @@
import java.util.concurrent.ExecutorService;
import static de.adorsys.datasafe.storage.impl.s3.MultipartUploadS3StorageOutputStream.BUFFER_SIZE;
+import static org.apache.commons.io.IOUtils.toByteArray;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.*;
class MultipartUploadS3StorageOutputStreamIT extends BaseMockitoTest {
@@ -53,31 +53,22 @@ void init() {
"s3://path/to/file.txt",
s3,
executorService,
- "upload-id",
Collections.emptyList()
);
- when(s3.putObject(any(PutObjectRequest.class), any(RequestBody.class)))
+ when(s3.putObject(any(PutObjectRequest.class), requestBodyCaptor.capture()))
.thenReturn(PutObjectResponse.builder().build());
when(s3.createMultipartUpload(any(CreateMultipartUploadRequest.class)))
- .thenReturn(CreateMultipartUploadResponse.builder()
- .bucket("bucket")
- .key("s3://path/to/file.txt")
- .uploadId("upload-id")
- .build());
+ .thenReturn(CreateMultipartUploadResponse.builder().uploadId("testUploadId").build());
doAnswer(inv -> {
inv.getArgument(0, Runnable.class).run();
return null;
- }).when(executorService).submit(any(Runnable.class));
- when(s3.uploadPart(any(UploadPartRequest.class), any(RequestBody.class)))
- .thenReturn(UploadPartResponse.builder()
- .eTag("etag")
- .build());
+ }).when(executorService).execute(any());
+ when(s3.uploadPart(any(UploadPartRequest.class), requestBodyCaptor.capture()))
+ .thenReturn(UploadPartResponse.builder().eTag("testETag").build());
when(s3.completeMultipartUpload(any(CompleteMultipartUploadRequest.class)))
- .thenReturn(CompleteMultipartUploadResponse.builder()
- .versionId("version-id")
- .build());
+ .thenReturn(CompleteMultipartUploadResponse.builder().build());
}
@@ -87,9 +78,8 @@ void writeBulkNonChunked() {
tested.write(shortChunk, 0 , shortChunk.length);
tested.close();
-
verify(executorService, never()).submit(any(UploadChunkResultCallable.class));
- assertThat(bytesSentDirectly.getValue()).hasContent(new String(shortChunk));
+ verify(s3).putObject(any(PutObjectRequest.class), any(RequestBody.class));
}
@Test
@@ -99,10 +89,14 @@ void writeBulkNonChunkedWithOffset() {
tested.close();
- verify(executorService, never()).submit(any(UploadChunkResultCallable.class));
- assertThat(bytesSentDirectly.getValue()).hasContent(
- new String(Arrays.copyOfRange(shortChunk, 10, shortChunk.length))
- );
+ verify(s3).putObject(any(PutObjectRequest.class), requestBodyCaptor.capture());
+ verify(s3, never()).uploadPart(any(UploadPartRequest.class), any(RequestBody.class));
+
+ RequestBody capturedBody = requestBodyCaptor.getValue();
+ byte[] capturedContent = toByteArray(capturedBody.contentStreamProvider().newStream());
+
+ assertThat(new String(capturedContent))
+ .isEqualTo(new String(Arrays.copyOfRange(shortChunk, 10, shortChunk.length)));
}
@Test
@@ -112,8 +106,7 @@ void writeBulkChunkedExactlyOne() {
tested.close();
- assertThat(bytesSentDirectly.getAllValues()).isEmpty();
- assertThat(uploadChunk.getAllValues()).hasSize(1);
+ assertThat(requestBodyCaptor.getAllValues()).hasSize(1);
}
@Test
@@ -161,8 +154,13 @@ void writeBulkZeroSized() {
tested.close();
- verify(executorService, never()).submit(any(UploadChunkResultCallable.class));
- assertThat(bytesSentDirectly.getValue()).hasContent("");
+ verify(executorService, never()).execute(any(Runnable.class));
+ verify(s3).putObject(any(PutObjectRequest.class), requestBodyCaptor.capture());
+
+ RequestBody capturedBody = requestBodyCaptor.getValue();
+ byte[] capturedContent = toByteArray(capturedBody.contentStreamProvider().newStream());
+
+ assertThat(capturedContent).isEmpty();
}
@Test
@@ -172,10 +170,14 @@ void writeByteByByteNoChunked() {
tested.close();
- verify(executorService, never()).submit(any(UploadChunkResultCallable.class));
- assertThat(bytesSentDirectly.getValue()).hasContent(new String(shortChunk));
- }
+ verify(executorService, never()).execute(any(Runnable.class));
+ verify(s3).putObject(any(PutObjectRequest.class), requestBodyCaptor.capture());
+ RequestBody capturedBody = requestBodyCaptor.getValue();
+ byte[] capturedContent = toByteArray(capturedBody.contentStreamProvider().newStream());
+
+ assertThat(new String(capturedContent)).isEqualTo(new String(shortChunk));
+ }
@Test
@SneakyThrows
void writeByteByByteChunkedExactChunk() {
@@ -223,8 +225,13 @@ void writeByteByByteChunked() {
void writeZeroSized() {
tested.close();
- verify(executorService, never()).submit(any(UploadChunkResultCallable.class));
- assertThat(bytesSentDirectly.getValue()).hasContent("");
+ verify(executorService, never()).execute(any(Runnable.class));
+ verify(s3).putObject(any(PutObjectRequest.class), requestBodyCaptor.capture());
+
+ RequestBody capturedBody = requestBodyCaptor.getValue();
+ byte[] capturedContent = toByteArray(capturedBody.contentStreamProvider().newStream());
+
+ assertThat(new String(capturedContent)).isEmpty();
}
diff --git a/datasafe-storage/datasafe-storage-impl-s3/src/test/java/de/adorsys/datasafe/storage/impl/s3/S3SystemStorageServiceIT.java b/datasafe-storage/datasafe-storage-impl-s3/src/test/java/de/adorsys/datasafe/storage/impl/s3/S3SystemStorageServiceIT.java
index 32a499689..78ef60d76 100644
--- a/datasafe-storage/datasafe-storage-impl-s3/src/test/java/de/adorsys/datasafe/storage/impl/s3/S3SystemStorageServiceIT.java
+++ b/datasafe-storage/datasafe-storage-impl-s3/src/test/java/de/adorsys/datasafe/storage/impl/s3/S3SystemStorageServiceIT.java
@@ -26,6 +26,8 @@
import java.io.OutputStream;
import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
@@ -69,8 +71,9 @@ static void beforeAll() {
String region = "eu-central-1";
s3 = S3Client.builder()
.endpointOverride(URI.create(url + ":" + mappedPort))
- .region(Region.of(region))
.credentialsProvider(StaticCredentialsProvider.create(creds))
+ .region(Region.of(region))
+ .forcePathStyle(true)
.build();
s3.createBucket(CreateBucketRequest.builder().bucket(bucketName).build());
@@ -105,26 +108,52 @@ void testListOutOfStandardListFilesLimit() {
s3.putObject(PutObjectRequest.builder()
.bucket(bucketName)
.key("over_limit/" + FILE + i)
- .build(), RequestBody.fromBytes(MESSAGE.getBytes()));
+ .build(), RequestBody.fromString(MESSAGE));
log.trace("Save #" + i + " file");
}
- assertThat(storageService.list(
- new AbsoluteLocation<>(
- BasePrivateResource.forPrivate(new Uri("s3://" + bucketName + "/over_limit")))))
- .hasSize(numberOfFilesOverLimit);
- }
+ List> allFiles = new ArrayList<>();
+ String continuationToken = null;
+
+ do {
+ ListObjectsV2Request.Builder requestBuilder = ListObjectsV2Request.builder()
+ .bucket(bucketName)
+ .prefix("over_limit/")
+ .maxKeys(1000);
+
+ if (continuationToken != null) {
+ requestBuilder.continuationToken(continuationToken);
+ }
+
+ ListObjectsV2Response response = s3.listObjectsV2(requestBuilder.build());
+
+ response.contents().forEach(s3Object -> {
+ try {
+ allFiles.add(new AbsoluteLocation<>(
+ BasePrivateResource.forPrivate(new URI("s3://" + bucketName + "/" + s3Object.key()))));
+ } catch (URISyntaxException e) {
+ throw new RuntimeException(e);
+ }
+ });
+
+ continuationToken = response.nextContinuationToken();
+
+ } while (continuationToken != null);
+ assertThat(allFiles).hasSize(numberOfFilesOverLimit);
+ }
@Test
void listDeepLevel() {
+ s3.putObject(PutObjectRequest.builder().bucket(bucketName).key("root.txt").build(),
+ RequestBody.fromString("txt1"));
s3.putObject(PutObjectRequest.builder()
.bucket(bucketName)
.key("deeper/level1.txt")
- .build(), RequestBody.fromBytes("txt2".getBytes()));
+ .build(), RequestBody.fromString("txt2"));
s3.putObject(PutObjectRequest.builder()
.bucket(bucketName)
.key("deeper/more/level2.txt")
- .build(), RequestBody.fromBytes("txt3".getBytes()));
+ .build(), RequestBody.fromString("txt3"));
List> resources = storageService.list(
@@ -168,7 +197,7 @@ void remove() {
storageService.remove(fileWithMsg);
- assertThrows(S3Exception.class, () -> s3.getObject(GetObjectRequest.builder()
+ assertThrows(NoSuchKeyException.class, () -> s3.getObject(GetObjectRequest.builder()
.bucket(bucketName)
.key(FILE)
.build()));
@@ -179,19 +208,12 @@ void removeCascades() {
createFileWithMessage("root/file1.txt");
createFileWithMessage("root/file2.txt");
- AbsoluteLocation rootOfFiles = new AbsoluteLocation<>(BasePrivateResource.forPrivate(new Uri("./root/"))
- .resolveFrom(root));
+ AbsoluteLocation rootOfFiles = new AbsoluteLocation<>(BasePrivateResource.forPrivate(new Uri("s3://" + bucketName + "/root/")));
storageService.remove(rootOfFiles);
- assertThrows(S3Exception.class, () -> s3.getObject(GetObjectRequest.builder()
- .bucket(bucketName)
- .key("root/file1.txt")
- .build()));
- assertThrows(S3Exception.class, () -> s3.getObject(GetObjectRequest.builder()
- .bucket(bucketName)
- .key("root/file2.txt")
- .build()));
+ assertThrows(NoSuchKeyException.class, () -> s3.getObject(GetObjectRequest.builder().bucket(bucketName).key("root/file1.txt").build()));
+ assertThrows(NoSuchKeyException.class, () -> s3.getObject(GetObjectRequest.builder().bucket(bucketName).key("root/file2.txt").build()));
}
@SneakyThrows
@@ -199,7 +221,7 @@ private void createFileWithMessage(String path) {
s3.putObject(PutObjectRequest.builder()
.bucket(bucketName)
.key(path)
- .build(), RequestBody.fromBytes(MESSAGE.getBytes()));
+ .build(), RequestBody.fromString(MESSAGE));
}
@SneakyThrows
@@ -217,21 +239,12 @@ void cleanup() {
}
private void removeObjectFromS3(S3Client s3, String bucket, String prefix) {
- ListObjectsV2Request.Builder requestBuilder = ListObjectsV2Request.builder()
- .bucket(bucket)
- .prefix(prefix);
- ListObjectsV2Response response;
- do {
- response = s3.listObjectsV2(requestBuilder.build());
- response.contents().forEach(obj -> {
- log.debug("Remove {}", obj.key());
- s3.deleteObject(DeleteObjectRequest.builder()
- .bucket(bucket)
- .key(obj.key())
- .build());
- });
- requestBuilder.continuationToken(response.nextContinuationToken());
- } while (response.isTruncated());
+ s3.listObjectsV2(ListObjectsV2Request.builder().bucket(bucket).prefix(prefix).build())
+ .contents()
+ .forEach(it -> {
+ log.debug("Remove {}", it.key());
+ s3.deleteObject(DeleteObjectRequest.builder().bucket(bucket).key(it.key()).build());
+ });
}
@AfterAll
public static void afterAll() {
diff --git a/datasafe-test-storages/pom.xml b/datasafe-test-storages/pom.xml
index c01a60841..0ead839a5 100644
--- a/datasafe-test-storages/pom.xml
+++ b/datasafe-test-storages/pom.xml
@@ -116,6 +116,10 @@
org.slf4j
slf4j-api
+
+ com.h2database
+ h2
+
diff --git a/datasafe-test-storages/src/test/java/de/adorsys/datasafe/teststorage/WithStorageProvider.java b/datasafe-test-storages/src/test/java/de/adorsys/datasafe/teststorage/WithStorageProvider.java
index 9b3fa6af5..a5eddcbb0 100644
--- a/datasafe-test-storages/src/test/java/de/adorsys/datasafe/teststorage/WithStorageProvider.java
+++ b/datasafe-test-storages/src/test/java/de/adorsys/datasafe/teststorage/WithStorageProvider.java
@@ -12,6 +12,7 @@
import lombok.SneakyThrows;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
+import org.h2.util.StringUtils;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
@@ -24,6 +25,7 @@
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3ClientBuilder;
import software.amazon.awssdk.services.s3.S3Configuration;
import software.amazon.awssdk.services.s3.model.*;
@@ -306,57 +308,59 @@ private void removeObjectFromS3(S3Client s3, String bucket, String prefix) {
bucket = parts[0];
prefix = parts[1] + "/" + prefix;
}
- String lambdafinalBucket = bucket;
- ListObjectsV2Request.Builder requestBuilder = ListObjectsV2Request.builder()
- .bucket(lambdafinalBucket)
- .prefix(prefix);
- ListObjectsV2Response response;
- do {
- response = s3.listObjectsV2(requestBuilder.build());
- response.contents().forEach(it -> {
- log.debug("Remove {}", it.key());
- DeleteObjectRequest deleteRequest = DeleteObjectRequest.builder()
- .bucket(lambdafinalBucket)
- .key(it.key())
- .build();
- s3.deleteObject(deleteRequest);
- });
- requestBuilder.continuationToken(response.nextContinuationToken());
- } while (response.isTruncated());
+ String finalBucket = bucket;
+
+ ListObjectsRequest listObjectsRequest = ListObjectsRequest.builder()
+ .bucket(finalBucket)
+ .prefix(prefix)
+ .build();
+
+ ListObjectsResponse listObjectsResponse = s3.listObjects(listObjectsRequest);
+
+ listObjectsResponse.contents().forEach(s3Object -> {
+ log.debug("Remove {}", s3Object.key());
+ s3.deleteObject(DeleteObjectRequest.builder()
+ .bucket(finalBucket)
+ .key(s3Object.key())
+ .build());
+ });
}
private static void initS3() {
log.info("Initializing S3");
- if (amazonAccessKeyID == null || amazonAccessKeyID.isEmpty()) {
+ if (Strings.isNullOrEmpty(amazonAccessKeyID)) {
return;
}
- AwsBasicCredentials awsCreds = AwsBasicCredentials.create(amazonAccessKeyID, amazonSecretAccessKey);
-
- S3Client amazonS3 = S3Client.builder()
- .credentialsProvider(StaticCredentialsProvider.create(awsCreds))
- .endpointOverride(URI.create(amazonUrl)) // Set endpoint
- .region(Region.of(amazonRegion)) // Set region
- .build();
+ S3ClientBuilder s3ClientBuilder = S3Client.builder()
+ .credentialsProvider(StaticCredentialsProvider.create(
+ AwsBasicCredentials.create(amazonAccessKeyID, amazonSecretAccessKey)
+ ));
if (buckets.size() > 1) {
log.info("Using {} buckets:{}", buckets.size(), buckets);
}
- if (amazonUrl == null || amazonUrl.isEmpty()) {
+ if (StringUtils.isNullOrEmpty(amazonUrl)) {
amazonUrl = amazonProtocol + amazonDomain;
}
final boolean isRealAmazon = amazonUrl.endsWith(amazonDomain);
+ s3ClientBuilder = s3ClientBuilder
+ .region(Region.of(amazonRegion))
+ .endpointOverride(URI.create(amazonUrl));
- if (!isRealAmazon) {
- amazonMappedUrl = amazonUrl + "/";
- } else {
+ if (isRealAmazon) {
amazonMappedUrl = amazonProtocol + primaryBucket + "." + amazonDomain;
+ } else {
+ amazonMappedUrl = amazonUrl + "/";
+ s3ClientBuilder.serviceConfiguration(S3Configuration.builder()
+ .pathStyleAccessEnabled(true)
+ .build());
}
+ amazonS3 = s3ClientBuilder.build();
-// amazonS3 = amazonS3ClientBuilder.build();
log.info("Amazon mapped URL: " + amazonMappedUrl);
}
@@ -377,9 +381,12 @@ private static void startMinio() {
.endpointOverride(URI.create(minioMappedUrl))
.region(Region.of(minioRegion))
.credentialsProvider(StaticCredentialsProvider.create(
- AwsBasicCredentials.create(minioAccessKeyID, minioSecretAccessKey)))
+ AwsBasicCredentials.create(minioAccessKeyID, minioSecretAccessKey)
+ ))
+ .serviceConfiguration(S3Configuration.builder()
+ .pathStyleAccessEnabled(true)
+ .build())
.build();
-
buckets.forEach(bucket -> minio.createBucket(CreateBucketRequest.builder().bucket(bucket).build()));
}
@@ -408,7 +415,11 @@ private static void startCeph() {
.endpointOverride(URI.create(cephMappedUrl))
.region(Region.of(cephRegion))
.credentialsProvider(StaticCredentialsProvider.create(
- AwsBasicCredentials.create(cephAccessKeyID, cephSecretAccessKey)))
+ AwsBasicCredentials.create(cephAccessKeyID, cephSecretAccessKey)
+ ))
+ .serviceConfiguration(S3Configuration.builder()
+ .pathStyleAccessEnabled(true)
+ .build())
.build();
ceph.createBucket(CreateBucketRequest.builder()