Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,12 @@
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java-util</artifactId>
</dependency>
</dependencies>
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>s3</artifactId>
<optional>true</optional>
</dependency>
</dependencies>

<profiles>

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ public interface ZerocodeConstants {

// Protocol identifiers and step-URL prefixes recognised by the step executor.
String KAFKA = "kafka";
// Step-URL prefix for Kafka steps, e.g. "kafka-topic:demo-topic".
String KAFKA_TOPIC = "kafka-topic:";
String S3 = "s3";
// Step-URL prefix for S3 steps; the bucket name follows the colon,
// e.g. "s3-bucket:my-bucket" (the executor strips this prefix).
String S3_BUCKET = "s3-bucket:";
// Result-status strings — presumably used in step/scenario reports; verify against callers.
String OK = "Ok";
String FAILED = "Failed";

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,4 +32,13 @@ public interface ApiServiceExecutor {
*/
String executeKafkaService(String kafkaServers, String kafkaTopic, String methodName, String requestJson, ScenarioExecutionState scenarioExecutionState);

/**
 * Executes an S3 operation (upload, download, or list) against the given bucket
 * and returns the operation result as a JSON string.
 *
 * @param bucketName The S3 bucket name extracted from the step URL (e.g. "s3-bucket:my-bucket" → "my-bucket")
 * @param operation An S3 operation: upload, download, or list
 * @param requestJson A JSON payload with fields matching the operation (key, file, localPath, prefix)
 * @return String The S3 operation result in JSON
 */
String executeS3Service(String bucketName, String operation, String requestJson);

}
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import org.jsmart.zerocode.core.engine.executor.javaapi.JavaMethodExecutor;
import org.jsmart.zerocode.core.engine.preprocessor.ScenarioExecutionState;
import org.jsmart.zerocode.core.kafka.client.BasicKafkaClient;
import org.jsmart.zerocode.core.s3.client.BasicS3Client;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -21,6 +22,9 @@ public class ApiServiceExecutorImpl implements ApiServiceExecutor {
@Inject
private BasicKafkaClient kafkaClient;

@Inject(optional = true)
private BasicS3Client s3Client;

@Inject(optional = true)
@Named("mock.api.port")
private int mockPort;
Expand Down Expand Up @@ -58,4 +62,13 @@ public String executeJavaOperation(String className, String methodName, String r
public String executeKafkaService(String kafkaServers, String kafkaTopic, String operation, String requestJson, ScenarioExecutionState scenarioExecutionState) {
    // Pure delegation: the injected Kafka client handles the actual
    // operation against the given brokers/topic and yields the JSON result.
    final String kafkaResult =
            kafkaClient.execute(kafkaServers, kafkaTopic, operation, requestJson, scenarioExecutionState);
    return kafkaResult;
}

@Override
public String executeS3Service(String bucketName, String operation, String requestJson) {
    // s3Client is injected with optional = true: it stays null when the AWS SDK
    // (and hence the S3 bindings) are absent from the classpath.
    if (s3Client == null) {
        // IllegalStateException is the standard type for "called in an
        // unusable configuration"; it is a RuntimeException subtype, so any
        // caller catching the previous RuntimeException still works.
        throw new IllegalStateException(
                "S3 client is not available. Please add 'software.amazon.awssdk:s3' to your project dependencies.");
    }
    return s3Client.execute(bucketName, operation, requestJson);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import static java.util.Optional.ofNullable;
import static org.apache.commons.collections4.CollectionUtils.isEmpty;
import static org.jsmart.zerocode.core.constants.ZerocodeConstants.KAFKA_TOPIC;
import static org.jsmart.zerocode.core.constants.ZerocodeConstants.S3_BUCKET;

import org.jsmart.zerocode.core.di.provider.CsvParserProvider;
import org.jsmart.zerocode.core.domain.Parameterized;
Expand Down Expand Up @@ -489,6 +490,24 @@ private String executeApi(String logPrefixRelationshipId,
executionResult = apiExecutor.executeKafkaService(kafkaServers, topicName, operationName, resolvedRequestJsonMaskRemoved, scenarioExecutionState);
break;

case S3_CALL:
correlLogger.aRequestBuilder()
.relationshipId(logPrefixRelationshipId)
.requestTimeStamp(requestTimeStamp)
.step(thisStepName)
.url(url)
.method(operationName.toUpperCase())
.id(stepId)
.request(prettyPrintJson(resolvedRequestJsonMaskApplied));

String bucketName = url.substring(S3_BUCKET.length());
String s3Operation = operationName;
if (s3Operation.toUpperCase().startsWith("S3.")) {
s3Operation = s3Operation.substring(3);
}
executionResult = apiExecutor.executeS3Service(bucketName, s3Operation, resolvedRequestJsonMaskRemoved);
break;

case NONE:
correlLogger.aRequestBuilder()
.relationshipId(logPrefixRelationshipId)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,184 @@
package org.jsmart.zerocode.core.s3.client;

import com.google.inject.Inject;
import com.google.inject.name.Named;
import org.jsmart.zerocode.core.s3.download.S3Downloader;
import org.jsmart.zerocode.core.s3.list.S3Lister;
import org.jsmart.zerocode.core.s3.upload.S3Uploader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.AwsSessionCredentials;
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.S3ClientBuilder;

import java.net.URI;

/**
* Central dispatcher for S3 operations in Zerocode test scenarios.
*
* <p>Mirrors the pattern of {@code BasicKafkaClient}. Routes S3 operation names
* (upload, download, list) to the appropriate handler class.
*
* <p>Credential resolution order (as specified in issue #742):
* <ol>
* <li>Environment variables: {@code AWS_ACCESS_KEY_ID}, {@code AWS_SECRET_ACCESS_KEY},
* {@code AWS_SESSION_TOKEN} (optional)</li>
* <li>Properties file: {@code s3.accessKey}, {@code s3.secretKey},
* {@code s3.token} (optional)</li>
* <li>AWS Default Credential Provider Chain (environment, ~/.aws/credentials, IAM roles, etc.)</li>
* </ol>
*
* <p>Optional properties:
* <ul>
* <li>{@code s3.endpoint} – custom S3-compatible endpoint (e.g. MinIO, LocalStack)</li>
* <li>{@code s3.region} – AWS region (defaults to {@code us-east-1})</li>
* </ul>
*
* <p>Step URL format: {@code s3-bucket:<bucket-name>}
*
* <p>Supported operations:
* <ul>
* <li>{@code upload} – upload a local file to S3</li>
* <li>{@code download} – download an S3 object to a local path</li>
* <li>{@code list} – list objects in a bucket (optionally filtered by prefix)</li>
* </ul>
*/
public class BasicS3Client {
    private static final Logger LOGGER = LoggerFactory.getLogger(BasicS3Client.class);

    @Inject
    private S3Uploader uploader;

    @Inject
    private S3Downloader downloader;

    @Inject
    private S3Lister lister;

    // All of the following are optional host-properties; any may be null, in
    // which case resolveCredentials()/buildS3Client() apply their fallbacks.
    @Inject(optional = true)
    @Named("s3.accessKey")
    private String accessKey;

    @Inject(optional = true)
    @Named("s3.secretKey")
    private String secretKey;

    @Inject(optional = true)
    @Named("s3.region")
    private String region;

    @Inject(optional = true)
    @Named("s3.endpoint")
    private String endpoint;

    @Inject(optional = true)
    @Named("s3.token")
    private String sessionToken;

    public BasicS3Client() {
    }

    /**
     * Executes an S3 operation.
     *
     * @param bucketName the S3 bucket name (extracted from step URL after "s3-bucket:")
     * @param operation the operation to perform: upload (or put), download (or get), or list
     * @param requestJson the step request JSON
     * @return JSON result string produced by the matching handler
     * @throws RuntimeException if the operation is unsupported or the handler fails
     */
    public String execute(String bucketName, String operation, String requestJson) {
        LOGGER.debug("S3 operation: bucket={}, operation={}", bucketName, operation);

        // try-with-resources: S3Client is AutoCloseable, so the client is
        // released on every exit path (replaces the manual try/finally close).
        try (S3Client s3Client = buildS3Client()) {
            // Locale.ROOT makes the keyword match locale-independent (the
            // default locale's case mapping, e.g. Turkish dotless-i, would
            // break inputs like "LIST").
            switch (operation.toLowerCase(java.util.Locale.ROOT)) {
                case "upload":
                case "put":
                    return uploader.upload(s3Client, bucketName, requestJson);

                case "download":
                case "get":
                    return downloader.download(s3Client, bucketName, requestJson);

                case "list":
                    return lister.list(s3Client, bucketName, requestJson);

                default:
                    throw new RuntimeException("Unsupported S3 operation: '" + operation +
                            "'. Supported operations: upload, download, list");
            }
        } catch (RuntimeException e) {
            LOGGER.error("S3 operation '{}' failed for bucket '{}': {}", operation, bucketName, e.getMessage());
            throw e;
        }
    }

    /**
     * Builds an S3Client with credential priority:
     * 1) Environment variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN)
     * 2) Properties file (s3.accessKey, s3.secretKey, s3.token)
     * 3) AWS Default Credentials Provider Chain
     *
     * <p>Honours the optional {@code s3.region} (defaults to us-east-1) and
     * {@code s3.endpoint} (for S3-compatible stores such as MinIO/LocalStack).
     */
    S3Client buildS3Client() {
        AwsCredentialsProvider credentialsProvider = resolveCredentials();

        Region awsRegion = (region != null && !region.isEmpty())
                ? Region.of(region)
                : Region.US_EAST_1;

        S3ClientBuilder builder = S3Client.builder()
                .region(awsRegion)
                .credentialsProvider(credentialsProvider);

        if (endpoint != null && !endpoint.isEmpty()) {
            LOGGER.debug("Using custom S3 endpoint: {}", endpoint);
            builder.endpointOverride(URI.create(endpoint));
        }

        return builder.build();
    }

    /**
     * Resolves AWS credentials in the documented priority order; see
     * {@link #buildS3Client()}. Session credentials are used whenever a
     * session token accompanies the key pair.
     */
    private AwsCredentialsProvider resolveCredentials() {
        // 1) Check environment variables first
        String envAccessKey = System.getenv("AWS_ACCESS_KEY_ID");
        String envSecretKey = System.getenv("AWS_SECRET_ACCESS_KEY");
        String envSessionToken = System.getenv("AWS_SESSION_TOKEN");

        if (isNotBlank(envAccessKey) && isNotBlank(envSecretKey)) {
            LOGGER.debug("Using AWS credentials from environment variables");
            if (isNotBlank(envSessionToken)) {
                return StaticCredentialsProvider.create(
                        AwsSessionCredentials.create(envAccessKey, envSecretKey, envSessionToken));
            }
            return StaticCredentialsProvider.create(
                    AwsBasicCredentials.create(envAccessKey, envSecretKey));
        }

        // 2) Check properties file
        if (isNotBlank(accessKey) && isNotBlank(secretKey)) {
            LOGGER.debug("Using AWS credentials from properties file");
            if (isNotBlank(sessionToken)) {
                return StaticCredentialsProvider.create(
                        AwsSessionCredentials.create(accessKey, secretKey, sessionToken));
            }
            return StaticCredentialsProvider.create(
                    AwsBasicCredentials.create(accessKey, secretKey));
        }

        // 3) Fall back to AWS default credential provider chain
        LOGGER.debug("No explicit credentials found; using AWS default credential provider chain");
        return DefaultCredentialsProvider.create();
    }

    // Null-safe blank check (mirrors commons-lang isNotBlank without the dependency).
    private static boolean isNotBlank(String value) {
        return value != null && !value.trim().isEmpty();
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
package org.jsmart.zerocode.core.s3.domain;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;

/**
* Metadata for a single S3 object returned in a list response.
*
* <p>Example JSON:
* <pre>
* {
* "key": "reports/result.json",
* "size": 1024,
* "lastModified": "2024-03-15T10:30:00Z",
* "storageClass": "STANDARD"
* }
* </pre>
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ObjectInfo {

    @JsonProperty("key")
    private String key;

    @JsonProperty("size")
    private Long size;

    @JsonProperty("lastModified")
    private String lastModified;

    @JsonProperty("storageClass")
    private String storageClass;

    // No-arg constructor required by Jackson for deserialization.
    public ObjectInfo() {
    }

    public ObjectInfo(String key, Long size, String lastModified, String storageClass) {
        this.key = key;
        this.size = size;
        this.lastModified = lastModified;
        this.storageClass = storageClass;
    }

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public Long getSize() {
        return size;
    }

    public void setSize(Long size) {
        this.size = size;
    }

    public String getLastModified() {
        return lastModified;
    }

    public void setLastModified(String lastModified) {
        this.lastModified = lastModified;
    }

    public String getStorageClass() {
        return storageClass;
    }

    public void setStorageClass(String storageClass) {
        this.storageClass = storageClass;
    }

    // Value class: equals/hashCode overridden together (Effective Java Item 11)
    // so instances compare by content, e.g. in test assertions and collections.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof ObjectInfo)) {
            return false;
        }
        ObjectInfo that = (ObjectInfo) o;
        return java.util.Objects.equals(key, that.key)
                && java.util.Objects.equals(size, that.size)
                && java.util.Objects.equals(lastModified, that.lastModified)
                && java.util.Objects.equals(storageClass, that.storageClass);
    }

    @Override
    public int hashCode() {
        return java.util.Objects.hash(key, size, lastModified, storageClass);
    }

    @Override
    public String toString() {
        return "ObjectInfo{" +
                "key='" + key + '\'' +
                ", size=" + size +
                ", lastModified='" + lastModified + '\'' +
                ", storageClass='" + storageClass + '\'' +
                '}';
    }
}
Loading
Loading