Skip to content

Commit

Permalink
Merge pull request #527 from FedeAlonso/feat/e2e_test_for_request_com…
Browse files Browse the repository at this point in the history
…mand_cli

feat: new e2e automated test RHOAS (Kafka instances)
  • Loading branch information
FedeAlonso authored Apr 18, 2023
2 parents 842d3c8 + f881eef commit 1e8a4d9
Show file tree
Hide file tree
Showing 4 changed files with 86 additions and 40 deletions.
1 change: 0 additions & 1 deletion src/main/java/io/managed/services/test/Environment.java
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,6 @@ public class Environment {
* Setup constants from env variables or set default
*/
public static final String CLOUD_PROVIDER = getOrDefault(CLOUD_PROVIDER_ENV, "aws");

public static final String SUITE_ROOT = System.getProperty("user.dir");
public static final Path LOG_DIR = getOrDefault(LOG_DIR_ENV, Paths::get, Paths.get(SUITE_ROOT, "target", "logs")).resolve("test-run-" + DATE_FORMAT.format(LocalDateTime.now()));

Expand Down
70 changes: 41 additions & 29 deletions src/main/java/io/managed/services/test/cli/CLI.java
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import lombok.extern.log4j.Log4j2;
import org.openapitools.jackson.nullable.JsonNullableModule;
import org.testng.Assert;

import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
Expand All @@ -34,6 +35,7 @@
import java.util.Locale;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;

import static java.time.Duration.ofMinutes;
import static lombok.Lombok.sneakyThrow;

Expand All @@ -44,7 +46,7 @@ public class CLI {
private static final Duration DEFAULT_TIMEOUT = ofMinutes(3);

private static final String CLUSTER_CAPACITY_EXHAUSTED_CODE = "KAFKAS-MGMT-24";

private static final Locale LOCALE_EN = Locale.ENGLISH;

private final String workdir;
Expand Down Expand Up @@ -161,6 +163,15 @@ public KafkaRequestList listKafka() throws CliGenericException {
.getObjectValue(KafkaRequestList::createFromDiscriminatorValue);
}

/**
 * Transfers ownership of a Kafka instance via {@code rhoas kafka update --owner}.
 * The {@code -y} flag auto-confirms the interactive prompt; the call is retried
 * through {@code retry(...)} on transient CLI failures.
 *
 * NOTE(review): method name should be lowerCamelCase ({@code updateKafkaOwner});
 * kept as-is because existing tests call it under this name.
 *
 * @param userName     username of the new owner
 * @param instanceName name of the Kafka instance to update
 * @throws CliGenericException if the CLI command fails after retries
 */
public void UpdateKafkaOwner(String userName, String instanceName) throws CliGenericException {
    retry(() -> exec("kafka", "update", "--owner", userName, "--name", instanceName, "-y"));
}

/**
 * Enables or disables client re-authentication on a Kafka instance via
 * {@code rhoas kafka update --reauthentication}. The status string is lowercased
 * with an English locale so the flag value is locale-independent ("true"/"false");
 * {@code -y} auto-confirms the prompt.
 *
 * NOTE(review): method name should be lowerCamelCase ({@code updateKafkaReauthentication});
 * kept as-is because existing tests call it under this name.
 *
 * @param newStatus    desired re-authentication state, e.g. "true" or "false" (case-insensitive)
 * @param instanceName name of the Kafka instance to update
 * @throws CliGenericException if the CLI command fails after retries
 */
public void UpdateKafkaReauthentication(String newStatus, String instanceName) throws CliGenericException {
    retry(() -> exec("kafka", "update", "--reauthentication", newStatus.toLowerCase(LOCALE_EN), "--name",
        instanceName, "-y"));
}

public KafkaRequestList searchKafkaByName(String name) throws CliGenericException {
return retry(() -> exec("kafka", "list", "--search", name, "-o", "json"))
.parseNodeFromProcessOutput()
Expand All @@ -175,8 +186,8 @@ public ServiceAccountData describeServiceAccount(String id) throws CliGenericExc

public List<ServiceAccountData> listServiceAccount() throws CliGenericException {
return retry(() -> exec("service-account", "list", "-o", "json"))
.parseNodeFromProcessOutput()
.getCollectionOfObjectValues(ServiceAccountData::createFromDiscriminatorValue);
.parseNodeFromProcessOutput()
.getCollectionOfObjectValues(ServiceAccountData::createFromDiscriminatorValue);
}

public void deleteServiceAccount(String id) throws CliGenericException {
Expand Down Expand Up @@ -252,6 +263,7 @@ private ACLEntityType(String name, String flag) {
this.flag = flag;
}
}

//// kafka acl create
public void createAcl(ACLEntityType aclEntityType, String entityIdentificator, AclOperation operation, AclPermissionType permission, String topic) throws CliGenericException {
retry(() -> exec("kafka", "acl", "create", "-y", aclEntityType.flag, entityIdentificator, "--topic", topic, "--permission", permission.toString().toLowerCase(LOCALE_EN), "--operation", operation.toString().toLowerCase(LOCALE_EN)));
Expand Down Expand Up @@ -298,7 +310,7 @@ public Registry createServiceRegistry(String name) throws CliGenericException {
.getObjectValue(Registry::createFromDiscriminatorValue);
}

public Registry describeServiceRegistry(String id) throws CliGenericException {
public Registry describeServiceRegistry(String id) throws CliGenericException {
return retry(() -> exec("service-registry", "describe", "--id", id))
.parseNodeFromProcessOutput()
.getObjectValue(Registry::createFromDiscriminatorValue);
Expand Down Expand Up @@ -326,43 +338,43 @@ public void deleteServiceRegistry(String name) throws CliGenericException {

public List<Record> consumeRecords(String topicName, String instanceId, int partition, int offset) throws CliGenericException, JsonProcessingException {
List<String> cmd = List.of("kafka", "topic", "consume",
"--instance-id", instanceId,
"--name", topicName,
"--offset", Integer.toString(offset),
"--partition", Integer.toString(partition),
"--format", "json"
"--instance-id", instanceId,
"--name", topicName,
"--offset", Integer.toString(offset),
"--partition", Integer.toString(partition),
"--format", "json"
);

return consumeRecords(cmd);
}

public List<Record> consumeRecords(String topicName, String instanceId, int partition) throws CliGenericException, JsonProcessingException {
List<String> cmd = List.of("kafka", "topic", "consume",
"--instance-id", instanceId,
"--name", topicName,
"--partition", Integer.toString(partition),
"--format", "json"
"--instance-id", instanceId,
"--name", topicName,
"--partition", Integer.toString(partition),
"--format", "json"
);

return consumeRecords(cmd);
}

public Record produceRecords(String topicName, String instanceId, String message, int partition, String recordKey)
throws InterruptedException, ExecutionException, IOException {
throws InterruptedException, ExecutionException, IOException {
List<String> cmd = List.of("kafka", "topic", "produce",
"--instance-id", instanceId,
"--name", topicName,
"--partition", Integer.toString(partition),
"--key", recordKey
"--instance-id", instanceId,
"--name", topicName,
"--partition", Integer.toString(partition),
"--key", recordKey
);
return produceRecords(message, cmd);
}

public Record produceRecords(String topicName, String instanceId, String message)
throws IOException, ExecutionException, InterruptedException {
throws IOException, ExecutionException, InterruptedException {
List<String> cmd = List.of("kafka", "topic", "produce",
"--instance-id", instanceId,
"--name", topicName
"--instance-id", instanceId,
"--name", topicName
);
return produceRecords(message, cmd);
}
Expand Down Expand Up @@ -395,20 +407,20 @@ private List<Record> consumeRecords(List<String> cmd) throws CliGenericException
if (output.isEmpty()) {
return new ArrayList<Record>();
}

// specific separated JSON objects \n}\n which is separator of multiple inline jsons
String[] lines = output.split("\n\\}\n");
// append back '}' (i.e. curly bracket) so JSON objects will not miss this end symbol
List<String> messagesWithFixedFormat = Arrays.stream(lines).map(in -> in + "}").collect(Collectors.toList());
List<String> messagesWithFixedFormat = Arrays.stream(lines).map(in -> in + "}").collect(Collectors.toList());

var objectMapper = new ObjectMapper()
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.registerModule(new JavaTimeModule())
.registerModule(new JsonNullableModule());
var objectMapper = new ObjectMapper()
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.registerModule(new JavaTimeModule())
.registerModule(new JsonNullableModule());
List<Record> records = new ArrayList<>();

// each object is read as separated Record
for (String line: messagesWithFixedFormat) {
for (String line : messagesWithFixedFormat) {
Record record = objectMapper.readValue(line, Record.class);
records.add(record);
}
Expand All @@ -421,7 +433,7 @@ private <T, E extends Throwable> T retry(ThrowingSupplier<T, E> call) throws E {

private <T, E extends Throwable> T retryKafkaCreation(ThrowingSupplier<T, E> call) throws E {
return RetryUtils.retry(
1, null, call, CLI::retryConditionKafkaCreation, 12, Duration.ofSeconds(10));
1, null, call, CLI::retryConditionKafkaCreation, 12, Duration.ofSeconds(10));
}

private static boolean retryConditionKafkaCreation(Throwable t) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -308,8 +308,8 @@ public void testUserCannotAccessSystemTopics() {
LOGGER.info("Test user cannot access '__consumer_offsets' and '__transaction_state'");
var instanceApiTopics = primaryKafkaInstanceAPI.getTopics();
var o = instanceApiTopics.getItems().stream()
.filter(k -> "__consumer_offsets".equals(k.getName()) || "__transaction_state".equals(k.getName()))
.findAny();
.filter(k -> "__consumer_offsets".equals(k.getName()) || "__transaction_state".equals(k.getName()))
.findAny();
assertTrue(o.isEmpty());
}

Expand Down Expand Up @@ -682,7 +682,7 @@ public void testDefaultSecondaryUserCanNotDeleteACLs() {
// TODO investigate whether an ACL should be able to be created even if it provides null values
LOGGER.info("Test that the secondary user by default can not delete ACLs");
assertThrows(ApiForbiddenException.class, () ->
secondaryKafkaInstanceAPI.deleteAcls(AclResourceType.TOPIC, "abc", AclPatternType.LITERAL, "cde", AclOperation.ALL, AclPermissionType.ALLOW));
secondaryKafkaInstanceAPI.deleteAcls(AclResourceType.TOPIC, "abc", AclPatternType.LITERAL, "cde", AclOperation.ALL, AclPermissionType.ALLOW));
}

@Test(priority = 6, groups = TestGroups.INTEGRATION)
Expand Down Expand Up @@ -757,7 +757,7 @@ public void testDefaultAdminUserCanNotDeleteACLs() {

LOGGER.info("Test that the admin user can not delete ACLs");
assertThrows(ApiForbiddenException.class, () -> adminKafkaInstanceAPI.deleteAcls(
AclResourceType.TOPIC, "xx", AclPatternType.LITERAL, "123", AclOperation.READ, AclPermissionType.ALLOW));
AclResourceType.TOPIC, "xx", AclPatternType.LITERAL, "123", AclOperation.READ, AclPermissionType.ALLOW));
}

@SneakyThrows
Expand Down Expand Up @@ -879,7 +879,7 @@ public void testPrimaryUserCanNotDeleteTheKafkaInstance() {

@Test(priority = 11, dependsOnMethods = "testAdminUserCanChangeTheKafkaInstanceOwner", groups = TestGroups.INTEGRATION)
public void testAlienUserCanNotDeleteTheKafkaInstance() {
LOGGER.info("Test that the aline user can not delete the Kafka instance");
LOGGER.info("Test that the alien user can not delete the Kafka instance");
assertThrows(ApiNotFoundException.class, () -> alienAPI.kafkaMgmt().deleteKafkaById(kafka.getId(), true));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,19 +22,22 @@
import org.apache.logging.log4j.Logger;
import org.testng.SkipException;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import java.io.File;
import java.util.List;
import java.util.Objects;
import java.lang.reflect.Method;

import static io.managed.services.test.TestUtils.bwait;
import static io.managed.services.test.client.kafka.KafkaMessagingUtils.testTopic;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertThrows;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.assertFalse;


/**
Expand Down Expand Up @@ -86,15 +89,28 @@ public void bootstrap() {
assertNotNull(Environment.PRIMARY_USERNAME, "the PRIMARY_USERNAME env is null");
assertNotNull(Environment.PRIMARY_PASSWORD, "the PRIMARY_PASSWORD env is null");
assertNotNull(Environment.PRIMARY_OFFLINE_TOKEN, "the PRIMARY_OFFLINE_TOKEN env is null");
assertNotNull(Environment.SECONDARY_USERNAME, "the SECONDARY_USERNAME env is null");
}

/**
 * Per-test-method cleanup hook. Runs after every test method (alwaysRun, so it
 * also fires when the method failed or was skipped).
 *
 * After {@code testUpdateKafkaOwner} has handed the instance to the secondary
 * user, ownership is restored to the PRIMARY user so later tests (and the
 * class-level cleanup, which authenticates as other users) still control the
 * instance. The admin offline token is used because only an admin — not the
 * new owner's victim — is guaranteed permission to reassign ownership.
 *
 * @param method the test method that just finished, injected by TestNG
 */
@AfterMethod(alwaysRun = true)
@SneakyThrows
public void cleanMethod(Method method) {
    // Restore the owner to PRIMARY, after testing that changing owners works.
    if (method.getName().equals("testUpdateKafkaOwner")) {
        var adminOfflineToken = Environment.ADMIN_OFFLINE_TOKEN;
        var kafkaMgmtApi = KafkaMgmtApiUtils.kafkaMgmtApi(Environment.OPENSHIFT_API_URI, adminOfflineToken);
        KafkaMgmtApiUtils.changeKafkaInstanceOwner(kafkaMgmtApi, kafka, Environment.PRIMARY_USERNAME);
    }
}

@AfterClass(alwaysRun = true)
@SneakyThrows
public void clean() {

var offlineToken = Environment.PRIMARY_OFFLINE_TOKEN;
var adminOfflineToken = Environment.ADMIN_OFFLINE_TOKEN;

var kafkaMgmtApi = KafkaMgmtApiUtils.kafkaMgmtApi(Environment.OPENSHIFT_API_URI, offlineToken);
var kafkaMgmtApi = KafkaMgmtApiUtils.kafkaMgmtApi(Environment.OPENSHIFT_API_URI, adminOfflineToken);
var securityMgmtApi = SecurityMgmtAPIUtils.securityMgmtApi(Environment.OPENSHIFT_API_URI, offlineToken);
var kafkaInstanceApi = KafkaInstanceApiUtils.kafkaInstanceApi(Environment.OPENSHIFT_API_URI, offlineToken);

Expand All @@ -121,7 +137,7 @@ public void clean() {

return;
}

try {
KafkaMgmtApiUtils.deleteKafkaByNameIfExists(kafkaMgmtApi, KAFKA_INSTANCE_NAME);
} catch (Throwable t) {
Expand Down Expand Up @@ -170,7 +186,7 @@ public void testDownloadCLI() {
@Test(dependsOnMethods = "testDownloadCLI")
@SneakyThrows
public void testLogin() {

LOGGER.info("login the CLI");
CLIUtils.login(vertx, cli, Environment.PRIMARY_USERNAME, Environment.PRIMARY_PASSWORD).get();

Expand All @@ -181,7 +197,7 @@ public void testLogin() {
@Test(dependsOnMethods = "testLogin")
@SneakyThrows
public void testCreateServiceAccount() {

LOGGER.info("create a service account");
serviceAccountSecret = CLIUtils.createServiceAccount(cli, SERVICE_ACCOUNT_NAME);

Expand All @@ -206,7 +222,7 @@ public void testDescribeServiceAccount() {

assertEquals(sa.getName(), SERVICE_ACCOUNT_NAME);
}

@Test(dependsOnMethods = "testLogin")
@SneakyThrows
public void testApplyKafkaInstance() {
Expand Down Expand Up @@ -517,6 +533,24 @@ public void testDeleteServiceAccount() {
() -> cli.describeServiceAccount(serviceAccount.getClientId()));
}

/**
 * Verifies that the CLI can transfer ownership of the Kafka instance to the
 * secondary user. Ownership is restored to the primary user afterwards by
 * the per-method cleanup hook.
 */
@Test(dependsOnMethods = "testApplyKafkaInstance", enabled = true)
@SneakyThrows
public void testUpdateKafkaOwner() {
    cli.UpdateKafkaOwner(Environment.SECONDARY_USERNAME, KAFKA_INSTANCE_NAME);

    // Re-read the instance to confirm the owner change was actually applied.
    var k = cli.describeKafkaByName(KAFKA_INSTANCE_NAME);
    LOGGER.debug(k);
    // TestNG Assert.assertEquals takes (actual, expected) — actual value first,
    // so failure messages report "expected [secondary] but found [...]" correctly.
    assertEquals(k.getOwner(), Environment.SECONDARY_USERNAME);
}

/**
 * Verifies that the CLI can disable client re-authentication on the Kafka
 * instance: sets the flag to "false", then re-describes the instance and
 * asserts the setting is reported as disabled.
 */
@Test(dependsOnMethods = "testApplyKafkaInstance", enabled = true)
@SneakyThrows
public void testUpdateKafkaReauthentication() {
    // Turn re-authentication off via the CLI.
    cli.UpdateKafkaReauthentication("false", KAFKA_INSTANCE_NAME);

    // Fetch the instance again and confirm the change took effect.
    var updatedInstance = cli.describeKafkaByName(KAFKA_INSTANCE_NAME);
    LOGGER.debug(updatedInstance);
    assertFalse(updatedInstance.getReauthenticationEnabled());
}

@Test(dependsOnMethods = "testApplyKafkaInstance", priority = 3, enabled = true)
@SneakyThrows
public void testDeleteKafkaInstance() {
Expand Down Expand Up @@ -546,3 +580,4 @@ public void testLogout() {
assertThrows(CliGenericException.class, () -> cli.listKafka()); // unable to run the same command after logout
}
}

0 comments on commit 1e8a4d9

Please sign in to comment.