Skip to content

Commit

Permalink
Removes finally blocks and adds @AfterEach to each IT to stop running…
Browse files Browse the repository at this point in the history
… container (#118)

* Add precondition to ITs that no containers are running

Signed-off-by: see-quick <maros.orsak159@gmail.com>

* remove not-needed filtering

Signed-off-by: see-quick <maros.orsak159@gmail.com>

* also address unnecessary labels

Signed-off-by: see-quick <maros.orsak159@gmail.com>

---------

Signed-off-by: see-quick <maros.orsak159@gmail.com>
  • Loading branch information
see-quick committed Dec 4, 2024
1 parent 1ca6f89 commit 4977c5c
Show file tree
Hide file tree
Showing 2 changed files with 101 additions and 134 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.hamcrest.CoreMatchers;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -53,56 +54,40 @@ public class StrimziKafkaKRaftClusterIT extends AbstractIT {

@Test
void testKafkaClusterStartup() throws InterruptedException, ExecutionException {
try {
setUpKafkaKRaftCluster();
setUpKafkaKRaftCluster();

verifyReadinessOfKRaftCluster();
} finally {
systemUnderTest.stop();
}
verifyReadinessOfKRaftCluster();
}

@Test
void testKafkaClusterStartupWithSharedNetwork() throws InterruptedException, ExecutionException {
try {
systemUnderTest = new StrimziKafkaCluster.StrimziKafkaClusterBuilder()
.withNumberOfBrokers(NUMBER_OF_REPLICAS)
.withSharedNetwork()
.withKraft()
.build();
systemUnderTest.start();

verifyReadinessOfKRaftCluster();
} finally {
systemUnderTest.stop();
}
systemUnderTest = new StrimziKafkaCluster.StrimziKafkaClusterBuilder()
.withNumberOfBrokers(NUMBER_OF_REPLICAS)
.withSharedNetwork()
.withKraft()
.build();
systemUnderTest.start();

verifyReadinessOfKRaftCluster();
}

@Test
void testKafkaClusterFunctionality() throws ExecutionException, InterruptedException, TimeoutException {
setUpKafkaKRaftCluster();

try {
verifyFunctionalityOfKafkaCluster();
} finally {
systemUnderTest.stop();
}
verifyFunctionalityOfKafkaCluster();
}

@Test
void testKafkaClusterWithSharedNetworkFunctionality() throws ExecutionException, InterruptedException, TimeoutException {
try {
systemUnderTest = new StrimziKafkaCluster.StrimziKafkaClusterBuilder()
.withNumberOfBrokers(NUMBER_OF_REPLICAS)
.withSharedNetwork()
.withKraft()
.build();
systemUnderTest.start();

verifyFunctionalityOfKafkaCluster();
} finally {
systemUnderTest.stop();
}
systemUnderTest = new StrimziKafkaCluster.StrimziKafkaClusterBuilder()
.withNumberOfBrokers(NUMBER_OF_REPLICAS)
.withSharedNetwork()
.withKraft()
.build();
systemUnderTest.start();

verifyFunctionalityOfKafkaCluster();
}

@Test
Expand All @@ -111,28 +96,22 @@ void testStartClusterWithProxyContainer() {
DockerImageName.parse("ghcr.io/shopify/toxiproxy:2.11.0")
.asCompatibleSubstituteFor("shopify/toxiproxy"));

try {
systemUnderTest = new StrimziKafkaCluster.StrimziKafkaClusterBuilder()
.withNumberOfBrokers(NUMBER_OF_REPLICAS)
.withProxyContainer(proxyContainer)
.withKraft()
.build();

systemUnderTest.start();
List<String> bootstrapUrls = new ArrayList<>();
for (KafkaContainer kafkaContainer : systemUnderTest.getBrokers()) {
Proxy proxy = ((StrimziKafkaContainer) kafkaContainer).getProxy();
assertThat(proxy, notNullValue());
bootstrapUrls.add(kafkaContainer.getBootstrapServers());
}

assertThat(systemUnderTest.getBootstrapServers(),
is(String.join(",", bootstrapUrls)));
} finally {
if (systemUnderTest != null) {
systemUnderTest.stop();
}
systemUnderTest = new StrimziKafkaCluster.StrimziKafkaClusterBuilder()
.withNumberOfBrokers(NUMBER_OF_REPLICAS)
.withProxyContainer(proxyContainer)
.withKraft()
.build();

systemUnderTest.start();
List<String> bootstrapUrls = new ArrayList<>();
for (KafkaContainer kafkaContainer : systemUnderTest.getBrokers()) {
Proxy proxy = ((StrimziKafkaContainer) kafkaContainer).getProxy();
assertThat(proxy, notNullValue());
bootstrapUrls.add(kafkaContainer.getBootstrapServers());
}

assertThat(systemUnderTest.getBootstrapServers(),
is(String.join(",", bootstrapUrls)));
}

private void setUpKafkaKRaftCluster() {
Expand Down Expand Up @@ -215,4 +194,11 @@ private void verifyFunctionalityOfKafkaCluster() throws ExecutionException, Inte
});
}
}

@AfterEach
void afterEach() {
if (this.systemUnderTest != null) {
this.systemUnderTest.stop();
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -49,100 +49,83 @@ public class StrimziKafkaKraftContainerIT extends AbstractIT {
void testStartContainerWithEmptyConfiguration(final String imageName, final String kafkaVersion) throws ExecutionException, InterruptedException, TimeoutException {
supportsKraftMode(imageName);

try {
systemUnderTest = new StrimziKafkaContainer(imageName)
.withBrokerId(1)
.withKraft()
.waitForRunning();

systemUnderTest.start();
assertThat(systemUnderTest.getClusterId(), notNullValue());

String logsFromKafka = systemUnderTest.getLogs();
if (isLessThanKafka350(kafkaVersion)) {
assertThat(logsFromKafka, containsString("RaftManager nodeId=1"));
} else {
assertThat(logsFromKafka, containsString("ControllerServer id=1"));
assertThat(logsFromKafka, containsString("SocketServer listenerType=CONTROLLER, nodeId=1"));
}

verify();

assertThat(systemUnderTest.getBootstrapServers(), is("PLAINTEXT://" +
systemUnderTest.getHost() + ":" + systemUnderTest.getMappedPort(9092)));
} finally {
systemUnderTest.stop();
systemUnderTest = new StrimziKafkaContainer(imageName)
.withBrokerId(1)
.withKraft()
.waitForRunning();

systemUnderTest.start();
assertThat(systemUnderTest.getClusterId(), notNullValue());

String logsFromKafka = systemUnderTest.getLogs();
if (isLessThanKafka350(kafkaVersion)) {
assertThat(logsFromKafka, containsString("RaftManager nodeId=1"));
} else {
assertThat(logsFromKafka, containsString("ControllerServer id=1"));
assertThat(logsFromKafka, containsString("SocketServer listenerType=CONTROLLER, nodeId=1"));
}

verify();

assertThat(systemUnderTest.getBootstrapServers(), is("PLAINTEXT://" +
systemUnderTest.getHost() + ":" + systemUnderTest.getMappedPort(9092)));
}

@ParameterizedTest(name = "testStartContainerWithSomeConfiguration-{0}")
@MethodSource("retrieveKafkaVersionsFile")
void testStartContainerWithSomeConfiguration(final String imageName, final String kafkaVersion) throws ExecutionException, InterruptedException, TimeoutException {
supportsKraftMode(imageName);
try {
Map<String, String> kafkaConfiguration = new HashMap<>();

kafkaConfiguration.put("log.cleaner.enable", "false");
kafkaConfiguration.put("log.cleaner.backoff.ms", "1000");
kafkaConfiguration.put("ssl.enabled.protocols", "TLSv1");
kafkaConfiguration.put("log.index.interval.bytes", "2048");

systemUnderTest = new StrimziKafkaContainer(imageName)
.withBrokerId(1)
.withKraft()
.withKafkaConfigurationMap(kafkaConfiguration)
.waitForRunning();

systemUnderTest.start();

String logsFromKafka = systemUnderTest.getLogs();

if (isLessThanKafka350(kafkaVersion)) {
assertThat(logsFromKafka, containsString("RaftManager nodeId=1"));
} else {
assertThat(logsFromKafka, containsString("ControllerServer id=1"));
assertThat(logsFromKafka, containsString("SocketServer listenerType=CONTROLLER, nodeId=1"));
}
assertThat(logsFromKafka, containsString("log.cleaner.enable = false"));
assertThat(logsFromKafka, containsString("log.cleaner.backoff.ms = 1000"));
assertThat(logsFromKafka, containsString("ssl.enabled.protocols = [TLSv1]"));
assertThat(logsFromKafka, containsString("log.index.interval.bytes = 2048"));

verify();
} finally {
systemUnderTest.stop();

Map<String, String> kafkaConfiguration = new HashMap<>();

kafkaConfiguration.put("log.cleaner.enable", "false");
kafkaConfiguration.put("log.cleaner.backoff.ms", "1000");
kafkaConfiguration.put("ssl.enabled.protocols", "TLSv1");
kafkaConfiguration.put("log.index.interval.bytes", "2048");

systemUnderTest = new StrimziKafkaContainer(imageName)
.withBrokerId(1)
.withKraft()
.withKafkaConfigurationMap(kafkaConfiguration)
.waitForRunning();

systemUnderTest.start();

String logsFromKafka = systemUnderTest.getLogs();

if (isLessThanKafka350(kafkaVersion)) {
assertThat(logsFromKafka, containsString("RaftManager nodeId=1"));
} else {
assertThat(logsFromKafka, containsString("ControllerServer id=1"));
assertThat(logsFromKafka, containsString("SocketServer listenerType=CONTROLLER, nodeId=1"));
}
assertThat(logsFromKafka, containsString("log.cleaner.enable = false"));
assertThat(logsFromKafka, containsString("log.cleaner.backoff.ms = 1000"));
assertThat(logsFromKafka, containsString("ssl.enabled.protocols = [TLSv1]"));
assertThat(logsFromKafka, containsString("log.index.interval.bytes = 2048"));

verify();
}

@Test
void testUnsupportedKRaftUsingKafkaVersion() {
try {
systemUnderTest = new StrimziKafkaContainer()
.withKafkaVersion("2.8.2")
.withBrokerId(1)
.withKraft()
.waitForRunning();

assertThrows(UnsupportedKraftKafkaVersionException.class, () -> systemUnderTest.start());
} finally {
systemUnderTest.stop();
}
systemUnderTest = new StrimziKafkaContainer()
.withKafkaVersion("2.8.2")
.withBrokerId(1)
.withKraft()
.waitForRunning();

assertThrows(UnsupportedKraftKafkaVersionException.class, () -> systemUnderTest.start());
}

@Test
void testUnsupportedKRaftUsingImageName() {
try {
systemUnderTest = new StrimziKafkaContainer("quay.io/strimzi-test-container/test-container:latest-kafka-2.8.2")
.withBrokerId(1)
.withKraft()
.waitForRunning();

assertThrows(UnsupportedKraftKafkaVersionException.class, () -> systemUnderTest.start());
} finally {
systemUnderTest.stop();
systemUnderTest = new StrimziKafkaContainer("quay.io/strimzi-test-container/test-container:latest-kafka-2.8.2")
.withBrokerId(1)
.withKraft()
.waitForRunning();

}
assertThrows(UnsupportedKraftKafkaVersionException.class, () -> systemUnderTest.start());
}

@Test
Expand All @@ -163,8 +146,6 @@ void testWithKafkaLog() {
assertThat(systemUnderTest.getLogs(), CoreMatchers.containsString("INFO"));
assertThat(systemUnderTest.getLogs(), CoreMatchers.containsString("DEBUG"));
assertThat(systemUnderTest.getLogs(), CoreMatchers.containsString("TRACE"));

systemUnderTest.stop();
}

private void verify() throws InterruptedException, ExecutionException, TimeoutException {
Expand Down

0 comments on commit 4977c5c

Please sign in to comment.