-
-
Notifications
You must be signed in to change notification settings - Fork 1.7k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Browse files
Browse the repository at this point in the history
Co-authored-by: Sean Glover <sean@seanglover.com>
- Loading branch information
Showing
6 changed files
with
239 additions
and
2 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,17 @@ | ||
plugins {
    id 'java'
}

repositories {
    // jcenter() was sunset in 2021 and is read-only/unreliable; every artifact
    // used here is available on Maven Central.
    mavenCentral()
}

dependencies {
    testCompileOnly "org.projectlombok:lombok:1.18.10"
    testAnnotationProcessor "org.projectlombok:lombok:1.18.10"
    // 'testCompile' was deprecated since Gradle 4.10 and removed in Gradle 7;
    // 'testImplementation' is the supported equivalent.
    testImplementation 'org.testcontainers:kafka'
    testImplementation 'org.apache.kafka:kafka-clients:2.3.1'
    testImplementation 'org.assertj:assertj-core:3.14.0'
    testImplementation 'com.google.guava:guava:23.0'
    testImplementation 'org.slf4j:slf4j-simple:1.7.30'
}
102 changes: 102 additions & 0 deletions
102
examples/kafka-cluster/src/test/java/com/example/kafkacluster/KafkaContainerCluster.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,102 @@ | ||
package com.example.kafkacluster; | ||
|
||
import lombok.SneakyThrows; | ||
import org.rnorth.ducttape.unreliables.Unreliables; | ||
import org.testcontainers.containers.Container; | ||
import org.testcontainers.containers.GenericContainer; | ||
import org.testcontainers.containers.Network; | ||
import org.testcontainers.lifecycle.Startable; | ||
import org.testcontainers.lifecycle.Startables; | ||
import org.testcontainers.containers.KafkaContainer; | ||
import org.testcontainers.utility.DockerImageName; | ||
|
||
import java.util.Collection; | ||
import java.util.concurrent.TimeUnit; | ||
import java.util.stream.Collectors; | ||
import java.util.stream.IntStream; | ||
import java.util.stream.Stream; | ||
|
||
import static java.util.concurrent.TimeUnit.SECONDS; | ||
|
||
/** | ||
* Provides an easy way to launch a Kafka cluster with multiple brokers. | ||
*/ | ||
public class KafkaContainerCluster implements Startable { | ||
|
||
private final int brokersNum; | ||
private final Network network; | ||
private final GenericContainer<?> zookeeper; | ||
private final Collection<KafkaContainer> brokers; | ||
|
||
public KafkaContainerCluster(String confluentPlatformVersion, int brokersNum, int internalTopicsRf) { | ||
if (brokersNum < 0) { | ||
throw new IllegalArgumentException("brokersNum '" + brokersNum + "' must be greater than 0"); | ||
} | ||
if (internalTopicsRf < 0 || internalTopicsRf > brokersNum) { | ||
throw new IllegalArgumentException("internalTopicsRf '" + internalTopicsRf + "' must be less than brokersNum and greater than 0"); | ||
} | ||
|
||
this.brokersNum = brokersNum; | ||
this.network = Network.newNetwork(); | ||
|
||
this.zookeeper = new GenericContainer<>(DockerImageName.parse("confluentinc/cp-zookeeper").withTag(confluentPlatformVersion)) | ||
.withNetwork(network) | ||
.withNetworkAliases("zookeeper") | ||
.withEnv("ZOOKEEPER_CLIENT_PORT", String.valueOf(KafkaContainer.ZOOKEEPER_PORT)); | ||
|
||
this.brokers = IntStream | ||
.range(0, this.brokersNum) | ||
.mapToObj(brokerNum -> { | ||
return new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka").withTag(confluentPlatformVersion)) | ||
.withNetwork(this.network) | ||
.withNetworkAliases("broker-" + brokerNum) | ||
.dependsOn(this.zookeeper) | ||
.withExternalZookeeper("zookeeper:" + KafkaContainer.ZOOKEEPER_PORT) | ||
.withEnv("KAFKA_BROKER_ID", brokerNum + "") | ||
.withEnv("KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR", internalTopicsRf + "") | ||
.withEnv("KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS", internalTopicsRf + "") | ||
.withEnv("KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR", internalTopicsRf + "") | ||
.withEnv("KAFKA_TRANSACTION_STATE_LOG_MIN_ISR", internalTopicsRf + ""); | ||
}) | ||
.collect(Collectors.toList()); | ||
} | ||
|
||
public Collection<KafkaContainer> getBrokers() { | ||
return this.brokers; | ||
} | ||
|
||
public String getBootstrapServers() { | ||
return brokers.stream() | ||
.map(KafkaContainer::getBootstrapServers) | ||
.collect(Collectors.joining(",")); | ||
} | ||
|
||
private Stream<GenericContainer<?>> allContainers() { | ||
return Stream.concat( | ||
this.brokers.stream(), | ||
Stream.of(this.zookeeper) | ||
); | ||
} | ||
|
||
@Override | ||
@SneakyThrows | ||
public void start() { | ||
Stream<Startable> startables = this.brokers.stream().map(Startable.class::cast); | ||
Startables.deepStart(startables).get(60, SECONDS); | ||
|
||
Unreliables.retryUntilTrue(30, TimeUnit.SECONDS, () -> { | ||
Container.ExecResult result = this.zookeeper.execInContainer( | ||
"sh", "-c", | ||
"zookeeper-shell zookeeper:" + KafkaContainer.ZOOKEEPER_PORT + " ls /brokers/ids | tail -n 1" | ||
); | ||
String brokers = result.getStdout(); | ||
|
||
return brokers != null && brokers.split(",").length == this.brokersNum; | ||
}); | ||
} | ||
|
||
@Override | ||
public void stop() { | ||
allContainers().parallel().forEach(GenericContainer::stop); | ||
} | ||
} |
97 changes: 97 additions & 0 deletions
97
examples/kafka-cluster/src/test/java/com/example/kafkacluster/KafkaContainerClusterTest.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,97 @@ | ||
package com.example.kafkacluster; | ||
|
||
import com.google.common.collect.ImmutableMap; | ||
import org.apache.kafka.clients.admin.AdminClient; | ||
import org.apache.kafka.clients.admin.AdminClientConfig; | ||
import org.apache.kafka.clients.admin.NewTopic; | ||
import org.apache.kafka.clients.consumer.ConsumerConfig; | ||
import org.apache.kafka.clients.consumer.ConsumerRecord; | ||
import org.apache.kafka.clients.consumer.ConsumerRecords; | ||
import org.apache.kafka.clients.consumer.KafkaConsumer; | ||
import org.apache.kafka.clients.producer.KafkaProducer; | ||
import org.apache.kafka.clients.producer.ProducerConfig; | ||
import org.apache.kafka.clients.producer.ProducerRecord; | ||
import org.apache.kafka.common.serialization.StringDeserializer; | ||
import org.apache.kafka.common.serialization.StringSerializer; | ||
import org.junit.Test; | ||
import org.rnorth.ducttape.unreliables.Unreliables; | ||
|
||
import java.time.Duration; | ||
import java.util.Collection; | ||
import java.util.Collections; | ||
import java.util.UUID; | ||
import java.util.concurrent.TimeUnit; | ||
|
||
import static org.assertj.core.api.Assertions.assertThat; | ||
import static org.assertj.core.api.Assertions.tuple; | ||
|
||
public class KafkaContainerClusterTest { | ||
|
||
@Test | ||
public void testKafkaContainerCluster() throws Exception { | ||
try ( | ||
KafkaContainerCluster cluster = new KafkaContainerCluster("5.2.1", 3, 2) | ||
) { | ||
cluster.start(); | ||
String bootstrapServers = cluster.getBootstrapServers(); | ||
|
||
assertThat(cluster.getBrokers()).hasSize(3); | ||
|
||
testKafkaFunctionality(bootstrapServers, 3, 2); | ||
} | ||
} | ||
|
||
protected void testKafkaFunctionality(String bootstrapServers, int partitions, int rf) throws Exception { | ||
try ( | ||
AdminClient adminClient = AdminClient.create(ImmutableMap.of( | ||
AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers | ||
)); | ||
|
||
KafkaProducer<String, String> producer = new KafkaProducer<>( | ||
ImmutableMap.of( | ||
ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers, | ||
ProducerConfig.CLIENT_ID_CONFIG, UUID.randomUUID().toString() | ||
), | ||
new StringSerializer(), | ||
new StringSerializer() | ||
); | ||
|
||
KafkaConsumer<String, String> consumer = new KafkaConsumer<>( | ||
ImmutableMap.of( | ||
ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers, | ||
ConsumerConfig.GROUP_ID_CONFIG, "tc-" + UUID.randomUUID(), | ||
ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest" | ||
), | ||
new StringDeserializer(), | ||
new StringDeserializer() | ||
); | ||
) { | ||
String topicName = "messages"; | ||
|
||
Collection<NewTopic> topics = Collections.singletonList(new NewTopic(topicName, partitions, (short) rf)); | ||
adminClient.createTopics(topics).all().get(30, TimeUnit.SECONDS); | ||
|
||
consumer.subscribe(Collections.singletonList(topicName)); | ||
|
||
producer.send(new ProducerRecord<>(topicName, "testcontainers", "rulezzz")).get(); | ||
|
||
Unreliables.retryUntilTrue(10, TimeUnit.SECONDS, () -> { | ||
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100)); | ||
|
||
if (records.isEmpty()) { | ||
return false; | ||
} | ||
|
||
assertThat(records) | ||
.hasSize(1) | ||
.extracting(ConsumerRecord::topic, ConsumerRecord::key, ConsumerRecord::value) | ||
.containsExactly(tuple(topicName, "testcontainers", "rulezzz")); | ||
|
||
return true; | ||
}); | ||
|
||
consumer.unsubscribe(); | ||
} | ||
} | ||
|
||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters