Use JUnit5 #691

Merged · 18 commits · Jan 16, 2019
124 changes: 61 additions & 63 deletions build.sbt
@@ -7,68 +7,17 @@ val akkaVersion = "2.5.19"
val kafkaVersion = "2.1.0"
val kafkaVersionForDocs = "21"
val scalatestVersion = "3.0.5"
val junit4Version = "4.12"
val slf4jVersion = "1.7.25"
val kafkaClients = "org.apache.kafka" % "kafka-clients" % kafkaVersion

val coreDependencies = Seq(
"com.typesafe.akka" %% "akka-stream" % akkaVersion,
kafkaClients,
)

val testkitDependencies = Seq(
"com.typesafe.akka" %% "akka-testkit" % akkaVersion,
"com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion,
"net.manub" %% "scalatest-embedded-kafka" % "2.0.0" exclude ("log4j", "log4j"),
"org.apache.commons" % "commons-compress" % "1.18", // embedded Kafka pulls in Avro which pulls in commons-compress 1.8.1
"org.scalatest" %% "scalatest" % scalatestVersion % Provided,
"junit" % "junit" % junit4Version % Provided,
"org.apache.kafka" %% "kafka" % kafkaVersion exclude ("org.slf4j", "slf4j-log4j12")
)

val confluentAvroSerializerVersion = "5.0.1"

val testDependencies = Seq(
"io.confluent" % "kafka-avro-serializer" % confluentAvroSerializerVersion % Test,
// See https://github.com/sbt/sbt/issues/3618#issuecomment-448951808
"javax.ws.rs" % "javax.ws.rs-api" % "2.1" artifacts Artifact("javax.ws.rs-api", "jar", "jar"),
"net.manub" %% "scalatest-embedded-schema-registry" % "2.0.0" % Test exclude ("log4j", "log4j") exclude ("org.slf4j", "slf4j-log4j12"),
"org.apache.commons" % "commons-compress" % "1.18", // embedded Kafka pulls in Avro, which pulls in commons-compress 1.8.1, see testing.md
"org.scalatest" %% "scalatest" % scalatestVersion % Test,
"io.spray" %% "spray-json" % "1.3.5" % Test,
"com.fasterxml.jackson.core" % "jackson-databind" % "2.9.7" % Test, // ApacheV2
"com.novocode" % "junit-interface" % "0.11" % Test,
"junit" % "junit" % junit4Version % Test,
"com.typesafe.akka" %% "akka-slf4j" % akkaVersion % Test,
"ch.qos.logback" % "logback-classic" % "1.2.3" % Test,
"org.slf4j" % "log4j-over-slf4j" % slf4jVersion % Test,
// Schema registry uses Glassfish which uses java.util.logging
"org.slf4j" % "jul-to-slf4j" % slf4jVersion % Test,
"org.mockito" % "mockito-core" % "2.23.4" % Test
)

val integrationTestDependencies = Seq(
"com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion % IntegrationTest,
"org.scalatest" %% "scalatest" % scalatestVersion % IntegrationTest,
"com.spotify" % "docker-client" % "8.11.7" % IntegrationTest,
"com.typesafe.akka" %% "akka-slf4j" % akkaVersion % IntegrationTest,
"ch.qos.logback" % "logback-classic" % "1.2.3" % IntegrationTest,
"org.slf4j" % "log4j-over-slf4j" % "1.7.25" % IntegrationTest
)

val benchmarkDependencies = Seq(
"com.typesafe.scala-logging" %% "scala-logging" % "3.9.0",
"io.dropwizard.metrics" % "metrics-core" % "3.2.6",
"ch.qos.logback" % "logback-classic" % "1.2.3",
"org.slf4j" % "log4j-over-slf4j" % "1.7.25",
"com.typesafe.akka" %% "akka-slf4j" % akkaVersion % "it",
"com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion % "it",
"org.scalatest" %% "scalatest" % scalatestVersion % "it"
)

val kafkaScale = settingKey[Int]("Number of kafka docker containers")

resolvers in ThisBuild ++= Seq(Resolver.bintrayRepo("manub", "maven"))
resolvers in ThisBuild ++= Seq(
// for Embedded Kafka
Resolver.bintrayRepo("manub", "maven"),
// for Jupiter interface (JUnit 5)
Resolver.jcenterRepo
)

val commonSettings = Seq(
organization := "com.typesafe.akka",
@@ -114,11 +63,13 @@ val commonSettings = Seq(
"akka.pattern" // for some reason Scaladoc creates this
),
// show full stack traces and test case durations
testOptions += Tests.Argument("-oDF"),
testOptions += Tests.Argument(TestFrameworks.ScalaTest, "-oDF"),
// https://github.com/maichler/sbt-jupiter-interface#framework-options
// -a Show stack traces and exception class name for AssertionErrors.
// -v Log "test run started" / "test started" / "test run finished" events on log level "info" instead of "debug".
// -q Suppress stdout for successful tests.
testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v", "-q"),
// -s Try to decode Scala names in stack traces and test names.
testOptions += Tests.Argument(jupiterTestFramework, "-a", "-v", "-q", "-s"),
scalafmtOnCompile := true,
headerLicense := Some(
HeaderLicense.Custom(
Expand Down Expand Up @@ -176,7 +127,10 @@ lazy val core = project
.settings(
name := "akka-stream-kafka",
AutomaticModuleName.settings("akka.stream.alpakka.kafka"),
libraryDependencies ++= coreDependencies,
libraryDependencies ++= Seq(
"com.typesafe.akka" %% "akka-stream" % akkaVersion,
"org.apache.kafka" % "kafka-clients" % kafkaVersion,
),
mimaPreviousArtifacts := Set(
organization.value %% name.value % previousStableVersion.value
.getOrElse(throw new Error("Unable to determine previous version"))
@@ -191,7 +145,15 @@ lazy val testkit = project
.settings(
name := "akka-stream-kafka-testkit",
AutomaticModuleName.settings("akka.stream.alpakka.kafka.testkit"),
libraryDependencies ++= testkitDependencies,
libraryDependencies ++= Seq(
"com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion,
"net.manub" %% "scalatest-embedded-kafka" % "2.0.0" exclude ("log4j", "log4j"),
"org.apache.commons" % "commons-compress" % "1.18", // embedded Kafka pulls in Avro which pulls in commons-compress 1.8.1
"org.scalatest" %% "scalatest" % scalatestVersion % Provided,
"junit" % "junit" % "4.12" % Provided,
"org.junit.jupiter" % "junit-jupiter-api" % JupiterKeys.junitJupiterVersion.value % Provided,
"org.apache.kafka" %% "kafka" % kafkaVersion exclude ("org.slf4j", "slf4j-log4j12")
),
mimaPreviousArtifacts := Set(
organization.value %% name.value % previousStableVersion.value
.getOrElse(throw new Error("Unable to determine previous version"))
@@ -208,7 +170,35 @@ lazy val tests = project
.settings(automateHeaderSettings(IntegrationTest))
.settings(
name := "akka-stream-kafka-tests",
libraryDependencies ++= testDependencies ++ integrationTestDependencies,
libraryDependencies ++= Seq(
"io.confluent" % "kafka-avro-serializer" % confluentAvroSerializerVersion % Test,
// See https://github.com/sbt/sbt/issues/3618#issuecomment-448951808
"javax.ws.rs" % "javax.ws.rs-api" % "2.1" artifacts Artifact("javax.ws.rs-api", "jar", "jar"),
"net.manub" %% "scalatest-embedded-schema-registry" % "2.0.0" % Test exclude ("log4j", "log4j") exclude ("org.slf4j", "slf4j-log4j12"),
"org.apache.commons" % "commons-compress" % "1.18", // embedded Kafka pulls in Avro, which pulls in commons-compress 1.8.1, see testing.md
"org.scalatest" %% "scalatest" % scalatestVersion % Test,
"io.spray" %% "spray-json" % "1.3.5" % Test,
"com.fasterxml.jackson.core" % "jackson-databind" % "2.9.7" % Test, // ApacheV2
"org.junit.vintage" % "junit-vintage-engine" % JupiterKeys.junitVintageVersion.value % Test,
// See http://hamcrest.org/JavaHamcrest/distributables#upgrading-from-hamcrest-1x
"org.hamcrest" % "hamcrest-library" % "2.1" % Test,
"org.hamcrest" % "hamcrest" % "2.1" % Test,
"net.aichler" % "jupiter-interface" % JupiterKeys.jupiterVersion.value % Test,
"com.typesafe.akka" %% "akka-slf4j" % akkaVersion % Test,
"ch.qos.logback" % "logback-classic" % "1.2.3" % Test,
"org.slf4j" % "log4j-over-slf4j" % slf4jVersion % Test,
// Schema registry uses Glassfish which uses java.util.logging
"org.slf4j" % "jul-to-slf4j" % slf4jVersion % Test,
"org.mockito" % "mockito-core" % "2.23.4" % Test
) ++
Seq( // integration test dependencies
"com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion % IntegrationTest,
"org.scalatest" %% "scalatest" % scalatestVersion % IntegrationTest,
"com.spotify" % "docker-client" % "8.11.7" % IntegrationTest,
"com.typesafe.akka" %% "akka-slf4j" % akkaVersion % IntegrationTest,
"ch.qos.logback" % "logback-classic" % "1.2.3" % IntegrationTest,
"org.slf4j" % "log4j-over-slf4j" % "1.7.25" % IntegrationTest
),
resolvers += "Confluent Maven Repo" at "https://packages.confluent.io/maven/",
publish / skip := true,
whitesourceIgnore := true,
@@ -275,7 +265,15 @@ lazy val benchmarks = project
skip in publish := true,
whitesourceIgnore := true,
IntegrationTest / parallelExecution := false,
libraryDependencies ++= benchmarkDependencies,
libraryDependencies ++= Seq(
"com.typesafe.scala-logging" %% "scala-logging" % "3.9.0",
"io.dropwizard.metrics" % "metrics-core" % "3.2.6",
"ch.qos.logback" % "logback-classic" % "1.2.3",
"org.slf4j" % "log4j-over-slf4j" % "1.7.25",
"com.typesafe.akka" %% "akka-slf4j" % akkaVersion % "it",
"com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion % "it",
"org.scalatest" %% "scalatest" % scalatestVersion % "it"
),
kafkaScale := 1,
buildInfoPackage := "akka.kafka.benchmarks",
buildInfoKeys := Seq[BuildInfoKey](kafkaScale),
9 changes: 6 additions & 3 deletions docs/src/main/paradox/testing.md
@@ -39,7 +39,7 @@ The testkit contains helper classes used by the tests in the Alpakka Kafka conne

### Testing from Java code

Test classes may extend `akka.kafka.testkit.javadsl.EmbeddedKafkaJunit4Test` to automatically start and stop an embedded Kafka broker.
Test classes may extend `akka.kafka.testkit.javadsl.EmbeddedKafkaTest` (JUnit 5) or `akka.kafka.testkit.javadsl.EmbeddedKafkaJunit4Test` (JUnit 4) to automatically start and stop an embedded Kafka broker.

Furthermore it provides

@@ -48,11 +48,14 @@ Furthermore it provides
* unique topic creation (`createTopic(int number, int partitions, int replication)`), and
* `CompletionStage` value extraction helper (`<T> T resultOf(CompletionStage<T> stage, java.time.Duration timeout)`).

The example below shows a skeleton test class for use with JUnit 4.
The example below shows skeleton test classes for JUnit 4 and JUnit 5.

Java
Java JUnit 4
: @@snip [snip](/tests/src/test/java/docs/javadsl/AssignmentTest.java) { #testkit }

Java JUnit 5
: @@snip [snip](/tests/src/test/java/docs/javadsl/ProducerExampleTest.java) { #testkit }
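
Since the `@@snip` references can't be resolved in this diff view, here is a minimal sketch of such a JUnit 5 skeleton. It is illustrative only and not the contents of `ProducerExampleTest.java`: the class name, port, and topic handling are made up, and it assumes `EmbeddedKafkaTest` takes the same `(ActorSystem, Materializer, kafkaPort)` constructor arguments as the `EmbeddedKafkaJunit4Test` added in this PR and inherits the `BaseKafkaTest` helpers.

```java
import akka.Done;
import akka.actor.ActorSystem;
import akka.kafka.testkit.javadsl.EmbeddedKafkaTest;
import akka.stream.ActorMaterializer;
import akka.stream.Materializer;
import akka.testkit.javadsl.TestKit;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;

import static org.junit.jupiter.api.Assertions.assertEquals;

// PER_CLASS lifecycle lets the base class run its non-static @BeforeAll/@AfterAll
// methods (starting and stopping the embedded broker) on the single test instance.
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class SkeletonJupiterTest extends EmbeddedKafkaTest {

  private static final ActorSystem system = ActorSystem.create("SkeletonJupiterTest");
  private static final Materializer materializer = ActorMaterializer.create(system);

  SkeletonJupiterTest() {
    super(system, materializer, 9092); // hypothetical Kafka port
  }

  @Test
  void produceSomeRecords() throws Exception {
    // create a unique topic with 1 partition and replication factor 1
    String topic = createTopic(1, 1, 1);
    // produce ten string records to partition 0 and wait for stream completion
    assertEquals(Done.getInstance(), resultOf(produceString(topic, 10, partition0)));
  }

  @AfterAll
  void shutdownActorSystem() {
    TestKit.shutdownActorSystem(system);
  }
}
```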


### Testing from Scala code

3 changes: 3 additions & 0 deletions project/plugins.sbt
@@ -20,3 +20,6 @@ addSbtPlugin("com.github.ehsanyou" % "sbt-docker-compose" % "67284e73-envvars-2m
// depend directly on the patched version see https://github.com/akka/alpakka/issues/1388
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.2+10-148ba0ff")
resolvers += Resolver.bintrayIvyRepo("2m", "sbt-plugins")

addSbtPlugin("net.aichler" % "sbt-jupiter-interface" % "0.8.0")
resolvers += Resolver.jcenterRepo
@@ -0,0 +1,76 @@
/*
* Copyright (C) 2014 - 2016 Softwaremill <http://softwaremill.com>
* Copyright (C) 2016 - 2019 Lightbend Inc. <http://www.lightbend.com>
*/

package akka.kafka.testkit.javadsl;

import akka.Done;
import akka.actor.ActorSystem;
import akka.kafka.Subscriptions;
import akka.kafka.javadsl.Consumer;
import akka.kafka.javadsl.Producer;
import akka.kafka.testkit.internal.KafkaTestKitClass;
import akka.stream.Materializer;
import akka.stream.javadsl.Keep;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.List;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.TimeUnit;
import java.util.stream.IntStream;

public abstract class BaseKafkaTest extends KafkaTestKitClass {

public static final int partition0 = 0;

public final Logger log = LoggerFactory.getLogger(getClass());

protected final Materializer materializer;

protected BaseKafkaTest(ActorSystem system, Materializer materializer, String bootstrapServers) {
super(system, bootstrapServers);
this.materializer = materializer;
}

@Override
public Logger log() {
return log;
}

/** Override to set a different default timeout for {@code resultOf}. */
protected Duration resultOfTimeout() {
return Duration.ofSeconds(5);
}

protected CompletionStage<Done> produceString(String topic, int messageCount, int partition) {
return Source.fromIterator(() -> IntStream.range(0, messageCount).iterator())
.map(Object::toString)
.map(n -> new ProducerRecord<String, String>(topic, partition, DefaultKey(), n))
.runWith(Producer.plainSink(producerDefaults()), materializer);
}

protected Consumer.DrainingControl<List<ConsumerRecord<String, String>>> consumeString(
String topic, long take) {
return Consumer.plainSource(
consumerDefaults().withGroupId(createGroupId(1)), Subscriptions.topics(topic))
.take(take)
.toMat(Sink.seq(), Keep.both())
.mapMaterializedValue(Consumer::createDrainingControl)
.run(materializer);
}

protected <T> T resultOf(CompletionStage<T> stage) throws Exception {
return resultOf(stage, resultOfTimeout());
}

protected <T> T resultOf(CompletionStage<T> stage, Duration timeout) throws Exception {
return stage.toCompletableFuture().get(timeout.toMillis(), TimeUnit.MILLISECONDS);
}
}
@@ -0,0 +1,68 @@
/*
* Copyright (C) 2014 - 2016 Softwaremill <http://softwaremill.com>
* Copyright (C) 2016 - 2019 Lightbend Inc. <http://www.lightbend.com>
*/

package akka.kafka.testkit.javadsl;

import akka.actor.ActorSystem;
import akka.stream.Materializer;
import net.manub.embeddedkafka.EmbeddedKafka$;
import net.manub.embeddedkafka.EmbeddedKafkaConfig;
import net.manub.embeddedkafka.EmbeddedKafkaConfig$;
import org.junit.After;
import org.junit.Before;
import scala.collection.immutable.HashMap$;

/**
* JUnit 4 base-class with some convenience for creating an embedded Kafka broker before
* running the tests. The embedded broker is started before and stopped after each test by the
* `@Before` and `@After` annotated methods below.
*/
public abstract class EmbeddedKafkaJunit4Test extends KafkaTest {

private static EmbeddedKafkaConfig embeddedKafkaConfig(
int kafkaPort, int zookeeperPort, int replicationFactor) {
return EmbeddedKafkaConfig$.MODULE$.apply(
kafkaPort,
zookeeperPort,
createReplicationFactorBrokerProps(replicationFactor),
HashMap$.MODULE$.empty(),
HashMap$.MODULE$.empty());
}

protected final int kafkaPort;
protected final int replicationFactor;

public EmbeddedKafkaJunit4Test(
ActorSystem system, Materializer materializer, int kafkaPort, int replicationFactor) {
super(system, materializer, "localhost:" + kafkaPort);
this.kafkaPort = kafkaPort;
this.replicationFactor = replicationFactor;
}

protected EmbeddedKafkaJunit4Test(ActorSystem system, Materializer materializer, int kafkaPort) {
this(system, materializer, kafkaPort, 1);
}

protected static void startEmbeddedKafka(int kafkaPort, int replicationFactor) {
EmbeddedKafka$.MODULE$.start(embeddedKafkaConfig(kafkaPort, kafkaPort + 1, replicationFactor));
}

protected static void stopEmbeddedKafka() {
EmbeddedKafka$.MODULE$.stop();
}

@Before
public void setupEmbeddedKafka() {
EmbeddedKafkaJunit4Test.startEmbeddedKafka(kafkaPort, replicationFactor);
setUpAdminClient();
}

@After
public void cleanUpEmbeddedKafka() {
cleanUpAdminClient();
EmbeddedKafkaJunit4Test.stopEmbeddedKafka();
}
}
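
For comparison, a hypothetical JUnit 4 test built on this base class (not part of this PR) could look like the sketch below. Class, topic, and port names are illustrative; it assumes the `BaseKafkaTest` helpers shown earlier are inherited via `KafkaTest`, and that the `javadsl` `Consumer.DrainingControl` offers `drainAndShutdown(Executor)`. No lifecycle annotation is needed because the broker is started and stopped around each test by the `@Before`/`@After` methods above.

```java
import akka.actor.ActorSystem;
import akka.kafka.javadsl.Consumer;
import akka.kafka.testkit.javadsl.EmbeddedKafkaJunit4Test;
import akka.stream.ActorMaterializer;
import akka.stream.Materializer;
import akka.testkit.javadsl.TestKit;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.junit.AfterClass;
import org.junit.Test;

import java.time.Duration;
import java.util.List;

import static org.junit.Assert.assertEquals;

public class RoundTripJunit4Test extends EmbeddedKafkaJunit4Test {

  private static final ActorSystem system = ActorSystem.create("RoundTripJunit4Test");
  private static final Materializer materializer = ActorMaterializer.create(system);

  public RoundTripJunit4Test() {
    super(system, materializer, 9093); // hypothetical Kafka port; ZooKeeper uses port + 1
  }

  @Test
  public void produceAndConsume() throws Exception {
    String topic = createTopic(1, 1, 1);
    // produce ten records and wait until the producer stream completes
    resultOf(produceString(topic, 10, partition0));
    // consume them back; the source takes 10 elements and then completes
    Consumer.DrainingControl<List<ConsumerRecord<String, String>>> control =
        consumeString(topic, 10);
    List<ConsumerRecord<String, String>> records =
        resultOf(control.drainAndShutdown(system.dispatcher()), Duration.ofSeconds(30));
    assertEquals(10, records.size());
  }

  @AfterClass
  public static void shutdownActorSystem() {
    TestKit.shutdownActorSystem(system);
  }
}
```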