Resolving unit test issues with EmbeddedKafka localhost resolution #4796

Merged: 1 commit, Apr 22, 2024
docker-compose.yml (3 additions, 3 deletions)

@@ -6,13 +6,13 @@ services:
   zookeeper:
     image: wurstmeister/zookeeper
     ports:
-      - "2181:2181"
+      - "9092:9092"
     logging:
       driver: none

   kafka:
     image: wurstmeister/kafka
-    command: ["wait-for-it.sh", "zookeeper:2181", "--","start-kafka.sh"]
+    command: ["wait-for-it.sh", "zookeeper:9092", "--","start-kafka.sh"]
     ports:
       - "9092:9092"
     logging:
@@ -24,7 +24,7 @@ services:
       KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092
       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT
       KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
-      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:9092
       # KAFKA_CREATE_TOPICS: institution
       KAFKA_CREATE_GROUP: institutions-group
       KAFKA_BROKER_ID: 2
docs/v1/panel.md (2 additions, 2 deletions)

@@ -58,7 +58,7 @@ As such, it needs a mechanism for storing configuration information for addition
 An easy way to satisfy this requirement is to launch a docker container with `ZooKeeper`, as follows:

 ```shell
-$ docker run --rm -p 2181:2181 -p 2888:2888 -p 3888:3888 jplock/zookeeper
+$ docker run --rm -p 9092:9092 -p 2888:2888 -p 3888:3888 jplock/zookeeper
 ```

 #### PostgreSQL
@@ -75,7 +75,7 @@ docker run -e POSTGRES_PASSWORD=postgres -e POSTGRES_USER=postgres -e POSTGRES_D

 ```shell
 export ZOOKEEPER_HOST=192.168.99.100
-export ZOOKEEPER_PORT=2181
+export ZOOKEEPER_PORT=9092
 ```

 * Set the environment variables for the local Cassandra instance. `CASSANDRA_CLUSTER_HOSTS` also uses the Docker Machine IP:
modified-lar/src/test/resources/application.conf (2 additions, 1 deletion)

@@ -31,7 +31,8 @@ kafka {
   endpoint=""
   endpoint = ${?KAFKA_SSL_ENDPOINT}
 }
-hosts = "localhost:6000"
+
+hosts = "localhost:9092"
 hosts = ${?KAFKA_CLUSTER_HOSTS}
 idle-timeout = 5
 idle-timeout = ${?KAFKA_IDLE_TIMEOUT}
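The two `hosts` lines above follow the usual HOCON pattern of a hard-coded default plus an optional environment override: the `${?KAFKA_CLUSTER_HOSTS}` assignment only takes effect when that variable is set, otherwise the new `localhost:9092` default is used. A minimal sketch of how this resolves at runtime with Typesafe Config (the object name is illustrative, not part of the project):

```scala
import com.typesafe.config.ConfigFactory

// Illustrative sketch: resolving the `kafka.hosts` setting shown above.
// `hosts = ${?KAFKA_CLUSTER_HOSTS}` overrides the "localhost:9092" default
// only when the KAFKA_CLUSTER_HOSTS environment variable is present.
object KafkaHostsResolution extends App {
  val config     = ConfigFactory.load()            // reads application.conf from the classpath
  val kafkaHosts = config.getString("kafka.hosts") // "localhost:9092" unless KAFKA_CLUSTER_HOSTS is set
  println(s"kafka.hosts resolves to: $kafkaHosts")
}
```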
@@ -2,29 +2,30 @@ package hmda.publication.lar.publication

 import akka.NotUsed
 import akka.actor.ActorSystem
-import akka.actor.typed.{ ActorSystem => TypedSystem }
+import akka.actor.typed.{ActorSystem => TypedSystem}
 import akka.actor.typed.scaladsl.AskPattern._
 import akka.actor.typed.scaladsl.Behaviors
 import akka.actor.typed.scaladsl.adapter._
-import akka.actor.typed.{ ActorRef, SupervisorStrategy }
+import akka.actor.typed.{ActorRef, SupervisorStrategy}
 import akka.stream.scaladsl.Source
 import akka.testkit.TestKit
 import akka.util.Timeout
 import com.adobe.testing.s3mock.S3MockApplication
 import hmda.census.records.CensusRecords
 import hmda.messages.submission.HmdaRawDataEvents.LineAdded
 import hmda.model.census.Census
-import hmda.model.filing.lar.{ LarGenerators, LoanApplicationRegister }
+import hmda.model.filing.lar.{LarGenerators, LoanApplicationRegister}
 import hmda.model.filing.submission.SubmissionId
 import hmda.persistence.util.CassandraUtil
 import hmda.query.repository.ModifiedLarRepository
 import hmda.utils.EmbeddedPostgres
 import hmda.utils.YearUtils.Period
-import net.manub.embeddedkafka.{ EmbeddedK, EmbeddedKafka }
+import io.github.embeddedkafka.EmbeddedKafkaConfig.defaultConfig.{kafkaPort, zooKeeperPort}
+import io.github.embeddedkafka.{EmbeddedK, EmbeddedKafka, EmbeddedKafkaConfig}
 import org.scalacheck.Gen
-import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
-import org.scalatest.time.{ Millis, Minutes, Span }
-import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }
+import org.scalatest.concurrent.{PatienceConfiguration, ScalaFutures}
+import org.scalatest.time.{Millis, Minutes, Span}
+import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
 import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
 import slick.basic.DatabaseConfig
 import slick.jdbc.JdbcProfile
@@ -51,10 +52,17 @@ class ModifiedLarPublisherSpec
   var s3mock: S3MockApplication = _
   var kafka: EmbeddedK = _

+  implicit val embedKafkaConfig = EmbeddedKafkaConfig(kafkaPort = 9092, zooKeeperPort = 9093)
+
+
+
+
   override def beforeAll(): Unit = {
     super.beforeAll()
     s3mock = S3MockApplication.start(properties.asJava)
-    kafka = EmbeddedKafka.start()
+
+    kafka = EmbeddedKafka.start()(embedKafkaConfig)
+
     CassandraUtil.startEmbeddedCassandra()
   }

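Taken together with the `application.conf` change, the spec fix swaps the older `net.manub.embeddedkafka` package for its successor `io.github.embeddedkafka` and pins the embedded broker to explicit ports, so the test broker answers on the same `localhost:9092` the configuration advertises instead of the library's default ports. A self-contained sketch of that pattern (a hypothetical spec and topic name, mirroring the ScalaTest 3.0-style imports used above, not the project's actual test):

```scala
import io.github.embeddedkafka.{EmbeddedK, EmbeddedKafka, EmbeddedKafkaConfig}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

// Hypothetical spec illustrating the pinned-port EmbeddedKafka setup shown in the diff above.
class PinnedPortKafkaSpec extends WordSpecLike with Matchers with BeforeAndAfterAll {

  // Explicit ports instead of the library defaults, matching hosts = "localhost:9092" in application.conf.
  implicit val embedKafkaConfig: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort = 9092, zooKeeperPort = 9093)

  private var kafka: EmbeddedK = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    // Start the in-memory broker on the pinned ports before any test runs.
    kafka = EmbeddedKafka.start()(embedKafkaConfig)
  }

  override def afterAll(): Unit = {
    EmbeddedKafka.stop()
    super.afterAll()
  }

  "the embedded broker" should {
    "round-trip a message on the pinned port" in {
      // "test-topic" is an arbitrary topic name used only for illustration.
      EmbeddedKafka.publishStringMessageToKafka("test-topic", "hello")(embedKafkaConfig)
      EmbeddedKafka.consumeFirstStringMessageFrom("test-topic")(embedKafkaConfig) shouldBe "hello"
    }
  }
}
```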
project/buildinfo.sbt (1 addition, 0 deletions)

@@ -1 +1,2 @@
+ThisBuild / evictionErrorLevel := Level.Info
 addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0")
project/scoverage.sbt (1 addition, 1 deletion)

@@ -1 +1 @@
-addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3")
+addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.10")