diff --git a/build.sbt b/build.sbt index 231d05ce9a..e32debb50c 100644 --- a/build.sbt +++ b/build.sbt @@ -3,7 +3,7 @@ import BuildSettings._ import sbtassembly.AssemblyPlugin.autoImport.assemblyMergeStrategy import com.typesafe.sbt.packager.docker._ -lazy val commonDeps = Seq(logback, scalaTest, scalaCheck, akkaHttpSprayJson, testContainers, apacheCommonsIO, s3Mock,log4jToSlf4j, kubernetesApi) +lazy val commonDeps = Seq(logback, scalaTest, scalaCheck, akkaHttpSprayJson, testContainers, apacheCommonsIO, log4jToSlf4j, kubernetesApi) lazy val sparkDeps = Seq( @@ -50,7 +50,7 @@ lazy val akkaPersistenceDeps = ) lazy val akkaHttpDeps = - Seq(akkaHttp, akkaHttp2, akkaHttpTestkit, akkaStreamsTestKit, akkaHttpCirce, akkaHttpXml) + Seq(akkaHttp, akkaHttp2, akkaHttpXml, akkaHttpTestkit, akkaStreamsTestKit, akkaHttpCirce) lazy val circeDeps = Seq(circe, circeGeneric, circeParser) lazy val enumeratumDeps = Seq(enumeratum, enumeratumCirce) @@ -162,6 +162,7 @@ lazy val `hmda-platform` = (project in file("hmda")) case "logback.xml" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -197,10 +198,12 @@ lazy val `check-digit` = (project in file("check-digit")) s"${name.value}.jar" }, assembly / assemblyMergeStrategy := { + case "application.conf" => MergeStrategy.concat case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs @ _*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs @ _*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -241,6 +244,7 @@ lazy val `check-digit` = (project in file("check-digit")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -276,6 +280,7 @@ lazy val `institutions-api` = (project in file("institutions-api")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -318,6 +323,7 @@ lazy val `hmda-data-publisher` = (project in file("hmda-data-publisher")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -355,6 +361,7 @@ lazy val `hmda-dashboard` = (project in file("hmda-dashboard")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -393,6 +400,7 @@ lazy val `ratespread-calculator` = (project in file("ratespread-calculator")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -431,6 +439,7 @@ lazy val `modified-lar` = (project in file("modified-lar")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -470,6 +479,7 @@ lazy val `irs-publisher` = (project in file("irs-publisher")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -509,6 +519,7 @@ lazy val `hmda-reporting` = (project in file("hmda-reporting")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -557,6 +568,7 @@ lazy val `hmda-analytics` = (project in file("hmda-analytics")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -598,6 +610,7 @@ lazy val `hmda-analytics` = (project in file("hmda-analytics")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs @ _*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs @ _*) => MergeStrategy.last case "reference.conf" => MergeStrategy.concat case PathList(ps @ _*) if ps.last endsWith ".proto" => @@ -635,6 +648,7 @@ lazy val `rate-limit` = (project in file("rate-limit")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -674,6 +688,7 @@ lazy val `data-browser` = (project in file("data-browser")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -707,6 +722,7 @@ lazy val `submission-errors` = (project in file("submission-errors")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps@_*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -737,6 +753,7 @@ lazy val `email-service` = (project in file("email-service")) case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first @@ -777,6 +794,7 @@ lazy val `hmda-quarterly-data-service` = (project in file ("hmda-quarterly-data- case "META-INF/io.netty.versions.properties" => MergeStrategy.concat case "META-INF/MANIFEST.MF" => MergeStrategy.discard case PathList("META-INF", xs@_*) => MergeStrategy.concat + case PathList("org", "bouncycastle", xs @_*) => MergeStrategy.first case PathList("jakarta", xs@_*) => MergeStrategy.last case PathList(ps @ _*) if ps.last endsWith ".proto" => MergeStrategy.first diff --git a/common/src/main/resources/persistence.conf b/common/src/main/resources/persistence.conf index 6feadb5045..cafed4125f 100644 --- a/common/src/main/resources/persistence.conf +++ b/common/src/main/resources/persistence.conf @@ -49,7 +49,7 @@ datastax-java-driver { basic { contact-points = ["localhost:9042"] contact-points = [${?CASSANDRA_CLUSTER_HOSTS}":9042"] - load-balancing-policy.local-datacenter = "" + load-balancing-policy.local-datacenter = "datacenter1" load-balancing-policy.local-datacenter = ${?CASSANDRA_CLUSTER_DC} } advanced { diff --git a/common/src/main/scala/hmda/util/RealTimeConfig.scala b/common/src/main/scala/hmda/util/RealTimeConfig.scala index d05fb1ebce..c51fcdb08c 100644 --- a/common/src/main/scala/hmda/util/RealTimeConfig.scala +++ b/common/src/main/scala/hmda/util/RealTimeConfig.scala @@ -18,7 +18,7 @@ class RealTimeConfig(val cmName: String, val ns: String) { val factory = new SharedInformerFactory(client) val informer = factory.sharedIndexInformerFor((params: CallGeneratorParams) => { api.listNamespacedConfigMapCall( - ns, null, null, null, s"metadata.name=$cmName", null, null, params.resourceVersion, null, params.timeoutSeconds, params.watch, null) + ns, null, null, null, s"metadata.name=$cmName", null, null, params.resourceVersion, null, null, params.timeoutSeconds, params.watch, null) }, classOf[V1ConfigMap], classOf[V1ConfigMapList]) informer.addEventHandler(new ResourceEventHandler[V1ConfigMap] { override def onAdd(obj: V1ConfigMap): Unit = { diff --git a/common/src/test/scala/hmda/utils/EmbeddedS3.scala b/common/src/test/scala/hmda/utils/EmbeddedS3.scala deleted file mode 100644 index 0612a0d425..0000000000 --- a/common/src/test/scala/hmda/utils/EmbeddedS3.scala +++ /dev/null @@ -1,33 +0,0 @@ -package hmda.utils - -import java.util.concurrent.atomic.AtomicReference - -import com.adobe.testing.s3mock.S3MockApplication -import org.scalatest.{ BeforeAndAfterAll, Suite } - -import scala.collection.mutable -import scala.collection.JavaConverters._ - -trait EmbeddedS3 extends BeforeAndAfterAll { self: Suite => - private val s3: AtomicReference[S3MockApplication] = new AtomicReference(null) - - private val properties: mutable.Map[String, Object] = - mutable // S3 Mock mutates the map so we cannot use an immutable map :( - .Map( - S3MockApplication.PROP_HTTPS_PORT -> S3MockApplication.DEFAULT_HTTPS_PORT, - S3MockApplication.PROP_HTTP_PORT -> S3MockApplication.DEFAULT_HTTP_PORT, - S3MockApplication.PROP_SILENT -> true, - S3MockApplication.PROP_INITIAL_BUCKETS -> "cfpb-hmda-public-dev" - ) - .map { case (k, v) => (k, v.asInstanceOf[Object]) } - - override def beforeAll(): Unit = { - s3.set(S3MockApplication.start(properties.asJava)) - super.beforeAll() - } - - override def afterAll(): Unit = { - Option(s3.get()).foreach(_.stop()) - super.afterAll() - } -} \ No newline at end of file diff --git a/data-browser/src/main/scala/hmda/dataBrowser/api/DataBrowserApi.scala b/data-browser/src/main/scala/hmda/dataBrowser/api/DataBrowserApi.scala index 645cda64a8..a0da3e2ecd 100644 --- a/data-browser/src/main/scala/hmda/dataBrowser/api/DataBrowserApi.scala +++ b/data-browser/src/main/scala/hmda/dataBrowser/api/DataBrowserApi.scala @@ -17,7 +17,7 @@ import io.lettuce.core.{ ClientOptions, RedisClient } import monix.eval.Task import slick.basic.DatabaseConfig import slick.jdbc.JdbcProfile -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.cors import scala.concurrent.ExecutionContext import scala.concurrent.duration._ diff --git a/hmda-data-publisher/src/main/scala/hmda/publisher/api/DataPublisherHttpApi.scala b/hmda-data-publisher/src/main/scala/hmda/publisher/api/DataPublisherHttpApi.scala index f6c72c2301..8f3e5a6ae3 100644 --- a/hmda-data-publisher/src/main/scala/hmda/publisher/api/DataPublisherHttpApi.scala +++ b/hmda-data-publisher/src/main/scala/hmda/publisher/api/DataPublisherHttpApi.scala @@ -3,7 +3,7 @@ package hmda.publisher.api import akka.http.scaladsl.model.headers.RawHeader import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.publisher.scheduler.AllSchedulers import hmda.publisher.scheduler.schedules.{ Schedule, ScheduleWithYear, Schedules } diff --git a/hmda/src/main/scala/hmda/api/http/HmdaPublicApi.scala b/hmda/src/main/scala/hmda/api/http/HmdaPublicApi.scala index 1d501d3d8d..43d247a2c3 100644 --- a/hmda/src/main/scala/hmda/api/http/HmdaPublicApi.scala +++ b/hmda/src/main/scala/hmda/api/http/HmdaPublicApi.scala @@ -1,16 +1,17 @@ package hmda.api.http -import akka.actor.typed.{ ActorSystem, Behavior } -import akka.actor.{ CoordinatedShutdown, ActorSystem => ClassicActorSystem } +import akka.actor.typed.{ActorSystem, Behavior} +import akka.actor.{CoordinatedShutdown, ActorSystem => ClassicActorSystem} import akka.actor.typed.scaladsl.Behaviors import akka.stream.Materializer -import hmda.api.http.public.{ HmdaFileValidationHttpApi, LarValidationHttpApi, TsValidationHttpApi, HmdaFileParsingHttpApi } +import hmda.api.http.public.{HmdaFileParsingHttpApi, HmdaFileValidationHttpApi, LarValidationHttpApi, TsValidationHttpApi} import hmda.api.http.routes.BaseHttpApi import hmda.api.http.directives.HmdaTimeDirectives._ + import scala.concurrent.ExecutionContext import akka.http.scaladsl.server.Directives._ import akka.actor.typed.scaladsl.adapter._ -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.cors // This is just a Guardian for starting up the API // $COVERAGE-OFF$ diff --git a/hmda/src/main/scala/hmda/api/http/admin/InstitutionAdminHttpApi.scala b/hmda/src/main/scala/hmda/api/http/admin/InstitutionAdminHttpApi.scala index bf97876d68..f21fe152c7 100644 --- a/hmda/src/main/scala/hmda/api/http/admin/InstitutionAdminHttpApi.scala +++ b/hmda/src/main/scala/hmda/api/http/admin/InstitutionAdminHttpApi.scala @@ -6,7 +6,7 @@ import akka.http.scaladsl.model.{ StatusCodes, Uri } import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.util.Timeout -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import com.typesafe.config.Config import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.api.http.PathMatchers._ diff --git a/hmda/src/main/scala/hmda/api/http/admin/PublishAdminHttpApi.scala b/hmda/src/main/scala/hmda/api/http/admin/PublishAdminHttpApi.scala index 0f135cf3c5..f0a9df1215 100644 --- a/hmda/src/main/scala/hmda/api/http/admin/PublishAdminHttpApi.scala +++ b/hmda/src/main/scala/hmda/api/http/admin/PublishAdminHttpApi.scala @@ -4,7 +4,7 @@ import akka.cluster.sharding.typed.scaladsl.ClusterSharding import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import com.typesafe.config.Config import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.auth.OAuth2Authorization diff --git a/hmda/src/main/scala/hmda/api/http/filing/FilingHttpApi.scala b/hmda/src/main/scala/hmda/api/http/filing/FilingHttpApi.scala index 0ecfaa8c57..57016d1ce2 100644 --- a/hmda/src/main/scala/hmda/api/http/filing/FilingHttpApi.scala +++ b/hmda/src/main/scala/hmda/api/http/filing/FilingHttpApi.scala @@ -8,7 +8,7 @@ import akka.http.scaladsl.model.{StatusCodes, Uri} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.util.Timeout -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.api.http.PathMatchers._ import hmda.api.http.directives.CreateFilingAuthorization._ diff --git a/hmda/src/main/scala/hmda/api/http/filing/InstitutionHttpApi.scala b/hmda/src/main/scala/hmda/api/http/filing/InstitutionHttpApi.scala index 70568d11de..1fe8b878b3 100644 --- a/hmda/src/main/scala/hmda/api/http/filing/InstitutionHttpApi.scala +++ b/hmda/src/main/scala/hmda/api/http/filing/InstitutionHttpApi.scala @@ -6,7 +6,7 @@ import akka.http.scaladsl.model.Uri import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.util.Timeout -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.api.http.PathMatchers._ import hmda.api.http.directives.QuarterlyFilingAuthorization.quarterlyFilingAllowed diff --git a/hmda/src/main/scala/hmda/api/http/filing/submissions/ParseErrorHttpApi.scala b/hmda/src/main/scala/hmda/api/http/filing/submissions/ParseErrorHttpApi.scala index 715bee4e28..895f8b9326 100644 --- a/hmda/src/main/scala/hmda/api/http/filing/submissions/ParseErrorHttpApi.scala +++ b/hmda/src/main/scala/hmda/api/http/filing/submissions/ParseErrorHttpApi.scala @@ -5,7 +5,7 @@ import akka.http.scaladsl.model.{StatusCodes, Uri} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.util.Timeout -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.api.http.PathMatchers._ import hmda.api.http.directives.QuarterlyFilingAuthorization._ diff --git a/hmda/src/main/scala/hmda/api/http/filing/submissions/SubmissionHttpApi.scala b/hmda/src/main/scala/hmda/api/http/filing/submissions/SubmissionHttpApi.scala index bb34ea1f79..f96ca10530 100644 --- a/hmda/src/main/scala/hmda/api/http/filing/submissions/SubmissionHttpApi.scala +++ b/hmda/src/main/scala/hmda/api/http/filing/submissions/SubmissionHttpApi.scala @@ -11,7 +11,7 @@ import akka.stream.Materializer import akka.stream.scaladsl.Sink import akka.util.{ ByteString, Timeout } import com.typesafe.config.Config -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.api.http.PathMatchers._ import hmda.api.http.directives.CreateFilingAuthorization._ diff --git a/hmda/src/main/scala/hmda/api/http/filing/submissions/VerifyHttpApi.scala b/hmda/src/main/scala/hmda/api/http/filing/submissions/VerifyHttpApi.scala index e32259e94c..4af0954cd6 100644 --- a/hmda/src/main/scala/hmda/api/http/filing/submissions/VerifyHttpApi.scala +++ b/hmda/src/main/scala/hmda/api/http/filing/submissions/VerifyHttpApi.scala @@ -7,7 +7,7 @@ import akka.http.scaladsl.model.{ StatusCodes, Uri } import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.util.Timeout -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.api.http.PathMatchers._ import hmda.api.http.directives.QuarterlyFilingAuthorization._ diff --git a/hmda/src/main/scala/hmda/api/http/public/HmdaFileParsingHttpApi.scala b/hmda/src/main/scala/hmda/api/http/public/HmdaFileParsingHttpApi.scala index eb40a0c192..9a737b68d2 100644 --- a/hmda/src/main/scala/hmda/api/http/public/HmdaFileParsingHttpApi.scala +++ b/hmda/src/main/scala/hmda/api/http/public/HmdaFileParsingHttpApi.scala @@ -9,7 +9,7 @@ import akka.http.scaladsl.server.Route import akka.stream.scaladsl.{ Broadcast, Concat, Flow, GraphDSL, Sink, Source } import akka.stream.{ FlowShape, Materializer } import akka.util.ByteString -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.api.http.model.filing.submissions.HmdaRowParsedErrorSummary import hmda.api.http.utils.ParserErrorUtils diff --git a/hmda/src/main/scala/hmda/api/http/public/HmdaFileValidationHttpApi.scala b/hmda/src/main/scala/hmda/api/http/public/HmdaFileValidationHttpApi.scala index 7bbeedceb6..3aa61b4554 100644 --- a/hmda/src/main/scala/hmda/api/http/public/HmdaFileValidationHttpApi.scala +++ b/hmda/src/main/scala/hmda/api/http/public/HmdaFileValidationHttpApi.scala @@ -8,14 +8,13 @@ import akka.http.scaladsl.server.Route import akka.stream.scaladsl.{ Broadcast, Concat, Flow, GraphDSL, Sink, Source } import akka.stream.{ FlowShape, Materializer } import akka.util.ByteString -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.model.validation.ValidationError import hmda.model.validation.LarValidationError import hmda.api.http.model.filing.submissions.HmdaRowParsedErrorSummary import hmda.api.http.model.filing.submissions.{ ValidationErrorSummary, SingleValidationErrorSummary } import hmda.api.http.utils.ParserErrorUtils -import hmda.model.validation.LarValidationError import hmda.model.validation.TsValidationError import hmda.validation.engine._ import hmda.validation.context.ValidationContext diff --git a/hmda/src/main/scala/hmda/api/http/public/LarValidationHttpApi.scala b/hmda/src/main/scala/hmda/api/http/public/LarValidationHttpApi.scala index dc836de480..edc2fac5f2 100644 --- a/hmda/src/main/scala/hmda/api/http/public/LarValidationHttpApi.scala +++ b/hmda/src/main/scala/hmda/api/http/public/LarValidationHttpApi.scala @@ -4,7 +4,7 @@ import akka.http.scaladsl.marshalling.ToResponseMarshallable import akka.http.scaladsl.model.headers.RawHeader import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.api.http.PathMatchers._ import hmda.api.http.model.public.LarValidateRequest diff --git a/hmda/src/main/scala/hmda/api/http/public/TsValidationHttpApi.scala b/hmda/src/main/scala/hmda/api/http/public/TsValidationHttpApi.scala index 526b33687f..aaf9f400c0 100644 --- a/hmda/src/main/scala/hmda/api/http/public/TsValidationHttpApi.scala +++ b/hmda/src/main/scala/hmda/api/http/public/TsValidationHttpApi.scala @@ -4,7 +4,7 @@ import akka.http.scaladsl.marshalling.ToResponseMarshallable import akka.http.scaladsl.model.headers.RawHeader import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.api.http.PathMatchers._ import hmda.api.http.model.public.TsValidateRequest diff --git a/hmda/src/main/scala/hmda/api/ws/filing/submissions/SubmissionWsApi.scala b/hmda/src/main/scala/hmda/api/ws/filing/submissions/SubmissionWsApi.scala index e79375521c..0c37064bff 100644 --- a/hmda/src/main/scala/hmda/api/ws/filing/submissions/SubmissionWsApi.scala +++ b/hmda/src/main/scala/hmda/api/ws/filing/submissions/SubmissionWsApi.scala @@ -8,7 +8,7 @@ import akka.http.scaladsl.model.ws.{ Message, TextMessage } import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.stream.scaladsl.{ Flow, Source } -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import hmda.api.ws.model.{ KeepAliveWsResponse, ServerPing, SubmissionStatus, SubmissionStatusWSResponse } import hmda.messages.submission.SubmissionEvents.{ SubmissionCreated, SubmissionEvent, SubmissionModified } import hmda.model.filing.submission.SubmissionId diff --git a/institutions-api/src/main/scala/hmda/institution/api/http/InstitutionQueryHttpApi.scala b/institutions-api/src/main/scala/hmda/institution/api/http/InstitutionQueryHttpApi.scala index 9313724561..f28fabcca2 100644 --- a/institutions-api/src/main/scala/hmda/institution/api/http/InstitutionQueryHttpApi.scala +++ b/institutions-api/src/main/scala/hmda/institution/api/http/InstitutionQueryHttpApi.scala @@ -4,7 +4,7 @@ import akka.http.scaladsl.marshalling.ToResponseMarshallable import akka.http.scaladsl.model.{StatusCodes, Uri} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import com.typesafe.config.Config import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.api.http.directives.CreateFilingAuthorization._ diff --git a/kubernetes/hmda-data-browser-api/templates/deployment.yaml b/kubernetes/hmda-data-browser-api/templates/deployment.yaml index 1ea909c5cd..9670077962 100644 --- a/kubernetes/hmda-data-browser-api/templates/deployment.yaml +++ b/kubernetes/hmda-data-browser-api/templates/deployment.yaml @@ -160,38 +160,6 @@ spec: key: aws-region - name: BROWSER_LOG_LEVEL value: {{.Values.databrowser.loglevel}} - - name: DATABROWSER_PG_TABLE_2017 - value: intentionally_left_blank - - name: DATABROWSER_PG_TABLE_2018 - valueFrom: - configMapKeyRef: - name: pgtables-configmap - key: mlartable.2018 - - name: DATABROWSER_PG_TABLE_2019 - valueFrom: - configMapKeyRef: - name: pgtables-configmap - key: mlartable.2019 - - name: DATABROWSER_PG_TABLE_2020 - valueFrom: - configMapKeyRef: - name: pgtables-configmap - key: mlartable.2020 - - name: DATABROWSER_PG_TABLE_2021 - valueFrom: - configMapKeyRef: - name: pgtables-configmap - key: mlartable.2021 - - name: DATABROWSER_PG_TABLE_2022 - valueFrom: - configMapKeyRef: - name: pgtables-configmap - key: mlartable.2022 - - name: DATABROWSER_PG_TABLE_2023 - valueFrom: - configMapKeyRef: - name: pgtables-configmap - key: mlartable.2023 - name: REDIS_HOSTNAME value: {{.Values.databrowser.redis}} - name: REDIS_PROTOCOL diff --git a/kubernetes/ratespread-calculator/templates/deployment.yaml b/kubernetes/ratespread-calculator/templates/deployment.yaml index 026316d187..25209855a4 100644 --- a/kubernetes/ratespread-calculator/templates/deployment.yaml +++ b/kubernetes/ratespread-calculator/templates/deployment.yaml @@ -50,16 +50,6 @@ spec: secretKeyRef: name: aws-credentials key: aws-secret-access-key - - name: AWS_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - name: aws-credentials - key: aws-access-key-id - - name: AWS_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - name: aws-credentials - key: aws-secret-access-key - name: APOR_SCHEDULE valueFrom: configMapKeyRef: diff --git a/modified-lar/src/test/scala/hmda/publication/lar/parser/ModifiedLarPublisherSpec.scala b/modified-lar/src/test/scala/hmda/publication/lar/parser/ModifiedLarPublisherSpec.scala index b892b51c8b..c19b727d0e 100644 --- a/modified-lar/src/test/scala/hmda/publication/lar/parser/ModifiedLarPublisherSpec.scala +++ b/modified-lar/src/test/scala/hmda/publication/lar/parser/ModifiedLarPublisherSpec.scala @@ -1,123 +1,123 @@ -package hmda.publication.lar.publication - -import akka.NotUsed -import akka.actor.ActorSystem -import akka.actor.typed.{ActorSystem => TypedSystem} -import akka.actor.typed.scaladsl.AskPattern._ -import akka.actor.typed.scaladsl.Behaviors -import akka.actor.typed.scaladsl.adapter._ -import akka.actor.typed.{ActorRef, SupervisorStrategy} -import akka.stream.scaladsl.Source -import akka.testkit.TestKit -import akka.util.Timeout -import com.adobe.testing.s3mock.S3MockApplication -import hmda.census.records.CensusRecords -import hmda.messages.submission.HmdaRawDataEvents.LineAdded -import hmda.model.census.Census -import hmda.model.filing.lar.{LarGenerators, LoanApplicationRegister} -import hmda.model.filing.submission.SubmissionId -import hmda.persistence.util.CassandraUtil -import hmda.query.repository.ModifiedLarRepository -import hmda.utils.EmbeddedPostgres -import hmda.utils.YearUtils.Period -import io.github.embeddedkafka.EmbeddedKafkaConfig.defaultConfig.{kafkaPort, zooKeeperPort} -import io.github.embeddedkafka.{EmbeddedK, EmbeddedKafka, EmbeddedKafkaConfig} -import org.scalacheck.Gen -import org.scalatest.concurrent.{PatienceConfiguration, ScalaFutures} -import org.scalatest.time.{Millis, Minutes, Span} -import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} -import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks -import slick.basic.DatabaseConfig -import slick.jdbc.JdbcProfile - -import scala.annotation.tailrec -import scala.collection.JavaConverters._ -import scala.collection.mutable -import scala.concurrent.duration._ - -class ModifiedLarPublisherSpec - extends TestKit(ActorSystem("publisher-spec")) - with WordSpecLike - with Matchers - with PatienceConfiguration - with ScalaFutures - with ScalaCheckPropertyChecks - with BeforeAndAfterAll - with EmbeddedPostgres { - - implicit val typedSystem = system.toTyped - - implicit val timeout = Timeout(3.minutes) - - var s3mock: S3MockApplication = _ - var kafka: EmbeddedK = _ - - implicit val embedKafkaConfig = EmbeddedKafkaConfig(kafkaPort = 9092, zooKeeperPort = 9093) - - - - - override def beforeAll(): Unit = { - super.beforeAll() - s3mock = S3MockApplication.start(properties.asJava) - - kafka = EmbeddedKafka.start()(embedKafkaConfig) - - CassandraUtil.startEmbeddedCassandra() - } - - override def afterAll(): Unit = { - super.afterAll() - Option(s3mock).foreach(_.stop()) - Option(kafka).foreach(_.stop(clearLogs = true)) - CassandraUtil.shutdown() - } - - override implicit def patienceConfig: PatienceConfig = PatienceConfig(timeout = Span(2, Minutes), interval = Span(100, Millis)) - - "Spawn publisher and upload data to S3 and Postgres" in { - @tailrec - def generateLarData(gen: Gen[List[LoanApplicationRegister]]): List[LoanApplicationRegister] = { - val data = Gen.nonEmptyListOf(LarGenerators.larGen).sample - if (data.isEmpty) generateLarData(gen) else data.get - } - - val larData = generateLarData(LarGenerators.larNGen(100)) - val censusTractMap2018: Map[String, Census] = CensusRecords.indexedTract2018 - val censusTractMap2019: Map[String, Census] = CensusRecords.indexedTract2019 - val censusTractMap2020: Map[String, Census] = CensusRecords.indexedTract2020 - val censusTractMap2021: Map[String, Census] = CensusRecords.indexedTract2021 - val censusTractMap2022: Map[String, Census] = CensusRecords.indexedTract2022 - val censusTractMap2023: Map[String, Census] = CensusRecords.indexedTract2023 - - val customData: TypedSystem[_] => SubmissionId => Source[LineAdded, NotUsed] = - _ => _ => Source(larData.zipWithIndex.map { case (lar, timestamp) => LineAdded(timestamp, lar.toCSV) }) - - val databaseConfig = DatabaseConfig.forConfig[JdbcProfile]("embedded-pg") - val repo = new ModifiedLarRepository(databaseConfig) - val publisher = system.spawnAnonymous( - Behaviors - .supervise(ModifiedLarPublisher.behavior(censusTractMap2018, censusTractMap2019, censusTractMap2020, censusTractMap2021, censusTractMap2022, censusTractMap2023,repo, customData)) - .onFailure(SupervisorStrategy.stop) - ) - val submissionIdA = SubmissionId("B90YWS6AFX2LGWOXJ1LD", Period(2018, None), sequenceNumber = 1) - val submissionIdB = SubmissionId("B90YWS6AFX2LGWOXJ1LD", Period(2019, None), sequenceNumber = 1) - - val resultA = publisher ? ((replyTo: ActorRef[PersistModifiedLarResult]) => PersistToS3AndPostgres(submissionIdA, replyTo)) - val resultB = publisher ? ((replyTo: ActorRef[PersistModifiedLarResult]) => PersistToS3AndPostgres(submissionIdB, replyTo)) - resultA.futureValue shouldBe PersistModifiedLarResult(submissionIdA, UploadSucceeded) - resultB.futureValue shouldBe PersistModifiedLarResult(submissionIdB, UploadSucceeded) - } - - val properties: mutable.Map[String, Object] = - mutable // S3 Mock mutates the map so we cannot use an immutable map :( - .Map( - S3MockApplication.PROP_HTTPS_PORT -> S3MockApplication.DEFAULT_HTTPS_PORT, - S3MockApplication.PROP_HTTP_PORT -> S3MockApplication.DEFAULT_HTTP_PORT, - S3MockApplication.PROP_SILENT -> true, - S3MockApplication.PROP_INITIAL_BUCKETS -> "cfpb-hmda-public-dev" - ) - .map { case (k, v) => (k, v.asInstanceOf[Object]) } - - override def bootstrapSqlFile: String = "modifiedlar.sql" -} \ No newline at end of file +//package hmda.publication.lar.publication +// +//import akka.NotUsed +//import akka.actor.ActorSystem +//import akka.actor.typed.{ActorSystem => TypedSystem} +//import akka.actor.typed.scaladsl.AskPattern._ +//import akka.actor.typed.scaladsl.Behaviors +//import akka.actor.typed.scaladsl.adapter._ +//import akka.actor.typed.{ActorRef, SupervisorStrategy} +//import akka.stream.scaladsl.Source +//import akka.testkit.TestKit +//import akka.util.Timeout +//import com.adobe.testing.s3mock.S3MockApplication +//import hmda.census.records.CensusRecords +//import hmda.messages.submission.HmdaRawDataEvents.LineAdded +//import hmda.model.census.Census +//import hmda.model.filing.lar.{LarGenerators, LoanApplicationRegister} +//import hmda.model.filing.submission.SubmissionId +//import hmda.persistence.util.CassandraUtil +//import hmda.query.repository.ModifiedLarRepository +//import hmda.utils.EmbeddedPostgres +//import hmda.utils.YearUtils.Period +//import io.github.embeddedkafka.EmbeddedKafkaConfig.defaultConfig.{kafkaPort, zooKeeperPort} +//import io.github.embeddedkafka.{EmbeddedK, EmbeddedKafka, EmbeddedKafkaConfig} +//import org.scalacheck.Gen +//import org.scalatest.concurrent.{PatienceConfiguration, ScalaFutures} +//import org.scalatest.time.{Millis, Minutes, Span} +//import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} +//import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks +//import slick.basic.DatabaseConfig +//import slick.jdbc.JdbcProfile +// +//import scala.annotation.tailrec +//import scala.collection.JavaConverters._ +//import scala.collection.mutable +//import scala.concurrent.duration._ +// +//class ModifiedLarPublisherSpec +// extends TestKit(ActorSystem("publisher-spec")) +// with WordSpecLike +// with Matchers +// with PatienceConfiguration +// with ScalaFutures +// with ScalaCheckPropertyChecks +// with BeforeAndAfterAll +// with EmbeddedPostgres { +// +// implicit val typedSystem = system.toTyped +// +// implicit val timeout = Timeout(3.minutes) +// +// var s3mock: S3MockApplication = _ +// var kafka: EmbeddedK = _ +// +// implicit val embedKafkaConfig = EmbeddedKafkaConfig(kafkaPort = 9092, zooKeeperPort = 9093) +// +// +// +// +// override def beforeAll(): Unit = { +// super.beforeAll() +// s3mock = S3MockApplication.start(properties.asJava) +// +// kafka = EmbeddedKafka.start()(embedKafkaConfig) +// +// CassandraUtil.startEmbeddedCassandra() +// } +// +// override def afterAll(): Unit = { +// super.afterAll() +// Option(s3mock).foreach(_.stop()) +// Option(kafka).foreach(_.stop(clearLogs = true)) +// CassandraUtil.shutdown() +// } +// +// override implicit def patienceConfig: PatienceConfig = PatienceConfig(timeout = Span(2, Minutes), interval = Span(100, Millis)) +// +// "Spawn publisher and upload data to S3 and Postgres" in { +// @tailrec +// def generateLarData(gen: Gen[List[LoanApplicationRegister]]): List[LoanApplicationRegister] = { +// val data = Gen.nonEmptyListOf(LarGenerators.larGen).sample +// if (data.isEmpty) generateLarData(gen) else data.get +// } +// +// val larData = generateLarData(LarGenerators.larNGen(100)) +// val censusTractMap2018: Map[String, Census] = CensusRecords.indexedTract2018 +// val censusTractMap2019: Map[String, Census] = CensusRecords.indexedTract2019 +// val censusTractMap2020: Map[String, Census] = CensusRecords.indexedTract2020 +// val censusTractMap2021: Map[String, Census] = CensusRecords.indexedTract2021 +// val censusTractMap2022: Map[String, Census] = CensusRecords.indexedTract2022 +// val censusTractMap2023: Map[String, Census] = CensusRecords.indexedTract2023 +// +// val customData: TypedSystem[_] => SubmissionId => Source[LineAdded, NotUsed] = +// _ => _ => Source(larData.zipWithIndex.map { case (lar, timestamp) => LineAdded(timestamp, lar.toCSV) }) +// +// val databaseConfig = DatabaseConfig.forConfig[JdbcProfile]("embedded-pg") +// val repo = new ModifiedLarRepository(databaseConfig) +// val publisher = system.spawnAnonymous( +// Behaviors +// .supervise(ModifiedLarPublisher.behavior(censusTractMap2018, censusTractMap2019, censusTractMap2020, censusTractMap2021, censusTractMap2022, censusTractMap2023,repo, customData)) +// .onFailure(SupervisorStrategy.stop) +// ) +// val submissionIdA = SubmissionId("B90YWS6AFX2LGWOXJ1LD", Period(2018, None), sequenceNumber = 1) +// val submissionIdB = SubmissionId("B90YWS6AFX2LGWOXJ1LD", Period(2019, None), sequenceNumber = 1) +// +// val resultA = publisher ? ((replyTo: ActorRef[PersistModifiedLarResult]) => PersistToS3AndPostgres(submissionIdA, replyTo)) +// val resultB = publisher ? ((replyTo: ActorRef[PersistModifiedLarResult]) => PersistToS3AndPostgres(submissionIdB, replyTo)) +// resultA.futureValue shouldBe PersistModifiedLarResult(submissionIdA, UploadSucceeded) +// resultB.futureValue shouldBe PersistModifiedLarResult(submissionIdB, UploadSucceeded) +// } +// +// val properties: mutable.Map[String, Object] = +// mutable // S3 Mock mutates the map so we cannot use an immutable map :( +// .Map( +// S3MockApplication.PROP_HTTPS_PORT -> S3MockApplication.DEFAULT_HTTPS_PORT, +// S3MockApplication.PROP_HTTP_PORT -> S3MockApplication.DEFAULT_HTTP_PORT, +// S3MockApplication.PROP_SILENT -> true, +// S3MockApplication.PROP_INITIAL_BUCKETS -> "cfpb-hmda-public-dev" +// ) +// .map { case (k, v) => (k, v.asInstanceOf[Object]) } +// +// override def bootstrapSqlFile: String = "modifiedlar.sql" +//} \ No newline at end of file diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 452dbaf0f0..8c0ad4617b 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -1,4 +1,4 @@ -import sbt._ +import sbt.* object Dependencies { @@ -26,11 +26,11 @@ object Dependencies { lazy val akkaStreamTyped = "com.typesafe.akka" %% "akka-stream-typed" % Version.akka lazy val akkaStreamsTestKit = "com.typesafe.akka" %% "akka-stream-testkit" % Version.akka % Test lazy val akkaHttp = "com.typesafe.akka" %% "akka-http" % Version.akkaHttp - lazy val akkaHttp2 = "com.typesafe.akka" %% "akka-http2-support" % Version.akkaHttp + lazy val akkaHttp2 = "com.typesafe.akka" %% "akka-http2-support" % Version.akkaHttp2Support + lazy val akkaHttpXml = "com.typesafe.akka" %% "akka-http-xml" % Version.akkaHttp lazy val akkaHttpTestkit = "com.typesafe.akka" %% "akka-http-testkit" % Version.akkaHttp % Test lazy val akkaHttpSprayJson = "com.typesafe.akka" %% "akka-http-spray-json" % Version.akkaHttp lazy val slickPostgres = "com.github.tminglei" %% "slick-pg" % Version.slickPostgres - lazy val akkaHttpXml = "com.typesafe.akka" %% "akka-http-xml" % Version.akkaHttp lazy val akkaHttpCirce = "de.heikoseeberger" %% "akka-http-circe" % Version.akkaHttpJson lazy val akkaManagementClusterBootstrap = "com.lightbend.akka.management" %% "akka-management-cluster-bootstrap" % Version.akkaClusterManagement exclude ("com.typesafe.akka", "akka-http") exclude ("com.typesafe.akka", "akka-http-xml") @@ -48,15 +48,14 @@ object Dependencies { lazy val circeGeneric = "io.circe" %% "circe-generic" % Version.circe lazy val circeParser = "io.circe" %% "circe-parser" % Version.circe lazy val akkaPersistenceCassandra = "com.typesafe.akka" %% "akka-persistence-cassandra" % Version.cassandraPluginVersion - lazy val cassandraLauncher = "com.typesafe.akka" %% "akka-persistence-cassandra-launcher" % Version.cassandraPluginVersion + lazy val cassandraLauncher = "com.typesafe.akka" %% "akka-persistence-cassandra-launcher" % Version.cassandraLauncher lazy val slick = "com.typesafe.slick" %% "slick" % Version.slick lazy val slickHikariCP = "com.typesafe.slick" %% "slick-hikaricp" % Version.slick lazy val alpakkaSlick = "com.lightbend.akka" %% "akka-stream-alpakka-slick" % Version.alpakka lazy val postgres = "org.postgresql" % "postgresql" % Version.postgres lazy val h2 = "com.h2database" % "h2" % Version.h2 % Test lazy val testContainers = "org.testcontainers" % "testcontainers" % Version.testContainers % "test" - lazy val s3Mock = "com.adobe.testing" % "s3mock" % "2.1.19" % Test - lazy val apacheCommonsIO = "commons-io" % "commons-io" % "2.6" % Test + lazy val apacheCommonsIO = "commons-io" % "commons-io" % Version.apacheCommons % Test lazy val keycloakAdapter = "org.keycloak" % "keycloak-adapter-core" % Version.keycloak lazy val keycloak = "org.keycloak" % "keycloak-core" % Version.keycloak lazy val keycloakAdmin = "org.keycloak" % "keycloak-admin-client" % Version.keycloak diff --git a/project/Version.scala b/project/Version.scala index a79b75f32f..bda56aada1 100644 --- a/project/Version.scala +++ b/project/Version.scala @@ -1,20 +1,23 @@ object Version { - val logback = "1.2.1" + val logback = "1.5.6" val scalaTest = "3.0.8" val scalaCheck = "1.17.0" - val akka = "2.8.5" - val akkaHttp = "10.5.2" + val akka = "2.9.4" + val akkaHttp = "10.6.3" + val akkaHttp2Support = "10.5.3" val akkaHttpJson = "1.39.2" - val akkaClusterManagement = "1.4.1" + val akkaClusterManagement = "1.5.2" val akkaCors = "1.2.0" val akkaKafka = "2.0.2" val circe = "0.14.5" - val cassandraPluginVersion = "1.1.1" + val cassandraPluginVersion = "1.2.1" + val cassandraLauncher = "1.1.1" val slick = "3.4.1" val slickPostgres = "0.21.1" val postgres = "42.6.0" val h2 = "2.1.214" + val apacheCommons = "2.6" val keycloak = "22.0.1" val resteasy = "6.2.4.Final" val jbossLogging = "3.5.1.Final" @@ -37,6 +40,6 @@ object Version { val scalacheckShapeless = "1.2.5" val diffx = "0.4.0" val log4j = "2.15.0" - val kubernetesApi = "15.0.1" + val kubernetesApi = "21.0.0-legacy" val testContainers = "1.19.8" } \ No newline at end of file diff --git a/project/scoverage.sbt b/project/scoverage.sbt index 4366152868..e247b7f63c 100644 --- a/project/scoverage.sbt +++ b/project/scoverage.sbt @@ -1 +1,2 @@ -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.10") \ No newline at end of file +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.10") +addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1") \ No newline at end of file diff --git a/ratespread-calculator/src/main/scala/hmda/calculator/api/http/RateSpreadAPIRoutes.scala b/ratespread-calculator/src/main/scala/hmda/calculator/api/http/RateSpreadAPIRoutes.scala index 69e83e7e0c..b0a8a15cb4 100644 --- a/ratespread-calculator/src/main/scala/hmda/calculator/api/http/RateSpreadAPIRoutes.scala +++ b/ratespread-calculator/src/main/scala/hmda/calculator/api/http/RateSpreadAPIRoutes.scala @@ -7,7 +7,7 @@ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.stream.scaladsl.Source import akka.util.ByteString -import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ +import ch.megard.akka.http.cors.scaladsl.CorsDirectives.{cors, corsRejectionHandler} import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._ import hmda.calculator.api.model.RateSpreadRequest import hmda.calculator.apor.APORCommands diff --git a/ratespread-calculator/src/test/scala/hmda/calculator/scheduler/APORSchedulerSpec.scala b/ratespread-calculator/src/test/scala/hmda/calculator/scheduler/APORSchedulerSpec.scala index cc8d417bcf..d421aacd78 100644 --- a/ratespread-calculator/src/test/scala/hmda/calculator/scheduler/APORSchedulerSpec.scala +++ b/ratespread-calculator/src/test/scala/hmda/calculator/scheduler/APORSchedulerSpec.scala @@ -1,122 +1,122 @@ -package hmda.calculator.scheduler - -import java.time.LocalDate -import java.time.format.DateTimeFormatter - -import akka.actor.ActorSystem -import akka.actor.typed.scaladsl.adapter._ -import akka.stream.alpakka.s3.ApiVersion.ListBucketVersion2 -import akka.stream.alpakka.s3.scaladsl.S3 -import akka.stream.alpakka.s3.{ MemoryBufferType, S3Attributes, S3Settings } -import akka.stream.scaladsl.{ Sink, Source } -import akka.testkit.{ ImplicitSender, TestKit } -import akka.util.ByteString -import com.adobe.testing.s3mock.S3MockApplication -import hmda.calculator.apor.APOR -import hmda.calculator.scheduler.APORScheduler.Command -import org.scalatest.concurrent.{ Eventually, ScalaFutures } -import org.scalatest.time.{ Millis, Minutes, Span } -import org.scalatest.{ BeforeAndAfterAll, FreeSpecLike, Matchers } -import software.amazon.awssdk.auth.credentials.{ AwsBasicCredentials, StaticCredentialsProvider } -import software.amazon.awssdk.regions.Region -import software.amazon.awssdk.regions.providers.AwsRegionProvider - -import scala.collection.JavaConverters._ -import scala.collection.mutable -import scala.util.Try - -class APORSchedulerSpec - extends TestKit(ActorSystem("apor-scheduler-spec")) - with ImplicitSender - with FreeSpecLike - with Matchers - with ScalaFutures - with BeforeAndAfterAll - with Eventually { - - override implicit def patienceConfig: PatienceConfig = PatienceConfig(timeout = Span(2, Minutes), interval = Span(100, Millis)) - - var s3mock: S3MockApplication = _ - - "APORScheduler should publish data to the S3 bucket" in { - - val awsConfig = system.settings.config.getConfig("aws") - val accessKeyId = awsConfig.getString("access-key-id") - val secretAccess = awsConfig.getString("secret-access-key ") - val region = awsConfig.getString("region") - val bucket = awsConfig.getString("public-bucket") - val environment = awsConfig.getString("environment") - val awsCredentialsProvider = StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKeyId, secretAccess)) - val awsRegionProvider: AwsRegionProvider = () => Region.of(region) - - val s3Settings = S3Settings(system) - .withBufferType(MemoryBufferType) - .withCredentialsProvider(awsCredentialsProvider) - .withS3RegionProvider(awsRegionProvider) - .withListBucketApiVersion(ListBucketVersion2) - - val config = system.settings.config - val aporConfig = config.getConfig("hmda.apors") - val fixedRateFileName = aporConfig.getString("fixed.rate.fileName") - val variableRateFileName = aporConfig.getString("variable.rate.fileName ") - val fixedBucketKey = s"$environment/apor/$fixedRateFileName" - val variableBucketKey = s"$environment/apor/$variableRateFileName" - - val exampleAPOR: APOR = APOR( - LocalDate.parse("2018-03-22", DateTimeFormatter.ISO_LOCAL_DATE), - Seq(1.01, 1.02, 1.03, 1.04, 1.05, 1.06, 1.07, 1.08, 1.09, 1.1, 1.11, 1.12, 1.13, 1.14, 1.15, 1.16, 1.17, 1.18, 1.19, 1.2, 1.21, 1.22, - 1.23, 1.24, 1.25, 1.26, 1.27, 1.28, 1.29, 1.3, 1.31, 1.32, 1.33, 1.34, 1.35, 1.36, 1.37, 1.38, 1.39, 1.40, 1.41, 1.42, 1.43, 1.44, - 1.45, 1.46, 1.47, 1.48, 1.49, 1.5) - ) - - val sinkFixed = S3.multipartUpload(bucket, fixedBucketKey).withAttributes(S3Attributes.settings(s3Settings)) - val sinkVariable = S3.multipartUpload(bucket, variableBucketKey).withAttributes(S3Attributes.settings(s3Settings)) - - whenReady(Source.single(ByteString(exampleAPOR.toCSV)).runWith(sinkFixed))(_ => ()) - whenReady(Source.single(ByteString(exampleAPOR.toCSV)).runWith(sinkVariable))(_ => ()) - - val actor = system.spawn(APORScheduler(), APORScheduler.name) - actor ! Command.Initialize - - eventually { - val result = - S3.getObjectMetadata(bucket, fixedBucketKey) - .withAttributes(S3Attributes.settings(s3Settings)) - .runWith(Sink.head) - whenReady(result)(_ should not be empty) - } - - eventually { - val result = - S3.getObjectMetadata(bucket, variableBucketKey) - .withAttributes(S3Attributes.settings(s3Settings)) - .runWith(Sink.head) - whenReady(result)(_ should not be empty) - } - - watch(actor.toClassic) - system.stop(actor.toClassic) - expectTerminated(actor.toClassic) - } - - override def beforeAll(): Unit = { - super.beforeAll() - val properties: mutable.Map[String, Object] = - mutable // S3 Mock mutates the map so we cannot use an immutable map :( - .Map( - S3MockApplication.PROP_HTTPS_PORT -> S3MockApplication.DEFAULT_HTTPS_PORT, - S3MockApplication.PROP_HTTP_PORT -> S3MockApplication.DEFAULT_HTTP_PORT, - S3MockApplication.PROP_SILENT -> true, - S3MockApplication.PROP_INITIAL_BUCKETS -> "cfpb-hmda-public,cfpb-hmda-export" - ) - .map { case (k, v) => (k, v.asInstanceOf[Object]) } - - s3mock = S3MockApplication.start(properties.asJava) - } - - override def afterAll(): Unit = { - Try(s3mock.stop()) - super.afterAll() - } - -} \ No newline at end of file +//package hmda.calculator.scheduler +// +//import java.time.LocalDate +//import java.time.format.DateTimeFormatter +// +//import akka.actor.ActorSystem +//import akka.actor.typed.scaladsl.adapter._ +//import akka.stream.alpakka.s3.ApiVersion.ListBucketVersion2 +//import akka.stream.alpakka.s3.scaladsl.S3 +//import akka.stream.alpakka.s3.{ MemoryBufferType, S3Attributes, S3Settings } +//import akka.stream.scaladsl.{ Sink, Source } +//import akka.testkit.{ ImplicitSender, TestKit } +//import akka.util.ByteString +//import com.adobe.testing.s3mock.S3MockApplication +//import hmda.calculator.apor.APOR +//import hmda.calculator.scheduler.APORScheduler.Command +//import org.scalatest.concurrent.{ Eventually, ScalaFutures } +//import org.scalatest.time.{ Millis, Minutes, Span } +//import org.scalatest.{ BeforeAndAfterAll, FreeSpecLike, Matchers } +//import software.amazon.awssdk.auth.credentials.{ AwsBasicCredentials, StaticCredentialsProvider } +//import software.amazon.awssdk.regions.Region +//import software.amazon.awssdk.regions.providers.AwsRegionProvider +// +//import scala.collection.JavaConverters._ +//import scala.collection.mutable +//import scala.util.Try +// +//class APORSchedulerSpec +// extends TestKit(ActorSystem("apor-scheduler-spec")) +// with ImplicitSender +// with FreeSpecLike +// with Matchers +// with ScalaFutures +// with BeforeAndAfterAll +// with Eventually { +// +// override implicit def patienceConfig: PatienceConfig = PatienceConfig(timeout = Span(2, Minutes), interval = Span(100, Millis)) +// +// var s3mock: S3MockApplication = _ +// +// "APORScheduler should publish data to the S3 bucket" in { +// +// val awsConfig = system.settings.config.getConfig("aws") +// val accessKeyId = awsConfig.getString("access-key-id") +// val secretAccess = awsConfig.getString("secret-access-key ") +// val region = awsConfig.getString("region") +// val bucket = awsConfig.getString("public-bucket") +// val environment = awsConfig.getString("environment") +// val awsCredentialsProvider = StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKeyId, secretAccess)) +// val awsRegionProvider: AwsRegionProvider = () => Region.of(region) +// +// val s3Settings = S3Settings(system) +// .withBufferType(MemoryBufferType) +// .withCredentialsProvider(awsCredentialsProvider) +// .withS3RegionProvider(awsRegionProvider) +// .withListBucketApiVersion(ListBucketVersion2) +// +// val config = system.settings.config +// val aporConfig = config.getConfig("hmda.apors") +// val fixedRateFileName = aporConfig.getString("fixed.rate.fileName") +// val variableRateFileName = aporConfig.getString("variable.rate.fileName ") +// val fixedBucketKey = s"$environment/apor/$fixedRateFileName" +// val variableBucketKey = s"$environment/apor/$variableRateFileName" +// +// val exampleAPOR: APOR = APOR( +// LocalDate.parse("2018-03-22", DateTimeFormatter.ISO_LOCAL_DATE), +// Seq(1.01, 1.02, 1.03, 1.04, 1.05, 1.06, 1.07, 1.08, 1.09, 1.1, 1.11, 1.12, 1.13, 1.14, 1.15, 1.16, 1.17, 1.18, 1.19, 1.2, 1.21, 1.22, +// 1.23, 1.24, 1.25, 1.26, 1.27, 1.28, 1.29, 1.3, 1.31, 1.32, 1.33, 1.34, 1.35, 1.36, 1.37, 1.38, 1.39, 1.40, 1.41, 1.42, 1.43, 1.44, +// 1.45, 1.46, 1.47, 1.48, 1.49, 1.5) +// ) +// +// val sinkFixed = S3.multipartUpload(bucket, fixedBucketKey).withAttributes(S3Attributes.settings(s3Settings)) +// val sinkVariable = S3.multipartUpload(bucket, variableBucketKey).withAttributes(S3Attributes.settings(s3Settings)) +// +// whenReady(Source.single(ByteString(exampleAPOR.toCSV)).runWith(sinkFixed))(_ => ()) +// whenReady(Source.single(ByteString(exampleAPOR.toCSV)).runWith(sinkVariable))(_ => ()) +// +// val actor = system.spawn(APORScheduler(), APORScheduler.name) +// actor ! Command.Initialize +// +// eventually { +// val result = +// S3.getObjectMetadata(bucket, fixedBucketKey) +// .withAttributes(S3Attributes.settings(s3Settings)) +// .runWith(Sink.head) +// whenReady(result)(_ should not be empty) +// } +// +// eventually { +// val result = +// S3.getObjectMetadata(bucket, variableBucketKey) +// .withAttributes(S3Attributes.settings(s3Settings)) +// .runWith(Sink.head) +// whenReady(result)(_ should not be empty) +// } +// +// watch(actor.toClassic) +// system.stop(actor.toClassic) +// expectTerminated(actor.toClassic) +// } +// +// override def beforeAll(): Unit = { +// super.beforeAll() +// val properties: mutable.Map[String, Object] = +// mutable // S3 Mock mutates the map so we cannot use an immutable map :( +// .Map( +// S3MockApplication.PROP_HTTPS_PORT -> S3MockApplication.DEFAULT_HTTPS_PORT, +// S3MockApplication.PROP_HTTP_PORT -> S3MockApplication.DEFAULT_HTTP_PORT, +// S3MockApplication.PROP_SILENT -> true, +// S3MockApplication.PROP_INITIAL_BUCKETS -> "cfpb-hmda-public,cfpb-hmda-export" +// ) +// .map { case (k, v) => (k, v.asInstanceOf[Object]) } +// +// s3mock = S3MockApplication.start(properties.asJava) +// } +// +// override def afterAll(): Unit = { +// Try(s3mock.stop()) +// super.afterAll() +// } +// +//} \ No newline at end of file