Skip to content

Commit

Permalink
Make cross spark tests actually work
Browse files Browse the repository at this point in the history
  • Loading branch information
pomadchin committed Nov 10, 2021
1 parent 0779fcc commit 55e1cc5
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 29 deletions.
6 changes: 0 additions & 6 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,12 +36,6 @@ jobs:
- name: Test & Compute Coverage
run: sbt ++${{ matrix.scala }} coverage frameless-test

- name: Test & Compute Coverage Spark 3.1.x
run: sbt ++${{ matrix.scala }} coverage frameless-test-spark31

- name: Test & Compute Coverage Spark 3.0.x
run: sbt ++${{ matrix.scala }} coverage frameless-test-spark30

- name: Upload Codecov Results
run: codecov -F ${{ matrix.scala }}

Expand Down
40 changes: 17 additions & 23 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,6 @@ val previousVersion = "0.10.1"

/** A list of projects that can be safely compiled across Scala versions. */
val projectsCrossVersion = "core" :: "cats" :: "dataset" :: "refined" :: "ml" :: Nil
val projectsSpark31 = projectsCrossVersion.head :: projectsCrossVersion.tail.map(_ + "-spark31")
val projectsSpark30 = projectsCrossVersion.head :: projectsCrossVersion.tail.map(_ + "-spark30")

ThisBuild / versionScheme := Some("semver-spec")

Expand Down Expand Up @@ -51,17 +49,13 @@ lazy val root = Project("frameless", file("." + "frameless")).in(file("."))
/** Not all Spark versions support Scala 2.13. These commands are launched for the supported subset of projects only. */
commands ++= Seq(
// run tests separately for different Spark versions to reduce pressure on CI
command("frameless-test")(projectsCrossVersion.map(_ + "/test") ::: projectsCrossVersion.map(_ + "/test/coverageReport")).value,
command212("frameless-test-spark31")(projectsSpark31.map(_ + "/test") ::: projectsSpark31.map(_ + "/test/coverageReport")).value,
command212("frameless-test-spark30")(projectsSpark30.map(_ + "/test") ::: projectsSpark30.map(_ + "/test/coverageReport")).value,
commandCrossVersion("frameless-test")(projectsCrossVersion.map(_ + "/test") ::: projectsCrossVersion.map(_ + "/test/coverageReport"), "test" :: "coverageReport" :: Nil).value,
commandCrossVersion("frameless-mimaReportBinaryIssues")(projectsCrossVersion.map(_ + "/mimaReportBinaryIssues"), "mimaReportBinaryIssues" :: Nil).value,
commandCrossVersion("frameless-publish")(projectsCrossVersion.map(_ + "/publish"), "publish" :: Nil).value,
commandCrossVersion("frameless-publishSigned")(projectsCrossVersion.map(_ + "/publishSigned"), "publishSigned" :: Nil).value,
)
)

def command(name: String)(commands: List[String]) = commandCrossVersion(name)(commands, commands)
def command212(name: String)(commands212: List[String]) = commandCrossVersion(name)(Nil, commands212)
def commandCrossVersion(name: String)(commands213: List[String], commands212: List[String]) = Def.setting { Command.command(name) { currentState =>
CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, 13)) => commands213 ::: currentState
Expand All @@ -79,16 +73,16 @@ lazy val cats = project
.settings(catsSettings)
.dependsOn(dataset % "test->test;compile->compile;provided->provided")

lazy val `cats-spark31` = (project in file("cats"))
lazy val `cats-spark31` = project
.settings(name := "frameless-cats-spark31")
.settings(target := file("cats-spark31/target"))
.settings(sourceDirectory := (cats / sourceDirectory).value)
.settings(catsSettings)
.settings(mimaPreviousArtifacts := Set.empty)
.dependsOn(`dataset-spark31` % "test->test;compile->compile;provided->provided")

lazy val `cats-spark30` = (project in file("cats"))
lazy val `cats-spark30` = project
.settings(name := "frameless-cats-spark30")
.settings(target := file("cats-spark30/target"))
.settings(sourceDirectory := (cats / sourceDirectory).value)
.settings(catsSettings)
.settings(mimaPreviousArtifacts := Set.empty)
.dependsOn(`dataset-spark30` % "test->test;compile->compile;provided->provided")
Expand All @@ -99,17 +93,17 @@ lazy val dataset = project
.settings(sparkDependencies(sparkVersion))
.dependsOn(core % "test->test;compile->compile")

lazy val `dataset-spark31` = (project in file("dataset"))
lazy val `dataset-spark31` = project
.settings(name := "frameless-dataset-spark31")
.settings(target := file("dataset-spark31/target"))
.settings(sourceDirectory := (dataset / sourceDirectory).value)
.settings(datasetSettings)
.settings(sparkDependencies(spark31Version))
.settings(mimaPreviousArtifacts := Set.empty)
.dependsOn(core % "test->test;compile->compile")

lazy val `dataset-spark30` = (project in file("dataset"))
lazy val `dataset-spark30` = project
.settings(name := "frameless-dataset-spark30")
.settings(target := file("dataset-spark30/target"))
.settings(sourceDirectory := (dataset / sourceDirectory).value)
.settings(datasetSettings)
.settings(sparkDependencies(spark30Version))
.settings(mimaPreviousArtifacts := Set.empty)
Expand All @@ -120,15 +114,15 @@ lazy val refined = project
.settings(refinedSettings)
.dependsOn(dataset % "test->test;compile->compile;provided->provided")

lazy val `refined-spark31` = (project in file("refined"))
lazy val `refined-spark31` = project
.settings(name := "frameless-refined-spark31")
.settings(target := file("refined-spark31/target"))
.settings(sourceDirectory := (refined / sourceDirectory).value)
.settings(refinedSettings)
.dependsOn(`dataset-spark31` % "test->test;compile->compile;provided->provided")

lazy val `refined-spark30` = (project in file("refined"))
lazy val `refined-spark30` = project
.settings(name := "frameless-refined-spark30")
.settings(target := file("refined-spark30/target"))
.settings(sourceDirectory := (refined / sourceDirectory).value)
.settings(refinedSettings)
.dependsOn(`dataset-spark30` % "test->test;compile->compile;provided->provided")

Expand All @@ -141,9 +135,9 @@ lazy val ml = project
dataset % "test->test;compile->compile;provided->provided"
)

lazy val `ml-spark31` = (project in file("ml"))
lazy val `ml-spark31` = project
.settings(name := "frameless-ml-spark31")
.settings(target := file("ml-spark31/target"))
.settings(sourceDirectory := (ml / sourceDirectory).value)
.settings(mlSettings)
.settings(sparkMlDependencies(spark31Version))
.settings(mimaPreviousArtifacts := Set.empty)
Expand All @@ -152,9 +146,9 @@ lazy val `ml-spark31` = (project in file("ml"))
`dataset-spark31` % "test->test;compile->compile;provided->provided"
)

lazy val `ml-spark30` = (project in file("ml"))
lazy val `ml-spark30` = project
.settings(name := "frameless-ml-spark30")
.settings(target := file("ml-spark30/target"))
.settings(sourceDirectory := (ml / sourceDirectory).value)
.settings(mlSettings)
.settings(sparkMlDependencies(spark30Version))
.settings(mimaPreviousArtifacts := Set.empty)
Expand Down

0 comments on commit 55e1cc5

Please sign in to comment.