Solve #46: Merge branch 'master' of https://github.com/juanrh/sscheck
juanrh committed May 15, 2016
2 parents eabb766 + 279151d commit f5f63f8
Showing 8 changed files with 29 additions and 26 deletions.
3 changes: 2 additions & 1 deletion .travis.yml
@@ -1,6 +1,7 @@
 language: scala
 scala:
-  - 2.10.5
+  - 2.10.6
+  - 2.11.8
 jdk:
   - oraclejdk7
 script:
14 changes: 12 additions & 2 deletions README.md
@@ -3,10 +3,20 @@ Utilities for using ScalaCheck with Spark and Spark Streaming, based on Specs2
 
 [Jenkins](https://juanrhcubox.duckdns.org:8080/)
 
-Use linear temporal logic to write ScalaCheck properties for Spark Streaming programs, see the [**Quickstart**](https://github.com/juanrh/sscheck/wiki/Quickstart) for details.
+Use linear temporal logic to write ScalaCheck properties for Spark Streaming programs, see the [**Quickstart**](https://github.com/juanrh/sscheck/wiki/Quickstart) for details. See also
+
+* [**scaladoc**](http://juanrh.github.io/doc/sscheck/api)
+* sbt dependency
+
+```scala
+lazy val sscheckVersion = "0.2.4"
+libraryDependencies += "es.ucm.fdi" %% "sscheck" % sscheckVersion
+resolvers += Resolver.bintrayRepo("juanrh", "maven")
+```
+See latest version in [bintray](https://bintray.com/juanrh/maven/sscheck/view)
 
 # Acknowledgements
 This work has been partially supported by MICINN Spanish project StrongSoft (TIN2012-39391-C04-04), by the
 Spanish MINECO project CAVI-ART (TIN2013-44742-C4-3-R), and by the Comunidad de Madrid project [N-Greens Software-CM](http://n-greens-cm.org/) (S2013/ICE-2731).
 
-Some parts of this code are based on [Spark Testing Base](https://github.com/holdenk/spark-testing-base) by Holden Karau
+Some parts of this code are based on or have been taken from [Spark Testing Base](https://github.com/holdenk/spark-testing-base) by Holden Karau
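The three `+` lines in the sbt snippet above are everything a consumer project needs to pull sscheck from bintray. For context, a minimal complete `build.sbt` wiring them together might look like the following sketch; the project name and Scala version are illustrative, not part of the commit:

```scala
// build.sbt -- minimal sketch of a project depending on sscheck
name := "sscheck-demo" // illustrative project name

scalaVersion := "2.11.8" // sscheck 0.2.4 cross-builds for 2.10 and 2.11

lazy val sscheckVersion = "0.2.4"

// sscheck artifacts are published to the author's bintray maven repository
resolvers += Resolver.bintrayRepo("juanrh", "maven")

// %% appends the Scala binary version, picking sscheck_2.10 or sscheck_2.11
libraryDependencies += "es.ucm.fdi" %% "sscheck" % sscheckVersion
```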
3 changes: 3 additions & 0 deletions RELEASE_NOTES.md
@@ -1,3 +1,6 @@
+# sscheck 0.2.4
+Added cross Scala version compatibility, 2.10 and 2.11, see [#42](https://github.com/juanrh/sscheck/pull/42)
+
 # sscheck 0.2.3
 Bug fixing and code cleanup
 * Remove dependency to spark-testing-base and multisets in order to fix [#36](https://github.com/juanrh/sscheck/issues/36)
21 changes: 7 additions & 14 deletions build.sbt
@@ -1,26 +1,22 @@
 import com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys._
 
 name := "sscheck"
 
 organization := "es.ucm.fdi"
 
-version := "0.2.3" // "0.2.3-SNAPSHOT" //
+version := "0.2.4" // "0.2.4-SNAPSHOT" //
 
-scalaVersion := "2.10.6"
+scalaVersion := "2.11.8"
 
-autoScalaLibrary := false
-
-crossScalaVersions := Seq("2.10.6")
+crossScalaVersions := Seq("2.10.6", "2.11.8")
 
 licenses += ("Apache-2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))
 
 bintrayPackageLabels := Seq("testing")
 
-bintrayVcsUrl := Some("git@github.com:juanrh/sscheck.git")
+bintrayVcsUrl := Some("git@github.com:juanrh/sscheck.git")
 
 lazy val sparkVersion = "1.6.1"
 
-lazy val specs2Version = "3.6.4"
+lazy val specs2Version = "3.6.4"
 
 // Use `sbt doc` to generate scaladoc, more on chapter 14.8 of "Scala Cookbook"
@@ -39,7 +35,7 @@ parallelExecution := false
 
 // This is critical so log4j.properties is found by eclipse
 EclipseKeys.createSrc := EclipseCreateSrc.Default + EclipseCreateSrc.Resource
 
-// Spark
+// Spark
 libraryDependencies += "org.apache.spark" %% "spark-core" % sparkVersion
 
 libraryDependencies += "org.apache.spark" %% "spark-streaming" % sparkVersion
@@ -57,10 +53,7 @@ libraryDependencies += "org.specs2" %% "specs2-matcher-extra" % specs2Version
 
 libraryDependencies += "org.specs2" %% "specs2-junit" % specs2Version
 
-// note this is discontinued for scala 2.11, which uses https://github.com/typesafehub/scala-logging#contribution-policy
-libraryDependencies += "com.typesafe" % "scalalogging-log4j_2.10" % "1.1.0"
-
-libraryDependencies += "com.typesafe" %% "scalalogging-slf4j" % "1.1.0"
+libraryDependencies += "org.slf4j" % "slf4j-api" % "1.7.21"
 
 resolvers ++= Seq(
   "MVN Repository.com" at "http://mvnrepository.com/artifact/",
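The pivotal line in this file is `crossScalaVersions := Seq("2.10.6", "2.11.8")`: `scalaVersion` only picks the default compiler, while `crossScalaVersions` is what makes sbt build and publish one artifact per Scala binary version. A sketch of how the setting is driven from the sbt shell follows; the `+` prefix is standard sbt, and the snippet restates the diff rather than adding to it:

```scala
// Cross-building sketch. With this setting in build.sbt, prefixing a task
// with "+" runs it once per listed Scala version from the sbt shell:
//
//   + test      // compile and test against 2.10.6, then 2.11.8
//   + publish   // publish sscheck_2.10 and sscheck_2.11 artifacts
crossScalaVersions := Seq("2.10.6", "2.11.8")
```

Swapping the `com.typesafe` scalalogging artifacts for plain `slf4j-api` serves the same goal: as the removed comment notes, that line of scalalogging was discontinued before Scala 2.11, so keeping it would have blocked the cross-build.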
@@ -13,7 +13,6 @@ import scala.reflect.ClassTag
 import scala.concurrent.SyncVar
 import scala.util.{Try, Success, Failure}
 
-import com.typesafe.scalalogging.slf4j.Logger
 import org.slf4j.LoggerFactory
 
 import scala.util.Properties.lineSeparator
@@ -24,7 +23,7 @@ import es.ucm.fdi.sscheck.spark.streaming
 import es.ucm.fdi.sscheck.spark.streaming.TestInputStream
 
 object DStreamTLProperty {
-  @transient private val logger = Logger(LoggerFactory.getLogger("DStreamTLProperty"))
+  @transient private val logger = LoggerFactory.getLogger("DStreamTLProperty")
 }
 
 trait DStreamTLProperty
@@ -146,7 +145,7 @@ case class TestCaseTimeoutException(msg : String)
   extends PropExecutionException(msg)
 
 object TestCaseContext {
-  @transient private val logger = Logger(LoggerFactory.getLogger("TestCaseContext"))
+  @transient private val logger = LoggerFactory.getLogger("TestCaseContext")
 
   // Constants used for printing a sample of the generated values for each batch
   val msgHeader = "-"*43
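This file and the ones below apply the same mechanical swap: scalalogging's `Logger(...)` was a thin wrapper around an slf4j logger, so removing the wrapper means keeping exactly what `LoggerFactory.getLogger` already returns. A self-contained sketch of the before and after; the object name, main method, and message are illustrative, not from the commit:

```scala
import org.slf4j.{Logger, LoggerFactory}

object LoggerSwapSketch {
  // Before this commit (scalalogging wrapper around slf4j):
  //   @transient private val logger =
  //     Logger(LoggerFactory.getLogger("DStreamTLProperty"))
  // After: LoggerFactory already returns an org.slf4j.Logger, no wrapper needed
  @transient private val logger: Logger =
    LoggerFactory.getLogger("DStreamTLProperty")

  def main(args: Array[String]): Unit = {
    // plain slf4j defers formatting with {} placeholders rather than
    // scalalogging's macro-based interpolation
    logger.info("sample batch header: {}", "-" * 43)
  }
}
```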
@@ -2,7 +2,6 @@ package es.ucm.fdi.sscheck.spark
 
 import org.apache.spark._
 
-import com.typesafe.scalalogging.slf4j.Logger
 import org.slf4j.LoggerFactory
 
 /** This trait can be used to share a Spark Context. The context is created
@@ -14,7 +13,7 @@ trait SharedSparkContext
 
   // cannot use private[this] due to https://issues.scala-lang.org/browse/SI-8087
   //@transient private[this] val logger = Logger(LoggerFactory.getLogger("SharedSparkContext"))
-  @transient private val logger = Logger(LoggerFactory.getLogger("SharedSparkContext"))
+  @transient private val logger = LoggerFactory.getLogger("SharedSparkContext")
 
   /** Override for custom config
     * */
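Both the `@transient` annotation and the SI-8087 workaround in the comment above come from the same pressure: Spark serializes whatever its closures and fixtures capture, and slf4j loggers are not serializable. A minimal sketch of the pattern with an illustrative trait name, assuming (an inference, not stated in the commit) that logging only happens on the driver where the original, non-deserialized instance lives:

```scala
import org.slf4j.LoggerFactory

// Sketch: @transient excludes the non-serializable logger from Java
// serialization; a deserialized copy sees null here, which is acceptable
// when only the driver-side original ever logs.
trait LoggingFixture extends Serializable {
  @transient private val logger = LoggerFactory.getLogger("LoggingFixture")

  def setup(): Unit = logger.info("setting up shared fixture")
}
```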
@@ -2,7 +2,6 @@ package es.ucm.fdi.sscheck.spark.streaming
 
 import org.apache.spark.streaming.{StreamingContext,Duration}
 
-import com.typesafe.scalalogging.slf4j.Logger
 import org.slf4j.LoggerFactory
 
 import scala.util.Try
@@ -14,7 +13,7 @@ trait SharedStreamingContext
 
   // cannot use private[this] due to https://issues.scala-lang.org/browse/SI-8087
   // @transient private[this] val logger = Logger(LoggerFactory.getLogger("SharedStreamingContext"))
-  @transient private val logger = Logger(LoggerFactory.getLogger("SharedStreamingContext"))
+  @transient private val logger = LoggerFactory.getLogger("SharedStreamingContext")
 
   /** Override for custom config
     * */
@@ -10,7 +10,6 @@ import org.apache.spark.rdd.RDD
 import scala.collection.mutable.Queue
 import scala.concurrent.duration._
 
-import com.typesafe.scalalogging.slf4j.Logger
 import org.slf4j.LoggerFactory
 
 import es.ucm.fdi.sscheck.matcher.specs2.RDDMatchers._
@@ -25,7 +24,7 @@ class SharedStreamingContextBeforeAfterEachTest
   with SharedStreamingContextBeforeAfterEach {
 
   // cannot use private[this] due to https://issues.scala-lang.org/browse/SI-8087
-  @transient private val logger = Logger(LoggerFactory.getLogger("SharedStreamingContextBeforeAfterEachTest"))
+  @transient private val logger = LoggerFactory.getLogger("SharedStreamingContextBeforeAfterEachTest")
 
   // Spark configuration
   override def sparkMaster : String = "local[5]"
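One configuration detail worth spelling out: `local[5]` requests an in-process Spark master with five worker threads. Streaming tests generally need several, since a receiver-based input stream permanently occupies one thread, leaving the rest for batch processing. A sketch of what that master string feeds into; the app name and batch interval are illustrative:

```scala
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

object LocalMasterSketch {
  def main(args: Array[String]): Unit = {
    // "local[5]": run Spark in-process with 5 threads, so receivers and
    // batch processing do not starve each other in a streaming test
    val conf = new SparkConf().setMaster("local[5]").setAppName("sscheck-test")
    val ssc = new StreamingContext(conf, Seconds(1)) // illustrative batch interval
    ssc.stop(stopSparkContext = true)
  }
}
```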