diff --git a/analyzer/src/main/scala/com/avsystem/commons/analyzer/AnalyzerPlugin.scala b/analyzer/src/main/scala/com/avsystem/commons/analyzer/AnalyzerPlugin.scala
index bb1fff40d..e91ef8d0c 100644
--- a/analyzer/src/main/scala/com/avsystem/commons/analyzer/AnalyzerPlugin.scala
+++ b/analyzer/src/main/scala/com/avsystem/commons/analyzer/AnalyzerPlugin.scala
@@ -75,7 +75,7 @@ final class AnalyzerPlugin(val global: Global) extends Plugin { plugin =>
     import global._
 
-    def newPhase(prev: Phase) = new StdPhase(prev) {
+    def newPhase(prev: Phase): StdPhase = new StdPhase(prev) {
       def apply(unit: CompilationUnit): Unit = rules.foreach(rule =>
         if (rule.level != Level.Off) rule.analyze(unit.asInstanceOf[rule.global.CompilationUnit]))
     }
diff --git a/benchmark/jvm/src/main/scala/com/avsystem/commons/mongo/BsonCodecBenchmark.scala b/benchmark/jvm/src/main/scala/com/avsystem/commons/mongo/BsonCodecBenchmark.scala
index 290dbcbdf..987a3d68b 100644
--- a/benchmark/jvm/src/main/scala/com/avsystem/commons/mongo/BsonCodecBenchmark.scala
+++ b/benchmark/jvm/src/main/scala/com/avsystem/commons/mongo/BsonCodecBenchmark.scala
@@ -1,9 +1,9 @@
 package com.avsystem.commons
 package mongo
 
-import java.nio.ByteBuffer
+import com.avsystem.commons.ser.{Nested, Toplevel}
 
-import com.avsystem.commons.rpc.akka.serialization.{Nested, Something}
+import java.nio.ByteBuffer
 import org.bson.codecs.{BsonDocumentCodec, DecoderContext, EncoderContext}
 import org.bson.io.BasicOutputBuffer
 import org.bson.{BsonArray, BsonBinaryReader, BsonBinaryWriter, BsonDocument, BsonInt32, BsonString}
@@ -18,11 +18,11 @@ class BsonCodecBenchmark {
 
   import BsonCodecBenchmark._
 
-  private val something = Something(42, Nested(List(4, 8, 15, 16, 23, 42, 0), 131), "lol")
+  private val something = Toplevel(42, Nested(List(4, 8, 15, 16, 23, 42, 0), 131), "lol")
   private val doc = somethingCodec.toDocument(something)
   private val bytes = binaryEncode(something)
 
-  def binaryEncode(something: Something): Array[Byte] = {
+  def binaryEncode(something: Toplevel): Array[Byte] = {
     val output = new BasicOutputBuffer()
     val writer = new BsonBinaryWriter(output)
     val doc = somethingCodec.toDocument(something)
@@ -36,7 +36,7 @@ class BsonCodecBenchmark {
   }
 
   @Benchmark
-  def binaryDecoding(): Something = {
+  def binaryDecoding(): Toplevel = {
     val reader = new BsonBinaryReader(ByteBuffer.wrap(bytes))
     val doc = bsonDocumentCodec.decode(reader, DecoderContext.builder().build())
     somethingCodec.fromDocument(new Doc(doc))
@@ -48,7 +48,7 @@ class BsonCodecBenchmark {
   }
 
   @Benchmark
-  def decoding(): Something = {
+  def decoding(): Toplevel = {
     somethingCodec.fromDocument(doc)
   }
 }
@@ -76,13 +76,13 @@ object BsonCodecBenchmark {
 
   val nestedKey: DocKey[Nested, BsonDocument] = nestedCodec.bsonCodec.key("nested")
 
-  val somethingCodec = new DocumentCodec[Something] {
-    override def toDocument(t: Something): Doc = Doc()
+  val somethingCodec = new DocumentCodec[Toplevel] {
+    override def toDocument(t: Toplevel): Doc = Doc()
       .put(intKey, t.int)
       .put(nestedKey, t.nested)
       .put(strKey, t.str)
 
-    override def fromDocument(doc: Doc): Something = Something(
+    override def fromDocument(doc: Doc): Toplevel = Toplevel(
       int = doc.require(intKey),
       nested = doc.require(nestedKey),
       str = doc.require(strKey)
diff --git a/benchmark/jvm/src/main/scala/com/avsystem/commons/mongo/BsonInputOutputBenchmark.scala b/benchmark/jvm/src/main/scala/com/avsystem/commons/mongo/BsonInputOutputBenchmark.scala
index c688017d9..4d67e69bc 100644
--- a/benchmark/jvm/src/main/scala/com/avsystem/commons/mongo/BsonInputOutputBenchmark.scala
+++ b/benchmark/jvm/src/main/scala/com/avsystem/commons/mongo/BsonInputOutputBenchmark.scala
@@ -1,10 +1,10 @@
 package com.avsystem.commons
 package mongo
 
+import com.avsystem.commons.ser.{Nested, Toplevel}
+
 import java.io.StringWriter
 import java.nio.ByteBuffer
-
-import com.avsystem.commons.rpc.akka.serialization.{Nested, Something}
 import org.bson.io.BasicOutputBuffer
 import org.bson.json.{JsonReader, JsonWriter}
 import org.bson.{BsonBinaryReader, BsonBinaryWriter, BsonDocument, BsonDocumentReader, BsonDocumentWriter, BsonReader, BsonValue, BsonWriter}
@@ -16,29 +16,29 @@ import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Fork, Measurement,
 @BenchmarkMode(Array(Mode.Throughput))
 @State(Scope.Thread)
 class BsonInputOutputBenchmark {
-  private val something = Something(42, Nested(List(4, 8, 15, 16, 23, 42, 0), 131), "lol")
+  private val something = Toplevel(42, Nested(List(4, 8, 15, 16, 23, 42, 0), 131), "lol")
   private val bytes = binaryEncode(something)
   private val doc = documentEncode(something)
   private val json = jsonEncode(something)
 
-  def write(something: Something, bsonWriter: BsonWriter): Unit = {
+  def write(something: Toplevel, bsonWriter: BsonWriter): Unit = {
     val output = new BsonWriterOutput(bsonWriter)
-    Something.codec.write(output, something)
+    Toplevel.codec.write(output, something)
   }
 
-  def binaryEncode(something: Something): Array[Byte] = {
+  def binaryEncode(something: Toplevel): Array[Byte] = {
     val bsonOutput = new BasicOutputBuffer()
     write(something, new BsonBinaryWriter(bsonOutput))
     bsonOutput.toByteArray
   }
 
-  def documentEncode(something: Something): BsonDocument = {
+  def documentEncode(something: Toplevel): BsonDocument = {
     val doc = new BsonDocument()
     write(something, new BsonDocumentWriter(doc))
     doc
   }
 
-  def jsonEncode(something: Something): String = {
+  def jsonEncode(something: Toplevel): String = {
     val stringWriter = new StringWriter()
     write(something, new JsonWriter(stringWriter))
     stringWriter.toString
@@ -64,23 +64,23 @@ class BsonInputOutputBenchmark {
     BsonValueOutput.write(something)
   }
 
-  def read(bsonReader: BsonReader): Something = {
+  def read(bsonReader: BsonReader): Toplevel = {
     val input = new BsonReaderInput(bsonReader)
-    Something.codec.read(input)
+    Toplevel.codec.read(input)
   }
 
   @Benchmark
-  def binaryDecoding(): Something = {
+  def binaryDecoding(): Toplevel = {
     read(new BsonBinaryReader(ByteBuffer.wrap(bytes)))
   }
 
   @Benchmark
-  def documentDecoding(): Something = {
+  def documentDecoding(): Toplevel = {
     read(new BsonDocumentReader(doc))
   }
 
   @Benchmark
-  def jsonDecoding(): Something = {
+  def jsonDecoding(): Toplevel = {
     read(new JsonReader(json))
   }
 }
diff --git a/core/src/main/scala/com/avsystem/commons/serialization/SimpleValueInputOutput.scala b/core/src/main/scala/com/avsystem/commons/serialization/SimpleValueInputOutput.scala
index eecbb14ba..a0b408ce0 100644
--- a/core/src/main/scala/com/avsystem/commons/serialization/SimpleValueInputOutput.scala
+++ b/core/src/main/scala/com/avsystem/commons/serialization/SimpleValueInputOutput.scala
@@ -51,28 +51,28 @@ class SimpleValueOutput(
 
   def this(consumer: Any => Unit) = this(consumer, new MHashMap[String, Any], new ListBuffer[Any])
 
-  def writeNull(): Unit = consumer(null)
-  def writeBoolean(boolean: Boolean): Unit = consumer(boolean)
-  def writeString(str: String): Unit = consumer(str)
-  def writeInt(int: Int): Unit = consumer(int)
-  def writeLong(long: Long): Unit = consumer(long)
-  def writeDouble(double: Double): Unit = consumer(double)
-  def writeBigInt(bigInt: BigInt): Unit = consumer(bigInt)
-  def writeBigDecimal(bigDecimal: BigDecimal): Unit = consumer(bigDecimal)
-  def writeBinary(binary: Array[Byte]): Unit = consumer(binary)
+  override def writeNull(): Unit = consumer(null)
+  override def writeBoolean(boolean: Boolean): Unit = consumer(boolean)
+  override def writeString(str: String): Unit = consumer(str)
+  override def writeInt(int: Int): Unit = consumer(int)
+  override def writeLong(long: Long): Unit = consumer(long)
+  override def writeDouble(double: Double): Unit = consumer(double)
+  override def writeBigInt(bigInt: BigInt): Unit = consumer(bigInt)
+  override def writeBigDecimal(bigDecimal: BigDecimal): Unit = consumer(bigDecimal)
+  override def writeBinary(binary: Array[Byte]): Unit = consumer(binary)
 
   def writeList(): ListOutput = new ListOutput {
     private val buffer = newListRepr
     override def declareSize(size: Int): Unit = buffer.sizeHint(size)
-    def writeElement() = new SimpleValueOutput(buffer += _, newObjectRepr, newListRepr)
-    def finish(): Unit = consumer(buffer.result())
+    override def writeElement(): SimpleValueOutput = new SimpleValueOutput(buffer += _, newObjectRepr, newListRepr)
+    override def finish(): Unit = consumer(buffer.result())
   }
 
   def writeObject(): ObjectOutput = new ObjectOutput {
     private val result = newObjectRepr
     override def declareSize(size: Int): Unit = result.sizeHint(size)
-    def writeField(key: String) = new SimpleValueOutput(v => result += ((key, v)), newObjectRepr, newListRepr)
-    def finish(): Unit = consumer(result)
+    override def writeField(key: String): SimpleValueOutput = new SimpleValueOutput(v => result += ((key, v)), newObjectRepr, newListRepr)
+    override def finish(): Unit = consumer(result)
   }
 }
@@ -95,15 +95,15 @@ class SimpleValueInput(value: Any) extends InputAndSimpleInput {
     case _ => throw new ReadFailure(s"Expected ${classTag[B].runtimeClass} but got ${value.getClass}")
   }
 
-  def readNull(): Boolean = value == null
-  def readBoolean(): Boolean = doReadUnboxed[Boolean, JBoolean]
-  def readString(): String = doRead[String]
-  def readInt(): Int = doReadUnboxed[Int, JInteger]
-  def readLong(): Long = doReadUnboxed[Long, JLong]
-  def readDouble(): Double = doReadUnboxed[Double, JDouble]
-  def readBigInt(): BigInt = doRead[JBigInteger]
-  def readBigDecimal(): BigDecimal = doRead[JBigDecimal]
-  def readBinary(): Array[Byte] = doRead[Array[Byte]]
+  override def readNull(): Boolean = value == null
+  override def readBoolean(): Boolean = doReadUnboxed[Boolean, JBoolean]
+  override def readString(): String = doRead[String]
+  override def readInt(): Int = doReadUnboxed[Int, JInteger]
+  override def readLong(): Long = doReadUnboxed[Long, JLong]
+  override def readDouble(): Double = doReadUnboxed[Double, JDouble]
+  override def readBigInt(): BigInt = doRead[JBigInteger]
+  override def readBigDecimal(): BigDecimal = doRead[JBigDecimal]
+  override def readBinary(): Array[Byte] = doRead[Array[Byte]]
 
   def readObject(): ObjectInput =
     new ObjectInput {
@@ -112,10 +112,10 @@ class SimpleValueInput(value: Any) extends InputAndSimpleInput {
        case (k, v) => new SimpleValueFieldInput(k, v)
      }
      override def knownSize: Int = if(map.isEmpty) 0 else map.knownSize
-     def nextField(): SimpleValueFieldInput = it.next()
+     override def nextField(): SimpleValueFieldInput = it.next()
      override def peekField(name: String): Opt[SimpleValueFieldInput] = map.get(name).map(new SimpleValueFieldInput(name, _)).toOpt // values may be null!
-     def hasNext: Boolean = it.hasNext
+     override def hasNext: Boolean = it.hasNext
    }
 
  def readList(): ListInput =
@@ -123,11 +123,11 @@ class SimpleValueInput(value: Any) extends InputAndSimpleInput {
      private val inputSeq: BSeq[Any] = doRead[BSeq[Any]]
      private val it = inputSeq.iterator.map(new SimpleValueInput(_))
      override def knownSize: Int = if(inputSeq.isEmpty) 0 else inputSeq.knownSize
-     def nextElement(): SimpleValueInput = it.next()
-     def hasNext: Boolean = it.hasNext
+     override def nextElement(): SimpleValueInput = it.next()
+     override def hasNext: Boolean = it.hasNext
    }
 
-  def skip(): Unit = ()
+  override def skip(): Unit = ()
 }
 
 class SimpleValueFieldInput(val fieldName: String, value: Any)
diff --git a/core/src/test/scala/com/avsystem/commons/concurrent/ObservableExtensionsTest.scala b/core/src/test/scala/com/avsystem/commons/concurrent/ObservableExtensionsTest.scala
index 008b62cf1..534934204 100644
--- a/core/src/test/scala/com/avsystem/commons/concurrent/ObservableExtensionsTest.scala
+++ b/core/src/test/scala/com/avsystem/commons/concurrent/ObservableExtensionsTest.scala
@@ -16,7 +16,7 @@ class ObservableExtensionsTest extends AnyFunSuite with Matchers
   private implicit val scheduler: Scheduler = Scheduler(RunNowEC)
 
   test("headOptL") {
-    forAll { ints: List[Int] =>
+    forAll { (ints: List[Int]) =>
       Observable.fromIterable(ints).headOptL.runToFuture.futureValue shouldBe ints.headOpt
     }
   }
@@ -26,7 +26,7 @@ class ObservableExtensionsTest extends AnyFunSuite with Matchers
   }
 
   test("findOptL") {
-    forAll { ints: List[Int] =>
+    forAll { (ints: List[Int]) =>
       Observable.fromIterable(ints).findOptL(_ > 1).runToFuture.futureValue shouldBe ints.findOpt(_ > 1)
     }
   }
@@ -38,13 +38,13 @@ class ObservableExtensionsTest extends AnyFunSuite with Matchers
   }
 
   test("distinct") {
-    forAll { ints: List[Int] =>
+    forAll { (ints: List[Int]) =>
       Observable.fromIterable(ints).distinct.toListL.runToFuture.futureValue shouldBe ints.distinct
     }
   }
 
   test("distinctBy") {
-    forAll { ints: List[Int] =>
+    forAll { (ints: List[Int]) =>
       val f: Int => Int = _ % 256
       Observable.fromIterable(ints).distinctBy(f).toListL.runToFuture.futureValue shouldBe
@@ -53,20 +53,20 @@ class ObservableExtensionsTest extends AnyFunSuite with Matchers
   }
 
   test("sortedL") {
-    forAll { ints: List[Int] =>
+    forAll { (ints: List[Int]) =>
       Observable.fromIterable(ints).sortedL.runToFuture.futureValue shouldBe ints.sorted
     }
   }
 
   test("sortedByL") {
-    forAll { ints: List[Int] =>
+    forAll { (ints: List[Int]) =>
       val f: Int => Int = _ % 256
       Observable.fromIterable(ints).sortedByL(f).runToFuture.futureValue shouldBe ints.sortBy(f)
     }
   }
 
   test("toL") {
-    forAll { ints: List[(Int, Int)] =>
+    forAll { (ints: List[(Int, Int)]) =>
       def testFactory[T](factory: Factory[(Int, Int), T])(implicit position: Position) =
         Observable.fromIterable(ints).toL(factory).runToFuture.futureValue shouldBe factory.fromSpecific(ints)
@@ -100,7 +100,7 @@ class ObservableExtensionsTest extends AnyFunSuite with Matchers
   }
 
   test("mkMapL") {
-    forAll { ints: List[Int] =>
+    forAll { (ints: List[Int]) =>
       Observable.fromIterable(ints).mkMapL(_ % 3, _ + 2).runToFuture.futureValue shouldBe ints.mkMap(_ % 3, _ + 2)
     }
   }
diff --git a/core/src/test/scala/com/avsystem/commons/concurrent/TaskExtensionsTest.scala b/core/src/test/scala/com/avsystem/commons/concurrent/TaskExtensionsTest.scala
index ca188894d..35bdb36d2 100644
--- a/core/src/test/scala/com/avsystem/commons/concurrent/TaskExtensionsTest.scala
+++ b/core/src/test/scala/com/avsystem/commons/concurrent/TaskExtensionsTest.scala
@@ -24,7 +24,7 @@ class TaskExtensionsTest extends AnyFunSuite with Matchers with ScalaCheckDriven
   }
 
   test("traverseMap") {
-    forAll { data: List[(String, Int)] =>
+    forAll { (data: List[(String, Int)]) =>
       val map = data.toMap
       val expected = map.view.map({ case (key, value) => (key + key, value + 2) }).toMap
       val result = Task.traverseMap(map)({ case (key, value) => Task((key + key, value + 2)) }).runToFuture.futureValue
@@ -33,7 +33,7 @@ class TaskExtensionsTest extends AnyFunSuite with Matchers with ScalaCheckDriven
   }
 
   test("traverseMapValues") {
-    forAll { data: List[(String, Int)] =>
+    forAll { (data: List[(String, Int)]) =>
       val map = data.toMap
       val expected = map.view.mapValues(value => value + 2).toMap
       val result = Task.traverseMapValues(map)({ case (key, value) => Task(value + 2) }).runToFuture.futureValue
diff --git a/core/src/test/scala/com/avsystem/commons/serialization/IsoInstantTest.scala b/core/src/test/scala/com/avsystem/commons/serialization/IsoInstantTest.scala
index e74b80001..b0b632c81 100644
--- a/core/src/test/scala/com/avsystem/commons/serialization/IsoInstantTest.scala
+++ b/core/src/test/scala/com/avsystem/commons/serialization/IsoInstantTest.scala
@@ -26,7 +26,7 @@ class IsoInstantTest extends AnyFunSuite with ScalaCheckPropertyChecks {
 
   test("roundtrip") {
     val genTstamp = Gen.choose[Long](-(1L << 50), 1L << 50)
-    forAll(genTstamp) { l: Long =>
+    forAll(genTstamp) { (l: Long) =>
       assert(IsoInstant.parse(IsoInstant.format(l)) == l)
     }
   }
diff --git a/core/src/test/scala/com/avsystem/commons/serialization/json/JsonStringInputOutputTest.scala b/core/src/test/scala/com/avsystem/commons/serialization/json/JsonStringInputOutputTest.scala
index 814296576..4a6c1a8bb 100644
--- a/core/src/test/scala/com/avsystem/commons/serialization/json/JsonStringInputOutputTest.scala
+++ b/core/src/test/scala/com/avsystem/commons/serialization/json/JsonStringInputOutputTest.scala
@@ -205,7 +205,7 @@ class JsonStringInputOutputTest extends AnyFunSuite with SerializationTestUtils
   }
 
   test("serialize all types") {
-    forAll { item: CompleteItem =>
+    forAll { (item: CompleteItem) =>
       val serialized = write(item)
       val deserialized = read[CompleteItem](serialized)
@@ -363,7 +363,7 @@ class JsonStringInputOutputTest extends AnyFunSuite with SerializationTestUtils
       Gen.sized(sz => sized(math.min(sz, 1)))
     }
 
-    forAll { dncc: DeepNestedTestCC =>
+    forAll { (dncc: DeepNestedTestCC) =>
       val serialized = write(dncc)
       val deserialized = read[DeepNestedTestCC](serialized)
diff --git a/jetty/src/test/scala/com/avsystem/commons/jetty/rpc/JettyRPCFrameworkTest.scala b/jetty/src/test/scala/com/avsystem/commons/jetty/rpc/JettyRPCFrameworkTest.scala
index da678fe6d..f2d7397a3 100644
--- a/jetty/src/test/scala/com/avsystem/commons/jetty/rpc/JettyRPCFrameworkTest.scala
+++ b/jetty/src/test/scala/com/avsystem/commons/jetty/rpc/JettyRPCFrameworkTest.scala
@@ -53,7 +53,7 @@ class JettyRPCFrameworkTest extends AnyFunSuite with ScalaFutures with Matchers
   val impl: SomeApi = new SomeApi {
     override def keks: Future[Long] = Future.successful(keksResult)
     override def isTop(keks: Long): Future[Boolean] = Future.successful(keks == Int.MaxValue)
-    override val topper = new TopperImpl("%s", topKeksResult)
+    override val topper: TopperImpl = new TopperImpl("%s", topKeksResult)
     override def differentTopper(helloPattern: String): Topper = new TopperImpl(helloPattern, topKeksResult)
     override def erroneousKeks: Future[Int] = Future.failed(new RuntimeException("cannot into"))
   }
diff --git a/mongo/jvm/src/test/scala-2.13/com/avsystem/commons/mongo/typed/PolyDataWithCustomImplicits.scala b/mongo/jvm/src/test/scala-2.13/com/avsystem/commons/mongo/typed/PolyDataWithCustomImplicits.scala
index 704c5667c..acbd3b639 100644
--- a/mongo/jvm/src/test/scala-2.13/com/avsystem/commons/mongo/typed/PolyDataWithCustomImplicits.scala
+++ b/mongo/jvm/src/test/scala-2.13/com/avsystem/commons/mongo/typed/PolyDataWithCustomImplicits.scala
@@ -8,7 +8,7 @@ case class CustomWrappy(value: String)
 
 object CustomImplicits {
   implicit val customWrappyCodec: GenCodec[CustomWrappy] =
-    GenCodec.transformed[CustomWrappy, String](_.value, CustomWrappy)
+    GenCodec.transformed[CustomWrappy, String](_.value, CustomWrappy.apply)
 }
 
 abstract class CustomPolyDataCompanion[D[_]](
diff --git a/mongo/jvm/src/test/scala/com/avsystem/commons/mongo/BigDecimalEncodingTest.scala b/mongo/jvm/src/test/scala/com/avsystem/commons/mongo/BigDecimalEncodingTest.scala
index b97bb8626..2f1759de2 100644
--- a/mongo/jvm/src/test/scala/com/avsystem/commons/mongo/BigDecimalEncodingTest.scala
+++ b/mongo/jvm/src/test/scala/com/avsystem/commons/mongo/BigDecimalEncodingTest.scala
@@ -6,7 +6,7 @@ import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
 
 class BigDecimalEncodingTest extends AnyFunSuite with ScalaCheckPropertyChecks {
   test("BigDecimal BSON encoding") {
-    forAll { value: BigDecimal =>
+    forAll { (value: BigDecimal) =>
       assert(value == BsonInput.bigDecimalFromBytes(BsonOutput.bigDecimalBytes(value)))
     }
   }
diff --git a/mongo/jvm/src/test/scala/com/avsystem/commons/mongo/Decimal128UtilsTest.scala b/mongo/jvm/src/test/scala/com/avsystem/commons/mongo/Decimal128UtilsTest.scala
index 1f5c6e63c..4de38df36 100644
--- a/mongo/jvm/src/test/scala/com/avsystem/commons/mongo/Decimal128UtilsTest.scala
+++ b/mongo/jvm/src/test/scala/com/avsystem/commons/mongo/Decimal128UtilsTest.scala
@@ -8,7 +8,7 @@ import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
 
 class Decimal128UtilsTest extends AnyFunSuite with ScalaCheckPropertyChecks {
   test("Decimal128Utils.fromBigDecimal is equivalent to new Decimal128") {
-    forAll(Arbitrary.arbitrary[BigDecimal]) { bd: BigDecimal =>
+    forAll(Arbitrary.arbitrary[BigDecimal]) { (bd: BigDecimal) =>
       val usingUtils = Decimal128Utils.fromBigDecimal(bd)
       val usingConstructor = try new Decimal128(bd.bigDecimal).opt catch {
         case _: NumberFormatException => Opt.Empty
diff --git a/project/Commons.scala b/project/Commons.scala
index 839815d3d..a08cc9836 100644
--- a/project/Commons.scala
+++ b/project/Commons.scala
@@ -134,6 +134,7 @@ object Commons extends ProjectGroup("commons") {
     "-language:experimental.macros",
     "-language:higherKinds",
     "-Xfatal-warnings",
+    "-Xsource:3",
     "-Xlint:-missing-interpolator,-adapted-args,-unused,_",
     "-Ycache-plugin-class-loader:last-modified",
     "-Ycache-macro-class-loader:last-modified",
diff --git a/redis/src/main/scala/com/avsystem/commons/redis/RedisRecordCodec.scala b/redis/src/main/scala/com/avsystem/commons/redis/RedisRecordCodec.scala
index ed0d6285c..ce580efb2 100644
--- a/redis/src/main/scala/com/avsystem/commons/redis/RedisRecordCodec.scala
+++ b/redis/src/main/scala/com/avsystem/commons/redis/RedisRecordCodec.scala
@@ -38,7 +38,7 @@ object RedisRecordCodec extends LowPriorityRedisRecordCodecs {
   private def bulks[F: RedisDataCodec, V: RedisDataCodec](it: Iterator[(F, V)], size: Int): IndexedSeq[BulkStringMsg] =
     it.flatMap { case (f, v) => List(RedisDataCodec.write(f), RedisDataCodec.write(v)) }
-      .map(BulkStringMsg).to(new SizedArraySeqFactory[BulkStringMsg](size))
+      .map(BulkStringMsg.apply).to(new SizedArraySeqFactory[BulkStringMsg](size))
 }
 
 sealed trait LowPriorityRedisRecordCodecs { this: RedisRecordCodec.type =>
   implicit def fromApplyUnapplyCodec[T](implicit codec: GenObjectCodec[T]): RedisRecordCodec[T] =
diff --git a/redis/src/main/scala/com/avsystem/commons/redis/Transaction.scala b/redis/src/main/scala/com/avsystem/commons/redis/Transaction.scala
index ef2da71cd..03b9b2fb0 100644
--- a/redis/src/main/scala/com/avsystem/commons/redis/Transaction.scala
+++ b/redis/src/main/scala/com/avsystem/commons/redis/Transaction.scala
@@ -81,7 +81,7 @@ final class Transaction[+A](batch: RedisBatch[A]) extends SinglePackBatch[A] {
             }
           singleError orElse
             errors.map(a => TransactionReply(IArraySeq.unsafeWrapArray(a))) orElse
-            normalResult.map(TransactionReply)
+            normalResult.map(TransactionReply.apply)
         case i =>
           message match {
             case RedisMsg.Queued =>
diff --git a/redis/src/main/scala/com/avsystem/commons/redis/commands/RedisInfo.scala b/redis/src/main/scala/com/avsystem/commons/redis/commands/RedisInfo.scala
index 5ddeb8dd3..0464a0430 100644
--- a/redis/src/main/scala/com/avsystem/commons/redis/commands/RedisInfo.scala
+++ b/redis/src/main/scala/com/avsystem/commons/redis/commands/RedisInfo.scala
@@ -192,7 +192,7 @@ trait ReplicationInfo extends RedisInfo {
   /**
     * @param slaveId ranges from 0 to [[connectedSlaves]]-1
     */
-  def slaveInfo(slaveId: Int): Opt[SlaveInfo] = get(s"slave$slaveId").map(SlaveInfo)
+  def slaveInfo(slaveId: Int): Opt[SlaveInfo] = get(s"slave$slaveId").map(SlaveInfo.apply)
   def masterReplOffset: Opt[Long] = get("master_repl_offset").map(_.toLong)
   def replBacklogActive: Opt[Boolean] = get("repl_backlog_active").map(_ == "1")
   def replBacklogSize: Opt[Long] = get("repl_backlog_size").map(_.toLong)
@@ -227,7 +227,7 @@ trait CommandstatsInfo extends RedisInfo {
   override protected def indexedPrefixes: List[String] = "cmdstat_" :: super.indexedPrefixes
 
   lazy val executedCommands: BSeq[String] = keysByPrefix.getOrElse("cmdstat_", Nil).map(_.stripPrefix("cmdstat_"))
-  def commandStat(command: String): Opt[CommandStat] = get(s"cmdstat_$command").map(CommandStat)
+  def commandStat(command: String): Opt[CommandStat] = get(s"cmdstat_$command").map(CommandStat.apply)
 }
 
 object CommandstatsInfo extends RedisInfoSection[CommandstatsInfo]("commandstats")
@@ -246,7 +246,7 @@ trait KeyspaceInfo extends RedisInfo {
   override protected def indexedPrefixes: List[String] = "db" :: super.indexedPrefixes
 
   lazy val nonEmptyDbs: Seq[Int] = keysByPrefix.getOrElse("db", Nil).iterator.map(_.stripPrefix("db").toInt).toList
-  def dbStat(dbId: Int): Opt[DbStat] = get(s"db$dbId").map(DbStat)
+  def dbStat(dbId: Int): Opt[DbStat] = get(s"db$dbId").map(DbStat.apply)
 }
 
 object KeyspaceInfo extends RedisInfoSection[KeyspaceInfo]("keyspace")
diff --git a/redis/src/main/scala/com/avsystem/commons/redis/commands/ReplyDecoders.scala b/redis/src/main/scala/com/avsystem/commons/redis/commands/ReplyDecoders.scala
index b843445ab..278af90ea 100644
--- a/redis/src/main/scala/com/avsystem/commons/redis/commands/ReplyDecoders.scala
+++ b/redis/src/main/scala/com/avsystem/commons/redis/commands/ReplyDecoders.scala
@@ -94,7 +94,7 @@ object ReplyDecoders {
 
   val bulkAsNodeInfos: ReplyDecoder[Seq[NodeInfo]] = {
     case BulkStringMsg(nodeInfos) =>
-      nodeInfos.utf8String.split("\n").iterator.filter(_.nonEmpty).map(NodeInfo).toIndexedSeq
+      nodeInfos.utf8String.split("\n").iterator.filter(_.nonEmpty).map(NodeInfo.apply).toIndexedSeq
   }
 
   val bulkAsNodeInfo: ReplyDecoder[NodeInfo] =
@@ -243,7 +243,7 @@ object ReplyDecoders {
   }
 
   val multiBulkAsSlowlogEntry: ReplyDecoder[SlowlogEntry] = {
-    case msg@ArrayMsg(IndexedSeq(IntegerMsg(id), IntegerMsg(timestamp), IntegerMsg(duration), ArrayMsg(rawCommand), rest@_*)) =>
+    case msg@ArrayMsg(IndexedSeq(IntegerMsg(id), IntegerMsg(timestamp), IntegerMsg(duration), ArrayMsg(rawCommand), rest*)) =>
       val commandArgs = rawCommand.map {
         case BulkStringMsg(arg) => arg
        case el => throw new UnexpectedReplyException(s"Unexpected message for SLOWLOG command argument: $el")
      }
@@ -263,7 +263,7 @@ object ReplyDecoders {
   }
 
   val multiBulkAsSlotRangeMapping: ReplyDecoder[SlotRangeMapping] = {
-    case ArrayMsg(IndexedSeq(IntegerMsg(from), IntegerMsg(to), master, slaves@_*)) =>
+    case ArrayMsg(IndexedSeq(IntegerMsg(from), IntegerMsg(to), master, slaves*)) =>
       val range = SlotRange(from.toInt, to.toInt)
       def parseNode(rr: RedisMsg) = rr match {
         case ArrayMsg(IndexedSeq(BulkStringMsg(ip), IntegerMsg(port), BulkStringMsg(nodeId), _*)) =>
diff --git a/redis/src/main/scala/com/avsystem/commons/redis/commands/cluster.scala b/redis/src/main/scala/com/avsystem/commons/redis/commands/cluster.scala
index e76a4d15d..9efb47885 100644
--- a/redis/src/main/scala/com/avsystem/commons/redis/commands/cluster.scala
+++ b/redis/src/main/scala/com/avsystem/commons/redis/commands/cluster.scala
@@ -281,7 +281,7 @@ case class NodeInfo(infoLine: String) {
   val address: NodeAddress = NodeAddress.parse(splitAddr(0))
   val clusterPort: Opt[String] = splitAddr.opt.filter(_.length > 1).map(_.apply(1))
   val flags: NodeFlags = NodeFlags(splitLine(2))
-  val master: Opt[NodeId] = Opt(splitLine(3)).filter(_ != "-").map(NodeId)
+  val master: Opt[NodeId] = Opt(splitLine(3)).filter(_ != "-").map(NodeId.apply)
   val pingSent: Long = splitLine(4).toLong
   val pongRecv: Long = splitLine(5).toLong
   val configEpoch: Long = splitLine(6).toLong
diff --git a/redis/src/main/scala/com/avsystem/commons/redis/commands/keys.scala b/redis/src/main/scala/com/avsystem/commons/redis/commands/keys.scala
index a60c135ff..c9a545b05 100644
--- a/redis/src/main/scala/com/avsystem/commons/redis/commands/keys.scala
+++ b/redis/src/main/scala/com/avsystem/commons/redis/commands/keys.scala
@@ -179,7 +179,7 @@ trait KeyedKeysApi extends ApiSubset {
     override def immediateResult: Opt[Int] = whenEmpty(keys, 0)
   }
 
-  private final class Dump(key: Key) extends RedisOptCommand[Dumped](bulk(Dumped)) with NodeCommand {
+  private final class Dump(key: Key) extends RedisOptCommand[Dumped](bulk(Dumped.apply)) with NodeCommand {
     val encoded: Encoded = encoder("DUMP").key(key).result
   }
diff --git a/redis/src/main/scala/com/avsystem/commons/redis/util/HeadIterable.scala b/redis/src/main/scala/com/avsystem/commons/redis/util/HeadIterable.scala
index 1685796f0..a687e97e8 100644
--- a/redis/src/main/scala/com/avsystem/commons/redis/util/HeadIterable.scala
+++ b/redis/src/main/scala/com/avsystem/commons/redis/util/HeadIterable.scala
@@ -2,7 +2,7 @@ package com.avsystem.commons
 package redis.util
 
 final class HeadIterable[+A](head: A, tail: Iterable[A]) extends Iterable[A] {
-  def iterator = new HeadIterator(head, tail.iterator)
+  override def iterator: HeadIterator[A] = new HeadIterator(head, tail.iterator)
   override def isEmpty = false
   override def foreach[U](f: A => U): Unit = {
@@ -14,7 +14,7 @@ final class HeadIterable[+A](head: A, tail: Iterable[A]) extends Iterable[A] {
 final class HeadIterator[+A](head: A, tail: Iterator[A]) extends Iterator[A] {
   private[this] var atHead = true
   def hasNext: Boolean = atHead || tail.hasNext
-  def next(): A =
+  override def next(): A =
     if (atHead) {
       atHead = false
       head
diff --git a/redis/src/test/scala/com/avsystem/commons/redis/commands/GeoApiSuite.scala b/redis/src/test/scala/com/avsystem/commons/redis/commands/GeoApiSuite.scala
index 3c8d7de50..b61ec3e18 100644
--- a/redis/src/test/scala/com/avsystem/commons/redis/commands/GeoApiSuite.scala
+++ b/redis/src/test/scala/com/avsystem/commons/redis/commands/GeoApiSuite.scala
@@ -49,7 +49,7 @@ trait GeoApiSuite extends CommandsSuite {
       Opt.Empty,
       Opt.Empty,
       Opt.Empty
-    ).map(_.map(GeoHash)))
+    ).map(_.map(GeoHash.apply)))
     geohash(OtherKey, Cities.keys.toList).assertEquals(List.fill(Cities.size)(Opt.Empty))
   }
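
A few notes on the language-level changes in this patch, with small standalone sketches; none of the snippets below come from the repository itself.

The build change in project/Commons.scala is what drives most of the source edits: "-Xsource:3" makes the Scala 2.13 compiler warn about constructs that were dropped or changed in Scala 3, and together with the already-present "-Xfatal-warnings" those warnings fail the build. A minimal sketch of the same setting in a plain build.sbt, assuming a standalone sbt project rather than this repo's ProjectGroup-based setup:

// Sketch only; the real flag list lives in project/Commons.scala.
scalacOptions ++= Seq(
  "-Xfatal-warnings", // turn the migration warnings into errors
  "-Xsource:3"        // accept Scala 3 syntax, warn about constructs dropped in Scala 3
)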
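
The repeated .map(SlaveInfo) -> .map(SlaveInfo.apply) style of change (also NodeId, GeoHash, Dumped, TransactionReply, BulkStringMsg, CustomWrappy) reflects one of the -Xsource:3 adjustments: a case class companion is no longer usable directly as a FunctionN value, so the code spells out the eta-expansion of its apply method. A small self-contained sketch; the Wrapper class is hypothetical, not from this codebase:

// Hypothetical example, not from this codebase.
case class Wrapper(value: String)

object CompanionApplyDemo {
  val raw: List[String] = List("a", "b", "c")

  // Scala 2 style relied on the companion object extending Function1:
  //   raw.map(Wrapper)
  // Under -Xsource:3 (and in Scala 3) the companion is not a Function1,
  // so the apply method is eta-expanded explicitly:
  val wrapped: List[Wrapper] = raw.map(Wrapper.apply)

  def main(args: Array[String]): Unit = println(wrapped)
}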
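
Similarly, every ScalaCheck property now takes its parameter as (ints: List[Int]) => instead of ints: List[Int] =>; a lambda with a type-ascribed parameter must parenthesize it under Scala 3 syntax, and the unparenthesized form is flagged under -Xsource:3 (which -Xfatal-warnings turns into a build failure). A standalone sketch using plain ScalaCheck; the property is made up, and the tests in this repo go through the ScalaTest forAll wrapper instead:

// Hypothetical property; assumes ScalaCheck on the classpath.
import org.scalacheck.Prop

object ForAllSyntaxDemo {
  // `ints: List[Int] =>` without parentheses is the old Scala 2 form;
  // the parenthesized parameter compiles on both Scala 2.13 and Scala 3.
  val reverseTwice: Prop = Prop.forAll { (ints: List[Int]) =>
    ints.reverse.reverse == ints
  }

  def main(args: Array[String]): Unit = reverseTwice.check()
}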
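
The pattern changes in ReplyDecoders.scala (rest@_* -> rest*, slaves@_* -> slaves*) switch to the Scala 3 spelling of a bound vararg pattern, which -Xsource:3 also accepts. A tiny sketch with hypothetical data, not Redis protocol messages:

// Hypothetical example showing the two vararg-pattern spellings.
object VarargPatternDemo {
  def describe(xs: Seq[Int]): String = xs match {
    // Scala 3 spelling, accepted by Scala 2.13 with -Xsource:3:
    case Seq(head, tail*) => s"head=$head, then ${tail.size} more"
    // The equivalent Scala 2 spelling would be: case Seq(head, tail @ _*) => ...
    case _ => "empty"
  }

  def main(args: Array[String]): Unit = println(describe(Seq(1, 2, 3)))
}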
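
The remaining edits add explicit override modifiers and explicit result types (def newPhase(prev: Phase): StdPhase, override val topper: TopperImpl, and the SimpleValueInput/SimpleValueOutput members). Spelling these out keeps the public signatures independent of type inference and makes the override relationship visible at the definition site. Roughly, in the style used throughout the patch; the trait and class below are hypothetical:

// Hypothetical trait and implementation, mirroring the style of the patch.
trait Greeter {
  def greeting: String
}

final class HelloGreeter extends Greeter {
  // explicit `override` and an explicit result type instead of relying on inference
  override val greeting: String = "hello"
}

object ExplicitMembersDemo {
  def main(args: Array[String]): Unit = println(new HelloGreeter().greeting)
}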