diff --git a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/ArrayAsyncEncodingSpec.scala b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/ArrayAsyncEncodingSpec.scala
index 0027033ac..44666edba 100644
--- a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/ArrayAsyncEncodingSpec.scala
+++ b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/ArrayAsyncEncodingSpec.scala
@@ -18,7 +18,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
   inline def q = quote(query[ArraysTestEntity])
 
   "Support all sql base types and `Iterable` implementers" in {
-    await(ctx.run(q.insert(lift(e))))
+    await(ctx.run(q.insertValue(lift(e))))
     val actual = await(ctx.run(q)).head
     actual mustEqual e
     baseEntityDeepCheck(actual, e)
@@ -28,7 +28,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
     case class JodaTimes(timestamps: Seq[JodaLocalDateTime], dates: Seq[JodaLocalDate])
     val jE = JodaTimes(Seq(JodaLocalDateTime.now()), Seq(JodaLocalDate.now()))
     inline def jQ = quote(querySchema[JodaTimes]("ArraysTestEntity"))
-    await(ctx.run(jQ.insert(lift(jE))))
+    await(ctx.run(jQ.insertValue(lift(jE))))
     val actual = await(ctx.run(jQ)).head
     actual.timestamps mustBe jE.timestamps
     actual.dates mustBe jE.dates
@@ -38,7 +38,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
     case class JodaTimes(timestamps: Seq[JodaDateTime])
     val jE = JodaTimes(Seq(JodaDateTime.now()))
     inline def jQ = quote(querySchema[JodaTimes]("ArraysTestEntity"))
-    await(ctx.run(jQ.insert(lift(jE))))
+    await(ctx.run(jQ.insertValue(lift(jE))))
     val actual = await(ctx.run(jQ)).head
     actual.timestamps mustBe jE.timestamps
   }
@@ -47,7 +47,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
     case class Java8Times(timestamps: Seq[LocalDateTime], dates: Seq[LocalDate])
     val jE = Java8Times(Seq(LocalDateTime.now()), Seq(LocalDate.now()))
     inline def jQ = quote(querySchema[Java8Times]("ArraysTestEntity"))
-    await(ctx.run(jQ.insert(lift(jE))))
+    await(ctx.run(jQ.insertValue(lift(jE))))
     val actual = await(ctx.run(jQ)).head
     actual.timestamps mustBe jE.timestamps
     actual.dates mustBe jE.dates
@@ -55,7 +55,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
 
   "Support Iterable encoding basing on MappedEncoding" in {
     inline def wrapQ = quote(querySchema[WrapEntity]("ArraysTestEntity"))
-    await(ctx.run(wrapQ.insert(lift(wrapE))))
+    await(ctx.run(wrapQ.insertValue(lift(wrapE))))
     await(ctx.run(wrapQ)).head mustBe wrapE
   }
 
@@ -66,7 +66,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
       arrayDecoder[LocalDate, LocalDate, Col](identity)
     }
     import newCtx._
-    inline def insertQ = quote { query[ArraysTestEntity].insert(lift(e)) }
+    inline def insertQ = quote { query[ArraysTestEntity].insertValue(lift(e)) }
     await(newCtx.run(insertQ))
     intercept[IllegalStateException] {
       await(newCtx.run(query[ArraysTestEntity])).head mustBe e
@@ -75,7 +75,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
   }
 
   "Arrays in where clause" in {
-    await(ctx.run(q.insert(lift(e))))
+    await(ctx.run(q.insertValue(lift(e))))
     val actual1 = await(ctx.run(q.filter(_.texts == lift(List("test")))))
     val actual2 = await(ctx.run(q.filter(_.texts == lift(List("test2")))))
     baseEntityDeepCheck(actual1.head, e)
@@ -154,7 +154,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
     inline def realEntity = quote {
      querySchema[RealEncodingTestEntity]("EncodingTestEntity")
     }
 
-    await(ctx.run(realEntity.insert(lift(insertValue))))
+    await(ctx.run(realEntity.insertValue(lift(insertValue))))
     case class EncodingTestEntity(v1: List[String])
     intercept[IllegalStateException](await(ctx.run(query[EncodingTestEntity])))
@@ -165,7 +165,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
   val q = quote(query[ArraysTestEntity])
 
   "Support all sql base types and `Iterable` implementers" in {
-    await(ctx.run(q.insert(lift(e))))
+    await(ctx.run(q.insertValue(lift(e))))
     val actual = await(ctx.run(q)).head
     actual mustEqual e
     baseEntityDeepCheck(actual, e)
@@ -175,7 +175,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
     case class JodaTimes(timestamps: Seq[JodaLocalDateTime], dates: Seq[JodaLocalDate])
     val jE = JodaTimes(Seq(JodaLocalDateTime.now()), Seq(JodaLocalDate.now()))
     val jQ = quote(querySchema[JodaTimes]("ArraysTestEntity"))
-    await(ctx.run(jQ.insert(lift(jE))))
+    await(ctx.run(jQ.insertValue(lift(jE))))
     val actual = await(ctx.run(jQ)).head
     actual.timestamps mustBe jE.timestamps
     actual.dates mustBe jE.dates
@@ -185,7 +185,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
     case class JodaTimes(timestamps: Seq[JodaDateTime])
     val jE = JodaTimes(Seq(JodaDateTime.now()))
     val jQ = quote(querySchema[JodaTimes]("ArraysTestEntity"))
-    await(ctx.run(jQ.insert(lift(jE))))
+    await(ctx.run(jQ.insertValue(lift(jE))))
     val actual = await(ctx.run(jQ)).head
     actual.timestamps mustBe jE.timestamps
   }
@@ -194,7 +194,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
     case class Java8Times(timestamps: Seq[LocalDateTime], dates: Seq[LocalDate])
     val jE = Java8Times(Seq(LocalDateTime.now()), Seq(LocalDate.now()))
     val jQ = quote(querySchema[Java8Times]("ArraysTestEntity"))
-    await(ctx.run(jQ.insert(lift(jE))))
+    await(ctx.run(jQ.insertValue(lift(jE))))
     val actual = await(ctx.run(jQ)).head
     actual.timestamps mustBe jE.timestamps
     actual.dates mustBe jE.dates
@@ -202,7 +202,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
 
   "Support Iterable encoding basing on MappedEncoding" in {
     val wrapQ = quote(querySchema[WrapEntity]("ArraysTestEntity"))
-    await(ctx.run(wrapQ.insert(lift(wrapE))))
+    await(ctx.run(wrapQ.insertValue(lift(wrapE))))
     await(ctx.run(wrapQ)).head mustBe wrapE
   }
 
@@ -213,7 +213,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
      arrayDecoder[LocalDate, LocalDate, Col](identity)
     }
     import newCtx._
-    val insertQ = quote { query[ArraysTestEntity].insert(lift(e)) }
+    val insertQ = quote { query[ArraysTestEntity].insertValue(lift(e)) }
     await(newCtx.run(insertQ))
     intercept[IllegalStateException] {
       await(newCtx.run(query[ArraysTestEntity])).head mustBe e
@@ -222,7 +222,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
   }
 
   "Arrays in where clause" in {
-    await(ctx.run(q.insert(lift(e))))
+    await(ctx.run(q.insertValue(lift(e))))
     val actual1 = await(ctx.run(q.filter(_.texts == lift(List("test")))))
     val actual2 = await(ctx.run(q.filter(_.texts == lift(List("test2")))))
     baseEntityDeepCheck(actual1.head, e)
@@ -301,7 +301,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec {
     val realEntity = quote {
      querySchema[RealEncodingTestEntity]("EncodingTestEntity")
     }
 
-    await(ctx.run(realEntity.insert(lift(insertValue))))
+    await(ctx.run(realEntity.insertValue(lift(insertValue))))
     case class EncodingTestEntity(v1: List[String])
     intercept[IllegalStateException](await(ctx.run(query[EncodingTestEntity])))
diff --git a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/PostgresAsyncEncodingSpec.scala b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/PostgresAsyncEncodingSpec.scala
index 524c915d9..bcf464242 100644
--- a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/PostgresAsyncEncodingSpec.scala
+++ b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/PostgresAsyncEncodingSpec.scala
@@ -38,7 +38,7 @@ class PostgresAsyncEncodingSpec extends EncodingSpec {
     val rez0 = Await.result(testContext.run(q0), Duration.Inf)
 
     //insert new uuid
-    val rez1 = Await.result(testContext.run(query[EncodingUUIDTestEntity].insert(lift(EncodingUUIDTestEntity(testUUID)))), Duration.Inf)
+    val rez1 = Await.result(testContext.run(query[EncodingUUIDTestEntity].insertValue(lift(EncodingUUIDTestEntity(testUUID)))), Duration.Inf)
 
     //verify you can get the uuid back from the db
     val q2 = quote(query[EncodingUUIDTestEntity].map(p => p.v1))
@@ -95,7 +95,7 @@ class PostgresAsyncEncodingSpec extends EncodingSpec {
     val entity = DateEncodingTestEntity(JodaLocalDate.now, JodaLocalDateTime.now, JodaDateTime.now)
     val r = for {
       _ <- testContext.run(query[DateEncodingTestEntity].delete)
-      _ <- testContext.run(query[DateEncodingTestEntity].insert(lift(entity)))
+      _ <- testContext.run(query[DateEncodingTestEntity].insertValue(lift(entity)))
       result <- testContext.run(query[DateEncodingTestEntity])
     } yield result
     Await.result(r, Duration.Inf) mustBe Seq(entity)
@@ -106,7 +106,7 @@ class PostgresAsyncEncodingSpec extends EncodingSpec {
     val entity = DateEncodingTestEntity(LocalDate.now, LocalDateTime.now, ZonedDateTime.now)
     val r = for {
       _ <- testContext.run(query[DateEncodingTestEntity].delete)
-      _ <- testContext.run(query[DateEncodingTestEntity].insert(lift(entity)))
+      _ <- testContext.run(query[DateEncodingTestEntity].insertValue(lift(entity)))
       result <- testContext.run(query[DateEncodingTestEntity])
     } yield result
     Await.result(r, Duration.Inf) mustBe Seq(entity)
diff --git a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/PostgresJAsyncContextSpec.scala b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/PostgresJAsyncContextSpec.scala
index 34333af88..02a06e312 100644
--- a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/PostgresJAsyncContextSpec.scala
+++ b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/PostgresJAsyncContextSpec.scala
@@ -20,7 +20,7 @@ class PostgresJAsyncContextSpec extends Spec {
 
   "Insert with returning with single column table" in {
     val inserted: Long = await(testContext.run {
-      qr4.insert(lift(TestEntity4(0))).returningGenerated(_.i)
+      qr4.insertValue(lift(TestEntity4(0))).returningGenerated(_.i)
     })
     await(testContext.run(qr4.filter(_.i == lift(inserted))))
       .head.i mustBe inserted
@@ -28,7 +28,7 @@ class PostgresJAsyncContextSpec extends Spec {
   "Insert with returning with multiple columns" in {
     await(testContext.run(qr1.delete))
     val inserted = await(testContext.run {
-      qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => (r.i, r.s, r.o))
+      qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => (r.i, r.s, r.o))
     })
     (1, "foo", Some(123)) mustBe inserted
   }
diff --git a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/TransactionSpec.scala b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/TransactionSpec.scala
index 60b02d1e5..181ed6d85 100644
--- a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/TransactionSpec.scala
+++ b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/TransactionSpec.scala
@@ -27,16 +27,16 @@ class TransactionSpec extends PeopleSpec {
         for {
           _ <- testContext.run(query[Couple].delete)
           _ <- testContext.run(query[Person].delete)
-        } yield {} 
+        } yield {}
       }
     }
     Try {
       await {
         testContext.transaction { implicit ec =>
           for {
-            _ <- testContext.run(query[Couple].insert(lift(Couple("Alex", "Bert"))))
+            _ <- testContext.run(query[Couple].insertValue(lift(Couple("Alex", "Bert"))))
             _ <- scala.concurrent.Future { throw new RuntimeException("Blahblahblah") }
-            _ <- testContext.run(query[Person].insert(lift(Person("Alex", 60))))
+            _ <- testContext.run(query[Person].insertValue(lift(Person("Alex", 60))))
           } yield {}
         }
       }
@@ -54,18 +54,18 @@ class TransactionSpec extends PeopleSpec {
         for {
           _ <- testContext.run(query[Couple].delete)
           _ <- testContext.run(query[Person].delete)
-        } yield {} 
+        } yield {}
       }
     }
     await {
       testContext.transaction { implicit ec =>
         for {
-          _ <- testContext.run(query[Couple].insert(lift(Couple("Alex", "Bert"))))
-          _ <- testContext.run(query[Person].insert(lift(Person("Alex", 60))))
+          _ <- testContext.run(query[Couple].insertValue(lift(Couple("Alex", "Bert"))))
+          _ <- testContext.run(query[Person].insertValue(lift(Person("Alex", 60))))
        } yield {}
      }
    }
-    
+
    await(testContext.run(query[Couple])) mustEqual List(Couple("Alex", "Bert"))
    await(testContext.run(query[Person])) mustEqual List(Person("Alex", 60))
  }
diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ArrayJdbcEncodingSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ArrayJdbcEncodingSpec.scala
index 4b98cfb80..c6544ff2e 100644
--- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ArrayJdbcEncodingSpec.scala
+++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ArrayJdbcEncodingSpec.scala
@@ -15,7 +15,7 @@ class ArrayJdbcEncodingSpec extends ArrayEncodingBaseSpec {
   val corrected = e.copy(timestamps = e.timestamps.map(d => new Timestamp(d.getTime)))
 
   "Support all sql base types and `Seq` implementers" in {
-    ctx.run(q.insert(lift(corrected)))
+    ctx.run(q.insertValue(lift(corrected)))
     val actual = ctx.run(q).head
     actual mustEqual corrected
     baseEntityDeepCheck(actual, corrected)
@@ -23,7 +23,7 @@ class ArrayJdbcEncodingSpec extends ArrayEncodingBaseSpec {
 
   "Support Seq encoding basing on MappedEncoding" in {
     val wrapQ = quote(querySchema[WrapEntity]("ArraysTestEntity"))
-    ctx.run(wrapQ.insert(lift(wrapE)))
+    ctx.run(wrapQ.insertValue(lift(wrapE)))
     ctx.run(wrapQ).head.texts mustBe wrapE.texts
   }
 
@@ -31,7 +31,7 @@ class ArrayJdbcEncodingSpec extends ArrayEncodingBaseSpec {
     case class Timestamps(timestamps: List[Timestamp])
     val tE = Timestamps(List(new Timestamp(System.currentTimeMillis())))
     val tQ = quote(querySchema[Timestamps]("ArraysTestEntity"))
-    ctx.run(tQ.insert(lift(tE)))
+    ctx.run(tQ.insertValue(lift(tE)))
     ctx.run(tQ).head.timestamps mustBe tE.timestamps
   }
 
@@ -42,7 +42,7 @@ class ArrayJdbcEncodingSpec extends ArrayEncodingBaseSpec {
      arrayDecoder[LocalDate, LocalDate, Col](identity)
     }
     import newCtx._
-    newCtx.run(query[ArraysTestEntity].insert(lift(corrected)))
+    newCtx.run(query[ArraysTestEntity].insertValue(lift(corrected)))
     intercept[IllegalStateException] {
       newCtx.run(query[ArraysTestEntity]).head mustBe corrected
     }
@@ -57,12 +57,12 @@ class ArrayJdbcEncodingSpec extends ArrayEncodingBaseSpec {
     implicit def arrayUUIDEncoder[Col <: Seq[UUID]]: Encoder[Col] = arrayRawEncoder[UUID, Col]("uuid")
     implicit def arrayUUIDDecoder[Col <: Seq[UUID]](implicit bf: CBF[UUID, Col]): Decoder[Col] = arrayRawDecoder[UUID, Col]
-    ctx.run(q.insert(lift(e)))
+    ctx.run(q.insertValue(lift(e)))
     ctx.run(q).head.uuids mustBe e.uuids
   }
 
   "Arrays in where clause" in {
-    ctx.run(q.insert(lift(corrected)))
+    ctx.run(q.insertValue(lift(corrected)))
     val actual1 = ctx.run(q.filter(_.texts == lift(List("test"))))
     val actual2 = ctx.run(q.filter(_.texts == lift(List("test2"))))
     actual1 mustEqual List(corrected)
@@ -71,7 +71,7 @@ class ArrayJdbcEncodingSpec extends ArrayEncodingBaseSpec {
 
   "empty array on found null" in {
     case class ArraysTestEntity(texts: Option[List[String]])
-    ctx.run(query[ArraysTestEntity].insert(lift(ArraysTestEntity(None))))
+    ctx.run(query[ArraysTestEntity].insertValue(lift(ArraysTestEntity(None))))
     case class E(texts: List[String])
     ctx.run(querySchema[E]("ArraysTestEntity")).headOption.map(_.texts) mustBe Some(Nil)
diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcContextSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcContextSpec.scala
index 653f0f780..3a1f9014b 100644
--- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcContextSpec.scala
+++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcContextSpec.scala
@@ -57,7 +57,7 @@ class JdbcContextSpec extends Spec {
   "Insert with returning generated with single column table" in {
     ctx.run(qr4.delete)
     inline def insert = quote {
-      qr4.insert(lift(TestEntity4(0))).returningGenerated(_.i)
+      qr4.insertValue(lift(TestEntity4(0))).returningGenerated(_.i)
     }
     val inserted1 = ctx.run(insert)
 
@@ -72,7 +72,7 @@ class JdbcContextSpec extends Spec {
 
   "Insert with returning generated with single column table using query" in {
     ctx.run(qr5.delete)
-    val id = ctx.run(qr5.insert(lift(TestEntity5(0, "foo"))).returningGenerated(_.i))
+    val id = ctx.run(qr5.insertValue(lift(TestEntity5(0, "foo"))).returningGenerated(_.i))
     val id2 = ctx.run {
       qr5.insert(_.s -> "bar").returningGenerated(r => query[TestEntity5].filter(_.s == "foo").map(_.i).max)
     }.get
@@ -83,7 +83,7 @@ class JdbcContextSpec extends Spec {
     "with multiple columns" in {
       ctx.run(qr1.delete)
       val inserted = ctx.run {
-        qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => (r.i, r.s, r.o))
+        qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => (r.i, r.s, r.o))
       }
       (1, "foo", Some(123)) mustBe inserted
     }
@@ -91,7 +91,7 @@ class JdbcContextSpec extends Spec {
     "with multiple columns and operations" in {
       ctx.run(qr1.delete)
       val inserted = ctx.run {
-        qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => (r.i + 100, r.s, r.o.map(_ + 100)))
+        qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => (r.i + 100, r.s, r.o.map(_ + 100)))
       }
       (1 + 100, "foo", Some(123 + 100)) mustBe inserted
     }
@@ -102,7 +102,7 @@ class JdbcContextSpec extends Spec {
 
       ctx.run(qr1.delete)
       val inserted = ctx.run {
-        qr1.insert(lift(TestEntity("two", 36, 18L, Some(123), true))).returning(r =>
+        qr1.insertValue(lift(TestEntity("two", 36, 18L, Some(123), true))).returning(r =>
          (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i).map(_.s).max))
       }
       (36, "two_s", Some("foobar")) mustBe inserted
@@ -115,7 +115,7 @@ class JdbcContextSpec extends Spec {
      val value = "foobar"
      ctx.run(qr1.delete)
      val inserted = ctx.run {
-        qr1.insert(lift(TestEntity("two", 36, 18L, Some(123), true))).returning(r =>
+        qr1.insertValue(lift(TestEntity("two", 36, 18L, Some(123), true))).returning(r =>
          (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i && rr.s == lift(value)).map(_.s).max))
       }
       (36, "two_s", Some("foobar")) mustBe inserted
@@ -123,9 +123,9 @@ class JdbcContextSpec extends Spec {
 
     "with multiple columns and query - same table" in {
       ctx.run(qr1.delete)
-      ctx.run(qr1.insert(lift(TestEntity("one", 1, 18L, Some(1), true))))
+      ctx.run(qr1.insertValue(lift(TestEntity("one", 1, 18L, Some(1), true))))
       val inserted = ctx.run {
-        qr1.insert(lift(TestEntity("two", 2, 18L, Some(123), true))).returning(r =>
+        qr1.insertValue(lift(TestEntity("two", 2, 18L, Some(123), true))).returning(r =>
          (r.i, r.s + "_s", qr1.filter(rr => rr.o.exists(_ == r.i - 1)).map(_.s).max))
       }
       (2, "two_s", Some("one")) mustBe inserted
@@ -133,9 +133,9 @@ class JdbcContextSpec extends Spec {
 
     "with multiple columns and query embedded" in {
       ctx.run(qr1Emb.delete)
-      ctx.run(qr1Emb.insert(lift(TestEntityEmb(Emb("one", 1), 18L, Some(123)))))
+      ctx.run(qr1Emb.insertValue(lift(TestEntityEmb(Emb("one", 1), 18L, Some(123)))))
       val inserted = ctx.run {
-        qr1Emb.insert(lift(TestEntityEmb(Emb("two", 2), 18L, Some(123)))).returning(r =>
+        qr1Emb.insertValue(lift(TestEntityEmb(Emb("two", 2), 18L, Some(123)))).returning(r =>
          (r.emb.i, r.o))
       }
       (2, Some(123)) mustBe inserted
@@ -145,7 +145,7 @@ class JdbcContextSpec extends Spec {
       case class Return(id: Int, str: String, opt: Option[Int])
       ctx.run(qr1.delete)
       val inserted = ctx.run {
-        qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => Return(r.i, r.s, r.o))
+        qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => Return(r.i, r.s, r.o))
       }
       Return(1, "foo", Some(123)) mustBe inserted
     }
@@ -154,20 +154,20 @@ class JdbcContextSpec extends Spec {
   "update returning" - {
     "with multiple columns" in {
       ctx.run(qr1.delete)
-      ctx.run(qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true))))
+      ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))))
 
       val updated = ctx.run {
-        qr1.update(lift(TestEntity("bar", 2, 42L, Some(321), true))).returning(r => (r.i, r.s, r.o))
+        qr1.updateValue(lift(TestEntity("bar", 2, 42L, Some(321), true))).returning(r => (r.i, r.s, r.o))
       }
       (2, "bar", Some(321)) mustBe updated
     }
 
     "with multiple columns and operations" in {
       ctx.run(qr1.delete)
-      ctx.run(qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true))))
+      ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))))
 
       val updated = ctx.run {
-        qr1.update(lift(TestEntity("bar", 2, 42L, Some(321), true))).returning(r => (r.i + 100, r.s, r.o.map(_ + 100)))
+        qr1.updateValue(lift(TestEntity("bar", 2, 42L, Some(321), true))).returning(r => (r.i + 100, r.s, r.o.map(_ + 100)))
       }
       (2 + 100, "bar", Some(321 + 100)) mustBe updated
     }
@@ -177,10 +177,10 @@ class JdbcContextSpec extends Spec {
       ctx.run(qr2.insert(_.i -> 36, _.l -> 0L, _.s -> "foobar"))
 
       ctx.run(qr1.delete)
-      ctx.run(qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true))))
+      ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))))
 
       val updated = ctx.run {
-        qr1.update(lift(TestEntity("bar", 36, 42L, Some(321), true))).returning(r =>
+        qr1.updateValue(lift(TestEntity("bar", 36, 42L, Some(321), true))).returning(r =>
          (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i).map(_.s).max))
       }
       (36, "bar_s", Some("foobar")) mustBe updated
@@ -192,10 +192,10 @@ class JdbcContextSpec extends Spec {
      val value = "foobar"
 
      ctx.run(qr1.delete)
-      ctx.run(qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true))))
+      ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))))
 
      val updated = ctx.run {
qr1.update(lift(TestEntity("bar", 36, 42L, Some(321), true))).returning(r => + qr1.updateValue(lift(TestEntity("bar", 36, 42L, Some(321), true))).returning(r => (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i && rr.s == lift(value)).map(_.s).max)) } (36, "bar_s", Some("foobar")) mustBe updated @@ -203,10 +203,10 @@ class JdbcContextSpec extends Spec { "with multiple columns and query - same table" in { ctx.run(qr1.delete) - ctx.run(qr1.insert(lift(TestEntity("one", 1, 18L, Some(1), true)))) + ctx.run(qr1.insertValue(lift(TestEntity("one", 1, 18L, Some(1), true)))) val updated = ctx.run { - qr1.update(lift(TestEntity("two", 2, 18L, Some(123), true))).returning(r => + qr1.updateValue(lift(TestEntity("two", 2, 18L, Some(123), true))).returning(r => (r.i, r.s + "_s", qr1.filter(rr => rr.o.exists(_ == r.i - 1)).map(_.s).max)) } (2, "two_s", Some("one")) mustBe updated @@ -214,10 +214,10 @@ class JdbcContextSpec extends Spec { "with multiple columns and query embedded" in { ctx.run(qr1Emb.delete) - ctx.run(qr1Emb.insert(lift(TestEntityEmb(Emb("one", 1), 18L, Some(123))))) + ctx.run(qr1Emb.insertValue(lift(TestEntityEmb(Emb("one", 1), 18L, Some(123))))) val updated = ctx.run { - qr1Emb.update(lift(TestEntityEmb(Emb("two", 2), 18L, Some(123)))).returning(r => (r.emb.i, r.o)) + qr1Emb.updateValue(lift(TestEntityEmb(Emb("two", 2), 18L, Some(123)))).returning(r => (r.emb.i, r.o)) } (2, Some(123)) mustBe updated } @@ -225,10 +225,10 @@ class JdbcContextSpec extends Spec { "with multiple columns - case class" in { case class Return(id: Int, str: String, opt: Option[Int]) ctx.run(qr1.delete) - ctx.run(qr1.insert(lift(TestEntity("one", 1, 18L, Some(1), true)))) + ctx.run(qr1.insertValue(lift(TestEntity("one", 1, 18L, Some(1), true)))) val updated = ctx.run { - qr1.update(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => Return(r.i, r.s, r.o)) + qr1.updateValue(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => Return(r.i, r.s, r.o)) } Return(1, "foo", Some(123)) mustBe updated } @@ -237,7 +237,7 @@ class JdbcContextSpec extends Spec { "delete returning" - { "with multiple columns" in { ctx.run(qr1.delete) - ctx.run(qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true)))) + ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true)))) val deleted = ctx.run { qr1.delete.returning(r => (r.i, r.s, r.o)) @@ -247,7 +247,7 @@ class JdbcContextSpec extends Spec { "with multiple columns and operations" in { ctx.run(qr1.delete) - ctx.run(qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true)))) + ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true)))) val deleted = ctx.run { qr1.delete.returning(r => (r.i + 100, r.s, r.o.map(_ + 100))) @@ -260,7 +260,7 @@ class JdbcContextSpec extends Spec { ctx.run(qr2.insert(_.i -> 1, _.l -> 0L, _.s -> "foobar")) ctx.run(qr1.delete) - ctx.run(qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true)))) + ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true)))) val deleted = ctx.run { qr1.delete.returning(r => @@ -275,7 +275,7 @@ class JdbcContextSpec extends Spec { val value = "foobar" ctx.run(qr1.delete) - ctx.run(qr1.insert(lift(TestEntity("foo", 1, 18L, Some(123), true)))) + ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true)))) val deleted = ctx.run { qr1.delete.returning(r => @@ -286,7 +286,7 @@ class JdbcContextSpec extends Spec { "with multiple columns and query - same table" in { ctx.run(qr1.delete) - ctx.run(qr1.insert(lift(TestEntity("one", 2, 18L, Some(1), 
+      ctx.run(qr1.insertValue(lift(TestEntity("one", 2, 18L, Some(1), true))))
 
       val deleted = ctx.run {
         qr1.delete.returning(r =>
@@ -297,7 +297,7 @@ class JdbcContextSpec extends Spec {
 
     "with multiple columns and query embedded" in {
       ctx.run(qr1Emb.delete)
-      ctx.run(qr1Emb.insert(lift(TestEntityEmb(Emb("one", 1), 18L, Some(123)))))
+      ctx.run(qr1Emb.insertValue(lift(TestEntityEmb(Emb("one", 1), 18L, Some(123)))))
 
       val deleted = ctx.run {
         qr1Emb.delete.returning(r => (r.emb.i, r.o))
@@ -308,7 +308,7 @@ class JdbcContextSpec extends Spec {
     "with multiple columns - case class" in {
       case class Return(id: Int, str: String, opt: Option[Int])
       ctx.run(qr1.delete)
-      ctx.run(qr1.insert(lift(TestEntity("one", 1, 18L, Some(123), true))))
+      ctx.run(qr1.insertValue(lift(TestEntity("one", 1, 18L, Some(123), true))))
 
       val deleted = ctx.run {
         qr1.delete.returning(r => Return(r.i, r.s, r.o))
diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcEncodingSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcEncodingSpec.scala
index afb93dc42..fa0e3767b 100644
--- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcEncodingSpec.scala
+++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcEncodingSpec.scala
@@ -43,10 +43,10 @@ class JdbcEncodingSpec extends EncodingSpec {
     val res: (List[EncodingTestEntity], List[EncodingTestEntity]) = {
       val steps = {
         testContext.run(query[EncodingTestEntity].delete)
-        testContext.run(query[EncodingTestEntity].insert(lift(e1)))
+        testContext.run(query[EncodingTestEntity].insertValue(lift(e1)))
         val withoutNull = testContext.run(query[EncodingTestEntity])
         testContext.run(query[EncodingTestEntity].delete)
-        testContext.run(query[EncodingTestEntity].insert(lift(e2)))
+        testContext.run(query[EncodingTestEntity].insertValue(lift(e2)))
         val withNull = testContext.run(query[EncodingTestEntity])
         (withoutNull, withNull)
       }
diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ProductJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ProductJdbcSpec.scala
index cce26fb1d..d82286ad6 100644
--- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ProductJdbcSpec.scala
+++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ProductJdbcSpec.scala
@@ -56,7 +56,7 @@ class ProductJdbcSpec extends ProductSpec {
 
   "Single product insert with a method quotation" in {
     val prd = Product(0L, "test3", 3L)
-    val inserted = testContext.run(productInsert(lift(prd)))
+    val inserted = testContext.run(productInsert (lift(prd)))
     val returnedProduct = testContext.run(productById(lift(inserted))).head
     returnedProduct.description mustEqual "test3"
     returnedProduct.sku mustEqual 3L
diff --git a/quill-sql-tests/src/test/scala/io/getquill/ported/quotationspec/ActionTest.scala b/quill-sql-tests/src/test/scala/io/getquill/ported/quotationspec/ActionTest.scala
index bd01bd577..7696c2f5b 100644
--- a/quill-sql-tests/src/test/scala/io/getquill/ported/quotationspec/ActionTest.scala
+++ b/quill-sql-tests/src/test/scala/io/getquill/ported/quotationspec/ActionTest.scala
@@ -36,7 +36,7 @@ class ActionTest extends Spec with TestEntities with Inside {
     }
     "case class" in {
       inline def q = quote {
-        (t: TestEntity) => qr1.update(t)
+        (t: TestEntity) => qr1.updateValue(t)
       }
       val n = quote {
         (t: TestEntity) =>
@@ -74,7 +74,7 @@ class ActionTest extends Spec with TestEntities with Inside {
     }
     "case class" in {
       inline def q = quote {
-        (t: TestEntity) => qr1.insert(t)
+        (t: TestEntity) => qr1.insertValue(t)
       }
       val n = quote {
         (t: TestEntity) =>
diff --git a/quill-sql-tests/src/test/scala/io/getquill/ported/sqlidiomspec/ActionSpec.scala b/quill-sql-tests/src/test/scala/io/getquill/ported/sqlidiomspec/ActionSpec.scala
index b8796b23d..123009c55 100644
--- a/quill-sql-tests/src/test/scala/io/getquill/ported/sqlidiomspec/ActionSpec.scala
+++ b/quill-sql-tests/src/test/scala/io/getquill/ported/sqlidiomspec/ActionSpec.scala
@@ -46,9 +46,9 @@ class ActionSpec extends Spec {
       val groups = testContext.run(
         liftQuery(List(v)).foreach(v => query[TestEntity].insert(v))
-      ).groups mustEqual 
+      ).groups mustEqual
         List((
-          "INSERT INTO TestEntity (s,i,l,o,b) VALUES (?, ?, ?, ?, ?)", 
+          "INSERT INTO TestEntity (s,i,l,o,b) VALUES (?, ?, ?, ?, ?)",
          List(Row("_1" -> "s", "_2" -> 1, "_3" -> 2L, "_4" -> Some("_1" -> 1), "_5" -> true))
        )
      )
@@ -90,14 +90,14 @@ class ActionSpec extends Spec {
     "returning" in testContext.withDialect(MirrorSqlDialectWithReturnMulti) { ctx =>
       import ctx._
       val q = quote {
-        query[TestEntity].insert(lift(TestEntity("s", 1, 2L, Some(1), true))).returning(_.l)
+        query[TestEntity].insertValue(lift(TestEntity("s", 1, 2L, Some(1), true))).returning(_.l)
       }
       val run = ctx.run(q).string mustEqual
         "INSERT INTO TestEntity (s,i,l,o,b) VALUES (?, ?, ?, ?, ?)"
     }
     "returning generated" in {
       val q = quote {
-        query[TestEntity].insert(lift(TestEntity("s", 1, 2L, Some(1), true))).returningGenerated(_.l)
+        query[TestEntity].insertValue(lift(TestEntity("s", 1, 2L, Some(1), true))).returningGenerated(_.l)
       }
       val run = testContext.run(q).string mustEqual
         "INSERT INTO TestEntity (s,i,o,b) VALUES (?, ?, ?, ?)"
@@ -105,14 +105,14 @@ class ActionSpec extends Spec {
     "returning with single column table" in testContext.withDialect(MirrorSqlDialectWithReturnMulti) { ctx =>
       import ctx._
       val q = quote {
-        qr4.insert(lift(TestEntity4(0))).returning(_.i)
+        qr4.insertValue(lift(TestEntity4(0))).returning(_.i)
       }
       ctx.run(q).string mustEqual
         "INSERT INTO TestEntity4 (i) VALUES (?)"
     }
     "returning generated with single column table" in {
       val q = quote {
-        qr4.insert(lift(TestEntity4(0))).returningGenerated(_.i)
+        qr4.insertValue(lift(TestEntity4(0))).returningGenerated(_.i)
       }
       testContext.run(q).string mustEqual
         "INSERT INTO TestEntity4 DEFAULT VALUES"
diff --git a/quill-sql/src/main/scala/io/getquill/Dsl.scala b/quill-sql/src/main/scala/io/getquill/Dsl.scala
index 43db883d1..4e6ae764c 100644
--- a/quill-sql/src/main/scala/io/getquill/Dsl.scala
+++ b/quill-sql/src/main/scala/io/getquill/Dsl.scala
@@ -53,7 +53,7 @@ trait Dsl extends QuoteDsl with QueryDsl with MetaDsl
 
 trait MetaDsl extends QueryDsl {
-  inline def schemaMeta[T](inline entity: String, inline columns: (T => (Any, String))*): SchemaMeta[T] = 
+  inline def schemaMeta[T](inline entity: String, inline columns: (T => (Any, String))*): SchemaMeta[T] =
     ${ SchemaMetaMacro[T]('this, 'entity, 'columns) }
 
   inline def queryMeta[T, R](inline expand: Quoted[Query[T] => Query[R]])(inline extract: R => T): QueryMeta[T, R] =
@@ -100,14 +100,14 @@ trait QuoteDsl {
 
  // Doing: val p = quote { query[Person] }
  // and then doing: val q = quote { p.insert(_.name -> "blah") }
-  // or then doing: val q = quote { p.insert(lift(Person("Joe", 123))) }
+  // or then doing: val q = quote { p.insertValue(lift(Person("Joe", 123))) }
  // confuses Dotty since it needs to be `p.unquote` first and it can't determine which
  // variant of the function it is supposed to use. Therefore we have to explicitly define
  // these functions on the quoted variant of the EntityQuery for the types to infer correctly.
  // see ActionSpec.scala action->insert->simple, using nested select, etc... tets for examples of this
  extension [T](inline quotedEntity: Quoted[EntityQuery[T]])
-    inline def insert(inline f: (T => (Any, Any)), inline f2: (T => (Any, Any))*): Insert[T] = unquote[EntityQuery[T]](quotedEntity).insert(f, f2: _*)
-    inline def update(inline f: (T => (Any, Any)), inline f2: (T => (Any, Any))*): Update[T] = unquote[EntityQuery[T]](quotedEntity).update(f, f2: _*)
-    inline def insert(inline value: T): Insert[T] = unquote[EntityQuery[T]](quotedEntity).insert(value)
-    inline def update(inline value: T): Update[T] = unquote[EntityQuery[T]](quotedEntity).update(value)
+    inline def insert(inline f: (T => (Any, Any)), inline f2: (T => (Any, Any))*)(using DummyImplicit): Insert[T] = unquote[EntityQuery[T]](quotedEntity).insert(f, f2: _*)
+    inline def update(inline f: (T => (Any, Any)), inline f2: (T => (Any, Any))*)(using DummyImplicit): Update[T] = unquote[EntityQuery[T]](quotedEntity).update(f, f2: _*)
+    inline def insertValue(inline value: T): Insert[T] = unquote[EntityQuery[T]](quotedEntity).insert(value)
+    inline def updateValue(inline value: T): Update[T] = unquote[EntityQuery[T]](quotedEntity).update(value)
 }
diff --git a/quill-sql/src/main/scala/io/getquill/context/Context.scala b/quill-sql/src/main/scala/io/getquill/context/Context.scala
index 231993470..8f8a19572 100644
--- a/quill-sql/src/main/scala/io/getquill/context/Context.scala
+++ b/quill-sql/src/main/scala/io/getquill/context/Context.scala
@@ -40,7 +40,7 @@ object ExecutionType:
   case object Dynamic extends ExecutionType
   case object Static extends ExecutionType
 
-/** 
+/**
 * Metadata related to query execution. Note that AST should be lazy so as not to be evaluated
 * at runtime (which would happen with a by-value property since `{ ExecutionInfo(stuff, ast) } is spliced
 * into a query-execution site)
@@ -75,7 +75,7 @@ trait ProtoContext[Dialect <: io.getquill.idiom.Idiom, Naming <: io.getquill.Nam
   def idiom: Dialect
   def naming: Naming
-  
+
   val identityPrepare: Prepare = (Nil, _)
   val identityExtractor = identity[ResultRow] _
 
@@ -109,11 +109,11 @@ import io.getquill.generic.DecodeAlternate
 // TODO Needs to be portable (i.e. plug into current contexts when compiled with Scala 3)
 trait Context[Dialect <: io.getquill.idiom.Idiom, Naming <: io.getquill.NamingStrategy] extends ProtoContext[Dialect, Naming]
-with EncodingDsl 
+with EncodingDsl
 with Closeable
 { self =>
-  
+
   type DatasourceContextBehavior <: DatasourceContextInjection
 
   // TODO Go back to this when implementing GenericDecoder using standard method
   // inline def decode(t: T) = ${ DecodeAlternate[T, ResultRow] }
   implicit inline def dec[T]: GenericDecoder[ResultRow, T, DecodingType.Generic] = ${ GenericDecoder.summon[T, ResultRow] }
-  
+
   //def probe(statement: String): Try[_]
   // todo add 'prepare' i.e. encoders here
   //def executeAction[T](cql: String, prepare: Prepare = identityPrepare)(implicit executionContext: ExecutionContext): Result[RunActionResult]
 
-  inline def lift[T](inline runtimeValue: T): T = 
+  inline def lift[T](inline runtimeValue: T): T =
     ${ LiftMacro[T, PrepareRow]('runtimeValue) } // Needs PrepareRow in order to be able to summon encoders
 
-  inline def liftQuery[U[_] <: Iterable[_], T](inline runtimeValue: U[T]): Query[T] = 
+  inline def liftQuery[U[_] <: Iterable[_], T](inline runtimeValue: U[T]): Query[T] =
     ${ LiftQueryMacro[T, U, PrepareRow]('runtimeValue) }
 
   extension [T](inline q: Query[T]) {
@@ -176,7 +176,7 @@ with Closeable
     val ca = new ContextOperation[E, T, Dialect, Naming, PrepareRow, ResultRow, this.type, Result[RunActionReturningResult[T]]](self.idiom, self.naming) {
       def execute(sql: String, prepare: PrepareRow => (List[Any], PrepareRow), extraction: Extraction[ResultRow, T], executionInfo: ExecutionInfo) =
         // Need an extractor with special information that helps with the SQL returning specifics
-        val Extraction.Returning(extract, returningBehavior) = 
+        val Extraction.Returning(extract, returningBehavior) =
          // Just match on the type and throw an exception. The outside val right above will do the deconstruction
          extraction match
            // Can't check types inside Returning[_, _] during runtime due to type-erasure so scala will give a warning
@@ -207,7 +207,7 @@ with Closeable
       def execute(sql: String, prepares: List[PrepareRow => (List[Any], PrepareRow)], extraction: Extraction[ResultRow, T], executionInfo: ExecutionInfo) =
         val runContext = DatasourceContextInjectionMacro[DatasourceContextBehavior, DatasourceContext, this.type](context)
-        val Extraction.Returning(extract, returningBehavior) = 
+        val Extraction.Returning(extract, returningBehavior) =
          extraction match
            case _: Extraction.Returning[_, _] => extraction
            case _ => throw new IllegalArgumentException("Returning Extractor required")
diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/ArrayOpsSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/ArrayOpsSpec.scala
index 4b7be427e..8dcb0d4d7 100644
--- a/quill-sql/src/test/scala/io/getquill/context/sql/ArrayOpsSpec.scala
+++ b/quill-sql/src/test/scala/io/getquill/context/sql/ArrayOpsSpec.scala
@@ -21,7 +21,7 @@ trait ArrayOpsSpec extends Spec {
   //hello
   inline def entity = quote(query[ArrayOps])
   inline def insertEntries = quote {
-    liftQuery(entriesList).foreach(e => entity.insert(e))
+    liftQuery(entriesList).foreach(e => entity.insertValue(e))
   }
 
   object `contains` {
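
Editor's note (illustrative, not part of the diff above): the changes rename whole-entity writes from insert/update to insertValue/updateValue, while column-assignment writes keep their old names, disambiguated on quoted entities via a DummyImplicit parameter as shown in the Dsl.scala hunk. A minimal sketch of the resulting usage, assuming a mirror SQL context and a hypothetical Person entity (the names InsertValueSketch, Person, insertJoe, etc. are not taken from the changed files):

  // Sketch only; assumes a mirror context is available as in the ActionSpec tests.
  import io.getquill._

  case class Person(name: String, age: Int)

  object InsertValueSketch {
    val ctx = new SqlMirrorContext(MirrorSqlDialect, Literal)
    import ctx._

    // Whole-entity writes now spell out insertValue / updateValue:
    inline def insertJoe = quote(query[Person].insertValue(lift(Person("Joe", 123))))
    inline def updateJoe = quote(query[Person].updateValue(lift(Person("Joe", 124))))

    // Column-assignment writes keep the original insert / update names:
    inline def insertJill = quote(query[Person].insert(_.name -> "Jill", _.age -> 30))

    def main(args: Array[String]): Unit =
      // The mirror context echoes the generated SQL rather than hitting a database,
      // e.g. roughly "INSERT INTO Person (name,age) VALUES (?, ?)" for insertJoe.
      println(ctx.run(insertJoe).string)
  }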